# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmark for KPL implementation of categorical cross hash columns with dense inputs."""
# pylint: disable=g-direct-tensorflow-import
import keras
from keras.layers.preprocessing import hashed_crossing
from keras.layers.preprocessing.benchmarks import feature_column_benchmark as fc_bm
import tensorflow.compat.v2 as tf
from tensorflow.python.eager.def_function import function as tf_function
NUM_REPEATS = 10
BATCH_SIZES = [32, 256]
def embedding_varlen(batch_size):
"""Benchmark a variable-length embedding."""
# Data and constants.
num_buckets = 10000
data_a = tf.random.uniform(shape=(batch_size * NUM_REPEATS, 1),
maxval=32768,
dtype=tf.int64)
data_b = tf.strings.as_string(data_a)
# Keras implementation
input_1 = keras.Input(shape=(1,), name="data_a", dtype=tf.int64)
input_2 = keras.Input(shape=(1,), name="data_b", dtype=tf.string)
outputs = hashed_crossing.HashedCrossing(num_buckets)([input_1, input_2])
model = keras.Model([input_1, input_2], outputs)
# FC implementation
fc = tf.feature_column.crossed_column(["data_a", "data_b"], num_buckets)
# Wrap the FC implementation in a tf.function for a fair comparison
@tf_function()
def fc_fn(tensors):
fc.transform_feature(
tf.__internal__.feature_column.FeatureTransformationCache(tensors),
None)
# Benchmark runs
keras_data = {
"data_a": data_a,
"data_b": data_b,
}
k_avg_time = fc_bm.run_keras(keras_data, model, batch_size, NUM_REPEATS)
fc_data = {
"data_a": data_a,
"data_b": data_b,
}
fc_avg_time = fc_bm.run_fc(fc_data, fc_fn, batch_size, NUM_REPEATS)
return k_avg_time, fc_avg_time
class BenchmarkLayer(fc_bm.LayerBenchmark):
"""Benchmark the layer forward pass."""
def benchmark_layer(self):
for batch in BATCH_SIZES:
name = "hashed_cross|dense|batch_%s" % batch
k_time, f_time = embedding_varlen(batch_size=batch)
self.report(name, k_time, f_time, NUM_REPEATS)
if __name__ == "__main__":
tf.test.main()
Source: keras-team/keras | keras/layers/preprocessing/benchmarks/hashed_crossing_benchmark.py | Python | apache-2.0 | 2,754 bytes
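For reference, a minimal sketch of the layer benchmarked above (assuming TF 2.x with the Keras preprocessing layers installed; num_bins=8 is an arbitrary illustration value): HashedCrossing hashes each co-occurring pair of feature values into a fixed number of buckets, which is what both the Keras and feature-column paths compute.

import tensorflow.compat.v2 as tf
from keras.layers.preprocessing import hashed_crossing

# Cross an int feature with a string feature into 8 hash buckets.
layer = hashed_crossing.HashedCrossing(num_bins=8)
data_a = tf.constant([[1], [2]], dtype=tf.int64)
data_b = tf.constant([["x"], ["y"]])
print(layer([data_a, data_b]))  # int64 bucket ids in [0, 8)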
from os.path import join
from tempfile import mkdtemp
import unittest
from shutil import rmtree
import pandas as pd
from feagen.data_wrappers.pandas_hdf import get_shape_from_pandas_hdf_storer
class Test(unittest.TestCase):
def setUp(self):
self.test_output_dir = mkdtemp(prefix="feagen_test_output_")
pandas_hdf_path = join(self.test_output_dir, "pandas.h5")
self.hdf_store = pd.HDFStore(pandas_hdf_path)
def tearDown(self):
self.hdf_store.close()
rmtree(self.test_output_dir)
def test_get_shape_from_pandas_hdf_storer_df(self):
idx = [1, 2, 3, 5, 4]
col = [10, 9, 6, 7]
df = pd.DataFrame(0, index=idx, columns=col)
self.hdf_store['test'] = df
shape = get_shape_from_pandas_hdf_storer(
self.hdf_store.get_storer('test'))
assert shape == (5, 4)
def test_get_shape_from_pandas_hdf_storer_df_table(self):
idx = [1, 2, 3, 5, 4]
col = [10, 9, 6, 7]
df = pd.DataFrame(0, index=idx, columns=col)
self.hdf_store.put('test', df, format='table')
shape = get_shape_from_pandas_hdf_storer(
self.hdf_store.get_storer('test'))
assert shape == (5, 4)
def test_get_shape_from_pandas_hdf_storer_df_m_idx(self):
idx = pd.MultiIndex.from_product([[0, 1], [0, 1, 2]])
col = [10, 9, 6, 7]
df = pd.DataFrame(0, index=idx, columns=col)
self.hdf_store['test'] = df
shape = get_shape_from_pandas_hdf_storer(
self.hdf_store.get_storer('test'))
assert shape == (6, 4)
def test_get_shape_from_pandas_hdf_storer_df_m_idx_table(self):
idx = pd.MultiIndex.from_product([[0, 1], [0, 1, 2]])
col = [10, 9, 6, 7]
df = pd.DataFrame(0, index=idx, columns=col)
self.hdf_store.put('test', df, format='table')
shape = get_shape_from_pandas_hdf_storer(
self.hdf_store.get_storer('test'))
assert shape == (6, 4)
def test_get_shape_from_pandas_hdf_storer_df_m_col(self):
idx = [10, 9, 6, 7]
col = pd.MultiIndex.from_product([[0, 1], [0, 1, 2]])
df = pd.DataFrame(0, index=idx, columns=col)
self.hdf_store['test'] = df
shape = get_shape_from_pandas_hdf_storer(
self.hdf_store.get_storer('test'))
# TODO: change to (4, 6)
assert shape is None
def test_get_shape_from_pandas_hdf_storer_df_m_col_table(self):
idx = [10, 9, 6, 7]
col = pd.MultiIndex.from_product([[0, 1], [0, 1, 2]])
df = pd.DataFrame(0, index=idx, columns=col)
self.hdf_store.put('test', df, format='table')
shape = get_shape_from_pandas_hdf_storer(
self.hdf_store.get_storer('test'))
assert shape == (4, 6)
def test_get_shape_from_pandas_hdf_storer_df_m_idx_m_col(self):
idx = pd.MultiIndex.from_product([[0, 1], [0, 1, 2]])
col = pd.MultiIndex.from_product([[0, 1], [0, 1]])
df = pd.DataFrame(0, index=idx, columns=col)
self.hdf_store['test'] = df
shape = get_shape_from_pandas_hdf_storer(
self.hdf_store.get_storer('test'))
# TODO: change to (6, 4)
assert shape is None
def test_get_shape_from_pandas_hdf_storer_s(self):
idx = [0, 2, 1, 4, 3]
s = pd.Series(0, index=idx)
self.hdf_store['test'] = s
shape = get_shape_from_pandas_hdf_storer(
self.hdf_store.get_storer('test'))
assert shape == (5,)
def test_get_shape_from_pandas_hdf_storer_s_table(self):
idx = [0, 2, 1, 4, 3]
s = pd.Series(0, index=idx)
self.hdf_store.put('test', s, format='table')
shape = get_shape_from_pandas_hdf_storer(
self.hdf_store.get_storer('test'))
assert shape == (5,)
def test_get_shape_from_pandas_hdf_storer_s_m_idx(self):
idx = pd.MultiIndex.from_product([[0, 1], [0, 1, 2]])
s = pd.Series(0, index=idx)
self.hdf_store['test'] = s
shape = get_shape_from_pandas_hdf_storer(
self.hdf_store.get_storer('test'))
assert shape == (6,)
def test_get_shape_from_pandas_hdf_storer_s_m_idx_table(self):
idx = pd.MultiIndex.from_product([[0, 1], [0, 1, 2]])
s = pd.Series(0, index=idx)
self.hdf_store.put('test', s, format='table')
shape = get_shape_from_pandas_hdf_storer(
self.hdf_store.get_storer('test'))
assert shape == (6,)
Source: ianlini/feagen | feagen/data_wrappers/tests/test_pandas_hdf.py | Python | bsd-2-clause | 4,476 bytes
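The helper exercised by these tests can also be called directly; a minimal usage sketch (assuming feagen and PyTables are installed; the file path is illustrative), mirroring the fixed-format DataFrame case above:

import pandas as pd
from feagen.data_wrappers.pandas_hdf import get_shape_from_pandas_hdf_storer

with pd.HDFStore("/tmp/example.h5") as store:
    store["df"] = pd.DataFrame(0, index=[1, 2, 3], columns=[10, 9])
    # Per the tests, the shape comes back as (rows, cols).
    print(get_shape_from_pandas_hdf_storer(store.get_storer("df")))  # (3, 2)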
# -*- coding: utf-8 -*-
import pytest
import pandas as pd
import warnings
class TestConfig(object):
@classmethod
def setup_class(cls):
from copy import deepcopy
cls.cf = pd.core.config
cls.gc = deepcopy(getattr(cls.cf, '_global_config'))
cls.do = deepcopy(getattr(cls.cf, '_deprecated_options'))
cls.ro = deepcopy(getattr(cls.cf, '_registered_options'))
def setup_method(self, method):
setattr(self.cf, '_global_config', {})
setattr(self.cf, 'options', self.cf.DictWrapper(
self.cf._global_config))
setattr(self.cf, '_deprecated_options', {})
setattr(self.cf, '_registered_options', {})
# Our test fixture in conftest.py sets "chained_assignment"
# to "raise" only after all test methods have been setup.
# However, after this setup, there is no longer any
# "chained_assignment" option, so re-register it.
self.cf.register_option('chained_assignment', 'raise')
def teardown_method(self, method):
setattr(self.cf, '_global_config', self.gc)
setattr(self.cf, '_deprecated_options', self.do)
setattr(self.cf, '_registered_options', self.ro)
def test_api(self):
# the pandas object exposes the user API
assert hasattr(pd, 'get_option')
assert hasattr(pd, 'set_option')
assert hasattr(pd, 'reset_option')
assert hasattr(pd, 'describe_option')
def test_is_one_of_factory(self):
v = self.cf.is_one_of_factory([None, 12])
v(12)
v(None)
pytest.raises(ValueError, v, 1.1)
def test_register_option(self):
self.cf.register_option('a', 1, 'doc')
# can't register an already registered option
pytest.raises(KeyError, self.cf.register_option, 'a', 1, 'doc')
# can't register an already registered option
pytest.raises(KeyError, self.cf.register_option, 'a.b.c.d1', 1,
'doc')
pytest.raises(KeyError, self.cf.register_option, 'a.b.c.d2', 1,
'doc')
# no python keywords
pytest.raises(ValueError, self.cf.register_option, 'for', 0)
pytest.raises(ValueError, self.cf.register_option, 'a.for.b', 0)
# must be valid identifier (ensure attribute access works)
pytest.raises(ValueError, self.cf.register_option,
'Oh my Goddess!', 0)
# we can register options several levels deep
# without predefining the intermediate steps
# and we can define differently named options
# in the same namespace
self.cf.register_option('k.b.c.d1', 1, 'doc')
self.cf.register_option('k.b.c.d2', 1, 'doc')
def test_describe_option(self):
self.cf.register_option('a', 1, 'doc')
self.cf.register_option('b', 1, 'doc2')
self.cf.deprecate_option('b')
self.cf.register_option('c.d.e1', 1, 'doc3')
self.cf.register_option('c.d.e2', 1, 'doc4')
self.cf.register_option('f', 1)
self.cf.register_option('g.h', 1)
self.cf.register_option('k', 2)
self.cf.deprecate_option('g.h', rkey="k")
self.cf.register_option('l', "foo")
# non-existent keys raise KeyError
pytest.raises(KeyError, self.cf.describe_option, 'no.such.key')
# we can get the description for any key we registered
assert 'doc' in self.cf.describe_option('a', _print_desc=False)
assert 'doc2' in self.cf.describe_option('b', _print_desc=False)
assert 'precated' in self.cf.describe_option('b', _print_desc=False)
assert 'doc3' in self.cf.describe_option('c.d.e1', _print_desc=False)
assert 'doc4' in self.cf.describe_option('c.d.e2', _print_desc=False)
# if no doc is specified we get a default message
# saying "description not available"
assert 'vailable' in self.cf.describe_option('f', _print_desc=False)
assert 'vailable' in self.cf.describe_option('g.h', _print_desc=False)
assert 'precated' in self.cf.describe_option('g.h', _print_desc=False)
assert 'k' in self.cf.describe_option('g.h', _print_desc=False)
# default is reported
assert 'foo' in self.cf.describe_option('l', _print_desc=False)
# current value is reported
assert 'bar' not in self.cf.describe_option('l', _print_desc=False)
self.cf.set_option("l", "bar")
assert 'bar' in self.cf.describe_option('l', _print_desc=False)
def test_case_insensitive(self):
self.cf.register_option('KanBAN', 1, 'doc')
assert 'doc' in self.cf.describe_option('kanbaN', _print_desc=False)
assert self.cf.get_option('kanBaN') == 1
self.cf.set_option('KanBan', 2)
assert self.cf.get_option('kAnBaN') == 2
# gets of non-existent keys fail
pytest.raises(KeyError, self.cf.get_option, 'no_such_option')
self.cf.deprecate_option('KanBan')
assert self.cf._is_deprecated('kAnBaN')
def test_get_option(self):
self.cf.register_option('a', 1, 'doc')
self.cf.register_option('b.c', 'hullo', 'doc2')
self.cf.register_option('b.b', None, 'doc2')
# gets of existing keys succeed
assert self.cf.get_option('a') == 1
assert self.cf.get_option('b.c') == 'hullo'
assert self.cf.get_option('b.b') is None
# gets of non-existent keys fail
pytest.raises(KeyError, self.cf.get_option, 'no_such_option')
def test_set_option(self):
self.cf.register_option('a', 1, 'doc')
self.cf.register_option('b.c', 'hullo', 'doc2')
self.cf.register_option('b.b', None, 'doc2')
assert self.cf.get_option('a') == 1
assert self.cf.get_option('b.c') == 'hullo'
assert self.cf.get_option('b.b') is None
self.cf.set_option('a', 2)
self.cf.set_option('b.c', 'wurld')
self.cf.set_option('b.b', 1.1)
assert self.cf.get_option('a') == 2
assert self.cf.get_option('b.c') == 'wurld'
assert self.cf.get_option('b.b') == 1.1
pytest.raises(KeyError, self.cf.set_option, 'no.such.key', None)
def test_set_option_empty_args(self):
pytest.raises(ValueError, self.cf.set_option)
def test_set_option_uneven_args(self):
pytest.raises(ValueError, self.cf.set_option, 'a.b', 2, 'b.c')
def test_set_option_invalid_single_argument_type(self):
pytest.raises(ValueError, self.cf.set_option, 2)
def test_set_option_multiple(self):
self.cf.register_option('a', 1, 'doc')
self.cf.register_option('b.c', 'hullo', 'doc2')
self.cf.register_option('b.b', None, 'doc2')
assert self.cf.get_option('a') == 1
assert self.cf.get_option('b.c') == 'hullo'
assert self.cf.get_option('b.b') is None
self.cf.set_option('a', '2', 'b.c', None, 'b.b', 10.0)
assert self.cf.get_option('a') == '2'
assert self.cf.get_option('b.c') is None
assert self.cf.get_option('b.b') == 10.0
def test_validation(self):
self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int)
self.cf.register_option('b.c', 'hullo', 'doc2',
validator=self.cf.is_text)
pytest.raises(ValueError, self.cf.register_option, 'a.b.c.d2',
'NO', 'doc', validator=self.cf.is_int)
self.cf.set_option('a', 2) # int is_int
self.cf.set_option('b.c', 'wurld') # str is_str
pytest.raises(
ValueError, self.cf.set_option, 'a', None) # None not is_int
pytest.raises(ValueError, self.cf.set_option, 'a', 'ab')
pytest.raises(ValueError, self.cf.set_option, 'b.c', 1)
validator = self.cf.is_one_of_factory([None, self.cf.is_callable])
self.cf.register_option('b', lambda: None, 'doc',
validator=validator)
self.cf.set_option('b', '%.1f'.format) # Formatter is callable
self.cf.set_option('b', None) # Formatter is none (default)
pytest.raises(ValueError, self.cf.set_option, 'b', '%.1f')
def test_reset_option(self):
self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int)
self.cf.register_option('b.c', 'hullo', 'doc2',
validator=self.cf.is_str)
assert self.cf.get_option('a') == 1
assert self.cf.get_option('b.c') == 'hullo'
self.cf.set_option('a', 2)
self.cf.set_option('b.c', 'wurld')
assert self.cf.get_option('a') == 2
assert self.cf.get_option('b.c') == 'wurld'
self.cf.reset_option('a')
assert self.cf.get_option('a') == 1
assert self.cf.get_option('b.c') == 'wurld'
self.cf.reset_option('b.c')
assert self.cf.get_option('a') == 1
assert self.cf.get_option('b.c') == 'hullo'
def test_reset_option_all(self):
self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int)
self.cf.register_option('b.c', 'hullo', 'doc2',
validator=self.cf.is_str)
assert self.cf.get_option('a') == 1
assert self.cf.get_option('b.c') == 'hullo'
self.cf.set_option('a', 2)
self.cf.set_option('b.c', 'wurld')
assert self.cf.get_option('a') == 2
assert self.cf.get_option('b.c') == 'wurld'
self.cf.reset_option("all")
assert self.cf.get_option('a') == 1
assert self.cf.get_option('b.c') == 'hullo'
def test_deprecate_option(self):
# we can deprecate non-existent options
self.cf.deprecate_option('foo')
assert self.cf._is_deprecated('foo')
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
try:
self.cf.get_option('foo')
except KeyError:
pass
else:
self.fail("Nonexistent option didn't raise KeyError")
assert len(w) == 1 # should have raised one warning
assert 'deprecated' in str(w[-1]) # we get the default message
self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int)
self.cf.register_option('b.c', 'hullo', 'doc2')
self.cf.register_option('foo', 'hullo', 'doc2')
self.cf.deprecate_option('a', removal_ver='nifty_ver')
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.cf.get_option('a')
assert len(w) == 1 # should have raised one warning
assert 'eprecated' in str(w[-1]) # we get the default message
assert 'nifty_ver' in str(w[-1]) # with the removal_ver quoted
pytest.raises(
KeyError, self.cf.deprecate_option, 'a') # can't depr. twice
self.cf.deprecate_option('b.c', 'zounds!')
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.cf.get_option('b.c')
assert len(w) == 1 # should have raised one warning
assert 'zounds!' in str(w[-1]) # we get the custom message
# test rerouting keys
self.cf.register_option('d.a', 'foo', 'doc2')
self.cf.register_option('d.dep', 'bar', 'doc2')
assert self.cf.get_option('d.a') == 'foo'
assert self.cf.get_option('d.dep') == 'bar'
self.cf.deprecate_option('d.dep', rkey='d.a') # reroute d.dep to d.a
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
assert self.cf.get_option('d.dep') == 'foo'
assert len(w) == 1 # should have raised one warning
assert 'eprecated' in str(w[-1]) # we get the custom message
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.cf.set_option('d.dep', 'baz') # should overwrite "d.a"
assert len(w) == 1 # should have raised one warning
assert 'eprecated' in str(w[-1]) # we get the custom message
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
assert self.cf.get_option('d.dep') == 'baz'
assert len(w) == 1 # should have raised one warning
assert 'eprecated' in str(w[-1]) # we get the custom message
def test_config_prefix(self):
with self.cf.config_prefix("base"):
self.cf.register_option('a', 1, "doc1")
self.cf.register_option('b', 2, "doc2")
assert self.cf.get_option('a') == 1
assert self.cf.get_option('b') == 2
self.cf.set_option('a', 3)
self.cf.set_option('b', 4)
assert self.cf.get_option('a') == 3
assert self.cf.get_option('b') == 4
assert self.cf.get_option('base.a') == 3
assert self.cf.get_option('base.b') == 4
assert 'doc1' in self.cf.describe_option('base.a', _print_desc=False)
assert 'doc2' in self.cf.describe_option('base.b', _print_desc=False)
self.cf.reset_option('base.a')
self.cf.reset_option('base.b')
with self.cf.config_prefix("base"):
assert self.cf.get_option('a') == 1
assert self.cf.get_option('b') == 2
def test_callback(self):
k = [None]
v = [None]
def callback(key):
k.append(key)
v.append(self.cf.get_option(key))
self.cf.register_option('d.a', 'foo', cb=callback)
self.cf.register_option('d.b', 'foo', cb=callback)
del k[-1], v[-1]
self.cf.set_option("d.a", "fooz")
assert k[-1] == "d.a"
assert v[-1] == "fooz"
del k[-1], v[-1]
self.cf.set_option("d.b", "boo")
assert k[-1] == "d.b"
assert v[-1] == "boo"
del k[-1], v[-1]
self.cf.reset_option("d.b")
assert k[-1] == "d.b"
def test_set_ContextManager(self):
def eq(val):
assert self.cf.get_option("a") == val
self.cf.register_option('a', 0)
eq(0)
with self.cf.option_context("a", 15):
eq(15)
with self.cf.option_context("a", 25):
eq(25)
eq(15)
eq(0)
self.cf.set_option("a", 17)
eq(17)
def test_attribute_access(self):
holder = []
def f():
options.b = 1
def f2():
options.display = 1
def f3(key):
holder.append(True)
self.cf.register_option('a', 0)
self.cf.register_option('c', 0, cb=f3)
options = self.cf.options
assert options.a == 0
with self.cf.option_context("a", 15):
assert options.a == 15
options.a = 500
assert self.cf.get_option("a") == 500
self.cf.reset_option("a")
assert options.a == self.cf.get_option("a", 0)
pytest.raises(KeyError, f)
pytest.raises(KeyError, f2)
# make sure callback kicks when using this form of setting
options.c = 1
assert len(holder) == 1
def test_option_context_scope(self):
# Ensure that creating a context does not affect the existing
# environment as it is supposed to be used with the `with` statement.
# See https://github.com/pandas-dev/pandas/issues/8514
original_value = 60
context_value = 10
option_name = 'a'
self.cf.register_option(option_name, original_value)
# Ensure creating contexts didn't affect the current context.
ctx = self.cf.option_context(option_name, context_value)
assert self.cf.get_option(option_name) == original_value
# Ensure the correct value is available inside the context.
with ctx:
assert self.cf.get_option(option_name) == context_value
# Ensure the current context is reset
assert self.cf.get_option(option_name) == original_value
def test_dictwrapper_getattr(self):
options = self.cf.options
# GH 19789
pytest.raises(self.cf.OptionError, getattr, options, 'bananas')
assert not hasattr(options, 'bananas')
Source: louispotok/pandas | pandas/tests/test_config.py | Python | bsd-3-clause | 16,237 bytes
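These tests drive pandas' option machinery through the private pd.core.config module; the same machinery backs the public API asserted in test_api, e.g.:

import pandas as pd

pd.set_option("display.max_rows", 10)
assert pd.get_option("display.max_rows") == 10

# option_context restores the previous value on exit,
# matching test_option_context_scope above.
with pd.option_context("display.max_rows", 5):
    assert pd.get_option("display.max_rows") == 5
assert pd.get_option("display.max_rows") == 10

pd.reset_option("display.max_rows")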
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class AtomDft(MakefilePackage):
"""ATOM is a program for DFT calculations in atoms and pseudopotential
generation."""
homepage = "https://departments.icmab.es/leem/siesta/Pseudopotentials/"
url = "https://departments.icmab.es/leem/siesta/Pseudopotentials/Code/atom-4.2.6.tgz"
version('4.2.6', sha256='489f0d883af35525647a8b8f691e7845c92fe6b5a25b13e1ed368edfd0391ed2')
depends_on('libgridxc')
depends_on('xmlf90')
def edit(self, spec, prefix):
copy('arch.make.sample', 'arch.make')
@property
def build_targets(self):
return ['XMLF90_ROOT=%s' % self.spec['xmlf90'].prefix,
'GRIDXC_ROOT=%s' % self.spec['libgridxc'].prefix,
'FC=fc']
def install(self, spec, prefix):
mkdir(prefix.bin)
install('atm', prefix.bin)
Source: LLNL/spack | var/spack/repos/builtin/packages/atom-dft/package.py | Python | lgpl-2.1 | 1,050 bytes
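For context, Spack drives a MakefilePackage through edit(), then make with build_targets, then install(). A hypothetical recipe of the same shape (the package name, URL, checksum, and dependency are placeholders, not a real package):

from spack import *

class MyTool(MakefilePackage):
    """Hypothetical example mirroring the atom-dft recipe above."""

    homepage = "https://example.org/mytool"
    url = "https://example.org/mytool-1.0.tar.gz"

    version('1.0', sha256='0' * 64)  # placeholder checksum

    depends_on('zlib')

    def edit(self, spec, prefix):
        # Provide the makefile the build expects, as atom-dft does.
        copy('Makefile.sample', 'Makefile')

    @property
    def build_targets(self):
        return ['ZLIB_ROOT=%s' % self.spec['zlib'].prefix]

    def install(self, spec, prefix):
        mkdir(prefix.bin)
        install('mytool', prefix.bin)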
# -*- coding: utf-8 -*-
""" S3 SQL Forms
@copyright: 2012-15 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3SQLCustomForm",
"S3SQLDefaultForm",
"S3SQLSubFormLayout",
"S3SQLInlineComponent",
"S3SQLInlineComponentCheckbox",
"S3SQLInlineComponentMultiSelectWidget",
"S3SQLInlineLink",
)
from itertools import chain
try:
import json # try stdlib (Python 2.6+)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
try:
# Python 2.7
from collections import OrderedDict
except:
# Python 2.6
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from gluon import *
from gluon.storage import Storage
from gluon.sqlhtml import StringWidget
from gluon.tools import callback
from gluon.validators import Validator
from s3query import FS
from s3utils import s3_debug, s3_mark_required, s3_represent_value, s3_store_last_record_id, s3_strip_markup, s3_unicode, s3_validate
from s3widgets import S3Selector
# Compact JSON encoding
SEPARATORS = (",", ":")
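# e.g. json.dumps({"a": 1}, separators=SEPARATORS) yields '{"a":1}' rather
# than the default '{"a": 1}', keeping serialized inline payloads compact.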
DEFAULT = lambda: None
# =============================================================================
class S3SQLForm(object):
""" SQL Form Base Class"""
# -------------------------------------------------------------------------
def __init__(self, *elements, **attributes):
"""
Constructor to define the form and its elements.
@param elements: the form elements
@param attributes: form attributes
"""
self.elements = []
append = self.elements.append
debug = current.deployment_settings.get_base_debug()
for element in elements:
if not element:
continue
if isinstance(element, S3SQLFormElement):
append(element)
elif isinstance(element, str):
append(S3SQLField(element))
elif isinstance(element, tuple):
l = len(element)
if l > 1:
label, selector = element[:2]
widget = element[2] if l > 2 else DEFAULT
else:
selector = element[0]
label = widget = DEFAULT
append(S3SQLField(selector, label=label, widget=widget))
else:
msg = "Invalid form element: %s" % str(element)
if debug:
raise SyntaxError(msg)
else:
current.log.error(msg)
opts = {}
attr = {}
for k in attributes:
value = attributes[k]
if k[:1] == "_":
attr[k] = value
else:
opts[k] = value
self.attr = attr
self.opts = opts
# -------------------------------------------------------------------------
# Rendering/Processing
# -------------------------------------------------------------------------
def __call__(self,
request=None,
resource=None,
record_id=None,
readonly=False,
message="Record created/updated",
format=None,
**options):
"""
Render/process the form. To be implemented in subclass.
@param request: the S3Request
@param resource: the target S3Resource
@param record_id: the record ID
@param readonly: render the form read-only
@param message: message upon successful form submission
@param format: data format extension (for audit)
@param options: keyword options for the form
@return: a FORM instance
"""
return None
# -------------------------------------------------------------------------
# Utility functions
# -------------------------------------------------------------------------
def _config(self, key, default=None):
"""
Get a configuration setting for the current table
@param key: the setting key
@param default: fallback value if the setting is not available
"""
tablename = self.tablename
if tablename:
return current.s3db.get_config(tablename, key, default)
else:
return default
# -------------------------------------------------------------------------
@staticmethod
def _submit_buttons(readonly=False):
"""
Render submit buttons
@param readonly: render the form read-only
@return: list of submit buttons
"""
T = current.T
s3 = current.response.s3
settings = s3.crud
if settings.custom_submit:
submit = [(None,
settings.submit_button,
settings.submit_style)]
submit.extend(settings.custom_submit)
buttons = []
for name, label, _class in submit:
if isinstance(label, basestring):
label = T(label)
button = INPUT(_type="submit",
_class="btn crud-submit-button",
_name=name,
_value=label)
if _class:
button.add_class(_class)
buttons.append(button)
else:
buttons = ["submit"]
# Cancel button
if not readonly and s3.cancel:
if not settings.custom_submit:
if settings.submit_button:
submit_label = T(settings.submit_button)
else:
submit_label = T("Save")
submit_button = INPUT(_type="submit",
_value=submit_label)
if settings.submit_style:
submit_button.add_class(settings.submit_style)
buttons = [submit_button]
cancel = s3.cancel
if isinstance(cancel, DIV):
cancel_button = cancel
else:
cancel_button = A(T("Cancel"),
_class="cancel-form-btn action-lnk")
if isinstance(cancel, dict):
# Script-controlled cancel button (embedded form)
if "script" in cancel:
# Custom script
script = cancel["script"]
else:
# Default script: hide form, show add-button
script = \
'''$('.cancel-form-btn').click(function(){$('#%(hide)s').slideUp('medium',function(){$('#%(show)s').show()})})'''
s3.jquery_ready.append(script % cancel)
elif s3.cancel is True:
cancel_button.add_class("s3-cancel")
else:
cancel_button.update(_href=s3.cancel)
buttons.append(cancel_button)
return buttons
# -------------------------------------------------------------------------
@staticmethod
def _insert_subheadings(form, tablename, subheadings):
"""
Insert subheadings into forms
@param form: the form
@param tablename: the tablename
@param subheadings: a dict of {"Headline": Fieldnames}, where
Fieldname can be either a single field name or a list/tuple
of field names belonging under that headline
"""
if subheadings:
if tablename in subheadings:
subheadings = subheadings.get(tablename)
form_rows = iter(form[0])
tr = form_rows.next()
i = 0
done = []
while tr:
# @ToDo: We need a better way of working than this!
f = tr.attributes.get("_id", None)
if not f:
try:
# DIV-based form-style
f = tr[0][0].attributes.get("_id", None)
if not f:
# DRRPP formstyle
f = tr[0][0][1][0].attributes.get("_id", None)
if not f:
# Date fields are inside an extra TAG()
f = tr[0][0][1][0][0].attributes.get("_id", None)
except:
# Something else
f = None
if f:
if f.startswith(tablename):
f = f[len(tablename) + 1:] # : -6
if f.startswith("sub_"):
# Component
f = f[4:]
elif f.startswith("sub-default"):
# S3SQLInlineComponent[CheckBox]
f = f[11:]
elif f.startswith("sub_"):
# S3GroupedOptionsWidget
f = f[4:]
for k in subheadings.keys():
if k in done:
continue
fields = subheadings[k]
if not isinstance(fields, (list, tuple)):
fields = [fields]
if f in fields:
done.append(k)
if isinstance(k, int):
# Don't display a section title
represent = ""
else:
represent = k
form[0].insert(i, TR(TD(represent, _colspan=3,
_class="subheading"),
_class = "subheading",
_id = "%s_%s__subheading" %
(tablename, f)))
tr.attributes.update(_class="after_subheading")
tr = form_rows.next()
i += 1
try:
tr = form_rows.next()
except StopIteration:
break
else:
i += 1
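# Example subheadings dict in the format described in the docstring above
# (field names are hypothetical):
#
#     subheadings = {"Basic Details": ["name", "organisation_id"],
#                    "Comments": "comments",
#                    }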
# =============================================================================
class S3SQLDefaultForm(S3SQLForm):
""" Standard SQL form """
# -------------------------------------------------------------------------
# Rendering/Processing
# -------------------------------------------------------------------------
def __call__(self,
request=None,
resource=None,
record_id=None,
readonly=False,
message="Record created/updated",
format=None,
**options):
"""
Render/process the form.
@param request: the S3Request
@param resource: the target S3Resource
@param record_id: the record ID
@param readonly: render the form read-only
@param message: message upon successful form submission
@param format: data format extension (for audit)
@param options: keyword options for the form
@todo: describe keyword arguments
@return: a FORM instance
"""
if resource is None:
self.resource = request.resource
self.prefix, self.name, self.table, self.tablename = \
request.target()
else:
self.resource = resource
self.prefix = resource.prefix
self.name = resource.name
self.tablename = resource.tablename
self.table = resource.table
response = current.response
s3 = response.s3
settings = s3.crud
prefix = self.prefix
name = self.name
tablename = self.tablename
table = self.table
record = None
labels = None
self.record_id = record_id
if not readonly:
# Pre-populate create-form?
if record_id is None:
data = options.get("data", None)
from_table = options.get("from_table", None)
from_record = options.get("from_record", None)
map_fields = options.get("map_fields", None)
record = self.prepopulate(from_table=from_table,
from_record=from_record,
map_fields=map_fields,
data=data,
format=format)
# De-duplicate link table entries
self.record_id = record_id = self.deduplicate_link(request, record_id)
# Add asterisk to labels of required fields
mark_required = self._config("mark_required", default=[])
labels, required = s3_mark_required(table, mark_required)
if required:
# Show the key if there are any required fields.
s3.has_required = True
else:
s3.has_required = False
# Determine form style
if format == "plain":
# Default formstyle works best when we have no formatting
formstyle = "table3cols"
else:
formstyle = settings.formstyle
# Submit buttons
buttons = self._submit_buttons(readonly)
# Generate the form
if record is None:
record = record_id
response.form_label_separator = ""
form = SQLFORM(table,
record = record,
record_id = record_id,
readonly = readonly,
comments = not readonly,
deletable = False,
showid = False,
upload = s3.download_url,
labels = labels,
formstyle = formstyle,
separator = "",
submit_button = settings.submit_button,
buttons = buttons)
# Style the Submit button, if-requested
if settings.submit_style and not settings.custom_submit:
try:
form[0][-1][0][0]["_class"] = settings.submit_style
except:
# Submit button has been removed or a different formstyle,
# such as Bootstrap (which is already styled anyway)
pass
# Subheadings
subheadings = options.get("subheadings", None)
if subheadings:
self._insert_subheadings(form, tablename, subheadings)
# Process the form
logged = False
if not readonly:
_get = options.get
link = _get("link")
hierarchy = _get("hierarchy")
onvalidation = _get("onvalidation")
onaccept = _get("onaccept")
success, error = self.process(form,
request.post_vars,
onvalidation = onvalidation,
onaccept = onaccept,
hierarchy = hierarchy,
link = link,
http = request.http,
format = format,
)
if success:
response.confirmation = message
logged = True
elif error:
response.error = error
# Audit read
if not logged and not form.errors:
current.audit("read", prefix, name,
record=record_id, representation=format)
return form
# -------------------------------------------------------------------------
def prepopulate(self,
from_table=None,
from_record=None,
map_fields=None,
data=None,
format=None):
"""
Pre-populate the form with values from a previous record or
controller-submitted data
@param from_table: the table to copy the data from
@param from_record: the record to copy the data from
@param map_fields: field selection/mapping
@param data: the data to prepopulate the form with
@param format: the request format extension
"""
table = self.table
record = None
# Pre-populate from a previous record?
if from_table is not None:
# Field mapping
if map_fields:
if isinstance(map_fields, dict):
# Map fields with other names
fields = [from_table[map_fields[f]]
for f in map_fields
if f in table.fields and
map_fields[f] in from_table.fields and
table[f].writable]
elif isinstance(map_fields, (list, tuple)):
# Only use a subset of the fields
fields = [from_table[f]
for f in map_fields
if f in table.fields and
f in from_table.fields and
table[f].writable]
else:
raise TypeError
else:
# Use all writable fields
fields = [from_table[f]
for f in table.fields
if f in from_table.fields and
table[f].writable]
# Audit read => this is a read method, after all
prefix, name = from_table._tablename.split("_", 1)
current.audit("read", prefix, name,
record=from_record, representation=format)
# Get original record
query = (from_table.id == from_record)
row = current.db(query).select(limitby=(0, 1), *fields).first()
if row:
if isinstance(map_fields, dict):
record = Storage([(f, row[map_fields[f]])
for f in map_fields])
else:
record = Storage(row)
# Pre-populate from call?
elif isinstance(data, dict):
record = Storage([(f, data[f])
for f in data
if f in table.fields and
table[f].writable])
# Add missing fields to pre-populated record
if record:
missing_fields = Storage()
for f in table.fields:
if f not in record and table[f].writable:
missing_fields[f] = table[f].default
record.update(missing_fields)
record[table._id.name] = None
return record
# -------------------------------------------------------------------------
def deduplicate_link(self, request, record_id):
"""
Change to update if this request attempts to create a
duplicate entry in a link table
@param request: the request
@param record_id: the record ID
"""
linked = self.resource.linked
table = self.table
session = current.session
if request.env.request_method == "POST" and linked is not None:
pkey = table._id.name
post_vars = request.post_vars
if not post_vars[pkey]:
lkey = linked.lkey
rkey = linked.rkey
_lkey = post_vars[lkey]
_rkey = post_vars[rkey]
query = (table[lkey] == _lkey) & (table[rkey] == _rkey)
row = current.db(query).select(table._id, limitby=(0, 1)).first()
if row is not None:
tablename = self.tablename
record_id = row[pkey]
formkey = session.get("_formkey[%s/None]" % tablename)
formname = "%s/%s" % (tablename, record_id)
session["_formkey[%s]" % formname] = formkey
post_vars["_formname"] = formname
post_vars[pkey] = record_id
return record_id
# -------------------------------------------------------------------------
def process(self, form, vars,
onvalidation = None,
onaccept = None,
hierarchy = None,
link = None,
http = "POST",
format = None,
):
"""
Process the form
@param form: FORM instance
@param vars: request POST variables
@param onvalidation: callback(function) upon successful form validation
@param onaccept: callback(function) upon successful form acceptance
@param hierarchy: the data for the hierarchy link to create
@param link: component link
@param http: HTTP method
@param format: request extension
"""
table = self.table
tablename = self.tablename
# Get the proper onvalidation routine
if isinstance(onvalidation, dict):
onvalidation = onvalidation.get(tablename, [])
# Append link.postprocess to onvalidation
if link and link.postprocess:
postprocess = link.postprocess
if isinstance(onvalidation, list):
onvalidation.insert(0, postprocess)
elif onvalidation is not None:
onvalidation = [postprocess, onvalidation]
else:
onvalidation = [postprocess]
success = True
error = None
record_id = self.record_id
formname = "%s/%s" % (tablename, record_id)
if form.accepts(vars,
current.session,
formname=formname,
onvalidation=onvalidation,
keepvalues=False,
hideerror=False):
# Undelete?
if vars.get("_undelete"):
undelete = form.vars.get("deleted") is False
else:
undelete = False
# Audit
prefix = self.prefix
name = self.name
if record_id is None or undelete:
current.audit("create", prefix, name, form=form,
representation=format)
else:
current.audit("update", prefix, name, form=form,
record=record_id, representation=format)
form_vars = form.vars
# Update super entity links
s3db = current.s3db
s3db.update_super(table, form_vars)
# Update component link
if link and link.postprocess is None:
resource = link.resource
master = link.master
resource.update_link(master, form_vars)
if form_vars.id:
if record_id is None or undelete:
# Create hierarchy link
if hierarchy:
from s3hierarchy import S3Hierarchy
h = S3Hierarchy(tablename)
if h.config:
h.postprocess_create_node(hierarchy, form_vars)
# Set record owner
auth = current.auth
auth.s3_set_record_owner(table, form_vars.id)
auth.s3_make_session_owner(table, form_vars.id)
else:
# Update realm
update_realm = s3db.get_config(table, "update_realm")
if update_realm:
current.auth.set_realm_entity(table, form_vars,
force_update=True)
# Store session vars
self.resource.lastid = str(form_vars.id)
s3_store_last_record_id(tablename, form_vars.id)
# Execute onaccept
try:
callback(onaccept, form, tablename=tablename)
except:
error = "onaccept failed: %s" % onaccept
current.log.error(error)
# Re-raise - otherwise the error would be swallowed here
raise
else:
success = False
if form.errors:
# Revert any records created within widgets/validators
current.db.rollback()
# IS_LIST_OF validation errors need special handling
errors = []
for fieldname in form.errors:
if fieldname in table:
if isinstance(table[fieldname].requires, IS_LIST_OF):
errors.append("%s: %s" % (fieldname,
form.errors[fieldname]))
else:
errors.append(str(form.errors[fieldname]))
if errors:
error = "\n".join(errors)
elif http == "POST":
# Invalid form
error = current.T("Invalid form (re-opened in another window?)")
return success, error
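# Illustrative call pattern for this class (a simplified sketch - in Eden
# the CRUD controller normally instantiates and invokes the form):
#
#     form = S3SQLDefaultForm()
#     output = form(request=r, resource=r.resource, record_id=r.id,
#                   readonly=False, format=r.representation)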
# =============================================================================
class S3SQLCustomForm(S3SQLForm):
""" Custom SQL Form """
# -------------------------------------------------------------------------
def __len__(self):
"""
Support len(crud_form)
"""
return len(self.elements)
# -------------------------------------------------------------------------
def insert(self, index, element):
"""
S.insert(index, object) -- insert object before index
"""
if not element:
return
if isinstance(element, S3SQLFormElement):
self.elements.insert(index, element)
elif isinstance(element, str):
self.elements.insert(index, S3SQLField(element))
elif isinstance(element, tuple):
l = len(element)
if l > 1:
label, selector = element[:2]
widget = element[2] if l > 2 else DEFAULT
else:
selector = element[0]
label = widget = DEFAULT
self.elements.insert(index, S3SQLField(selector, label=label, widget=widget))
else:
msg = "Invalid form element: %s" % str(element)
if current.deployment_settings.get_base_debug():
raise SyntaxError(msg)
else:
current.log.error(msg)
# -------------------------------------------------------------------------
def append(self, element):
"""
S.append(object) -- append object to the end of the sequence
"""
self.insert(len(self), element)
# -------------------------------------------------------------------------
# Rendering/Processing
# -------------------------------------------------------------------------
def __call__(self,
request=None,
resource=None,
record_id=None,
readonly=False,
message="Record created/updated",
format=None,
**options):
"""
Render/process the form.
@param request: the S3Request
@param resource: the target S3Resource
@param record_id: the record ID
@param readonly: render the form read-only
@param message: message upon successful form submission
@param format: data format extension (for audit)
@param options: keyword options for the form
@return: a FORM instance
"""
db = current.db
response = current.response
s3 = response.s3
# Determine the target resource
if resource is None:
resource = request.resource
self.prefix, self.name, self.table, self.tablename = \
request.target()
else:
self.prefix = resource.prefix
self.name = resource.name
self.tablename = resource.tablename
self.table = resource.table
self.resource = resource
# Resolve all form elements against the resource
fields = []
subtables = []
components = []
for element in self.elements:
alias, name, field = element.resolve(resource)
if field is not None:
fields.append((alias, name, field))
if isinstance(alias, str):
if alias not in subtables:
subtables.append(alias)
elif isinstance(alias, S3SQLFormElement):
components.append(alias)
self.subtables = subtables
self.components = components
# Mark required fields with asterisk
if not readonly:
mark_required = self._config("mark_required", default=[])
labels, required = s3_mark_required(self.table, mark_required)
if required:
# Show the key if there are any required fields.
s3.has_required = True
else:
s3.has_required = False
else:
labels = None
# Choose formstyle
settings = s3.crud
if format == "plain":
# Simple formstyle works best when we have no formatting
formstyle = "table3cols"
else:
formstyle = settings.formstyle
# Retrieve the record
record = None
if record_id is not None:
query = (self.table._id == record_id)
# @ToDo: limit fields (at least not meta)
record = db(query).select(limitby=(0, 1)).first()
self.record_id = record_id
self.subrows = Storage()
# Populate the form
data = None
noupdate = []
forbidden = []
has_permission = current.auth.s3_has_permission
rcomponents = resource.components
if record is not None:
# Retrieve the subrows
subrows = self.subrows
for alias in subtables:
# Get the join for this subtable
component = rcomponents[alias]
if component.multiple:
continue
join = component.get_join()
q = query & join
# Retrieve the row
# @todo: Should not need .ALL here
row = db(q).select(component.table.ALL,
limitby=(0, 1)).first()
# Check permission for this subrow
ctname = component.tablename
if not row:
component = rcomponents[alias]
permitted = has_permission("create", ctname)
if not permitted:
forbidden.append(alias)
continue
else:
cid = row[component.table._id]
permitted = has_permission("read", ctname, cid)
if not permitted:
forbidden.append(alias)
continue
permitted = has_permission("update", ctname, cid)
if not permitted:
noupdate.append(alias)
# Add the row to the subrows
subrows[alias] = row
# Build the data Storage for the form
pkey = self.table._id
data = Storage({pkey.name:record[pkey]})
for alias, name, field in fields:
if alias is None:
if name in record:
data[field.name] = record[name]
elif alias in subtables:
if alias in subrows and \
subrows[alias] is not None and \
name in subrows[alias]:
data[field.name] = subrows[alias][name]
elif hasattr(alias, "extract"):
data[field.name] = alias.extract(resource, record_id)
else:
# Record does not exist
self.record_id = record_id = None
# Check create-permission for subtables
for alias in subtables:
if alias in rcomponents:
component = rcomponents[alias]
else:
continue
permitted = has_permission("create", component.tablename)
if not permitted:
forbidden.append(alias)
# Apply permissions for subtables
fields = [f for f in fields if f[0] not in forbidden]
for a, n, f in fields:
if a:
if a in noupdate:
f.writable = False
if labels is not None and f.name not in labels:
if f.required:
flabels = s3_mark_required([f], mark_required=[f])[0]
labels[f.name] = flabels[f.name]
elif f.label:
labels[f.name] = "%s:" % f.label
else:
labels[f.name] = ""
if readonly:
# Strip all comments
for a, n, f in fields:
f.comment = None
else:
# Mark required subtable-fields (retaining override-labels)
for alias in subtables:
if alias in rcomponents:
component = rcomponents[alias]
mark_required = component.get_config("mark_required", [])
ctable = component.table
sfields = dict((n, (f.name, f.label))
for a, n, f in fields
if a == alias and n in ctable)
slabels = s3_mark_required([ctable[n] for n in sfields],
mark_required=mark_required,
map_names=sfields)[0]
if labels:
labels.update(slabels)
else:
labels = slabels
self.subtables = [s for s in self.subtables if s not in forbidden]
# Aggregate the form fields
formfields = [f[-1] for f in fields]
# Submit buttons
buttons = self._submit_buttons(readonly)
# Render the form
tablename = self.tablename
response.form_label_separator = ""
form = SQLFORM.factory(*formfields,
record = data,
showid = False,
labels = labels,
formstyle = formstyle,
table_name = tablename,
upload = s3.download_url,
readonly = readonly,
separator = "",
submit_button = settings.submit_button,
buttons = buttons)
# Style the Submit button, if-requested
if settings.submit_style and not settings.custom_submit:
try:
form[0][-1][0][0]["_class"] = settings.submit_style
except:
# Submit button has been removed or a different formstyle,
# such as Bootstrap (which is already styled anyway)
pass
# Subheadings
subheadings = options.get("subheadings", None)
if subheadings:
self._insert_subheadings(form, tablename, subheadings)
# Process the form
formname = "%s/%s" % (tablename, record_id)
post_vars = request.post_vars
if form.accepts(post_vars,
current.session,
onvalidation = self.validate,
formname = formname,
keepvalues = False,
hideerror = False,
):
# Undelete?
if post_vars.get("_undelete"):
undelete = post_vars.get("deleted") is False
else:
undelete = False
link = options.get("link")
hierarchy = options.get("hierarchy")
self.accept(form,
format = format,
link = link,
hierarchy = hierarchy,
undelete = undelete,
)
# Post-process the form submission after all records have
# been accepted and linked together (self.accept() has
# already updated the form data with any new keys here):
postprocess = self.opts.get("postprocess", None)
if postprocess:
try:
callback(postprocess, form, tablename=tablename)
except:
error = "postprocess failed: %s" % postprocess
current.log.error(error)
raise
response.confirmation = message
if form.errors:
# Revert any records created within widgets/validators
db.rollback()
return form
# -------------------------------------------------------------------------
def validate(self, form):
"""
Run the onvalidation callbacks for the master table
and all subtables in the form, and store any errors
in the form.
@param form: the form
"""
s3db = current.s3db
config = self._config
# Validate against the main table
if self.record_id:
onvalidation = config("update_onvalidation",
config("onvalidation", None))
else:
onvalidation = config("create_onvalidation",
config("onvalidation", None))
if onvalidation is not None:
try:
callback(onvalidation, form, tablename=self.tablename)
except:
error = "onvalidation failed: %s" % onvalidation
current.log.error(error)
raise
# Validate against all subtables
get_config = s3db.get_config
for alias in self.subtables:
# Extract the subtable data
subdata = self._extract(form, alias)
if not subdata:
continue
# Get the onvalidation callback for this subtable
subtable = self.resource.components[alias].table
subform = Storage(vars=subdata, errors=Storage())
rows = self.subrows
if alias in rows and rows[alias] is not None:
#subid = rows[alias][subtable._id]
subonvalidation = get_config(subtable._tablename,
"update_onvalidation",
get_config(subtable._tablename,
"onvalidation", None))
else:
#subid = None
subonvalidation = get_config(subtable._tablename,
"create_onvalidation",
get_config(subtable._tablename,
"onvalidation", None))
# Validate against the subtable, store errors in form
if subonvalidation is not None:
try:
callback(subonvalidation, subform,
tablename = subtable._tablename)
except:
error = "onvalidation failed: %s" % subonvalidation
current.log.error(error)
raise
for fn in subform.errors:
dummy = "sub_%s_%s" % (alias, fn)
form.errors[dummy] = subform.errors[fn]
return
# -------------------------------------------------------------------------
def accept(self,
form,
format=None,
link=None,
hierarchy=None,
undelete=False):
"""
Create/update all records from the form.
@param form: the form
@param format: data format extension (for audit)
@param link: resource.link for linktable components
@param hierarchy: the data for the hierarchy link to create
@param undelete: reinstate a previously deleted record
"""
db = current.db
table = self.table
# Create/update the main record
main_data = self._extract(form)
master_id, master_form_vars = self._accept(self.record_id,
main_data,
format = format,
link = link,
hierarchy = hierarchy,
undelete = undelete,
)
if not master_id:
return
else:
main_data[table._id.name] = master_id
# Create or update the subtables
for alias in self.subtables:
subdata = self._extract(form, alias=alias)
if not subdata:
continue
component = self.resource.components[alias]
subtable = component.table
# Get the key (pkey) of the master record to link the
# subtable record to, and update the subdata with it
pkey = component.pkey
if pkey != table._id.name and pkey not in main_data:
row = db(table._id == master_id).select(table[pkey],
limitby=(0, 1)).first()
if not row:
return
main_data[pkey] = row[table[pkey]]
subdata[component.fkey] = main_data[pkey]
# Do we already have a record for this component?
# If yes, then get the subrecord ID
rows = self.subrows
if alias in rows and rows[alias] is not None:
subid = rows[alias][subtable._id]
else:
subid = None
# Apply component defaults
defaults = component.defaults
if isinstance(defaults, dict):
for k, v in defaults.items():
if k != component.fkey and \
k not in subdata and \
k in component.fields:
subdata[k] = v
# Accept the subrecord
self._accept(subid,
subdata,
alias=alias,
format=format)
# Accept components (e.g. Inline-Forms)
for item in self.components:
if hasattr(item, "accept"):
item.accept(form,
master_id=master_id,
format=format)
# Update form with master form_vars
form_vars = form.vars
# ID
form_vars[table._id.name] = master_id
# Super entities (& anything added manually in table's onaccept)
for var in master_form_vars:
if var not in form_vars:
form_vars[var] = master_form_vars[var]
return
# -------------------------------------------------------------------------
# Utility functions
# -------------------------------------------------------------------------
def _extract(self, form, alias=None):
"""
Extract data for a subtable from the form
@param form: the form
@param alias: the component alias of the subtable
"""
if alias is None:
return self.table._filter_fields(form.vars)
else:
subform = Storage()
alias_length = len(alias)
form_vars = form.vars
for k in form_vars:
if k[:4] == "sub_" and \
form_vars[k] is not None and \
k[4:4 + alias_length + 1] == "%s_" % alias:
fn = k[4 + alias_length + 1:]
subform[fn] = form_vars[k]
return subform
# -------------------------------------------------------------------------
def _accept(self,
record_id,
data,
alias=None,
format=None,
hierarchy=None,
link=None,
undelete=False):
"""
Create or update a record
@param record_id: the record ID
@param data: the data
@param alias: the component alias
@param format: the request format (for audit)
@param hierarchy: the data for the hierarchy link to create
@param link: resource.link for linktable components
@param undelete: reinstate a previously deleted record
"""
if not data:
if alias is not None:
# Component, no data to create or update => skip
return None, Storage()
elif record_id:
# Existing master record, no data to update => skip
return record_id, Storage()
s3db = current.s3db
if alias is None:
component = self.resource
else:
component = self.resource.components[alias]
# Get the DB table (without alias)
table = component.table
tablename = component.tablename
if component._alias != tablename:
table = s3db.table(component.tablename)
get_config = s3db.get_config
oldrecord = None
if record_id:
# Update existing record
accept_id = record_id
db = current.db
onaccept = get_config(tablename, "update_onaccept",
get_config(tablename, "onaccept", None))
if onaccept:
# Get oldrecord to save in form
oldrecord = db(table._id == record_id).select(limitby=(0, 1)
).first()
if undelete:
# Re-instating a previously deleted record
table_fields = table.fields
if "deleted" in table_fields:
data["deleted"] = False
if "created_by" in table_fields and current.auth.user:
data["created_by"] = current.auth.user.id
if "created_on" in table_fields:
data["created_on"] = current.request.utcnow
db(table._id == record_id).update(**data)
else:
# Insert new record
accept_id = table.insert(**data)
if not accept_id:
raise RuntimeError("Could not create record")
onaccept = get_config(tablename, "create_onaccept",
get_config(tablename, "onaccept", None))
data[table._id.name] = accept_id
prefix, name = tablename.split("_", 1)
form_vars = Storage(data)
form = Storage(vars=form_vars, record=oldrecord)
# Audit
if record_id is None or undelete:
current.audit("create", prefix, name, form=form,
representation=format)
else:
current.audit("update", prefix, name, form=form,
record=accept_id, representation=format)
# Update super entity links
s3db.update_super(table, form_vars)
# Update component link
if link and link.postprocess is None:
resource = link.resource
master = link.master
resource.update_link(master, form_vars)
if accept_id:
if record_id is None or undelete:
# Create hierarchy link
if hierarchy:
from s3hierarchy import S3Hierarchy
h = S3Hierarchy(tablename)
if h.config:
h.postprocess_create_node(hierarchy, form_vars)
# Set record owner
auth = current.auth
auth.s3_set_record_owner(table, accept_id)
auth.s3_make_session_owner(table, accept_id)
else:
# Update realm
update_realm = get_config(table, "update_realm")
if update_realm:
current.auth.set_realm_entity(table, form_vars,
force_update=True)
# Store session vars
component.lastid = str(accept_id)
s3_store_last_record_id(tablename, accept_id)
# Execute onaccept
try:
callback(onaccept, form, tablename=tablename)
except:
error = "onaccept failed: %s" % onaccept
current.log.error(error)
# Re-raise - otherwise the error would be swallowed here
raise
if alias is None:
# Return master_form_vars
return accept_id, form.vars
else:
return accept_id
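# Typical template-level usage of this class (a hypothetical sketch -
# the field selectors depend on the target table's schema):
#
#     crud_form = S3SQLCustomForm("name",
#                                 "organisation_id",
#                                 (T("Comments"), "comments"),
#                                 )
#     current.s3db.configure("org_office", crud_form=crud_form)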
# =============================================================================
class S3SQLFormElement(object):
""" SQL Form Element Base Class """
# -------------------------------------------------------------------------
def __init__(self, selector, **options):
"""
Constructor to define the form element, to be extended
in subclass.
@param selector: the data object selector
@param options: options for the form element
"""
self.selector = selector
self.options = Storage(options)
# -------------------------------------------------------------------------
def resolve(self, resource):
"""
Method to resolve this form element against the calling resource.
To be implemented in subclass.
@param resource: the resource
@return: a tuple
(
form element,
original field name,
Field instance for the form renderer
)
The form element can be None for the main table, the component
alias for a subtable, or this form element instance for a
subform.
If None is returned as Field instance, this form element will
not be rendered at all. Besides setting readable/writable
in the Field instance, this can be another mechanism to
control access to form elements.
"""
return None, None, None
# -------------------------------------------------------------------------
# Utility methods
# -------------------------------------------------------------------------
@staticmethod
def _rename_field(field, name,
comments=True,
popup=None,
skip_post_validation=False,
label=DEFAULT,
widget=DEFAULT):
"""
Rename a field (actually: create a new Field instance with the
same attributes as the given Field, but a different field name).
@param field: the original Field instance
@param name: the new name
@param comments: render comments - if set to False, only
navigation items with an inline() renderer
method will be rendered (unless popup is None)
@param popup: only if comments=False, additional vars for comment
navigation items (e.g. AddResourceLink), None prevents
rendering of navigation items
@param skip_post_validation: skip field validation during POST,
useful for client-side processed
dummy fields.
@param label: override option for the original field label
@param widget: override option for the original field widget
"""
if not hasattr(field, "type"):
# Virtual Field
field = Storage(comment=None,
type="string",
length=255,
unique=False,
uploadfolder=None,
autodelete=False,
label="",
writable=False,
readable=True,
default=None,
update=None,
compute=None,
represent=lambda v: v or "",
)
requires = None
required = False
notnull = False
elif skip_post_validation and \
current.request.env.request_method == "POST":
requires = SKIP_POST_VALIDATION(field.requires)
required = False
notnull = False
else:
requires = field.requires
required = field.required
notnull = field.notnull
if widget is DEFAULT:
# Some widgets may need disabling during POST
widget = field.widget
if label is DEFAULT:
label = field.label
if not comments:
if popup:
comment = field.comment
if hasattr(comment, "clone"):
comment = comment.clone()
if hasattr(comment, "renderer") and \
hasattr(comment, "inline") and \
isinstance(popup, dict):
comment.vars.update(popup)
comment.renderer = comment.inline
else:
comment = None
else:
comment = None
else:
comment = field.comment
f = Field(str(name),
type = field.type,
length = field.length,
required = required,
notnull = notnull,
unique = field.unique,
uploadfolder = field.uploadfolder,
autodelete = field.autodelete,
widget = widget,
label = label,
comment = comment,
writable = field.writable,
readable = field.readable,
default = field.default,
update = field.update,
compute = field.compute,
represent = field.represent,
requires = requires)
return f
# =============================================================================
class S3SQLField(S3SQLFormElement):
"""
Base class for regular form fields
        A regular form field is a field in the main form, which can be
        a field in the main record or in a subtable (single-record-component).
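
        Example (an illustrative sketch - the selectors are hypothetical):

            S3SQLField("name")                 # field in the main table
            S3SQLField("details.description")  # field in a subtable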
"""
# -------------------------------------------------------------------------
def resolve(self, resource):
"""
Method to resolve this form element against the calling resource.
@param resource: the resource
@return: a tuple
(
subtable alias (or None for main table),
original field name,
Field instance for the form renderer
)
"""
# Import S3ResourceField only here, to avoid circular dependency
from s3query import S3ResourceField
rfield = S3ResourceField(resource, self.selector)
components = resource.components
subtables = {}
if components:
for alias, component in components.items():
if component.multiple:
continue
if component._alias:
tablename = component._alias
else:
tablename = component.tablename
subtables[tablename] = alias
tname = rfield.tname
if rfield.field is not None:
field = rfield.field
options = self.options
label = options.get("label", DEFAULT)
widget = options.get("widget", DEFAULT)
# Field in the main table
if tname == resource.tablename:
field = rfield.field
if label is not DEFAULT:
field.label = label
if widget is not DEFAULT:
field.widget = widget
return None, field.name, field
# Field in a subtable (= single-record-component)
elif tname in subtables:
field = rfield.field
alias = subtables[tname]
name = "sub_%s_%s" % (alias, rfield.fname)
renamed_field = self._rename_field(field,
name,
label = label,
widget = widget,
)
return alias, field.name, renamed_field
else:
raise SyntaxError("Invalid subtable: %s" % tname)
else:
raise SyntaxError("Invalid selector: %s" % self.selector)
# =============================================================================
class S3SQLSubForm(S3SQLFormElement):
"""
Base class for subforms
A subform is a form element to be processed after the main
form. Subforms render a single (usually hidden) input field
and a client-side controlled widget to manipulate its contents.
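
        Lifecycle (as implemented by the methods below): resolve() returns
        the input field for the form, extract() provides its initial value
        for an existing record, __call__() renders the widget, parse()
        validates the submitted input, and accept() performs the related
        transactions after the main form has been accepted.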
"""
# -------------------------------------------------------------------------
def extract(self, resource, record_id):
"""
Initialize this form element for a particular record. This
method will be called by the form renderer to populate the
form for an existing record. To be implemented in subclass.
@param resource: the resource the record belongs to
@param record_id: the record ID
@return: the value for the input field that corresponds
to the specified record.
"""
return None
# -------------------------------------------------------------------------
def parse(self, value):
"""
Validator method for the input field, used to extract the
data from the input field and prepare them for further
processing by the accept()-method. To be implemented in
subclass and set as requires=self.parse for the input field
in the resolve()-method of this form element.
@param value: the value returned from the input field
@return: tuple of (value, error) where value is the
pre-processed field value and error an error
message in case of invalid data, or None.
"""
return (value, None)
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget renderer for the input field. To be implemented in
subclass (if required) and to be set as widget=self for the
field returned by the resolve()-method of this form element.
@param field: the input field
@param value: the value to populate the widget
@param attributes: attributes for the widget
@return: the widget for this form element as HTML helper
"""
raise NotImplementedError
# -------------------------------------------------------------------------
def represent(self, value):
"""
Read-only representation of this form element. This will be
used instead of the __call__() method when the form element
is to be rendered read-only.
@param value: the value as returned from extract()
@return: the read-only representation of this element as
string or HTML helper
"""
return ""
# -------------------------------------------------------------------------
def accept(self, form, master_id=None, format=None):
"""
Post-process this form element and perform the related
transactions. This method will be called after the main
form has been accepted, where the master record ID will
be provided.
@param form: the form
@param master_id: the master record ID
@param format: the data format extension
@return: True on success, False on error
"""
return True
# =============================================================================
class SKIP_POST_VALIDATION(Validator):
"""
Pseudo-validator that allows introspection of field options
during GET, but does nothing during POST. Used for Ajax-validated
inline-components to prevent them from throwing validation errors
when the outer form gets submitted.
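
        During GET, the wrapped validator runs normally (so that widgets
        can introspect its options); during POST, values pass through
        unchanged.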
"""
def __init__(self, other=None):
"""
Constructor, used like:
field.requires = SKIP_POST_VALIDATION(field.requires)
@param other: the actual field validator
"""
if other and isinstance(other, (list, tuple)):
other = other[0]
self.other = other
if other:
if hasattr(other, "multiple"):
self.multiple = other.multiple
if hasattr(other, "options"):
self.options = other.options
if hasattr(other, "formatter"):
self.formatter = other.formatter
def __call__(self, value):
"""
Validation
@param value: the value
"""
other = self.other
if current.request.env.request_method == "POST" or not other:
return value, None
if not isinstance(other, (list, tuple)):
other = [other]
for r in other:
value, error = r(value)
if error:
return value, error
return value, None
# =============================================================================
class S3SQLSubFormLayout(object):
""" Layout for S3SQLInlineComponent (Base Class) """
def __init__(self):
""" Constructor """
self.inject_script()
self.columns = None
self.row_actions = True
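
        # Note: subclasses can customize the appearance by overriding
        # subform(), headers(), actions(), rowstyle()/rowstyle_read()
        # and/or inject_script() (see the sketch in inject_script below)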
# -------------------------------------------------------------------------
def set_columns(self, columns, row_actions=True):
"""
            Set column widths for inline-widgets; can be used by subclasses
            to render CSS classes for grid-width
            @param columns: iterable of column widths
            @param row_actions: whether the subform contains an action column
"""
self.columns = columns
self.row_actions = row_actions
# -------------------------------------------------------------------------
def subform(self,
data,
item_rows,
action_rows,
empty=False,
readonly=False):
"""
Outer container for the subform
@param data: the data dict (as returned from extract())
@param item_rows: the item rows
@param action_rows: the (hidden) action rows
@param empty: no data in this component
@param readonly: render read-only
"""
if empty:
subform = current.T("No entries currently available")
else:
headers = self.headers(data, readonly=readonly)
subform = TABLE(headers,
TBODY(item_rows),
TFOOT(action_rows),
_class="embeddedComponent",
)
return subform
# -------------------------------------------------------------------------
def readonly(self, resource, data):
"""
Render this component read-only (table-style)
@param resource: the S3Resource
@param data: the data dict (as returned from extract())
"""
audit = current.audit
prefix, name = resource.prefix, resource.name
xml_decode = current.xml.xml_decode
items = data["data"]
fields = data["fields"]
trs = []
for item in items:
if "_id" in item:
record_id = item["_id"]
else:
continue
audit("read", prefix, name,
record=record_id, representation="html")
trow = TR(_class="read-row")
for f in fields:
text = xml_decode(item[f["name"]]["text"])
trow.append(XML(xml_decode(text)))
trs.append(trow)
return self.subform(data, trs, [], empty=False, readonly=True)
# -------------------------------------------------------------------------
@staticmethod
def render_list(resource, data):
"""
Render this component read-only (list-style)
@param resource: the S3Resource
@param data: the data dict (as returned from extract())
"""
audit = current.audit
prefix, name = resource.prefix, resource.name
xml_decode = current.xml.xml_decode
items = data["data"]
fields = data["fields"]
# Render as comma-separated list of values (no header)
elements = []
for item in items:
if "_id" in item:
record_id = item["_id"]
else:
continue
audit("read", prefix, name,
record=record_id, representation="html")
t = []
for f in fields:
t.append([XML(xml_decode(item[f["name"]]["text"])), " "])
elements.append([TAG[""](list(chain.from_iterable(t))[:-1]), ", "])
return DIV(list(chain.from_iterable(elements))[:-1],
_class="embeddedComponent",
)
# -------------------------------------------------------------------------
def headers(self, data, readonly=False):
"""
Render the header row with field labels
@param data: the input field data as Python object
@param readonly: whether the form is read-only
"""
fields = data["fields"]
# Don't render a header row if there are no labels
render_header = False
header_row = TR(_class="label-row static")
happend = header_row.append
for f in fields:
label = f["label"]
if label:
render_header = True
label = TD(LABEL(label))
happend(label)
if render_header:
if not readonly:
# Add columns for the Controls
happend(TD())
happend(TD())
return THEAD(header_row)
else:
return THEAD(_class="hide")
# -------------------------------------------------------------------------
@staticmethod
def actions(subform,
formname,
index,
item = None,
readonly=True,
editable=True,
deletable=True):
"""
Render subform row actions into the row
@param subform: the subform row
@param formname: the form name
@param index: the row index
@param item: the row data
@param readonly: this is a read-row
@param editable: this row is editable
@param deletable: this row is deletable
"""
T = current.T
action_id = "%s-%s" % (formname, index)
# Action button helper
def action(title, name, throbber=False):
btn = DIV(_id="%s-%s" % (name, action_id),
_class="inline-%s" % name)
if throbber:
return DIV(btn,
DIV(_class="inline-throbber hide",
_id="throbber-%s" % action_id))
else:
return DIV(btn)
# Render the action icons for this row
append = subform.append
if readonly:
if editable:
append(action(T("Edit this entry"), "edt"))
else:
append(TD())
if deletable:
append(action(T("Remove this entry"), "rmv"))
else:
append(TD())
else:
if index != "none" or item:
append(action(T("Update this entry"), "rdy", throbber=True))
append(action(T("Cancel editing"), "cnc"))
else:
append(TD())
append(action(T("Add this entry"), "add", throbber=True))
# -------------------------------------------------------------------------
def rowstyle_read(self, form, fields, *args, **kwargs):
"""
Formstyle for subform read-rows, normally identical
to rowstyle, but can be different in certain layouts
"""
return self.rowstyle(form, fields, *args, **kwargs)
# -------------------------------------------------------------------------
def rowstyle(self, form, fields, *args, **kwargs):
"""
Formstyle for subform action-rows
"""
def render_col(col_id, label, widget, comment, hidden=False):
if col_id == "submit_record__row":
if hasattr(widget, "add_class"):
widget.add_class("inline-row-actions")
col = TD(widget)
elif comment:
col = TD(DIV(widget, comment), _id=col_id)
else:
col = TD(widget, _id=col_id)
return col
if args:
col_id = form
label = fields
widget, comment = args
hidden = kwargs.get("hidden", False)
return render_col(col_id, label, widget, comment, hidden)
else:
parent = TR()
for col_id, label, widget, comment in fields:
parent.append(render_col(col_id, label, widget, comment))
return parent
# -------------------------------------------------------------------------
@staticmethod
def inject_script():
""" Inject custom JS to render new read-rows """
# Example:
#appname = current.request.application
#scripts = current.response.s3.scripts
#script = "/%s/static/themes/CRMT/js/inlinecomponent.layout.js" % appname
#if script not in scripts:
#scripts.append(script)
# No custom JS in the default layout
return
# =============================================================================
class S3SQLInlineComponent(S3SQLSubForm):
"""
Form element for an inline-component-form
This form element allows CRUD of multi-record-components within
the main record form. It renders a single hidden text field with a
JSON representation of the component records, and a widget which
facilitates client-side manipulation of this JSON.
This widget is a row of fields per component record.
The widget uses the s3.ui.inline_component.js script for client-side
manipulation of the JSON data. Changes made by the script will be
validated through Ajax-calls to the CRUD.validate() method.
During accept(), the component gets updated according to the JSON
returned.
@ToDo: Support filtering of field options
Usecase is inline project_organisation for IFRC
PartnerNS needs to be filtered differently from Partners/Donors,
so can't just set a global requires for the field in the controller
- needs to be inside the widget.
See private/templates/IFRC/config.py
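
        Example (an illustrative sketch - resource, component and field
        names are hypothetical):

            crud_form = S3SQLCustomForm("name",
                                        S3SQLInlineComponent("task",
                                                             label = T("Tasks"),
                                                             fields = ["name",
                                                                       "status",
                                                                       ],
                                                             ),
                                        )
            current.s3db.configure("project_project",
                                   crud_form = crud_form)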
"""
prefix = "sub"
# -------------------------------------------------------------------------
def resolve(self, resource):
"""
Method to resolve this form element against the calling resource.
@param resource: the resource
@return: a tuple (self, None, Field instance)
"""
selector = self.selector
# Check selector
if selector not in resource.components:
hook = current.s3db.get_component(resource.tablename, selector)
if hook:
resource._attach(selector, hook)
else:
raise SyntaxError("Undefined component: %s" % selector)
component = resource.components[selector]
# Check permission
permitted = current.auth.s3_has_permission("read",
component.tablename)
if not permitted:
return (None, None, None)
options = self.options
if "name" in options:
self.alias = options["name"]
label = self.alias
else:
self.alias = "default"
label = self.selector
if "label" in options:
label = options["label"]
else:
label = " ".join([s.capitalize() for s in label.split("_")])
fname = self._formname(separator = "_")
field = Field(fname, "text",
comment = options.get("comment", None),
default = self.extract(resource, None),
label = label,
represent = self.represent,
required = options.get("required", False),
requires = self.parse,
widget = self,
)
return (self, None, field)
# -------------------------------------------------------------------------
def extract(self, resource, record_id):
"""
Initialize this form element for a particular record. Retrieves
the component data for this record from the database and
converts them into a JSON string to populate the input field with.
@param resource: the resource the record belongs to
@param record_id: the record ID
@return: the JSON for the input field.
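                     The JSON has the shape (a sketch, values illustrative):
                         {"controller": ..., "function": ...,
                          "resource": ..., "component": ...,
                          "fields": [{"name": ..., "label": ...}, ...],
                          "defaults": {...},
                          "data": [{"_id": ...,
                                    fieldname: {"value": ..., "text": ...},
                                    }, ...]}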
"""
self.resource = resource
component_name = self.selector
if component_name in resource.components:
component = resource.components[component_name]
options = self.options
if component.link:
link = options.get("link", True)
if link:
# For link-table components, embed the link
# table rather than the component
component = component.link
table = component.table
tablename = component.tablename
pkey = table._id.name
fields_opt = options.get("fields", None)
labels = {}
widgets = {}
if fields_opt:
fields = []
for f in fields_opt:
if isinstance(f, tuple):
if len(f) > 2:
label, f, w = f
widgets[f] = w
else:
label, f = f
labels[f] = label
if f in table.fields:
fields.append(f)
else:
# Really?
fields = [f.name for f in table if f.readable or f.writable]
if pkey not in fields:
fields.insert(0, pkey)
# Support read-only Virtual Fields
if "virtual_fields" in options:
virtual_fields = options["virtual_fields"]
else:
virtual_fields = []
if "orderby" in options:
orderby = options["orderby"]
else:
orderby = component.get_config("orderby")
if record_id:
if "filterby" in options:
# Filter
f = self._filterby_query()
if f is not None:
component.build_query(filter=f)
if "extra_fields" in options:
extra_fields = options["extra_fields"]
else:
extra_fields = []
all_fields = fields + virtual_fields + extra_fields
start = 0
limit = 1 if options.multiple is False else None
data = component.select(all_fields,
start=start,
limit=limit,
represent=True,
raw_data=True,
show_links=False,
orderby=orderby)
records = data["rows"]
rfields = data["rfields"]
                for f in rfields[:]: # copy, since items are removed while iterating
if f.fname in extra_fields:
rfields.remove(f)
else:
s = f.selector
if s.startswith("~."):
s = s[2:]
label = labels.get(s, None)
if label is not None:
f.label = label
else:
records = []
rfields = []
for s in fields:
rfield = component.resolve_selector(s)
label = labels.get(s, None)
if label is not None:
rfield.label = label
rfields.append(rfield)
for f in virtual_fields:
rfield = component.resolve_selector(f[1])
rfield.label = f[0]
rfields.append(rfield)
headers = [{"name": rfield.fname,
"label": s3_unicode(rfield.label)}
for rfield in rfields if rfield.fname != pkey]
self.widgets = widgets
items = []
has_permission = current.auth.s3_has_permission
for record in records:
row = record["_row"]
row_id = row[str(table._id)]
item = {"_id": row_id}
permitted = has_permission("update", tablename, row_id)
if not permitted:
item["_readonly"] = True
for rfield in rfields:
fname = rfield.fname
if fname == pkey:
continue
colname = rfield.colname
field = rfield.field
widget = field.widget
if isinstance(widget, S3Selector):
# Use the widget extraction/serialization method
value = widget.serialize(widget.extract(row[colname]))
elif hasattr(field, "formatter"):
value = field.formatter(row[colname])
else:
# Virtual Field
value = row[colname]
text = s3_unicode(record[colname])
if "<" in text:
text = s3_strip_markup(text)
item[fname] = {"value": value, "text": text}
items.append(item)
validate = options.get("validate", None)
            if not validate or \
               not isinstance(validate, tuple) or \
               len(validate) != 2:
request = current.request
validate = (request.controller, request.function)
c, f = validate
data = {"controller": c,
"function": f,
"resource": resource.tablename,
"component": component_name,
"fields": headers,
"defaults": self._filterby_defaults(),
"data": items
}
else:
raise AttributeError("Undefined component")
return json.dumps(data, separators=SEPARATORS)
# -------------------------------------------------------------------------
def parse(self, value):
"""
Validator method, converts the JSON returned from the input
field into a Python object.
@param value: the JSON from the input field.
@return: tuple of (value, error), where value is the converted
JSON, and error the error message if the decoding
fails, otherwise None
"""
# @todo: catch uploads during validation errors
if isinstance(value, basestring):
try:
value = json.loads(value)
except:
import sys
error = sys.exc_info()[1]
if hasattr(error, "message"):
error = error.message
else:
error = None
else:
value = None
error = None
return (value, error)
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget method for this form element. Renders a table with
read-rows for existing entries, a variable edit-row to update
existing entries, and an add-row to add new entries. This widget
uses s3.inline_component.js to facilitate manipulation of the
entries.
@param field: the Field for this form element
@param value: the current value for this field
@param attributes: keyword attributes for this widget
"""
options = self.options
if options.readonly is True:
# Render read-only
return self.represent(value)
if value is None:
value = field.default
if isinstance(value, basestring):
data = json.loads(value)
else:
data = value
value = json.dumps(value, separators=SEPARATORS)
if data is None:
raise SyntaxError("No resource structure information")
self.upload = Storage()
if options.multiple is False:
multiple = False
else:
multiple = True
required = options.get("required", False)
# Get the table
resource = self.resource
component_name = data["component"]
component = resource.components[component_name]
table = component.table
# @ToDo: Hide completely if the user is not permitted to read this
# component
formname = self._formname()
fields = data["fields"]
items = data["data"]
# Flag whether there are any rows (at least an add-row) in the widget
has_rows = False
# Add the item rows
item_rows = []
prefix = component.prefix
name = component.name
audit = current.audit
has_permission = current.auth.s3_has_permission
tablename = component.tablename
# Configure the layout
layout = self._layout()
columns = self.options.get("columns")
if columns:
layout.set_columns(columns, row_actions = multiple)
get_config = current.s3db.get_config
_editable = get_config(tablename, "editable")
if _editable is None:
_editable = True
_deletable = get_config(tablename, "deletable")
if _deletable is None:
_deletable = True
_class = "read-row inline-form"
if not multiple:
# Mark to client-side JS that we should open Edit Row
_class = "%s single" % _class
item = None
for i in xrange(len(items)):
has_rows = True
item = items[i]
# Get the item record ID
if "_delete" in item and item["_delete"]:
continue
elif "_id" in item:
record_id = item["_id"]
# Check permissions to edit this item
if _editable:
editable = has_permission("update", tablename, record_id)
else:
editable = False
if _deletable:
deletable = has_permission("delete", tablename, record_id)
else:
deletable = False
else:
record_id = None
if _editable:
editable = True
else:
editable = False
if _deletable:
deletable = True
else:
deletable = False
# Render read-row accordingly
rowname = "%s-%s" % (formname, i)
read_row = self._render_item(table, item, fields,
editable=editable,
deletable=deletable,
readonly=True,
multiple=multiple,
index=i,
layout=layout,
_id="read-row-%s" % rowname,
_class=_class)
if record_id:
audit("read", prefix, name,
record=record_id, representation="html")
item_rows.append(read_row)
# Add the action rows
action_rows = []
# Edit-row
_class = "edit-row inline-form hide"
if required and has_rows:
_class = "%s required" % _class
edit_row = self._render_item(table, item, fields,
editable=_editable,
deletable=_deletable,
readonly=False,
multiple=multiple,
index=0,
layout=layout,
_id="edit-row-%s" % formname,
_class=_class)
action_rows.append(edit_row)
# Add-row
inline_open_add = ""
insertable = get_config(tablename, "insertable")
if insertable is None:
insertable = True
if insertable:
insertable = has_permission("create", tablename)
if insertable:
_class = "add-row inline-form"
explicit_add = options.explicit_add
if not multiple:
explicit_add = False
if has_rows:
                # Hide the add-row when rows already exist
_class = "%s hide" % _class
else:
# Mark to client-side JS that we should always validate
_class = "%s single" % _class
if required and not has_rows:
explicit_add = False
_class = "%s required" % _class
# Explicit open-action for add-row (optional)
if explicit_add:
# Hide add-row for explicit open-action
_class = "%s hide" % _class
if explicit_add is True:
label = current.T("Add another")
else:
label = explicit_add
inline_open_add = A(label,
_class="inline-open-add action-lnk",
)
has_rows = True
add_row = self._render_item(table, None, fields,
editable=True,
deletable=True,
readonly=False,
multiple=multiple,
layout=layout,
_id="add-row-%s" % formname,
_class=_class
)
action_rows.append(add_row)
# Empty edit row
empty_row = self._render_item(table, None, fields,
editable=_editable,
deletable=_deletable,
readonly=False,
multiple=multiple,
index="default",
layout=layout,
_id="empty-edit-row-%s" % formname,
_class="empty-row inline-form hide")
action_rows.append(empty_row)
# Empty read row
empty_row = self._render_item(table, None, fields,
editable=_editable,
deletable=_deletable,
readonly=True,
multiple=multiple,
index="none",
layout=layout,
_id="empty-read-row-%s" % formname,
_class="empty-row inline-form hide")
action_rows.append(empty_row)
# Real input: a hidden text field to store the JSON data
real_input = "%s_%s" % (resource.tablename, field.name)
default = dict(_type = "text",
_value = value,
requires=lambda v: (v, None))
attr = StringWidget._attributes(field, default, **attributes)
attr["_class"] = "%s hide" % attr["_class"]
attr["_id"] = real_input
widget = layout.subform(data,
item_rows,
action_rows,
empty = not has_rows,
)
if self.upload:
hidden = DIV(_class="hidden", _style="display:none")
for k, v in self.upload.items():
hidden.append(INPUT(_type="text",
_id=k,
_name=k,
_value=v,
_style="display:none"))
else:
hidden = ""
# Render output HTML
output = DIV(INPUT(**attr),
hidden,
widget,
inline_open_add,
_id = self._formname(separator="-"),
_field = real_input,
_class = "inline-component",
)
# Reset the layout
layout.set_columns(None)
return output
# -------------------------------------------------------------------------
def represent(self, value):
"""
Read-only representation of this sub-form
@param value: the value returned from extract()
"""
if isinstance(value, basestring):
data = json.loads(value)
else:
data = value
if data["data"] == []:
# Don't render a subform for NONE
return current.messages["NONE"]
resource = self.resource
component = resource.components[data["component"]]
layout = self._layout()
columns = self.options.get("columns")
if columns:
layout.set_columns(columns, row_actions=False)
fields = data["fields"]
if len(fields) == 1 and self.options.get("render_list", False):
output = layout.render_list(component, data)
else:
output = layout.readonly(component, data)
# Reset the layout
layout.set_columns(None)
return output
# -------------------------------------------------------------------------
def accept(self, form, master_id=None, format=None):
"""
            Post-processes this form element against the POST data of the
            request, and creates/updates/deletes any related records.
@param form: the form
@param master_id: the ID of the master record in the form
@param format: the data format extension (for audit)
"""
# Name of the real input field
fname = self._formname(separator="_")
options = self.options
multiple = options.get("multiple", True)
defaults = options.get("default", {})
if fname in form.vars:
# Retrieve the data
try:
data = json.loads(form.vars[fname])
except ValueError:
return
component_name = data.get("component", None)
if not component_name:
return
data = data.get("data", None)
if not data:
return
# Get the component
resource = self.resource
if component_name in resource.components:
component = resource.components[component_name]
else:
return
# Link table handling
link = component.link
if link and options.get("link", True):
# data are for the link table
actuate_link = False
component = link
else:
# data are for the component
actuate_link = True
# Table, tablename, prefix and name of the component
prefix = component.prefix
name = component.name
tablename = component.tablename
db = current.db
table = db[tablename]
s3db = current.s3db
auth = current.auth
# Process each item
has_permission = current.auth.s3_has_permission
audit = current.audit
onaccept = s3db.onaccept
for item in data:
                if "_changed" not in item and "_delete" not in item:
# No changes made to this item - skip
continue
# Get the values
values = Storage()
valid = True
for f, d in item.iteritems():
if f[0] != "_" and d and isinstance(d, dict):
field = table[f]
widget = field.widget
if not hasattr(field, "type"):
# Virtual Field
continue
elif field.type == "upload":
# Find, rename and store the uploaded file
rowindex = item.get("_index", None)
if rowindex is not None:
filename = self._store_file(table, f, rowindex)
if filename:
values[f] = filename
elif isinstance(widget, S3Selector):
# Value must be processed by widget post-process
value, error = widget.postprocess(d["value"])
if not error:
values[f] = value
else:
valid = False
break
else:
# Must run through validator again (despite pre-validation)
# in order to post-process widget output properly (e.g. UTC
# offset subtraction)
try:
value, error = s3_validate(table, f, d["value"])
except AttributeError:
continue
if not error:
values[f] = value
else:
valid = False
break
if not valid:
# Skip invalid items
continue
record_id = item.get("_id")
delete = item.get("_delete")
if not record_id:
if delete:
# Item has been added and then removed again,
# so just ignore it
continue
elif not component.multiple or not multiple:
# Do not create a second record in this component
query = (resource._id == master_id) & \
component.get_join()
f = self._filterby_query()
if f is not None:
query &= f
DELETED = current.xml.DELETED
if DELETED in table.fields:
query &= table[DELETED] != True
row = db(query).select(table._id, limitby=(0, 1)).first()
if row:
record_id = row[table._id]
if record_id:
# Delete..?
if delete:
authorized = has_permission("delete", tablename, record_id)
if not authorized:
continue
c = s3db.resource(tablename, id=record_id)
# Audit happens inside .delete()
# Use cascade=True so that the deletion gets
# rolled back in case subsequent items fail:
success = c.delete(cascade=True, format="html")
# ...or update?
else:
authorized = has_permission("update", tablename, record_id)
if not authorized:
continue
query = (table._id == record_id)
success = db(query).update(**values)
values[table._id.name] = record_id
# Post-process update
if success:
audit("update", prefix, name,
record=record_id, representation=format)
# Update super entity links
s3db.update_super(table, values)
# Update realm
update_realm = s3db.get_config(table, "update_realm")
if update_realm:
auth.set_realm_entity(table, values,
force_update=True)
# Onaccept
onaccept(table, Storage(vars=values), method="update")
else:
# Create a new record
authorized = has_permission("create", tablename)
if not authorized:
continue
# Get master record ID
pkey = component.pkey
mastertable = resource.table
if pkey != mastertable._id.name:
query = (mastertable._id == master_id)
master = db(query).select(mastertable[pkey],
limitby=(0, 1)).first()
if not master:
return
else:
master = Storage({pkey: master_id})
# Apply component defaults
component_defaults = component.defaults
if isinstance(component_defaults, dict):
for k, v in component_defaults.items():
if k != component.fkey and \
k not in values and \
k in component.fields:
values[k] = v
if not actuate_link or not link:
# Add master record ID as linked directly
values[component.fkey] = master[pkey]
else:
                        # Check whether the component is a link table and we
                        # are linking to that via something like pr_person
                        # from hrm_human_resource
fkey = component.fkey
if fkey != "id" and fkey in component.fields and fkey not in values:
if fkey == "pe_id" and pkey == "person_id":
                                # Need to look up the pe_id manually (bad that
                                # we need this special case - there must be a
                                # better way, but this works for now)
ptable = s3db.pr_person
person = db(ptable.id == master[pkey]).select(ptable.pe_id,
limitby=(0, 1)
).first()
if person:
values["pe_id"] = person.pe_id
else:
s3_debug("S3Forms", "Cannot find person with ID: %s" % master[pkey])
else:
values[fkey] = master[pkey]
# Apply defaults
for f, v in defaults.iteritems():
if f not in item:
values[f] = v
# Create the new record
# use _table in case we are using an alias
try:
record_id = component._table.insert(**values)
except:
s3_debug("S3Forms", "Cannot insert values %s into table: %s" % (values, component._table))
raise
# Post-process create
if record_id:
# Ensure we're using the real table, not an alias
table = db[tablename]
# Audit
audit("create", prefix, name,
record=record_id, representation=format)
# Add record_id
values[table._id.name] = record_id
# Update super entity link
s3db.update_super(table, values)
# Update link table
if link and actuate_link:
link.update_link(master, values)
# Set record owner
auth.s3_set_record_owner(table, record_id)
# onaccept
onaccept(table, Storage(vars=values), method="create")
# Success
return True
else:
return False
# -------------------------------------------------------------------------
# Utility methods
# -------------------------------------------------------------------------
def _formname(self, separator=None):
"""
Generate a string representing the formname
            @param separator: separator between the prefix and the name
                              (without a separator, no prefix is prepended)
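            Examples, with prefix="sub", alias="default", selector="task":
                _formname()              => "defaulttask"
                _formname(separator="_") => "sub_defaulttask"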
"""
if separator:
return "%s%s%s%s" % (self.prefix,
separator,
self.alias,
self.selector)
else:
return "%s%s" % (self.alias, self.selector)
# -------------------------------------------------------------------------
def _layout(self):
""" Get the current layout """
layout = self.options.layout
if not layout:
layout = current.deployment_settings.get_ui_inline_component_layout()
elif isinstance(layout, type):
layout = layout()
return layout
# -------------------------------------------------------------------------
def _render_item(self,
table,
item,
fields,
readonly=True,
editable=False,
deletable=False,
multiple=True,
index="none",
layout=None,
**attributes):
"""
Render a read- or edit-row.
@param table: the database table
@param item: the data
@param fields: the fields to render (list of strings)
@param readonly: render a read-row (otherwise edit-row)
@param editable: whether the record can be edited
@param deletable: whether the record can be deleted
@param multiple: whether multiple records can be added
@param index: the row index
@param attributes: HTML attributes for the row
"""
s3 = current.response.s3
        rowtype = "read" if readonly else "edit"
pkey = table._id.name
data = dict()
formfields = []
formname = self._formname()
widgets = self.widgets
for f in fields:
fname = f["name"]
idxname = "%s_i_%s_%s_%s" % (formname, fname, rowtype, index)
if not readonly:
parent = table._tablename.split("_", 1)[1]
caller = "sub_%s_%s" % (formname, idxname)
popup = Storage(parent=parent, caller=caller)
else:
popup = None
formfield = self._rename_field(table[fname],
idxname,
comments=False,
popup=popup,
skip_post_validation=True,
widget=widgets.get(fname, DEFAULT),
)
if "filterby" in self.options:
# Get reduced options set
options = self._filterby_options(fname)
if options:
if len(options) < 2:
requires = IS_IN_SET(options, zero=None)
else:
requires = IS_IN_SET(options)
formfield.requires = SKIP_POST_VALIDATION(requires)
# Get filterby-default
defaults = self._filterby_defaults()
if defaults and fname in defaults:
default = defaults[fname]["value"]
formfield.default = default
if index is not None and item and fname in item:
if formfield.type == "upload":
filename = item[fname]["value"]
if current.request.env.request_method == "POST":
if "_index" in item and item.get("_changed", False):
rowindex = item["_index"]
filename = self._store_file(table, fname, rowindex)
data[idxname] = filename
else:
value = item[fname]["value"]
widget = formfield.widget
if isinstance(widget, S3Selector):
# Use the widget parser to get at the selected ID
value, error = widget.parse(value).get("id"), None
else:
# Use the validator to get at the original value
value, error = s3_validate(table, fname, value)
if error:
value = None
data[idxname] = value
formfields.append(formfield)
if not data:
data = None
elif pkey not in data:
data[pkey] = None
# Render the subform
subform_name = "sub_%s" % formname
rowstyle = layout.rowstyle_read if readonly else layout.rowstyle
subform = SQLFORM.factory(*formfields,
record=data,
showid=False,
formstyle=rowstyle,
upload = s3.download_url,
readonly=readonly,
table_name=subform_name,
separator = ":",
submit = False,
buttons = [])
subform = subform[0]
# Retain any CSS classes added by the layout
subform_class = subform["_class"]
subform.update(**attributes)
if subform_class:
subform.add_class(subform_class)
if multiple:
# Render row actions
layout.actions(subform,
formname,
index,
item = item,
readonly = readonly,
editable = editable,
deletable = deletable,
)
return subform
# -------------------------------------------------------------------------
def _filterby_query(self):
"""
            Convert the filterby-options into a query to apply when
            retrieving the existing rows in this inline-component
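
            Expected structure of the filterby option (derived from the
            parsing below, values illustrative):

                filterby = [{"field": "type",    # field in the component table
                             "options": [1, 2],  # value(s) to match
                             "invert": False,    # optional: invert the match
                             "default": 1,       # optional (used by
                             }]                  # _filterby_defaults)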
"""
filterby = self.options["filterby"]
if not filterby:
return
if not isinstance(filterby, (list, tuple)):
filterby = [filterby]
component = self.resource.components[self.selector]
table = component.table
query = None
for f in filterby:
fieldname = f["field"]
if fieldname not in table.fields:
continue
field = table[fieldname]
if "options" in f:
options = f["options"]
else:
continue
if "invert" in f:
invert = f["invert"]
else:
invert = False
if not isinstance(options, (list, tuple)):
if invert:
q = (field != options)
else:
q = (field == options)
else:
if invert:
q = (~(field.belongs(options)))
else:
q = (field.belongs(options))
if query is None:
query = q
else:
query &= q
return query
# -------------------------------------------------------------------------
def _filterby_defaults(self):
"""
Render the defaults for this inline-component as a dict
for the real-input JSON
"""
if "filterby" in self.options:
filterby = self.options["filterby"]
else:
return None
if not isinstance(filterby, (list, tuple)):
filterby = [filterby]
component = self.resource.components[self.selector]
table = component.table
defaults = dict()
for f in filterby:
fieldname = f["field"]
if fieldname not in table.fields:
continue
if "default" in f:
default = f["default"]
elif "options" in f:
options = f["options"]
if "invert" in f and f["invert"]:
continue
if isinstance(options, (list, tuple)):
if len(options) != 1:
continue
else:
default = options[0]
else:
default = options
else:
continue
if default is not None:
defaults[fieldname] = {"value": default}
return defaults
# -------------------------------------------------------------------------
def _filterby_options(self, fieldname):
"""
Re-render the options list for a field if there is a
filterby-restriction.
@param fieldname: the name of the field
"""
component = self.resource.components[self.selector]
table = component.table
if fieldname not in table.fields:
return None
field = table[fieldname]
filterby = self.options["filterby"]
if not isinstance(filterby, (list, tuple)):
filterby = [filterby]
filter_fields = dict((f["field"], f) for f in filterby)
if fieldname not in filter_fields:
return None
filterby = filter_fields[fieldname]
if "options" not in filterby:
return None
# Get the options list for the original validator
requires = field.requires
if not isinstance(requires, (list, tuple)):
requires = [requires]
if requires:
r = requires[0]
if isinstance(r, IS_EMPTY_OR):
#empty = True
r = r.other
# Currently only supporting IS_IN_SET
if not isinstance(r, IS_IN_SET):
return None
else:
return None
r_opts = r.options()
# Get the filter options
options = filterby["options"]
if not isinstance(options, (list, tuple)):
options = [options]
subset = []
if "invert" in filterby:
invert = filterby["invert"]
else:
invert = False
# Compute reduced options list
for o in r_opts:
if invert:
if isinstance(o, (list, tuple)):
if o[0] not in options:
subset.append(o)
elif isinstance(r_opts, dict):
if o not in options:
subset.append((o, r_opts[o]))
elif o not in options:
subset.append(o)
else:
if isinstance(o, (list, tuple)):
if o[0] in options:
subset.append(o)
elif isinstance(r_opts, dict):
if o in options:
subset.append((o, r_opts[o]))
elif o in options:
subset.append(o)
return subset
# -------------------------------------------------------------------------
def _store_file(self, table, fieldname, rowindex):
"""
            Find, rename and store an uploaded file, and return its
            new pathname
"""
field = table[fieldname]
formname = self._formname()
upload = "upload_%s_%s_%s" % (formname, fieldname, rowindex)
post_vars = current.request.post_vars
if upload in post_vars:
f = post_vars[upload]
if hasattr(f, "file"):
# Newly uploaded file (FieldStorage)
(sfile, ofilename) = (f.file, f.filename)
nfilename = field.store(sfile,
ofilename,
field.uploadfolder)
self.upload[upload] = nfilename
return nfilename
elif isinstance(f, basestring):
# Previously uploaded file
return f
return None
# =============================================================================
class S3SQLInlineLink(S3SQLInlineComponent):
"""
Subform to edit link table entries for the master record
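
        Example (an illustrative sketch - component and field names are
        hypothetical):

            S3SQLInlineLink("group",
                            field = "group_id",
                            label = T("Groups"),
                            multiple = True,
                            )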
"""
prefix = "link"
# -------------------------------------------------------------------------
def extract(self, resource, record_id):
"""
Get all existing links for record_id.
@param resource: the resource the record belongs to
@param record_id: the record ID
@return: list of component record IDs this record is
linked to via the link table
"""
self.resource = resource
component, link = self.get_link()
if record_id:
rkey = component.rkey
rows = link.select([rkey], as_rows=True)
if rows:
rkey = str(link.table[rkey])
values = [row[rkey] for row in rows]
else:
values = []
else:
# Use default
values = [link.table[self.options.field].default]
return values
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget renderer, currently supports multiselect (default), hierarchy
and groupedopts widgets.
@param field: the input field
@param value: the value to populate the widget
@param attributes: attributes for the widget
@return: the widget
"""
options = self.options
if options.readonly is True:
# Render read-only
return self.represent(value)
component, link = self.get_link()
multiple = options.get("multiple", True)
options["multiple"] = multiple
# Field dummy
dummy_field = Storage(name = field.name,
type = link.table[component.rkey].type)
# Widget type
widget = options.get("widget")
if widget != "hierarchy":
# Get the selectable entries for the widget and construct
# a validator from it
zero = None if multiple else options.get("zero", XML(" "))
opts = self.get_options()
if zero is None:
# Remove the empty option
opts = dict((k, v) for k, v in opts.items() if k != "")
requires = IS_IN_SET(opts,
multiple=multiple,
zero=zero,
sort=options.get("sort", True))
if zero is not None:
requires = IS_EMPTY_OR(requires)
dummy_field.requires = requires
# Helper to extract widget options
widget_opts = lambda keys: dict((k, v)
for k, v in options.items()
if k in keys)
# Instantiate the widget
if widget == "groupedopts" or not widget and "cols" in options:
from s3widgets import S3GroupedOptionsWidget
w_opts = widget_opts(("cols",
"size",
"help_field",
"multiple",
))
w = S3GroupedOptionsWidget(**w_opts)
elif widget == "hierarchy":
from s3widgets import S3HierarchyWidget
w_opts = widget_opts(("represent",
"multiple",
"leafonly",
"columns",
))
w_opts["lookup"] = component.tablename
w = S3HierarchyWidget(**w_opts)
else:
# Default to multiselect
from s3widgets import S3MultiSelectWidget
w_opts = widget_opts(("filter",
"header",
"selectedList",
"noneSelectedText",
"multiple",
"columns",
))
w = S3MultiSelectWidget(**w_opts)
# Render the widget
attr = dict(attributes)
attr["_id"] = field.name
if not link.table[options.field].writable:
_class = attr.get("_class", None)
if _class:
attr["_class"] = "%s hide" % _class
else:
attr["_class"] = "hide"
widget = w(dummy_field, value, **attr)
if hasattr(widget, "add_class"):
widget.add_class("inline-link")
# Append the attached script to jquery_ready
script = options.get("script")
if script:
current.response.s3.jquery_ready.append(script)
return widget
# -------------------------------------------------------------------------
def accept(self, form, master_id=None, format=None):
"""
Post-processes this subform element against the POST data,
            and creates/updates/deletes any related records.
@param form: the master form
@param master_id: the ID of the master record in the form
@param format: the data format extension (for audit)
@todo: implement audit
"""
s3db = current.s3db
# Name of the real input field
fname = self._formname(separator="_")
resource = self.resource
success = False
if fname in form.vars:
# Extract the new values from the form
values = form.vars[fname]
if values is None:
values = []
elif not isinstance(values, (list, tuple, set)):
values = [values]
values = set(str(v) for v in values)
# Get the link table
component, link = self.get_link()
# Get the master identity (pkey)
pkey = component.pkey
if pkey == resource._id.name:
master = {pkey: master_id}
else:
# Different pkey (e.g. super-key) => reload the master
query = (resource._id == master_id)
master = current.db(query).select(resource.table[pkey],
limitby=(0, 1)).first()
if master:
# Find existing links
query = FS(component.lkey) == master[pkey]
lresource = s3db.resource(link.tablename, filter = query)
rows = lresource.select([component.rkey], as_rows=True)
# Determine which to delete and which to add
if rows:
rkey = link.table[component.rkey]
current_ids = set(str(row[rkey]) for row in rows)
delete = current_ids - values
insert = values - current_ids
else:
delete = None
insert = values
# Delete links which are no longer used
# @todo: apply filterby to only delete within the subset?
if delete:
query &= FS(component.rkey).belongs(delete)
lresource = s3db.resource(link.tablename, filter = query)
lresource.delete()
# Insert new links
insert.discard("")
if insert:
# Insert new links
for record_id in insert:
record = {component.fkey: record_id}
link.update_link(master, record)
success = True
return success
# -------------------------------------------------------------------------
def represent(self, value):
"""
Read-only representation of this subform.
@param value: the value as returned from extract()
@return: the read-only representation
"""
component, link = self.get_link()
# Use the represent of rkey if it supports bulk, otherwise
# instantiate an S3Represent from scratch:
rkey = link.table[component.rkey]
represent = rkey.represent
if not hasattr(represent, "bulk"):
# Pick the first field from the list that is available:
lookup_field = None
for fname in ("name", "tag"):
if fname in component.fields:
lookup_field = fname
break
represent = S3Represent(lookup = component.tablename,
field = lookup_field)
# Represent all values
if isinstance(value, (list, tuple, set)):
result = represent.bulk(list(value))
if None not in value:
result.pop(None, None)
else:
result = represent.bulk([value])
# Sort them
labels = result.values()
labels.sort()
# Render as TAG to support HTML output
return TAG[""](list(chain.from_iterable([[l, ", "]
for l in labels]))[:-1])
# -------------------------------------------------------------------------
def get_options(self):
"""
Get the options for the widget
@return: dict {value: representation} of options
"""
resource = self.resource
component, link = self.get_link()
rkey = link.table[component.rkey]
# Lookup rkey options from rkey validator
opts = []
requires = rkey.requires
if not isinstance(requires, (list, tuple)):
requires = [requires]
if requires:
validator = requires[0]
if isinstance(validator, IS_EMPTY_OR):
validator = validator.other
try:
opts = validator.options()
except:
pass
# Filter these options?
widget_opts = self.options
filterby = widget_opts.get("filterby")
filteropts = widget_opts.get("options")
filterexpr = widget_opts.get("match")
        if filterby and \
           (filteropts is not None or (filterexpr and resource._rows)):
# filterby is a field selector for the component
# that shall match certain conditions
filter_selector = FS(filterby)
filter_query = None
if filteropts is not None:
# filterby-field shall match one of the given filteropts
if isinstance(filteropts, (list, tuple, set)):
filter_query = (filter_selector.belongs(list(filteropts)))
else:
filter_query = (filter_selector == filteropts)
elif filterexpr:
# filterby-field shall match one of the values for the
# filterexpr-field of the master record
rfield = resource.resolve_selector(filterexpr)
colname = rfield.colname
rows = resource.select([filterexpr], as_rows=True)
values = set(row[colname] for row in rows)
values.discard(None)
if values:
filter_query = (filter_selector.belongs(values)) | \
(filter_selector == None)
# Select the filtered component rows
filter_resource = current.s3db.resource(component.tablename,
filter = filter_query)
rows = filter_resource.select(["id"], as_rows=True)
filtered_opts = []
values = set(str(row[component.table._id]) for row in rows)
for opt in opts:
if str(opt[0]) in values:
filtered_opts.append(opt)
opts = filtered_opts
return dict(opts)
# -------------------------------------------------------------------------
def get_link(self):
"""
Find the target component and its linktable
@return: tuple of S3Resource instances (component, link)
"""
resource = self.resource
selector = self.selector
if selector in resource.components:
component = resource.components[selector]
else:
raise SyntaxError("Undefined component: %s" % selector)
if not component.link:
# @todo: better error message
raise SyntaxError("No linktable for %s" % selector)
link = component.link
return (component, link)
# =============================================================================
class S3SQLInlineComponentCheckbox(S3SQLInlineComponent):
"""
Form element for an inline-component-form
This form element allows CRUD of multi-record-components within
the main record form. It renders a single hidden text field with a
JSON representation of the component records, and a widget which
facilitates client-side manipulation of this JSON.
This widget is a checkbox per available option, so is suitable for
simple many<>many link tables ('tagging'). It does NOT support link
tables with additional fields.
The widget uses the s3.inline_component.js script for
client-side manipulation of the JSON data.
During accept(), the component gets updated according to the JSON
returned.
@todo: deprecate, replace by S3SQLInlineLink
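
        Example (an illustrative sketch - component and field names are
        hypothetical):

            S3SQLInlineComponentCheckbox("sector",
                                         label = T("Sectors"),
                                         field = "sector_id",
                                         cols = 3,
                                         )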
"""
# -------------------------------------------------------------------------
def extract(self, resource, record_id):
"""
Initialize this form element for a particular record. Retrieves
the component data for this record from the database and
converts them into a JSON string to populate the input field with.
@param resource: the resource the record belongs to
@param record_id: the record ID
@return: the JSON for the input field.
"""
self.resource = resource
component_name = self.selector
if component_name in resource.components:
component = resource.components[component_name]
# For link-table components, embed the link table
# rather than the component
if component.link:
component = component.link
table = component.table
tablename = component.tablename
pkey = table._id.name
fieldname = self.options["field"]
field = table[fieldname]
if pkey == fieldname:
qfields = [field]
else:
qfields = [field, table[pkey]]
items = []
if record_id:
# Build the query
query = (resource.table._id == record_id) & \
component.get_join()
if "filterby" in self.options:
# Filter
f = self._filterby_query()
if f is not None:
query &= f
# Get the rows:
rows = current.db(query).select(*qfields)
iappend = items.append
has_permission = current.auth.s3_has_permission
for row in rows:
row_id = row[pkey]
item = {"_id": row_id}
#cid = row[component.table._id]
permitted = has_permission("read", tablename, row_id)
if not permitted:
continue
permitted = has_permission("update", tablename, row_id)
if not permitted:
item["_readonly"] = True
if fieldname in row:
value = row[fieldname]
try:
text = s3_represent_value(field,
value = value,
strip_markup = True,
xml_escape = True)
except:
text = s3_unicode(value)
else:
value = None
text = ""
value = field.formatter(value)
item[fieldname] = {"value": value, "text": text}
iappend(item)
data = {"component": component_name,
"field": fieldname,
"data": items}
else:
raise AttributeError("Undefined component")
return json.dumps(data, separators=SEPARATORS)
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget method for this form element. Renders a table with
checkboxes for all available options.
This widget uses s3.inline_component.js to facilitate
manipulation of the entries.
@param field: the Field for this form element
@param value: the current value for this field
@param attributes: keyword attributes for this widget
@ToDo: Add ability to add new options to the list
@ToDo: Option for Grouped Checkboxes (e.g. for Activity Types)
"""
opts = self.options
if opts.readonly is True:
# Render read-only
return self.represent(value)
if value is None:
value = field.default
if isinstance(value, basestring):
data = json.loads(value)
else:
data = value
value = json.dumps(value, separators=SEPARATORS)
if data is None:
raise SyntaxError("No resource structure information")
T = current.T
script = opts.get("script", None)
if script:
current.response.s3.jquery_ready.append(script)
# @ToDo: Hide completely if the user is not permitted to read this
# component
# Get the list of available options
options = self._options(data)
formname = self._formname()
fieldname = data["field"]
field_name = "%s-%s" % (formname, fieldname)
if not options:
widget = T("No options currently available")
else:
# Translate the Options?
translate = opts.get("translate", None)
if translate is None:
# Try to lookup presence of reusable field
# - how do we know the module though?
s3db = current.s3db
if hasattr(s3db, fieldname):
reusable_field = s3db.get(fieldname)
if reusable_field:
represent = reusable_field.attr.represent
if hasattr(represent, "translate"):
translate = represent.translate
# Render the options
cols = opts.get("cols", 1)
count = len(options)
num_of_rows = count / cols
if count % cols:
num_of_rows += 1
table = [[] for row in range(num_of_rows)]
row_index = 0
col_index = 0
for _id in options:
input_id = "id-%s-%s-%s" % (field_name, row_index, col_index)
option = options[_id]
v = option["name"]
if translate:
v = T(v)
label = LABEL(v, _for=input_id)
title = option.get("help", None)
if title:
# Add help tooltip
label["_title"] = title
widget = TD(INPUT(_disabled = not option["editable"],
_id=input_id,
_name=field_name,
_type="checkbox",
_value=_id,
hideerror=True,
value=option["selected"],
),
label,
)
table[row_index].append(widget)
row_index += 1
if row_index >= num_of_rows:
row_index = 0
col_index += 1
widget = TABLE(table,
_class="checkboxes-widget-s3",
)
# Real input: a hidden text field to store the JSON data
real_input = "%s_%s" % (self.resource.tablename, field_name)
default = dict(_type = "text",
_value = value,
requires=lambda v: (v, None))
attr = StringWidget._attributes(field, default, **attributes)
attr["_class"] = attr["_class"] + " hide"
attr["_id"] = real_input
# Render output HTML
output = DIV(INPUT(**attr),
widget,
_id=self._formname(separator="-"),
_field=real_input,
_class="inline-checkbox inline-component",
_name="%s_widget" % field_name,
)
return output
# -------------------------------------------------------------------------
def _options(self, data):
"""
Build the Options
"""
s3db = current.s3db
opts = self.options
# Get the component resource
resource = self.resource
component_name = data["component"]
component = resource.components[component_name]
table = component.table
# @ToDo: Support lookups to tables which don't use 'name' (e.g. 'tag')
option_help = opts.get("option_help", None)
if option_help:
fields = ["id", "name", option_help]
else:
fields = ["id", "name"]
opt_filter = opts.get("filter", None)
if opt_filter:
linktable = s3db[opt_filter["linktable"]]
lkey = opt_filter["lkey"]
rkey = opt_filter["rkey"]
if "values" in opt_filter:
# Option A - from AJAX request
values = opt_filter["values"]
else:
# Option B - from record
lookuptable = opt_filter.get("lookuptable", None)
if lookuptable:
# e.g. Project Community Activity Types filtered by Sector of parent Project
lookupkey = opt_filter.get("lookupkey", None)
if not lookupkey:
                        raise SyntaxError("lookupkey required for lookuptable filter")
if resource._rows:
_id = resource._rows[0][lookupkey]
_resource = s3db.resource(lookuptable, id=_id)
else:
_id = None
else:
# e.g. Project Themes filtered by Sector
if resource._ids:
_id = resource._ids[0]
_resource = resource
else:
_id = None
if _id:
_table = _resource.table
if rkey in _table.fields:
values = [_table[rkey]]
else:
found = False
if lookuptable:
# Need to load component
hooks = s3db.get_components(_table)
for alias in hooks:
if hooks[alias].rkey == rkey:
found = True
_resource._attach(alias, hooks[alias])
_component = _resource.components[alias]
break
else:
# Components are already loaded
components = _resource.components
for c in components:
if components[c].rkey == rkey:
found = True
_component = components[c]
break
if found:
_rows = _component.select(["id"],
limit=None,
as_rows=True)
values = [r.id for r in _rows]
else:
#raise SyntaxError
values = []
else:
# New record
values = []
rows = []
rappend = rows.append
# All rows, whether or not in the link table
fields = [table[f] for f in fields]
fields.append(linktable[rkey])
query = (table.deleted == False) & \
current.auth.s3_accessible_query("read", table)
srows = current.db(query).select(left=linktable.on(linktable[lkey] == table.id),
orderby=table.name,
*fields)
for r in srows:
v = r[linktable][rkey]
            # We want all rows which have no entry in the linktable (no restrictions)
# or else match this restriction
if not v or v in values:
_r = r[table]
record = Storage(id = _r.id,
name = _r.name)
if option_help:
record[option_help] = _r[option_help]
rappend(record)
else:
_resource = s3db.resource(component.tablename)
# Currently we only support filterby or filter, not both
filterby = opts.get("filterby", None)
if filterby:
options = filterby["options"]
filter_field = filterby["field"]
if isinstance(options, list):
_resource.add_filter(FS(filter_field).belongs(options))
else:
_resource.add_filter(FS(filter_field) == options)
rows = _resource.select(fields=fields,
limit=None,
orderby=table.name,
as_rows=True)
if not rows:
return None
if component.link:
# For link-table components, check the link table permissions
# rather than the component
component = component.link
creatable = current.auth.s3_has_permission("create", component.tablename)
options = OrderedDict()
for r in rows:
options[r.id] = dict(name=r.name,
selected=False,
editable=creatable)
if option_help:
options[r.id]["help"] = r[option_help]
# Which ones are currently selected?
fieldname = data["field"]
items = data["data"]
prefix = component.prefix
name = component.name
audit = current.audit
for i in xrange(len(items)):
item = items[i]
if fieldname in item:
if "_delete" in item:
continue
_id = item[fieldname]["value"]
if "_id" in item:
record_id = item["_id"]
# Check permissions to edit this item
                    editable = "_readonly" not in item
# Audit
audit("read", prefix, name,
record=record_id, representation="html")
elif "_changed" in item:
# Form had errors
editable = True
_id = int(_id)
try:
options[_id].update(selected=True,
editable=editable)
                except KeyError:
# e.g. Theme filtered by Sector
current.session.error = \
current.T("Invalid data: record %(id)s not accessible in table %(table)s") % \
dict(id=_id, table=table)
redirect(URL(args=None, vars=None))
return options
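    # Illustrative shape of the returned options (record ids and names are
    # invented; a "help" key is only present when option_help is configured):
    #   {3: {"name": "Water", "selected": True, "editable": True},
    #    4: {"name": "Sanitation", "selected": False, "editable": True}}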
# -------------------------------------------------------------------------
def represent(self, value):
"""
Read-only representation of this form element. This will be
used instead of the __call__() method when the form element
is to be rendered read-only.
@param value: the value as returned from extract()
@return: the read-only representation of this element as
string or HTML helper
"""
if isinstance(value, basestring):
data = json.loads(value)
else:
data = value
if data["data"] == []:
# Don't render a subform for NONE
return current.messages["NONE"]
fieldname = data["field"]
items = data["data"]
component = self.resource.components[data["component"]]
audit = current.audit
prefix, name = component.prefix, component.name
xml_decode = current.xml.xml_decode
vals = []
for item in items:
if "_id" in item:
record_id = item["_id"]
else:
continue
audit("read", prefix, name,
record=record_id, representation="html")
vals.append(XML(xml_decode(item[fieldname]["text"])))
vals.sort()
represent = TAG[""](list(chain.from_iterable(
[[v, ", "] for v in vals]))[:-1])
return TABLE(TBODY(TR(TD(represent),
#_class="read-row"
)),
#_class="embeddedComponent"
)
# =============================================================================
class S3SQLInlineComponentMultiSelectWidget(S3SQLInlineComponentCheckbox):
"""
Form element for an inline-component-form
This form element allows CRUD of multi-record-components within
the main record form. It renders a single hidden text field with a
JSON representation of the component records, and a widget which
facilitates client-side manipulation of this JSON.
This widget is a SELECT MULTIPLE, so is suitable for
simple many<>many link tables ('tagging'). It does NOT support link
tables with additional fields.
The widget uses the s3.inline_component.js script for
client-side manipulation of the JSON data.
During accept(), the component gets updated according to the JSON
returned.
@todo: deprecate, replace by S3SQLInlineLink
"""
# -------------------------------------------------------------------------
def __call__(self, field, value, **attributes):
"""
Widget method for this form element.
Renders a SELECT MULTIPLE with all available options.
This widget uses s3.inline_component.js to facilitate
manipulation of the entries.
@param field: the Field for this form element
@param value: the current value for this field
@param attributes: keyword attributes for this widget
@ToDo: Add ability to add new options to the list
@ToDo: Wrap S3MultiSelectWidget (or at least bring options up to date)
@ToDo: support Multiple=False
"""
opts = self.options
if opts.readonly is True:
# Render read-only
return self.represent(value)
if value is None:
value = field.default
if isinstance(value, basestring):
data = json.loads(value)
else:
data = value
value = json.dumps(value, separators=SEPARATORS)
if data is None:
raise SyntaxError("No resource structure information")
T = current.T
jquery_ready = current.response.s3.jquery_ready
script = opts.get("script", None)
if script and script not in jquery_ready:
jquery_ready.append(script)
# @ToDo: Hide completely if the user is not permitted to read this
# component
# Get the list of available options
options = self._options(data)
formname = self._formname()
fieldname = data["field"]
field_name = "%s-%s" % (formname, fieldname)
if not options:
widget = T("No options currently available")
else:
# Translate the Options?
translate = opts.get("translate", None)
if translate is None:
# Try to lookup presence of reusable field
# - how do we know the module though?
s3db = current.s3db
if hasattr(s3db, fieldname):
reusable_field = s3db.get(fieldname)
if reusable_field:
represent = reusable_field.attr.represent
if hasattr(represent, "translate"):
translate = represent.translate
# Render the options
_opts = []
vals = []
oappend = _opts.append
for _id in options:
option = options[_id]
v = option["name"]
if translate:
v = T(v)
oappend(OPTION(v,
_value=_id,
_disabled = not option["editable"]))
if option["selected"]:
vals.append(_id)
widget = SELECT(*_opts,
value=vals,
_id=field_name,
_name=field_name,
_multiple=True,
_class="inline-multiselect-widget",
_size=5 # @ToDo: Make this configurable?
)
# jQueryUI widget
# (this section could be made optional)
opt_filter = opts.get("filter", False)
header = opts.get("header", False)
selectedList = opts.get("selectedList", 3)
noneSelectedText = "Select"
if header is True:
header = '''checkAllText:'%s',uncheckAllText:"%s"''' % \
(T("Check all"),
T("Uncheck all"))
elif header is False:
header = '''header:false'''
else:
            header = '''header:"%s"''' % header
script = '''$('#%s').multiselect({selectedText:'%s',%s,height:300,minWidth:0,selectedList:%s,noneSelectedText:'%s'})''' % \
(field_name,
T("# selected"),
header,
selectedList,
T(noneSelectedText))
if opt_filter:
script = '''%s.multiselectfilter()''' % script
if script not in jquery_ready: # Prevents loading twice when form has errors
jquery_ready.append(script)
# Real input: a hidden text field to store the JSON data
real_input = "%s_%s" % (self.resource.tablename, field_name)
default = dict(_type = "text",
_value = value,
requires=lambda v: (v, None))
attr = StringWidget._attributes(field, default, **attributes)
attr["_class"] = attr["_class"] + " hide"
attr["_id"] = real_input
# Render output HTML
output = DIV(INPUT(**attr),
widget,
_id=self._formname(separator="-"),
_field=real_input,
_class="inline-multiselect inline-component",
_name="%s_widget" % field_name,
)
columns = opts.get("columns")
if columns:
output.add_class("small-%s columns" % columns)
return output
# END =========================================================================
| anurag-ks/eden | modules/s3/s3forms.py | Python | mit | 156,053 |
from django.conf.urls import patterns, url, include
from dashboard import views
urlpatterns = patterns('',
url(r'^', views.index, name='index'),
)
| jcfausto/django-ionic-dashboard | dashboard/urls.py | Python | mit | 157 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
DsgTools
A QGIS plugin
Brazilian Army Cartographic Production Tools
-------------------
begin : 2017-09-12
git sha : $Format:%H$
copyright : (C) 2017 by Philipe Borba - Cartographic Engineer @ Brazilian Army
email : borba.philipe@eb.mil.br
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os
# Qt imports
from qgis.PyQt import QtWidgets, uic
from qgis.PyQt.QtCore import pyqtSlot, pyqtSignal, QSettings, Qt
from qgis.PyQt.QtGui import QKeySequence
FORM_CLASS, _ = uic.loadUiType(os.path.join(
os.path.dirname(__file__), 'shortcutChooserWidget.ui'))
class ShortcutChooserWidget(QtWidgets.QWidget, FORM_CLASS):
keyPressed = pyqtSignal()
def __init__(self, parent=None):
"""
        Initializes the ShortcutChooserWidget
"""
super(ShortcutChooserWidget, self).__init__(parent)
self.resetVariables()
self.setupUi(self)
@pyqtSlot(bool)
def on_assignShortcutPushButton_clicked(self):
"""
        After the button is clicked, focus is needed so that keyPressEvent and keyReleaseEvent are received
"""
self.setFocus()
@pyqtSlot(bool)
def on_assignShortcutPushButton_toggled(self, toggled):
"""
        Toggling the button resets self.modifiers and self.key and prepares the button text
"""
if toggled:
self.resetVariables()
self.assignShortcutPushButton.setText(self.tr('Enter Value'))
@pyqtSlot(bool, name = 'on_clearPushButton_clicked')
def clearAll(self):
"""
Clears push button and also resets self.modifiers and self.keys
"""
self.assignShortcutPushButton.setChecked(False)
self.assignShortcutPushButton.setText(self.tr('Assign Shortcut'))
self.resetVariables()
def resetVariables(self):
"""
Resets self.modifiers, self.key and self.keySequence to 0
"""
self.modifiers = 0
self.key = 0
self.keySequence = 0
    def keyPressEvent(self, event):
        """
        Captures key presses while a shortcut is being assigned: modifier
        keys accumulate in self.modifiers, Escape cancels the assignment,
        and any other key becomes self.key
        """
if not self.assignShortcutPushButton.isChecked():
super(ShortcutChooserWidget, self).keyPressEvent(event)
return
key = int(event.key())
if key == Qt.Key_Meta:
self.modifiers |= Qt.META
self.updateShortcutText()
elif key == Qt.Key_Alt:
self.modifiers |= Qt.ALT
self.updateShortcutText()
elif key == Qt.Key_Control:
self.modifiers |= Qt.CTRL
self.updateShortcutText()
elif key == Qt.Key_Shift:
self.modifiers |= Qt.SHIFT
self.updateShortcutText()
elif key == Qt.Key_Escape:
self.assignShortcutPushButton.setChecked(False)
return
else:
self.key = key
self.updateShortcutText()
def keyReleaseEvent(self, event):
if not self.assignShortcutPushButton.isChecked():
super(ShortcutChooserWidget, self).keyReleaseEvent(event)
return
key = event.key()
if key == Qt.Key_Meta:
            self.modifiers &= ~Qt.META  # clear the modifier bit on release
self.updateShortcutText()
elif key == Qt.Key_Alt:
            self.modifiers &= ~Qt.ALT
self.updateShortcutText()
elif key == Qt.Key_Control:
            self.modifiers &= ~Qt.CTRL
self.updateShortcutText()
elif key == Qt.Key_Shift:
            self.modifiers &= ~Qt.SHIFT
self.updateShortcutText()
elif key == Qt.Key_Escape:
return
else:
self.assignShortcutPushButton.setChecked(False)
self.updateShortcutText()
self.setShortcut(self.keySequence)
def setEnabled(self, enabled):
if not enabled:
self.clearAll()
super(ShortcutChooserWidget, self).setEnabled(enabled)
def setShortcut(self, shortcut):
self.keySequence = QKeySequence(shortcut)
self.assignShortcutPushButton.setChecked(False)
self.assignShortcutPushButton.setText(self.keySequence.toString(format = QKeySequence.NativeText))
def getShortcut(self, asQKeySequence = False):
if asQKeySequence:
return self.keySequence
else:
return int(self.keySequence)
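    # Illustrative usage (assumes a running QApplication; the shortcut
    # string is a made-up example):
    #   widget = ShortcutChooserWidget()
    #   widget.setShortcut('Ctrl+R')
    #   widget.getShortcut(asQKeySequence=True)  # -> QKeySequence('Ctrl+R')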
def updateShortcutText(self):
self.keySequence = QKeySequence(self.modifiers+self.key)
#this uses QKeySequence.NativeText to show in the interface. To store data, no filter should be provided
        self.assignShortcutPushButton.setText(self.tr('Input: {0}').format(self.keySequence.toString(format = QKeySequence.NativeText)))
| lcoandrade/DsgTools | gui/CustomWidgets/BasicInterfaceWidgets/shortcutChooserWidget.py | Python | gpl-2.0 | 5,522 |
from . import unittest
from shapely.geometry.base import BaseGeometry, EmptyGeometry
import shapely.geometry as sgeom
from shapely.geometry.polygon import LinearRing
empty_generator = lambda: iter([])
class EmptinessTestCase(unittest.TestCase):
def test_empty_class(self):
g = EmptyGeometry()
self.assertTrue(g._is_empty)
def test_empty_base(self):
g = BaseGeometry()
self.assertTrue(g._is_empty)
def test_emptying_point(self):
p = sgeom.Point(0, 0)
self.assertFalse(p._is_empty)
p.empty()
self.assertTrue(p._is_empty)
def test_none_geom(self):
p = BaseGeometry()
p._geom = None
self.assertTrue(p.is_empty)
def test_empty_point(self):
self.assertTrue(sgeom.Point().is_empty)
def test_empty_multipoint(self):
self.assertTrue(sgeom.MultiPoint().is_empty)
def test_empty_geometry_collection(self):
self.assertTrue(sgeom.GeometryCollection().is_empty)
def test_empty_linestring(self):
self.assertTrue(sgeom.LineString().is_empty)
self.assertTrue(sgeom.LineString(None).is_empty)
self.assertTrue(sgeom.LineString([]).is_empty)
self.assertTrue(sgeom.LineString(empty_generator()).is_empty)
def test_empty_multilinestring(self):
self.assertTrue(sgeom.MultiLineString([]).is_empty)
def test_empty_polygon(self):
self.assertTrue(sgeom.Polygon().is_empty)
self.assertTrue(sgeom.Polygon(None).is_empty)
self.assertTrue(sgeom.Polygon([]).is_empty)
self.assertTrue(sgeom.Polygon(empty_generator()).is_empty)
def test_empty_multipolygon(self):
self.assertTrue(sgeom.MultiPolygon([]).is_empty)
def test_empty_linear_ring(self):
self.assertTrue(LinearRing().is_empty)
self.assertTrue(LinearRing(None).is_empty)
self.assertTrue(LinearRing([]).is_empty)
self.assertTrue(LinearRing(empty_generator()).is_empty)
def test_suite():
return unittest.TestLoader().loadTestsFromTestCase(EmptinessTestCase)
if __name__ == '__main__':
unittest.main()
| jdmcbr/Shapely | tests/test_emptiness.py | Python | bsd-3-clause | 2,120 |
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the feedback thread page."""
from core.controllers import base
from core.controllers import editor
from core.domain import exp_services
from core.domain import feedback_services
class ThreadListHandler(base.BaseHandler):
"""Handles operations relating to feedback thread lists."""
PAGE_NAME_FOR_CSRF = 'editor'
def get(self, exploration_id):
self.values.update({
'threads': [t.to_dict() for t in feedback_services.get_all_threads(
exploration_id, False)]})
self.render_json(self.values)
@base.require_user
def post(self, exploration_id):
subject = self.payload.get('subject')
if not subject:
raise self.InvalidInputException(
'A thread subject must be specified.')
text = self.payload.get('text')
if not text:
raise self.InvalidInputException(
'Text for the first message in the thread must be specified.')
feedback_services.create_thread(
exploration_id,
self.payload.get('state_name'),
self.user_id,
subject,
text)
self.render_json(self.values)
class ThreadHandler(base.BaseHandler):
"""Handles operations relating to feedback threads."""
PAGE_NAME_FOR_CSRF = 'editor'
def get(self, exploration_id, thread_id): # pylint: disable=unused-argument
suggestion = feedback_services.get_suggestion(exploration_id, thread_id)
self.values.update({
'messages': [m.to_dict() for m in feedback_services.get_messages(
exploration_id, thread_id)],
'suggestion': suggestion.to_dict() if suggestion else None
})
self.render_json(self.values)
@base.require_user
def post(self, exploration_id, thread_id): # pylint: disable=unused-argument
suggestion = feedback_services.get_suggestion(exploration_id, thread_id)
text = self.payload.get('text')
updated_status = self.payload.get('updated_status')
if not text and not updated_status:
raise self.InvalidInputException(
'Text for the message must be specified.')
if suggestion and updated_status:
raise self.InvalidInputException(
'Suggestion thread status cannot be changed manually.')
feedback_services.create_message(
exploration_id,
thread_id,
self.user_id,
updated_status,
self.payload.get('updated_subject'),
text)
self.render_json(self.values)
class RecentFeedbackMessagesHandler(base.BaseHandler):
"""Returns a list of recently-posted feedback messages.
Note that this currently also includes messages posted in private
explorations.
"""
@base.require_moderator
def get(self):
urlsafe_start_cursor = self.request.get('cursor')
all_feedback_messages, new_urlsafe_start_cursor, more = (
feedback_services.get_next_page_of_all_feedback_messages(
urlsafe_start_cursor=urlsafe_start_cursor))
self.render_json({
'results': [m.to_dict() for m in all_feedback_messages],
'cursor': new_urlsafe_start_cursor,
'more': more,
})
class FeedbackStatsHandler(base.BaseHandler):
"""Returns Feedback stats for an exploration.
- Number of open threads
- Number of total threads
"""
def get(self, exploration_id):
feedback_thread_analytics = (
feedback_services.get_thread_analytics(
exploration_id))
self.values.update({
'num_open_threads': (
feedback_thread_analytics.num_open_threads),
'num_total_threads': (
feedback_thread_analytics.num_total_threads),
})
self.render_json(self.values)
class SuggestionHandler(base.BaseHandler):
    """Handles operations relating to learner suggestions."""
PAGE_NAME_FOR_CSRF = 'player'
@base.require_user
def post(self, exploration_id):
feedback_services.create_suggestion(
exploration_id,
self.user_id,
self.payload.get('exploration_version'),
self.payload.get('state_name'),
self.payload.get('description'),
self.payload.get('suggestion_content'))
self.render_json(self.values)
class SuggestionActionHandler(base.BaseHandler):
    """Handles actions performed on threads with suggestions."""
PAGE_NAME_FOR_CSRF = 'editor'
_ACCEPT_ACTION = 'accept'
_REJECT_ACTION = 'reject'
@editor.require_editor
def put(self, exploration_id, thread_id):
action = self.payload.get('action')
if action == self._ACCEPT_ACTION:
exp_services.accept_suggestion(
self.user_id,
thread_id,
exploration_id,
self.payload.get('commit_message'))
elif action == self._REJECT_ACTION:
exp_services.reject_suggestion(
self.user_id, thread_id, exploration_id)
else:
raise self.InvalidInputException('Invalid action.')
self.render_json(self.values)
class SuggestionListHandler(base.BaseHandler):
"""Handles operations relating to list of threads with suggestions."""
PAGE_NAME_FOR_CSRF = 'editor'
_LIST_TYPE_OPEN = 'open'
_LIST_TYPE_CLOSED = 'closed'
_LIST_TYPE_ALL = 'all'
def _string_to_bool(self, has_suggestion):
if has_suggestion == 'true':
return True
elif has_suggestion == 'false':
return False
else:
return None
@base.require_user
def get(self, exploration_id):
threads = None
list_type = self.request.get('list_type')
has_suggestion = self._string_to_bool(
self.request.get('has_suggestion'))
if has_suggestion is None:
raise self.InvalidInputException(
'Invalid value for has_suggestion.')
if list_type == self._LIST_TYPE_OPEN:
threads = feedback_services.get_open_threads(
exploration_id, has_suggestion)
elif list_type == self._LIST_TYPE_CLOSED:
threads = feedback_services.get_closed_threads(
exploration_id, has_suggestion)
elif list_type == self._LIST_TYPE_ALL:
threads = feedback_services.get_all_threads(
exploration_id, has_suggestion)
else:
raise self.InvalidInputException('Invalid list type.')
self.values.update({'threads': [t.to_dict() for t in threads]})
self.render_json(self.values)
class UnsentFeedbackEmailHandler(base.BaseHandler):
    """Handles the task of sending emails for feedback messages.
This is yet to be implemented."""
def post(self):
pass
| anggorodewanto/oppia | core/controllers/feedback.py | Python | apache-2.0 | 7,540 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import setuptools
import sys
from distutils.version import StrictVersion
version_file_path = os.path.join(
os.path.dirname(__file__),
'abjad',
'_version.py'
)
with open(version_file_path, 'r') as file_pointer:
file_contents_string = file_pointer.read()
local_dict = {}
exec(file_contents_string, None, local_dict)
__version__ = local_dict['__version__']
description = 'Abjad is a Python API for Formalized Score Control.'
long_description = 'Abjad is an interactive software system designed'
long_description += ' to help composers build up complex pieces of'
long_description += ' music notation in an iterative and incremental way.'
long_description += ' Use Abjad to create a symbolic representation of all'
long_description += ' the notes, rests, staves, tuplets, beams and slurs'
long_description += ' in any score.'
long_description += ' Because Abjad extends the Python programming language,'
long_description += ' you can use Abjad to make systematic changes to'
long_description += ' your music as you work.'
long_description += ' And because Abjad wraps the powerful LilyPond music'
long_description += ' notation package, you can use Abjad to control'
long_description += ' the typographic details of all the symbols on the page.'
author = [
'Trevor Bača',
'Josiah Wolf Oberholtzer',
'Víctor Adán',
]
author = ', '.join(author)
author_email = [
'trevorbaca@gmail.com',
'josiah.oberholtzer@gmail.com',
'contact@victoradan.net',
]
author_email = ', '.join(author_email)
keywords = [
'music composition',
'music notation',
'formalized score control',
'lilypond',
]
keywords = ', '.join(keywords)
install_requires = [
'configobj',
'ply',
'six',
]
version = '.'.join(str(x) for x in sys.version_info[:3])
if StrictVersion(version) < StrictVersion('3.4.0'):
install_requires.append('enum34')
extras_require = {
'development': [
'pytest',
'sphinx==1.2.3', # TODO: Remove version once Sphinx fixes https://github.com/sphinx-doc/sphinx/issues/1822
'sphinx_rtd_theme',
'sphinxcontrib-images',
'PyPDF2',
],
'ipython': [
'ipython',
],
}
entry_points = {
'console_scripts': [
'abjad = abjad.tools.systemtools.run_abjad:run_abjad',
'ajv = abjad.tools.developerscripttools.run_ajv:run_ajv',
]
}
setuptools.setup(
author=author,
author_email=author_email,
description=description,
include_package_data=True,
install_requires=install_requires,
extras_require=extras_require,
entry_points=entry_points,
keywords=keywords,
license='GPL',
long_description=long_description,
name='Abjad',
packages=['abjad'],
platforms='Any',
url='http://www.projectabjad.org',
version=__version__,
) | andrewyoung1991/abjad | setup.py | Python | gpl-3.0 | 2,910 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2013 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
Miscellaneous tools used by OpenERP.
"""
from functools import wraps
import cProfile
import subprocess
import logging
import os
import socket
import sys
import threading
import time
import zipfile
from collections import defaultdict
from datetime import datetime
from itertools import islice, izip, groupby
from lxml import etree
from which import which
from threading import local
import traceback
try:
from html2text import html2text
except ImportError:
html2text = None
from config import config
from cache import *
import openerp
# get_encodings, ustr and exception_to_unicode were originally from tools.misc.
# They were moved to loglevels until we refactor tools.
from openerp.loglevels import get_encodings, ustr, exception_to_unicode # noqa
_logger = logging.getLogger(__name__)
# List of etree._Element subclasses that we choose to ignore when parsing XML.
# We include the *Base ones just in case, currently they seem to be subclasses of the _* ones.
SKIPPED_ELEMENT_TYPES = (etree._Comment, etree._ProcessingInstruction, etree.CommentBase, etree.PIBase)
def find_in_path(name):
try:
return which(name)
except IOError:
return None
def find_pg_tool(name):
path = None
if config['pg_path'] and config['pg_path'] != 'None':
path = config['pg_path']
try:
return which(name, path=path)
except IOError:
return None
def exec_pg_command(name, *args):
prog = find_pg_tool(name)
if not prog:
raise Exception('Couldn\'t find %s' % name)
args2 = (prog,) + args
return subprocess.call(args2)
def exec_pg_command_pipe(name, *args):
prog = find_pg_tool(name)
if not prog:
raise Exception('Couldn\'t find %s' % name)
# on win32, passing close_fds=True is not compatible
# with redirecting std[in/err/out]
pop = subprocess.Popen((prog,) + args, bufsize= -1,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
close_fds=(os.name=="posix"))
return pop.stdin, pop.stdout
def exec_command_pipe(name, *args):
prog = find_in_path(name)
if not prog:
raise Exception('Couldn\'t find %s' % name)
# on win32, passing close_fds=True is not compatible
# with redirecting std[in/err/out]
pop = subprocess.Popen((prog,) + args, bufsize= -1,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
close_fds=(os.name=="posix"))
return pop.stdin, pop.stdout
#----------------------------------------------------------
# File paths
#----------------------------------------------------------
#file_path_root = os.getcwd()
#file_path_addons = os.path.join(file_path_root, 'addons')
def file_open(name, mode="r", subdir='addons', pathinfo=False):
"""Open a file from the OpenERP root, using a subdir folder.
Example::
    >>> file_open('hr/report/timesheet.xsl')
>>> file_open('addons/hr/report/timesheet.xsl')
>>> file_open('../../base/report/rml_template.xsl', subdir='addons/hr/report', pathinfo=True)
@param name name of the file
@param mode file open mode
@param subdir subdirectory
@param pathinfo if True returns tuple (fileobject, filepath)
@return fileobject if pathinfo is False else (fileobject, filepath)
"""
import openerp.modules as addons
adps = addons.module.ad_paths
rtp = os.path.normcase(os.path.abspath(config['root_path']))
basename = name
if os.path.isabs(name):
# It is an absolute path
# Is it below 'addons_path' or 'root_path'?
name = os.path.normcase(os.path.normpath(name))
for root in adps + [rtp]:
root = os.path.normcase(os.path.normpath(root)) + os.sep
if name.startswith(root):
base = root.rstrip(os.sep)
name = name[len(base) + 1:]
break
else:
# It is outside the OpenERP root: skip zipfile lookup.
base, name = os.path.split(name)
return _fileopen(name, mode=mode, basedir=base, pathinfo=pathinfo, basename=basename)
if name.replace(os.sep, '/').startswith('addons/'):
subdir = 'addons'
name2 = name[7:]
elif subdir:
name = os.path.join(subdir, name)
if name.replace(os.sep, '/').startswith('addons/'):
subdir = 'addons'
name2 = name[7:]
else:
name2 = name
# First, try to locate in addons_path
if subdir:
for adp in adps:
try:
return _fileopen(name2, mode=mode, basedir=adp,
pathinfo=pathinfo, basename=basename)
except IOError:
pass
# Second, try to locate in root_path
return _fileopen(name, mode=mode, basedir=rtp, pathinfo=pathinfo, basename=basename)
def _fileopen(path, mode, basedir, pathinfo, basename=None):
name = os.path.normpath(os.path.join(basedir, path))
if basename is None:
basename = name
# Give higher priority to module directories, which is
# a more common case than zipped modules.
if os.path.isfile(name):
fo = open(name, mode)
if pathinfo:
return fo, name
return fo
# Support for loading modules in zipped form.
# This will not work for zipped modules that are sitting
# outside of known addons paths.
head = os.path.normpath(path)
zipname = False
while os.sep in head:
head, tail = os.path.split(head)
if not tail:
break
if zipname:
zipname = os.path.join(tail, zipname)
else:
zipname = tail
zpath = os.path.join(basedir, head + '.zip')
if zipfile.is_zipfile(zpath):
from cStringIO import StringIO
zfile = zipfile.ZipFile(zpath)
try:
fo = StringIO()
fo.write(zfile.read(os.path.join(
os.path.basename(head), zipname).replace(
os.sep, '/')))
fo.seek(0)
if pathinfo:
return fo, name
return fo
except Exception:
pass
# Not found
if name.endswith('.rml'):
raise IOError('Report %r doesn\'t exist or deleted' % basename)
raise IOError('File not found: %s' % basename)
#----------------------------------------------------------
# iterables
#----------------------------------------------------------
def flatten(list):
    """Flatten a nested list of elements into a single flat list
Author: Christophe Simonis (christophe@tinyerp.com)
Examples::
>>> flatten(['a'])
['a']
>>> flatten('b')
['b']
>>> flatten( [] )
[]
>>> flatten( [[], [[]]] )
[]
>>> flatten( [[['a','b'], 'c'], 'd', ['e', [], 'f']] )
['a', 'b', 'c', 'd', 'e', 'f']
>>> t = (1,2,(3,), [4, 5, [6, [7], (8, 9), ([10, 11, (12, 13)]), [14, [], (15,)], []]])
>>> flatten(t)
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
"""
def isiterable(x):
return hasattr(x, "__iter__")
r = []
for e in list:
if isiterable(e):
map(r.append, flatten(e))
else:
r.append(e)
return r
def reverse_enumerate(l):
    """Like enumerate, but in reverse order
Usage::
>>> a = ['a', 'b', 'c']
>>> it = reverse_enumerate(a)
>>> it.next()
(2, 'c')
>>> it.next()
(1, 'b')
>>> it.next()
(0, 'a')
>>> it.next()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
StopIteration
"""
return izip(xrange(len(l)-1, -1, -1), reversed(l))
class UpdateableStr(local):
""" Class that stores an updateable string (used in wizards)
"""
def __init__(self, string=''):
self.string = string
def __str__(self):
return str(self.string)
def __repr__(self):
return str(self.string)
def __nonzero__(self):
return bool(self.string)
class UpdateableDict(local):
"""Stores an updateable dict to use in wizards
"""
def __init__(self, dict=None):
if dict is None:
dict = {}
self.dict = dict
def __str__(self):
return str(self.dict)
def __repr__(self):
return str(self.dict)
def clear(self):
return self.dict.clear()
def keys(self):
return self.dict.keys()
def __setitem__(self, i, y):
self.dict.__setitem__(i, y)
def __getitem__(self, i):
return self.dict.__getitem__(i)
def copy(self):
return self.dict.copy()
def iteritems(self):
return self.dict.iteritems()
def iterkeys(self):
return self.dict.iterkeys()
def itervalues(self):
return self.dict.itervalues()
def pop(self, k, d=None):
return self.dict.pop(k, d)
def popitem(self):
return self.dict.popitem()
def setdefault(self, k, d=None):
return self.dict.setdefault(k, d)
def update(self, E, **F):
        return self.dict.update(E, **F)
def values(self):
return self.dict.values()
def get(self, k, d=None):
return self.dict.get(k, d)
def has_key(self, k):
return self.dict.has_key(k)
def items(self):
return self.dict.items()
def __cmp__(self, y):
return self.dict.__cmp__(y)
def __contains__(self, k):
return self.dict.__contains__(k)
def __delitem__(self, y):
return self.dict.__delitem__(y)
def __eq__(self, y):
return self.dict.__eq__(y)
def __ge__(self, y):
return self.dict.__ge__(y)
def __gt__(self, y):
return self.dict.__gt__(y)
def __hash__(self):
return self.dict.__hash__()
def __iter__(self):
return self.dict.__iter__()
def __le__(self, y):
return self.dict.__le__(y)
def __len__(self):
return self.dict.__len__()
def __lt__(self, y):
return self.dict.__lt__(y)
def __ne__(self, y):
return self.dict.__ne__(y)
class currency(float):
    """ Deprecated.
.. warning::
Don't use ! Use res.currency.round()
"""
def __init__(self, value, accuracy=2, rounding=None):
if rounding is None:
rounding=10**-accuracy
self.rounding=rounding
self.accuracy=accuracy
def __new__(cls, value, accuracy=2, rounding=None):
return float.__new__(cls, round(value, accuracy))
#def __str__(self):
# display_value = int(self*(10**(-self.accuracy))/self.rounding)*self.rounding/(10**(-self.accuracy))
# return str(display_value)
def to_xml(s):
return s.replace('&','&').replace('<','<').replace('>','>')
def get_iso_codes(lang):
if lang.find('_') != -1:
if lang.split('_')[0] == lang.split('_')[1].lower():
lang = lang.split('_')[0]
return lang
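# Illustrative examples of the reduction above (locale codes taken from the
# table below):
#   >>> get_iso_codes('nl_NL')
#   'nl'
#   >>> get_iso_codes('fr_BE')
#   'fr_BE'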
ALL_LANGUAGES = {
'ab_RU': u'Abkhazian / аҧсуа',
'am_ET': u'Amharic / አምሃርኛ',
'ar_SY': u'Arabic / الْعَرَبيّة',
'bg_BG': u'Bulgarian / български език',
'bs_BS': u'Bosnian / bosanski jezik',
'ca_ES': u'Catalan / Català',
'cs_CZ': u'Czech / Čeština',
'da_DK': u'Danish / Dansk',
'de_DE': u'German / Deutsch',
'el_GR': u'Greek / Ελληνικά',
'en_CA': u'English (CA)',
'en_GB': u'English (UK)',
'en_US': u'English (US)',
'es_AR': u'Spanish (AR) / Español (AR)',
'es_BO': u'Spanish (BO) / Español (BO)',
'es_CL': u'Spanish (CL) / Español (CL)',
'es_CO': u'Spanish (CO) / Español (CO)',
'es_CR': u'Spanish (CR) / Español (CR)',
'es_DO': u'Spanish (DO) / Español (DO)',
'es_EC': u'Spanish (EC) / Español (EC)',
'es_ES': u'Spanish / Español',
'es_GT': u'Spanish (GT) / Español (GT)',
'es_HN': u'Spanish (HN) / Español (HN)',
'es_MX': u'Spanish (MX) / Español (MX)',
'es_NI': u'Spanish (NI) / Español (NI)',
'es_PA': u'Spanish (PA) / Español (PA)',
'es_PE': u'Spanish (PE) / Español (PE)',
'es_PR': u'Spanish (PR) / Español (PR)',
'es_PY': u'Spanish (PY) / Español (PY)',
'es_SV': u'Spanish (SV) / Español (SV)',
'es_UY': u'Spanish (UY) / Español (UY)',
'es_VE': u'Spanish (VE) / Español (VE)',
'et_EE': u'Estonian / Eesti keel',
'fa_IR': u'Persian / فارس',
'fi_FI': u'Finnish / Suomi',
'fr_BE': u'French (BE) / Français (BE)',
'fr_CH': u'French (CH) / Français (CH)',
'fr_FR': u'French / Français',
'gl_ES': u'Galician / Galego',
'gu_IN': u'Gujarati / ગુજરાતી',
'he_IL': u'Hebrew / עִבְרִי',
'hi_IN': u'Hindi / हिंदी',
'hr_HR': u'Croatian / hrvatski jezik',
'hu_HU': u'Hungarian / Magyar',
'id_ID': u'Indonesian / Bahasa Indonesia',
'it_IT': u'Italian / Italiano',
'iu_CA': u'Inuktitut / ᐃᓄᒃᑎᑐᑦ',
'ja_JP': u'Japanese / 日本語',
'ko_KP': u'Korean (KP) / 한국어 (KP)',
'ko_KR': u'Korean (KR) / 한국어 (KR)',
'lo_LA': u'Lao / ພາສາລາວ',
'lt_LT': u'Lithuanian / Lietuvių kalba',
'lv_LV': u'Latvian / latviešu valoda',
'ml_IN': u'Malayalam / മലയാളം',
'mn_MN': u'Mongolian / монгол',
'nb_NO': u'Norwegian Bokmål / Norsk bokmål',
'nl_NL': u'Dutch / Nederlands',
'nl_BE': u'Flemish (BE) / Vlaams (BE)',
'oc_FR': u'Occitan (FR, post 1500) / Occitan',
'pl_PL': u'Polish / Język polski',
'pt_BR': u'Portuguese (BR) / Português (BR)',
'pt_PT': u'Portuguese / Português',
'ro_RO': u'Romanian / română',
'ru_RU': u'Russian / русский язык',
'si_LK': u'Sinhalese / සිංහල',
'sl_SI': u'Slovenian / slovenščina',
'sk_SK': u'Slovak / Slovenský jazyk',
'sq_AL': u'Albanian / Shqip',
'sr_RS': u'Serbian (Cyrillic) / српски',
'sr@latin': u'Serbian (Latin) / srpski',
'sv_SE': u'Swedish / svenska',
'te_IN': u'Telugu / తెలుగు',
'tr_TR': u'Turkish / Türkçe',
'vi_VN': u'Vietnamese / Tiếng Việt',
'uk_UA': u'Ukrainian / українська',
'ur_PK': u'Urdu / اردو',
'zh_CN': u'Chinese (CN) / 简体中文',
'zh_HK': u'Chinese (HK)',
'zh_TW': u'Chinese (TW) / 正體字',
'th_TH': u'Thai / ภาษาไทย',
'tlh_TLH': u'Klingon',
}
def scan_languages():
""" Returns all languages supported by OpenERP for translation
:returns: a list of (lang_code, lang_name) pairs
:rtype: [(str, unicode)]
"""
return sorted(ALL_LANGUAGES.iteritems(), key=lambda k: k[1])
def get_user_companies(cr, user):
def _get_company_children(cr, ids):
if not ids:
return []
cr.execute('SELECT id FROM res_company WHERE parent_id IN %s', (tuple(ids),))
res = [x[0] for x in cr.fetchall()]
res.extend(_get_company_children(cr, res))
return res
cr.execute('SELECT company_id FROM res_users WHERE id=%s', (user,))
user_comp = cr.fetchone()[0]
if not user_comp:
return []
return [user_comp] + _get_company_children(cr, [user_comp])
def mod10r(number):
"""
    Input: an account or invoice number
    Output: the same number completed with its recursive mod10 check digit
"""
codec=[0,9,4,6,8,2,7,1,3,5]
report = 0
result=""
for digit in number:
result += digit
if digit.isdigit():
report = codec[ (int(digit) + report) % 10 ]
return result + str((10 - report) % 10)
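# Illustrative example, hand-checked against the codec table above:
#   >>> mod10r('12')
#   '121'
# The trailing '1' is the recursive mod10 check digit for '12'.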
def human_size(sz):
"""
Return the size in a human readable format
"""
if not sz:
return False
units = ('bytes', 'Kb', 'Mb', 'Gb')
if isinstance(sz,basestring):
sz=len(sz)
s, i = float(sz), 0
while s >= 1024 and i < len(units)-1:
s /= 1024
i += 1
return "%0.2f %s" % (s, units[i])
def logged(f):
@wraps(f)
def wrapper(*args, **kwargs):
from pprint import pformat
vector = ['Call -> function: %r' % f]
for i, arg in enumerate(args):
vector.append(' arg %02d: %s' % (i, pformat(arg)))
for key, value in kwargs.items():
vector.append(' kwarg %10s: %s' % (key, pformat(value)))
timeb4 = time.time()
res = f(*args, **kwargs)
vector.append(' result: %s' % pformat(res))
vector.append(' time delta: %s' % (time.time() - timeb4))
_logger.debug('\n'.join(vector))
return res
return wrapper
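# Illustrative usage (the decorated function is a made-up example); each
# call logs its arguments, result and elapsed time at DEBUG level:
#   @logged
#   def add(a, b):
#       return a + b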
class profile(object):
def __init__(self, fname=None):
self.fname = fname
def __call__(self, f):
@wraps(f)
def wrapper(*args, **kwargs):
profile = cProfile.Profile()
result = profile.runcall(f, *args, **kwargs)
profile.dump_stats(self.fname or ("%s.cprof" % (f.func_name,)))
return result
return wrapper
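# Illustrative usage (the file name is a made-up example); each call dumps
# cProfile stats to the given file, or to '<funcname>.cprof' by default:
#   @profile('/tmp/add.cprof')
#   def add(a, b):
#       return a + b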
__icons_list = ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_BOLD',
'STOCK_CANCEL', 'STOCK_CDROM', 'STOCK_CLEAR', 'STOCK_CLOSE', 'STOCK_COLOR_PICKER',
'STOCK_CONNECT', 'STOCK_CONVERT', 'STOCK_COPY', 'STOCK_CUT', 'STOCK_DELETE',
'STOCK_DIALOG_AUTHENTICATION', 'STOCK_DIALOG_ERROR', 'STOCK_DIALOG_INFO',
'STOCK_DIALOG_QUESTION', 'STOCK_DIALOG_WARNING', 'STOCK_DIRECTORY', 'STOCK_DISCONNECT',
'STOCK_DND', 'STOCK_DND_MULTIPLE', 'STOCK_EDIT', 'STOCK_EXECUTE', 'STOCK_FILE',
'STOCK_FIND', 'STOCK_FIND_AND_REPLACE', 'STOCK_FLOPPY', 'STOCK_GOTO_BOTTOM',
'STOCK_GOTO_FIRST', 'STOCK_GOTO_LAST', 'STOCK_GOTO_TOP', 'STOCK_GO_BACK',
'STOCK_GO_DOWN', 'STOCK_GO_FORWARD', 'STOCK_GO_UP', 'STOCK_HARDDISK',
'STOCK_HELP', 'STOCK_HOME', 'STOCK_INDENT', 'STOCK_INDEX', 'STOCK_ITALIC',
'STOCK_JUMP_TO', 'STOCK_JUSTIFY_CENTER', 'STOCK_JUSTIFY_FILL',
'STOCK_JUSTIFY_LEFT', 'STOCK_JUSTIFY_RIGHT', 'STOCK_MEDIA_FORWARD',
'STOCK_MEDIA_NEXT', 'STOCK_MEDIA_PAUSE', 'STOCK_MEDIA_PLAY',
'STOCK_MEDIA_PREVIOUS', 'STOCK_MEDIA_RECORD', 'STOCK_MEDIA_REWIND',
'STOCK_MEDIA_STOP', 'STOCK_MISSING_IMAGE', 'STOCK_NETWORK', 'STOCK_NEW',
'STOCK_NO', 'STOCK_OK', 'STOCK_OPEN', 'STOCK_PASTE', 'STOCK_PREFERENCES',
'STOCK_PRINT', 'STOCK_PRINT_PREVIEW', 'STOCK_PROPERTIES', 'STOCK_QUIT',
'STOCK_REDO', 'STOCK_REFRESH', 'STOCK_REMOVE', 'STOCK_REVERT_TO_SAVED',
'STOCK_SAVE', 'STOCK_SAVE_AS', 'STOCK_SELECT_COLOR', 'STOCK_SELECT_FONT',
'STOCK_SORT_ASCENDING', 'STOCK_SORT_DESCENDING', 'STOCK_SPELL_CHECK',
'STOCK_STOP', 'STOCK_STRIKETHROUGH', 'STOCK_UNDELETE', 'STOCK_UNDERLINE',
'STOCK_UNDO', 'STOCK_UNINDENT', 'STOCK_YES', 'STOCK_ZOOM_100',
'STOCK_ZOOM_FIT', 'STOCK_ZOOM_IN', 'STOCK_ZOOM_OUT',
'terp-account', 'terp-crm', 'terp-mrp', 'terp-product', 'terp-purchase',
'terp-sale', 'terp-tools', 'terp-administration', 'terp-hr', 'terp-partner',
'terp-project', 'terp-report', 'terp-stock', 'terp-calendar', 'terp-graph',
'terp-check','terp-go-month','terp-go-year','terp-go-today','terp-document-new','terp-camera_test',
'terp-emblem-important','terp-gtk-media-pause','terp-gtk-stop','terp-gnome-cpu-frequency-applet+',
'terp-dialog-close','terp-gtk-jump-to-rtl','terp-gtk-jump-to-ltr','terp-accessories-archiver',
'terp-stock_align_left_24','terp-stock_effects-object-colorize','terp-go-home','terp-gtk-go-back-rtl',
'terp-gtk-go-back-ltr','terp-personal','terp-personal-','terp-personal+','terp-accessories-archiver-minus',
'terp-accessories-archiver+','terp-stock_symbol-selection','terp-call-start','terp-dolar',
'terp-face-plain','terp-folder-blue','terp-folder-green','terp-folder-orange','terp-folder-yellow',
'terp-gdu-smart-failing','terp-go-week','terp-gtk-select-all','terp-locked','terp-mail-forward',
'terp-mail-message-new','terp-mail-replied','terp-rating-rated','terp-stage','terp-stock_format-scientific',
'terp-dolar_ok!','terp-idea','terp-stock_format-default','terp-mail-','terp-mail_delete'
]
def icons(*a, **kw):
global __icons_list
return [(x, x) for x in __icons_list ]
def detect_ip_addr():
"""Try a very crude method to figure out a valid external
IP or hostname for the current machine. Don't rely on this
    for binding to an interface, but it could be used as a basis
for constructing a remote URL to the server.
"""
def _detect_ip_addr():
from array import array
from struct import pack, unpack
try:
import fcntl
except ImportError:
fcntl = None
ip_addr = None
if not fcntl: # not UNIX:
host = socket.gethostname()
ip_addr = socket.gethostbyname(host)
else: # UNIX:
# get all interfaces:
nbytes = 128 * 32
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
names = array('B', '\0' * nbytes)
outbytes = unpack('iL', fcntl.ioctl( s.fileno(), 0x8912, pack('iL', nbytes, names.buffer_info()[0])))[0]
namestr = names.tostring()
# try 64 bit kernel:
for i in range(0, outbytes, 40):
name = namestr[i:i+16].split('\0', 1)[0]
if name != 'lo':
ip_addr = socket.inet_ntoa(namestr[i+20:i+24])
break
# try 32 bit kernel:
if ip_addr is None:
ifaces = filter(None, [namestr[i:i+32].split('\0', 1)[0] for i in range(0, outbytes, 32)])
for ifname in [iface for iface in ifaces if iface != 'lo']:
ip_addr = socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, pack('256s', ifname[:15]))[20:24])
break
return ip_addr or 'localhost'
try:
ip_addr = _detect_ip_addr()
except Exception:
ip_addr = 'localhost'
return ip_addr
# RATIONALE BEHIND TIMESTAMP CALCULATIONS AND TIMEZONE MANAGEMENT:
# The server side never does any timestamp calculation, always
# sends them in a naive (timezone agnostic) format supposed to be
# expressed within the server timezone, and expects the clients to
# provide timestamps in the server timezone as well.
# It stores all timestamps in the database in naive format as well,
# which also expresses the time in the server timezone.
# For this reason the server makes its timezone name available via the
# common/timezone_get() rpc method, which clients need to read
# to know the appropriate time offset to use when reading/writing
# times.
def get_win32_timezone():
"""Attempt to return the "standard name" of the current timezone on a win32 system.
@return the standard name of the current win32 timezone, or False if it cannot be found.
"""
res = False
if sys.platform == "win32":
try:
import _winreg
hklm = _winreg.ConnectRegistry(None,_winreg.HKEY_LOCAL_MACHINE)
current_tz_key = _winreg.OpenKey(hklm, r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation", 0,_winreg.KEY_ALL_ACCESS)
res = str(_winreg.QueryValueEx(current_tz_key,"StandardName")[0]) # [0] is value, [1] is type code
_winreg.CloseKey(current_tz_key)
_winreg.CloseKey(hklm)
except Exception:
pass
return res
def detect_server_timezone():
"""Attempt to detect the timezone to use on the server side.
Defaults to UTC if no working timezone can be found.
@return the timezone identifier as expected by pytz.timezone.
"""
try:
import pytz
except Exception:
_logger.warning("Python pytz module is not available. "
"Timezone will be set to UTC by default.")
return 'UTC'
# Option 1: the configuration option (did not exist before, so no backwards compatibility issue)
# Option 2: to be backwards compatible with 5.0 or earlier, the value from time.tzname[0], but only if it is known to pytz
# Option 3: the environment variable TZ
sources = [ (config['timezone'], 'OpenERP configuration'),
(time.tzname[0], 'time.tzname'),
(os.environ.get('TZ',False),'TZ environment variable'), ]
# Option 4: OS-specific: /etc/timezone on Unix
if os.path.exists("/etc/timezone"):
        tz_value = False
        f = None
        try:
            f = open("/etc/timezone")
            tz_value = f.read(128).strip()
        except Exception:
            pass
        finally:
            if f:
                f.close()
sources.append((tz_value,"/etc/timezone file"))
# Option 5: timezone info from registry on Win32
if sys.platform == "win32":
# Timezone info is stored in windows registry.
# However this is not likely to work very well as the standard name
# of timezones in windows is rarely something that is known to pytz.
# But that's ok, it is always possible to use a config option to set
# it explicitly.
sources.append((get_win32_timezone(),"Windows Registry"))
for (value,source) in sources:
if value:
try:
tz = pytz.timezone(value)
_logger.info("Using timezone %s obtained from %s.", tz.zone, source)
return value
except pytz.UnknownTimeZoneError:
_logger.warning("The timezone specified in %s (%s) is invalid, ignoring it.", source, value)
_logger.warning("No valid timezone could be detected, using default UTC "
"timezone. You can specify it explicitly with option 'timezone' in "
"the server configuration.")
return 'UTC'
def get_server_timezone():
return "UTC"
DEFAULT_SERVER_DATE_FORMAT = "%Y-%m-%d"
DEFAULT_SERVER_TIME_FORMAT = "%H:%M:%S"
DEFAULT_SERVER_DATETIME_FORMAT = "%s %s" % (
DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_TIME_FORMAT)
# Python's strftime supports only the format directives
# that are available on the platform's libc, so in order to
# be cross-platform we map to the directives required by
# the C standard (1989 version), always available on platforms
# with a C standard implementation.
DATETIME_FORMATS_MAP = {
'%C': '', # century
'%D': '%m/%d/%Y', # modified %y->%Y
'%e': '%d',
'%E': '', # special modifier
'%F': '%Y-%m-%d',
'%g': '%Y', # modified %y->%Y
'%G': '%Y',
'%h': '%b',
'%k': '%H',
'%l': '%I',
'%n': '\n',
'%O': '', # special modifier
'%P': '%p',
'%R': '%H:%M',
'%r': '%I:%M:%S %p',
'%s': '', #num of seconds since epoch
'%T': '%H:%M:%S',
'%t': ' ', # tab
'%u': ' %w',
'%V': '%W',
'%y': '%Y', # Even if %y works, it's ambiguous, so we should use %Y
'%+': '%Y-%m-%d %H:%M:%S',
# %Z is a special case that causes 2 problems at least:
# - the timezone names we use (in res_user.context_tz) come
# from pytz, but not all these names are recognized by
# strptime(), so we cannot convert in both directions
# when such a timezone is selected and %Z is in the format
# - %Z is replaced by an empty string in strftime() when
# there is not tzinfo in a datetime value (e.g when the user
# did not pick a context_tz). The resulting string does not
# parse back if the format requires %Z.
# As a consequence, we strip it completely from format strings.
# The user can always have a look at the context_tz in
# preferences to check the timezone.
'%z': '',
'%Z': '',
}
def server_to_local_timestamp(src_tstamp_str, src_format, dst_format, dst_tz_name,
tz_offset=True, ignore_unparsable_time=True):
"""
Convert a source timestamp string into a destination timestamp string, attempting to apply the
correct offset if both the server and local timezone are recognized, or no
offset at all if they aren't or if tz_offset is false (i.e. assuming they are both in the same TZ).
WARNING: This method is here to allow formatting dates correctly for inclusion in strings where
the client would not be able to format/offset it correctly. DO NOT use it for returning
date fields directly, these are supposed to be handled by the client!!
@param src_tstamp_str: the str value containing the timestamp in the server timezone.
@param src_format: the format to use when parsing the server timestamp.
@param dst_format: the format to use when formatting the resulting timestamp for the local/client timezone.
@param dst_tz_name: name of the destination timezone (such as the 'tz' value of the client context)
@param ignore_unparsable_time: if True, return False if src_tstamp_str cannot be parsed
using src_format or formatted using dst_format.
@return local/client formatted timestamp, expressed in the local/client timezone if possible
and if tz_offset is true, or src_tstamp_str if timezone offset could not be determined.
"""
if not src_tstamp_str:
return False
res = src_tstamp_str
if src_format and dst_format:
# find out server timezone
server_tz = get_server_timezone()
try:
# dt_value needs to be a datetime.datetime object (so no time.struct_time or mx.DateTime.DateTime here!)
dt_value = datetime.strptime(src_tstamp_str, src_format)
if tz_offset and dst_tz_name:
try:
import pytz
src_tz = pytz.timezone(server_tz)
dst_tz = pytz.timezone(dst_tz_name)
src_dt = src_tz.localize(dt_value, is_dst=True)
dt_value = src_dt.astimezone(dst_tz)
except Exception:
pass
res = dt_value.strftime(dst_format)
except Exception:
# Normal ways to end up here are if strptime or strftime failed
if not ignore_unparsable_time:
return False
return res
def split_every(n, iterable, piece_maker=tuple):
"""Splits an iterable into length-n pieces. The last piece will be shorter
if ``n`` does not evenly divide the iterable length.
@param ``piece_maker``: function to build the pieces
from the slices (tuple,list,...)
"""
iterator = iter(iterable)
piece = piece_maker(islice(iterator, n))
while piece:
yield piece
piece = piece_maker(islice(iterator, n))
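# Illustrative example:
#   >>> list(split_every(2, range(5)))
#   [(0, 1), (2, 3), (4,)]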
if __name__ == '__main__':
import doctest
doctest.testmod()
class upload_data_thread(threading.Thread):
def __init__(self, email, data, type):
self.args = [('email',email),('type',type),('data',data)]
super(upload_data_thread,self).__init__()
def run(self):
try:
import urllib
args = urllib.urlencode(self.args)
fp = urllib.urlopen('http://www.openerp.com/scripts/survey.php', args)
fp.read()
fp.close()
except Exception:
pass
def upload_data(email, data, type='SURVEY'):
a = upload_data_thread(email, data, type)
a.start()
return True
def get_and_group_by_field(cr, uid, obj, ids, field, context=None):
    """ Read the values of ``field`` for the given ``ids`` and group ids by value.
:param string field: name of the field we want to read and group by
:return: mapping of field values to the list of ids that have it
:rtype: dict
"""
res = {}
for record in obj.read(cr, uid, ids, [field], context=context):
key = record[field]
res.setdefault(key[0] if isinstance(key, tuple) else key, []).append(record['id'])
return res
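# Illustrative result shape (ids and field values invented): grouping
# records [10, 11, 12] by 'company_id' could return {1: [10, 11], 2: [12]},
# i.e. company 1 owns records 10 and 11, company 2 owns record 12.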
def get_and_group_by_company(cr, uid, obj, ids, context=None):
return get_and_group_by_field(cr, uid, obj, ids, field='company_id', context=context)
# port of python 2.6's attrgetter with support for dotted notation
def resolve_attr(obj, attr):
for name in attr.split("."):
obj = getattr(obj, name)
return obj
def attrgetter(*items):
if len(items) == 1:
attr = items[0]
def g(obj):
return resolve_attr(obj, attr)
else:
def g(obj):
return tuple(resolve_attr(obj, attr) for attr in items)
return g
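# Illustrative examples of the dotted notation (the record object is a
# made-up example):
#   attrgetter('user.name')(record)        # == record.user.name
#   attrgetter('id', 'user.name')(record)  # == (record.id, record.user.name)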
class unquote(str):
"""A subclass of str that implements repr() without enclosing quotation marks
    or escaping, keeping the original string untouched. The name comes from Lisp's unquote.
One of the uses for this is to preserve or insert bare variable names within dicts during eval()
of a dict's repr(). Use with care.
Some examples (notice that there are never quotes surrounding
    the ``active_id`` name):
>>> unquote('active_id')
active_id
>>> d = {'test': unquote('active_id')}
>>> d
{'test': active_id}
>>> print d
{'test': active_id}
"""
def __repr__(self):
return self
class UnquoteEvalContext(defaultdict):
"""Defaultdict-based evaluation context that returns
an ``unquote`` string for any missing name used during
the evaluation.
Mostly useful for evaluating OpenERP domains/contexts that
may refer to names that are unknown at the time of eval,
so that when the context/domain is converted back to a string,
the original names are preserved.
**Warning**: using an ``UnquoteEvalContext`` as context for ``eval()`` or
``safe_eval()`` will shadow the builtins, which may cause other
failures, depending on what is evaluated.
Example (notice that ``section_id`` is preserved in the final
result) :
>>> context_str = "{'default_user_id': uid, 'default_section_id': section_id}"
>>> eval(context_str, UnquoteEvalContext(uid=1))
{'default_user_id': 1, 'default_section_id': section_id}
"""
def __init__(self, *args, **kwargs):
super(UnquoteEvalContext, self).__init__(None, *args, **kwargs)
def __missing__(self, key):
return unquote(key)
class mute_logger(object):
    """Temporarily suppress logging.
Can be used as context manager or decorator.
@mute_logger('openerp.plic.ploc')
def do_stuff():
blahblah()
with mute_logger('openerp.foo.bar'):
        do_stuff()
"""
def __init__(self, *loggers):
self.loggers = loggers
def filter(self, record):
return 0
def __enter__(self):
for logger in self.loggers:
logging.getLogger(logger).addFilter(self)
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
for logger in self.loggers:
logging.getLogger(logger).removeFilter(self)
def __call__(self, func):
@wraps(func)
def deco(*args, **kwargs):
with self:
return func(*args, **kwargs)
return deco
_ph = object()
class CountingStream(object):
    """ Stream wrapper counting the number of elements it has yielded. Similar
role to ``enumerate``, but for use when the iteration process of the stream
isn't fully under caller control (the stream can be iterated from multiple
points including within a library)
``start`` allows overriding the starting index (the index before the first
item is returned).
On each iteration (call to :meth:`~.next`), increases its :attr:`~.index`
by one.
.. attribute:: index
``int``, index of the last yielded element in the stream. If the stream
has ended, will give an index 1-past the stream
"""
def __init__(self, stream, start=-1):
self.stream = iter(stream)
self.index = start
self.stopped = False
def __iter__(self):
return self
def next(self):
if self.stopped: raise StopIteration()
self.index += 1
val = next(self.stream, _ph)
if val is _ph:
self.stopped = True
raise StopIteration()
return val
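# Illustrative example:
#   >>> s = CountingStream(['a', 'b'])
#   >>> s.next(), s.index
#   ('a', 0)
#   >>> s.next(), s.index
#   ('b', 1)
# After exhaustion, s.index is 2, i.e. one past the end of the stream.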
def stripped_sys_argv(*strip_args):
"""Return sys.argv with some arguments stripped, suitable for reexecution or subprocesses"""
strip_args = sorted(set(strip_args) | set(['-s', '--save', '-d', '--database', '-u', '--update', '-i', '--init']))
assert all(config.parser.has_option(s) for s in strip_args)
takes_value = dict((s, config.parser.get_option(s).takes_value()) for s in strip_args)
longs, shorts = list(tuple(y) for _, y in groupby(strip_args, lambda x: x.startswith('--')))
longs_eq = tuple(l + '=' for l in longs if takes_value[l])
args = sys.argv[:]
def strip(args, i):
return args[i].startswith(shorts) \
or args[i].startswith(longs_eq) or (args[i] in longs) \
or (i >= 1 and (args[i - 1] in strip_args) and takes_value[args[i - 1]])
return [x for i, x in enumerate(args) if not strip(args, i)]
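# Illustrative example (argv values invented): with
#   sys.argv == ['openerp-server', '-d', 'mydb', '--xmlrpc-port=8070']
# stripped_sys_argv() returns ['openerp-server', '--xmlrpc-port=8070'],
# because '-d' and its value are in the default strip list.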
def dumpstacks(sig, frame):
""" Signal handler: dump a stack trace for each existing thread."""
code = []
def extract_stack(stack):
for filename, lineno, name, line in traceback.extract_stack(stack):
yield 'File: "%s", line %d, in %s' % (filename, lineno, name)
if line:
yield " %s" % (line.strip(),)
# code from http://stackoverflow.com/questions/132058/getting-stack-trace-from-a-running-python-application#answer-2569696
# modified for python 2.5 compatibility
threads_info = dict([(th.ident, {'name': th.name, 'uid': getattr(th, 'uid', 'n/a')})
for th in threading.enumerate()])
for threadId, stack in sys._current_frames().items():
thread_info = threads_info.get(threadId)
code.append("\n# Thread: %s (id:%s) (uid:%s)" %
(thread_info and thread_info['name'] or 'n/a',
threadId,
thread_info and thread_info['uid'] or 'n/a'))
for line in extract_stack(stack):
code.append(line)
if openerp.evented:
# code from http://stackoverflow.com/questions/12510648/in-gevent-how-can-i-dump-stack-traces-of-all-running-greenlets
import gc
from greenlet import greenlet
for ob in gc.get_objects():
if not isinstance(ob, greenlet) or not ob:
continue
code.append("\n# Greenlet: %r" % (ob,))
for line in extract_stack(ob.gr_frame):
code.append(line)
_logger.info("\n".join(code))
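# Registration sketch (illustrative; the actual wiring happens in the server
# start-up code, not in this module):
#
#   import signal
#   signal.signal(signal.SIGQUIT, dumpstacks)  # 'kill -QUIT <pid>' logs all stacks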
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| MarkusTeufelberger/openobject-server | openerp/tools/misc.py | Python | agpl-3.0 | 39,743 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import imp
import inspect
import os
import sys
from lxml import etree
import routes
import webob.dec
import webob.exc
from nova import exception
from nova import flags
from nova import log as logging
from nova import utils
from nova import wsgi as base_wsgi
import nova.api.openstack
from nova.api.openstack import common
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
LOG = logging.getLogger('nova.api.openstack.extensions')
FLAGS = flags.FLAGS
class ExtensionDescriptor(object):
"""Base class that defines the contract for extensions.
Note that you don't have to derive from this class to have a valid
extension; it is purely a convenience.
"""
# The name of the extension, e.g., 'Fox In Socks'
name = None
# The alias for the extension, e.g., 'FOXNSOX'
alias = None
# Description comes from the docstring for the class
# The XML namespace for the extension, e.g.,
# 'http://www.fox.in.socks/api/ext/pie/v1.0'
namespace = None
# The timestamp when the extension was last updated, e.g.,
# '2011-01-22T13:25:27-06:00'
updated = None
def __init__(self, ext_mgr):
"""Register extension with the extension manager."""
ext_mgr.register(self)
def get_resources(self):
"""List of extensions.ResourceExtension extension objects.
Resources define new nouns, and are accessible through URLs.
"""
resources = []
return resources
def get_actions(self):
"""List of extensions.ActionExtension extension objects.
Actions are verbs callable from the API.
"""
actions = []
return actions
def get_request_extensions(self):
"""List of extensions.RequestExtension extension objects.
Request extensions are used to handle custom request data.
"""
request_exts = []
return request_exts
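# Illustrative sketch of a minimal extension, reusing the attribute examples
# documented above (FoxInSocks is the doc/test example, not a shipped
# extension):
#
#   class FoxInSocks(ExtensionDescriptor):
#       """The Fox In Socks Extension."""
#       name = 'Fox In Socks'
#       alias = 'FOXNSOX'
#       namespace = 'http://www.fox.in.socks/api/ext/pie/v1.0'
#       updated = '2011-01-22T13:25:27-06:00'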
class ActionExtensionController(object):
def __init__(self, application):
self.application = application
self.action_handlers = {}
def add_action(self, action_name, handler):
self.action_handlers[action_name] = handler
def action(self, req, id, body):
for action_name, handler in self.action_handlers.iteritems():
if action_name in body:
return handler(body, req, id)
# no action handler found (bump to downstream application)
res = self.application
return res
class ActionExtensionResource(wsgi.Resource):
def __init__(self, application):
controller = ActionExtensionController(application)
wsgi.Resource.__init__(self, controller)
def add_action(self, action_name, handler):
self.controller.add_action(action_name, handler)
class RequestExtensionController(object):
def __init__(self, application):
self.application = application
self.handlers = []
def add_handler(self, handler):
self.handlers.append(handler)
def process(self, req, *args, **kwargs):
res = req.get_response(self.application)
# Deserialize the response body, if any
body = None
if res.body:
body = utils.loads(res.body)
# currently request handlers are un-ordered
for handler in self.handlers:
res = handler(req, res, body)
# Reserialize the response body
if body is not None:
res.body = utils.dumps(body)
return res
class RequestExtensionResource(wsgi.Resource):
def __init__(self, application):
controller = RequestExtensionController(application)
wsgi.Resource.__init__(self, controller)
def add_handler(self, handler):
self.controller.add_handler(handler)
class ExtensionsResource(wsgi.Resource):
def __init__(self, extension_manager):
self.extension_manager = extension_manager
def _translate(self, ext):
ext_data = {}
ext_data['name'] = ext.name
ext_data['alias'] = ext.alias
ext_data['description'] = ext.__doc__
ext_data['namespace'] = ext.namespace
ext_data['updated'] = ext.updated
ext_data['links'] = [] # TODO(dprince): implement extension links
return ext_data
def index(self, req):
extensions = []
for _alias, ext in self.extension_manager.extensions.iteritems():
extensions.append(self._translate(ext))
return dict(extensions=extensions)
def show(self, req, id):
try:
# NOTE(dprince): the extensions alias is used as the 'id' for show
ext = self.extension_manager.extensions[id]
except KeyError:
raise webob.exc.HTTPNotFound()
return dict(extension=self._translate(ext))
def delete(self, req, id):
raise webob.exc.HTTPNotFound()
def create(self, req):
raise webob.exc.HTTPNotFound()
class ExtensionMiddleware(base_wsgi.Middleware):
"""Extensions middleware for WSGI."""
@classmethod
def factory(cls, global_config, **local_config):
"""Paste factory."""
def _factory(app):
return cls(app, **local_config)
return _factory
def _action_ext_resources(self, application, ext_mgr, mapper):
"""Return a dict of ActionExtensionResource-s by collection."""
action_resources = {}
for action in ext_mgr.get_actions():
            if action.collection not in action_resources:
resource = ActionExtensionResource(application)
mapper.connect("/:(project_id)/%s/:(id)/action.:(format)" %
action.collection,
action='action',
controller=resource,
conditions=dict(method=['POST']))
mapper.connect("/:(project_id)/%s/:(id)/action" %
action.collection,
action='action',
controller=resource,
conditions=dict(method=['POST']))
action_resources[action.collection] = resource
return action_resources
def _request_ext_resources(self, application, ext_mgr, mapper):
"""Returns a dict of RequestExtensionResource-s by collection."""
request_ext_resources = {}
for req_ext in ext_mgr.get_request_extensions():
            if req_ext.key not in request_ext_resources:
resource = RequestExtensionResource(application)
mapper.connect(req_ext.url_route + '.:(format)',
action='process',
controller=resource,
conditions=req_ext.conditions)
mapper.connect(req_ext.url_route,
action='process',
controller=resource,
conditions=req_ext.conditions)
request_ext_resources[req_ext.key] = resource
return request_ext_resources
def __init__(self, application, ext_mgr=None):
if ext_mgr is None:
ext_mgr = ExtensionManager()
self.ext_mgr = ext_mgr
mapper = nova.api.openstack.ProjectMapper()
serializer = wsgi.ResponseSerializer(
{'application/xml': ExtensionsXMLSerializer()})
# extended resources
for resource in ext_mgr.get_resources():
LOG.debug(_('Extended resource: %s'),
resource.collection)
if resource.serializer is None:
resource.serializer = serializer
kargs = dict(
controller=wsgi.Resource(
resource.controller, resource.deserializer,
resource.serializer),
collection=resource.collection_actions,
member=resource.member_actions)
if resource.parent:
kargs['parent_resource'] = resource.parent
mapper.resource(resource.collection, resource.collection, **kargs)
# extended actions
action_resources = self._action_ext_resources(application, ext_mgr,
mapper)
for action in ext_mgr.get_actions():
LOG.debug(_('Extended action: %s'), action.action_name)
resource = action_resources[action.collection]
resource.add_action(action.action_name, action.handler)
# extended requests
req_controllers = self._request_ext_resources(application, ext_mgr,
mapper)
for request_ext in ext_mgr.get_request_extensions():
LOG.debug(_('Extended request: %s'), request_ext.key)
controller = req_controllers[request_ext.key]
controller.add_handler(request_ext.handler)
self._router = routes.middleware.RoutesMiddleware(self._dispatch,
mapper)
super(ExtensionMiddleware, self).__init__(application)
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
"""Route the incoming request with router."""
req.environ['extended.app'] = self.application
return self._router
@staticmethod
@webob.dec.wsgify(RequestClass=wsgi.Request)
def _dispatch(req):
"""Dispatch the request.
Returns the routed WSGI app's response or defers to the extended
application.
"""
match = req.environ['wsgiorg.routing_args'][1]
if not match:
return req.environ['extended.app']
app = match['controller']
return app
class ExtensionManager(object):
"""Load extensions from the configured extension path.
See nova/tests/api/openstack/extensions/foxinsocks/extension.py for an
example extension implementation.
"""
def __init__(self):
LOG.audit(_('Initializing extension manager.'))
self.extensions = {}
self._load_extensions()
def register(self, ext):
# Do nothing if the extension doesn't check out
if not self._check_extension(ext):
return
alias = ext.alias
LOG.audit(_('Loaded extension: %s'), alias)
if alias in self.extensions:
raise exception.Error("Found duplicate extension: %s" % alias)
self.extensions[alias] = ext
def get_resources(self):
"""Returns a list of ResourceExtension objects."""
resources = []
resources.append(ResourceExtension('extensions',
ExtensionsResource(self)))
for ext in self.extensions.values():
try:
resources.extend(ext.get_resources())
except AttributeError:
                # NOTE(dprince): Extensions aren't required to have resource
                # extensions
pass
return resources
def get_actions(self):
"""Returns a list of ActionExtension objects."""
actions = []
for ext in self.extensions.values():
try:
actions.extend(ext.get_actions())
except AttributeError:
                # NOTE(dprince): Extensions aren't required to have action
                # extensions
pass
return actions
def get_request_extensions(self):
"""Returns a list of RequestExtension objects."""
request_exts = []
for ext in self.extensions.values():
try:
request_exts.extend(ext.get_request_extensions())
except AttributeError:
                # NOTE(dprince): Extensions aren't required to have request
                # extensions
pass
return request_exts
def _check_extension(self, extension):
"""Checks for required methods in extension objects."""
try:
LOG.debug(_('Ext name: %s'), extension.name)
LOG.debug(_('Ext alias: %s'), extension.alias)
LOG.debug(_('Ext description: %s'),
' '.join(extension.__doc__.strip().split()))
LOG.debug(_('Ext namespace: %s'), extension.namespace)
LOG.debug(_('Ext updated: %s'), extension.updated)
except AttributeError as ex:
LOG.exception(_("Exception loading extension: %s"), unicode(ex))
return False
return True
def load_extension(self, ext_factory):
"""Execute an extension factory.
Loads an extension. The 'ext_factory' is the name of a
callable that will be imported and called with one
argument--the extension manager. The factory callable is
expected to call the register() method at least once.
"""
LOG.debug(_("Loading extension %s"), ext_factory)
# Load the factory
factory = utils.import_class(ext_factory)
# Call it
LOG.debug(_("Calling extension factory %s"), ext_factory)
factory(self)
def _load_extensions(self):
"""Load extensions specified on the command line."""
for ext_factory in FLAGS.osapi_extension:
try:
self.load_extension(ext_factory)
except Exception as exc:
LOG.warn(_('Failed to load extension %(ext_factory)s: '
'%(exc)s') % locals())
class RequestExtension(object):
"""Extend requests and responses of core nova OpenStack API resources.
Provide a way to add data to responses and handle custom request data
that is sent to core nova OpenStack API controllers.
"""
def __init__(self, method, url_route, handler):
self.url_route = url_route
self.handler = handler
self.conditions = dict(method=[method])
self.key = "%s-%s" % (method, url_route)
class ActionExtension(object):
"""Add custom actions to core nova OpenStack API resources."""
def __init__(self, collection, action_name, handler):
self.collection = collection
self.action_name = action_name
self.handler = handler
class ResourceExtension(object):
"""Add top level resources to the OpenStack API in nova."""
def __init__(self, collection, controller, parent=None,
collection_actions=None, member_actions=None,
deserializer=None, serializer=None):
if not collection_actions:
collection_actions = {}
if not member_actions:
member_actions = {}
self.collection = collection
self.controller = controller
self.parent = parent
self.collection_actions = collection_actions
self.member_actions = member_actions
self.deserializer = deserializer
self.serializer = serializer
class ExtensionsXMLSerializer(wsgi.XMLDictSerializer):
NSMAP = {None: xmlutil.XMLNS_V11, 'atom': xmlutil.XMLNS_ATOM}
def show(self, ext_dict):
ext = etree.Element('extension', nsmap=self.NSMAP)
self._populate_ext(ext, ext_dict['extension'])
return self._to_xml(ext)
def index(self, exts_dict):
exts = etree.Element('extensions', nsmap=self.NSMAP)
for ext_dict in exts_dict['extensions']:
ext = etree.SubElement(exts, 'extension')
self._populate_ext(ext, ext_dict)
return self._to_xml(exts)
def _populate_ext(self, ext_elem, ext_dict):
"""Populate an extension xml element from a dict."""
ext_elem.set('name', ext_dict['name'])
ext_elem.set('namespace', ext_dict['namespace'])
ext_elem.set('alias', ext_dict['alias'])
ext_elem.set('updated', ext_dict['updated'])
desc = etree.Element('description')
desc.text = ext_dict['description']
ext_elem.append(desc)
for link in ext_dict.get('links', []):
elem = etree.SubElement(ext_elem, '{%s}link' % xmlutil.XMLNS_ATOM)
elem.set('rel', link['rel'])
elem.set('href', link['href'])
elem.set('type', link['type'])
return ext_elem
def _to_xml(self, root):
"""Convert the xml object to an xml string."""
return etree.tostring(root, encoding='UTF-8')
def admin_only(fnc):
@functools.wraps(fnc)
def _wrapped(self, *args, **kwargs):
if FLAGS.allow_admin_api:
return fnc(self, *args, **kwargs)
raise webob.exc.HTTPNotFound()
_wrapped.func_name = fnc.func_name
return _wrapped
def wrap_errors(fn):
""""Ensure errors are not passed along."""
def wrapped(*args):
try:
return fn(*args)
except Exception, e:
raise webob.exc.HTTPInternalServerError()
return wrapped
| salv-orlando/MyRepo | nova/api/openstack/extensions.py | Python | apache-2.0 | 17,647 |
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
import clv_professional_specialty
| CLVsol/odoo_addons | clv_professional/specialty/__init__.py | Python | agpl-3.0 | 1,438 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Zfs(AutotoolsPackage):
"""OpenZFS is an advanced file system and volume manager which was
originally developed for Solaris and is now maintained by the OpenZFS
community. This repository contains the code for running OpenZFS on
Linux and FreeBSD."""
homepage = "https://zfsonlinux.org/"
url = "https://github.com/openzfs/zfs/releases/download/zfs-0.8.3/zfs-0.8.3.tar.gz"
version('0.8.3', sha256='545a4897ce30c2d2dd9010a0fdb600a0d3d45805e2387093c473efc03aa9d7fd')
version('0.8.2', sha256='47608e257c8ecebb918014ef1da6172c3a45d990885891af18e80f5cc28beab8')
version('0.8.1', sha256='0af79fde44b7b8ecb94d5166ce2e4fff7409c20ed874c2d759db92909e6c2799')
version('0.8.0', sha256='0fd92e87f4b9df9686f18e2ac707c16b2eeaf00f682d41c20ea519f3a0fe4705')
depends_on('libuuid')
depends_on('libtirpc')
depends_on('util-linux')
def setup_build_environment(self, env):
env.prepend_path('CPATH', self.spec['util-linux'].prefix.include)
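# Usage sketch (standard Spack CLI, shown for illustration):
#
#   $ spack install zfs@0.8.3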
| iulian787/spack | var/spack/repos/builtin/packages/zfs/package.py | Python | lgpl-2.1 | 1,214 |
# -*- coding: utf-8 -*-
import requests
import lxml
from lxml import html
main_url = "http://www.skoob.com.br"
def books_for_author(url=None):
"return the books of a given author"
print "acessing: %s" % url
books_found = []
r = requests.get(url)
root = lxml.html.fromstring(r.content)
all_infos = root.cssselect("div.dados_lista_busca")
print "books in this page:"
for book_infos in all_infos:
title = book_infos.cssselect("a.l15ab")[0].text_content()
        books_found.append(title)
# print title
next_page = None
try:
next_page = root.cssselect("div.proximo span.l13 a")[0].get("href")
books_found.extend(books_for_author(main_url+next_page))
except IndexError:
pass
return books_found
def run():
"get all books from a given author in saraiva bookstore"
url = main_url+"/livro/lista/tag:sarah%20dessen/tipo:autor"
books = books_for_author(url)
print "============"
for book in books:
        print book
| arruda/rmr | rmr/scripts/skoob_crawler.py | Python | mit | 1,068 |
#
# Copyright (c) 2010, 2014, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
"""
copy_db_multithreaded test.
"""
import os
import mutlib
from mysql.utilities.exception import MUTLibError
class test(mutlib.System_test):
"""simple db copy
This test executes copy database test cases among two servers using
multiple threads.
"""
server1 = None
server2 = None
need_server = None
# pylint: disable=W0221
def is_long(self):
# This test is a long running test
return True
def check_prerequisites(self):
# Need non-Windows platform
if os.name == "nt":
raise MUTLibError("Test requires a non-Windows platform.")
# Need at least one server.
self.server1 = self.servers.get_server(0)
self.server2 = None
self.need_server = False
if not self.check_num_servers(2):
self.need_server = True
res = self.check_num_servers(1)
rows = []
try:
rows = self.server1.exec_query("SHOW DATABASES LIKE 'employees'")
        except Exception:
pass
if len(rows) == 0:
raise MUTLibError("Need employees database loaded on "
"{0}".format(self.server1.role))
return res
def setup(self):
if self.need_server:
self.servers.spawn_new_servers(2)
self.server2 = self.servers.get_server(1)
self.drop_all()
return True
def run(self):
self.res_fname = "result.txt"
from_conn = ("--source={0}"
"".format(self.build_connection_string(self.server1)))
to_conn = ("--destination={0}"
"".format(self.build_connection_string(self.server2)))
comment = "Test case 1 - copy a sample database"
cmd = ("mysqldbcopy.py {0} {1} {2}"
"".format(from_conn, to_conn,
"employees:emp_mt --force --threads=3"))
res = self.run_test_case(0, cmd, comment)
if not res:
raise MUTLibError("{0}: failed".format(comment))
return True
def get_result(self):
return self.compare(__name__, self.results)
def record(self):
return self.save_result_file(__name__, self.results)
def drop_all(self):
"""Drops all databases created.
"""
return self.drop_db(self.server2, "emp_mt")
def cleanup(self):
if self.res_fname:
os.unlink(self.res_fname)
return self.drop_all()
| mysql/mysql-utilities | mysql-test/suite/performance/t/copy_db_multithreaded.py | Python | gpl-2.0 | 3,191 |
# Date: Friday 02 June 2017 05:04:00 PM IST
# Email: nrupatunga@whodat.com
# Name: Nrupatunga
# Description: Basic regressor function implemented
from __future__ import print_function
import os
import glob
import numpy as np
import sys
import cv2
from ..helper import config
sys.path.insert(0, config.CAFFE_PATH)
import caffe
class regressor:
"""Regressor Class"""
def __init__(self, deploy_proto, caffe_model, gpu_id, num_inputs,
do_train, logger, solver_file=None):
"""TODO: to be defined"""
self.num_inputs = num_inputs
self.logger = logger
self.caffe_model_ = caffe_model
self.modified_params_ = False
self.mean = [104, 117, 123]
self.modified_params = False
self.solver_file = None
if solver_file:
self.solver_file = solver_file
self.setupNetwork(deploy_proto, caffe_model, gpu_id, do_train)
def reshape_image_inputs(self, num_images):
"""TODO: Docstring for reshape_image_inputs.
:returns: TODO
"""
net = self.net
net.blobs['image'].reshape(num_images, self.channels, self.height, self.width)
net.blobs['target'].reshape(num_images, self.channels, self.height, self.width)
def set_images(self, images, targets):
"""TODO: Docstring for set_images.
:returns: TODO
"""
num_images = len(images)
self.reshape_image_inputs(num_images)
self.preprocess_batch(images, targets)
def preprocess(self, image):
"""TODO: Docstring for preprocess.
:arg1: TODO
:returns: TODO
"""
num_channels = self.channels
if num_channels == 1 and image.shape[2] == 3:
image_out = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
elif num_channels == 1 and image.shape[2] == 4:
image_out = cv2.cvtColor(image, cv2.COLOR_BGRA2GRAY)
elif num_channels == 3 and image.shape[2] == 4:
image_out = cv2.cvtColor(image, cv2.COLOR_BGRA2BGR)
elif num_channels == 3 and image.shape[2] == 1:
image_out = cv2.cvtColor(image, cv2.COLOR_GRAY2BGR)
else:
image_out = image
if image_out.shape != (self.height, self.width, self.channels):
image_out = cv2.resize(image_out, (self.width, self.height), interpolation=cv2.INTER_CUBIC)
image_out = np.float32(image_out)
image_out -= np.array(self.mean)
image_out = np.transpose(image_out, [2, 0, 1])
return image_out
def preprocess_batch(self, images_batch, targets_batch):
"""TODO: Docstring for preprocess_batch.
:arg1: TODO
:returns: TODO
"""
net = self.net
num_images = len(images_batch)
for i in range(num_images):
image = images_batch[i]
image_out = self.preprocess(image)
net.blobs['image'].data[i] = image_out
target = targets_batch[i]
target_out = self.preprocess(target)
net.blobs['target'].data[i] = target_out
def setupNetwork(self, deploy_proto, caffe_model, gpu_id, do_train):
"""TODO: Docstring for setupNetwork.
:deploy_proto (string) : deploy prototxt file
:caffe_model (string) : trained caffe model path
:gpu_id (integer) : GPU id
:do_train (boolean) : training phase or testing phase
"""
logger = self.logger
caffe.set_mode_gpu()
caffe.set_device(int(gpu_id))
        if do_train:
logger.info('Setting phase to train')
# TODO: this part of the code needs to be changed for
# training phase
if self.solver_file:
self.solver = caffe.SGDSolver(self.solver_file)
net = self.solver.net
net.copy_from(caffe_model)
else:
logger.error('solver file required')
return
self.phase = caffe.TRAIN
else:
logger.info('Setting phase to test')
net = caffe.Net(deploy_proto, caffe_model, caffe.TEST)
self.phase = caffe.TEST
self.net = net
self.num_inputs = net.blobs['image'].data[...].shape[0]
self.channels = net.blobs['image'].data[...].shape[1]
self.height = net.blobs['image'].data[...].shape[2]
self.width = net.blobs['image'].data[...].shape[3]
if self.num_inputs != 1:
logger.error('Network should take exactly one input')
if self.channels != 1 and self.channels != 3:
logger.error('Network should have 1 or 3 channels')
def regress(self, curr_search_region, target_region):
"""TODO: Docstring for regress.
:returns: TODO
"""
return self.estimate(curr_search_region, target_region)
def estimate(self, curr_search_region, target_region):
"""TODO: Docstring for estimate.
:arg1: TODO
:returns: TODO
"""
net = self.net
# reshape the inputs
net.blobs['image'].data.reshape(1, self.channels, self.height, self.width)
net.blobs['target'].data.reshape(1, self.channels, self.height, self.width)
net.blobs['bbox'].data.reshape(1, 4, 1, 1)
curr_search_region = self.preprocess(curr_search_region)
target_region = self.preprocess(target_region)
net.blobs['image'].data[...] = curr_search_region
net.blobs['target'].data[...] = target_region
net.forward()
bbox_estimate = net.blobs['fc8'].data
        return bbox_estimate
| nrupatunga/PY-GOTURN | goturn/network/regressor.py | Python | mit | 5,623 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from py4j.java_gateway import java_import
from pyspark.conf import SparkConf
from pyspark.context import SparkContext
# for back compatibility
from pyspark.sql import SQLContext, HiveContext, Row
intp = gateway.entry_point
isSpark2 = intp.isSpark2()
if isSpark2:
from pyspark.sql import SparkSession
jsc = intp.getJavaSparkContext()
java_import(gateway.jvm, "org.apache.spark.SparkEnv")
java_import(gateway.jvm, "org.apache.spark.SparkConf")
java_import(gateway.jvm, "org.apache.spark.api.java.*")
java_import(gateway.jvm, "org.apache.spark.api.python.*")
java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")
java_import(gateway.jvm, "org.apache.spark.sql.*")
java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
java_import(gateway.jvm, "scala.Tuple2")
jconf = intp.getSparkConf()
conf = SparkConf(_jvm = gateway.jvm, _jconf = jconf)
sc = _zsc_ = SparkContext(jsc=jsc, gateway=gateway, conf=conf)
if isSpark2:
spark = __zSpark__ = SparkSession(sc, intp.getSparkSession())
sqlc = __zSqlc__ = __zSpark__._wrapped
else:
sqlc = __zSqlc__ = SQLContext(sparkContext=sc, sqlContext=intp.getSQLContext())
sqlContext = __zSqlc__
from zeppelin_context import PyZeppelinContext
#TODO(zjffdu) merge it with IPySparkZeppelinContext
class PySparkZeppelinContext(PyZeppelinContext):
def __init__(self, z, gateway):
super(PySparkZeppelinContext, self).__init__(z, gateway)
def show(self, obj):
from pyspark.sql import DataFrame
if isinstance(obj, DataFrame):
print(self.z.showData(obj._jdf))
else:
super(PySparkZeppelinContext, self).show(obj)
z = __zeppelin__ = PySparkZeppelinContext(intp.getZeppelinContext(), gateway)
__zeppelin__._setup_matplotlib()
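# Usage sketch inside a %pyspark paragraph (illustrative; the 'spark' session
# object only exists when the interpreter runs against Spark 2.x):
#
#   df = spark.createDataFrame([(1, 'a'), (2, 'b')], ['id', 'name'])
#   z.show(df)  # rendered as a Zeppelin table instead of plain text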
| herval/zeppelin | spark/interpreter/src/main/resources/python/zeppelin_pyspark.py | Python | apache-2.0 | 2,494 |
from flask import Flask, render_template, request, redirect, url_for, jsonify
app = Flask(__name__)
shopping_list = ['Milk', 'Eggs', 'Bread', 'Butter']
@app.route('/', methods=['GET', 'POST'])
def index():
global shopping_list
if request.method == 'POST':
shopping_list.append(request.form['item'])
return render_template('index.html', items=shopping_list)
@app.route('/remove/<name>')
def remove_item(name):
global shopping_list
if name in shopping_list:
shopping_list.remove(name)
return redirect(url_for('index'))
@app.route('/api/items')
def get_items():
global shopping_list
return jsonify({'items': shopping_list})
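# Usage sketch against a local development server (illustrative):
#   $ curl http://localhost:5000/api/items
#   {"items": ["Milk", "Eggs", "Bread", "Butter"]}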
if __name__ == '__main__':
    app.run(debug=True)
| eschleicher/flask_shopping_list | shopping.py | Python | mit | 744 |
from copy import deepcopy
from itertools import dropwhile
import logging
from regparser.notice.diff import DesignateAmendment
from regparser.tree.struct import Node
from regparser.tree.xml_parser.appendices import process_appendix
def _is_appendix_amend(al):
"""Serves as a guard/filter to distinguish appendix amendments from
amendments to other parts of the reg"""
return (not isinstance(al, DesignateAmendment)
and Node.INTERP_MARK not in al.label
and len(al.label) > 1
and not al.label[1].isdigit())
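# Label shape sketch (illustrative values): ['1234', 'C'] (a whole appendix)
# and ['1234', 'C', '12'] (an appendix section) pass this guard, while
# ['1234', '3'] (a numbered section) and interpretation labels do not.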
def parse_appendix_changes(amended_labels, cfr_part, parent_xml):
"""Entry point. Currently only processes whole appendices, though the
functionality will expand in the future"""
relevant_amends = [al for al in amended_labels if _is_appendix_amend(al)]
appendices = {}
for al in relevant_amends:
cfr_part, letter = al.label[:2]
# Whole appendix, e.g. "1234-C" or appendix section, e.g. "1234-C-12"
if len(al.label) <= 3 and letter not in appendices:
appendix = whole_appendix(parent_xml, cfr_part, letter)
appendices[letter] = appendix
return [a for a in appendices.values() if a]
def whole_appendix(xml, cfr_part, letter):
"""Attempt to parse an appendix. Used when the entire appendix has been
replaced/added or when we can use the section headers to determine our
place. If the format isn't what we expect, display a warning."""
xml = deepcopy(xml)
hds = xml.xpath('//HD[contains(., "Appendix %s to Part %s")]'
% (letter, cfr_part))
if len(hds) == 0:
logging.warning("Could not find Appendix %s to part %s"
% (letter, cfr_part))
elif len(hds) > 1:
logging.warning("Too many headers for %s to part %s"
% (letter, cfr_part))
else:
hd = hds[0]
hd.set('SOURCE', 'HED')
extract = hd.getnext()
if extract is not None and extract.tag == 'EXTRACT':
extract.insert(0, hd)
for trailing in dropwhile(lambda n: n.tag != 'AMDPAR',
extract.getchildren()):
extract.remove(trailing)
return process_appendix(extract, cfr_part)
logging.warning("Bad format for whole appendix")
| willbarton/regulations-parser | regparser/notice/build_appendix.py | Python | cc0-1.0 | 2,341 |
"""
Has the built-in activation functions,
code for using them,
and code for adding new user-defined ones
"""
from __future__ import division
import math
import types
def sigmoid_activation(z):
z = max(-60.0, min(60.0, 5.0 * z))
return 1.0 / (1.0 + math.exp(-z))
def tanh_activation(z):
z = max(-60.0, min(60.0, 2.5 * z))
return math.tanh(z)
def sin_activation(z):
z = max(-60.0, min(60.0, 5.0 * z))
return math.sin(z)
def gauss_activation(z):
z = max(-3.4, min(3.4, z))
return math.exp(-5.0 * z**2)
def relu_activation(z):
return z if z > 0.0 else 0.0
def softplus_activation(z):
z = max(-60.0, min(60.0, 5.0 * z))
return 0.2 * math.log(1 + math.exp(z))
def identity_activation(z):
return z
def clamped_activation(z):
return max(-1.0, min(1.0, z))
def inv_activation(z):
try:
z = 1.0 / z
except ArithmeticError: # handle overflows
return 0.0
else:
return z
def log_activation(z):
z = max(1e-7, z)
return math.log(z)
def exp_activation(z):
z = max(-60.0, min(60.0, z))
return math.exp(z)
def abs_activation(z):
return abs(z)
def hat_activation(z):
return max(0.0, 1 - abs(z))
def square_activation(z):
return z ** 2
def cube_activation(z):
return z ** 3
class InvalidActivationFunction(TypeError):
pass
def validate_activation(function):
if not isinstance(function,
(types.BuiltinFunctionType,
types.FunctionType,
types.LambdaType)):
raise InvalidActivationFunction("A function object is required.")
if function.__code__.co_argcount != 1: # avoid deprecated use of `inspect`
raise InvalidActivationFunction("A single-argument function is required.")
class ActivationFunctionSet(object):
"""
Contains the list of current valid activation functions,
including methods for adding and getting them.
"""
def __init__(self):
self.functions = {}
self.add('sigmoid', sigmoid_activation)
self.add('tanh', tanh_activation)
self.add('sin', sin_activation)
self.add('gauss', gauss_activation)
self.add('relu', relu_activation)
self.add('softplus', softplus_activation)
self.add('identity', identity_activation)
self.add('clamped', clamped_activation)
self.add('inv', inv_activation)
self.add('log', log_activation)
self.add('exp', exp_activation)
self.add('abs', abs_activation)
self.add('hat', hat_activation)
self.add('square', square_activation)
self.add('cube', cube_activation)
def add(self, name, function):
validate_activation(function)
self.functions[name] = function
def get(self, name):
f = self.functions.get(name)
if f is None:
raise InvalidActivationFunction("No such activation function: {0!r}".format(name))
return f
def is_valid(self, name):
return name in self.functions
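# Usage sketch for registering a user-defined activation (the 'step' function
# below is hypothetical, not shipped with this module):
#
#   def step_activation(z):
#       return 1.0 if z > 0.0 else 0.0
#
#   act_set = ActivationFunctionSet()
#   act_set.add('step', step_activation)
#   act_set.get('step')(0.3)  # -> 1.0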
| drallensmith/neat-python | neat/activations.py | Python | bsd-3-clause | 3,053 |
# -*- coding: utf-8 -*-
from listenbrainz.db.model.feedback import Feedback
import listenbrainz.db.feedback as db_feedback
import listenbrainz.db.user as db_user
from listenbrainz.db.testing import DatabaseTestCase
class FeedbackDatabaseTestCase(DatabaseTestCase):
def setUp(self):
DatabaseTestCase.setUp(self)
self.user = db_user.get_or_create(1, "recording_feedback_user")
self.sample_feedback = [
{
"recording_msid": "d23f4719-9212-49f0-ad08-ddbfbfc50d6f",
"score": 1
},
{
"recording_msid": "222eb00d-9ead-42de-aec9-8f8c1509413d",
"score": -1
}
]
    def insert_test_data(self, user_id):
""" Insert test data into the database """
for fb in self.sample_feedback:
db_feedback.insert(
Feedback(
user_id=user_id,
recording_msid=fb["recording_msid"],
score=fb["score"]
)
)
return len(self.sample_feedback)
def test_insert(self):
count = self.insert_test_data(self.user["id"])
result = db_feedback.get_feedback_for_user(user_id=self.user["id"], limit=25, offset=0)
self.assertEqual(len(result), count)
def test_update_score_when_feedback_already_exits(self):
update_fb = self.sample_feedback[0]
count = self.insert_test_data(self.user["id"])
result = db_feedback.get_feedback_for_user(user_id=self.user["id"], limit=25, offset=0)
self.assertEqual(len(result), count)
self.assertEqual(result[1].recording_msid, update_fb["recording_msid"])
self.assertEqual(result[1].score, 1)
update_fb["score"] = -1 # change the score to -1
# update a record by inserting a record with updated score value
db_feedback.insert(
Feedback(
user_id=self.user["id"],
recording_msid=update_fb["recording_msid"],
score=update_fb["score"]
)
)
result = db_feedback.get_feedback_for_user(user_id=self.user["id"], limit=25, offset=0)
self.assertEqual(len(result), count)
self.assertEqual(result[0].recording_msid, update_fb["recording_msid"])
self.assertEqual(result[0].score, -1)
def test_delete(self):
del_fb = self.sample_feedback[0]
count = self.insert_test_data(self.user["id"])
result = db_feedback.get_feedback_for_user(user_id=self.user["id"], limit=25, offset=0)
self.assertEqual(len(result), count)
self.assertEqual(result[1].recording_msid, del_fb["recording_msid"])
# delete one record for the user
db_feedback.delete(
Feedback(
user_id=self.user["id"],
recording_msid=del_fb["recording_msid"],
score=del_fb["score"]
)
)
result = db_feedback.get_feedback_for_user(user_id=self.user["id"], limit=25, offset=0)
self.assertEqual(len(result), 1)
self.assertNotEqual(result[0].recording_msid, del_fb["recording_msid"])
def test_get_feedback_for_user(self):
count = self.insert_test_data(self.user["id"])
result = db_feedback.get_feedback_for_user(user_id=self.user["id"], limit=25, offset=0)
self.assertEqual(len(result), count)
self.assertEqual(result[0].user_id, self.user["id"])
self.assertEqual(result[0].user_name, self.user["musicbrainz_id"])
self.assertEqual(result[0].recording_msid, self.sample_feedback[1]["recording_msid"])
self.assertEqual(result[0].score, self.sample_feedback[1]["score"])
self.assertEqual(result[1].user_id, self.user["id"])
self.assertEqual(result[1].user_name, self.user["musicbrainz_id"])
self.assertEqual(result[1].recording_msid, self.sample_feedback[0]["recording_msid"])
self.assertEqual(result[1].score, self.sample_feedback[0]["score"])
# test the score argument
result = db_feedback.get_feedback_for_user(user_id=self.user["id"], limit=25, offset=0, score=1)
self.assertEqual(len(result), 1)
self.assertEqual(result[0].score, 1)
result = db_feedback.get_feedback_for_user(user_id=self.user["id"], limit=25, offset=0, score=-1)
self.assertEqual(len(result), 1)
self.assertEqual(result[0].score, -1)
# test the limit argument
result = db_feedback.get_feedback_for_user(user_id=self.user["id"], limit=1, offset=0)
self.assertEqual(len(result), 1)
# test the offset argument
result = db_feedback.get_feedback_for_user(user_id=self.user["id"], limit=25, offset=1)
self.assertEqual(len(result), 1)
def test_get_feedback_count_for_user(self):
count = self.insert_test_data(self.user["id"])
result = db_feedback.get_feedback_count_for_user(user_id=self.user["id"])
self.assertEqual(result, count)
def test_get_feedback_for_recording(self):
fb_msid_1 = self.sample_feedback[0]["recording_msid"]
self.insert_test_data(self.user["id"])
result = db_feedback.get_feedback_for_recording(recording_msid=fb_msid_1, limit=25, offset=0)
self.assertEqual(len(result), 1)
self.assertEqual(result[0].user_id, self.user["id"])
self.assertEqual(result[0].user_name, self.user["musicbrainz_id"])
self.assertEqual(result[0].recording_msid, fb_msid_1)
self.assertEqual(result[0].score, self.sample_feedback[0]["score"])
user2 = db_user.get_or_create(2, "recording_feedback_other_user")
self.insert_test_data(user2["id"])
result = db_feedback.get_feedback_for_recording(recording_msid=fb_msid_1, limit=25, offset=0)
self.assertEqual(len(result), 2)
self.assertEqual(result[0].user_id, user2["id"])
self.assertEqual(result[0].user_name, user2["musicbrainz_id"])
self.assertEqual(result[0].recording_msid, fb_msid_1)
self.assertEqual(result[0].score, self.sample_feedback[0]["score"])
self.assertEqual(result[1].user_id, self.user["id"])
self.assertEqual(result[1].user_name, self.user["musicbrainz_id"])
self.assertEqual(result[1].recording_msid, fb_msid_1)
self.assertEqual(result[1].score, self.sample_feedback[0]["score"])
# test the score argument
result = db_feedback.get_feedback_for_recording(recording_msid=fb_msid_1, limit=25, offset=0, score=1)
self.assertEqual(len(result), 2)
self.assertEqual(result[0].score, 1)
self.assertEqual(result[1].score, 1)
result = db_feedback.get_feedback_for_recording(recording_msid=fb_msid_1, limit=25, offset=0, score=-1)
self.assertEqual(len(result), 0)
# test the limit argument
result = db_feedback.get_feedback_for_recording(recording_msid=fb_msid_1, limit=1, offset=0)
self.assertEqual(len(result), 1)
# test the offset argument
result = db_feedback.get_feedback_for_recording(recording_msid=fb_msid_1, limit=25, offset=1)
self.assertEqual(len(result), 1)
def test_get_feedback_count_for_recording(self):
fb_msid_1 = self.sample_feedback[0]["recording_msid"]
self.insert_test_data(self.user["id"])
result = db_feedback.get_feedback_count_for_recording(recording_msid=fb_msid_1)
self.assertEqual(result, 1)
user2 = db_user.get_or_create(2, "recording_feedback_other_user")
self.insert_test_data(user2["id"])
result = db_feedback.get_feedback_count_for_recording(recording_msid=fb_msid_1)
self.assertEqual(result, 2)
def test_get_feedback_for_multiple_recordings_for_user(self):
self.insert_test_data(self.user["id"])
recording_list = []
# recording_msids for which feedback records are inserted
recording_list.append(self.sample_feedback[0]["recording_msid"])
recording_list.append(self.sample_feedback[1]["recording_msid"])
# recording_msid for which feedback record doesn't exist
recording_list.append("b83fd3c3-449c-49be-a874-31d7cf26d946")
result = db_feedback.get_feedback_for_multiple_recordings_for_user(
user_id=self.user["id"],
recording_list=recording_list
)
self.assertEqual(len(result), len(recording_list))
# test correct score is returned for recording_msids for which feedback records are inserted
self.assertEqual(result[0].user_id, self.user["id"])
self.assertEqual(result[0].user_name, self.user["musicbrainz_id"])
self.assertEqual(result[0].recording_msid, recording_list[0])
self.assertEqual(result[0].score, self.sample_feedback[0]["score"])
self.assertEqual(result[1].user_id, self.user["id"])
self.assertEqual(result[1].user_name, self.user["musicbrainz_id"])
self.assertEqual(result[1].recording_msid, recording_list[1])
self.assertEqual(result[1].score, self.sample_feedback[1]["score"])
# test score = 0 is returned for recording_msids for which feedback records are inserted
self.assertEqual(result[2].user_id, self.user["id"])
self.assertEqual(result[2].user_name, self.user["musicbrainz_id"])
self.assertEqual(result[2].recording_msid, recording_list[2])
self.assertEqual(result[2].score, 0)
| Freso/listenbrainz-server | listenbrainz/db/tests/test_feedback.py | Python | gpl-2.0 | 9,691 |
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.urls import path
from django_th.forms.wizard import DummyForm, ProviderForm, ConsumerForm, ServicesDescriptionForm
from django_th.views import TriggerListView, TriggerDeleteView, TriggerUpdateView, TriggerEditedTemplateView, MeUpdate
from django_th.views import TriggerDeletedTemplateView, me
from django_th.views_fbv import trigger_switch_all_to, trigger_edit, trigger_on_off, fire_trigger
from django_th.views_fbv import service_related_triggers_switch_to
from django_th.views_userservices import UserServiceListView, UserServiceCreateView, UserServiceUpdateView
from django_th.views_userservices import UserServiceDeleteView, renew_service
from django_th.views_wizard import UserServiceWizard, finalcallback
from django_js_reverse.views import urls_js
urlpatterns = [
path('jsreverse/', urls_js, name='js_reverse'),
# ****************************************
# admin module
# ****************************************
path('admin/', admin.site.urls),
# ****************************************
# profil
# ****************************************
path(r'me/', me, name='me'),
path(r'me/edit/', MeUpdate.as_view(), name='edit_me'),
# ****************************************
# auth module
# ****************************************
path(
'auth/password_change/',
auth_views.PasswordChangeView.as_view(template_name='auth/change_password.html'),
),
path(
'auth/password_change/done/',
auth_views.PasswordChangeDoneView.as_view(template_name='auth/password_change_done.html'),
),
path('auth/', include('django.contrib.auth.urls')),
# ****************************************
# trigger happy module
# ****************************************
path('th/', TriggerListView.as_view(), name='base'),
url(r'^th/trigger/filter_by/(?P<trigger_filtered_by>[a-zA-Z]+)$', TriggerListView.as_view(),
name='trigger_filter_by'),
url(r'^th/trigger/order_by/(?P<trigger_ordered_by>[a-zA-Z_]+)$', TriggerListView.as_view(),
name='trigger_order_by'),
path('th/trigger/', TriggerListView.as_view(), name='home'),
# ****************************************
# * trigger
# ****************************************
url(r'^th/trigger/delete/(?P<pk>\d+)$', TriggerDeleteView.as_view(), name='delete_trigger'),
url(r'^th/trigger/edit/(?P<pk>\d+)$', TriggerUpdateView.as_view(), name='edit_trigger'),
path('th/trigger/editprovider/<int:trigger_id>', trigger_edit, {'edit_what': 'Provider'}, name='edit_provider'),
path('th/trigger/editconsumer/<int:trigger_id>', trigger_edit, {'edit_what': 'Consumer'}, name='edit_consumer'),
path('th/trigger/edit/thanks', TriggerEditedTemplateView.as_view(), name="trigger_edit_thanks"),
path('th/trigger/delete/thanks', TriggerDeletedTemplateView.as_view(), name="trigger_delete_thanks"),
path('th/trigger/onoff/<int:trigger_id>', trigger_on_off, name="trigger_on_off"),
url(r'^th/trigger/all/(?P<switch>(on|off))$', trigger_switch_all_to, name="trigger_switch_all_to"),
# ****************************************
# * service
# ****************************************
path('th/services/', UserServiceListView.as_view(), name='user_services'),
url(r'^th/service/add/(?P<service_name>\w+)$', UserServiceCreateView.as_view(), name='add_service'),
url(r'^th/service/edit/(?P<pk>\d+)$', UserServiceUpdateView.as_view(), name='edit_service'),
url(r'^th/service/delete/(?P<pk>\d+)$', UserServiceDeleteView.as_view(), name='delete_service'),
url(r'^th/service/renew/(?P<pk>\d+)$', renew_service, name="renew_service"),
path('th/service/delete/', UserServiceDeleteView.as_view(), name='delete_service'),
url(r'^th/service/onoff/(?P<user_service_id>\d+)/(?P<switch>(on|off))$', service_related_triggers_switch_to,
name="service_related_triggers_switch_to"),
# ****************************************
# wizard
# ****************************************
path('th/service/create/',
UserServiceWizard.as_view([ProviderForm,
DummyForm,
ConsumerForm,
DummyForm,
ServicesDescriptionForm]),
name='create_service'),
# every service will use django_th.views.finalcallback
# and give the service_name value to use to
# trigger the real callback
path("th/callbackevernote/", finalcallback, {'service_name': 'ServiceEvernote', }, name="evernote_callback",),
path("th/callbackgithub/", finalcallback, {'service_name': 'ServiceGithub', }, name="github_callback",),
path("th/callbackpocket/", finalcallback, {'service_name': 'ServicePocket', }, name="pocket_callback",),
path("th/callbackpushbullet/", finalcallback, {'service_name': 'ServicePushbullet', }, name="pushbullet_callback",),
path("th/callbackreddit/", finalcallback, {'service_name': 'ServiceReddit', }, name="reddit_callback",),
path("th/callbacktodoist/", finalcallback, {'service_name': 'ServiceTodoist', }, name="todoist_callback",),
path("th/callbacktrello/", finalcallback, {'service_name': 'ServiceTrello', }, name="trello_callback",),
path("th/callbacktumblr/", finalcallback, {'service_name': 'ServiceTumblr', }, name="tumblr_callback",),
path("th/callbacktwitter/", finalcallback, {'service_name': 'ServiceTwitter', }, name="twitter_callback",),
path("th/callbackwallabag/", finalcallback, {'service_name': 'ServiceWallabag', }, name="wallabag_callback",),
path("th/callbackmastodon/", finalcallback, {'service_name': 'ServiceMastodon', }, name="mastodon_callback",),
path('th/myfeeds/', include('th_rss.urls')),
path('th/api/taiga/webhook/', include('th_taiga.urls')),
path('th/api/slack/webhook/', include('th_slack.urls'))
]
if settings.DJANGO_TH.get('fire'):
urlpatterns += path('th/trigger/fire/<int:trigger_id>', fire_trigger, name="fire_trigger"),
| foxmask/django-th | django_th/urls.py | Python | bsd-3-clause | 6,183 |
import os
from flask import Flask, render_template_string, request
from flask_mail import Mail
from flask_sqlalchemy import SQLAlchemy
from flask_user import login_required, SQLAlchemyAdapter, UserManager, UserMixin
from flask_user import roles_required
# Use a Class-based config to avoid needing a 2nd file
# os.getenv() enables configuration through OS environment variables
class ConfigClass(object):
# Flask settings
SECRET_KEY = os.getenv('SECRET_KEY', 'THIS IS AN INSECURE SECRET')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL', 'sqlite:///single_file_app.sqlite')
CSRF_ENABLED = True
# Flask-Mail settings
MAIL_USERNAME = os.getenv('MAIL_USERNAME', 'email@example.com')
MAIL_PASSWORD = os.getenv('MAIL_PASSWORD', 'password')
MAIL_DEFAULT_SENDER = os.getenv('MAIL_DEFAULT_SENDER', '"MyApp" <noreply@example.com>')
MAIL_SERVER = os.getenv('MAIL_SERVER', 'smtp.gmail.com')
MAIL_PORT = int(os.getenv('MAIL_PORT', '465'))
MAIL_USE_SSL = int(os.getenv('MAIL_USE_SSL', True))
# Flask-User settings
USER_APP_NAME = "AppName" # Used by email templates
def create_app(test_config=None): # For automated tests
# Setup Flask and read config from ConfigClass defined above
app = Flask(__name__)
app.config.from_object(__name__+'.ConfigClass')
# Load local_settings.py if file exists # For automated tests
try: app.config.from_object('local_settings')
except: pass
# Load optional test_config # For automated tests
if test_config:
app.config.update(test_config)
# Initialize Flask extensions
mail = Mail(app) # Initialize Flask-Mail
db = SQLAlchemy(app) # Initialize Flask-SQLAlchemy
# Define the User data model. Make sure to add flask.ext.user UserMixin!!
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
# User email information
email = db.Column(db.String(255), nullable=False, unique=True)
confirmed_at = db.Column(db.DateTime())
# User information
active = db.Column('is_active', db.Boolean(), nullable=False, server_default='0')
first_name = db.Column(db.String(100), nullable=False, server_default='')
last_name = db.Column(db.String(100), nullable=False, server_default='')
# Relationships
user_auth = db.relationship('UserAuth', uselist=False)
roles = db.relationship('Role', secondary='user_roles',
backref=db.backref('users', lazy='dynamic'))
# Define the UserAuth data model.
class UserAuth(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer(), db.ForeignKey('user.id', ondelete='CASCADE'))
# User authentication information
username = db.Column(db.String(50), nullable=False, unique=True)
password = db.Column(db.String(255), nullable=False, server_default='')
reset_password_token = db.Column(db.String(100), nullable=False, server_default='')
# Relationships
user = db.relationship('User', uselist=False)
# Define the Role data model
class Role(db.Model):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(50), unique=True)
# Define the UserRoles data model
class UserRoles(db.Model):
id = db.Column(db.Integer(), primary_key=True)
user_id = db.Column(db.Integer(), db.ForeignKey('user.id', ondelete='CASCADE'))
role_id = db.Column(db.Integer(), db.ForeignKey('role.id', ondelete='CASCADE'))
    # Create all database tables
db.create_all()
# Setup Flask-User
db_adapter = SQLAlchemyAdapter(db, User, UserAuthClass=UserAuth)
user_manager = UserManager(db_adapter, app)
# Create 'user007' user with 'secret' and 'agent' roles
if not UserAuth.query.filter(UserAuth.username=='user007').first():
user1 = User(email='user007@example.com', first_name='James', last_name='Bond', active=True)
db.session.add(user1)
user_auth1 = UserAuth(user=user1, username='user007',
password=user_manager.hash_password('Password1')
)
db.session.add(user_auth1)
user1.roles.append(Role(name='secret'))
user1.roles.append(Role(name='agent'))
db.session.commit()
# The Home page is accessible to anyone
@app.route('/')
def home_page():
return render_template_string("""
{% extends "base.html" %}
{% block content %}
<h2>Home page</h2>
<p>This page can be accessed by anyone.</p><br/>
<p><a href={{ url_for('home_page') }}>Home page</a> (anyone)</p>
<p><a href={{ url_for('members_page') }}>Members page</a> (login required)</p>
<p><a href={{ url_for('special_page') }}>Special page</a> (login with username 'user007' and password 'Password1')</p>
{% endblock %}
""")
# The Members page is only accessible to authenticated users
@app.route('/members')
@login_required # Use of @login_required decorator
def members_page():
return render_template_string("""
{% extends "base.html" %}
{% block content %}
<h2>Members page</h2>
<p>This page can only be accessed by authenticated users.</p><br/>
<p><a href={{ url_for('home_page') }}>Home page</a> (anyone)</p>
<p><a href={{ url_for('members_page') }}>Members page</a> (login required)</p>
<p><a href={{ url_for('special_page') }}>Special page</a> (login with username 'user007' and password 'Password1')</p>
{% endblock %}
""")
    # The Special page requires a user with the 'secret' role plus either the 'sauce' or the 'agent' role.
@app.route('/special')
@roles_required('secret', ['sauce', 'agent']) # Use of @roles_required decorator
def special_page():
return render_template_string("""
{% extends "base.html" %}
{% block content %}
<h2>Special Page</h2>
<p>This page can only be accessed by user007.</p><br/>
<p><a href={{ url_for('home_page') }}>Home page</a> (anyone)</p>
<p><a href={{ url_for('members_page') }}>Members page</a> (login required)</p>
<p><a href={{ url_for('special_page') }}>Special page</a> (login with username 'user007' and password 'Password1')</p>
{% endblock %}
""")
return app
# Start development web server
if __name__=='__main__':
app = create_app()
app.run(host='0.0.0.0', port=5000, debug=True)
| jamescarignan/Flask-User | example_apps/user_auth_app.py | Python | bsd-2-clause | 6,986 |
#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
# $Id$
import sys, select, time
import pycurl
c1 = pycurl.Curl()
c2 = pycurl.Curl()
c3 = pycurl.Curl()
c1.setopt(c1.URL, "http://www.python.org")
c2.setopt(c2.URL, "http://curl.haxx.se")
c3.setopt(c3.URL, "http://slashdot.org")
c1.body = open("doc1", "wb")
c2.body = open("doc2", "wb")
c3.body = open("doc3", "wb")
c1.setopt(c1.WRITEFUNCTION, c1.body.write)
c2.setopt(c2.WRITEFUNCTION, c2.body.write)
c3.setopt(c3.WRITEFUNCTION, c3.body.write)
m = pycurl.CurlMulti()
m.add_handle(c1)
m.add_handle(c2)
m.add_handle(c3)
# Number of seconds to wait for a timeout to happen
SELECT_TIMEOUT = 1.0
# Stir the state machine into action
while 1:
ret, num_handles = m.perform()
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
# Keep going until all the connections have terminated
while num_handles:
# The select method uses fdset internally to determine which file descriptors
# to check.
m.select(SELECT_TIMEOUT)
while 1:
ret, num_handles = m.perform()
# Print the message, if any
print m.info_read(1)
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
# Cleanup
m.remove_handle(c3)
m.remove_handle(c2)
m.remove_handle(c1)
m.close()
c1.body.close()
c2.body.close()
c3.body.close()
c1.close()
c2.close()
c3.close()
print "http://www.python.org is in file doc1"
print "http://curl.haxx.se is in file doc2"
print "http://slashdot.org is in file doc3"
| ashumkin/pycurl-cvs | tests/test_multi6.py | Python | lgpl-2.1 | 1,480 |
#!/usr/bin/env python3
# written by sqall
# twitter: https://twitter.com/sqall01
# blog: https://h4des.org
# github: https://github.com/sqall01
#
# Licensed under the GNU Affero General Public License, version 3.
import time
import threading
import os
from .serverCommunication import ServerCommunication
# this class handles the receive part of the client
class Receiver(threading.Thread):
def __init__(self,
connection: ServerCommunication):
threading.Thread.__init__(self)
self._connection = connection
self._log_tag = os.path.basename(__file__)
self._exit_flag = False
def run(self):
while True:
if self._exit_flag:
return
# Only run the communication handler.
# NOTE: Connection initialization is performed once during AlertR client start up
# and by connection watchdog if it was lost.
self._connection.handle_requests()
time.sleep(1)
# Sets the exit flag to shut down the thread and exists connection.
def exit(self):
self._exit_flag = True
self._connection.exit()
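# Usage sketch (illustrative; constructing the ServerCommunication instance
# is part of the wider client start-up and omitted here):
#
#   receiver = Receiver(connection)
#   receiver.daemon = True
#   receiver.start()
#   ...
#   receiver.exit()  # shuts down the thread and closes the connection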
| sqall01/alertR | shared_code/clients_all/lib/client/receiver.py | Python | agpl-3.0 | 1,160 |
import logging
import logging.config
# TODO: add a rotating file log handler
DEFAULT_CONFIG = {
'version': 1,
'formatters': {
'standard': {
'format': '[%(levelname)s] %(asctime)s %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S'
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'standard',
'class': 'logging.StreamHandler'
}
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'INFO'
}
}
}
def configure_logging():
logging.config.dictConfig(DEFAULT_CONFIG)
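# Sketch for the TODO above (file name and size limits are illustrative
# assumptions, not project settings): a rotating file handler could be merged
# into DEFAULT_CONFIG before dictConfig is called.
#
#   DEFAULT_CONFIG['handlers']['file'] = {
#       'level': 'INFO',
#       'formatter': 'standard',
#       'class': 'logging.handlers.RotatingFileHandler',
#       'filename': 'responsebot.log',
#       'maxBytes': 1024 * 1024,
#       'backupCount': 3,
#   }
#   DEFAULT_CONFIG['loggers']['']['handlers'].append('file')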
| invinst/ResponseBot | responsebot/utils/log_utils.py | Python | apache-2.0 | 626 |
#!/usr/bin/env python
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import sys
from modules.map.proto import map_pb2
from modules.map.proto import map_lane_pb2
from modules.map.proto import map_road_pb2
import math
from shapely.geometry import LineString, Point
LANE_WIDTH = 3.3
def convert(p, p2, distance):
delta_y = p2.y - p.y
delta_x = p2.x - p.x
# print math.atan2(delta_y, delta_x)
left_angle = math.atan2(delta_y, delta_x) + math.pi / 2.0
right_angle = math.atan2(delta_y, delta_x) - math.pi / 2.0
# print angle
lp = []
lp.append(p.x + (math.cos(left_angle) * distance))
lp.append(p.y + (math.sin(left_angle) * distance))
rp = []
rp.append(p.x + (math.cos(right_angle) * distance))
rp.append(p.y + (math.sin(right_angle) * distance))
return lp, rp
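# Illustrative self-check (an addition, not part of the original tool): for a
# segment heading due east, convert() should place the left offset point at
# +y and the right offset point at -y. Defined but never called by the script.
def _convert_sanity_check(distance=1.0):
    lp, rp = convert(Point(0.0, 0.0), Point(1.0, 0.0), distance)
    assert abs(lp[1] - distance) < 1e-9 and abs(rp[1] + distance) < 1e-9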
def shift(p, p2, distance, isleft=True):
delta_y = p2.y - p.y
delta_x = p2.x - p.x
# print math.atan2(delta_y, delta_x)
angle = 0
if isleft:
angle = math.atan2(delta_y, delta_x) + math.pi / 2.0
else:
angle = math.atan2(delta_y, delta_x) - math.pi / 2.0
# print angle
p1n = []
p1n.append(p.x + (math.cos(angle) * distance))
p1n.append(p.y + (math.sin(angle) * distance))
p2n = []
p2n.append(p2.x + (math.cos(angle) * distance))
p2n.append(p2.y + (math.sin(angle) * distance))
return Point(p1n), Point(p2n)
def create_lane(map, id):
lane = map.lane.add()
lane.id.id = str(id)
lane.type = map_lane_pb2.Lane.CITY_DRIVING
lane.direction = map_lane_pb2.Lane.FORWARD
lane.length = 100.0
lane.speed_limit = 20.0
lane.turn = map_lane_pb2.Lane.NO_TURN
#lane.predecessor_id.add().id = str(id - 1)
#lane.successor_id.add().id = str(id + 1)
left_boundary = lane.left_boundary.curve.segment.add()
right_boundary = lane.right_boundary.curve.segment.add()
central = lane.central_curve.segment.add()
central.length = 100.0
type = lane.left_boundary.boundary_type.add()
type.s = 0
type.types.append(map_lane_pb2.LaneBoundaryType.DOTTED_YELLOW)
lane.right_boundary.length = 100.0
type = lane.right_boundary.boundary_type.add()
type.s = 0
type.types.append(map_lane_pb2.LaneBoundaryType.DOTTED_YELLOW)
lane.left_boundary.length = 100.0
return lane, central, left_boundary, right_boundary
fpath = sys.argv[1]
f = open(fpath, 'r')
points = []
for line in f:
line = line.replace("\n", '')
data = line.split(',')
x = float(data[0])
y = float(data[1])
points.append((x, y))
path = LineString(points)
length = int(path.length)
fmap = open(sys.argv[2], 'w')
id = 0
map = map_pb2.Map()
road = map.road.add()
road.id.id = "1"
section = road.section.add()
section.id.id = "2"
lane = None
lane_n1 = None
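# Layout of the generated map (as implemented below): every 100 points of the
# input path starts a new lane segment; the left lane of each pair uses `id`
# and its right neighbor uses `id + 1000`, with predecessor/successor links
# chaining consecutive segments together.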
for i in range(length - 1):
if i % 100 == 0:
id += 1
if lane is not None:
lane.successor_id.add().id = str(id)
if lane_n1 is not None:
lane_n1.successor_id.add().id = str(id + 1000)
lane, central, left_boundary, right_boundary = create_lane(map, id)
lane_n1, central_n1, left_boundary_n1, right_boundary_n1 = create_lane(
map, id + 1000)
section.lane_id.add().id = str(id)
section.lane_id.add().id = str(id + 1000)
left_edge = section.boundary.outer_polygon.edge.add()
left_edge.type = map_road_pb2.BoundaryEdge.LEFT_BOUNDARY
left_edge_segment = left_edge.curve.segment.add()
right_edge = section.boundary.outer_polygon.edge.add()
right_edge.type = map_road_pb2.BoundaryEdge.RIGHT_BOUNDARY
right_edge_segment = right_edge.curve.segment.add()
lane.right_neighbor_forward_lane_id.add().id = str(id + 1000)
lane_n1.left_neighbor_forward_lane_id.add().id = str(id)
if i > 0:
lane.predecessor_id.add().id = str(id - 1)
lane_n1.predecessor_id.add().id = str(id - 1 + 1000)
right_edge_point = right_edge_segment.line_segment.point.add()
left_edge_point = left_edge_segment.line_segment.point.add()
left_bound_point = left_boundary.line_segment.point.add()
right_bound_point = right_boundary.line_segment.point.add()
central_point = central.line_segment.point.add()
p = path.interpolate(i - 1)
p2 = path.interpolate(i - 1 + 0.5)
distance = LANE_WIDTH / 2.0
lp, rp = convert(p, p2, distance)
left_bound_point.y = lp[1]
left_bound_point.x = lp[0]
right_bound_point.y = rp[1]
right_bound_point.x = rp[0]
central_point.x = p.x
central_point.y = p.y
left_edge_point.y = lp[1]
left_edge_point.x = lp[0]
left_sample = lane.left_sample.add()
left_sample.s = 0
left_sample.width = LANE_WIDTH / 2.0
right_sample = lane.right_sample.add()
right_sample.s = 0
right_sample.width = LANE_WIDTH / 2.0
#####
left_bound_point = left_boundary_n1.line_segment.point.add()
right_bound_point = right_boundary_n1.line_segment.point.add()
central_point = central_n1.line_segment.point.add()
p = path.interpolate(i - 1)
p2 = path.interpolate(i - 1 + 0.5)
distance = LANE_WIDTH
p, p2 = shift(p, p2, distance, False)
distance = LANE_WIDTH / 2.0
lp, rp = convert(p, p2, distance)
left_bound_point.y = lp[1]
left_bound_point.x = lp[0]
right_bound_point.y = rp[1]
right_bound_point.x = rp[0]
central_point.x = p.x
central_point.y = p.y
right_edge_point.y = rp[1]
right_edge_point.x = rp[0]
left_sample = lane_n1.left_sample.add()
left_sample.s = 0
left_sample.width = LANE_WIDTH / 2.0
right_sample = lane_n1.right_sample.add()
right_sample.s = 0
right_sample.width = LANE_WIDTH / 2.0
right_edge_point = right_edge_segment.line_segment.point.add()
left_edge_point = left_edge_segment.line_segment.point.add()
left_bound_point = left_boundary.line_segment.point.add()
right_bound_point = right_boundary.line_segment.point.add()
central_point = central.line_segment.point.add()
p = path.interpolate(i)
p2 = path.interpolate(i + 0.5)
distance = LANE_WIDTH / 2.0
lp, rp = convert(p, p2, distance)
central_point.x = p.x
central_point.y = p.y
left_bound_point.y = lp[1]
left_bound_point.x = lp[0]
right_bound_point.y = rp[1]
right_bound_point.x = rp[0]
left_edge_point.y = lp[1]
left_edge_point.x = lp[0]
left_sample = lane.left_sample.add()
left_sample.s = i % 100 + 1
left_sample.width = LANE_WIDTH / 2.0
right_sample = lane.right_sample.add()
right_sample.s = i % 100 + 1
right_sample.width = LANE_WIDTH / 2.0
################
left_bound_point = left_boundary_n1.line_segment.point.add()
right_bound_point = right_boundary_n1.line_segment.point.add()
central_point = central_n1.line_segment.point.add()
p = path.interpolate(i)
p2 = path.interpolate(i + 0.5)
distance = LANE_WIDTH
p, p2 = shift(p, p2, distance, False)
distance = LANE_WIDTH / 2.0
lp, rp = convert(p, p2, distance)
central_point.x = p.x
central_point.y = p.y
left_bound_point.y = lp[1]
left_bound_point.x = lp[0]
right_bound_point.y = rp[1]
right_bound_point.x = rp[0]
right_edge_point.y = rp[1]
right_edge_point.x = rp[0]
left_sample = lane_n1.left_sample.add()
left_sample.s = i % 100 + 1
left_sample.width = LANE_WIDTH / 2.0
right_sample = lane_n1.right_sample.add()
right_sample.s = i % 100 + 1
right_sample.width = LANE_WIDTH / 2.0
fmap.write(str(map))
fmap.close()
| startcode/apollo | modules/tools/map_gen/map_gen_two_lanes_right_ext.py | Python | apache-2.0 | 8,664 |
"""Class that holds static constants for LTM server scripts."""
import os
class LTM(object):
server_log_file = '/var/log/lgtm/lgtm.log'
test_log_dir = '/var/log/lgtm/ltm_logs/'
ltm_username = 'ltm'
@staticmethod
def create_log_dir(log_file_path):
if not os.path.exists(os.path.dirname(log_file_path)):
os.makedirs(os.path.dirname(log_file_path))
LTM.create_log_dir(LTM.test_log_dir)
# end class LTM
| dmonakhov/xfstests-bld | kvm-xfstests/test-appliance/files/usr/local/lib/gce-ltm/ltm.py | Python | gpl-2.0 | 423 |
from pathod import pathoc_cmdline as cmdline
import tutils
from six.moves import cStringIO as StringIO
import mock
@mock.patch("argparse.ArgumentParser.error")
def test_pathoc(perror):
assert cmdline.args_pathoc(["pathoc", "foo.com", "get:/"])
s = StringIO()
with tutils.raises(SystemExit):
cmdline.args_pathoc(["pathoc", "--show-uas"], s, s)
a = cmdline.args_pathoc(["pathoc", "foo.com:8888", "get:/"])
assert a.port == 8888
a = cmdline.args_pathoc(["pathoc", "foo.com:xxx", "get:/"])
assert perror.called
perror.reset_mock()
a = cmdline.args_pathoc(["pathoc", "-I", "10, 20", "foo.com:8888", "get:/"])
assert a.ignorecodes == [10, 20]
a = cmdline.args_pathoc(["pathoc", "-I", "xx, 20", "foo.com:8888", "get:/"])
assert perror.called
perror.reset_mock()
a = cmdline.args_pathoc(["pathoc", "-c", "foo:10", "foo.com:8888", "get:/"])
assert a.connect_to == ["foo", 10]
a = cmdline.args_pathoc(["pathoc", "foo.com", "get:/", "--http2"])
assert a.use_http2 is True
assert a.ssl is True
a = cmdline.args_pathoc(["pathoc", "foo.com", "get:/", "--http2-skip-connection-preface"])
assert a.use_http2 is True
assert a.ssl is True
assert a.http2_skip_connection_preface is True
a = cmdline.args_pathoc(["pathoc", "-c", "foo", "foo.com:8888", "get:/"])
assert perror.called
perror.reset_mock()
a = cmdline.args_pathoc(
["pathoc", "-c", "foo:bar", "foo.com:8888", "get:/"])
assert perror.called
perror.reset_mock()
a = cmdline.args_pathoc(
[
"pathoc",
"foo.com:8888",
tutils.test_data.path("data/request")
]
)
assert len(list(a.requests)) == 1
with tutils.raises(SystemExit):
cmdline.args_pathoc(["pathoc", "foo.com", "invalid"], s, s)
| tdickers/mitmproxy | test/pathod/test_pathoc_cmdline.py | Python | mit | 1,841 |
#globalP.py
#GENERAL PARAMETERS
Filename = "DCamSeq"
N = 100 # Training queue
imageSize = (480,640)
#DEBUG PARAMETERS
evalOne = True #Only evaluates first image before training
createKernelFile = False
debug = False # creates random kernel visualization images
debugAll = -3 # if 0 only create background, else create all histograms
| bmalcover/npbg | globalP.py | Python | apache-2.0 | 345 |
lloyd = {
"name": "Lloyd",
"homework": [90.0, 97.0, 75.0, 92.0],
"quizzes": [88.0, 40.0, 94.0],
"tests": [75.0, 90.0]
}
alice = {
"name": "Alice",
"homework": [100.0, 92.0, 98.0, 100.0],
"quizzes": [82.0, 83.0, 91.0],
"tests": [89.0, 97.0]
}
tyler = {
"name": "Tyler",
"homework": [0.0, 87.0, 75.0, 22.0],
"quizzes": [0.0, 75.0, 78.0],
"tests": [100.0, 100.0]
}
orwa = {
"name": "orwa",
"homework": [100.0, 89.00, 95.00, 97.00],
"quizzes": [78.00, 89.00, 90.00],
"tests": [100.00, 100.00]
}
students = [lloyd, alice, tyler]
# Add your functions below!
def average(numbers):
total = sum(numbers)
total = float(total)
average = total / len(numbers)
return average
def get_average(student):
homework = average(student["homework"])
quizzes = average(student["quizzes"])
tests = average(student["tests"])
return (homework * 0.1) + (quizzes * 0.3) + (tests * 0.6)
def get_letter_grade(score):
if score >= 90:
return "A"
elif score >= 80:
return "B"
elif score >= 70:
return "C"
elif score >= 60:
return "D"
else:
return "F"
def get_class_average(students):
results = []
for student in students:
results.append(get_average(student))
return average(results)
print get_class_average(students)
print get_letter_grade(get_class_average(students))
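# Worked check (hand computation): lloyd -> 80.55, alice -> 91.15 and
# tyler -> 79.9, so the class average is ~83.87 and the printed letter grade
# is "B". Note that orwa is defined above but is not in the students list,
# so it does not affect the result.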
| orwa1902/code-exemples | class-managment.py | Python | apache-2.0 | 1,457 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This is the module where the main solver object for the
nonlinear solver of bolt is defined. This solver object
stores the details of the system defined under physical_system,
and is evolved using the methods of this module.
The solver has the option of using 2 different methods:
- A semi-Lagrangian scheme based on Cheng-Knorr (1976), which
  uses advective interpolation (non-conservative).
- The interpolation schemes available are
linear and cubic spline.
- Finite volume scheme(conservative):
- Riemann solvers available are the local Lax-Friedrichs and 1st order
upwind scheme.
- The reconstruction schemes available are minmod, PPM, and WENO5
"""
# Importing dependencies:
import arrayfire as af
import numpy as np
import petsc4py, sys
petsc4py.init(sys.argv)
from petsc4py import PETSc
import socket
# Importing solver libraries:
from . import communicate
from . import boundaries
from . import timestep
from .file_io import dump
from .file_io import load
from .utils.bandwidth_test import bandwidth_test
from .utils.print_with_indent import indent
from .utils.performance_timings import print_table
from .utils.broadcasted_primitive_operations import multiply
from .compute_moments import compute_moments as compute_moments_imported
from .fields.fields import fields_solver
class nonlinear_solver(object):
"""
    An instance of this class contains methods which are used
    in evolving the system declared under physical_system nonlinearly. The
    state of the system may then be determined from the attributes of the
system such as the distribution function and electromagnetic fields.
Relevant physical information is obtained by coarse graining this system
by taking moments of the distribution function. This is performed by the
compute_moments() method.
"""
def __init__(self, physical_system, performance_test_flag = False):
"""
Constructor for the nonlinear_solver object. It takes the physical
system object as an argument and uses it in intialization and
evolution of the system in consideration.
Additionally, a performance test flag is also passed which when true,
stores time which is consumed by each of the major solver routines.
This proves particularly useful in analyzing performance bottlenecks
and obtaining benchmarks.
Parameters:
-----------
physical_system: The defined physical system object which holds
all the simulation information such as the initial
conditions, and the domain info is passed as an
argument in defining an instance of the
nonlinear_solver. This system is then evolved, and
monitored using the various methods under the
nonlinear_solver class.
"""
self.physical_system = physical_system
# Holding Domain Info:
self.q1_start, self.q1_end = physical_system.q1_start,\
physical_system.q1_end
self.q2_start, self.q2_end = physical_system.q2_start,\
physical_system.q2_end
self.p1_start, self.p1_end = physical_system.p1_start,\
physical_system.p1_end
self.p2_start, self.p2_end = physical_system.p2_start,\
physical_system.p2_end
self.p3_start, self.p3_end = physical_system.p3_start,\
physical_system.p3_end
# Holding Domain Resolution:
self.N_q1, self.dq1 = physical_system.N_q1, physical_system.dq1
self.N_q2, self.dq2 = physical_system.N_q2, physical_system.dq2
self.N_p1, self.dp1 = physical_system.N_p1, physical_system.dp1
self.N_p2, self.dp2 = physical_system.N_p2, physical_system.dp2
self.N_p3, self.dp3 = physical_system.N_p3, physical_system.dp3
# Getting number of ghost zones, and the boundary
# conditions that are utilized:
N_g_q = self.N_ghost_q = physical_system.N_ghost_q
N_g_p = self.N_ghost_p = physical_system.N_ghost_p
self.boundary_conditions = physical_system.boundary_conditions
# Declaring the communicator:
self._comm = PETSc.COMM_WORLD.tompi4py()
if(self.physical_system.params.num_devices>1):
af.set_device(self._comm.rank%self.physical_system.params.num_devices)
# Getting number of species:
self.N_species = len(physical_system.params.mass)
# Having the mass and charge along axis 1:
self.physical_system.params.mass = \
af.cast(af.moddims(af.to_array(physical_system.params.mass),
1, self.N_species
),
af.Dtype.f64
)
self.physical_system.params.charge = \
af.cast(af.moddims(af.to_array(physical_system.params.charge),
1, self.N_species
),
af.Dtype.f64
)
PETSc.Sys.Print('\nBackend Details for Nonlinear Solver:')
# Printing the backend details for each rank/device/node:
PETSc.Sys.syncPrint(indent('Rank ' + str(self._comm.rank) + ' of ' + str(self._comm.size-1)))
PETSc.Sys.syncPrint(indent('On Node: '+ socket.gethostname()))
PETSc.Sys.syncPrint(indent('Device Details:'))
PETSc.Sys.syncPrint(indent(af.info_str(), 2))
PETSc.Sys.syncPrint(indent('Device Bandwidth = ' + str(bandwidth_test(100)) + ' GB / sec'))
PETSc.Sys.syncPrint()
PETSc.Sys.syncFlush()
self.performance_test_flag = performance_test_flag
# Initializing variables which are used to time the components of the solver:
if(performance_test_flag == True):
self.time_ts = 0
self.time_interp2 = 0
self.time_sourcets = 0
self.time_fvm_solver = 0
self.time_reconstruct = 0
self.time_riemann = 0
self.time_fieldstep = 0
self.time_interp3 = 0
self.time_apply_bcs_f = 0
self.time_communicate_f = 0
petsc_bc_in_q1 = 'ghosted'
petsc_bc_in_q2 = 'ghosted'
# Only for periodic boundary conditions or shearing-box boundary conditions
# do the boundary conditions passed to the DA need to be changed. PETSc
# automatically handles the application of periodic boundary conditions when
# running in parallel. For shearing box boundary conditions, an interpolation
# operation needs to be applied on top of the periodic boundary conditions.
# In all other cases, ghosted boundaries are used.
if( self.boundary_conditions.in_q1_left == 'periodic'
or self.boundary_conditions.in_q1_left == 'shearing-box'
):
petsc_bc_in_q1 = 'periodic'
if( self.boundary_conditions.in_q2_bottom == 'periodic'
or self.boundary_conditions.in_q2_bottom == 'shearing-box'
):
petsc_bc_in_q2 = 'periodic'
if(self.boundary_conditions.in_q1_left == 'periodic'):
try:
assert(self.boundary_conditions.in_q1_right == 'periodic')
except:
raise Exception('Periodic boundary conditions need to be applied to \
both the boundaries of a particular axis'
)
if(self.boundary_conditions.in_q1_left == 'shearing-box'):
try:
assert(self.boundary_conditions.in_q1_right == 'shearing-box')
except:
raise Exception('Shearing box boundary conditions need to be applied to \
both the boundaries of a particular axis'
)
if(self.boundary_conditions.in_q2_bottom == 'periodic'):
try:
assert(self.boundary_conditions.in_q2_top == 'periodic')
except:
raise Exception('Periodic boundary conditions need to be applied to \
both the boundaries of a particular axis'
)
if(self.boundary_conditions.in_q2_bottom == 'shearing-box'):
try:
assert(self.boundary_conditions.in_q2_top == 'shearing-box')
except:
raise Exception('Shearing box boundary conditions need to be applied to \
both the boundaries of a particular axis'
)
nproc_in_q1 = PETSc.DECIDE
nproc_in_q2 = PETSc.DECIDE
# Since shearing boundary conditions require interpolations which are non-local:
if(self.boundary_conditions.in_q2_bottom == 'shearing-box'):
nproc_in_q1 = 1
if(self.boundary_conditions.in_q1_left == 'shearing-box'):
nproc_in_q2 = 1
# DMDA is a data structure to handle a distributed structure
# grid and its related core algorithms. It stores metadata of
# how the grid is partitioned when run in parallel which is
# utilized by the various methods of the solver.
self._da_f = PETSc.DMDA().create([self.N_q1, self.N_q2],
dof = ( self.N_species
* (self.N_p1 + 2 * N_g_p)
* (self.N_p2 + 2 * N_g_p)
* (self.N_p3 + 2 * N_g_p)
),
stencil_width = N_g_q,
boundary_type = (petsc_bc_in_q1,
petsc_bc_in_q2
),
proc_sizes = (nproc_in_q1,
nproc_in_q2
),
stencil_type = 1,
comm = self._comm
)
# This DA is used by the FileIO routine dump_distribution_function():
self._da_dump_f = PETSc.DMDA().create([self.N_q1, self.N_q2],
dof = ( self.N_species
* self.N_p1
* self.N_p2
* self.N_p3
),
stencil_width = N_g_q,
boundary_type = (petsc_bc_in_q1,
petsc_bc_in_q2
),
proc_sizes = (nproc_in_q1,
nproc_in_q2
),
stencil_type = 1,
comm = self._comm
)
# This DA is used by the FileIO routine dump_moments():
# Finding the number of definitions for the moments:
attributes = [a for a in dir(self.physical_system.moments) if not a.startswith('_')]
# Removing utility functions:
if('integral_over_v' in attributes):
attributes.remove('integral_over_v')
self._da_dump_moments = PETSc.DMDA().create([self.N_q1, self.N_q2],
dof = self.N_species
* len(attributes),
proc_sizes = (nproc_in_q1,
nproc_in_q2
),
comm = self._comm
)
# Creation of the local and global vectors from the DA:
# This is for the distribution function
self._glob_f = self._da_f.createGlobalVec()
self._local_f = self._da_f.createLocalVec()
# The following vector is used to dump the data to file:
self._glob_dump_f = self._da_dump_f.createGlobalVec()
self._glob_moments = self._da_dump_moments.createGlobalVec()
# Getting the arrays for the above vectors:
self._glob_f_array = self._glob_f.getArray()
self._local_f_array = self._local_f.getArray()
self._glob_moments_array = self._glob_moments.getArray()
self._glob_dump_f_array = self._glob_dump_f.getArray()
# Setting names for the objects which will then be
# used as the key identifiers for the HDF5 files:
PETSc.Object.setName(self._glob_dump_f, 'distribution_function')
PETSc.Object.setName(self._glob_moments, 'moments')
        # Obtaining the array values of the canonical variables:
self.q1_center, self.q2_center = self._calculate_q_center()
self.p1_center, self.p2_center, self.p3_center = self._calculate_p_center()
# Initialize according to initial condition provided by user:
self._initialize(physical_system.params)
# Obtaining start coordinates for the local zone
# Additionally, we also obtain the size of the local zone
((i_q1_start, i_q2_start), (N_q1_local, N_q2_local)) = self._da_f.getCorners()
(i_q1_end, i_q2_end) = (i_q1_start + N_q1_local - 1, i_q2_start + N_q2_local - 1)
# Applying dirichlet boundary conditions:
if(self.physical_system.boundary_conditions.in_q1_left == 'dirichlet'):
# If local zone includes the left physical boundary:
if(i_q1_start == 0):
self.f[:, :N_g_q] = self.boundary_conditions.\
f_left(self.f, self.q1_center, self.q2_center,
self.p1_center, self.p2_center, self.p3_center,
self.physical_system.params
)[:, :N_g_q]
if(self.physical_system.boundary_conditions.in_q1_right == 'dirichlet'):
# If local zone includes the right physical boundary:
if(i_q1_end == self.N_q1 - 1):
self.f[:, -N_g_q:] = self.boundary_conditions.\
f_right(self.f, self.q1_center, self.q2_center,
self.p1_center, self.p2_center, self.p3_center,
self.physical_system.params
)[:, -N_g_q:]
if(self.physical_system.boundary_conditions.in_q2_bottom == 'dirichlet'):
# If local zone includes the bottom physical boundary:
if(i_q2_start == 0):
self.f[:, :, :N_g_q] = self.boundary_conditions.\
f_bot(self.f, self.q1_center, self.q2_center,
self.p1_center, self.p2_center, self.p3_center,
self.physical_system.params
)[:, :, :N_g_q]
if(self.physical_system.boundary_conditions.in_q2_top == 'dirichlet'):
# If local zone includes the top physical boundary:
if(i_q2_end == self.N_q2 - 1):
self.f[:, :, -N_g_q:] = self.boundary_conditions.\
f_top(self.f, self.q1_center, self.q2_center,
self.p1_center, self.p2_center, self.p3_center,
self.physical_system.params
)[:, :, -N_g_q:]
# Assigning the value to the PETSc Vecs(for dump at t = 0):
(af.flat(self.f)).to_ndarray(self._local_f_array)
(af.flat(self.f[:, :, N_g_q:-N_g_q, N_g_q:-N_g_q])).to_ndarray(self._glob_f_array)
# Assigning the function objects to methods of the solver:
self._A_q = physical_system.A_q
self._C_q = physical_system.C_q
self._A_p = physical_system.A_p
self._C_p = physical_system.C_p
# Source/Sink term:
self._source = physical_system.source
# Initializing a variable to track time-elapsed:
self.time_elapsed = 0
def _convert_to_q_expanded(self, array):
"""
Since we are limited to use 4D arrays due to
the bound from ArrayFire, we define 2 forms
        which can be used such that the computations may be
        carried out along all dimensions necessary:
q_expanded form:(N_p1 * N_p2 * N_p3, N_s, N_q1, N_q2)
p_expanded form:(N_p1, N_p2, N_p3, N_s * N_q1 * N_q2)
This function converts the input array from
p_expanded to q_expanded form.
"""
# Obtaining start coordinates for the local zone
# Additionally, we also obtain the size of the local zone
((i_q1_start, i_q2_start), (N_q1_local, N_q2_local)) = self._da_f.getCorners()
array = af.moddims(array,
(self.N_p1 + 2 * self.N_ghost_p)
* (self.N_p2 + 2 * self.N_ghost_p)
* (self.N_p3 + 2 * self.N_ghost_p),
self.N_species,
(N_q1_local + 2 * self.N_ghost_q),
(N_q2_local + 2 * self.N_ghost_q)
)
af.eval(array)
return (array)
def _convert_to_p_expanded(self, array):
"""
Since we are limited to use 4D arrays due to
the bound from ArrayFire, we define 2 forms
        which can be used such that the computations may be
        carried out along all dimensions necessary:
q_expanded form:(N_p1 * N_p2 * N_p3, N_s, N_q1, N_q2)
p_expanded form:(N_p1, N_p2, N_p3, N_s * N_q1 * N_q2)
This function converts the input array from
q_expanded to p_expanded form.
"""
# Obtaining start coordinates for the local zone
# Additionally, we also obtain the size of the local zone
((i_q1_start, i_q2_start), (N_q1_local, N_q2_local)) = self._da_f.getCorners()
array = af.moddims(array,
self.N_p1 + 2 * self.N_ghost_p,
self.N_p2 + 2 * self.N_ghost_p,
self.N_p3 + 2 * self.N_ghost_p,
self.N_species
* (N_q1_local + 2 * self.N_ghost_q)
* (N_q2_local + 2 * self.N_ghost_q)
)
af.eval(array)
return (array)
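    # A concrete shape sketch (illustrative numbers, ghost zones ignored):
    # with N_p1 = N_p2 = N_p3 = 32, N_species = 2 and a 16 x 16 local q-grid,
    # the same buffer is viewed either as
    #     q_expanded: (32 * 32 * 32, 2, 16, 16)
    #     p_expanded: (32, 32, 32, 2 * 16 * 16)
    # i.e. moments reduce along axis 0 of the q_expanded form, while terms
    # acting in momentum space address the leading three axes of the
    # p_expanded form.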
def _calculate_q_center(self):
"""
        Initializes the canonical variables q1, q2 using a centered
formulation. The size, and resolution are the same as declared
under domain of the physical system object.
Returns in q_expanded form.
"""
# Obtaining start coordinates for the local zone
# Additionally, we also obtain the size of the local zone
((i_q1_start, i_q2_start), (N_q1_local, N_q2_local)) = self._da_f.getCorners()
i_q1_center = i_q1_start + 0.5
i_q2_center = i_q2_start + 0.5
i_q1 = ( i_q1_center
+ np.arange(-self.N_ghost_q, N_q1_local + self.N_ghost_q)
)
i_q2 = ( i_q2_center
+ np.arange(-self.N_ghost_q, N_q2_local + self.N_ghost_q)
)
q1_center = self.q1_start + i_q1 * self.dq1
q2_center = self.q2_start + i_q2 * self.dq2
q2_center, q1_center = np.meshgrid(q2_center, q1_center)
q1_center, q2_center = af.to_array(q1_center), af.to_array(q2_center)
# To bring the data structure to the default form:(N_p, N_s, N_q1, N_q2)
q1_center = af.reorder(q1_center, 3, 2, 0, 1)
q2_center = af.reorder(q2_center, 3, 2, 0, 1)
af.eval(q1_center, q2_center)
return (q1_center, q2_center)
def _calculate_p_center(self):
"""
        Initializes the canonical variables p1, p2 and p3 using a centered
formulation. The size, and resolution are the same as declared
under domain of the physical system object.
"""
p1_center = self.p1_start + (0.5 + np.arange(-self.N_ghost_p,
self.N_p1 + self.N_ghost_p
)
) * self.dp1
p2_center = self.p2_start + (0.5 + np.arange(-self.N_ghost_p,
self.N_p2 + self.N_ghost_p
)
) * self.dp2
p3_center = self.p3_start + (0.5 + np.arange(-self.N_ghost_p,
self.N_p3 + self.N_ghost_p
)
) * self.dp3
p2_center, p1_center, p3_center = np.meshgrid(p2_center,
p1_center,
p3_center
)
# Flattening the arrays:
p1_center = af.flat(af.to_array(p1_center))
p2_center = af.flat(af.to_array(p2_center))
p3_center = af.flat(af.to_array(p3_center))
if(self.N_species > 1):
p1_center = af.tile(p1_center, 1, self.N_species)
p2_center = af.tile(p2_center, 1, self.N_species)
p3_center = af.tile(p3_center, 1, self.N_species)
af.eval(p1_center, p2_center, p3_center)
return (p1_center, p2_center, p3_center)
def _calculate_p_left(self):
p1_left = self.p1_start + np.arange(-self.N_ghost_p,
self.N_p1 + self.N_ghost_p
) * self.dp1
p2_center = self.p2_start + (0.5 + np.arange(-self.N_ghost_p,
self.N_p2 + self.N_ghost_p
)
) * self.dp2
p3_center = self.p3_start + (0.5 + np.arange(-self.N_ghost_p,
self.N_p3 + self.N_ghost_p
)
) * self.dp3
p2_left, p1_left, p3_left = np.meshgrid(p2_center,
p1_left,
p3_center
)
# Flattening the arrays:
p1_left = af.flat(af.to_array(p1_left))
p2_left = af.flat(af.to_array(p2_left))
p3_left = af.flat(af.to_array(p3_left))
if(self.N_species > 1):
p1_left = af.tile(p1_left, 1, self.N_species)
p2_left = af.tile(p2_left, 1, self.N_species)
p3_left = af.tile(p3_left, 1, self.N_species)
af.eval(p1_left, p2_left, p3_left)
return (p1_left, p2_left, p3_left)
def _calculate_p_bottom(self):
p1_center = self.p1_start + (0.5 + np.arange(-self.N_ghost_p,
self.N_p1 + self.N_ghost_p
)
) * self.dp1
p2_bottom = self.p2_start + np.arange(-self.N_ghost_p,
self.N_p2 + self.N_ghost_p
) * self.dp2
p3_center = self.p3_start + (0.5 + np.arange(-self.N_ghost_p,
self.N_p3 + self.N_ghost_p
)
) * self.dp3
p2_bottom, p1_bottom, p3_bottom = np.meshgrid(p2_bottom,
p1_center,
p3_center
)
# Flattening the arrays:
p1_bottom = af.flat(af.to_array(p1_bottom))
p2_bottom = af.flat(af.to_array(p2_bottom))
p3_bottom = af.flat(af.to_array(p3_bottom))
if(self.N_species > 1):
p1_bottom = af.tile(p1_bottom, 1, self.N_species)
p2_bottom = af.tile(p2_bottom, 1, self.N_species)
p3_bottom = af.tile(p3_bottom, 1, self.N_species)
af.eval(p1_bottom, p2_bottom, p3_bottom)
return (p1_bottom, p2_bottom, p3_bottom)
def _calculate_p_back(self):
p1_center = self.p1_start + (0.5 + np.arange(-self.N_ghost_p,
self.N_p1 + self.N_ghost_p
)
) * self.dp1
p2_center = self.p2_start + (0.5 + np.arange(-self.N_ghost_p,
self.N_p2 + self.N_ghost_p
)
) * self.dp2
p3_back = self.p3_start + np.arange(-self.N_ghost_p,
self.N_p3 + self.N_ghost_p
) * self.dp3
        p2_back, p1_back, p3_back = np.meshgrid(p2_center,
                                                 p1_center,
                                                 p3_back
                                                 )
# Flattening the arrays:
p1_back = af.flat(af.to_array(p1_back))
p2_back = af.flat(af.to_array(p2_back))
p3_back = af.flat(af.to_array(p3_back))
if(self.N_species > 1):
p1_back = af.tile(p1_back, 1, self.N_species)
p2_back = af.tile(p2_back, 1, self.N_species)
p3_back = af.tile(p3_back, 1, self.N_species)
af.eval(p1_back, p2_back, p3_back)
return (p1_back, p2_back, p3_back)
def _initialize(self, params):
"""
Called when the solver object is declared. This function is
used to initialize the distribution function, using the options
as provided by the user.
Parameters
----------
params : file/object
params contains all details of which methods to use
in addition to useful physical constant. Additionally,
it can also be used to inject methods which need to be
used inside some solver routine
"""
# Initializing with the provided I.C's:
# af.broadcast, allows us to perform batched operations
# when operating on arrays of different sizes
# af.broadcast(function, *args) performs batched
# operations on function(*args)
self.f = af.broadcast(self.physical_system.initial_conditions.\
initialize_f, self.q1_center, self.q2_center,
self.p1_center, self.p2_center, self.p3_center, params
)
self.f_initial = self.f
if(self.physical_system.params.EM_fields_enabled):
rho_initial = multiply(self.physical_system.params.charge,
self.compute_moments('density')
)
self.fields_solver = fields_solver(self.N_q1, self.N_q2, self.N_ghost_q,
self.q1_center, self.q2_center,
self.dq1, self.dq2, self._comm,
self.boundary_conditions,
self.physical_system.params,
rho_initial, self.performance_test_flag
)
# Injection of solver functions into class as methods:
_communicate_f = communicate.\
communicate_f
_apply_bcs_f = boundaries.apply_bcs_f
strang_timestep = timestep.strang_step
lie_timestep = timestep.lie_step
swss_timestep = timestep.swss_step
jia_timestep = timestep.jia_step
compute_moments = compute_moments_imported
dump_distribution_function = dump.dump_distribution_function
dump_moments = dump.dump_moments
dump_EM_fields = dump.dump_EM_fields
load_distribution_function = load.load_distribution_function
load_EM_fields = load.load_EM_fields
print_performance_timings = print_table
| ShyamSS-95/Bolt | bolt/lib/nonlinear/nonlinear_solver.py | Python | gpl-3.0 | 29,921 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras callbacks in multi-worker training with TF2."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
from absl.testing import parameterized
from tensorflow.python.distribute import collective_all_reduce_strategy as collective_strategy
from tensorflow.python.distribute import combinations as ds_combinations
from tensorflow.python.distribute import distributed_file_utils
from tensorflow.python.distribute import multi_process_runner
from tensorflow.python.distribute import multi_worker_test_base as test_base
from tensorflow.python.framework import test_combinations as combinations
from tensorflow.python.keras import callbacks
from tensorflow.python.keras.distribute import multi_worker_testing_utils
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import test
def checkpoint_exists(filepath):
"""Returns whether the checkpoint `filepath` refers to exists."""
if filepath.endswith('.h5'):
return file_io.file_exists_v2(filepath)
tf_saved_model_exists = file_io.file_exists_v2(filepath)
tf_weights_only_checkpoint_exists = file_io.file_exists_v2(
filepath + '.index')
return tf_saved_model_exists or tf_weights_only_checkpoint_exists
def _model_setup(test_obj, file_format):
"""Set up a MNIST Keras model for testing purposes.
This function builds a MNIST Keras model and returns relevant information
for testing.
Args:
test_obj: The `TestCase` testing object.
file_format: File format for checkpoints. 'tf' or 'h5'.
Returns:
A tuple of (model, saving_filepath, train_ds, steps) where train_ds is
the training dataset.
"""
batch_size = 64
steps = 2
with collective_strategy.CollectiveAllReduceStrategy().scope():
# TODO(b/142509827): In rare cases this errors out at C++ level with the
# "Connect failed" error message.
train_ds, _ = multi_worker_testing_utils.mnist_synthetic_dataset(
batch_size, steps)
model = multi_worker_testing_utils.get_mnist_model((28, 28, 1))
# Pass saving_filepath from the parent thread to ensure every worker has the
# same filepath to save.
saving_filepath = os.path.join(test_obj.get_temp_dir(),
'checkpoint.' + file_format)
return model, saving_filepath, train_ds, steps
def _get_task_config():
return json.loads(os.environ['TF_CONFIG'])['task']
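# For reference (illustrative values only; the tests set this up through
# `multi_process_runner`): TF_CONFIG for worker 0 of a two-worker cluster
# looks roughly like
#   {"cluster": {"worker": ["host0:12345", "host1:12345"]},
#    "task": {"type": "worker", "index": 0}}
# so `_get_task_config()` returns the {"type": ..., "index": ...} part.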
class KerasCallbackMultiProcessTest(parameterized.TestCase, test.TestCase):
@ds_combinations.generate(
combinations.combine(
mode=['eager'],
file_format=['h5', 'tf'],
save_weights_only=[True, False]))
def test_model_checkpoint_saves_on_chief_but_not_otherwise(
self, file_format, mode, save_weights_only):
def proc_model_checkpoint_saves_on_chief_but_not_otherwise(
test_obj, file_format):
model, saving_filepath, train_ds, steps = _model_setup(
test_obj, file_format)
num_epoch = 2
extension = os.path.splitext(saving_filepath)[1]
# Incorporate type/index information and thread id in saving_filepath to
# ensure every worker has a unique path. Note that in normal use case the
# saving_filepath will be the same for all workers, but we use different
# ones here just to test out chief saves checkpoint but non-chief doesn't.
task_config = _get_task_config()
saving_filepath = os.path.join(
test_obj.get_temp_dir(), 'checkpoint_%s_%d%s' %
(task_config['type'], task_config['index'], extension))
# The saving_filepath shouldn't exist at the beginning (as it's unique).
test_obj.assertFalse(checkpoint_exists(saving_filepath))
model.fit(
x=train_ds,
epochs=num_epoch,
steps_per_epoch=steps,
validation_data=train_ds,
validation_steps=steps,
callbacks=[
callbacks.ModelCheckpoint(
filepath=saving_filepath, save_weights_only=save_weights_only)
])
# If it's chief, the model should be saved; if not, the model shouldn't.
test_obj.assertEqual(
checkpoint_exists(saving_filepath), test_base.is_chief())
# If it's chief, the model should be saved (`write_filepath` should
# simply return `saving_filepath`); if not, i.e. for non-chief workers,
# the temporary path generated by `write_filepath` should no longer
# contain the checkpoint that has been deleted.
test_obj.assertEqual(
checkpoint_exists(
distributed_file_utils.write_filepath(
saving_filepath, model._distribution_strategy)),
test_base.is_chief())
multi_process_runner.run(
proc_model_checkpoint_saves_on_chief_but_not_otherwise,
cluster_spec=test_base.create_cluster_spec(num_workers=2),
args=(self, file_format))
@ds_combinations.generate(combinations.combine(mode=['eager']))
def test_model_checkpoint_works_with_same_file_path(self, mode):
def proc_model_checkpoint_works_with_same_file_path(
test_obj, saving_filepath):
model, _, train_ds, steps = _model_setup(test_obj, file_format='')
num_epoch = 2
# The saving_filepath shouldn't exist at the beginning (as it's unique).
test_obj.assertFalse(file_io.file_exists_v2(saving_filepath))
model.fit(
x=train_ds,
epochs=num_epoch,
steps_per_epoch=steps,
callbacks=[callbacks.ModelCheckpoint(filepath=saving_filepath)])
test_obj.assertTrue(file_io.file_exists_v2(saving_filepath))
saving_filepath = os.path.join(self.get_temp_dir(), 'checkpoint')
multi_process_runner.run(
proc_model_checkpoint_works_with_same_file_path,
cluster_spec=test_base.create_cluster_spec(num_workers=2),
args=(self, saving_filepath))
@ds_combinations.generate(combinations.combine(mode=['eager']))
def test_backupandrestore_checkpoint_works_with_interruption(self, mode):
class InterruptingCallback(callbacks.Callback):
def on_epoch_begin(self, epoch, logs=None):
if epoch == 2:
raise RuntimeError('Interrupting!')
class AssertCallback(callbacks.Callback):
def on_epoch_begin(self, epoch, logs=None):
# the interruption happened on epoch 2 as specified in
# InterruptingCallback, so the initial epoch after restart will begin
# at 2.
assert epoch > 1
def proc_model_checkpoint_works_with_same_file_path(test_obj,
saving_filepath):
model, _, train_ds, steps = _model_setup(test_obj, file_format='')
num_epoch = 4
# The saving_filepath shouldn't exist at the beginning (as it's unique).
test_obj.assertFalse(file_io.file_exists_v2(saving_filepath))
bar_dir = os.path.join(os.path.dirname(saving_filepath), 'backup')
try:
model.fit(
x=train_ds,
epochs=num_epoch,
steps_per_epoch=steps,
callbacks=[
callbacks.ModelCheckpoint(filepath=saving_filepath),
callbacks.BackupAndRestore(backup_dir=bar_dir),
InterruptingCallback()
])
except RuntimeError as e:
if 'Interrupting!' not in str(e):
raise
multi_process_runner.barrier().wait()
backup_filepath = os.path.join(bar_dir, 'checkpoint')
test_obj.assertTrue(file_io.file_exists_v2(backup_filepath))
test_obj.assertTrue(file_io.file_exists_v2(saving_filepath))
model.fit(
x=train_ds,
epochs=num_epoch,
steps_per_epoch=steps,
callbacks=[
callbacks.ModelCheckpoint(filepath=saving_filepath),
callbacks.BackupAndRestore(backup_dir=bar_dir),
AssertCallback()
])
multi_process_runner.barrier().wait()
test_obj.assertFalse(file_io.file_exists_v2(backup_filepath))
test_obj.assertTrue(file_io.file_exists_v2(saving_filepath))
saving_filepath = os.path.join(self.get_temp_dir(), 'checkpoint')
multi_process_runner.run(
proc_model_checkpoint_works_with_same_file_path,
cluster_spec=test_base.create_cluster_spec(num_workers=2),
args=(self, saving_filepath))
@ds_combinations.generate(combinations.combine(mode=['eager']))
def test_tensorboard_saves_on_chief_but_not_otherwise(self, mode):
def proc_tensorboard_saves_on_chief_but_not_otherwise(test_obj):
model, _, train_ds, steps = _model_setup(test_obj, file_format='')
num_epoch = 2
# Incorporate type/index information and thread id in saving_filepath to
# ensure every worker has a unique path. Note that in normal use case the
# saving_filepath will be the same for all workers, but we use different
# ones here just to test out chief saves summaries but non-chief doesn't.
task_config = _get_task_config()
saving_filepath = os.path.join(
test_obj.get_temp_dir(),
'logfile_%s_%d' % (task_config['type'], task_config['index']))
# The saving_filepath shouldn't exist at the beginning (as it's unique).
test_obj.assertFalse(file_io.file_exists_v2(saving_filepath))
model.fit(
x=train_ds,
epochs=num_epoch,
steps_per_epoch=steps,
callbacks=[callbacks.TensorBoard(log_dir=saving_filepath)])
# If it's chief, the summaries should be saved in the filepath; if not,
# the directory should be empty (although created). Using
# `file_io.list_directory()` since the directory may be created at this
# point.
test_obj.assertEqual(
bool(file_io.list_directory_v2(saving_filepath)),
test_base.is_chief())
multi_process_runner.run(
proc_tensorboard_saves_on_chief_but_not_otherwise,
cluster_spec=test_base.create_cluster_spec(num_workers=2),
args=(self,))
@ds_combinations.generate(combinations.combine(mode=['eager']))
def test_tensorboard_can_still_save_to_temp_even_if_it_exists(self, mode):
def proc_tensorboard_can_still_save_to_temp_even_if_it_exists(test_obj):
model, _, train_ds, steps = _model_setup(test_obj, file_format='')
num_epoch = 2
saving_filepath = os.path.join(
test_obj.get_temp_dir(), 'logfile_%s' % (_get_task_config()['type']))
saving_filepath_for_temp = os.path.join(saving_filepath, 'workertemp_1')
os.mkdir(saving_filepath)
os.mkdir(saving_filepath_for_temp)
# Verifies that even if `saving_filepath_for_temp` exists, tensorboard
# can still save to temporary directory.
test_obj.assertTrue(file_io.file_exists_v2(saving_filepath_for_temp))
model.fit(
x=train_ds,
epochs=num_epoch,
steps_per_epoch=steps,
callbacks=[callbacks.TensorBoard(log_dir=saving_filepath)])
multi_process_runner.run(
proc_tensorboard_can_still_save_to_temp_even_if_it_exists,
cluster_spec=test_base.create_cluster_spec(num_workers=2),
args=(self,))
@ds_combinations.generate(combinations.combine(mode=['eager']))
def test_tensorboard_works_with_same_file_path(self, mode):
def proc_tensorboard_works_with_same_file_path(test_obj, saving_filepath):
model, _, train_ds, steps = _model_setup(test_obj, file_format='')
num_epoch = 2
# The saving_filepath shouldn't exist at the beginning (as it's unique).
test_obj.assertFalse(file_io.file_exists_v2(saving_filepath))
multi_process_runner.barrier().wait()
model.fit(
x=train_ds,
epochs=num_epoch,
steps_per_epoch=steps,
callbacks=[callbacks.TensorBoard(log_dir=saving_filepath)])
multi_process_runner.barrier().wait()
test_obj.assertTrue(file_io.list_directory_v2(saving_filepath))
saving_filepath = os.path.join(self.get_temp_dir(), 'logfile')
multi_process_runner.run(
proc_tensorboard_works_with_same_file_path,
cluster_spec=test_base.create_cluster_spec(num_workers=2),
args=(self, saving_filepath))
@ds_combinations.generate(combinations.combine(mode=['eager']))
def test_early_stopping(self, mode):
def proc_early_stopping(test_obj):
class EpochCounterCallback(callbacks.Callback):
def on_epoch_begin(self, epoch, logs):
self.last_epoch = epoch
model, _, train_ds, steps = _model_setup(test_obj, file_format='')
epoch_counter_cbk = EpochCounterCallback()
cbks = [
callbacks.EarlyStopping(
monitor='loss', min_delta=0.05, patience=1, verbose=1),
epoch_counter_cbk
]
      # Empirically, it is expected that `model.fit()` terminates around the
      # 22nd epoch. Asserting that it should have been stopped before the 50th
      # epoch to avoid flakiness and be more predictable.
model.fit(x=train_ds, epochs=100, steps_per_epoch=steps, callbacks=cbks)
test_obj.assertLess(epoch_counter_cbk.last_epoch, 50)
multi_process_runner.run(
proc_early_stopping,
cluster_spec=test_base.create_cluster_spec(num_workers=2),
args=(self,))
if __name__ == '__main__':
multi_process_runner.test_main()
| davidzchen/tensorflow | tensorflow/python/keras/distribute/multi_worker_callback_tf2_test.py | Python | apache-2.0 | 14,004 |
# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
#
# Author: Sylvain Afchain <sylvain.afchain@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from neutron.agent.common import config
from neutron.agent.linux import interface
from neutron.agent.linux import iptables_manager
from neutron.common import constants as constants
from neutron.common import log
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.services.metering.drivers import abstract_driver
LOG = logging.getLogger(__name__)
NS_PREFIX = 'qrouter-'
WRAP_NAME = 'neutron-meter'
EXTERNAL_DEV_PREFIX = 'qg-'
TOP_CHAIN = WRAP_NAME + "-FORWARD"
RULE = '-r-'
LABEL = '-l-'
IptablesDriverOpts = [
cfg.StrOpt('interface_driver',
help=_("The driver used to manage the virtual "
"interface.")),
cfg.BoolOpt('use_namespaces', default=True,
help=_("Allow overlapping IP."))
]
config.register_root_helper(cfg.CONF)
cfg.CONF.register_opts(interface.OPTS)
cfg.CONF.register_opts(IptablesDriverOpts)
class IptablesManagerTransaction(object):
__transactions = {}
def __init__(self, im):
self.im = im
transaction = self.__transactions.get(im, 0)
transaction += 1
self.__transactions[im] = transaction
def __enter__(self):
return self.im
def __exit__(self, type, value, traceback):
transaction = self.__transactions.get(self.im)
if transaction == 1:
self.im.apply()
del self.__transactions[self.im]
else:
transaction -= 1
self.__transactions[self.im] = transaction
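# Usage sketch (illustrative): nested transactions on the same manager are
# reference-counted, so the queued iptables changes are applied exactly once,
# when the outermost context exits.
#
#     with IptablesManagerTransaction(im):
#         with IptablesManagerTransaction(im):
#             pass  # add/remove chains and rules here
#     # im.apply() runs here, once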
class RouterWithMetering(object):
def __init__(self, conf, router):
self.conf = conf
self.id = router['id']
self.router = router
self.root_helper = config.get_root_helper(self.conf)
self.iptables_manager = iptables_manager.IptablesManager(
root_helper=self.conf.root_helper,
namespace=self.ns_name(),
binary_name=WRAP_NAME)
self.metering_labels = {}
def ns_name(self):
if self.conf.use_namespaces:
return NS_PREFIX + self.router['id']
class IptablesMeteringDriver(abstract_driver.MeteringAbstractDriver):
def __init__(self, plugin, conf):
self.plugin = plugin
self.conf = conf or cfg.CONF
self.routers = {}
if not self.conf.interface_driver:
raise SystemExit(_('An interface driver must be specified'))
LOG.info(_("Loading interface driver %s"), self.conf.interface_driver)
self.driver = importutils.import_object(self.conf.interface_driver,
self.conf)
def _update_router(self, router):
r = self.routers.get(router['id'],
RouterWithMetering(self.conf, router))
r.router = router
self.routers[r.id] = r
return r
@log.log
def update_routers(self, context, routers):
# disassociate removed routers
router_ids = [router['id'] for router in routers]
        for router_id, rm in self.routers.items():
            if router_id not in router_ids:
                self._process_disassociate_metering_label(rm.router)
for router in routers:
old_gw_port_id = None
old_rm = self.routers.get(router['id'])
if old_rm:
old_gw_port_id = old_rm.router['gw_port_id']
gw_port_id = router['gw_port_id']
if gw_port_id != old_gw_port_id:
if old_rm:
with IptablesManagerTransaction(old_rm.iptables_manager):
self._process_disassociate_metering_label(router)
if gw_port_id:
self._process_associate_metering_label(router)
elif gw_port_id:
self._process_associate_metering_label(router)
@log.log
def remove_router(self, context, router_id):
if router_id in self.routers:
del self.routers[router_id]
def get_external_device_name(self, port_id):
return (EXTERNAL_DEV_PREFIX + port_id)[:self.driver.DEV_NAME_LEN]
def _process_metering_label_rules(self, rm, rules, label_chain,
rules_chain):
im = rm.iptables_manager
ext_dev = self.get_external_device_name(rm.router['gw_port_id'])
if not ext_dev:
return
for rule in rules:
remote_ip = rule['remote_ip_prefix']
dir = '-i ' + ext_dev
if rule['direction'] == 'egress':
dir = '-o ' + ext_dev
if rule['excluded'] == 'true':
ipt_rule = dir + ' -d ' + remote_ip + ' -j RETURN'
im.ipv4['filter'].add_rule(rules_chain, ipt_rule, wrap=False,
top=True)
else:
ipt_rule = dir + ' -d ' + remote_ip + ' -j ' + label_chain
im.ipv4['filter'].add_rule(rules_chain, ipt_rule,
wrap=False, top=False)
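    # For example (illustrative ids; chain names are truncated to iptables
    # limits), an egress, non-excluded rule for 10.0.0.0/24 renders roughly as
    #     -o qg-<gw port prefix> -d 10.0.0.0/24 -j neutron-meter-l-<label id>
    # while an excluded rule short-circuits the chain with -j RETURN.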
def _process_associate_metering_label(self, router):
self._update_router(router)
rm = self.routers.get(router['id'])
with IptablesManagerTransaction(rm.iptables_manager):
labels = router.get(constants.METERING_LABEL_KEY, [])
for label in labels:
label_id = label['id']
label_chain = iptables_manager.get_chain_name(WRAP_NAME +
LABEL + label_id,
wrap=False)
rm.iptables_manager.ipv4['filter'].add_chain(label_chain,
wrap=False)
rules_chain = iptables_manager.get_chain_name(WRAP_NAME +
RULE + label_id,
wrap=False)
rm.iptables_manager.ipv4['filter'].add_chain(rules_chain,
wrap=False)
rm.iptables_manager.ipv4['filter'].add_rule(TOP_CHAIN, '-j ' +
rules_chain,
wrap=False)
rm.iptables_manager.ipv4['filter'].add_rule(label_chain,
'',
wrap=False)
rules = label.get('rules')
if rules:
self._process_metering_label_rules(rm, rules,
label_chain,
rules_chain)
rm.metering_labels[label_id] = label
def _process_disassociate_metering_label(self, router):
rm = self.routers.get(router['id'])
if not rm:
return
with IptablesManagerTransaction(rm.iptables_manager):
labels = router.get(constants.METERING_LABEL_KEY, [])
for label in labels:
label_id = label['id']
if label_id not in rm.metering_labels:
continue
label_chain = iptables_manager.get_chain_name(WRAP_NAME +
LABEL + label_id,
wrap=False)
rules_chain = iptables_manager.get_chain_name(WRAP_NAME +
RULE + label_id,
wrap=False)
rm.iptables_manager.ipv4['filter'].remove_chain(label_chain,
wrap=False)
rm.iptables_manager.ipv4['filter'].remove_chain(rules_chain,
wrap=False)
del rm.metering_labels[label_id]
@log.log
def add_metering_label(self, context, routers):
for router in routers:
self._process_associate_metering_label(router)
@log.log
def update_metering_label_rules(self, context, routers):
for router in routers:
self._update_metering_label_rules(router)
def _update_metering_label_rules(self, router):
rm = self.routers.get(router['id'])
if not rm:
return
with IptablesManagerTransaction(rm.iptables_manager):
labels = router.get(constants.METERING_LABEL_KEY, [])
for label in labels:
label_id = label['id']
label_chain = iptables_manager.get_chain_name(WRAP_NAME +
LABEL + label_id,
wrap=False)
rules_chain = iptables_manager.get_chain_name(WRAP_NAME +
RULE + label_id,
wrap=False)
rm.iptables_manager.ipv4['filter'].empty_chain(rules_chain,
wrap=False)
rules = label.get('rules')
if rules:
self._process_metering_label_rules(rm, rules,
label_chain,
rules_chain)
@log.log
def remove_metering_label(self, context, routers):
for router in routers:
self._process_disassociate_metering_label(router)
@log.log
def get_traffic_counters(self, context, routers):
accs = {}
for router in routers:
rm = self.routers.get(router['id'])
if not rm:
continue
for label_id, label in rm.metering_labels.items():
chain = iptables_manager.get_chain_name(WRAP_NAME + LABEL +
label_id, wrap=False)
chain_acc = rm.iptables_manager.get_traffic_counters(
chain, wrap=False, zero=True)
if not chain_acc:
continue
acc = accs.get(label_id, {'pkts': 0, 'bytes': 0})
acc['pkts'] += chain_acc['pkts']
acc['bytes'] += chain_acc['bytes']
accs[label_id] = acc
return accs
| netscaler/neutron | neutron/services/metering/drivers/iptables/iptables_driver.py | Python | apache-2.0 | 11,352 |
from __future__ import unicode_literals
from rbpkg.utils.matches import matches_current_system, matches_version_range
class PackageRules(object):
"""A set of rules for installing and managing packages.
The rules provide rbpkg with the information needed to install or manage
packages, and to handle non-Python dependencies or to replace packages
with other alternatives.
Each rule may match one or more versions by specifying a version range.
Attributes:
        channel (rbpkg.repository.package_channel.PackageChannel):
            The channel these rules are a part of.
version_range (unicode):
The version range that these rules apply to, or ``*`` to match
all versions.
package_type (unicode):
The type of package. Must be one of
:py:attr:`PACKAGE_TYPE_DEB`, :py:attr:`PACKAGE_TYPE_PYTHON`,
:py:attr:`PACKAGE_TYPE_RPM`, or :py:attr:`PACKAGE_TYPE_SOURCE`.
package_name (unicode):
The name of the package in the package manager.
systems (list of unicode):
A list of systems that these rules apply to. The special value
of ``*`` matches all systems.
Valid entries are "macosx", "windows", or any Linux distribution
matching the result of :py:func:`platform.dist`.
required_dependencies (list of unicode):
A list of package bundle names that this depends on.
recommended_dependencies (list of unicode):
A list of package bundle names that this recommends.
optional_dependencies (list of unicode):
A list of package bundle names that are optional dependencies.
replaces (list of unicode):
A list of package bundle names that this package replaces.
pre_install_commands (list of unicode):
A list of shell commands to perform prior to installation.
install_commands (list of unicode):
A list of shell commands to perform for installation. If not
set, the native package manager for this package type will be
used to install the given package.
post_install_commands (list of unicode):
A list of shell commands to perform after installation.
install_flags (list of unicode):
A list of flags to pass to the native package manager.
uninstall_commands (list of unicode):
A list of shell commands to perform for uninstallation. If not
set, the native package manager for this package type will be
used to uninstall the given package.
"""
#: Python packages (eggs or wheels).
PACKAGE_TYPE_PYTHON = 'python'
#: RPM packages.
PACKAGE_TYPE_RPM = 'rpm'
#: Debian packages.
PACKAGE_TYPE_DEB = 'deb'
#: Source installs.
PACKAGE_TYPE_SOURCE = 'source'
@classmethod
def deserialize(cls, channel, data):
"""Deserialize a payload into a PackageRules.
Args:
channel (rbpkg.repository.package_channel.PackageChannel):
The channel that contains this set of rules.
data (dict):
The JSON dictionary data for the rules definitions.
Returns:
PackageRules:
The resulting package rules.
"""
deps = data.get('dependencies', {})
return PackageRules(
channel,
version_range=data['version_range'],
package_type=data['package_type'],
package_name=data.get('package_name'),
systems=data['systems'],
required_dependencies=deps.get('required'),
recommended_dependencies=deps.get('recommended'),
optional_dependencies=deps.get('optional'),
replaces=data.get('replaces'),
pre_install_commands=data.get('pre_install_commands'),
install_commands=data.get('install_commands'),
post_install_commands=data.get('post_install_commands'),
install_flags=data.get('install_flags'),
uninstall_commands=data.get('uninstall_commands'))
def __init__(self, channel, version_range=None, package_type=None,
package_name=None, systems=[], required_dependencies=[],
recommended_dependencies=[], optional_dependencies=[],
replaces=[], pre_install_commands=[], install_commands=[],
post_install_commands=[], install_flags=[],
uninstall_commands=[]):
"""Initialize the package rules.
Args:
channel (rbpkg.repository.package_channel.PackageChannel):
The channel that contains this set of rules.
version_range (unicode):
The version range that these rules apply to, or ``*`` to match
all versions.
package_type (unicode):
The type of package. Must be one of
:py:attr:`PACKAGE_TYPE_DEB`, :py:attr:`PACKAGE_TYPE_PYTHON`,
:py:attr:`PACKAGE_TYPE_RPM`, or :py:attr:`PACKAGE_TYPE_SOURCE`.
package_name (unicode):
The name of the package in the package manager.
systems (list of unicode):
A list of systems that these rules apply to. The special value
of ``*`` matches all systems.
Valid entries are "macosx", "windows", or any Linux
distribution matching the result of :py:func:`platform.dist`.
required_dependencies (list of unicode):
A list of package bundle names that this depends on.
recommended_dependencies (list of unicode):
A list of package bundle names that this recommends.
optional_dependencies (list of unicode):
A list of package bundle names that are optional dependencies.
replaces (list of unicode):
A list of package bundle names that this package replaces.
pre_install_commands (list of unicode):
A list of shell commands to perform prior to installation.
install_commands (list of unicode):
A list of shell commands to perform for installation. If not
set, the native package manager for this package type will be
used to install the given package.
post_install_commands (list of unicode):
A list of shell commands to perform after installation.
install_flags (list of unicode):
A list of flags to pass to the native package manager.
uninstall_commands (list of unicode):
A list of shell commands to perform for uninstallation. If not
set, the native package manager for this package type will be
used to uninstall the given package.
"""
self.channel = channel
self.version_range = version_range
self.package_type = package_type
self.package_name = package_name
self.systems = systems or []
self.required_dependencies = required_dependencies or []
self.recommended_dependencies = recommended_dependencies or []
self.optional_dependencies = optional_dependencies or []
self.replaces = replaces or []
self.pre_install_commands = pre_install_commands or []
self.install_commands = install_commands or []
self.post_install_commands = post_install_commands or []
self.install_flags = install_flags or []
self.uninstall_commands = uninstall_commands or []
def matches_version(self, version, require_current_system=True):
"""Return whether these rules match the given version.
By default, this will also check if it matches the current system.
Args:
version (unicode):
The version to restrict rules to.
require_current_system (bool):
If set, only rules valid for the current system will be
returned.
Returns:
bool:
``True`` if this set of rules matches the given criteria.
"""
version_range = self.package_name + self.version_range
return ((self.version_range == '*' or
matches_version_range(version, version_range)) and
(not require_current_system or
matches_current_system(self.systems)))
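    # Note that unless version_range is '*', the package name is prepended
    # before matching, so matches_version_range() presumably receives a full
    # requirement string such as "example-package>=1.0,<2.0" (hypothetical
    # values) to test the given version against.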
def serialize(self):
"""Serialize the package rules into a JSON-serializable format.
The resulting output can be embedded into the channel data.
Returns:
dict:
The serialized package rules data.
"""
deps = {}
if self.required_dependencies:
deps['required'] = self.required_dependencies
if self.recommended_dependencies:
deps['recommended'] = self.recommended_dependencies
if self.optional_dependencies:
deps['optional'] = self.optional_dependencies
data = {
'version_range': self.version_range,
'package_type': self.package_type,
'package_name': self.package_name,
'systems': self.systems,
}
if deps:
data['dependencies'] = deps
optional_fields = (
('replaces', self.replaces),
('pre_install_commands', self.pre_install_commands),
('install_commands', self.install_commands),
('post_install_commands', self.post_install_commands),
('install_flags', self.install_flags),
('uninstall_commands', self.uninstall_commands),
)
for field_name, value in optional_fields:
if value:
data[field_name] = value
return data
def __repr__(self):
return (
            '<PackageRules(version_range=%s; package_type=%s; '
            'package_name=%s)>'
% (self.version_range, self.package_type, self.package_name)
)
| reviewboard/rbpkg | rbpkg/repository/package_rules.py | Python | mit | 10,097 |
# Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""Abstract class for AES."""
class AES(object):
def __init__(self, key, mode, IV, implementation):
if len(key) not in (16, 24, 32):
raise AssertionError()
        # Only CBC mode is supported here; 2 matches PyCrypto's MODE_CBC
        # constant.
        if mode != 2:
            raise AssertionError()
if len(IV) != 16:
raise AssertionError()
self.isBlockCipher = True
self.block_size = 16
self.implementation = implementation
if len(key)==16:
self.name = "aes128"
elif len(key)==24:
self.name = "aes192"
elif len(key)==32:
self.name = "aes256"
else:
raise AssertionError()
#CBC-Mode encryption, returns ciphertext
#WARNING: *MAY* modify the input as well
def encrypt(self, plaintext):
assert(len(plaintext) % 16 == 0)
#CBC-Mode decryption, returns plaintext
#WARNING: *MAY* modify the input as well
def decrypt(self, ciphertext):
assert(len(ciphertext) % 16 == 0) | valurhrafn/chrome-sync-server | tlslite/utils/aes.py | Python | mit | 1,064 |
# -*- coding: utf-8 -*-
from django.contrib.auth import authenticate, login, logout
from django.core.exceptions import ValidationError
from django.shortcuts import redirect, render
from django.utils.translation import ugettext_lazy as _
from django.views import generic
from account.forms import SignInForm, SignUpForm
class SignInFormView(generic.FormView):
"""
Represents the 'Sign In' view.
"""
form_class = SignInForm
success_url = '/'
template_name = 'clintya_site/account/signin.html'
def form_valid(self, form):
"""
Authenticates and login the user only if it is active.
"""
email = form.cleaned_data['email']
password = form.cleaned_data['password']
user = authenticate(username=email, password=password)
if user is not None:
if user.is_active:
login(self.request, user)
return super(SignInFormView, self).form_valid(form)
else:
form.add_error(
'email',
ValidationError(_("Your account has been disabled."))
)
else:
form.add_error(
'email',
ValidationError(_("The email or password is invalid."))
)
return render(self.request, self.template_name, context={'form': form})
class SignUpFormView(generic.FormView):
"""
Represents the 'Sign Up' view.
"""
form_class = SignUpForm
success_url = '/'
template_name = 'clintya_site/account/signup.html'
def form_valid(self, form):
"""
Registers the new user, logout the current user, authenticates,
and login the new user.
"""
email = form.cleaned_data['email']
password = form.cleaned_data['password']
form.save()
logout(self.request)
user = authenticate(username=email, password=password)
if user is not None and user.is_active:
login(self.request, user)
return super(SignUpFormView, self).form_valid(form)
def signout_view(request):
"""
Represents the 'Sign Out' view to logout the authenticated user.
"""
logout(request)
return redirect('/')
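# A minimal sketch of how these views might be wired up in a URLconf; the
# URL names and patterns below are hypothetical, not taken from this project:
#
#     from django.conf.urls import url
#     from account import views
#
#     urlpatterns = [
#         url(r'^signin/$', views.SignInFormView.as_view(), name='signin'),
#         url(r'^signup/$', views.SignUpFormView.as_view(), name='signup'),
#         url(r'^signout/$', views.signout_view, name='signout'),
#     ]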
| viprip/clintya | account/views.py | Python | gpl-3.0 | 2,245 |
# -*- coding: utf-8 -*-
from django.contrib import messages
from django.db.models import Q
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import redirect, get_object_or_404
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _
from django.views.generic.base import TemplateView, View
from django.views.generic.edit import FormView
from django.views.generic.list import ListView
from know.core.http import send_file
from know.decorators import get_article, response_forbidden
from know.plugins.attachments import models, settings, forms
from know.views.mixins import ArticleMixin
class AttachmentView(ArticleMixin, FormView):
form_class = forms.AttachmentForm
template_name = "know/plugins/attachments/index.html"
@method_decorator(get_article(can_read=True))
def dispatch(self, request, article, *args, **kwargs):
if article.can_moderate(request.user):
self.attachments = models.Attachment.objects.filter(
articles=article, current_revision__deleted=False
).exclude(
current_revision__file=None
).order_by('original_filename')
self.form_class = forms.AttachmentArcihveForm
else:
self.attachments = models.Attachment.objects.active().filter(articles=article)
# Fixing some weird transaction issue caused by adding commit_manually to form_valid
return super(AttachmentView, self).dispatch(request, article, *args, **kwargs)
def form_valid(self, form):
        if (self.request.user.is_anonymous() and not settings.ANONYMOUS or
                not self.article.can_write(self.request.user) or
                self.article.current_revision.locked):
return response_forbidden(self.request, self.article, self.urlpath)
attachment_revision = form.save()
if isinstance(attachment_revision, list):
messages.success(self.request, _(u'Successfully added: %s') % (", ".join([ar.get_filename() for ar in attachment_revision])))
else:
messages.success(self.request, _(u'%s was successfully added.') % attachment_revision.get_filename())
return redirect("know:attachments_index", path=self.urlpath.path, article_id=self.article.id)
def get_form_kwargs(self):
kwargs = FormView.get_form_kwargs(self)
kwargs['article'] = self.article
kwargs['request'] = self.request
return kwargs
def get_context_data(self, **kwargs):
kwargs['attachments'] = self.attachments
kwargs['deleted_attachments'] = models.Attachment.objects.filter(articles=self.article, current_revision__deleted=True)
kwargs['search_form'] = forms.SearchForm()
kwargs['selected_tab'] = 'attachments'
kwargs['anonymous_disallowed'] = self.request.user.is_anonymous() and not settings.ANONYMOUS
return super(AttachmentView, self).get_context_data(**kwargs)
class AttachmentHistoryView(ArticleMixin, TemplateView):
template_name = "know/plugins/attachments/history.html"
@method_decorator(get_article(can_read=True))
def dispatch(self, request, article, attachment_id, *args, **kwargs):
if article.can_moderate(request.user):
self.attachment = get_object_or_404(models.Attachment, id=attachment_id, articles=article)
else:
self.attachment = get_object_or_404(models.Attachment.objects.active(), id=attachment_id, articles=article)
return super(AttachmentHistoryView, self).dispatch(request, article, *args, **kwargs)
def get_context_data(self, **kwargs):
kwargs['attachment'] = self.attachment
kwargs['revisions'] = self.attachment.attachmentrevision_set.all().order_by('-revision_number')
kwargs['selected_tab'] = 'attachments'
return super(AttachmentHistoryView, self).get_context_data(**kwargs)
class AttachmentReplaceView(ArticleMixin, FormView):
form_class = forms.AttachmentForm
template_name = "know/plugins/attachments/replace.html"
@method_decorator(get_article(can_write=True, not_locked=True))
def dispatch(self, request, article, attachment_id, *args, **kwargs):
if request.user.is_anonymous() and not settings.ANONYMOUS:
return response_forbidden(request, article, kwargs.get('urlpath', None))
if article.can_moderate(request.user):
self.attachment = get_object_or_404(models.Attachment, id=attachment_id, articles=article)
else:
self.attachment = get_object_or_404(models.Attachment.objects.active(), id=attachment_id, articles=article)
return super(AttachmentReplaceView, self).dispatch(request, article, *args, **kwargs)
def form_valid(self, form):
try:
attachment_revision = form.save(commit=False)
attachment_revision.attachment = self.attachment
attachment_revision.set_from_request(self.request)
attachment_revision.previous_revision = self.attachment.current_revision
attachment_revision.save()
self.attachment.current_revision = attachment_revision
self.attachment.save()
messages.success(self.request, _(u'%s uploaded and replaces old attachment.') % attachment_revision.get_filename())
except models.IllegalFileExtension, e:
messages.error(self.request, _(u'Your file could not be saved: %s') % e)
return redirect("know:attachments_replace", attachment_id=self.attachment.id,
path=self.urlpath.path, article_id=self.article.id)
except Exception:
messages.error(self.request, _(u'Your file could not be saved, probably because of a permission error on the web server.'))
return redirect("know:attachments_replace", attachment_id=self.attachment.id,
path=self.urlpath.path, article_id=self.article.id)
return redirect("know:attachments_index", path=self.urlpath.path, article_id=self.article.id)
def get_form(self, form_class):
form = FormView.get_form(self, form_class)
form.fields['file'].help_text = _(u'Your new file will automatically be renamed to match the file already present. Files with different extensions are not allowed.')
return form
def get_initial(self, **kwargs):
return {'description': self.attachment.current_revision.description}
def get_context_data(self, **kwargs):
kwargs['attachment'] = self.attachment
kwargs['selected_tab'] = 'attachments'
return super(AttachmentReplaceView, self).get_context_data(**kwargs)
class AttachmentDownloadView(ArticleMixin, View):
@method_decorator(get_article(can_read=True))
def dispatch(self, request, article, attachment_id, *args, **kwargs):
if article.can_moderate(request.user):
self.attachment = get_object_or_404(models.Attachment, id=attachment_id, articles=article)
else:
self.attachment = get_object_or_404(models.Attachment.objects.active(), id=attachment_id, articles=article)
revision_id = kwargs.get('revision_id', None)
if revision_id:
self.revision = get_object_or_404(models.AttachmentRevision, id=revision_id, attachment__articles=article)
else:
self.revision = self.attachment.current_revision
return super(AttachmentDownloadView, self).dispatch(request, article, *args, **kwargs)
def get(self, request, *args, **kwargs):
if self.revision:
if settings.USE_LOCAL_PATH:
try:
return send_file(request, self.revision.file.path,
self.revision.created, self.attachment.original_filename)
except OSError:
pass
else:
return HttpResponseRedirect(self.revision.file.url)
raise Http404
class AttachmentChangeRevisionView(ArticleMixin, View):
form_class = forms.AttachmentForm
template_name = "know/plugins/attachments/replace.html"
@method_decorator(get_article(can_write=True, not_locked=True))
def dispatch(self, request, article, attachment_id, revision_id, *args, **kwargs):
if article.can_moderate(request.user):
self.attachment = get_object_or_404(models.Attachment, id=attachment_id, articles=article)
else:
self.attachment = get_object_or_404(models.Attachment.objects.active(), id=attachment_id, articles=article)
self.revision = get_object_or_404(models.AttachmentRevision, id=revision_id, attachment__articles=article)
return super(AttachmentChangeRevisionView, self).dispatch(request, article, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.attachment.current_revision = self.revision
self.attachment.save()
messages.success(self.request, _(u'Current revision changed for %s.') % self.attachment.original_filename)
return redirect("know:attachments_index", path=self.urlpath.path, article_id=self.article.id)
def get_context_data(self, **kwargs):
kwargs['selected_tab'] = 'attachments'
return ArticleMixin.get_context_data(self, **kwargs)
class AttachmentAddView(ArticleMixin, View):
@method_decorator(get_article(can_write=True, not_locked=True))
def dispatch(self, request, article, attachment_id, *args, **kwargs):
self.attachment = get_object_or_404(models.Attachment.objects.active().can_write(request.user), id=attachment_id)
return super(AttachmentAddView, self).dispatch(request, article, *args, **kwargs)
def post(self, request, *args, **kwargs):
        if not self.attachment.articles.filter(id=self.article.id):
self.attachment.articles.add(self.article)
self.attachment.save()
messages.success(self.request, _(u'Added a reference to "%(att)s" from "%(art)s".') %
{'att': self.attachment.original_filename,
'art': self.article.current_revision.title})
return redirect("know:attachments_index", path=self.urlpath.path, article_id=self.article.id)
class AttachmentDeleteView(ArticleMixin, FormView):
form_class = forms.DeleteForm
template_name = "know/plugins/attachments/delete.html"
@method_decorator(get_article(can_write=True, not_locked=True))
def dispatch(self, request, article, attachment_id, *args, **kwargs):
self.attachment = get_object_or_404(models.Attachment, id=attachment_id, articles=article)
if not self.attachment.can_delete(request.user):
return response_forbidden(request, article, kwargs.get('urlpath', None))
return super(AttachmentDeleteView, self).dispatch(request, article, *args, **kwargs)
def form_valid(self, form):
if self.attachment.article == self.article:
revision = models.AttachmentRevision()
revision.attachment = self.attachment
revision.set_from_request(self.request)
revision.deleted = True
revision.file = self.attachment.current_revision.file if self.attachment.current_revision else None
revision.description = self.attachment.current_revision.description if self.attachment.current_revision else ""
revision.save()
self.attachment.current_revision = revision
self.attachment.save()
messages.info(self.request, _(u'The file %s was deleted.') % self.attachment.original_filename)
else:
self.attachment.articles.remove(self.article)
messages.info(self.request, _(u'This article is no longer related to the file %s.') % self.attachment.original_filename)
return redirect("know:get", path=self.urlpath.path, article_id=self.article.id)
def get_context_data(self, **kwargs):
kwargs['attachment'] = self.attachment
kwargs['selected_tab'] = 'attachments'
return super(AttachmentDeleteView, self).get_context_data(**kwargs)
class AttachmentSearchView(ArticleMixin, ListView):
template_name = "know/plugins/attachments/search.html"
allow_empty = True
context_object_name = 'attachments'
paginate_by = 10
@method_decorator(get_article(can_write=True))
def dispatch(self, request, article, *args, **kwargs):
return super(AttachmentSearchView, self).dispatch(request, article, *args, **kwargs)
def get_queryset(self):
self.query = self.request.GET.get('query', None)
if not self.query:
qs = models.Attachment.objects.get_empty_query_set()
else:
qs = models.Attachment.objects.active().can_read(self.request.user)
qs = qs.filter(Q(original_filename__contains=self.query) |
Q(current_revision__description__contains=self.query) |
Q(article__current_revision__title__contains=self.query))
return qs
def get_context_data(self, **kwargs):
# Is this a bit of a hack? Use better inheritance?
kwargs_article = ArticleMixin.get_context_data(self, **kwargs)
kwargs_listview = ListView.get_context_data(self, **kwargs)
kwargs['search_form'] = forms.SearchForm(self.request.GET)
kwargs['query'] = self.query
kwargs.update(kwargs_article)
kwargs.update(kwargs_listview)
kwargs['selected_tab'] = 'attachments'
return kwargs
| indexofire/gork | src/gork/application/know/plugins/attachments/views.py | Python | mit | 13,497 |
#!/usr/bin/python
#
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
def not_available_on_remote(func):
    def testMethod(self):
        print(self.driver)
        # Turn the test into a no-op for RemoteWebDriver-based drivers,
        # detected here via the driver class's module path. (Comparing
        # type(self.driver) against the string 'remote' could never be true,
        # since type() returns a class object.)
        if type(self.driver).__module__.startswith('selenium.webdriver.remote'):
            return lambda x: None
        else:
            return func(self)
    return testMethod
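# Usage sketch (hypothetical test name): decorating a test method makes it
# a no-op when the suite runs against a remote driver:
#
#     @not_available_on_remote
#     def testNativeEventsOnly(self):
#         ...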
class CorrectEventFiringTests(unittest.TestCase):
def testShouldFireClickEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
self._assertEventFired("click")
def testShouldFireMouseDownEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
self._assertEventFired("mousedown")
def testShouldFireMouseUpEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
self._assertEventFired("mouseup")
def testShouldIssueMouseDownEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mousedown").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse down")
def testShouldIssueClickEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mouseclick").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse click")
def testShouldIssueMouseUpEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mouseup").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse up")
def testMouseEventsShouldBubbleUpToContainingElements(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("child").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse down")
def testShouldEmitOnChangeEventsWhenSelectingElements(self):
self._loadPage("javascriptPage")
# Intentionally not looking up the select tag. See selenium r7937 for details.
allOptions = self.driver.find_elements_by_xpath("//select[@id='selector']//option")
initialTextValue = self.driver.find_element_by_id("result").text
foo = allOptions[0]
bar = allOptions[1]
foo.click()
self.assertEqual(self.driver.find_element_by_id("result").text, initialTextValue)
bar.click()
self.assertEqual(self.driver.find_element_by_id("result").text, "bar")
def testShouldEmitOnChangeEventsWhenChangingTheStateOfACheckbox(self):
self._loadPage("javascriptPage")
checkbox = self.driver.find_element_by_id("checkbox")
checkbox.click()
self.assertEqual(self.driver.find_element_by_id("result").text, "checkbox thing")
def testShouldEmitClickEventWhenClickingOnATextInputElement(self):
self._loadPage("javascriptPage")
clicker = self.driver.find_element_by_id("clickField")
clicker.click()
self.assertEqual(clicker.get_attribute("value"), "Clicked")
def testClearingAnElementShouldCauseTheOnChangeHandlerToFire(self):
self._loadPage("javascriptPage")
element = self.driver.find_element_by_id("clearMe")
element.clear()
result = self.driver.find_element_by_id("result")
        self.assertEqual(result.text, "Cleared")
# TODO Currently Failing and needs fixing
#def testSendingKeysToAnotherElementShouldCauseTheBlurEventToFire(self):
# self._loadPage("javascriptPage")
# element = self.driver.find_element_by_id("theworks")
# element.send_keys("foo")
# element2 = self.driver.find_element_by_id("changeable")
# element2.send_keys("bar")
# self._assertEventFired("blur")
# TODO Currently Failing and needs fixing
#def testSendingKeysToAnElementShouldCauseTheFocusEventToFire(self):
# self._loadPage("javascriptPage")
# element = self.driver.find_element_by_id("theworks")
# element.send_keys("foo")
# self._assertEventFired("focus")
def _clickOnElementWhichRecordsEvents(self):
self.driver.find_element_by_id("plainButton").click()
def _assertEventFired(self, eventName):
result = self.driver.find_element_by_id("result")
text = result.text
self.assertTrue(eventName in text, "No " + eventName + " fired: " + text)
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
| denis-vilyuzhanin/selenium-fastview | py/test/selenium/webdriver/common/correct_event_firing_tests.py | Python | apache-2.0 | 5,463 |
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Copyright (c) 2014 Intel Corporation. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Given a list of JAR files passed via --jars, produced one single JAR file with
all their contents merged. JAR files outside --build-dir are ignored.
"""
import optparse
import os
import sys
GYP_ANDROID_DIR = os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir,
'build',
'android',
'gyp')
sys.path.append(GYP_ANDROID_DIR)
import jar
from util import build_utils
def main():
parser = optparse.OptionParser()
parser.add_option('--build-dir',
help='Base build directory, such as out/Release. JARs '
'outside this directory will be skipped.')
parser.add_option('--jars', help='The jars to merge.')
parser.add_option('--output-jar', help='Name of the merged JAR file.')
options, _ = parser.parse_args()
build_dir = os.path.abspath(options.build_dir)
with build_utils.TempDir() as temp_dir:
for jar_file in build_utils.ParseGypList(options.jars):
if not os.path.abspath(jar_file).startswith(build_dir):
continue
build_utils.ExtractAll(jar_file, path=temp_dir, pattern='*.class')
jar.JarDirectory(temp_dir, options.output_jar)
if __name__ == '__main__':
sys.exit(main())
| PeterWangIntel/crosswalk | build/android/merge_jars.py | Python | bsd-3-clause | 1,551 |
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# dump_instances.py
# Copyright (C) 2015 Fracpete (pythonwekawrapper at gmail dot com)
import os
import tempfile
import traceback
import weka.core.jvm as jvm
import wekaexamples.helper as helper
import weka.filters as filters
from weka.flow.control import Flow
from weka.flow.source import FileSupplier
from weka.flow.transformer import LoadDataset, Filter, RenameRelation
from weka.flow.sink import InstanceDumper
def main():
"""
Just runs some example code.
"""
"""
Loads/filters a dataset incrementally and saves it to a new file.
"""
# setup the flow
helper.print_title("Load/filter/save dataset (incrementally)")
iris = helper.get_data_dir() + os.sep + "iris.arff"
flow = Flow(name="Load/filter/save dataset (incrementally)")
filesupplier = FileSupplier()
filesupplier.config["files"] = [iris]
flow.actors.append(filesupplier)
loaddataset = LoadDataset()
loaddataset.config["incremental"] = True
flow.actors.append(loaddataset)
flter = Filter()
flter.config["setup"] = filters.Filter(
classname="weka.filters.unsupervised.attribute.Remove", options=["-R", "last"])
flow.actors.append(flter)
rename = RenameRelation()
rename.config["name"] = "iris-reduced"
flow.actors.append(rename)
dumper = InstanceDumper()
dumper.config["output"] = tempfile.gettempdir() + os.sep + "out.arff"
flow.actors.append(dumper)
# run the flow
msg = flow.setup()
if msg is None:
print("\n" + flow.tree + "\n")
msg = flow.execute()
if msg is not None:
print("Error executing flow:\n" + msg)
else:
print("Error setting up flow:\n" + msg)
flow.wrapup()
flow.cleanup()
if __name__ == "__main__":
try:
jvm.start()
main()
except Exception, e:
print(traceback.format_exc())
finally:
jvm.stop()
| fracpete/python-weka-wrapper-examples | src/wekaexamples/flow/dump_instances.py | Python | gpl-3.0 | 2,538 |
from flask.ext.appbuilder import Model
from datetime import timedelta
from flask.ext.appbuilder.models.mixins import AuditMixin
from flask import request, redirect, flash, Response
from sqlalchemy import Column, Integer, String, ForeignKey, Text, Boolean, DateTime
from sqlalchemy import create_engine, MetaData, desc
from sqlalchemy import Table as sqlaTable
from sqlalchemy.orm import relationship
from dateutil.parser import parse
from pydruid import client
from pydruid.utils.filters import Dimension, Filter
from pandas import read_sql_query
from sqlalchemy.sql import table, literal_column
from sqlalchemy import select, and_, text
from copy import deepcopy, copy
from collections import namedtuple
from datetime import datetime
import logging
import json
import sqlparse
import requests
import textwrap
from panoramix import db, get_session
import config
QueryResult = namedtuple('QueryResult', ['df', 'query', 'duration'])
class Queryable(object):
@property
def column_names(self):
return sorted([c.column_name for c in self.columns])
@property
def groupby_column_names(self):
return sorted([c.column_name for c in self.columns if c.groupby])
@property
def filterable_column_names(self):
return sorted([c.column_name for c in self.columns if c.filterable])
class Database(Model, AuditMixin):
__tablename__ = 'dbs'
id = Column(Integer, primary_key=True)
database_name = Column(String(255), unique=True)
sqlalchemy_uri = Column(String(1024))
def __repr__(self):
return self.database_name
def get_sqla_engine(self):
return create_engine(self.sqlalchemy_uri)
def get_table(self, table_name):
meta = MetaData()
return sqlaTable(
table_name, meta,
autoload=True,
autoload_with=self.get_sqla_engine())
class Table(Model, Queryable, AuditMixin):
__tablename__ = 'tables'
id = Column(Integer, primary_key=True)
table_name = Column(String(255), unique=True)
main_datetime_column_id = Column(Integer, ForeignKey('table_columns.id'))
main_datetime_column = relationship(
'TableColumn', foreign_keys=[main_datetime_column_id])
default_endpoint = Column(Text)
database_id = Column(Integer, ForeignKey('dbs.id'), nullable=False)
database = relationship(
'Database', backref='tables', foreign_keys=[database_id])
baselink = "tableview"
@property
def name(self):
return self.table_name
@property
def table_link(self):
url = "/panoramix/table/{}/".format(self.id)
return '<a href="{url}">{self.table_name}</a>'.format(**locals())
@property
def metrics_combo(self):
return sorted(
[
(m.metric_name, m.verbose_name)
for m in self.metrics],
key=lambda x: x[1])
def query_bkp(
self, groupby, metrics,
granularity,
from_dttm, to_dttm,
limit_spec=None,
filter=None,
is_timeseries=True,
timeseries_limit=15, row_limit=None):
"""
Unused, legacy way of querying by building a SQL string without
using the sqlalchemy expression API (new approach which supports
all dialects)
"""
from pandas import read_sql_query
qry_start_dttm = datetime.now()
metrics_exprs = [
"{} AS {}".format(m.expression, m.metric_name)
for m in self.metrics if m.metric_name in metrics]
from_dttm_iso = from_dttm.isoformat()
to_dttm_iso = to_dttm.isoformat()
if metrics:
main_metric_expr = [m.expression for m in self.metrics if m.metric_name == metrics[0]][0]
else:
main_metric_expr = "COUNT(*)"
select_exprs = []
groupby_exprs = []
if groupby:
select_exprs = copy(groupby)
groupby_exprs = [s for s in groupby]
inner_groupby_exprs = [s for s in groupby]
select_exprs += metrics_exprs
if granularity != "all":
select_exprs += ['ds as timestamp']
groupby_exprs += ['ds']
select_exprs = ",\n".join(select_exprs)
groupby_exprs = ",\n".join(groupby_exprs)
where_clause = [
"ds >= '{from_dttm_iso}'",
"ds < '{to_dttm_iso}'"
]
for col, op, eq in filter:
if op in ('in', 'not in'):
l = ["'{}'".format(s) for s in eq.split(",")]
l = ", ".join(l)
op = op.upper()
where_clause.append(
"{col} {op} ({l})".format(**locals())
)
where_clause = " AND\n".join(where_clause).format(**locals())
on_clause = " AND ".join(["{g} = __{g}".format(g=g) for g in groupby])
limiting_join = ""
if timeseries_limit and groupby:
inner_select = ", ".join(["{g} as __{g}".format(g=g) for g in inner_groupby_exprs])
inner_groupby_exprs = ", ".join(inner_groupby_exprs)
limiting_join = (
"JOIN ( \n"
" SELECT {inner_select} \n"
" FROM {self.table_name} \n"
" WHERE \n"
" {where_clause}\n"
" GROUP BY {inner_groupby_exprs}\n"
" ORDER BY {main_metric_expr} DESC\n"
" LIMIT {timeseries_limit}\n"
") z ON {on_clause}\n"
).format(**locals())
sql = (
"SELECT\n"
" {select_exprs}\n"
"FROM {self.table_name}\n"
"{limiting_join}"
"WHERE\n"
" {where_clause}\n"
"GROUP BY\n"
" {groupby_exprs}\n"
).format(**locals())
df = read_sql_query(
sql=sql,
con=self.database.get_sqla_engine()
)
textwrap.dedent(sql)
return QueryResult(
df=df, duration=datetime.now() - qry_start_dttm, query=sql)
def query(
self, groupby, metrics,
granularity,
from_dttm, to_dttm,
limit_spec=None,
filter=None,
is_timeseries=True,
timeseries_limit=15, row_limit=None):
qry_start_dttm = datetime.now()
timestamp = literal_column(
self.main_datetime_column.column_name).label('timestamp')
metrics_exprs = [
literal_column(m.expression).label(m.metric_name)
for m in self.metrics if m.metric_name in metrics]
if metrics:
main_metric_expr = literal_column(
[m.expression for m in self.metrics if m.metric_name == metrics[0]][0])
else:
main_metric_expr = literal_column("COUNT(*)")
select_exprs = []
groupby_exprs = []
if groupby:
select_exprs = [literal_column(s) for s in groupby]
groupby_exprs = [literal_column(s) for s in groupby]
inner_groupby_exprs = [literal_column(s).label('__' + s) for s in groupby]
if granularity != "all":
select_exprs += [timestamp]
groupby_exprs += [timestamp]
select_exprs += metrics_exprs
qry = select(select_exprs)
from_clause = table(self.table_name)
qry = qry.group_by(*groupby_exprs)
where_clause_and = [
timestamp >= from_dttm.isoformat(),
timestamp < to_dttm.isoformat(),
]
for col, op, eq in filter:
if op in ('in', 'not in'):
values = eq.split(",")
cond = literal_column(col).in_(values)
if op == 'not in':
cond = ~cond
where_clause_and.append(cond)
qry = qry.where(and_(*where_clause_and))
qry = qry.order_by(desc(main_metric_expr))
qry = qry.limit(row_limit)
if timeseries_limit and groupby:
subq = select(inner_groupby_exprs)
subq = subq.select_from(table(self.table_name))
subq = subq.where(and_(*where_clause_and))
subq = subq.group_by(*inner_groupby_exprs)
subq = subq.order_by(desc(main_metric_expr))
subq = subq.limit(timeseries_limit)
on_clause = []
for gb in groupby:
on_clause.append(literal_column(gb)==literal_column("__" + gb))
from_clause = from_clause.join(subq.alias(), and_(*on_clause))
qry = qry.select_from(from_clause)
engine = self.database.get_sqla_engine()
sql = str(qry.compile(engine, compile_kwargs={"literal_binds": True}))
df = read_sql_query(
sql=sql,
con=engine
)
sql = sqlparse.format(sql, reindent=True)
return QueryResult(
df=df, duration=datetime.now() - qry_start_dttm, query=sql)
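    # When timeseries_limit applies, the subquery above picks the top-N
    # groupby combinations over the whole period and the outer query joins
    # against them, mirroring the two-phase limiting done for Druid further
    # below.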
def fetch_metadata(self):
try:
table = self.database.get_table(self.table_name)
except Exception as e:
flash(str(e))
flash(
"Table doesn't seem to exist in the specified database, "
"couldn't fetch column information", "danger")
return
TC = TableColumn
M = SqlMetric
metrics = []
any_date_col = None
for col in table.columns:
try:
datatype = str(col.type)
except Exception as e:
datatype = "UNKNOWN"
dbcol = (
db.session
.query(TC)
.filter(TC.table==self)
.filter(TC.column_name==col.name)
.first()
)
db.session.flush()
if not dbcol:
dbcol = TableColumn(column_name=col.name)
if (
str(datatype).startswith('VARCHAR') or
str(datatype).startswith('STRING')):
dbcol.groupby = True
dbcol.filterable = True
db.session.merge(self)
self.columns.append(dbcol)
if not any_date_col and 'date' in datatype.lower():
any_date_col = dbcol
if dbcol.sum:
metrics.append(M(
metric_name='sum__' + dbcol.column_name,
verbose_name='sum__' + dbcol.column_name,
metric_type='sum',
expression="SUM({})".format(dbcol.column_name)
))
if dbcol.max:
metrics.append(M(
metric_name='max__' + dbcol.column_name,
verbose_name='max__' + dbcol.column_name,
metric_type='max',
expression="MAX({})".format(dbcol.column_name)
))
if dbcol.min:
metrics.append(M(
metric_name='min__' + dbcol.column_name,
verbose_name='min__' + dbcol.column_name,
metric_type='min',
expression="MIN({})".format(dbcol.column_name)
))
if dbcol.count_distinct:
metrics.append(M(
metric_name='count_distinct__' + dbcol.column_name,
verbose_name='count_distinct__' + dbcol.column_name,
metric_type='count_distinct',
expression="COUNT(DISTINCT {})".format(dbcol.column_name)
))
dbcol.type = datatype
db.session.merge(self)
db.session.commit()
metrics.append(M(
metric_name='count',
verbose_name='COUNT(*)',
metric_type='count',
expression="COUNT(*)"
))
for metric in metrics:
m = (
db.session.query(M)
.filter(M.metric_name==metric.metric_name)
.filter(M.table==self)
.first()
)
metric.table = self
if not m:
db.session.add(metric)
db.session.commit()
if not self.main_datetime_column:
self.main_datetime_column = any_date_col
class SqlMetric(Model, AuditMixin):
__tablename__ = 'sql_metrics'
id = Column(Integer, primary_key=True)
metric_name = Column(String(512))
verbose_name = Column(String(1024))
metric_type = Column(String(32))
table_id = Column(Integer,ForeignKey('tables.id'))
table = relationship(
'Table', backref='metrics', foreign_keys=[table_id])
expression = Column(Text)
description = Column(Text)
class TableColumn(Model, AuditMixin):
__tablename__ = 'table_columns'
id = Column(Integer, primary_key=True)
table_id = Column(Integer, ForeignKey('tables.id'))
table = relationship('Table', backref='columns', foreign_keys=[table_id])
column_name = Column(String(256))
is_dttm = Column(Boolean, default=True)
is_active = Column(Boolean, default=True)
type = Column(String(32), default='')
groupby = Column(Boolean, default=False)
count_distinct = Column(Boolean, default=False)
sum = Column(Boolean, default=False)
max = Column(Boolean, default=False)
min = Column(Boolean, default=False)
filterable = Column(Boolean, default=False)
description = Column(Text, default='')
def __repr__(self):
return self.column_name
class Cluster(Model, AuditMixin):
__tablename__ = 'clusters'
id = Column(Integer, primary_key=True)
cluster_name = Column(String(255), unique=True)
coordinator_host = Column(String(256))
coordinator_port = Column(Integer)
coordinator_endpoint = Column(String(256))
broker_host = Column(String(256))
broker_port = Column(Integer)
broker_endpoint = Column(String(256))
metadata_last_refreshed = Column(DateTime)
def __repr__(self):
return self.cluster_name
def get_pydruid_client(self):
cli = client.PyDruid(
"http://{0}:{1}/".format(self.broker_host, self.broker_port),
self.broker_endpoint)
return cli
def refresh_datasources(self):
endpoint = (
"http://{self.coordinator_host}:{self.coordinator_port}/"
"{self.coordinator_endpoint}/datasources"
).format(self=self)
datasources = json.loads(requests.get(endpoint).text)
for datasource in datasources:
Datasource.sync_to_db(datasource, self)
class Datasource(Model, AuditMixin, Queryable):
baselink = "datasourcemodelview"
__tablename__ = 'datasources'
id = Column(Integer, primary_key=True)
datasource_name = Column(String(255), unique=True)
is_featured = Column(Boolean, default=False)
is_hidden = Column(Boolean, default=False)
description = Column(Text)
default_endpoint = Column(Text)
user_id = Column(Integer, ForeignKey('ab_user.id'))
owner = relationship('User', backref='datasources', foreign_keys=[user_id])
cluster_name = Column(String(255),
ForeignKey('clusters.cluster_name'))
cluster = relationship('Cluster', backref='datasources', foreign_keys=[cluster_name])
@property
def metrics_combo(self):
return sorted(
[(m.metric_name, m.verbose_name) for m in self.metrics],
key=lambda x: x[1])
@property
def name(self):
return self.datasource_name
def __repr__(self):
return self.datasource_name
@property
def datasource_link(self):
url = "/panoramix/datasource/{}/".format(self.datasource_name)
return '<a href="{url}">{self.datasource_name}</a>'.format(**locals())
def get_metric_obj(self, metric_name):
return [
m.json_obj for m in self.metrics
if m.metric_name == metric_name
][0]
def latest_metadata(self):
client = self.cluster.get_pydruid_client()
results = client.time_boundary(datasource=self.datasource_name)
if not results:
return
        max_time = results[0]['result']['maxTime']
max_time = parse(max_time)
intervals = (max_time - timedelta(seconds=1)).isoformat() + '/'
intervals += (max_time + timedelta(seconds=1)).isoformat()
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=intervals)
if segment_metadata:
return segment_metadata[-1]['columns']
def generate_metrics(self):
for col in self.columns:
col.generate_metrics()
@classmethod
def sync_to_db(cls, name, cluster):
session = get_session()
datasource = session.query(cls).filter_by(datasource_name=name).first()
if not datasource:
datasource = cls(datasource_name=name)
session.add(datasource)
datasource.cluster = cluster
cols = datasource.latest_metadata()
if not cols:
return
for col in cols:
col_obj = (
session
.query(Column)
.filter_by(datasource_name=name, column_name=col)
.first()
)
datatype = cols[col]['type']
if not col_obj:
col_obj = Column(datasource_name=name, column_name=col)
session.add(col_obj)
if datatype == "STRING":
col_obj.groupby = True
col_obj.filterable = True
if col_obj:
col_obj.type = cols[col]['type']
col_obj.datasource = datasource
col_obj.generate_metrics()
#session.commit()
def query(
self, groupby, metrics,
granularity,
from_dttm, to_dttm,
limit_spec=None,
filter=None,
is_timeseries=True,
timeseries_limit=None,
row_limit=None):
qry_start_dttm = datetime.now()
from_dttm = from_dttm.replace(tzinfo=config.DRUID_TZ) # add tzinfo to native datetime with config
to_dttm = to_dttm.replace(tzinfo=config.DRUID_TZ)
query_str = ""
aggregations = {
m.metric_name: m.json_obj
for m in self.metrics if m.metric_name in metrics
}
if not isinstance(granularity, basestring):
granularity = {"type": "duration", "duration": granularity}
qry = dict(
datasource=self.datasource_name,
dimensions=groupby,
aggregations=aggregations,
granularity=granularity,
intervals= from_dttm.isoformat() + '/' + to_dttm.isoformat(),
)
filters = None
for col, op, eq in filter:
cond = None
if op == '==':
cond = Dimension(col)==eq
elif op == '!=':
cond = ~(Dimension(col)==eq)
elif op in ('in', 'not in'):
fields = []
splitted = eq.split(',')
if len(splitted) > 1:
for s in eq.split(','):
s = s.strip()
fields.append(Filter.build_filter(Dimension(col)==s))
cond = Filter(type="or", fields=fields)
else:
cond = Dimension(col)==eq
if op == 'not in':
cond = ~cond
if filters:
filters = Filter(type="and", fields=[
Filter.build_filter(cond),
Filter.build_filter(filters)
])
else:
filters = cond
if filters:
qry['filter'] = filters
client = self.cluster.get_pydruid_client()
orig_filters = filters
if timeseries_limit and is_timeseries:
            # Limit the number of timeseries, using a two-phase query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
pre_qry['limit_spec'] = {
"type": "default",
"limit": timeseries_limit,
"columns": [{
"dimension": metrics[0] if metrics else self.metrics[0],
"direction": "descending",
}],
}
client.groupby(**pre_qry)
query_str += "// Two phase query\n// Phase 1\n"
query_str += json.dumps(client.query_dict, indent=2) + "\n"
query_str += "//\nPhase 2 (built based on phase one's results)\n"
df = client.export_pandas()
            if df is not None and not df.empty:
dims = qry['dimensions']
filters = []
for index, row in df.iterrows():
fields = []
for dim in dims:
f = Filter.build_filter(Dimension(dim) == row[dim])
fields.append(f)
if len(fields) > 1:
filt = Filter(type="and", fields=fields)
filters.append(Filter.build_filter(filt))
elif fields:
filters.append(fields[0])
if filters:
ff = Filter(type="or", fields=filters)
if not orig_filters:
qry['filter'] = ff
else:
qry['filter'] = Filter(type="and", fields=[
Filter.build_filter(ff),
Filter.build_filter(orig_filters)])
qry['limit_spec'] = None
if row_limit:
qry['limit_spec'] = {
"type": "default",
"limit": row_limit,
"columns": [{
"dimension": metrics[0] if metrics else self.metrics[0],
"direction": "descending",
}],
}
client.groupby(**qry)
query_str += json.dumps(client.query_dict, indent=2)
df = client.export_pandas()
return QueryResult(
df=df,
query=query_str,
duration=datetime.now() - qry_start_dttm)
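    # Note on query(): when a timeseries limit applies, phase one runs the
    # same groupby with "all" granularity to find the top-N dimension
    # combinations, and phase two re-runs the real query filtered to just
    # those combinations, so the limit caps the number of series rather than
    # the number of rows.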
#class Metric(Model, AuditMixin):
class Metric(Model):
__tablename__ = 'metrics'
id = Column(Integer, primary_key=True)
metric_name = Column(String(512))
verbose_name = Column(String(1024))
metric_type = Column(String(32))
datasource_name = Column(
String(256),
ForeignKey('datasources.datasource_name'))
datasource = relationship('Datasource', backref='metrics')
json = Column(Text)
description = Column(Text)
@property
def json_obj(self):
try:
obj = json.loads(self.json)
except Exception as e:
obj = {}
return obj
class Column(Model, AuditMixin):
__tablename__ = 'columns'
id = Column(Integer, primary_key=True)
datasource_name = Column(
String(256),
ForeignKey('datasources.datasource_name'))
datasource = relationship('Datasource', backref='columns')
column_name = Column(String(256))
is_active = Column(Boolean, default=True)
type = Column(String(32))
groupby = Column(Boolean, default=False)
count_distinct = Column(Boolean, default=False)
sum = Column(Boolean, default=False)
max = Column(Boolean, default=False)
min = Column(Boolean, default=False)
filterable = Column(Boolean, default=False)
description = Column(Text)
def __repr__(self):
return self.column_name
@property
def isnum(self):
return self.type in ('LONG', 'DOUBLE', 'FLOAT')
def generate_metrics(self):
M = Metric
metrics = []
metrics.append(Metric(
metric_name='count',
verbose_name='COUNT(*)',
metric_type='count',
json=json.dumps({'type': 'count', 'name': 'count'})
))
# Somehow we need to reassign this for UDAFs
corrected_type = 'DOUBLE' if self.type in ('DOUBLE', 'FLOAT') else self.type
if self.sum and self.isnum:
mt = corrected_type.lower() + 'Sum'
name='sum__' + self.column_name
metrics.append(Metric(
metric_name=name,
metric_type='sum',
verbose_name='SUM({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
))
if self.min and self.isnum:
mt = corrected_type.lower() + 'Min'
name='min__' + self.column_name
metrics.append(Metric(
metric_name=name,
metric_type='min',
verbose_name='MIN({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
))
if self.max and self.isnum:
mt = corrected_type.lower() + 'Max'
name='max__' + self.column_name
metrics.append(Metric(
metric_name=name,
metric_type='max',
verbose_name='MAX({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
))
if self.count_distinct:
mt = 'count_distinct'
name='count_distinct__' + self.column_name
metrics.append(Metric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type='count_distinct',
json=json.dumps({
'type': 'cardinality',
'name': name,
'fieldNames': [self.column_name]})
))
session = get_session()
for metric in metrics:
m = (
session.query(M)
.filter(M.metric_name==metric.metric_name)
.filter(M.datasource_name==self.datasource_name)
.filter(Cluster.cluster_name==self.datasource.cluster_name)
.first()
)
metric.datasource_name = self.datasource_name
if not m:
session.add(metric)
session.commit()
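    # For a numeric column named "bytes" (hypothetical) with sum enabled,
    # the loop above would emit a Druid aggregator along the lines of:
    #
    #     {"type": "doubleSum", "name": "sum__bytes", "fieldName": "bytes"}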
| wbchn/panoramix | panoramix/models.py | Python | apache-2.0 | 26,355 |
from __future__ import absolute_import
# defaults and constants
from .utils import DEFAULT_PALETTE
from ._chart_options import default_options as defaults
# main components
from ._chart import Chart
# operations and attributes for users to input into Charts
from ._attributes import color
from .operations import stack, blend
# builders
from .builder.line_builder import Line
from .builder.histogram_builder import Histogram
from .builder.bar_builder import Bar
from .builder.scatter_builder import Scatter
from .builder.boxplot_builder import BoxPlot
# easy access to required bokeh components
from ..models import ColumnDataSource
from ..io import (
curdoc, cursession, output_file, output_notebook, output_server, push,
reset_output, save, show, gridplot, vplot, hplot)
# Silence pyflakes
(curdoc, cursession, output_file, output_notebook, output_server, push,
reset_output, save, show, gridplot, vplot, hplot, ColumnDataSource)
| srinathv/bokeh | bokeh/charts/__init__.py | Python | bsd-3-clause | 948 |
# coding: utf-8
from queue import Empty
import threading
import wx
import modder
import modder.manager
from .trayicon import TrayIcon
class ModderGuiApp(wx.App):
def OnInit(self):
modder.GUI_MODE = True
self._manager = modder.manager.ModManager()
self._tray = TrayIcon()
self._timer_stop_event = threading.Event()
self._timer_thread = modder.TimerThread(modder.EVENT_QUEUE, self._timer_stop_event)
self._timer_thread.daemon = True
self._timer_thread.start()
self._modder_thread = threading.Thread(
target=self._process_mod_event_queue, name='Modder.wxApp.ModderThread'
)
self._modder_thread.daemon = True
modder.EVENT_QUEUE.put_nowait(('Modder.Started', None))
self._modder_thread.start()
return True
def _process_mod_event_queue(self):
while 1:
try:
event_name, event_data = modder.EVENT_QUEUE.get(timeout=1)
except Empty:
pass
else:
self._manager.trigger(event_name, data=event_data)
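        # Events on modder.EVENT_QUEUE are (event_name, event_data) tuples;
        # the 1-second timeout keeps this daemon thread from blocking
        # forever, so the process can shut down promptly.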
def OnExit(self):
self._timer_stop_event.set()
self._manager.trigger('Modder.BeforeQuit')
return 0
| JokerQyou/Modder2 | modder/gui/app.py | Python | mit | 1,240 |
import logging
from django.conf import settings
from django.contrib.auth import logout
from .mixins import JWTCookieActionMixin
from metrics import inc_logouts
logger = logging.getLogger(__name__)
class LogoutBaseAction(object):
"""Logout base action"""
def do(self, action_context, view, *args, **kwargs):
return action_context
class DeleteDjangoAuthSessionAction(JWTCookieActionMixin, LogoutBaseAction):
""" Delete the Django session """
def do(self, action_context, view, *args, **kwargs):
"""Deletes the Django session"""
logout(action_context.request)
return super().do(action_context, view, *args, **kwargs)
class JWTDeleteCookieAction(JWTCookieActionMixin, LogoutBaseAction):
""" Delete the cookie on the response """
def do(self, action_context, view, *args, **kwargs):
"""Deletes the jwt token without calculating"""
# Get the host for the cookie
host = self.get_host(action_context.request)
# Delete the cookie
action_context.response.delete_cookie(key=settings.JWT_COOKIE_NAME,
domain=host)
logger.info("Delete JWT token on {0}".format(host))
return super().do(action_context, view, *args, **kwargs)
class LogoutMetricsAction(LogoutBaseAction):
    """ Increments the logout metric """
    def do(self, action_context, view, *args, **kwargs):
        """Increments the logout counter on the final redirect"""
# Only logout on final redirect
if action_context.extra_context["final_logout"]:
inc_logouts()
return super().do(action_context, view, *args, **kwargs)
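# These actions are designed to be chained: each do() receives and returns
# the action_context, and super().do() hands off to the next class in the
# MRO. A hypothetical pipeline might run them in sequence:
#
#     for action in (DeleteDjangoAuthSessionAction(), JWTDeleteCookieAction()):
#         action_context = action.do(action_context, view)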
| qdqmedia/wiggum | wiggum/authorization/actions/logout.py | Python | bsd-3-clause | 1,648 |
from __future__ import absolute_import
from copy import copy
import django
from django import template
from django.conf import settings
from django.template import Context
from django.template.base import Node, VariableNode, TextNode, NodeList
from django.template.defaulttags import IfNode
from django.template.loader import get_template
from django.template.loader_tags import ExtendsNode, BlockNode, BlockContext
from compressor.exceptions import TemplateSyntaxError, TemplateDoesNotExist
from compressor.templatetags.compress import CompressorNode
def handle_extendsnode(extendsnode, block_context=None, original=None):
"""Create a copy of Node tree of a derived template replacing
all blocks tags with the nodes of appropriate blocks.
Also handles {{ block.super }} tags.
"""
if block_context is None:
block_context = BlockContext()
blocks = dict((n.name, n) for n in
extendsnode.nodelist.get_nodes_by_type(BlockNode))
block_context.add_blocks(blocks)
context = Context(settings.COMPRESS_OFFLINE_CONTEXT)
if original is not None:
context.template = original
compiled_parent = extendsnode.get_parent(context)
parent_nodelist = compiled_parent.nodelist
# If the parent template has an ExtendsNode it is not the root.
for node in parent_nodelist:
# The ExtendsNode has to be the first non-text node.
if not isinstance(node, TextNode):
if isinstance(node, ExtendsNode):
return handle_extendsnode(node, block_context, original)
break
# Add blocks of the root template to block context.
blocks = dict((n.name, n) for n in
parent_nodelist.get_nodes_by_type(BlockNode))
block_context.add_blocks(blocks)
block_stack = []
new_nodelist = remove_block_nodes(parent_nodelist, block_stack, block_context)
return new_nodelist
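# Sketch of what this flattening does, with hypothetical template names:
# given child.html containing
#
#     {% extends "base.html" %}
#     {% block content %}child {{ block.super }}{% endblock %}
#
# the returned nodelist is base.html's node tree with its {% block content %}
# nodes swapped for the child's, and {{ block.super }} resolved against the
# parent block's nodes.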
def remove_block_nodes(nodelist, block_stack, block_context):
new_nodelist = NodeList()
for node in nodelist:
if isinstance(node, VariableNode):
var_name = node.filter_expression.token.strip()
if var_name == 'block.super':
if not block_stack:
continue
node = block_context.get_block(block_stack[-1].name)
if not node:
continue
if isinstance(node, BlockNode):
expanded_block = expand_blocknode(node, block_stack, block_context)
new_nodelist.extend(expanded_block)
else:
# IfNode has nodelist as a @property so we can not modify it
if isinstance(node, IfNode):
node = copy(node)
for i, (condition, sub_nodelist) in enumerate(node.conditions_nodelists):
sub_nodelist = remove_block_nodes(sub_nodelist, block_stack, block_context)
node.conditions_nodelists[i] = (condition, sub_nodelist)
else:
for attr in node.child_nodelists:
sub_nodelist = getattr(node, attr, None)
if sub_nodelist:
sub_nodelist = remove_block_nodes(sub_nodelist, block_stack, block_context)
node = copy(node)
setattr(node, attr, sub_nodelist)
new_nodelist.append(node)
return new_nodelist
def expand_blocknode(node, block_stack, block_context):
popped_block = block = block_context.pop(node.name)
if block is None:
block = node
block_stack.append(block)
expanded_nodelist = remove_block_nodes(block.nodelist, block_stack, block_context)
block_stack.pop()
if popped_block is not None:
block_context.push(node.name, popped_block)
return expanded_nodelist
class DjangoParser(object):
def __init__(self, charset):
self.charset = charset
def parse(self, template_name):
try:
if django.VERSION < (1, 8):
return get_template(template_name)
else:
return get_template(template_name).template
except template.TemplateSyntaxError as e:
raise TemplateSyntaxError(str(e))
except template.TemplateDoesNotExist as e:
raise TemplateDoesNotExist(str(e))
def process_template(self, template, context):
return True
def get_init_context(self, offline_context):
return offline_context
def process_node(self, template, context, node):
pass
def render_nodelist(self, template, context, node):
if django.VERSION >= (1, 8):
context.template = template
return node.nodelist.render(context)
def render_node(self, template, context, node):
return node.render(context, forced=True)
def get_nodelist(self, node, original=None):
if isinstance(node, ExtendsNode):
try:
return handle_extendsnode(node, block_context=None, original=original)
except template.TemplateSyntaxError as e:
raise TemplateSyntaxError(str(e))
except template.TemplateDoesNotExist as e:
raise TemplateDoesNotExist(str(e))
# Check if node is an ```if``` switch with true and false branches
nodelist = []
if isinstance(node, Node):
for attr in node.child_nodelists:
nodelist += getattr(node, attr, [])
else:
nodelist = getattr(node, 'nodelist', [])
return nodelist
def walk_nodes(self, node, original=None):
if django.VERSION >= (1, 8) and original is None:
original = node
for node in self.get_nodelist(node, original):
if isinstance(node, CompressorNode) and node.is_offline_compression_enabled(forced=True):
yield node
else:
for node in self.walk_nodes(node, original):
yield node
| WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/compressor/offline/django.py | Python | bsd-3-clause | 5,960 |
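# Minimum number of single-element increments needed so that every element
# reaches at least K: K - min(arr) when min(arr) < K, otherwise 0.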
for _ in range(int(raw_input())):
_length, K = map(int, raw_input().split())
arr = [int(i) for i in raw_input().split()]
if min(arr) < K:
print K - min(arr)
else:
print '0'
| Faraaz54/python_training_problems | hacker_earth/data_structures/array_update.py | Python | mit | 214 |
libname = 'scenarios'
srcFiles = [
]
hppFiles = [
]
pyconfigs = [
'scenarios/__init__.py',
'scenarios/interfaces.py',
'scenarios/antenna/__init__.py',
'scenarios/antenna/isotropic.py',
'scenarios/placer/__init__.py',
'scenarios/placer/hexagonal.py',
'scenarios/placer/circular.py',
'scenarios/placer/rectangular.py',
'scenarios/placer/linear.py',
'scenarios/placer/positionList.py',
'scenarios/placer/tests/__init__.py',
'scenarios/placer/tests/placer.py',
'scenarios/builders/__init__.py',
'scenarios/builders/creatorplacer.py',
'scenarios/traffic/__init__.py',
'scenarios/simple/__init__.py',
'scenarios/simple/singlecell.py',
'scenarios/plotting/__init__.py',
'scenarios/plotting/Plotting.py',
'scenarios/ituM2135/__init__.py',
'scenarios/ituM2135/antenna.py',
'scenarios/ituM2135/placer.py',
'scenarios/ituM2135/channelmodelcreator.py',
'scenarios/ituM2135/creatorplacer.py',
'scenarios/toolsupport/__init__.py',
'scenarios/toolsupport/pytreevisitors/__init__.py',
'scenarios/toolsupport/pytreevisitors/builders/__init__.py',
'scenarios/toolsupport/pytreevisitors/builders/creatorplacer.py',
'scenarios/binding/__init__.py',
'scenarios/binding/binding.py',
'scenarios/scenariosets/__init__.py',
'scenarios/scenariosets/scenariosets.py',
'scenarios/channelmodel/__init__.py',
'scenarios/channelmodel/channelmodelcreator.py',
]
dependencies = []
# Put in any external lib here as you would pass it to a -l compiler flag, e.g.
# dependencies = ['boost_date_time']
Return('libname srcFiles hppFiles pyconfigs dependencies')
| creasyw/IMTAphy | framework/scenarios/config/libfiles.py | Python | gpl-2.0 | 1,672 |
"""
SoftLayer.CLI.routes
~~~~~~~~~~~~~~~~~~~~~
This is how all commands are registered with the CLI.
:license: MIT, see LICENSE for more details.
"""
ALL_ROUTES = [
('shell', 'SoftLayer.shell.core:cli'),
('call-api', 'SoftLayer.CLI.call_api:cli'),
('account', 'SoftLayer.CLI.account'),
('account:invoice-detail', 'SoftLayer.CLI.account.invoice_detail:cli'),
('account:invoices', 'SoftLayer.CLI.account.invoices:cli'),
('account:events', 'SoftLayer.CLI.account.events:cli'),
('account:event-detail', 'SoftLayer.CLI.account.event_detail:cli'),
('account:licenses', 'SoftLayer.CLI.account.licenses:cli'),
('account:summary', 'SoftLayer.CLI.account.summary:cli'),
('account:billing-items', 'SoftLayer.CLI.account.billing_items:cli'),
('account:item-detail', 'SoftLayer.CLI.account.item_detail:cli'),
('account:cancel-item', 'SoftLayer.CLI.account.cancel_item:cli'),
('account:orders', 'SoftLayer.CLI.account.orders:cli'),
('account:bandwidth-pools', 'SoftLayer.CLI.account.bandwidth_pools:cli'),
('virtual', 'SoftLayer.CLI.virt'),
('virtual:bandwidth', 'SoftLayer.CLI.virt.bandwidth:cli'),
('virtual:billing', 'SoftLayer.CLI.virt.billing:cli'),
('virtual:cancel', 'SoftLayer.CLI.virt.cancel:cli'),
('virtual:capture', 'SoftLayer.CLI.virt.capture:cli'),
('virtual:create', 'SoftLayer.CLI.virt.create:cli'),
('virtual:create-options', 'SoftLayer.CLI.virt.create_options:cli'),
('virtual:detail', 'SoftLayer.CLI.virt.detail:cli'),
('virtual:dns-sync', 'SoftLayer.CLI.virt.dns:cli'),
('virtual:edit', 'SoftLayer.CLI.virt.edit:cli'),
('virtual:list', 'SoftLayer.CLI.virt.list:cli'),
('virtual:pause', 'SoftLayer.CLI.virt.power:pause'),
('virtual:power-off', 'SoftLayer.CLI.virt.power:power_off'),
('virtual:power-on', 'SoftLayer.CLI.virt.power:power_on'),
('virtual:rescue', 'SoftLayer.CLI.virt.power:rescue'),
('virtual:resume', 'SoftLayer.CLI.virt.power:resume'),
('virtual:ready', 'SoftLayer.CLI.virt.ready:cli'),
('virtual:reboot', 'SoftLayer.CLI.virt.power:reboot'),
('virtual:reload', 'SoftLayer.CLI.virt.reload:cli'),
('virtual:storage', 'SoftLayer.CLI.virt.storage:cli'),
('virtual:upgrade', 'SoftLayer.CLI.virt.upgrade:cli'),
('virtual:usage', 'SoftLayer.CLI.virt.usage:cli'),
('virtual:credentials', 'SoftLayer.CLI.virt.credentials:cli'),
('virtual:authorize-storage', 'SoftLayer.CLI.virt.authorize_storage:cli'),
('virtual:capacity', 'SoftLayer.CLI.virt.capacity:cli'),
('virtual:placementgroup', 'SoftLayer.CLI.virt.placementgroup:cli'),
('virtual:migrate', 'SoftLayer.CLI.virt.migrate:cli'),
('dedicatedhost', 'SoftLayer.CLI.dedicatedhost'),
('dedicatedhost:list', 'SoftLayer.CLI.dedicatedhost.list:cli'),
('dedicatedhost:create', 'SoftLayer.CLI.dedicatedhost.create:cli'),
('dedicatedhost:create-options', 'SoftLayer.CLI.dedicatedhost.create_options:cli'),
('dedicatedhost:detail', 'SoftLayer.CLI.dedicatedhost.detail:cli'),
('dedicatedhost:cancel', 'SoftLayer.CLI.dedicatedhost.cancel:cli'),
('dedicatedhost:cancel-guests', 'SoftLayer.CLI.dedicatedhost.cancel_guests:cli'),
('dedicatedhost:list-guests', 'SoftLayer.CLI.dedicatedhost.list_guests:cli'),
('cdn', 'SoftLayer.CLI.cdn'),
('cdn:detail', 'SoftLayer.CLI.cdn.detail:cli'),
('cdn:edit', 'SoftLayer.CLI.cdn.edit:cli'),
('cdn:list', 'SoftLayer.CLI.cdn.list:cli'),
('cdn:origin-add', 'SoftLayer.CLI.cdn.origin_add:cli'),
('cdn:origin-list', 'SoftLayer.CLI.cdn.origin_list:cli'),
('cdn:origin-remove', 'SoftLayer.CLI.cdn.origin_remove:cli'),
('cdn:purge', 'SoftLayer.CLI.cdn.purge:cli'),
('config', 'SoftLayer.CLI.config'),
('config:setup', 'SoftLayer.CLI.config.setup:cli'),
('config:show', 'SoftLayer.CLI.config.show:cli'),
('setup', 'SoftLayer.CLI.config.setup:cli'),
('dns', 'SoftLayer.CLI.dns'),
('dns:import', 'SoftLayer.CLI.dns.zone_import:cli'),
('dns:record-add', 'SoftLayer.CLI.dns.record_add:cli'),
('dns:record-edit', 'SoftLayer.CLI.dns.record_edit:cli'),
('dns:record-list', 'SoftLayer.CLI.dns.record_list:cli'),
('dns:record-remove', 'SoftLayer.CLI.dns.record_remove:cli'),
('dns:zone-create', 'SoftLayer.CLI.dns.zone_create:cli'),
('dns:zone-delete', 'SoftLayer.CLI.dns.zone_delete:cli'),
('dns:zone-list', 'SoftLayer.CLI.dns.zone_list:cli'),
('dns:zone-print', 'SoftLayer.CLI.dns.zone_print:cli'),
('block', 'SoftLayer.CLI.block'),
('block:access-authorize', 'SoftLayer.CLI.block.access.authorize:cli'),
('block:access-list', 'SoftLayer.CLI.block.access.list:cli'),
('block:access-revoke', 'SoftLayer.CLI.block.access.revoke:cli'),
('block:access-password', 'SoftLayer.CLI.block.access.password:cli'),
('block:subnets-list', 'SoftLayer.CLI.block.subnets.list:cli'),
('block:subnets-assign', 'SoftLayer.CLI.block.subnets.assign:cli'),
('block:subnets-remove', 'SoftLayer.CLI.block.subnets.remove:cli'),
('block:replica-failback', 'SoftLayer.CLI.block.replication.failback:cli'),
('block:replica-failover', 'SoftLayer.CLI.block.replication.failover:cli'),
('block:disaster-recovery-failover', 'SoftLayer.CLI.block.replication.disaster_recovery_failover:cli'),
('block:replica-order', 'SoftLayer.CLI.block.replication.order:cli'),
('block:replica-partners', 'SoftLayer.CLI.block.replication.partners:cli'),
('block:replica-locations', 'SoftLayer.CLI.block.replication.locations:cli'),
('block:snapshot-cancel', 'SoftLayer.CLI.block.snapshot.cancel:cli'),
('block:snapshot-create', 'SoftLayer.CLI.block.snapshot.create:cli'),
('block:snapshot-delete', 'SoftLayer.CLI.block.snapshot.delete:cli'),
('block:snapshot-disable', 'SoftLayer.CLI.block.snapshot.disable:cli'),
('block:snapshot-set-notification', 'SoftLayer.CLI.block.snapshot.set_notify_status:cli'),
('block:snapshot-get-notification-status', 'SoftLayer.CLI.block.snapshot.get_notify_status:cli'),
('block:snapshot-enable', 'SoftLayer.CLI.block.snapshot.enable:cli'),
('block:snapshot-schedule-list', 'SoftLayer.CLI.block.snapshot.schedule_list:cli'),
('block:snapshot-list', 'SoftLayer.CLI.block.snapshot.list:cli'),
('block:snapshot-order', 'SoftLayer.CLI.block.snapshot.order:cli'),
('block:snapshot-restore', 'SoftLayer.CLI.block.snapshot.restore:cli'),
('block:volume-cancel', 'SoftLayer.CLI.block.cancel:cli'),
('block:volume-count', 'SoftLayer.CLI.block.count:cli'),
('block:volume-detail', 'SoftLayer.CLI.block.detail:cli'),
('block:volume-duplicate', 'SoftLayer.CLI.block.duplicate:cli'),
('block:volume-list', 'SoftLayer.CLI.block.list:cli'),
('block:volume-modify', 'SoftLayer.CLI.block.modify:cli'),
('block:volume-order', 'SoftLayer.CLI.block.order:cli'),
('block:volume-set-lun-id', 'SoftLayer.CLI.block.lun:cli'),
('block:volume-limits', 'SoftLayer.CLI.block.limit:cli'),
('block:volume-refresh', 'SoftLayer.CLI.block.refresh:cli'),
('block:volume-convert', 'SoftLayer.CLI.block.convert:cli'),
('block:volume-set-note', 'SoftLayer.CLI.block.set_note:cli'),
('email', 'SoftLayer.CLI.email'),
('email:list', 'SoftLayer.CLI.email.list:cli'),
('email:detail', 'SoftLayer.CLI.email.detail:cli'),
('email:edit', 'SoftLayer.CLI.email.edit:cli'),
('licenses', 'SoftLayer.CLI.licenses'),
('licenses:create-options', 'SoftLayer.CLI.licenses.create_options:cli'),
('event-log', 'SoftLayer.CLI.event_log'),
('event-log:get', 'SoftLayer.CLI.event_log.get:cli'),
('event-log:types', 'SoftLayer.CLI.event_log.types:cli'),
('file', 'SoftLayer.CLI.file'),
('file:access-authorize', 'SoftLayer.CLI.file.access.authorize:cli'),
('file:access-list', 'SoftLayer.CLI.file.access.list:cli'),
('file:access-revoke', 'SoftLayer.CLI.file.access.revoke:cli'),
('file:replica-failback', 'SoftLayer.CLI.file.replication.failback:cli'),
('file:replica-failover', 'SoftLayer.CLI.file.replication.failover:cli'),
('file:disaster-recovery-failover', 'SoftLayer.CLI.file.replication.disaster_recovery_failover:cli'),
('file:replica-order', 'SoftLayer.CLI.file.replication.order:cli'),
('file:replica-partners', 'SoftLayer.CLI.file.replication.partners:cli'),
('file:replica-locations', 'SoftLayer.CLI.file.replication.locations:cli'),
('file:snapshot-cancel', 'SoftLayer.CLI.file.snapshot.cancel:cli'),
('file:snapshot-create', 'SoftLayer.CLI.file.snapshot.create:cli'),
('file:snapshot-delete', 'SoftLayer.CLI.file.snapshot.delete:cli'),
('file:snapshot-disable', 'SoftLayer.CLI.file.snapshot.disable:cli'),
('file:snapshot-enable', 'SoftLayer.CLI.file.snapshot.enable:cli'),
('file:snapshot-set-notification', 'SoftLayer.CLI.file.snapshot.set_notify_status:cli'),
('file:snapshot-get-notification-status', 'SoftLayer.CLI.file.snapshot.get_notify_status:cli'),
('file:snapshot-schedule-list', 'SoftLayer.CLI.file.snapshot.schedule_list:cli'),
('file:snapshot-list', 'SoftLayer.CLI.file.snapshot.list:cli'),
('file:snapshot-order', 'SoftLayer.CLI.file.snapshot.order:cli'),
('file:snapshot-restore', 'SoftLayer.CLI.file.snapshot.restore:cli'),
('file:volume-cancel', 'SoftLayer.CLI.file.cancel:cli'),
('file:volume-count', 'SoftLayer.CLI.file.count:cli'),
('file:volume-detail', 'SoftLayer.CLI.file.detail:cli'),
('file:volume-duplicate', 'SoftLayer.CLI.file.duplicate:cli'),
('file:volume-list', 'SoftLayer.CLI.file.list:cli'),
('file:volume-modify', 'SoftLayer.CLI.file.modify:cli'),
('file:volume-order', 'SoftLayer.CLI.file.order:cli'),
('file:volume-limits', 'SoftLayer.CLI.file.limit:cli'),
('file:volume-refresh', 'SoftLayer.CLI.file.refresh:cli'),
('file:volume-convert', 'SoftLayer.CLI.file.convert:cli'),
('file:volume-set-note', 'SoftLayer.CLI.file.set_note:cli'),
('firewall', 'SoftLayer.CLI.firewall'),
('firewall:add', 'SoftLayer.CLI.firewall.add:cli'),
('firewall:cancel', 'SoftLayer.CLI.firewall.cancel:cli'),
('firewall:detail', 'SoftLayer.CLI.firewall.detail:cli'),
('firewall:edit', 'SoftLayer.CLI.firewall.edit:cli'),
('firewall:list', 'SoftLayer.CLI.firewall.list:cli'),
('globalip', 'SoftLayer.CLI.globalip'),
('globalip:assign', 'SoftLayer.CLI.globalip.assign:cli'),
('globalip:cancel', 'SoftLayer.CLI.globalip.cancel:cli'),
('globalip:create', 'SoftLayer.CLI.globalip.create:cli'),
('globalip:list', 'SoftLayer.CLI.globalip.list:cli'),
('globalip:unassign', 'SoftLayer.CLI.globalip.unassign:cli'),
('image', 'SoftLayer.CLI.image'),
('image:delete', 'SoftLayer.CLI.image.delete:cli'),
('image:detail', 'SoftLayer.CLI.image.detail:cli'),
('image:edit', 'SoftLayer.CLI.image.edit:cli'),
('image:list', 'SoftLayer.CLI.image.list:cli'),
('image:import', 'SoftLayer.CLI.image.import:cli'),
('image:export', 'SoftLayer.CLI.image.export:cli'),
('image:datacenter', 'SoftLayer.CLI.image.datacenter:cli'),
('ipsec', 'SoftLayer.CLI.vpn.ipsec'),
('ipsec:configure', 'SoftLayer.CLI.vpn.ipsec.configure:cli'),
('ipsec:detail', 'SoftLayer.CLI.vpn.ipsec.detail:cli'),
('ipsec:list', 'SoftLayer.CLI.vpn.ipsec.list:cli'),
('ipsec:subnet-add', 'SoftLayer.CLI.vpn.ipsec.subnet.add:cli'),
('ipsec:subnet-remove', 'SoftLayer.CLI.vpn.ipsec.subnet.remove:cli'),
('ipsec:translation-add', 'SoftLayer.CLI.vpn.ipsec.translation.add:cli'),
('ipsec:translation-remove', 'SoftLayer.CLI.vpn.ipsec.translation.remove:cli'),
('ipsec:translation-update', 'SoftLayer.CLI.vpn.ipsec.translation.update:cli'),
('ipsec:update', 'SoftLayer.CLI.vpn.ipsec.update:cli'),
('loadbal', 'SoftLayer.CLI.loadbal'),
('loadbal:detail', 'SoftLayer.CLI.loadbal.detail:cli'),
('loadbal:list', 'SoftLayer.CLI.loadbal.list:cli'),
('loadbal:health', 'SoftLayer.CLI.loadbal.health:cli'),
('loadbal:member-add', 'SoftLayer.CLI.loadbal.members:add'),
('loadbal:member-del', 'SoftLayer.CLI.loadbal.members:remove'),
('loadbal:l7policies', 'SoftLayer.CLI.loadbal.layer7_policy_list:policies'),
('loadbal:pool-add', 'SoftLayer.CLI.loadbal.pools:add'),
('loadbal:pool-edit', 'SoftLayer.CLI.loadbal.pools:edit'),
('loadbal:pool-del', 'SoftLayer.CLI.loadbal.pools:delete'),
('loadbal:l7pool-add', 'SoftLayer.CLI.loadbal.pools:l7pool_add'),
('loadbal:l7pool-del', 'SoftLayer.CLI.loadbal.pools:l7pool_del'),
('loadbal:order', 'SoftLayer.CLI.loadbal.order:order'),
('loadbal:order-options', 'SoftLayer.CLI.loadbal.order:order_options'),
('loadbal:cancel', 'SoftLayer.CLI.loadbal.order:cancel'),
('loadbal:ns-detail', 'SoftLayer.CLI.loadbal.ns_detail:cli'),
('loadbal:ns-list', 'SoftLayer.CLI.loadbal.ns_list:cli'),
('metadata', 'SoftLayer.CLI.metadata:cli'),
('nas', 'SoftLayer.CLI.nas'),
('nas:list', 'SoftLayer.CLI.nas.list:cli'),
('nas:credentials', 'SoftLayer.CLI.nas.credentials:cli'),
('licenses:create', 'SoftLayer.CLI.licenses.create:cli'),
('licenses:cancel', 'SoftLayer.CLI.licenses.cancel:cli'),
('object-storage', 'SoftLayer.CLI.object_storage'),
('object-storage:accounts', 'SoftLayer.CLI.object_storage.list_accounts:cli'),
('object-storage:endpoints', 'SoftLayer.CLI.object_storage.list_endpoints:cli'),
('object-storage:credential', 'SoftLayer.CLI.object_storage.credential:cli'),
('order', 'SoftLayer.CLI.order'),
('order:category-list', 'SoftLayer.CLI.order.category_list:cli'),
('order:item-list', 'SoftLayer.CLI.order.item_list:cli'),
('order:package-list', 'SoftLayer.CLI.order.package_list:cli'),
('order:place', 'SoftLayer.CLI.order.place:cli'),
('order:preset-list', 'SoftLayer.CLI.order.preset_list:cli'),
('order:package-locations', 'SoftLayer.CLI.order.package_locations:cli'),
('order:place-quote', 'SoftLayer.CLI.order.place_quote:cli'),
('order:quote-list', 'SoftLayer.CLI.order.quote_list:cli'),
('order:quote-detail', 'SoftLayer.CLI.order.quote_detail:cli'),
('order:quote-save', 'SoftLayer.CLI.order.quote_save:cli'),
('order:quote', 'SoftLayer.CLI.order.quote:cli'),
('order:lookup', 'SoftLayer.CLI.order.lookup:cli'),
('hardware', 'SoftLayer.CLI.hardware'),
('hardware:bandwidth', 'SoftLayer.CLI.hardware.bandwidth:cli'),
('hardware:cancel', 'SoftLayer.CLI.hardware.cancel:cli'),
('hardware:cancel-reasons', 'SoftLayer.CLI.hardware.cancel_reasons:cli'),
('hardware:create', 'SoftLayer.CLI.hardware.create:cli'),
('hardware:create-options', 'SoftLayer.CLI.hardware.create_options:cli'),
('hardware:detail', 'SoftLayer.CLI.hardware.detail:cli'),
('hardware:billing', 'SoftLayer.CLI.hardware.billing:cli'),
('hardware:edit', 'SoftLayer.CLI.hardware.edit:cli'),
('hardware:guests', 'SoftLayer.CLI.hardware.guests:cli'),
('hardware:list', 'SoftLayer.CLI.hardware.list:cli'),
('hardware:power-cycle', 'SoftLayer.CLI.hardware.power:power_cycle'),
('hardware:power-off', 'SoftLayer.CLI.hardware.power:power_off'),
('hardware:power-on', 'SoftLayer.CLI.hardware.power:power_on'),
('hardware:reboot', 'SoftLayer.CLI.hardware.power:reboot'),
('hardware:reload', 'SoftLayer.CLI.hardware.reload:cli'),
('hardware:credentials', 'SoftLayer.CLI.hardware.credentials:cli'),
('hardware:update-firmware', 'SoftLayer.CLI.hardware.update_firmware:cli'),
('hardware:reflash-firmware', 'SoftLayer.CLI.hardware.reflash_firmware:cli'),
('hardware:rescue', 'SoftLayer.CLI.hardware.power:rescue'),
('hardware:ready', 'SoftLayer.CLI.hardware.ready:cli'),
('hardware:toggle-ipmi', 'SoftLayer.CLI.hardware.toggle_ipmi:cli'),
('hardware:authorize-storage', 'SoftLayer.CLI.hardware.authorize_storage:cli'),
('hardware:dns-sync', 'SoftLayer.CLI.hardware.dns:cli'),
('hardware:storage', 'SoftLayer.CLI.hardware.storage:cli'),
('hardware:upgrade', 'SoftLayer.CLI.hardware.upgrade:cli'),
('hardware:sensor', 'SoftLayer.CLI.hardware.sensor:cli'),
('securitygroup', 'SoftLayer.CLI.securitygroup'),
('securitygroup:list', 'SoftLayer.CLI.securitygroup.list:cli'),
('securitygroup:detail', 'SoftLayer.CLI.securitygroup.detail:cli'),
('securitygroup:create', 'SoftLayer.CLI.securitygroup.create:cli'),
('securitygroup:edit', 'SoftLayer.CLI.securitygroup.edit:cli'),
('securitygroup:delete', 'SoftLayer.CLI.securitygroup.delete:cli'),
('securitygroup:rule-list', 'SoftLayer.CLI.securitygroup.rule:rule_list'),
('securitygroup:rule-add', 'SoftLayer.CLI.securitygroup.rule:add'),
('securitygroup:rule-edit', 'SoftLayer.CLI.securitygroup.rule:edit'),
('securitygroup:rule-remove', 'SoftLayer.CLI.securitygroup.rule:remove'),
('securitygroup:interface-list',
'SoftLayer.CLI.securitygroup.interface:interface_list'),
('securitygroup:interface-add',
'SoftLayer.CLI.securitygroup.interface:add'),
('securitygroup:interface-remove',
'SoftLayer.CLI.securitygroup.interface:remove'),
('securitygroup:event-log', 'SoftLayer.CLI.securitygroup.event_log:get_by_request_id'),
('sshkey', 'SoftLayer.CLI.sshkey'),
('sshkey:add', 'SoftLayer.CLI.sshkey.add:cli'),
('sshkey:remove', 'SoftLayer.CLI.sshkey.remove:cli'),
('sshkey:edit', 'SoftLayer.CLI.sshkey.edit:cli'),
('sshkey:list', 'SoftLayer.CLI.sshkey.list:cli'),
('sshkey:print', 'SoftLayer.CLI.sshkey.print:cli'),
('ssl', 'SoftLayer.CLI.ssl'),
('ssl:add', 'SoftLayer.CLI.ssl.add:cli'),
('ssl:download', 'SoftLayer.CLI.ssl.download:cli'),
('ssl:edit', 'SoftLayer.CLI.ssl.edit:cli'),
('ssl:list', 'SoftLayer.CLI.ssl.list:cli'),
('ssl:remove', 'SoftLayer.CLI.ssl.remove:cli'),
('subnet', 'SoftLayer.CLI.subnet'),
('subnet:cancel', 'SoftLayer.CLI.subnet.cancel:cli'),
('subnet:create', 'SoftLayer.CLI.subnet.create:cli'),
('subnet:edit', 'SoftLayer.CLI.subnet.edit:cli'),
('subnet:detail', 'SoftLayer.CLI.subnet.detail:cli'),
('subnet:list', 'SoftLayer.CLI.subnet.list:cli'),
('subnet:lookup', 'SoftLayer.CLI.subnet.lookup:cli'),
('subnet:edit-ip', 'SoftLayer.CLI.subnet.edit_ip:cli'),
('tags', 'SoftLayer.CLI.tags'),
('tags:cleanup', 'SoftLayer.CLI.tags.cleanup:cli'),
('tags:list', 'SoftLayer.CLI.tags.list:cli'),
('tags:set', 'SoftLayer.CLI.tags.set:cli'),
('tags:details', 'SoftLayer.CLI.tags.details:cli'),
('tags:delete', 'SoftLayer.CLI.tags.delete:cli'),
('tags:taggable', 'SoftLayer.CLI.tags.taggable:cli'),
('ticket', 'SoftLayer.CLI.ticket'),
('ticket:create', 'SoftLayer.CLI.ticket.create:cli'),
('ticket:detail', 'SoftLayer.CLI.ticket.detail:cli'),
('ticket:list', 'SoftLayer.CLI.ticket.list:cli'),
('ticket:update', 'SoftLayer.CLI.ticket.update:cli'),
('ticket:upload', 'SoftLayer.CLI.ticket.upload:cli'),
('ticket:subjects', 'SoftLayer.CLI.ticket.subjects:cli'),
('ticket:summary', 'SoftLayer.CLI.ticket.summary:cli'),
('ticket:attach', 'SoftLayer.CLI.ticket.attach:cli'),
('ticket:detach', 'SoftLayer.CLI.ticket.detach:cli'),
('user', 'SoftLayer.CLI.user'),
('user:list', 'SoftLayer.CLI.user.list:cli'),
('user:detail', 'SoftLayer.CLI.user.detail:cli'),
('user:permissions', 'SoftLayer.CLI.user.permissions:cli'),
('user:edit-permissions', 'SoftLayer.CLI.user.edit_permissions:cli'),
('user:notifications', 'SoftLayer.CLI.user.notifications:cli'),
('user:edit-notifications', 'SoftLayer.CLI.user.edit_notifications:cli'),
('user:edit-details', 'SoftLayer.CLI.user.edit_details:cli'),
('user:create', 'SoftLayer.CLI.user.create:cli'),
('user:delete', 'SoftLayer.CLI.user.delete:cli'),
('user:vpn-manual', 'SoftLayer.CLI.user.vpn_manual:cli'),
('user:vpn-subnet', 'SoftLayer.CLI.user.vpn_subnet:cli'),
('vlan', 'SoftLayer.CLI.vlan'),
('vlan:create', 'SoftLayer.CLI.vlan.create:cli'),
('vlan:create-options', 'SoftLayer.CLI.vlan.create_options:cli'),
('vlan:detail', 'SoftLayer.CLI.vlan.detail:cli'),
('vlan:edit', 'SoftLayer.CLI.vlan.edit:cli'),
('vlan:list', 'SoftLayer.CLI.vlan.list:cli'),
('vlan:cancel', 'SoftLayer.CLI.vlan.cancel:cli'),
('summary', 'SoftLayer.CLI.summary:cli'),
('report', 'SoftLayer.CLI.report'),
('report:bandwidth', 'SoftLayer.CLI.report.bandwidth:cli'),
('autoscale', 'SoftLayer.CLI.autoscale'),
('autoscale:list', 'SoftLayer.CLI.autoscale.list:cli'),
('autoscale:detail', 'SoftLayer.CLI.autoscale.detail:cli'),
('autoscale:scale', 'SoftLayer.CLI.autoscale.scale:cli'),
('autoscale:logs', 'SoftLayer.CLI.autoscale.logs:cli'),
('autoscale:tag', 'SoftLayer.CLI.autoscale.tag:cli'),
('autoscale:edit', 'SoftLayer.CLI.autoscale.edit:cli')
]
ALL_ALIASES = {
'hw': 'hardware',
'lb': 'loadbal',
'meta': 'metadata',
'my': 'metadata',
'sg': 'securitygroup',
'server': 'hardware',
'vm': 'virtual',
'vs': 'virtual',
'dh': 'dedicatedhost',
'pg': 'placementgroup',
}
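# Hedged sketch (not part of the upstream module): each route value above is a
# 'package.module' or 'package.module:attribute' spec that the CLI can load
# lazily. A minimal resolver could look like this; the name `resolve_route`
# is hypothetical.
import importlib
def resolve_route(spec):
    module_path, _, attr = spec.partition(':')
    module = importlib.import_module(module_path)      # import the module part
    return getattr(module, attr) if attr else module   # return the named attribute, else the module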
| allmightyspiff/softlayer-python | SoftLayer/CLI/routes.py | Python | mit | 21,040 |
import fnmatch
import os
from glob import glob
import numpy as np
from datetime import datetime as dt
import scipy as sp
from scipy import signal
from fastrms import fastrms
import matplotlib.pyplot as plt
nch = 64
def basename(val):
return os.path.basename(val)
def getAllFilepathsWith(dir, str):
return [y for x in os.walk(dir) \
for y in glob(os.path.join(x[0], '*'+str+'*')) \
if os.path.isfile(y)]
def getAllFilesWith(dir, str):
return [basename(y) for x in os.walk(dir) \
for y in glob(os.path.join(x[0], '*'+str+'*')) \
if os.path.isfile(y)] # return basename (file) for all files y in subfolders x recursively within dir (incl. itself) that contain str and are a file
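# Example (illustrative path): getAllFilepathsWith("/data/run1", "CapacitanceData")
# yields the full path of every file under /data/run1 whose name contains that
# substring; getAllFilesWith yields just the basenames.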
def get_data_channels(filename, events, remove_ch, diff_subs):
events["Condition"].append(filled(nch, np.nan)) # condition in channel vector
events["ConditionSubstrate"].append(filled(nch, np.nan)) # condition substrate in channel vector
events["Substrate"].append(filled(nch, np.nan)) # substrate in channel vector
events["ToRemove"].append(np.zeros(nch)) # to remove in channel vector
for icond, condition in enumerate(events["ConditionLabel"]): # check whether condition is in current file
condstr = "C"+"{0:02d}".format(icond+1) # build string for condition indicator
if condstr in filename:
##### Get Condition
ch = []
ch.append(int(filename.split(condstr,1)[1][1:3])) # take start channel from 2nd and 3rd position after condstr
ch.append(int(filename.split(condstr,1)[1][4:6])) # take end channel from 5th and 6th position after condstr
events["Condition"][-1][ch[0]-1:ch[1]] = icond # write which condition corresponds to channel
##### Get Substrate
### [FLAG]: What does this really do? What is Condition Substrate
        if diff_subs == 1 or diff_subs == 2:
            # even/odd channels within the condition range get different
            # substrate codes (the original assigned the same slice twice,
            # presumably a typo for the offset-by-one slice used below)
            events["ConditionSubstrate"][-1][ch[0]-1:ch[1]:2] \
                                                        = 2 * icond - 1
            events["ConditionSubstrate"][-1][ch[0]:ch[1]:2] \
                                                        = 2 * icond
events["Substrate"][-1][0:64:2]=1; # even channels
if events["SubstrateLabel"][0] \
== events["SubstrateLabel"][1]: # if both labels are the same
events["Substrate"][-1][1:65:2] = 0 # uneven channels
else:
events["Substrate"][-1][1:65:2] = 1 # uneven channels
##### Get channels to remove
events["ToRemove"][-1][sorted(remove_ch)] = 1 # which channels to remove
### [FLAG]: find out, if and why this is needed ==> DatabaseOffset
#Events.ChannelsToRemove{FileNameCounter+DatabaseOffset}=ChannelsToRemove;
#Events.Time{FileNameCounter+DatabaseOffset,1}=time;
return events
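# Example (hypothetical filename): for icond == 0 the marker is "C01", so a
# file named "...C01_01-32_CapacitanceData..." assigns condition 0 to channels
# 1-32 via the start/end channel digits that follow the marker.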
def filled(m, val):
X = np.empty(m)
X[:] = val
return X
def process_data(filepath, events, parameters):
"""
Parameters:
===========
filepath:
duration:
events:
remove_ch:
different_subs: (default: 0)
"""
duration = parameters["Duration"] # duration to analyze
print(duration)
remove_ch = parameters["remove_ch"] # channels to remove
diff_subs = parameters["different_subs"] # Default is 0 (No comparison between channels).
RMSThresh = parameters["RMSThresh"] # RMS threshold for bout detection
RMSWindow = parameters["RMSWindow"] # window size for root-mean-square
events["Condition"] = [] # list of condition vectors (len=#channels) per file TODO: make this a class type
events["ConditionSubstrate"] = [] # list of condition substrate vectors (len=#channels) per file
events["Substrate"] = [] # list of substrate vectors (len=#channels) per file
events["ToRemove"] = [] # list of "to remove" vectors (len=#channels) per file
events["Timestamp"] = [] # list of timestamps per file
events["Filename"] = [] # list of file names
for filename in getAllFilepathsWith(filepath, 'CapacitanceData'): # for all files in filepath containing 'CapacitanceData'
print(basename(filename))
events["Filename"].append(basename(filename)) # save file name without path
with open(filename, 'rb') as f: # with opening
cap_data = np.fromfile(f, dtype=np.ushort) # read binary data into numpy ndarray (1-dim.)
rows = cap_data.shape[0] # to shorten next line
            cap_data = (cap_data.reshape(nch, rows//nch, order='F').copy()).T   # reshape array into 64-dim. matrix and take the transpose (rows = time, cols = channels); // keeps the shape integral under Python 3
if np.isfinite(duration) and duration < cap_data.shape[0]:
cap_data = cap_data[:duration,:] # cut off data longer than duration
this_duration = duration # actual duration of experiment
else:
if duration > cap_data.shape[0]: # warning
print("Warning: data shorter than given duration")
this_duration = cap_data.shape[0] # duration is equal to number of rows in data
            cap_data[cap_data == -1] = 0                                        # NOTE: a no-op on unsigned data; the saturation sentinel is presumably 65535
timestamp = dt.strptime(filename[-19:], '%Y-%m-%dT%H_%S_%M') # timestamp of file
events["Timestamp"].append(timestamp) # timestamp in channel vector
##### Get Conditions and Substrates func
events = get_data_channels(filename, events, remove_ch, diff_subs) # see func above
##### Filtering
        filtered_traces = np.full(cap_data.shape, np.nan)
        krnlsz = 7                                                              # kernel size of the median filter (scipy.signal.medfilt requires an odd kernel, so the TODO value of 6 would raise)
for ind in range(cap_data.shape[1]): # for each channel
cap_data[:, ind] = sp.signal.medfilt(cap_data[:, ind], krnlsz) # apply median filter from scipy
for ind in range(cap_data.shape[1]): # for each channel
#print(ind) # print out which channel is currently filtered (TODO: progress bar)
this_ch = cap_data[:, ind] # current channel capacitance data time series
### defining a window for additional convolution of signal
span = 50 # span of convolving window
window = np.ones(span) / span # uniform window
filtered_traces[:, ind] = np.convolve(this_ch, window, 'same') # convolving time series with window
delta_filt = cap_data - filtered_traces # difference between filtered capacitance data and additionally convolved traces
### remove the edges
delta_filt[:span+1,:] = delta_filt[-span:,:] = 0
cap_data[:span+1,:] = cap_data[-span:,:] = 0
### get the root-mean-square power of the signal
cap_data_RMS = fastrms(delta_filt, RMSWindow, 1, 0)
    ### use Quiroga's method to find the RMS threshold
# Find positive events
RMSThrEvents = np.zeros(cap_data_RMS.shape)
for ind in range(cap_data_RMS.shape[1]):
RMSThrEvents[:, ind] = cap_data_RMS[:, ind] > RMSThresh # Array of timesteps when capacitance RMS is above threshold
dRMSThrEvents = np.diff(RMSThrEvents, axis=0)
RMSPosEvents = np.zeros(dRMSThrEvents.shape)
RMSNegEvents = np.zeros(dRMSThrEvents.shape)
#for ind in range(cap_data.shape[1]):
# plt.plot(RMSThrEvents[:, ind], 'b-')
#plt.show()
eventsInd, indPosEvents, indNegEvents, distEvents = [],[],[],[] # empty lists
for ind in range(RMSThrEvents.shape[1]):
eventsInd.append( np.nonzero(RMSThrEvents[:, ind])[0] ) # indices of RMS events above Threshold
RMSPosEvents[:, ind] = dRMSThrEvents[:, ind] > 0 # positive changes (event)
RMSNegEvents[:, ind] = dRMSThrEvents[:, ind] < 0 # negative changes (event)
indPosEvents.append( np.nonzero(RMSPosEvents[:, ind])[0] ) # index of positive event
indNegEvents.append( np.nonzero(RMSNegEvents[:, ind])[0] ) # index of negative event
        distEvents.append( indNegEvents[-1] - indPosEvents[-1] )                # bout durations: offset (negative-edge) index minus onset (positive-edge) index
    FDerivative = np.diff(delta_filt, axis=0)                                   # derivative of delta_filt along time (rows), matching the axis=0 diff above
    ## use Quiroga's method to find the signal
    #clear thrPos thrNeg PosEvents NegEvents
    notRMSPos = np.zeros_like(delta_filt)                                       # same shape as delta_filt (a row of zeros is padded in front)
    notRMSPos[1:, :] = np.logical_not(RMSPosEvents).astype(int)                 # rows after the first hold the logical not of RMSPosEvents
    IBIS = delta_filt * notRMSPos                                               # signal outside detected positive RMS events (inter-bout intervals)
## const threshold
    ## constant per-channel thresholds estimated from the inter-bout signal
    ## (translated from the MATLAB draft that used to sit in this loop)
    PosEvents = np.zeros((FDerivative.shape[1], FDerivative.shape[0]), dtype=bool)
    NegEvents = np.zeros((FDerivative.shape[1], FDerivative.shape[0]), dtype=bool)
    for ind in range(FDerivative.shape[1]):                                     # for each channel
        ConstThresPOS = np.max(np.diff(IBIS[:, ind]))                           # largest positive step outside RMS events
        ConstThresNEG = np.min(np.diff(IBIS[:, ind]))                           # largest negative step outside RMS events
        PosEvents[ind, :] = FDerivative[:, ind] > ConstThresPOS                 # candidate onsets
        NegEvents[ind, :] = FDerivative[:, ind] < ConstThresNEG                 # candidate offsets
        break                                                                   # DEBUG: stop after the first iteration for now
"""
##
##
TimeStamps=(1:size(RRfilteredTraces,1))./100;
## Assign zeros to all signals that are not defined as events
FFDerivative=zeros(size(FDerivative));
derivativeForFigure=FDerivative;
clear NE PE
for n=1:size(NegEvents,1)
NE=logical(NegEvents(n,:))';
PE=logical(PosEvents(n,:))';
FunDerivative=FDerivative(:,n);
FuckingDerivative=FDerivative(:,n);
FunDerivative(:,:)=0;
FunDerivative(NE)=FuckingDerivative(NE);
FunDerivative(PE)=FuckingDerivative(PE);
FFDerivative(:,n)=FunDerivative;
FDerivative=FFDerivative;
ChosenOnes=false(size(NegEvents));
ChosenOnes=ChosenOnes';
for nChanels=1:size(RRfilteredTraces,2)
if sum(abs(FDerivative(:,nChanels)))>10
disp(nChanels)
[~,locsPos]=findpeaks(FDerivative(:,nChanels),'minpeakdistance',ProximityWindow);
[~,locsNeg]=findpeaks(-1.*FDerivative(:,nChanels),'minpeakdistance',ProximityWindow);
ChosenOnes(locsPos,nChanels)=true;
ChosenOnes(locsNeg,nChanels)=true;
else
end
end
%% see what happened
PPosEvents=PosEvents&ChosenOnes';
NNegEvents=NegEvents&ChosenOnes';
%remove the filtered events from the derivative signal
for n=1:size(NNegEvents,1)
NE=NNegEvents(n,:)';
PE=PPosEvents(n,:)';
FunDerivative=FDerivative(:,n);
FuckingDerivative=FDerivative(:,n);
FunDerivative(:,:)=0;
FunDerivative(NE)=FuckingDerivative(NE);
FunDerivative(PE)=FuckingDerivative(PE);
FFDerivative(:,n)=FunDerivative;
end
FDerivative=FDerivative(1:max(size(PosEvents)),:);
RRfilteredTraces=RRfilteredTraces(1:max(size(PosEvents)),:);
RfilteredTraces=RfilteredTraces(1:max(size(PosEvents)),:);
TimeStamps=TimeStamps(:,1:max(size(PosEvents)));
test=test(1:max(size(PosEvents)),:);
#
#np.savez('events.npz', **events)
"""
"""
%% LOAD TimeStamps of Digital Ons(LEDs) and Catch Trials
% importAllOptoPadData_phototransistor
% importAllOptoPadData
%% While version
for nChannels=1:size(test,2)
clear trace PosEvents NegEvents EventCounter CurrentIndexUp
clear EventDuration indNeg indPos
trace=FFDerivative(:,nChannels);
PosEvents=find(PPosEvents(nChannels,:));
NegEvents=find(NNegEvents(nChannels,:));
EventCounter=0;
disp(nChannels)
if numel(PosEvents)>=2
CurrentIndexUp=PosEvents(1);
while CurrentIndexUp<size(trace,1)
offset=CurrentIndexUp;
% if the window doesnt go out of the range and if there is an event in the window
if (CurrentIndexUp+Window < numel(trace))
% if there are negative events in window
if find(NegEvents>CurrentIndexUp & NegEvents<CurrentIndexUp+Window)
% If there are negative events of the right size
if find(trace(CurrentIndexUp:CurrentIndexUp+Window)<=(trace(CurrentIndexUp)*-EqualityFactor),1,'first')>=MinWindow;
EventCounter=EventCounter+1;
indNeg(EventCounter)=find(trace(CurrentIndexUp:CurrentIndexUp+Window)<=(trace(CurrentIndexUp)*-EqualityFactor),1,'first')+offset-1;
indPos(EventCounter)=CurrentIndexUp;
EventDuration(EventCounter)=indNeg(EventCounter)-indPos(EventCounter);
% if there are positive events after this negative then
% update the CurrentIndexUp
if find(PosEvents>indNeg(EventCounter),1,'first')
CurrentIndexUp=PosEvents(find(PosEvents>indNeg(EventCounter),1,'first'));
else %% otherwise exit the while loop
CurrentIndexUp=size(trace,1);
end
else
if numel(PosEvents)<=(find(PosEvents==CurrentIndexUp)+1)
CurrentIndexUp=size(trace,1);
else
CurrentIndexUp=PosEvents(find(PosEvents==CurrentIndexUp)+1);
end
end
else
if numel(PosEvents)<=(find(PosEvents==CurrentIndexUp)+1)
CurrentIndexUp=size(trace,1);
else
CurrentIndexUp=PosEvents(find(PosEvents==CurrentIndexUp)+1);
end
end
else
% If there are negative events of the right size
if find(trace(CurrentIndexUp:end)<=(trace(CurrentIndexUp)*-EqualityFactor),1,'first')>=MinWindow;
EventCounter=EventCounter+1;
indNeg(EventCounter)=find(trace(CurrentIndexUp:end)<=(trace(CurrentIndexUp)*-EqualityFactor),1,'first')+offset-1;
indPos(EventCounter)=CurrentIndexUp;
EventDuration(EventCounter)=indNeg(EventCounter)-indPos(EventCounter);
% if there are positive events after this negative then
% update the CurrentIndexUp
if find(PosEvents>indNeg(EventCounter),1,'first')
CurrentIndexUp=PosEvents(find(PosEvents>indNeg(EventCounter),1,'first'));
else %% otherwise exit the while loop
CurrentIndexUp=size(trace,1);
end
else
if numel(PosEvents)<=(find(PosEvents==CurrentIndexUp)+1)
CurrentIndexUp=size(trace,1);
else
CurrentIndexUp=PosEvents(find(PosEvents==CurrentIndexUp)+1);
end
end
end
end
if exist('indPos','var')
Events.Ons{FileNameCounter+DatabaseOffset,nChannels}= indPos;
Events.Offs{FileNameCounter+DatabaseOffset,nChannels}= indNeg;
Events.Durations{FileNameCounter+DatabaseOffset,nChannels}= EventDuration;
Events.IFI{FileNameCounter+DatabaseOffset,nChannels}= indPos(2:end)-indNeg(1:end-1);
else
indNeg=0;
indPos=0;
EventDuration=0;
Events.Ons{FileNameCounter+DatabaseOffset,nChannels}=[];
Events.Offs{FileNameCounter+DatabaseOffset,nChannels}= [];
Events.Durations{FileNameCounter+DatabaseOffset,nChannels}= [];
Events.IFI{FileNameCounter+DatabaseOffset,nChannels}= [];
end
else
indNeg=0;
indPos=0;
EventDuration=0;
Events.Ons{FileNameCounter+DatabaseOffset,nChannels}=[];
Events.Offs{FileNameCounter+DatabaseOffset,nChannels}= [];
Events.Durations{FileNameCounter+DatabaseOffset,nChannels}= [];
Events.IFI{FileNameCounter+DatabaseOffset,nChannels}= [];
end
end
if removeDrift
nRMSEvents=cell(1,size(IndRMSDiffFoundEvents,2));
for n=1:size(IndRMSDiffFoundEvents,2)
for m=1:size(IndRMSDiffFoundEvents{1,n},1)
TrueRMSEvents{1,n}(m)=sum(Events.Ons{FileNameCounter+DatabaseOffset,n}>=IndRMSDiffFoundEvents{1,n}(m)&Events.Ons{FileNameCounter+DatabaseOffset,n}<=IndRMSDiffFoundEvents{2,n}(m));
IndRMSDiffFoundEvents{1,n}(TrueRMSEvents{1,n}<=1)=nan;
IndRMSDiffFoundEvents{2,n}(TrueRMSEvents{1,n}<=1)=nan;
IndRMSDiffFoundEvents{3,n}(TrueRMSEvents{1,n}<=1)=nan;
nRMSEvents{1,n}(m)=TrueRMSEvents{1,n}(m);
% nRMSEvents{1,n}(TrueRMSEvents{1,n}<=1)=nan;
end
end
end
for n=1:size(IndRMSDiffFoundEvents,2)
IndRMSDiffFoundEvents{1,n}=IndRMSDiffFoundEvents{1,n}(~isnan(IndRMSDiffFoundEvents{1,n}));
IndRMSDiffFoundEvents{2,n}=IndRMSDiffFoundEvents{2,n}(~isnan(IndRMSDiffFoundEvents{2,n}));
IndRMSDiffFoundEvents{3,n}=IndRMSDiffFoundEvents{3,n}(~isnan(IndRMSDiffFoundEvents{3,n}));
nRMSEvents{1,n}= nRMSEvents{1,n}(~isnan( nRMSEvents{1,n}));
end
%% save in Events structure
Events.RMSEventsOns(FileNameCounter+DatabaseOffset,1:size(IndRMSDiffFoundEvents,2))=IndRMSDiffFoundEvents(1,:);
Events.RMSEventsOffs(FileNameCounter+DatabaseOffset,1:size(IndRMSDiffFoundEvents,2))=IndRMSDiffFoundEvents(2,1:size(IndRMSDiffFoundEvents,2));
Events.RMSEventsDurs(FileNameCounter+DatabaseOffset,1:size(IndRMSDiffFoundEvents,2))=IndRMSDiffFoundEvents(3,1:size(IndRMSDiffFoundEvents,2));
Events.RMSEventsnEvents(FileNameCounter+DatabaseOffset,1:size(IndRMSDiffFoundEvents,2))=nRMSEvents;
%% merging 2 channels of each chamber
if MergeChannels==1
Ccounter=0;
for nCham=1:2:size(Events.Ons,2)
Ccounter=Ccounter+1;
EventsTemp.Ons{FileNameCounter+DatabaseOffset,Ccounter}=sort([Events.Ons{FileNameCounter+DatabaseOffset,nCham} Events.Ons{FileNameCounter+DatabaseOffset,nCham+1}]);
EventsTemp.Offs{FileNameCounter+DatabaseOffset,Ccounter}=sort([Events.Offs{FileNameCounter+DatabaseOffset,nCham} Events.Offs{FileNameCounter+DatabaseOffset,nCham+1}]);
EventsTemp.Durations{FileNameCounter+DatabaseOffset,Ccounter}=EventsTemp.Offs{FileNameCounter+DatabaseOffset,Ccounter}-EventsTemp.Ons{FileNameCounter+DatabaseOffset,Ccounter};
EventsTemp.IFI{FileNameCounter+DatabaseOffset,Ccounter}= EventsTemp.Ons{FileNameCounter+DatabaseOffset,Ccounter}(2:end)- EventsTemp.Offs{FileNameCounter+DatabaseOffset,Ccounter}(1:end-1);
end
else
end
if MergeChannels==1
Ccounter=0;
for nCham=1:2:size(Events.Ons,2)
Ccounter=Ccounter+1;
EventsTemp.RMSEventsOns{FileNameCounter+DatabaseOffset,Ccounter}=sort([Events.RMSEventsOns{FileNameCounter+DatabaseOffset,nCham}; Events.RMSEventsOns{FileNameCounter+DatabaseOffset,nCham+1}]);
EventsTemp.RMSEventsOffs{FileNameCounter+DatabaseOffset,Ccounter}=sort([Events.RMSEventsOffs{FileNameCounter+DatabaseOffset,nCham}; Events.RMSEventsOffs{FileNameCounter+DatabaseOffset,nCham+1}]);
EventsTemp.RMSEventsDurs{FileNameCounter+DatabaseOffset,Ccounter}=EventsTemp.RMSEventsOffs{FileNameCounter+DatabaseOffset,Ccounter}-EventsTemp.RMSEventsOns{FileNameCounter+DatabaseOffset,Ccounter};
EventsTemp.RMSEventsnEvents{FileNameCounter+DatabaseOffset,Ccounter}=zeros(size(EventsTemp.RMSEventsOns{FileNameCounter+DatabaseOffset,Ccounter}));
for i=1:length(EventsTemp.RMSEventsOns{FileNameCounter+DatabaseOffset,Ccounter})
EventsTemp.RMSEventsnEvents{FileNameCounter+DatabaseOffset,Ccounter}(i)= sum(EventsTemp.Ons{FileNameCounter+DatabaseOffset,Ccounter}>=EventsTemp.RMSEventsOns{FileNameCounter+DatabaseOffset,Ccounter}(i)&EventsTemp.Ons{FileNameCounter+DatabaseOffset,Ccounter}<=EventsTemp.RMSEventsOffs{FileNameCounter+DatabaseOffset,Ccounter}(i));
end
end
else
end
clear IndRMSDiffFoundEvents
if BonsaiStyleActivityBouts
BonsaiAnalysis;
end
%% RawData Figures
% ACTIVITY BOUTS
% Events.SpillQuality(FileNameCounter+DatabaseOffset,64)=nan;
% Events.RawDataOnActBouts{FileNameCounter+DatabaseOffset,64}=[];
for i=1:size(Events.RMSEventsOns,2)
numelements=numel(Events.RMSEventsOns{FileNameCounter+DatabaseOffset,i});
Events.SpillQuality{FileNameCounter+DatabaseOffset,i}=(sum(test(:,i)>=4095))./size(test,1);
end
if MergeChannels==1
counter=0;
for nCham=1:2:size(Events.SpillQuality,2)
counter=counter+1;
EventsTemp.SpillQuality{FileNameCounter+DatabaseOffset,counter}=max([Events.SpillQuality{FileNameCounter+DatabaseOffset,nCham} Events.SpillQuality{FileNameCounter+DatabaseOffset,nCham+1}]);
end
end
%% SIP FORMS
end
if MergeChannels==1&&isfield(Events,'CatchTrial')
for FileNameCounter=1:size(Events.CatchTrial,1)
Ccounter=0;
for nCham=1:2:size(Events.Ons,2)
Ccounter=Ccounter+1;
EventsTemp.CatchTrial{FileNameCounter+DatabaseOffset,Ccounter}=sort([Events.CatchTrial{FileNameCounter+DatabaseOffset,nCham}; Events.CatchTrial{FileNameCounter+DatabaseOffset,nCham+1}]);
EventsTemp.DigitalEventsOns{FileNameCounter+DatabaseOffset,Ccounter}=sort([Events.DigitalEventsOns{FileNameCounter+DatabaseOffset,nCham}; Events.DigitalEventsOns{FileNameCounter+DatabaseOffset,nCham+1}]);
EventsTemp.DigitalEventsOffs{FileNameCounter+DatabaseOffset,Ccounter}=sort([Events.DigitalEventsOffs{FileNameCounter+DatabaseOffset,nCham}; Events.DigitalEventsOffs{FileNameCounter+DatabaseOffset,nCham+1}]);
EventsTemp.ConditioningTrials{FileNameCounter+DatabaseOffset,Ccounter}=sort([Events.ConditioningTrials{FileNameCounter+DatabaseOffset,nCham}; Events.ConditioningTrials{FileNameCounter+DatabaseOffset,nCham+1}]);
end
end
else
end
if MergeChannels==1
for i=1:length(Events.Condition)
EventsTemp.Condition{i}=Events.Condition{i}(1:2:end);
EventsTemp.Substrate{i}=Events.Substrate{i}(1:2:end);
EventsTemp.ToRemove{i}=Events.ToRemove{i}(1:2:end);
% EventsTemp.Condition_Substrate{i}=Events.Condition_Substrate{i}(1:2:end);
end
EventsTemp.ConditionLabel=Events.ConditionLabel;
EventsTemp.SubstrateLabel=Events.SubstrateLabel;
% EventsTemp.ExperimentData=Events.ExperimentData;
% EventsTemp.Diff_Subs_Labels=Events.Diff_Subs_Labels;
EventsTemp2=Events;
clear Events
Events=EventsTemp;
clear EventsTemp
end
cd
% stop
% Settings=['Dur','RemoveSubstrateNoneaters','RemoveGlobalNoneaters','removeDrift','BonsaiStyleActivityBouts','TimeWindow','NonEaterThreshold','RemoveSpillQuality','RemoveSpillQualityThreshold','ConditionsToTake','sipThreshold','MergeChannels'];
save(DataFilename2,'Events','Dur','RemoveSubstrateNoneaters','RemoveGlobalNoneaters','removeDrift','BonsaiStyleActivityBouts','TimeWindow','NonEaterThreshold','RemoveSpillQuality','RemoveSpillQualityThreshold','ConditionsToTake','sipThreshold','MergeChannels','Different_Subs','-mat','-v7.3')
close all
"""
| degoldschmidt/ribeirolab-codeconversion | python/flyPAD/process_data.py | Python | gpl-3.0 | 25,721 |
import itertools
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_instance_launch_and_terminate():
conn = boto.connect_ec2('the_key', 'the_secret')
reservation = conn.run_instances('ami-1234abcd')
instance = reservation.instances[0]
instance.add_tag("a key", "some value")
tags = conn.get_all_tags()
tag = tags[0]
tag.name.should.equal("a key")
tag.value.should.equal("some value")
instance.remove_tag("a key")
conn.get_all_tags().should.have.length_of(0)
@mock_ec2
def test_instance_launch_and_retrieve_all_instances():
conn = boto.connect_ec2('the_key', 'the_secret')
reservation = conn.run_instances('ami-1234abcd')
instance = reservation.instances[0]
instance.add_tag("a key", "some value")
chain = itertools.chain.from_iterable
existing_instances = list(chain([res.instances for res in conn.get_all_instances()]))
existing_instances.should.have.length_of(1)
existing_instance = existing_instances[0]
existing_instance.tags["a key"].should.equal("some value")
| andresriancho/moto | tests/test_ec2/test_tags.py | Python | apache-2.0 | 1,078 |
from flask.views import View
from flask import Response
from urllib2 import urlopen
from gpv import utils
class TreeMapPrice(View):
def dispatch_request(self, komuna, year):
api_base_url = utils.get_api_url()
url = "%s/%s/treemap/price/%d" % (api_base_url, komuna, year)
result = urlopen(url).read()
# Build response object.
resp = Response(
response=result, mimetype='application/json')
# Return response.
return resp
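# Hedged usage sketch (app object and endpoint name assumed): Flask class-based
# views are registered through as_view(), e.g.
# app.add_url_rule('/<komuna>/treemap/price/<int:year>',
#                  view_func=TreeMapPrice.as_view('treemap_price'))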
| opendatakosovo/municipality-procurement-visualizer | gpv/views/json/treemapprice.py | Python | gpl-2.0 | 497 |
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.urls import reverse
from django.urls import reverse_lazy
from django.utils.translation import gettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon import tables
from horizon.utils import memoized
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.utils import settings as setting_utils
from openstack_dashboard.dashboards.identity.roles \
import forms as project_forms
from openstack_dashboard.dashboards.identity.roles \
import tables as project_tables
class IndexView(tables.DataTableView):
table_class = project_tables.RolesTable
page_title = _("Roles")
def needs_filter_first(self, table):
return self._needs_filter_first
def get_data(self):
roles = []
filters = self.get_filters()
self._needs_filter_first = False
if policy.check((("identity", "identity:list_roles"),),
self.request):
            # If FILTER_DATA_FIRST is set for identity.roles and no
            # filters have been selected yet, require search criteria
            # before fetching and return an empty list in the meantime
if (setting_utils.get_dict_config(
'FILTER_DATA_FIRST', 'identity.roles') and not filters):
self._needs_filter_first = True
return roles
try:
roles = api.keystone.role_list(self.request,
filters=filters)
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve roles list.'))
else:
msg = _("Insufficient privilege level to view role information.")
messages.info(self.request, msg)
return roles
class UpdateView(forms.ModalFormView):
template_name = 'identity/roles/update.html'
form_id = "update_role_form"
form_class = project_forms.UpdateRoleForm
submit_label = _("Update Role")
submit_url = "horizon:identity:roles:update"
success_url = reverse_lazy('horizon:identity:roles:index')
page_title = _("Update Role")
@memoized.memoized_method
def get_object(self):
try:
return api.keystone.role_get(self.request, self.kwargs['role_id'])
except Exception:
redirect = reverse("horizon:identity:roles:index")
exceptions.handle(self.request,
_('Unable to update role.'),
redirect=redirect)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
args = (self.get_object().id,)
context['submit_url'] = reverse(self.submit_url, args=args)
return context
def get_initial(self):
role = self.get_object()
return {'id': role.id,
'name': role.name}
class CreateView(forms.ModalFormView):
template_name = 'identity/roles/create.html'
form_id = "create_role_form"
form_class = project_forms.CreateRoleForm
submit_label = _("Create Role")
submit_url = reverse_lazy("horizon:identity:roles:create")
success_url = reverse_lazy('horizon:identity:roles:index')
page_title = _("Create Role")
| openstack/horizon | openstack_dashboard/dashboards/identity/roles/views.py | Python | apache-2.0 | 3,930 |
import latus.preferences
import latus.nodedb
import latus.util
import latus.folders
def create(app_data_folder, cloud_folder, latus_folder):
node_id = latus.util.new_node_id()
pref = latus.preferences.Preferences(app_data_folder)
pref.set_cloud_root(cloud_folder)
pref.set_latus_folder(latus_folder)
pref.set_node_id(node_id)
pref.set_new_keys()
# make the node db
cloud_folders = latus.folders.CloudFolders(cloud_folder)
    node_db = latus.nodedb.NodeDB(cloud_folders.nodes, node_id, pref.get_public_key(), True)
| latusrepo/latus | latus/node_management.py | Python | gpl-3.0 | 548 |
# generator snippet used to build the table below (kept for reference):
# for line in tlines:
#     ss = line.split()
#     print "(%-10r,%7r,%12r):( %.1f,"%(ss[0],ss[1],ss[2],float(ss[3]))
yoffset_delta ={ # from f2offsets.fits for yoffset and from nsappwave.fits for
# Delta (original dispersion)
#(grism filter slit (yoffset, Delta)
('JH_G5801', 'JH', '1pix-slit'):( -75.0, -6.667),
('JH_G5801', 'JH', '2pix-slit'):( -75.0, -6.667),
('JH_G5801', 'JH', '3pix-slit'):( -75.0, -6.667),
('JH_G5801', 'JH', '4pix-slit'):( -75.0, -6.667),
('JH_G5801', 'JH', '6pix-slit'):( -75.0, -6.667),
('JH_G5801', 'JH', '8pix-slit'):( -75.0, -6.667),
('JH_G5801', 'JH', 'mos'):( -75.0, -6.667),
('HK_G5802', 'JH', '1pix-slit'):( 34.0, -4.0),
('HK_G5802', 'JH', '2pix-slit'):( 34.0, -4.0),
('HK_G5802', 'JH', '3pix-slit'):( 34.0, -4.0),
('HK_G5802', 'JH', '4pix-slit'):( 34.0, -4.0),
('HK_G5802', 'JH', '6pix-slit'):( 34.0, -4.0),
('HK_G5802', 'JH', '8pix-slit'):( 34.0, -4.0),
('HK_G5802', 'JH', 'mos'):( 34.0, -4.0),
('HK_G5802', 'HK', '1pix-slit'):( -100.0, -7.826),
('HK_G5802', 'HK', '2pix-slit'):( -100.0, -7.826),
('HK_G5802', 'HK', '3pix-slit'):( -100.0, -7.826),
('HK_G5802', 'HK', '4pix-slit'):( -100.0, -7.826),
('HK_G5802', 'HK', '6pix-slit'):( -100.0, -7.826),
('HK_G5802', 'HK', '8pix-slit'):( -100.0, -7.826),
('HK_G5802', 'HK', 'mos'):( -100.0, -7.826),
('R3K_G5803', 'Y', '1pix-slit'):( 610.0, -1.642),
('R3K_G5803', 'Y', '2pix-slit'):( 610.0, -1.642),
('R3K_G5803', 'Y', '3pix-slit'):( 610.0, -1.642),
('R3K_G5803', 'Y', '4pix-slit'):( 610.0, -1.642),
('R3K_G5803', 'Y', '6pix-slit'):( 610.0, -1.642),
('R3K_G5803', 'Y', '8pix-slit'):( 610.0, -1.642),
('R3K_G5803', 'Y', 'mos'):( 610.0, -1.642),
('R3K_G5803', 'J-lo', '1pix-slit'):( -20.0, -1.667),
('R3K_G5803', 'J-lo', '2pix-slit'):( -20.0, -1.667),
('R3K_G5803', 'J-lo', '3pix-slit'):( -20.0, -1.667),
('R3K_G5803', 'J-lo', '4pix-slit'):( -20.0, -1.667),
('R3K_G5803', 'J-lo', '6pix-slit'):( -20.0, -1.667),
('R3K_G5803', 'J-lo', '8pix-slit'):( -20.0, -1.667),
('R3K_G5803', 'J-lo', 'mos'):( -20.0, -1.667),
('R3K_G5803', 'J', '1pix-slit'):( 350.0, -2.022),
('R3K_G5803', 'J', '2pix-slit'):( 350.0, -2.022),
('R3K_G5803', 'J', '3pix-slit'):( 350.0, -2.022),
('R3K_G5803', 'J', '4pix-slit'):( 350.0, -2.022),
('R3K_G5803', 'J', '6pix-slit'):( 350.0, -2.022),
('R3K_G5803', 'J', '8pix-slit'):( 350.0, -2.022),
('R3K_G5803', 'J', 'mos'):( 350.0, -2.022),
('R3K_G5803', 'H', '1pix-slit'):( 175.0, -2.609),
('R3K_G5803', 'H', '2pix-slit'):( 175.0, -2.609),
('R3K_G5803', 'H', '3pix-slit'):( 175.0, -2.609),
('R3K_G5803', 'H', '4pix-slit'):( 175.0, -2.609),
('R3K_G5803', 'H', '6pix-slit'):( 175.0, -2.609),
('R3K_G5803', 'H', '8pix-slit'):( 175.0, -2.609),
('R3K_G5803', 'H', 'mos'):( 175.0, -2.609),
('R3K_G5803', 'Ks', '1pix-slit'):( 126.0, -3.462),
('R3K_G5803', 'Ks', '2pix-slit'):( 126.0, -3.462),
('R3K_G5803', 'Ks', '3pix-slit'):( 126.0, -3.462),
('R3K_G5803', 'Ks', '4pix-slit'):( 126.0, -3.462),
('R3K_G5803', 'Ks', '6pix-slit'):( 126.0, -3.462),
('R3K_G5803', 'Ks', '8pix-slit'):( 126.0, -3.462),
('R3K_G5803', 'Ks', 'mos'):( 126.0, -3.462)
}
#f2filters.fits
#filter center width cuton80 cutoff80 cuton50 cutoff50 transmission (units in microns)
filter_table = {
'Y': (1.020 ,0.0894 ,0.985 ,1.066 ,0.969 ,1.068 ,'Y_G0811.dat'),
'J-lo': (1.122 ,0.1323 ,1.056 ,1.189 ,1.048 ,1.192 ,'Jlow_G0801.dat'),
'J': (1.256 ,0.1512 ,1.178 ,1.328 ,1.175 ,1.333 ,'J_G0802.dat'),
'H': (1.631 ,0.2741 ,1.490 ,1.767 ,1.486 ,1.775 ,'H_G0803.dat'),
'Ks': (2.157 ,0.3177 ,1.997 ,2.313 ,1.991 ,2.320 ,'Ks_G0804.dat'),
'JH': (1.390 ,0.7200 ,1.163 ,1.774 ,0.970 ,1.805 ,'JH_G0809.dat'),
'HK': (1.871 ,1.0670 ,1.308 ,2.401 ,1.261 ,2.511 ,'HK_G0806.dat'),
}
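# Hedged usage sketch (keys and values taken from the tables above):
# yoff, delta = yoffset_delta[('HK_G5802', 'HK', 'mos')]    # (-100.0, -7.826)
# center, width = filter_table['Ks'][:2]                    # (2.157, 0.3177) microns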
| pyrrho314/recipesystem | trunk/dontload-astrodata_Gemini/ADCONFIG_Gemini/lookups/F2/F2offsets.py | Python | mpl-2.0 | 3,831 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import pickle
import glob
import os
import sys
import paddle.v2 as paddle
import paddle.v2.dataset.common as common
embsize = 32
hiddensize = 256
N = 5
# NOTE: You need to generate and split dataset then put it under your cloud storage.
# then you can use different size of embedding.
# NOTE: must change this to your own username on paddlecloud.
USERNAME = "your-username"
DC = os.getenv("PADDLE_CLOUD_CURRENT_DATACENTER")
common.DATA_HOME = "/pfs/%s/home/%s" % (DC, USERNAME)
TRAIN_FILES_PATH = os.path.join(common.DATA_HOME, "imikolov",
"imikolov_train-*")
WORD_DICT_PATH = os.path.join(common.DATA_HOME, "imikolov/word_dict.pickle")
TRAINER_ID = int(os.getenv("PADDLE_INIT_TRAINER_ID", "-1"))
TRAINER_COUNT = int(os.getenv("PADDLE_INIT_NUM_GRADIENT_SERVERS", "-1"))
def prepare_dataset():
word_dict = paddle.dataset.imikolov.build_dict()
with open(WORD_DICT_PATH, "w") as fn:
pickle.dump(word_dict, fn)
# NOTE: convert should be done by other job.
def cluster_reader_recordio(trainer_id, trainer_count):
'''
read from cloud dataset which is stored as recordio format
each trainer will read a subset of files of the whole dataset.
'''
import recordio
def reader():
file_list = glob.glob(TRAIN_FILES_PATH)
file_list.sort()
my_file_list = []
# read files for current trainer_id
for idx, f in enumerate(file_list):
if idx % trainer_count == trainer_id:
my_file_list.append(f)
for f in my_file_list:
print "processing ", f
reader = recordio.reader(f)
record_raw = reader.read()
while record_raw:
yield pickle.loads(record_raw)
record_raw = reader.read()
reader.close()
return reader
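# Example: with trainer_count == 4 and sorted files f0..f7, trainer_id 1 reads
# f1 and f5 -- the indices where idx % trainer_count == trainer_id.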
def wordemb(inlayer):
wordemb = paddle.layer.table_projection(
input=inlayer,
size=embsize,
param_attr=paddle.attr.Param(
name="_proj",
initial_std=0.001,
learning_rate=1,
l2_rate=0, ))
return wordemb
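# Note: every wordemb() call points param_attr at the same name ("_proj"), so
# the five context words appear to share a single embedding table (parameters
# with the same name are shared in the v2 API).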
def main():
paddle.init(use_gpu=False, trainer_count=1)
# load dict from cloud file
with open(WORD_DICT_PATH) as fn:
word_dict = pickle.load(fn)
dict_size = len(word_dict)
firstword = paddle.layer.data(
name="firstw", type=paddle.data_type.integer_value(dict_size))
secondword = paddle.layer.data(
name="secondw", type=paddle.data_type.integer_value(dict_size))
thirdword = paddle.layer.data(
name="thirdw", type=paddle.data_type.integer_value(dict_size))
fourthword = paddle.layer.data(
name="fourthw", type=paddle.data_type.integer_value(dict_size))
nextword = paddle.layer.data(
name="fifthw", type=paddle.data_type.integer_value(dict_size))
Efirst = wordemb(firstword)
Esecond = wordemb(secondword)
Ethird = wordemb(thirdword)
Efourth = wordemb(fourthword)
contextemb = paddle.layer.concat(input=[Efirst, Esecond, Ethird, Efourth])
hidden1 = paddle.layer.fc(input=contextemb,
size=hiddensize,
act=paddle.activation.Sigmoid(),
layer_attr=paddle.attr.Extra(drop_rate=0.5),
bias_attr=paddle.attr.Param(learning_rate=2),
param_attr=paddle.attr.Param(
initial_std=1. / math.sqrt(embsize * 8),
learning_rate=1))
predictword = paddle.layer.fc(input=hidden1,
size=dict_size,
bias_attr=paddle.attr.Param(learning_rate=2),
act=paddle.activation.Softmax())
def event_handler(event):
if isinstance(event, paddle.event.EndIteration):
if event.batch_id % 100 == 0:
result = trainer.test(
paddle.batch(
# NOTE: if you're going to use cluster test files,
# prepare them on the storage first
paddle.dataset.imikolov.test(word_dict, N),
32))
print "Pass %d, Batch %d, Cost %f, %s, Testing metrics %s" % (
event.pass_id, event.batch_id, event.cost, event.metrics,
result.metrics)
cost = paddle.layer.classification_cost(input=predictword, label=nextword)
parameters = paddle.parameters.create(cost)
adam_optimizer = paddle.optimizer.Adam(
learning_rate=3e-3,
regularization=paddle.optimizer.L2Regularization(8e-4))
trainer = paddle.trainer.SGD(cost, parameters, adam_optimizer)
trainer.train(
paddle.batch(cluster_reader_recordio(TRAINER_ID, TRAINER_COUNT), 32),
num_passes=30,
event_handler=event_handler)
if __name__ == '__main__':
usage = "python train.py [prepare|train]"
if len(sys.argv) != 2:
print usage
exit(1)
if TRAINER_ID == -1 or TRAINER_COUNT == -1:
print "no cloud environ found, must run on cloud"
exit(1)
if sys.argv[1] == "prepare":
prepare_dataset()
elif sys.argv[1] == "train":
main()
| PaddlePaddle/cloud | demo/word2vec/train.py | Python | apache-2.0 | 5,940 |
###############################################################################
# SteadyLogSpiralPotential: a steady-state spiral potential
###############################################################################
import numpy
from ..util import conversion
from .planarPotential import planarPotential
_degtorad= numpy.pi/180.
class SteadyLogSpiralPotential(planarPotential):
"""Class that implements a steady-state spiral potential
.. math::
        \\Phi(R,\\phi) = \\frac{\\mathrm{amp}\\times A}{\\alpha}\\,\\cos\\left(\\alpha\\,\\ln R - m\\,(\\phi-\\Omega_s\\,t-\\gamma)\\right)
    Can be grown in a similar way as the DehnenBarPotential, but using :math:`T_s = 2\\pi/\\Omega_s` to normalize :math:`t_{\\mathrm{form}}` and :math:`T_{\\mathrm{steady}}`.
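    During growth, for :math:`t_{\\mathrm{form}} < t < t_{\\mathrm{steady}}` the amplitude is multiplied by the smooth switch
    .. math::
        s(\\xi) = \\frac{3}{16}\\,\\xi^5-\\frac{5}{8}\\,\\xi^3+\\frac{15}{16}\\,\\xi+\\frac{1}{2}\\,,\\qquad \\xi = \\frac{2\\,(t-t_{\\mathrm{form}})}{t_{\\mathrm{steady}}-t_{\\mathrm{form}}}-1
    (the same form evaluated in the methods below).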
"""
def __init__(self,amp=1.,omegas=0.65,A=-0.035,
alpha=-7.,m=2,gamma=numpy.pi/4.,p=None,
tform=None,tsteady=None,ro=None,vo=None):
"""
NAME:
__init__
PURPOSE:
initialize a logarithmic spiral potential
INPUT:
amp - amplitude to be applied to the potential (default:
1., A below)
gamma - angle between sun-GC line and the line connecting the peak of the spiral pattern at the Solar radius (in rad; default=45 degree; or can be Quantity)
A - amplitude (alpha*potential-amplitude; default=0.035; can be Quantity
omegas= - pattern speed (default=0.65; can be Quantity)
m= number of arms
Either provide:
a) alpha=
b) p= pitch angle (rad; can be Quantity)
tform - start of spiral growth / spiral period (default: -Infinity)
tsteady - time from tform at which the spiral is fully grown / spiral period (default: 2 periods)
OUTPUT:
(none)
HISTORY:
2011-03-27 - Started - Bovy (NYU)
"""
planarPotential.__init__(self,amp=amp,ro=ro,vo=vo)
gamma= conversion.parse_angle(gamma)
p= conversion.parse_angle(p)
A= conversion.parse_energy(A,vo=self._vo)
omegas= conversion.parse_frequency(omegas,ro=self._ro,vo=self._vo)
self._omegas= omegas
self._A= A
self._m= m
self._gamma= gamma
if not p is None:
self._alpha= self._m/numpy.tan(p)
else:
self._alpha= alpha
self._ts= 2.*numpy.pi/self._omegas
if not tform is None:
self._tform= tform*self._ts
else:
self._tform= None
if not tsteady is None:
self._tsteady= self._tform+tsteady*self._ts
else:
if self._tform is None: self._tsteady= None
else: self._tsteady= self._tform+2.*self._ts
self.hasC= True
def _evaluate(self,R,phi=0.,t=0.):
"""
NAME:
_evaluate
PURPOSE:
evaluate the potential at R,phi,t
INPUT:
R - Galactocentric cylindrical radius
phi - azimuth
t - time
OUTPUT:
Phi(R,phi,t)
HISTORY:
2011-03-27 - Started - Bovy (NYU)
"""
if not self._tform is None:
if t < self._tform:
smooth= 0.
elif t < self._tsteady:
deltat= t-self._tform
xi= 2.*deltat/(self._tsteady-self._tform)-1.
smooth= (3./16.*xi**5.-5./8*xi**3.+15./16.*xi+.5)
else: #spiral is fully on
smooth= 1.
else:
smooth= 1.
return smooth*self._A/self._alpha*numpy.cos(self._alpha*numpy.log(R)
-self._m*(phi-self._omegas*t
-self._gamma))
def _Rforce(self,R,phi=0.,t=0.):
"""
NAME:
_Rforce
PURPOSE:
evaluate the radial force for this potential
INPUT:
R - Galactocentric cylindrical radius
phi - azimuth
t - time
OUTPUT:
the radial force
HISTORY:
2010-11-24 - Written - Bovy (NYU)
"""
if not self._tform is None:
if t < self._tform:
smooth= 0.
elif t < self._tsteady:
deltat= t-self._tform
xi= 2.*deltat/(self._tsteady-self._tform)-1.
smooth= (3./16.*xi**5.-5./8*xi**3.+15./16.*xi+.5)
else: #spiral is fully on
smooth= 1.
else:
smooth= 1.
return smooth*self._A/R*numpy.sin(self._alpha*numpy.log(R)
-self._m*(phi-self._omegas*t
-self._gamma))
def _phiforce(self,R,phi=0.,t=0.):
"""
NAME:
_phiforce
PURPOSE:
evaluate the azimuthal force for this potential
INPUT:
R - Galactocentric cylindrical radius
phi - azimuth
t - time
OUTPUT:
the azimuthal force
HISTORY:
2010-11-24 - Written - Bovy (NYU)
"""
if not self._tform is None:
if t < self._tform:
smooth= 0.
elif t < self._tsteady:
deltat= t-self._tform
xi= 2.*deltat/(self._tsteady-self._tform)-1.
smooth= (3./16.*xi**5.-5./8*xi**3.+15./16.*xi+.5)
else: #spiral is fully on
smooth= 1.
else:
smooth= 1.
return -smooth*self._A/self._alpha*self._m*numpy.sin(self._alpha*numpy.log(R)
-self._m*(phi
-self._omegas*t
-self._gamma))
def wavenumber(self,R):
"""
NAME:
wavenumber
PURPOSE:
return the wavenumber at radius R (d f(R)/ d R in Phi_a(R) = F(R) e^[i f(R)]; see Binney & Tremaine 2008)
INPUT:
R - Cylindrical radius
OUTPUT:
wavenumber at R
HISTORY:
2014-08-23 - Written - Bovy (IAS)
"""
return self._alpha/R
def OmegaP(self):
"""
NAME:
OmegaP
PURPOSE:
return the pattern speed
INPUT:
(none)
OUTPUT:
pattern speed
HISTORY:
2011-10-10 - Written - Bovy (IAS)
"""
return self._omegas
def m(self):
"""
NAME:
m
PURPOSE:
return the number of arms
INPUT:
(none)
OUTPUT:
number of arms
HISTORY:
2014-08-23 - Written - Bovy (IAS)
"""
return self._m
def tform(self): #pragma: no cover
"""
NAME:
tform
PURPOSE:
return formation time of the bar
INPUT:
(none)
OUTPUT:
tform in normalized units
HISTORY:
2011-03-08 - Written - Bovy (NYU)
"""
return self._tform
| jobovy/galpy | galpy/potential/SteadyLogSpiralPotential.py | Python | bsd-3-clause | 7,342 |
import tensorflow as tf
import matplotlib
import numpy as np
import matplotlib.pyplot as plt
import random
import math
np.random.seed(1234)
random.seed(1234)
plt.switch_backend("TkAgg")
def plotScatter(points, color):
xs = [x[0] for x in points]
ys = [y[1] for y in points]
plt.scatter(xs, ys, c=color)
def plot_weights(weights, gate, color):
plot_line(weights, color)
plot_line(gate, 'r')
#print("B: " + str(byas))
#print("XCoef: " + str(Xcoef))
def plot_line(weights, color):
n = weights
byas = -1 * n[0]/n[2]
Xcoef = -1 * n[1]/n[2]
plt.plot([-1.0, 1.0], [-1*Xcoef + byas, Xcoef + byas], '{}-'.format(color))
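# plot_line draws the decision boundary w0 + w1*x + w2*y = 0, rewritten as
# y = -(w0 + w1*x)/w2 and evaluated at the endpoints x = -1 and x = 1.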
def plot_centroid(centroid):
plt.plot(centroid[0], centroid[1], markersize=10, marker='x', color='g', mew=5)
def plot_incorrect(point):
plt.plot(point[0], point[1], markersize=5, marker='x', color='r', mew=5)
def generateChevronData():
xBounds = [-50, 50]
yBounds = [-50, 50]
totalPoints = 100
points = []
targets = []
for i in range(0, totalPoints):
x = random.randint(xBounds[0], xBounds[1])
y = random.randint(yBounds[0], yBounds[1])
if x >= y and x <= -y:
points.append([x/50.0,y/50.0])
targets.append(0.0)
else:
points.append([x/50.0,y/50.0])
targets.append(1.0)
return np.array(points), np.array(targets)
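# Class 0 is the downward wedge y <= -|x| (the "chevron"); everything else is
# class 1. Coordinates are scaled by 1/50 into [-1, 1].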
def generate_split_data():
xBounds = [-50, 50]
yBounds = [-50, 50]
totalPoints = 100
points = []
targets = []
for i in range(0, totalPoints):
x = random.randint(xBounds[0], xBounds[1])
y = random.randint(yBounds[0], yBounds[1])
if x < 25 and x > -25 :
points.append([x/50.0,y/50.0])
targets.append(0.0)
else:
points.append([x/50.0,y/50.0])
targets.append(1.0)
return np.array(points), np.array(targets)
def generate_clumps():
xBounds = [-50, 50]
yBounds = [-50, 50]
totalPoints = 100
points = []
targets = []
for i in range(0, int(totalPoints/2.0)):
x = random.randint(xBounds[0], 0)
y = random.randint(yBounds[0], 0)
if -x - 30 < y:
points.append([x/50.0,y/50.0])
targets.append(1.0)
else:
points.append([x/50.0,y/50.0])
targets.append(0.0)
for i in range(0, int(totalPoints/2.0)):
x = random.randint(0, xBounds[1])
y = random.randint(0, yBounds[1])
if -x + 30 > y:
points.append([x/50.0,y/50.0])
targets.append(1.0)
else:
points.append([x/50.0,y/50.0])
targets.append(0.0)
return np.array(points), np.array(targets)
def generate_rectangle_data():
xBounds = [-50, 50]
yBounds = [-50, 50]
totalPoints = 100
points = []
targets = []
for i in range(0, totalPoints):
x = random.randint(xBounds[0], xBounds[1])
y = random.randint(yBounds[0], yBounds[1])
if np.abs(x) < 30 and np.abs(y) < 30 :
points.append([x/50.0,y/50.0])
targets.append(0.0)
else:
points.append([x/50.0,y/50.0])
targets.append(1.0)
return np.array(points), np.array(targets)
def sigmoid(phi):
return 1.0/(1.0 + tf.exp(-phi))
def _log(x, b):
return tf.log(x) / tf.log(b)
points, out = generateChevronData()#generate_clumps()#generate_split_data()#generate_rectangle_data()#
in_size = 2
out_size = 1
num_centroids = 1
num_outputs = 1
two = tf.constant(2.0, dtype='float64')
inputs = tf.placeholder('float64', [in_size])
targets = tf.placeholder('float64', [out_size])
hidden_weights = tf.Variable(np.random.uniform(low=-0.5, high=0.5, size=(num_centroids, in_size+1)))
gate_weights = tf.Variable(np.random.uniform(low=-0.5, high=0.5, size=(num_centroids, in_size+1)))
inputs_prime = tf.concat([[1.0], inputs], axis=0)
# Perform computation
prob = tf.reduce_sum(tf.multiply(inputs_prime, hidden_weights), 1)
g = sigmoid(tf.reduce_sum(tf.multiply(inputs_prime, gate_weights), 1))
hidden_out = sigmoid(tf.multiply(g, prob))
output = hidden_out
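# The single "boundary hunter" unit computes output = sigmoid(g * (w . x')),
# where x' is the bias-augmented input, w the boundary weights, and g a
# sigmoid gate with its own weight vector that can damp the boundary's
# contribution where it does not help.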
#errors = tf.pow(tf.subtract(tf.expand_dims(targets, 1), output), 2.0)
#error = tf.reduce_sum(errors)
error = -tf.reduce_sum(targets * _log(output, two) + (1-targets) * _log(1-output, two)) - (0.5) * tf.reduce_sum(g)
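# Loss: cross-entropy in bits (hence log base 2) minus 0.5 * sum(g); the
# subtracted gate term rewards an active gate, apparently to keep g from
# collapsing to zero (where the output would sit at 0.5 everywhere).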
train_op = tf.train.GradientDescentOptimizer(0.001).minimize(error)
model = tf.global_variables_initializer()
with tf.Session() as session:
session.run(model)
#print(session.run(output, feed_dict={inputs: points[0], targets: [out[0]]}))
for e in range(4000):
for d in range(len(points)):
session.run(train_op, feed_dict={inputs: points[d], targets: [out[d]]})
if e % 10 == 0:
err = 0
for d in range(len(points)):
err += session.run(error, feed_dict={inputs: points[d], targets: [out[d]]})
print(err)
incorrect = []
for d in range(len(points)):
o = session.run(output, feed_dict={inputs: points[d], targets: [out[d]]})
if not int(round(o[0])) == out[d]:
incorrect.append(points[d])
gates = session.run(gate_weights)
boundarys = session.run(hidden_weights)
# Plot points on graph
c1 = []
c2 = []
for i in range(0, len(points)):
if out[i] == 0:
c1.append(points[i])
else:
c2.append(points[i])
print("Type 0: ", len(c1))
print("Type 1: ", len(c2))
plotScatter(c1,'y')
plotScatter(c2, 'b')
for i in range(len(boundarys)):
plot_weights(boundarys[i], gates[i], 'g')
for point in incorrect:
plot_incorrect(point)
plt.gca().set_aspect('equal')
plt.xlim(xmin=-1.5, xmax=1.5)
plt.ylim(ymin=-1.5, ymax=1.5)
plt.show()
| garibaldu/boundary-seekers | Boundary Hunter Ideas/TensorFlow/Single-TH-BH.py | Python | mit | 5,863 |
#########################
# py.test test functions
#########################
from __future__ import absolute_import
import pytest
from Qpyl.core.qpotential import *
from Qpyl.core.qstructure import PosVector as P
def is_close(a, b, rel_tol=1e-09, abs_tol=0.0):
return abs(a-b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)
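# Local stand-in for math.isclose (PEP 485): same signature and semantics,
# but also works on interpreters older than Python 3.5.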
class TestPotFunc:
def test_bond_energy(self):
e = bond_energy(3, 1000, 1)
assert is_close(e, 2000)
def test_angle_energy(self):
e = angle_energy(100, 100, 120)
assert is_close(e, 6.0923484)
def test_torsion_energy(self):
e = torsion_energy(0, 10, 3, 3, 0)
assert is_close(e, 6.6666666666)
def test_improper_energy(self):
e = improper_energy_periodic(150, 10.5, 2, 180)
assert is_close(e, 5.25)
class TestGeomFunc:
def test_distance(self):
a1, a2 = P(0, 0, 0), P(5, 6, 7)
d = bond_distance(a1, a2)
assert is_close(d, 10.488088482)
def test_angle(self):
a1, a2, a3 = P(1, 0, 0), P(0, 0, 0), P(0, 1, 0)
d = angle_angle(a1, a2, a3)
assert is_close(d, 90)
d = angle_angle(a2, a1, a3)
assert is_close(d, 45)
def test_dihedral(self):
a1, a2, a3, a4 = P(0, 0, 0), P(1, 0, 0), P(1, 1, 0), P(2, 1, 0)
d = torsion_angle(a1, a2, a3, a4)
assert is_close(d, 180)
a1, a2, a3, a4 = P(0, 0, 0), P(1, 0, 0), P(1, 1, 0), P(2, 1, 1)
d = torsion_angle(a1, a2, a3, a4)
assert is_close(d, 135)
| mpurg/qtools | tests/Qpyl/core/qpotential_test.py | Python | mit | 1,522 |
################################################################################
### Copyright © 2012-2013 BlackDragonHunt
###
### This file is part of the Super Duper Script Editor.
###
### The Super Duper Script Editor is free software: you can redistribute it
### and/or modify it under the terms of the GNU General Public License as
### published by the Free Software Foundation, either version 3 of the License,
### or (at your option) any later version.
###
### The Super Duper Script Editor is distributed in the hope that it will be
### useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
###
### You should have received a copy of the GNU General Public License
### along with the Super Duper Script Editor.
### If not, see <http://www.gnu.org/licenses/>.
################################################################################
import re
RE_IGNORE = re.compile(ur"\<CLT.*?\>|<DIG.*?>|[\.\,\?\!\'\"…?!‘’“”【】]+", re.UNICODE | re.IGNORECASE)
RE_SEPS = re.compile(ur"[―–—\n\s]+", re.UNICODE | re.IGNORECASE)
def count_words(text):
text = RE_SEPS.sub(u" ", text)
text = RE_IGNORE.sub(u"", text)
return len(text.split())
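# Example: count_words(u"<CLT 3>Hello… world!") == 2 -- the <CLT ...> tag,
# the ellipsis and the punctuation are stripped before splitting on spaces.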
### EOF ### | ThunderGemios10/The-Super-Duper-Script-Editor-2 | word_count.py | Python | gpl-3.0 | 1,322 |
#!/usr/bin/env python
"""
Set up the logging
"""
import logging
import tempfile
import os
def initialize_logging():
"""
Set up the screen and file logging.
:return: The log filename
"""
# set up DEBUG logging to file, INFO logging to STDERR
log_file = os.path.join(tempfile.gettempdir(), 'spfy.log')
formatter = logging.Formatter(
'%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
# set up logging to file - see previous section for more details
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m-%d %H:%M',
filename=log_file,
filemode='w')
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setFormatter(formatter)
console.setLevel(logging.INFO)
# add the handler to the root logger
logging.getLogger('').addHandler(console)
return log_file
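# Typical use (illustrative):
# log_file = initialize_logging()
# logging.info("goes to stderr and to %s", log_file)
# logging.debug("goes to the log file only")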
| superphy/backend | app/modules/loggingFunctions.py | Python | apache-2.0 | 1,056 |
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests to test the test framework"""
import sys
from neutron.tests import base
class SystemExitTestCase(base.BaseTestCase):
def setUp(self):
def _fail_SystemExit(exc_info):
if isinstance(exc_info[1], SystemExit):
self.fail("A SystemExit was allowed out")
super(SystemExitTestCase, self).setUp()
# add the handler last so reaching it means the handler in BaseTestCase
        # didn't do its job
self.addOnException(_fail_SystemExit)
def run(self, *args, **kwargs):
exc = self.assertRaises(AssertionError,
super(SystemExitTestCase, self).run,
*args, **kwargs)
# this message should be generated when SystemExit is raised by a test
self.assertIn('A SystemExit was raised during the test.', str(exc))
def test_system_exit(self):
# this should generate a failure that mentions SystemExit was used
sys.exit(1)
| yuewko/neutron | neutron/tests/unit/tests/test_base.py | Python | apache-2.0 | 1,600 |
'''
A rst2pdf extension to implement something similar to sphinx's plantuml extension
(see http://pypi.python.org/pypi/sphinxcontrib-plantuml)
Therefore, stuff may be copied from that code.
Ergo:
:copyright: Copyright 2010 by Yuya Nishihara <yuya@tcha.org>.
:license: BSD, (he says see LICENSE but the file is not there ;-)
'''
import errno
import subprocess
import tempfile
from docutils import nodes
from docutils.parsers import rst
from docutils.parsers.rst import directives
import rst2pdf.genelements as genelements
from rst2pdf.image import MyImage
from rst2pdf.styles import adjustUnits
class plantuml(nodes.General, nodes.Element):
pass
class UmlDirective(rst.Directive):
"""Directive to insert PlantUML markup
Example::
.. uml::
:alt: Alice and Bob
Alice -> Bob: Hello
Alice <- Bob: Hi
You can use a :format: option to change between SVG and PNG diagrams, however,
the SVG plantuml generates doesn't look very good to me.
Also, :width: and :height: are supported as per the image directive.
"""
has_content = True
option_spec = {
'alt': directives.unchanged,
'format': directives.unchanged,
'width': directives.length_or_unitless,
'height': directives.length_or_unitless,
}
def run(self):
node = plantuml()
node['uml'] = '\n'.join(self.content)
node['alt'] = self.options.get('alt', None)
node['format'] = self.options.get('format', 'png')
node['width'] = self.options.get('width', None)
node['height'] = self.options.get('height', None)
return [node]
class PlantUmlError(Exception):
pass
class UMLHandler(genelements.NodeHandler, plantuml):
"""Class to handle UML nodes"""
def gather_elements(self, client, node, style):
# Create image calling plantuml
tfile = tempfile.NamedTemporaryFile(
dir='.', delete=False, suffix='.' + node['format']
)
args = 'plantuml -pipe -charset utf-8'
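        # With -pipe, plantuml reads the diagram source from stdin and writes
        # the rendered image to stdout, which is redirected into tfile.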
if node['format'].lower() == 'svg':
args += ' -tsvg'
client.to_unlink.append(tfile.name)
try:
p = subprocess.Popen(
args.split(),
stdout=tfile,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE,
)
except OSError as err:
if err.errno != errno.ENOENT:
raise
            raise PlantUmlError(
                'plantuml command %r cannot be run' % args
            )
serr = p.communicate(node['uml'].encode('utf-8'))[1]
if p.returncode != 0:
            raise PlantUmlError('error while running plantuml\n\n' + serr.decode('utf-8', 'replace'))
# Convert width and height if necessary
w = node['width']
if w is not None:
w = adjustUnits(w)
h = node['height']
if h is not None:
h = adjustUnits(h)
# Add Image node with the right image
return [MyImage(tfile.name, client=client, width=w, height=h)]
directives.register_directive("uml", UmlDirective)
| rst2pdf/rst2pdf | rst2pdf/extensions/plantuml_r2p.py | Python | mit | 3,148 |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_set as page_set_module
from measurements import polymer_load
class TopekaPage(polymer_load.PageForPolymerLoad):
def __init__(self, page_set):
super(TopekaPage, self).__init__(
url='http://www.polymer-project.org/apps/topeka/?test',
ready_event='template-bound',
page_set=page_set)
self.archive_data_file = 'data/topeka.json'
class TopekaPageSet(page_set_module.PageSet):
""" Topeka quiz app for the Polymer UI toolkit """
def __init__(self):
super(TopekaPageSet, self).__init__(
user_agent_type='mobile',
archive_data_file='data/topeka.json',
bucket=page_set_module.INTERNAL_BUCKET)
self.AddUserStory(TopekaPage(self))
| guorendong/iridium-browser-ubuntu | tools/perf/page_sets/topeka.py | Python | bsd-3-clause | 881 |
from socket import *
import binascii
import tongue
class TcpClient:
HOST = '127.0.0.1'
PORT = 12345
BUFSIZ = 1024
ADDR = (HOST, PORT)
def __init__(self):
self.client = socket(AF_INET, SOCK_STREAM)
self.client.connect(self.ADDR)
self.register_data = '7E0100002501860175250400020000000037' \
'303231385957333030302D42440000000000' \
'000000000000304A544931514B00A27E'
self.packed_data = binascii.unhexlify(self.register_data)
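        # register_data is the hex dump of a complete JT/T 808 frame
        # (0x7E-delimited, message id 0x0100 = terminal registration);
        # unhexlify turns it into the raw bytes sent on the wire.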
while True:
self.client.send(self.packed_data)
data = self.client.recv(self.BUFSIZ)
if not data:
break
recv_data = tongue.Decode(data)
print recv_data.dst
if __name__ == '__main__':
client = TcpClient()
| land-pack/jtt808 | simulate/terminal.py | Python | gpl-2.0 | 830 |
import json
import re
from girder_worker.app import app
from girder_worker.utils import girder_job
from gaia.parser import deserialize
@girder_job()
@app.task(bind=True)
def gaia_task(self, kwargs):
datasetId = str(kwargs['dataset']['_id'])
token = kwargs['token']
analysis = json.loads(kwargs['analysis'])
for input in analysis['inputs']:
input['token'] = token['_id']
    filename = re.sub(r'\s|\.', '_', kwargs['dataset']['name'])
filename = '{}.json'.format(
''.join([c for c in filename if re.match(r'\w', c)]))
if 'output' not in analysis:
analysis['output'] = {
'filename': filename,
'_type':
'gaia_tasks.inputs.MinervaVectorIO',
'item_id': datasetId,
'token': token['_id']
}
else:
analysis['output']['item_id'] = datasetId
analysis['output']['token'] = token['_id']
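    # Round-trip through json so the object_hook can rebuild Gaia input/output
    # objects from their '_type' fields, then execute the analysis graph.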
process = json.loads(json.dumps(analysis), object_hook=deserialize)
process.compute()
| Kitware/minerva | gaia_tasks/tasks.py | Python | apache-2.0 | 1,013 |
from amcat.tools import amcattest
from amcat.tools.table import tableoutput
from amcat.tools.table.table3 import Table, ListTable, ObjectTable, ObjectColumn, \
SortedTable
class TestTable(amcattest.AmCATTestCase):
def test_init(self):
"""Does init use 'empty' parameters?"""
a = []
t = Table(columns=a)
self.assertEqual(t.columns, a)
def test_list_table(self):
"""Can we create a list table and output as ascii"""
t = ListTable(colnames=["a1", "a2", "a3"],
data=[[1, 2, 3],
[74321, 8, 9],
[4, 5, "asdf"],
])
result = tableoutput.table2unicode(t)
correct = '''
╔═══════╤════╤══════╗
║ a1 │ a2 │ a3 ║
╟───────┼────┼──────╢
║ 1 │ 2 │ 3 ║
║ 74321 │ 8 │ 9 ║
║ 4 │ 5 │ asdf ║
╚═══════╧════╧══════╝'''
self.assertEquals(_striplines(result), _striplines(correct.strip()))
def test_object_table(self):
"""Does creating object tables work"""
class Test(object):
def __init__(self, a, b, c):
self.a, self.b, self.c = a, b, c
l = ObjectTable(rows=[Test(1, 2, 3), Test("bla", None, 7), Test(-1, -1, None)])
l.add_column(lambda x: x.a, "de a")
l.add_column("b")
l.add_column(ObjectColumn("en de C", lambda x: x.c))
result = tableoutput.table2unicode(l)
# get rid of pesky unicode
result = result.translate(dict((a, 65 + a % 26) for a in range(0x2500, 0x2600)))
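        # (each box-drawing codepoint in U+2500..U+25FF maps to chr(65 + cp % 26),
        # collapsing the frame to the deterministic ASCII letters in `correct`)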
correct = '''OKKKKKKEKKKKEKKKKKKKKKR
L de a K b K en de C L
ZIIIIIIQIIIIQIIIIIIIIIC
L 1 K 2 K 3 L
L bla K K 7 L
L -1 K -1 K L
UKKKKKKHKKKKHKKKKKKKKKX'''
self.assertEquals(_striplines(result), _striplines(correct.strip()))
def test_sort(self):
t = ListTable(colnames=["a1", "a2", "a3"],
data=[[1, 2, 3], [7, 8, 9], [4, 5, -4]])
s = SortedTable(t, key=lambda row: row[1])
self.assertEqual([list(row) for row in s.get_rows()], [[1, 2, 3], [4, 5, -4], [7, 8, 9]])
s = SortedTable(t, key=lambda row: row[2])
self.assertEqual([list(row) for row in s.get_rows()], [[4, 5, -4], [1, 2, 3], [7, 8, 9]])
def _striplines(x):
"""Strip each line in x to make comparison easier"""
return "\n".join(l.strip() for l in x.split("\n")).strip() | amcat/amcat | amcat/tools/table/tests/test_table3.py | Python | agpl-3.0 | 2,580 |
__author__ = 'fahadadeel'
import jpype
import os.path
from WorkingWithCharts import SetLabelDistance
asposeapispath = os.path.join(os.path.abspath("../../../"), "lib")
print "You need to put your Aspose.Slides for Java APIs .jars in this folder:\n"+asposeapispath
jpype.startJVM(jpype.getDefaultJVMPath(), "-Djava.ext.dirs=%s" % asposeapispath)
testObject = SetLabelDistance('data/')
testObject.main() | asposeslides/Aspose_Slides_Java | Plugins/Aspose-Slides-Java-for-Python/tests/WorkingWithCharts/SetLabelDistance/SetLabelDistance.py | Python | mit | 405 |
from __future__ import unicode_literals
import datetime
import operator
try:
from urllib.parse import urlencode
except ImportError: # python 2
from urllib import urlencode
from django.core.urlresolvers import reverse
from django.db import models, transaction
from django.db.models import Q
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone, translation, six
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import AnonymousUser
from django.contrib.sites.models import Site
import pytz
from account import signals
from account.conf import settings
from account.fields import TimeZoneField
from account.hooks import hookset
from account.managers import EmailAddressManager, EmailConfirmationManager
from account.signals import signup_code_sent, signup_code_used
class Account(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, related_name="account", verbose_name=_("user"))
timezone = TimeZoneField(_("timezone"))
language = models.CharField(
_("language"),
max_length=10,
choices=settings.ACCOUNT_LANGUAGES,
default=settings.LANGUAGE_CODE
)
@classmethod
def for_request(cls, request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except Account.DoesNotExist:
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return account
@classmethod
def create(cls, request=None, **kwargs):
create_email = kwargs.pop("create_email", True)
confirm_email = kwargs.pop("confirm_email", None)
account = cls(**kwargs)
if "language" not in kwargs:
if request is None:
account.language = settings.LANGUAGE_CODE
else:
account.language = translation.get_language_from_request(request, check_path=True)
account.save()
if create_email and account.user.email:
kwargs = {"primary": True}
if confirm_email is not None:
kwargs["confirm"] = confirm_email
EmailAddress.objects.add_email(account.user, account.user.email, **kwargs)
return account
def __str__(self):
return str(self.user)
def now(self):
"""
Returns a timezone aware datetime localized to the account's timezone.
"""
now = datetime.datetime.utcnow().replace(tzinfo=pytz.timezone("UTC"))
timezone = settings.TIME_ZONE if not self.timezone else self.timezone
return now.astimezone(pytz.timezone(timezone))
def localtime(self, value):
"""
Given a datetime object as value convert it to the timezone of
the account.
"""
timezone = settings.TIME_ZONE if not self.timezone else self.timezone
if value.tzinfo is None:
value = pytz.timezone(settings.TIME_ZONE).localize(value)
return value.astimezone(pytz.timezone(timezone))
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def user_post_save(sender, **kwargs):
"""
After User.save is called we check to see if it was a created user. If so,
we check if the User object wants account creation. If all passes we
create an Account object.
We only run on user creation to avoid having to check for existence on
each call to User.save.
"""
user, created = kwargs["instance"], kwargs["created"]
disabled = getattr(user, "_disable_account_creation", not settings.ACCOUNT_CREATE_ON_SAVE)
if created and not disabled:
Account.create(user=user)
class AnonymousAccount(object):
def __init__(self, request=None):
self.user = AnonymousUser()
self.timezone = settings.TIME_ZONE
if request is None:
self.language = settings.LANGUAGE_CODE
else:
self.language = translation.get_language_from_request(request, check_path=True)
def __unicode__(self):
return "AnonymousAccount"
class SignupCode(models.Model):
class AlreadyExists(Exception):
pass
class InvalidCode(Exception):
pass
code = models.CharField(_("code"), max_length=64, unique=True)
max_uses = models.PositiveIntegerField(_("max uses"), default=0)
expiry = models.DateTimeField(_("expiry"), null=True, blank=True)
inviter = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True)
email = models.EmailField(blank=True)
notes = models.TextField(_("notes"), blank=True)
sent = models.DateTimeField(_("sent"), null=True, blank=True)
created = models.DateTimeField(_("created"), default=timezone.now, editable=False)
use_count = models.PositiveIntegerField(_("use count"), editable=False, default=0)
class Meta:
verbose_name = _("signup code")
verbose_name_plural = _("signup codes")
def __unicode__(self):
if self.email:
return "{0} [{1}]".format(self.email, self.code)
else:
return self.code
@classmethod
def exists(cls, code=None, email=None):
checks = []
if code:
checks.append(Q(code=code))
if email:
            checks.append(Q(email=email))
if not checks:
return False
return cls._default_manager.filter(six.moves.reduce(operator.or_, checks)).exists()
@classmethod
def create(cls, **kwargs):
email, code = kwargs.get("email"), kwargs.get("code")
if kwargs.get("check_exists", True) and cls.exists(code=code, email=email):
raise cls.AlreadyExists()
expiry = timezone.now() + datetime.timedelta(hours=kwargs.get("expiry", 24))
if not code:
code = hookset.generate_signup_code_token(email)
params = {
"code": code,
"max_uses": kwargs.get("max_uses", 0),
"expiry": expiry,
"inviter": kwargs.get("inviter"),
"notes": kwargs.get("notes", "")
}
if email:
params["email"] = email
return cls(**params)
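    # Note: create() builds but does not save the instance; send() persists it
    # when the invitation email goes out.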
@classmethod
def check_code(cls, code):
try:
signup_code = cls._default_manager.get(code=code)
except cls.DoesNotExist:
raise cls.InvalidCode()
else:
if signup_code.max_uses and signup_code.max_uses <= signup_code.use_count:
raise cls.InvalidCode()
else:
if signup_code.expiry and timezone.now() > signup_code.expiry:
raise cls.InvalidCode()
else:
return signup_code
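    # A code is valid only if it exists, still has uses left (max_uses == 0
    # means unlimited) and has not passed its expiry timestamp.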
def calculate_use_count(self):
self.use_count = self.signupcoderesult_set.count()
self.save()
def use(self, user):
"""
Add a SignupCode result attached to the given user.
"""
result = SignupCodeResult()
result.signup_code = self
result.user = user
result.save()
signup_code_used.send(sender=result.__class__, signup_code_result=result)
def send(self, **kwargs):
protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http")
current_site = kwargs["site"] if "site" in kwargs else Site.objects.get_current()
if "signup_url" not in kwargs:
signup_url = "{0}://{1}{2}?{3}".format(
protocol,
current_site.domain,
reverse("account_signup"),
urlencode({"code": self.code})
)
else:
signup_url = kwargs["signup_url"]
ctx = {
"signup_code": self,
"current_site": current_site,
"signup_url": signup_url,
}
ctx.update(kwargs.get("extra_ctx", {}))
hookset.send_invitation_email([self.email], ctx)
self.sent = timezone.now()
self.save()
signup_code_sent.send(sender=SignupCode, signup_code=self)
class SignupCodeResult(models.Model):
signup_code = models.ForeignKey(SignupCode)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
timestamp = models.DateTimeField(default=timezone.now)
def save(self, **kwargs):
super(SignupCodeResult, self).save(**kwargs)
self.signup_code.calculate_use_count()
class EmailAddress(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
email = models.EmailField(unique=settings.ACCOUNT_EMAIL_UNIQUE)
verified = models.BooleanField(_("verified"), default=False)
primary = models.BooleanField(_("primary"), default=False)
objects = EmailAddressManager()
class Meta:
verbose_name = _("email address")
verbose_name_plural = _("email addresses")
if not settings.ACCOUNT_EMAIL_UNIQUE:
unique_together = [("user", "email")]
def __unicode__(self):
return "{0} ({1})".format(self.email, self.user)
def set_as_primary(self, conditional=False):
old_primary = EmailAddress.objects.get_primary(self.user)
if old_primary:
if conditional:
return False
old_primary.primary = False
old_primary.save()
self.primary = True
self.save()
self.user.email = self.email
self.user.save()
return True
def send_confirmation(self, **kwargs):
confirmation = EmailConfirmation.create(self)
confirmation.send(**kwargs)
return confirmation
def change(self, new_email, confirm=True):
"""
Given a new email address, change self and re-confirm.
"""
with transaction.atomic():
self.user.email = new_email
self.user.save()
self.email = new_email
self.verified = False
self.save()
if confirm:
self.send_confirmation()
class EmailConfirmation(models.Model):
email_address = models.ForeignKey(EmailAddress)
created = models.DateTimeField(default=timezone.now)
sent = models.DateTimeField(null=True)
key = models.CharField(max_length=64, unique=True)
objects = EmailConfirmationManager()
class Meta:
verbose_name = _("email confirmation")
verbose_name_plural = _("email confirmations")
def __unicode__(self):
return "confirmation for {0}".format(self.email_address)
@classmethod
def create(cls, email_address):
key = hookset.generate_email_confirmation_token(email_address.email)
return cls._default_manager.create(email_address=email_address, key=key)
def key_expired(self):
expiration_date = self.sent + datetime.timedelta(days=settings.ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS)
return expiration_date <= timezone.now()
key_expired.boolean = True
def confirm(self):
if not self.key_expired() and not self.email_address.verified:
email_address = self.email_address
email_address.verified = True
email_address.set_as_primary(conditional=True)
email_address.save()
signals.email_confirmed.send(sender=self.__class__, email_address=email_address)
return email_address
def send(self, **kwargs):
current_site = kwargs["site"] if "site" in kwargs else Site.objects.get_current()
protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http")
activate_url = "{0}://{1}{2}".format(
protocol,
current_site.domain,
reverse(settings.ACCOUNT_EMAIL_CONFIRMATION_URL, args=[self.key])
)
ctx = {
"email_address": self.email_address,
"user": self.email_address.user,
"activate_url": activate_url,
"current_site": current_site,
"key": self.key,
}
hookset.send_confirmation_email([self.email_address.email], ctx)
self.sent = timezone.now()
self.save()
signals.email_confirmation_sent.send(sender=self.__class__, confirmation=self)
class AccountDeletion(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.SET_NULL)
email = models.EmailField()
date_requested = models.DateTimeField(_("date requested"), default=timezone.now)
date_expunged = models.DateTimeField(_("date expunged"), null=True, blank=True)
class Meta:
verbose_name = _("account deletion")
verbose_name_plural = _("account deletions")
@classmethod
def expunge(cls, hours_ago=None):
if hours_ago is None:
hours_ago = settings.ACCOUNT_DELETION_EXPUNGE_HOURS
before = timezone.now() - datetime.timedelta(hours=hours_ago)
count = 0
for account_deletion in cls.objects.filter(date_requested__lt=before, user__isnull=False):
settings.ACCOUNT_DELETION_EXPUNGE_CALLBACK(account_deletion)
account_deletion.date_expunged = timezone.now()
account_deletion.save()
count += 1
return count
@classmethod
def mark(cls, user):
account_deletion, created = cls.objects.get_or_create(user=user)
account_deletion.email = user.email
account_deletion.save()
settings.ACCOUNT_DELETION_MARK_CALLBACK(account_deletion)
return account_deletion
| rizumu/django-user-accounts | account/models.py | Python | mit | 13,425 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import copy
import io
import json
import logging.config
import os
import shutil
import sys
import tempfile
import unittest
import urllib
from datetime import timedelta
from urllib.parse import quote_plus
import mock
import jinja2
from flask import url_for
from flask._compat import PY2
from parameterized import parameterized
from werkzeug.test import Client
from airflow import configuration as conf
from airflow import models, settings
from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG
from airflow.jobs import BaseJob
from airflow.models import DAG, DagRun, TaskInstance
from airflow.models.connection import Connection
from airflow.operators.dummy_operator import DummyOperator
from airflow.settings import Session
from airflow.utils import dates, timezone
from airflow.utils.db import create_session
from airflow.utils.state import State
from airflow.utils.timezone import datetime
from airflow.www import app as application
class TestBase(unittest.TestCase):
@classmethod
def setUpClass(cls):
conf.load_test_config()
cls.app, cls.appbuilder = application.create_app(session=Session, testing=True)
cls.app.config['WTF_CSRF_ENABLED'] = False
cls.app.jinja_env.undefined = jinja2.StrictUndefined
settings.configure_orm()
cls.session = Session
def setUp(self):
self.client = self.app.test_client()
self.login()
def login(self):
role_admin = self.appbuilder.sm.find_role('Admin')
tester = self.appbuilder.sm.find_user(username='test')
if not tester:
self.appbuilder.sm.add_user(
username='test',
first_name='test',
last_name='test',
email='test@fab.org',
role=role_admin,
password='test')
return self.client.post('/login/', data=dict(
username='test',
password='test'
), follow_redirects=True)
def logout(self):
return self.client.get('/logout/')
def clear_table(self, model):
self.session.query(model).delete()
self.session.commit()
self.session.close()
def check_content_in_response(self, text, resp, resp_code=200):
resp_html = resp.data.decode('utf-8')
self.assertEqual(resp_code, resp.status_code)
if isinstance(text, list):
for kw in text:
self.assertIn(kw, resp_html)
else:
self.assertIn(text, resp_html)
def check_content_not_in_response(self, text, resp, resp_code=200):
resp_html = resp.data.decode('utf-8')
self.assertEqual(resp_code, resp.status_code)
if isinstance(text, list):
for kw in text:
self.assertNotIn(kw, resp_html)
else:
self.assertNotIn(text, resp_html)
def percent_encode(self, obj):
if PY2:
return urllib.quote_plus(str(obj))
else:
return urllib.parse.quote_plus(str(obj))
class TestConnectionModelView(TestBase):
def setUp(self):
super(TestConnectionModelView, self).setUp()
self.connection = {
'conn_id': 'test_conn',
'conn_type': 'http',
'host': 'localhost',
'port': 8080,
'username': 'root',
'password': 'admin'
}
def tearDown(self):
self.clear_table(Connection)
super(TestConnectionModelView, self).tearDown()
def test_create_connection(self):
resp = self.client.post('/connection/add',
data=self.connection,
follow_redirects=True)
self.check_content_in_response('Added Row', resp)
class TestVariableModelView(TestBase):
def setUp(self):
super(TestVariableModelView, self).setUp()
self.variable = {
'key': 'test_key',
'val': 'text_val',
'is_encrypted': True
}
def tearDown(self):
self.clear_table(models.Variable)
super(TestVariableModelView, self).tearDown()
def test_can_handle_error_on_decrypt(self):
# create valid variable
resp = self.client.post('/variable/add',
data=self.variable,
follow_redirects=True)
# update the variable with a wrong value, given that is encrypted
Var = models.Variable
(self.session.query(Var)
.filter(Var.key == self.variable['key'])
.update({
'val': 'failed_value_not_encrypted'
}, synchronize_session=False))
self.session.commit()
# retrieve Variables page, should not fail and contain the Invalid
# label for the variable
resp = self.client.get('/variable/list', follow_redirects=True)
self.check_content_in_response(
'<span class="label label-danger">Invalid</span>', resp)
def test_xss_prevention(self):
xss = "/variable/list/<img%20src=''%20onerror='alert(1);'>"
resp = self.client.get(
xss,
follow_redirects=True,
)
self.assertEqual(resp.status_code, 404)
self.assertNotIn("<img src='' onerror='alert(1);'>",
resp.data.decode("utf-8"))
def test_import_variables_failed(self):
content = '{"str_key": "str_value"}'
with mock.patch('airflow.models.Variable.set') as set_mock:
set_mock.side_effect = UnicodeEncodeError
self.assertEqual(self.session.query(models.Variable).count(), 0)
try:
# python 3+
bytes_content = io.BytesIO(bytes(content, encoding='utf-8'))
except TypeError:
# python 2.7
bytes_content = io.BytesIO(bytes(content))
resp = self.client.post('/variable/varimport',
data={'file': (bytes_content, 'test.json')},
follow_redirects=True)
self.check_content_in_response('1 variable(s) failed to be updated.', resp)
def test_import_variables_success(self):
self.assertEqual(self.session.query(models.Variable).count(), 0)
content = ('{"str_key": "str_value", "int_key": 60,'
'"list_key": [1, 2], "dict_key": {"k_a": 2, "k_b": 3}}')
try:
# python 3+
bytes_content = io.BytesIO(bytes(content, encoding='utf-8'))
except TypeError:
# python 2.7
bytes_content = io.BytesIO(bytes(content))
resp = self.client.post('/variable/varimport',
data={'file': (bytes_content, 'test.json')},
follow_redirects=True)
self.check_content_in_response('4 variable(s) successfully updated.', resp)
class TestPoolModelView(TestBase):
def setUp(self):
super(TestPoolModelView, self).setUp()
self.pool = {
'pool': 'test-pool',
'slots': 777,
'description': 'test-pool-description',
}
def tearDown(self):
self.clear_table(models.Pool)
super(TestPoolModelView, self).tearDown()
def test_create_pool_with_same_name(self):
# create test pool
resp = self.client.post('/pool/add',
data=self.pool,
follow_redirects=True)
self.check_content_in_response('Added Row', resp)
# create pool with the same name
resp = self.client.post('/pool/add',
data=self.pool,
follow_redirects=True)
self.check_content_in_response('Already exists.', resp)
def test_create_pool_with_empty_name(self):
self.pool['pool'] = ''
resp = self.client.post('/pool/add',
data=self.pool,
follow_redirects=True)
self.check_content_in_response('This field is required.', resp)
def test_odd_name(self):
self.pool['pool'] = 'test-pool<script></script>'
self.session.add(models.Pool(**self.pool))
self.session.commit()
resp = self.client.get('/pool/list/')
self.check_content_in_response('test-pool<script>', resp)
self.check_content_not_in_response('test-pool<script>', resp)
class TestMountPoint(unittest.TestCase):
def setUp(self):
application.app = None
super(TestMountPoint, self).setUp()
conf.load_test_config()
conf.set("webserver", "base_url", "http://localhost:8080/test")
config = dict()
config['WTF_CSRF_METHODS'] = []
app = application.cached_app(config=config, testing=True)
self.client = Client(app)
def test_mount(self):
resp, _, _ = self.client.get('/', follow_redirects=True)
txt = b''.join(resp)
self.assertEqual(b"Apache Airflow is not at this location", txt)
resp, _, _ = self.client.get('/test/home', follow_redirects=True)
resp_html = b''.join(resp)
self.assertIn(b"DAGs", resp_html)
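    # With base_url ending in /test, the app is mounted under that prefix:
    # the bare root serves only the "not at this location" stub, while
    # /test/home serves the actual UI.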
class TestAirflowBaseViews(TestBase):
EXAMPLE_DAG_DEFAULT_DATE = dates.days_ago(2)
run_id = "test_{}".format(models.DagRun.id_for_date(EXAMPLE_DAG_DEFAULT_DATE))
def setUp(self):
super(TestAirflowBaseViews, self).setUp()
self.logout()
self.login()
self.cleanup_dagruns()
self.prepare_dagruns()
def cleanup_dagruns(self):
DR = models.DagRun
dag_ids = ['example_bash_operator',
'example_subdag_operator',
'example_xcom']
(self.session
.query(DR)
.filter(DR.dag_id.in_(dag_ids))
.filter(DR.run_id == self.run_id)
.delete(synchronize_session='fetch'))
self.session.commit()
def prepare_dagruns(self):
dagbag = models.DagBag(include_examples=True)
self.bash_dag = dagbag.dags['example_bash_operator']
self.sub_dag = dagbag.dags['example_subdag_operator']
self.xcom_dag = dagbag.dags['example_xcom']
self.bash_dagrun = self.bash_dag.create_dagrun(
run_id=self.run_id,
execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
start_date=timezone.utcnow(),
state=State.RUNNING)
self.sub_dagrun = self.sub_dag.create_dagrun(
run_id=self.run_id,
execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
start_date=timezone.utcnow(),
state=State.RUNNING)
self.xcom_dagrun = self.xcom_dag.create_dagrun(
run_id=self.run_id,
execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
start_date=timezone.utcnow(),
state=State.RUNNING)
def test_index(self):
resp = self.client.get('/', follow_redirects=True)
self.check_content_in_response('DAGs', resp)
def test_health(self):
# case-1: healthy scheduler status
last_scheduler_heartbeat_for_testing_1 = timezone.utcnow()
self.session.add(BaseJob(job_type='SchedulerJob',
state='running',
latest_heartbeat=last_scheduler_heartbeat_for_testing_1))
self.session.commit()
resp_json = json.loads(self.client.get('health', follow_redirects=True).data.decode('utf-8'))
self.assertEqual('healthy', resp_json['metadatabase']['status'])
self.assertEqual('healthy', resp_json['scheduler']['status'])
self.assertEqual(str(last_scheduler_heartbeat_for_testing_1),
resp_json['scheduler']['latest_scheduler_heartbeat'])
self.session.query(BaseJob).\
filter(BaseJob.job_type == 'SchedulerJob',
BaseJob.state == 'running',
BaseJob.latest_heartbeat == last_scheduler_heartbeat_for_testing_1).\
delete()
self.session.commit()
# case-2: unhealthy scheduler status - scenario 1 (SchedulerJob is running too slowly)
last_scheduler_heartbeat_for_testing_2 = timezone.utcnow() - timedelta(minutes=1)
(self.session
.query(BaseJob)
.filter(BaseJob.job_type == 'SchedulerJob')
.update({'latest_heartbeat': last_scheduler_heartbeat_for_testing_2 - timedelta(seconds=1)}))
self.session.add(BaseJob(job_type='SchedulerJob',
state='running',
latest_heartbeat=last_scheduler_heartbeat_for_testing_2))
self.session.commit()
resp_json = json.loads(self.client.get('health', follow_redirects=True).data.decode('utf-8'))
self.assertEqual('healthy', resp_json['metadatabase']['status'])
self.assertEqual('unhealthy', resp_json['scheduler']['status'])
self.assertEqual(str(last_scheduler_heartbeat_for_testing_2),
resp_json['scheduler']['latest_scheduler_heartbeat'])
self.session.query(BaseJob).\
filter(BaseJob.job_type == 'SchedulerJob',
BaseJob.state == 'running',
BaseJob.latest_heartbeat == last_scheduler_heartbeat_for_testing_2).\
delete()
self.session.commit()
# case-3: unhealthy scheduler status - scenario 2 (no running SchedulerJob)
self.session.query(BaseJob).\
filter(BaseJob.job_type == 'SchedulerJob',
BaseJob.state == 'running').\
delete()
self.session.commit()
resp_json = json.loads(self.client.get('health', follow_redirects=True).data.decode('utf-8'))
self.assertEqual('healthy', resp_json['metadatabase']['status'])
self.assertEqual('unhealthy', resp_json['scheduler']['status'])
self.assertEqual('None',
resp_json['scheduler']['latest_scheduler_heartbeat'])
def test_home(self):
resp = self.client.get('home', follow_redirects=True)
self.check_content_in_response('DAGs', resp)
def test_task(self):
url = ('task?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.EXAMPLE_DAG_DEFAULT_DATE)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('Task Instance Details', resp)
def test_xcom(self):
url = ('xcom?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.EXAMPLE_DAG_DEFAULT_DATE)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('XCom', resp)
def test_edit_dagrun_page(self):
resp = self.client.get('dagmodel/edit/example_bash_operator', follow_redirects=False)
self.assertEqual(resp.status_code, 200)
def test_edit_dagrun_url(self):
with self.app.test_request_context():
url = url_for('DagModelView.edit', pk='example_bash_operator')
self.assertEqual(url, '/dagmodel/edit/example_bash_operator')
def test_rendered(self):
url = ('rendered?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.EXAMPLE_DAG_DEFAULT_DATE)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('Rendered Template', resp)
def test_pickle_info(self):
url = 'pickle_info?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.assertEqual(resp.status_code, 200)
def test_blocked(self):
url = 'blocked'
resp = self.client.get(url, follow_redirects=True)
self.assertEqual(200, resp.status_code)
def test_dag_stats(self):
resp = self.client.get('dag_stats', follow_redirects=True)
self.assertEqual(resp.status_code, 200)
def test_task_stats(self):
resp = self.client.get('task_stats', follow_redirects=True)
self.assertEqual(resp.status_code, 200)
def test_dag_details(self):
url = 'dag_details?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('DAG details', resp)
def test_graph(self):
url = 'graph?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('runme_1', resp)
def test_tree(self):
url = 'tree?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('runme_1', resp)
def test_duration(self):
url = 'duration?days=30&dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_duration_missing(self):
url = 'duration?days=30&dag_id=missing_dag'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('seems to be missing', resp)
def test_tries(self):
url = 'tries?days=30&dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_landing_times(self):
url = 'landing_times?days=30&dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_gantt(self):
url = 'gantt?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_code(self):
url = 'code?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_paused(self):
url = 'paused?dag_id=example_bash_operator&is_paused=false'
resp = self.client.post(url, follow_redirects=True)
self.check_content_in_response('OK', resp)
def test_failed(self):
url = ('failed?task_id=run_this_last&dag_id=example_bash_operator&'
'execution_date={}&upstream=false&downstream=false&future=false&past=false'
.format(self.percent_encode(self.EXAMPLE_DAG_DEFAULT_DATE)))
resp = self.client.get(url)
self.check_content_in_response('Wait a minute', resp)
def test_success(self):
url = ('success?task_id=run_this_last&dag_id=example_bash_operator&'
'execution_date={}&upstream=false&downstream=false&future=false&past=false'
.format(self.percent_encode(self.EXAMPLE_DAG_DEFAULT_DATE)))
resp = self.client.get(url)
self.check_content_in_response('Wait a minute', resp)
def test_clear(self):
url = ('clear?task_id=runme_1&dag_id=example_bash_operator&'
'execution_date={}&upstream=false&downstream=false&future=false&past=false'
.format(self.percent_encode(self.EXAMPLE_DAG_DEFAULT_DATE)))
resp = self.client.get(url)
self.check_content_in_response(['example_bash_operator', 'Wait a minute'], resp)
def test_run(self):
url = ('run?task_id=runme_0&dag_id=example_bash_operator&ignore_all_deps=false&'
'ignore_ti_state=true&execution_date={}'
.format(self.percent_encode(self.EXAMPLE_DAG_DEFAULT_DATE)))
resp = self.client.get(url)
self.check_content_in_response('', resp, resp_code=302)
def test_refresh(self):
resp = self.client.get('refresh?dag_id=example_bash_operator')
self.check_content_in_response('', resp, resp_code=302)
def test_refresh_all(self):
resp = self.client.get("/refresh_all",
follow_redirects=True)
self.check_content_in_response('', resp, resp_code=200)
def test_delete_dag_button_normal(self):
resp = self.client.get('/', follow_redirects=True)
self.check_content_in_response('/delete?dag_id=example_bash_operator', resp)
self.check_content_in_response("return confirmDeleteDag('example_bash_operator')", resp)
def test_delete_dag_button_for_dag_on_scheduler_only(self):
# Test for JIRA AIRFLOW-3233 (PR 4069):
# The delete-dag URL should be generated correctly for DAGs
# that exist on the scheduler (DB) but not the webserver DagBag
test_dag_id = "non_existent_dag"
DM = models.DagModel
self.session.query(DM).filter(DM.dag_id == 'example_bash_operator').update({'dag_id': test_dag_id})
self.session.commit()
resp = self.client.get('/', follow_redirects=True)
self.check_content_in_response('/delete?dag_id={}'.format(test_dag_id), resp)
self.check_content_in_response("return confirmDeleteDag('{}')".format(test_dag_id), resp)
self.session.query(DM).filter(DM.dag_id == test_dag_id).update({'dag_id': 'example_bash_operator'})
self.session.commit()
class TestConfigurationView(TestBase):
def test_configuration_do_not_expose_config(self):
self.logout()
self.login()
conf.set("webserver", "expose_config", "False")
resp = self.client.get('configuration', follow_redirects=True)
self.check_content_in_response(
['Airflow Configuration', '# Your Airflow administrator chose not to expose the configuration, '
'most likely for security reasons.'], resp)
def test_configuration_expose_config(self):
self.logout()
self.login()
conf.set("webserver", "expose_config", "True")
resp = self.client.get('configuration', follow_redirects=True)
self.check_content_in_response(
['Airflow Configuration', 'Running Configuration'], resp)
class TestLogView(TestBase):
DAG_ID = 'dag_for_testing_log_view'
TASK_ID = 'task_for_testing_log_view'
DEFAULT_DATE = timezone.datetime(2017, 9, 1)
ENDPOINT = 'log?dag_id={dag_id}&task_id={task_id}&' \
'execution_date={execution_date}'.format(dag_id=DAG_ID,
task_id=TASK_ID,
execution_date=DEFAULT_DATE)
def setUp(self):
conf.load_test_config()
# Create a custom logging configuration
logging_config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
current_dir = os.path.dirname(os.path.abspath(__file__))
logging_config['handlers']['task']['base_log_folder'] = os.path.normpath(
os.path.join(current_dir, 'test_logs'))
logging_config['handlers']['task']['filename_template'] = \
'{{ ti.dag_id }}/{{ ti.task_id }}/' \
'{{ ts | replace(":", ".") }}/{{ try_number }}.log'
# Write the custom logging configuration to a file
self.settings_folder = tempfile.mkdtemp()
settings_file = os.path.join(self.settings_folder, "airflow_local_settings.py")
new_logging_file = "LOGGING_CONFIG = {}".format(logging_config)
with open(settings_file, 'w') as handle:
handle.writelines(new_logging_file)
sys.path.append(self.settings_folder)
conf.set('core', 'logging_config_class', 'airflow_local_settings.LOGGING_CONFIG')
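        # The temp folder was appended to sys.path above, so the dotted path
        # 'airflow_local_settings.LOGGING_CONFIG' resolves to the file just written.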
self.app, self.appbuilder = application.create_app(session=Session, testing=True)
self.app.config['WTF_CSRF_ENABLED'] = False
self.client = self.app.test_client()
settings.configure_orm()
self.login()
from airflow.www.views import dagbag
dag = DAG(self.DAG_ID, start_date=self.DEFAULT_DATE)
task = DummyOperator(task_id=self.TASK_ID, dag=dag)
dagbag.bag_dag(dag, parent_dag=dag, root_dag=dag)
with create_session() as session:
self.ti = TaskInstance(task=task, execution_date=self.DEFAULT_DATE)
self.ti.try_number = 1
session.merge(self.ti)
def tearDown(self):
logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)
self.clear_table(TaskInstance)
shutil.rmtree(self.settings_folder)
conf.set('core', 'logging_config_class', '')
self.logout()
super(TestLogView, self).tearDown()
@parameterized.expand([
[State.NONE, 0, 0],
[State.UP_FOR_RETRY, 2, 2],
[State.UP_FOR_RESCHEDULE, 0, 1],
[State.UP_FOR_RESCHEDULE, 1, 2],
[State.RUNNING, 1, 1],
[State.SUCCESS, 1, 1],
[State.FAILED, 3, 3],
])
def test_get_file_task_log(self, state, try_number, expected_num_logs_visible):
with create_session() as session:
self.ti.state = state
self.ti.try_number = try_number
session.merge(self.ti)
response = self.client.get(
TestLogView.ENDPOINT, data=dict(
username='test',
password='test'), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn('Log by attempts', response.data.decode('utf-8'))
for num in range(1, expected_num_logs_visible + 1):
self.assertIn('try-{}'.format(num), response.data.decode('utf-8'))
self.assertNotIn('try-0', response.data.decode('utf-8'))
self.assertNotIn('try-{}'.format(expected_num_logs_visible + 1), response.data.decode('utf-8'))
def test_get_logs_with_metadata_as_download_file(self):
url_template = "get_logs_with_metadata?dag_id={}&" \
"task_id={}&execution_date={}&" \
"try_number={}&metadata={}&format=file"
try_number = 1
url = url_template.format(self.DAG_ID,
self.TASK_ID,
quote_plus(self.DEFAULT_DATE.isoformat()),
try_number,
json.dumps({}))
response = self.client.get(url)
expected_filename = '{}/{}/{}/{}.log'.format(self.DAG_ID,
self.TASK_ID,
self.DEFAULT_DATE.isoformat(),
try_number)
content_disposition = response.headers.get('Content-Disposition')
self.assertTrue(content_disposition.startswith('attachment'))
self.assertTrue(expected_filename in content_disposition)
self.assertEqual(200, response.status_code)
self.assertIn('Log for testing.', response.data.decode('utf-8'))
def test_get_logs_with_metadata(self):
url_template = "get_logs_with_metadata?dag_id={}&" \
"task_id={}&execution_date={}&" \
"try_number={}&metadata={}"
response = \
self.client.get(url_template.format(self.DAG_ID,
self.TASK_ID,
quote_plus(self.DEFAULT_DATE.isoformat()),
1,
json.dumps({})), data=dict(
username='test',
password='test'),
follow_redirects=True)
self.assertIn('"message":', response.data.decode('utf-8'))
self.assertIn('"metadata":', response.data.decode('utf-8'))
self.assertIn('Log for testing.', response.data.decode('utf-8'))
self.assertEqual(200, response.status_code)
def test_get_logs_with_null_metadata(self):
url_template = "get_logs_with_metadata?dag_id={}&" \
"task_id={}&execution_date={}&" \
"try_number={}&metadata=null"
response = \
self.client.get(url_template.format(self.DAG_ID,
self.TASK_ID,
quote_plus(self.DEFAULT_DATE.isoformat()),
1), data=dict(
username='test',
password='test'),
follow_redirects=True)
self.assertIn('"message":', response.data.decode('utf-8'))
self.assertIn('"metadata":', response.data.decode('utf-8'))
self.assertIn('Log for testing.', response.data.decode('utf-8'))
self.assertEqual(200, response.status_code)
class TestVersionView(TestBase):
def test_version(self):
resp = self.client.get('version', data=dict(
username='test',
password='test'
), follow_redirects=True)
self.check_content_in_response('Version Info', resp)
class ViewWithDateTimeAndNumRunsAndDagRunsFormTester:
DAG_ID = 'dag_for_testing_dt_nr_dr_form'
DEFAULT_DATE = datetime(2017, 9, 1)
RUNS_DATA = [
('dag_run_for_testing_dt_nr_dr_form_4', datetime(2018, 4, 4)),
('dag_run_for_testing_dt_nr_dr_form_3', datetime(2018, 3, 3)),
('dag_run_for_testing_dt_nr_dr_form_2', datetime(2018, 2, 2)),
('dag_run_for_testing_dt_nr_dr_form_1', datetime(2018, 1, 1)),
]
def __init__(self, test, endpoint):
self.test = test
self.endpoint = endpoint
def setUp(self):
from airflow.www.views import dagbag
from airflow.utils.state import State
dag = DAG(self.DAG_ID, start_date=self.DEFAULT_DATE)
dagbag.bag_dag(dag, parent_dag=dag, root_dag=dag)
self.runs = []
for rd in self.RUNS_DATA:
run = dag.create_dagrun(
run_id=rd[0],
execution_date=rd[1],
state=State.SUCCESS,
external_trigger=True
)
self.runs.append(run)
def tearDown(self):
self.test.session.query(DagRun).filter(
DagRun.dag_id == self.DAG_ID).delete()
self.test.session.commit()
self.test.session.close()
def assertBaseDateAndNumRuns(self, base_date, num_runs, data):
self.test.assertNotIn('name="base_date" value="{}"'.format(base_date), data)
self.test.assertNotIn('<option selected="" value="{}">{}</option>'.format(
num_runs, num_runs), data)
def assertRunIsNotInDropdown(self, run, data):
self.test.assertNotIn(run.execution_date.isoformat(), data)
self.test.assertNotIn(run.run_id, data)
def assertRunIsInDropdownNotSelected(self, run, data):
self.test.assertIn('<option value="{}">{}</option>'.format(
run.execution_date.isoformat(), run.run_id), data)
def assertRunIsSelected(self, run, data):
self.test.assertIn('<option selected value="{}">{}</option>'.format(
run.execution_date.isoformat(), run.run_id), data)
def test_with_default_parameters(self):
"""
Tests view with no URL parameter.
Should show all dag runs in the drop down.
Should select the latest dag run.
Should set base date to current date (not asserted)
"""
response = self.test.client.get(
self.endpoint, data=dict(
username='test',
password='test'), follow_redirects=True)
self.test.assertEqual(response.status_code, 200)
data = response.data.decode('utf-8')
self.test.assertIn('Base date:', data)
self.test.assertIn('Number of runs:', data)
self.assertRunIsSelected(self.runs[0], data)
self.assertRunIsInDropdownNotSelected(self.runs[1], data)
self.assertRunIsInDropdownNotSelected(self.runs[2], data)
self.assertRunIsInDropdownNotSelected(self.runs[3], data)
def test_with_execution_date_parameter_only(self):
"""
Tests view with execution_date URL parameter.
Scenario: click link from dag runs view.
Should only show dag runs older than execution_date in the drop down.
Should select the particular dag run.
Should set base date to execution date.
"""
response = self.test.client.get(
self.endpoint + '&execution_date={}'.format(
self.runs[1].execution_date.isoformat()),
data=dict(
username='test',
password='test'
), follow_redirects=True
)
self.test.assertEqual(response.status_code, 200)
data = response.data.decode('utf-8')
self.assertBaseDateAndNumRuns(
self.runs[1].execution_date,
conf.getint('webserver', 'default_dag_run_display_number'),
data)
self.assertRunIsNotInDropdown(self.runs[0], data)
self.assertRunIsSelected(self.runs[1], data)
self.assertRunIsInDropdownNotSelected(self.runs[2], data)
self.assertRunIsInDropdownNotSelected(self.runs[3], data)
def test_with_base_date_and_num_runs_parmeters_only(self):
"""
Tests view with base_date and num_runs URL parameters.
Should only show dag runs older than base_date in the drop down,
limited to num_runs.
Should select the latest dag run.
Should set base date and num runs to submitted values.
"""
response = self.test.client.get(
self.endpoint + '&base_date={}&num_runs=2'.format(
self.runs[1].execution_date.isoformat()),
data=dict(
username='test',
password='test'
), follow_redirects=True
)
self.test.assertEqual(response.status_code, 200)
data = response.data.decode('utf-8')
self.assertBaseDateAndNumRuns(self.runs[1].execution_date, 2, data)
self.assertRunIsNotInDropdown(self.runs[0], data)
self.assertRunIsSelected(self.runs[1], data)
self.assertRunIsInDropdownNotSelected(self.runs[2], data)
self.assertRunIsNotInDropdown(self.runs[3], data)
def test_with_base_date_and_num_runs_and_execution_date_outside(self):
"""
Tests view with base_date and num_runs and execution-date URL parameters.
Scenario: change the base date and num runs and press "Go",
the selected execution date is outside the new range.
Should only show dag runs older than base_date in the drop down.
Should select the latest dag run within the range.
Should set base date and num runs to submitted values.
"""
response = self.test.client.get(
self.endpoint + '&base_date={}&num_runs=42&execution_date={}'.format(
self.runs[1].execution_date.isoformat(),
self.runs[0].execution_date.isoformat()),
data=dict(
username='test',
password='test'
), follow_redirects=True
)
self.test.assertEqual(response.status_code, 200)
data = response.data.decode('utf-8')
self.assertBaseDateAndNumRuns(self.runs[1].execution_date, 42, data)
self.assertRunIsNotInDropdown(self.runs[0], data)
self.assertRunIsSelected(self.runs[1], data)
self.assertRunIsInDropdownNotSelected(self.runs[2], data)
self.assertRunIsInDropdownNotSelected(self.runs[3], data)
def test_with_base_date_and_num_runs_and_execution_date_within(self):
"""
Tests view with base_date and num_runs and execution-date URL parameters.
Scenario: change the base date and num runs and press "Go",
the selected execution date is within the new range.
Should only show dag runs older than base_date in the drop down.
Should select the dag run with the execution date.
Should set base date and num runs to submitted values.
"""
response = self.test.client.get(
self.endpoint + '&base_date={}&num_runs=5&execution_date={}'.format(
self.runs[2].execution_date.isoformat(),
self.runs[3].execution_date.isoformat()),
data=dict(
username='test',
password='test'
), follow_redirects=True
)
self.test.assertEqual(response.status_code, 200)
data = response.data.decode('utf-8')
self.assertBaseDateAndNumRuns(self.runs[2].execution_date, 5, data)
self.assertRunIsNotInDropdown(self.runs[0], data)
self.assertRunIsNotInDropdown(self.runs[1], data)
self.assertRunIsInDropdownNotSelected(self.runs[2], data)
self.assertRunIsSelected(self.runs[3], data)
class TestGraphView(TestBase):
GRAPH_ENDPOINT = '/graph?dag_id={dag_id}'.format(
dag_id=ViewWithDateTimeAndNumRunsAndDagRunsFormTester.DAG_ID
)
@classmethod
def setUpClass(cls):
super(TestGraphView, cls).setUpClass()
def setUp(self):
super(TestGraphView, self).setUp()
self.tester = ViewWithDateTimeAndNumRunsAndDagRunsFormTester(
self, self.GRAPH_ENDPOINT)
self.tester.setUp()
def tearDown(self):
self.tester.tearDown()
super(TestGraphView, self).tearDown()
@classmethod
def tearDownClass(cls):
super(TestGraphView, cls).tearDownClass()
def test_dt_nr_dr_form_default_parameters(self):
self.tester.test_with_default_parameters()
def test_dt_nr_dr_form_with_execution_date_parameter_only(self):
self.tester.test_with_execution_date_parameter_only()
    def test_dt_nr_dr_form_with_base_date_and_num_runs_parameters_only(self):
        self.tester.test_with_base_date_and_num_runs_parameters_only()
def test_dt_nr_dr_form_with_base_date_and_num_runs_and_execution_date_outside(self):
self.tester.test_with_base_date_and_num_runs_and_execution_date_outside()
def test_dt_nr_dr_form_with_base_date_and_num_runs_and_execution_date_within(self):
self.tester.test_with_base_date_and_num_runs_and_execution_date_within()
class TestGanttView(TestBase):
GANTT_ENDPOINT = '/gantt?dag_id={dag_id}'.format(
dag_id=ViewWithDateTimeAndNumRunsAndDagRunsFormTester.DAG_ID
)
@classmethod
def setUpClass(cls):
super(TestGanttView, cls).setUpClass()
def setUp(self):
super(TestGanttView, self).setUp()
self.tester = ViewWithDateTimeAndNumRunsAndDagRunsFormTester(
self, self.GANTT_ENDPOINT)
self.tester.setUp()
def tearDown(self):
self.tester.tearDown()
super(TestGanttView, self).tearDown()
@classmethod
def tearDownClass(cls):
super(TestGanttView, cls).tearDownClass()
def test_dt_nr_dr_form_default_parameters(self):
self.tester.test_with_default_parameters()
def test_dt_nr_dr_form_with_execution_date_parameter_only(self):
self.tester.test_with_execution_date_parameter_only()
    def test_dt_nr_dr_form_with_base_date_and_num_runs_parameters_only(self):
        self.tester.test_with_base_date_and_num_runs_parameters_only()
def test_dt_nr_dr_form_with_base_date_and_num_runs_and_execution_date_outside(self):
self.tester.test_with_base_date_and_num_runs_and_execution_date_outside()
def test_dt_nr_dr_form_with_base_date_and_num_runs_and_execution_date_within(self):
self.tester.test_with_base_date_and_num_runs_and_execution_date_within()
class TestDagACLView(TestBase):
"""
Test Airflow DAG acl
"""
default_date = timezone.datetime(2018, 6, 1)
run_id = "test_{}".format(models.DagRun.id_for_date(default_date))
@classmethod
def setUpClass(cls):
super(TestDagACLView, cls).setUpClass()
def cleanup_dagruns(self):
DR = models.DagRun
dag_ids = ['example_bash_operator',
'example_subdag_operator']
(self.session
.query(DR)
.filter(DR.dag_id.in_(dag_ids))
.filter(DR.run_id == self.run_id)
.delete(synchronize_session='fetch'))
self.session.commit()
def prepare_dagruns(self):
dagbag = models.DagBag(include_examples=True)
self.bash_dag = dagbag.dags['example_bash_operator']
self.sub_dag = dagbag.dags['example_subdag_operator']
self.bash_dagrun = self.bash_dag.create_dagrun(
run_id=self.run_id,
execution_date=self.default_date,
start_date=timezone.utcnow(),
state=State.RUNNING)
self.sub_dagrun = self.sub_dag.create_dagrun(
run_id=self.run_id,
execution_date=self.default_date,
start_date=timezone.utcnow(),
state=State.RUNNING)
def setUp(self):
super(TestDagACLView, self).setUp()
self.cleanup_dagruns()
self.prepare_dagruns()
self.logout()
self.appbuilder.sm.sync_roles()
self.add_permission_for_role()
def login(self, username=None, password=None):
role_admin = self.appbuilder.sm.find_role('Admin')
tester = self.appbuilder.sm.find_user(username='test')
if not tester:
self.appbuilder.sm.add_user(
username='test',
first_name='test',
last_name='test',
email='test@fab.org',
role=role_admin,
password='test')
role_user = self.appbuilder.sm.find_role('User')
test_user = self.appbuilder.sm.find_user(username='test_user')
if not test_user:
self.appbuilder.sm.add_user(
username='test_user',
first_name='test_user',
last_name='test_user',
email='test_user@fab.org',
role=role_user,
password='test_user')
        role_viewer = self.appbuilder.sm.find_role('Viewer')
test_viewer = self.appbuilder.sm.find_user(username='test_viewer')
if not test_viewer:
self.appbuilder.sm.add_user(
username='test_viewer',
first_name='test_viewer',
last_name='test_viewer',
email='test_viewer@fab.org',
role=role_viewer,
password='test_viewer')
dag_acl_role = self.appbuilder.sm.add_role('dag_acl_tester')
dag_tester = self.appbuilder.sm.find_user(username='dag_tester')
if not dag_tester:
self.appbuilder.sm.add_user(
username='dag_tester',
first_name='dag_test',
last_name='dag_test',
email='dag_test@fab.org',
role=dag_acl_role,
password='dag_test')
        # create a user without permission
dag_no_role = self.appbuilder.sm.add_role('dag_acl_faker')
dag_faker = self.appbuilder.sm.find_user(username='dag_faker')
if not dag_faker:
self.appbuilder.sm.add_user(
username='dag_faker',
first_name='dag_faker',
last_name='dag_faker',
email='dag_fake@fab.org',
role=dag_no_role,
password='dag_faker')
        # create a user with only read permission
dag_read_only_role = self.appbuilder.sm.add_role('dag_acl_read_only')
dag_read_only = self.appbuilder.sm.find_user(username='dag_read_only')
if not dag_read_only:
self.appbuilder.sm.add_user(
username='dag_read_only',
first_name='dag_read_only',
last_name='dag_read_only',
email='dag_read_only@fab.org',
role=dag_read_only_role,
password='dag_read_only')
        # create a user that has all dag access
all_dag_role = self.appbuilder.sm.add_role('all_dag_role')
all_dag_tester = self.appbuilder.sm.find_user(username='all_dag_user')
if not all_dag_tester:
self.appbuilder.sm.add_user(
username='all_dag_user',
first_name='all_dag_user',
last_name='all_dag_user',
email='all_dag_user@fab.org',
role=all_dag_role,
password='all_dag_user')
user = username if username else 'dag_tester'
passwd = password if password else 'dag_test'
return self.client.post('/login/', data=dict(
username=user,
password=passwd
))
def logout(self):
return self.client.get('/logout/')
def add_permission_for_role(self):
self.logout()
self.login(username='test',
password='test')
perm_on_dag = self.appbuilder.sm.\
find_permission_view_menu('can_dag_edit', 'example_bash_operator')
dag_tester_role = self.appbuilder.sm.find_role('dag_acl_tester')
self.appbuilder.sm.add_permission_role(dag_tester_role, perm_on_dag)
perm_on_all_dag = self.appbuilder.sm.\
find_permission_view_menu('can_dag_edit', 'all_dags')
all_dag_role = self.appbuilder.sm.find_role('all_dag_role')
self.appbuilder.sm.add_permission_role(all_dag_role, perm_on_all_dag)
role_user = self.appbuilder.sm.find_role('User')
self.appbuilder.sm.add_permission_role(role_user, perm_on_all_dag)
read_only_perm_on_dag = self.appbuilder.sm.\
find_permission_view_menu('can_dag_read', 'example_bash_operator')
dag_read_only_role = self.appbuilder.sm.find_role('dag_acl_read_only')
self.appbuilder.sm.add_permission_role(dag_read_only_role, read_only_perm_on_dag)
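        # Note: FAB renders these as 'can dag edit on <dag_id>' and
        # 'can dag read on <dag_id>' permission strings (see the assertions
        # in test_role_permission_associate below).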
def test_permission_exist(self):
self.logout()
self.login(username='test',
password='test')
test_view_menu = self.appbuilder.sm.find_view_menu('example_bash_operator')
perms_views = self.appbuilder.sm.find_permissions_view_menu(test_view_menu)
self.assertEqual(len(perms_views), 2)
        # each DAG view creates one write and one read permission
self.assertTrue(str(perms_views[0]).startswith('can dag'))
self.assertTrue(str(perms_views[1]).startswith('can dag'))
def test_role_permission_associate(self):
self.logout()
self.login(username='test',
password='test')
test_role = self.appbuilder.sm.find_role('dag_acl_tester')
perms = set([str(perm) for perm in test_role.permissions])
self.assertIn('can dag edit on example_bash_operator', perms)
self.assertNotIn('can dag read on example_bash_operator', perms)
def test_index_success(self):
self.logout()
self.login()
resp = self.client.get('/', follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_index_failure(self):
self.logout()
self.login()
resp = self.client.get('/', follow_redirects=True)
# The user can only access/view example_bash_operator dag.
self.check_content_not_in_response('example_subdag_operator', resp)
def test_index_for_all_dag_user(self):
self.logout()
self.login(username='all_dag_user',
password='all_dag_user')
resp = self.client.get('/', follow_redirects=True)
# The all dag user can access/view all dags.
self.check_content_in_response('example_subdag_operator', resp)
self.check_content_in_response('example_bash_operator', resp)
def test_dag_stats_success(self):
self.logout()
self.login()
resp = self.client.get('dag_stats', follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_dag_stats_failure(self):
self.logout()
self.login()
resp = self.client.get('dag_stats', follow_redirects=True)
self.check_content_not_in_response('example_subdag_operator', resp)
def test_dag_stats_success_for_all_dag_user(self):
self.logout()
self.login(username='all_dag_user',
password='all_dag_user')
resp = self.client.get('dag_stats', follow_redirects=True)
self.check_content_in_response('example_subdag_operator', resp)
self.check_content_in_response('example_bash_operator', resp)
def test_task_stats_success(self):
self.logout()
self.login()
resp = self.client.get('task_stats', follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_task_stats_failure(self):
self.logout()
self.login()
resp = self.client.get('task_stats', follow_redirects=True)
self.check_content_not_in_response('example_subdag_operator', resp)
def test_task_stats_success_for_all_dag_user(self):
self.logout()
self.login(username='all_dag_user',
password='all_dag_user')
resp = self.client.get('task_stats', follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
self.check_content_in_response('example_subdag_operator', resp)
def test_code_success(self):
self.logout()
self.login()
url = 'code?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_code_failure(self):
self.logout()
self.login(username='dag_faker',
password='dag_faker')
url = 'code?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_not_in_response('example_bash_operator', resp)
def test_code_success_for_all_dag_user(self):
self.logout()
self.login(username='all_dag_user',
password='all_dag_user')
url = 'code?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
url = 'code?dag_id=example_subdag_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_subdag_operator', resp)
def test_dag_details_success(self):
self.logout()
self.login()
url = 'dag_details?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('DAG details', resp)
def test_dag_details_failure(self):
self.logout()
self.login(username='dag_faker',
password='dag_faker')
url = 'dag_details?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_not_in_response('DAG details', resp)
def test_dag_details_success_for_all_dag_user(self):
self.logout()
self.login(username='all_dag_user',
password='all_dag_user')
url = 'dag_details?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
url = 'dag_details?dag_id=example_subdag_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_subdag_operator', resp)
def test_pickle_info_success(self):
self.logout()
self.login()
url = 'pickle_info?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.assertEqual(resp.status_code, 200)
def test_rendered_success(self):
self.logout()
self.login()
url = ('rendered?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('Rendered Template', resp)
def test_rendered_failure(self):
self.logout()
self.login(username='dag_faker',
password='dag_faker')
url = ('rendered?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_not_in_response('Rendered Template', resp)
def test_rendered_success_for_all_dag_user(self):
self.logout()
self.login(username='all_dag_user',
password='all_dag_user')
url = ('rendered?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('Rendered Template', resp)
def test_task_success(self):
self.logout()
self.login()
url = ('task?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('Task Instance Details', resp)
def test_task_failure(self):
self.logout()
self.login(username='dag_faker',
password='dag_faker')
url = ('task?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_not_in_response('Task Instance Details', resp)
def test_task_success_for_all_dag_user(self):
self.logout()
self.login(username='all_dag_user',
password='all_dag_user')
url = ('task?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('Task Instance Details', resp)
def test_xcom_success(self):
self.logout()
self.login()
url = ('xcom?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('XCom', resp)
def test_xcom_failure(self):
self.logout()
self.login(username='dag_faker',
password='dag_faker')
url = ('xcom?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_not_in_response('XCom', resp)
def test_xcom_success_for_all_dag_user(self):
self.logout()
self.login(username='all_dag_user',
password='all_dag_user')
url = ('xcom?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('XCom', resp)
def test_run_success(self):
self.logout()
self.login()
url = ('run?task_id=runme_0&dag_id=example_bash_operator&ignore_all_deps=false&'
'ignore_ti_state=true&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url)
self.check_content_in_response('', resp, resp_code=302)
def test_run_success_for_all_dag_user(self):
self.logout()
self.login(username='all_dag_user',
password='all_dag_user')
url = ('run?task_id=runme_0&dag_id=example_bash_operator&ignore_all_deps=false&'
'ignore_ti_state=true&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url)
self.check_content_in_response('', resp, resp_code=302)
def test_blocked_success(self):
url = 'blocked'
self.logout()
self.login()
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_blocked_success_for_all_dag_user(self):
url = 'blocked'
self.logout()
self.login(username='all_dag_user',
password='all_dag_user')
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
self.check_content_in_response('example_subdag_operator', resp)
def test_failed_success(self):
self.logout()
self.login()
url = ('failed?task_id=run_this_last&dag_id=example_bash_operator&'
'execution_date={}&upstream=false&downstream=false&future=false&past=false'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url)
self.check_content_in_response('Redirecting', resp, 302)
def test_duration_success(self):
url = 'duration?days=30&dag_id=example_bash_operator'
self.logout()
self.login()
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_duration_failure(self):
url = 'duration?days=30&dag_id=example_bash_operator'
self.logout()
        # login as a user without permissions
self.login(username='dag_faker',
password='dag_faker')
resp = self.client.get(url, follow_redirects=True)
self.check_content_not_in_response('example_bash_operator', resp)
def test_tries_success(self):
url = 'tries?days=30&dag_id=example_bash_operator'
self.logout()
self.login()
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_tries_failure(self):
url = 'tries?days=30&dag_id=example_bash_operator'
self.logout()
        # login as a user without permissions
self.login(username='dag_faker',
password='dag_faker')
resp = self.client.get(url, follow_redirects=True)
self.check_content_not_in_response('example_bash_operator', resp)
def test_landing_times_success(self):
url = 'landing_times?days=30&dag_id=example_bash_operator'
self.logout()
self.login()
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_landing_times_failure(self):
url = 'landing_times?days=30&dag_id=example_bash_operator'
self.logout()
self.login(username='dag_faker',
password='dag_faker')
resp = self.client.get(url, follow_redirects=True)
self.check_content_not_in_response('example_bash_operator', resp)
def test_paused_success(self):
        # the POST-failure case is not tested here
url = 'paused?dag_id=example_bash_operator&is_paused=false'
self.logout()
self.login()
resp = self.client.post(url, follow_redirects=True)
self.check_content_in_response('OK', resp)
def test_refresh_success(self):
self.logout()
self.login()
resp = self.client.get('refresh?dag_id=example_bash_operator')
self.check_content_in_response('', resp, resp_code=302)
def test_gantt_success(self):
url = 'gantt?dag_id=example_bash_operator'
self.logout()
self.login()
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('example_bash_operator', resp)
def test_gantt_failure(self):
url = 'gantt?dag_id=example_bash_operator'
self.logout()
self.login(username='dag_faker',
password='dag_faker')
resp = self.client.get(url, follow_redirects=True)
self.check_content_not_in_response('example_bash_operator', resp)
def test_success_fail_for_read_only_role(self):
        # the success endpoint needs can_dag_edit, which the read-only role cannot access
self.logout()
self.login(username='dag_read_only',
password='dag_read_only')
url = ('success?task_id=run_this_last&dag_id=example_bash_operator&'
'execution_date={}&upstream=false&downstream=false&future=false&past=false'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url)
self.check_content_not_in_response('Wait a minute', resp, resp_code=302)
def test_tree_success_for_read_only_role(self):
        # the tree view only requires can_dag_read, which the read-only role can access
self.logout()
self.login(username='dag_read_only',
password='dag_read_only')
url = 'tree?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('runme_1', resp)
def test_log_success(self):
self.logout()
self.login()
url = ('log?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('Log by attempts', resp)
url = ('get_logs_with_metadata?task_id=runme_0&dag_id=example_bash_operator&'
'execution_date={}&try_number=1&metadata=null'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('"message":', resp)
self.check_content_in_response('"metadata":', resp)
def test_log_failure(self):
self.logout()
self.login(username='dag_faker',
password='dag_faker')
url = ('log?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_not_in_response('Log by attempts', resp)
url = ('get_logs_with_metadata?task_id=runme_0&dag_id=example_bash_operator&'
'execution_date={}&try_number=1&metadata=null'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_not_in_response('"message":', resp)
self.check_content_not_in_response('"metadata":', resp)
def test_log_success_for_user(self):
self.logout()
self.login(username='test_user',
password='test_user')
url = ('log?task_id=runme_0&dag_id=example_bash_operator&execution_date={}'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('Log by attempts', resp)
url = ('get_logs_with_metadata?task_id=runme_0&dag_id=example_bash_operator&'
'execution_date={}&try_number=1&metadata=null'
.format(self.percent_encode(self.default_date)))
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('"message":', resp)
self.check_content_in_response('"metadata":', resp)
def test_tree_view_for_viewer(self):
self.logout()
self.login(username='test_viewer',
password='test_viewer')
url = 'tree?dag_id=example_bash_operator'
resp = self.client.get(url, follow_redirects=True)
self.check_content_in_response('runme_1', resp)
class TestTaskInstanceView(TestBase):
TI_ENDPOINT = '/taskinstance/list/?_flt_0_execution_date={}'
def test_start_date_filter(self):
resp = self.client.get(self.TI_ENDPOINT.format(
self.percent_encode('2018-10-09 22:44:31')))
        # We aren't checking the logic of the date filter itself (that is
        # built into FAB) but simply that our UTC conversion was run - i.e. it
        # doesn't blow up!
        self.check_content_in_response('List Task Instance', resp)
class TestTriggerDag(TestBase):
def setUp(self):
super(TestTriggerDag, self).setUp()
self.session = Session()
models.DagBag().get_dag("example_bash_operator").sync_to_db(session=self.session)
def test_trigger_dag_button_normal_exist(self):
resp = self.client.get('/', follow_redirects=True)
self.assertIn('/trigger?dag_id=example_bash_operator', resp.data.decode('utf-8'))
self.assertIn("return confirmDeleteDag('example_bash_operator')", resp.data.decode('utf-8'))
@unittest.skipIf('mysql' in conf.conf.get('core', 'sql_alchemy_conn'),
"flaky when run on mysql")
def test_trigger_dag_button(self):
test_dag_id = "example_bash_operator"
DR = models.DagRun
self.session.query(DR).delete()
self.session.commit()
self.client.get('trigger?dag_id={}'.format(test_dag_id))
run = self.session.query(DR).filter(DR.dag_id == test_dag_id).first()
self.assertIsNotNone(run)
self.assertIn("manual__", run.run_id)
if __name__ == '__main__':
unittest.main()
| fenglu-g/incubator-airflow | tests/www/test_views.py | Python | apache-2.0 | 66,432 |
from base64 import b64encode
import json
from urllib import urlencode
from twisted.internet.defer import inlineCallbacks
from twisted.web import http
from vumi.utils import http_request, http_request_full
from vumi.tests.helpers import VumiTestCase
from vumi.transports.api import (
OldSimpleHttpTransport, OldTemplateHttpTransport)
from vumi.transports.tests.helpers import TransportHelper
class TestOldSimpleHttpTransport(VumiTestCase):
@inlineCallbacks
def setUp(self):
self.config = {
'web_path': "foo",
'web_port': 0,
}
self.tx_helper = self.add_helper(
TransportHelper(OldSimpleHttpTransport))
self.transport = yield self.tx_helper.get_transport(self.config)
addr = self.transport.web_resource.getHost()
self.transport_url = "http://%s:%s/" % (addr.host, addr.port)
@inlineCallbacks
def test_health(self):
result = yield http_request(self.transport_url + "health", "",
method='GET')
self.assertEqual(json.loads(result), {})
@inlineCallbacks
def test_inbound(self):
url = '%s%s?%s' % (
self.transport_url,
self.config['web_path'],
urlencode([
('to_msisdn', 555),
('to_msisdn', 556),
('from_msisdn', 123),
('message', 'hello'),
])
)
response = yield http_request(url, '', method='GET')
[msg1, msg2] = self.tx_helper.get_dispatched_inbound()
self.assertEqual(msg1['transport_name'], self.tx_helper.transport_name)
self.assertEqual(msg1['to_addr'], "555")
self.assertEqual(msg2['to_addr'], "556")
self.assertEqual(msg1['from_addr'], "123")
self.assertEqual(msg1['content'], "hello")
self.assertEqual(json.loads(response), [
{
'id': msg1['message_id'],
'message': msg1['content'],
'from_msisdn': msg1['from_addr'],
'to_msisdn': msg1['to_addr'],
},
{
'id': msg2['message_id'],
'message': msg2['content'],
'from_msisdn': msg2['from_addr'],
'to_msisdn': msg2['to_addr'],
},
])
@inlineCallbacks
def test_http_basic_auth(self):
http_auth_config = self.config.copy()
http_auth_config.update({
'identities': {
'username': 'password',
}
})
transport = yield self.tx_helper.get_transport(http_auth_config)
url = '%s%s?%s' % (
transport.get_transport_url(),
self.config['web_path'],
urlencode({
'to_msisdn': '123',
'from_msisdn': '456',
'message': 'hello',
}))
response = yield http_request_full(url, '', method='GET')
self.assertEqual(response.code, http.UNAUTHORIZED)
self.assertEqual([], self.tx_helper.get_dispatched_inbound())
response = yield http_request_full(url, '', headers={
'Authorization': ['Basic %s' % b64encode('username:password')]
}, method='GET')
self.assertEqual(response.code, http.OK)
[msg] = self.tx_helper.get_dispatched_inbound()
self.assertEqual(msg['content'], 'hello')
self.assertEqual(msg['transport_metadata'], {
'http_user': 'username',
})
class TestOldTemplateHttpTransport(VumiTestCase):
@inlineCallbacks
def setUp(self):
self.config = {
'web_path': "foo",
'web_port': 0,
}
self.tx_helper = self.add_helper(
TransportHelper(OldTemplateHttpTransport))
self.transport = yield self.tx_helper.get_transport(self.config)
self.transport_url = self.transport.get_transport_url()
@inlineCallbacks
def test_inbound(self):
url = '%s%s?%s' % (
self.transport_url,
self.config['web_path'],
urlencode([
('to_msisdn', 555),
('to_msisdn', 556),
('template_name', "Joe"),
('template_name', "Foo"),
('template_surname', "Smith"),
('template_surname', "Bar"),
('from_msisdn', 123),
('template', 'hello {{ name }} {{surname}}'),
])
)
response = yield http_request(url, '', method='GET')
[msg1, msg2] = self.tx_helper.get_dispatched_inbound()
self.assertEqual(msg1['transport_name'], self.tx_helper.transport_name)
self.assertEqual(msg1['to_addr'], "555")
self.assertEqual(msg1['from_addr'], "123")
self.assertEqual(msg1['content'], "hello Joe Smith")
self.assertEqual(msg2['content'], "hello Foo Bar")
self.assertEqual(json.loads(response), [
{
'id': msg1['message_id'],
'message': msg1['content'],
'from_msisdn': msg1['from_addr'],
'to_msisdn': msg1['to_addr'],
},
{
'id': msg2['message_id'],
'message': msg2['content'],
'from_msisdn': msg2['from_addr'],
'to_msisdn': msg2['to_addr'],
},
])
| TouK/vumi | vumi/transports/api/tests/test_oldapi.py | Python | bsd-3-clause | 5,374 |
# Copyright 2013 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'SERVICE_CATALOG_ENTRY_NAME'
]
SERVICE_CATALOG_ENTRY_NAME = 'cloudLoadBalancers'
| racker/python-raxcli | raxcli/apps/loadbalancer/constants.py | Python | apache-2.0 | 909 |
from datetime import datetime, time
import pytz
from mailsync.libs.dates import utc_unixtime_to_localtime
from mailsync import settings
# Converts unix time to readable date format
# Used in the tooltips on the frontend
def dateformat(value, format="%d-%m-%Y-%H:%M"):
try:
        dt = datetime.fromtimestamp(value, pytz.utc)
        return dt.strftime(format)
    except Exception:
return None
# Localized unix timestamp
# def dateformat_local(value, format="%d-%m-%Y-%H:%M"):
def dateformat_local(value, format="%h %d'%Y at %H:%M"):
value = utc_unixtime_to_localtime(value)
return dateformat(value, format)
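# Example (UTC): dateformat(0) returns "01-01-1970-00:00" for the Unix epoch.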
def base_url():
if settings.PROXY is None:
host = settings.WEB_APP["host"]
port = settings.WEB_APP["port"]
if port and int(port) > 0:
base_url = "{0}:{1}".format(host, port)
else:
base_url = host
return base_url
else:
return "" | rebelact/mailsync-app | mailsync/template.py | Python | mit | 841 |
import operator
class Operation:
_name_to_func = {
'<': operator.lt,
'<=': operator.le,
'==': operator.eq,
'!=': operator.ne,
'>=': operator.ge,
'>': operator.gt,
'not': operator.not_,
'bool': operator.truth,
'abs': operator.abs,
'+': operator.add,
'&': operator.and_,
'//': operator.floordiv,
'~': operator.inv,
'<<': operator.lshift,
'%': operator.mod,
'*': operator.mul,
'|': operator.or_,
'**': operator.pow,
'>>': operator.rshift,
'/': operator.truediv,
'^': operator.xor,
}
def __init__(self, name, func=None):
self._name = name
self._func = func if func is not None else self._name_to_func[name]
def __call__(self, arg1, arg2=None):
return self._func(arg1) if arg2 is None else self._func(arg1, arg2)
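# Illustrative usage sketch (not part of the original module): Operation
# dispatches to the unary call form when the second argument is omitted.
if __name__ == '__main__':
    add = Operation('+')
    assert add(2, 3) == 5    # binary form: operator.add
    invert = Operation('~')
    assert invert(0) == -1   # unary form: operator.inv
    double = Operation('double', func=lambda v: 2 * v)  # hypothetical custom op
    assert double(21) == 42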
| asherbar/json-plus-plus | jpp/parser/operation.py | Python | mit | 920 |
from tests import ResTest
from res.enkf import EnsembleConfig, ResConfig
from res.enkf import ConfigKeys
from ecl.util.test import TestAreaContext
from res.enkf.enums import GenDataFileType
class EnsembleConfigTest(ResTest):
def setUp(self):
self.case_directory = self.createTestPath("local/simple_config/")
self.case_file = 'simple_config/ensemble_config'
def test_create(self):
conf = EnsembleConfig( )
self.assertEqual( len(conf) , 0 )
self.assertFalse( "XYZ" in conf )
with self.assertRaises(KeyError):
node = conf["KEY"]
def test_ensemble_config_constructor(self):
config_dict = {
ConfigKeys.GEN_KW_TAG_FORMAT: '<%s>',
ConfigKeys.GEN_PARAM: [
{
ConfigKeys.NAME: 'GP',
ConfigKeys.FORWARD_INIT: False,
ConfigKeys.INPUT_FORMAT: GenDataFileType.ASCII,
ConfigKeys.OUTPUT_FORMAT: GenDataFileType.ASCII,
ConfigKeys.INIT_FILES: 'GP/GP.txt',
ConfigKeys.ECL_FILE: 'GP.txt',
ConfigKeys.MIN_STD: None,
ConfigKeys.TEMPLATE: None,
ConfigKeys.KEY_KEY: None
},
],
ConfigKeys.GEN_DATA: [
{
ConfigKeys.NAME: 'SNAKE_OIL_OPR_DIFF',
ConfigKeys.INPUT_FORMAT: GenDataFileType.ASCII,
ConfigKeys.RESULT_FILE: 'snake_oil_opr_diff_%d.txt',
ConfigKeys.REPORT_STEPS: [199],
ConfigKeys.INIT_FILES: None,
ConfigKeys.ECL_FILE: None,
ConfigKeys.TEMPLATE: None,
ConfigKeys.KEY_KEY: None
},
{
ConfigKeys.NAME: 'SNAKE_OIL_GPR_DIFF',
ConfigKeys.INPUT_FORMAT: GenDataFileType.ASCII,
ConfigKeys.RESULT_FILE: 'snake_oil_gpr_diff_%d.txt',
ConfigKeys.REPORT_STEPS: [199],
ConfigKeys.INIT_FILES: None,
ConfigKeys.ECL_FILE: None,
ConfigKeys.TEMPLATE: None,
ConfigKeys.KEY_KEY: None
}
],
ConfigKeys.CUSTOM_KW: [
{
ConfigKeys.NAME: 'SNAKE_OIL_NPV',
ConfigKeys.RESULT_FILE: 'snake_oil_npv.txt',
ConfigKeys.OUT_FILE: None
}
],
ConfigKeys.GEN_KW: [
{
ConfigKeys.NAME: 'MULTFLT',
ConfigKeys.TEMPLATE: 'configuration_tests/FAULT_TEMPLATE',
ConfigKeys.OUT_FILE: 'MULTFLT.INC',
ConfigKeys.PARAMETER_FILE: 'configuration_tests/MULTFLT.TXT',
ConfigKeys.INIT_FILES: None,
ConfigKeys.MIN_STD: None,
ConfigKeys.FORWARD_INIT: False,
}
],
ConfigKeys.SURFACE_KEY: [
{
ConfigKeys.NAME: 'TOP',
ConfigKeys.INIT_FILES: 'configuration_tests/surface/small.irap',
ConfigKeys.OUT_FILE: 'configuration_tests/surface/small_out.irap',
ConfigKeys.BASE_SURFACE_KEY: 'configuration_tests/surface/small.irap',
ConfigKeys.MIN_STD: None,
ConfigKeys.FORWARD_INIT: False
}
],
ConfigKeys.SUMMARY: ['WOPR:OP_1'],
ConfigKeys.FIELD_KEY: [
{
ConfigKeys.NAME: 'PERMX',
ConfigKeys.VAR_TYPE: 'PARAMETER',
ConfigKeys.INIT_FILES: 'fields/permx%d.grdecl',
ConfigKeys.OUT_FILE: 'permx.grdcel',
ConfigKeys.ENKF_INFILE: None,
ConfigKeys.INIT_TRANSFORM: None,
ConfigKeys.OUTPUT_TRANSFORM: None,
ConfigKeys.INPUT_TRANSFORM: None,
ConfigKeys.MIN_KEY: None,
ConfigKeys.MAX_KEY: None,
ConfigKeys.MIN_STD: None,
ConfigKeys.FORWARD_INIT: False
}
],
ConfigKeys.SCHEDULE_PREDICTION_FILE: [
{
ConfigKeys.TEMPLATE: 'configuration_tests/input/schedule.sch',
ConfigKeys.INIT_FILES: None,
ConfigKeys.MIN_STD: None,
ConfigKeys.PARAMETER_KEY: None
}
],
ConfigKeys.CONTAINER_KEY: [
{
ConfigKeys.NAME: 'CXX',
ConfigKeys.ARGLIST: ['PERMX', 'MULTFLT']
}
]
}
self.case_directory = self.createTestPath("local/configuration_tests/")
with TestAreaContext("ensemble_config_test") as work_area:
work_area.copy_directory(self.case_directory)
res_config = ResConfig('configuration_tests/ensemble_config.ert')
ensemble_config_file = res_config.ensemble_config
ensemble_config_dict = EnsembleConfig(config_dict=config_dict, grid=res_config.ecl_config.getGrid())
self.assertEqual(ensemble_config_dict, ensemble_config_file)
| Statoil/libres | python/tests/res/enkf/test_ensemble_config.py | Python | gpl-3.0 | 5,360 |
from unittest import result
from unittest.util import strclass
class TreeTextTestResult(result.TestResult):
indent = ' ' * 4
test_class = None
separator1 = '=' * 70
separator2 = '-' * 70
def __init__(self, stream, descriptions, verbosity):
super(TreeTextTestResult, self).__init__(stream, descriptions, verbosity)
self.stream = stream
self.descriptions = descriptions
self.verbosity = verbosity
def getDescription(self, test):
description = test.shortDescription()
if self.descriptions and description:
return self.indent + description
else:
return self.indent + str(test)
def getClassDescription(self, test):
test_class = test.__class__
description = test.__doc__
if self.descriptions and description:
return description.split('\n')[0].strip()
else:
return strclass(test_class)
def startTest(self, test):
super(TreeTextTestResult, self).startTest(test)
if self.test_class != test.__class__:
self.stream.writeln()
self.test_class = test.__class__
self.stream.writeln(self.getClassDescription(test))
self.stream.write(self.getDescription(test))
self.stream.write(' ... ')
self.stream.flush()
def addSuccess(self, test):
super(TreeTextTestResult, self).addSuccess(test)
self.stream.writeln("ok")
def addError(self, test, err):
super(TreeTextTestResult, self).addError(test, err)
self.stream.writeln("ERROR")
def addFailure(self, test, err):
super(TreeTextTestResult, self).addFailure(test, err)
self.stream.writeln("FAIL")
def addSkip(self, test, reason):
super(TreeTextTestResult, self).addSkip(test, reason)
self.stream.writeln("skipped {0!r}".format(reason))
def addExpectedFailure(self, test, err):
super(TreeTextTestResult, self).addExpectedFailure(test, err)
self.stream.writeln("expected failure")
def addUnexpectedSuccess(self, test):
super(TreeTextTestResult, self).addUnexpectedSuccess(test)
self.stream.writeln("unexpected success")
def printErrors(self):
self.stream.writeln()
self.printErrorList('ERROR', self.errors)
self.printErrorList('FAIL', self.failures)
def printErrorList(self, flavour, errors):
for test, err in errors:
self.stream.writeln(self.separator1)
self.stream.writeln("%s: %s" % (flavour,self.getDescription(test)))
self.stream.writeln(self.separator2)
self.stream.writeln("%s" % err)
| ntruessel/qcgc | test/runner/TreeTextTestResult.py | Python | mit | 2,681 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
import mock
from quantum.agent import rpc
from quantum.openstack.common import cfg
from quantum.openstack.common import context
class AgentRPCPluginApi(unittest.TestCase):
def _test_rpc_call(self, method):
agent = rpc.PluginApi('fake_topic')
ctxt = context.RequestContext('fake_user', 'fake_project')
expect_val = 'foo'
with mock.patch('quantum.openstack.common.rpc.call') as rpc_call:
rpc_call.return_value = expect_val
func_obj = getattr(agent, method)
if method == 'tunnel_sync':
actual_val = func_obj(ctxt, 'fake_tunnel_ip')
else:
actual_val = func_obj(ctxt, 'fake_device', 'fake_agent_id')
self.assertEqual(actual_val, expect_val)
def test_get_device_details(self):
self._test_rpc_call('get_device_details')
def test_update_device_down(self):
self._test_rpc_call('update_device_down')
def test_tunnel_sync(self):
self._test_rpc_call('tunnel_sync')
class AgentRPCMethods(unittest.TestCase):
def test_create_consumers(self):
dispatcher = mock.Mock()
expected = [
mock.call(new=True),
mock.call().create_consumer('foo-topic-op', dispatcher,
fanout=True),
mock.call().consume_in_thread()
]
call_to_patch = 'quantum.openstack.common.rpc.create_connection'
with mock.patch(call_to_patch) as create_connection:
conn = rpc.create_consumers(dispatcher, 'foo', [('topic', 'op')])
create_connection.assert_has_calls(expected)
class AgentRPCNotificationDispatcher(unittest.TestCase):
def setUp(self):
self.create_connection_p = mock.patch(
'quantum.openstack.common.rpc.create_connection')
self.create_connection = self.create_connection_p.start()
cfg.CONF.set_override('default_notification_level', 'INFO')
cfg.CONF.set_override('notification_topics', ['notifications'])
def tearDown(self):
self.create_connection_p.stop()
cfg.CONF.reset()
def test_init(self):
nd = rpc.NotificationDispatcher()
expected = [
mock.call(new=True),
mock.call().declare_topic_consumer(topic='notifications.info',
queue_name=mock.ANY,
callback=nd._add_to_queue),
mock.call().consume_in_thread()
]
self.create_connection.assert_has_calls(expected)
def test_add_to_queue(self):
nd = rpc.NotificationDispatcher()
nd._add_to_queue('foo')
self.assertEqual(nd.queue.get(), 'foo')
def _test_run_dispatch_helper(self, msg, handler):
msgs = [msg]
def side_effect(*args):
return msgs.pop(0)
with mock.patch('eventlet.Queue.get') as queue_get:
queue_get.side_effect = side_effect
nd = rpc.NotificationDispatcher()
            # the IndexError raised once msgs is exhausted stops the
            # dispatch loop after a single iteration
self.assertRaises(IndexError, nd.run_dispatch, handler)
def test_run_dispatch_once(self):
class SimpleHandler:
def __init__(self):
self.network_delete_end = mock.Mock()
msg = dict(event_type='network.delete.end',
payload=dict(network_id='a'))
handler = SimpleHandler()
self._test_run_dispatch_helper(msg, handler)
        handler.network_delete_end.assert_called_once_with(msg['payload'])
def test_run_dispatch_missing_handler(self):
        class SimpleHandler:
            def __init__(self):
                self.subnet_create_start = mock.Mock()
msg = dict(event_type='network.delete.end',
payload=dict(network_id='a'))
handler = SimpleHandler()
with mock.patch('quantum.agent.rpc.LOG') as log:
self._test_run_dispatch_helper(msg, handler)
log.assert_has_calls([mock.call.debug(mock.ANY)])
def test_run_dispatch_handler_raises(self):
class SimpleHandler:
def network_delete_end(self, payload):
raise Exception('foo')
msg = dict(event_type='network.delete.end',
payload=dict(network_id='a'))
handler = SimpleHandler()
with mock.patch('quantum.agent.rpc.LOG') as log:
self._test_run_dispatch_helper(msg, handler)
log.assert_has_calls([mock.call.warn(mock.ANY)])
| aristanetworks/arista-ovs-quantum | quantum/tests/unit/test_agent_rpc.py | Python | apache-2.0 | 5,185 |
import openmc
import openmc.capi
from openmc.stats import Box
from openmc.material import Materials
import pytest
from tests.testing_harness import PyAPITestHarness
pytestmark = pytest.mark.skipif(
not openmc.capi._dagmc_enabled(),
reason="DAGMC CAD geometry is not enabled.")
class UWUWTest(PyAPITestHarness):
def _build_inputs(self):
model = openmc.model.Model()
# settings
model.settings.batches = 5
model.settings.inactive = 0
model.settings.particles = 100
        source = openmc.Source(space=Box([-4, -4, -4], [4, 4, 4]))
model.settings.source = source
model.settings.dagmc = True
model.settings.export_to_xml()
# tally
tally = openmc.Tally()
tally.scores = ['total']
tally.filters = [openmc.CellFilter(1)]
model.tallies = [tally]
model.tallies.export_to_xml()
def test_uwuw():
harness = UWUWTest('statepoint.5.h5')
harness.main()
| wbinventor/openmc | tests/regression_tests/uwuw/test.py | Python | mit | 1,025 |
#!/usr/bin/python
#
# Script written by Legoktm, 2011
# Released into the Public Domain on November, 16, 2011
# This product comes with no warranty of any sort.
# Enjoy!
#
from commands import getoutput
def notify(string, program=False):
if not program:
command = 'growlnotify Python -m "%s"' %string
else:
command = 'growlnotify "%s" -m "%s"' %(program, string)
getoutput(command)
def notifyold(string):
    # THIS IS THE OLD METHOD. YOU SHOULD ONLY USE THIS IF YOU DO NOT HAVE growlnotify INSTALLED.
    # Emits the xterm/iTerm "OSC 9" notification escape; the leading ESC byte
    # (\033) was presumably a literal control character lost in extraction.
    print "\033]9;%s\n" % string
| legoktm/legoktm | icstalker/iclib/growl.py | Python | mit | 535 |
def itemTemplate():
return ['object/tangible/wearables/armor/nightsister/shared_armor_nightsister_bicep_r_s01.iff'] # needs correct iff still
| agry/NGECore2 | scripts/loot/lootItems/legendarylootchest/nightsister_melee_armguard.py | Python | lgpl-3.0 | 145 |
"""Google Cloud function that loads the Datastore data into BQ."""
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- coding: utf-8 -*-
import base64
import datetime
import json
import os
from typing import Any, Dict, Optional
from google.cloud import bigquery
from google.cloud import datastore as store
from google.cloud.functions_v1.context import Context
import numpy as np
import pandas as pd
PROJECT_ID = os.getenv("PROJECT_ID", "")
BQ_REPORTING_DATASET = os.getenv("BQ_REPORTING_DATASET", "")
BQ_REPORTING_TABLE = os.getenv("BQ_REPORTING_TABLE", "")
def _get_data_from_datastore(current_date: str) -> pd.DataFrame:
"""Extracts all entities processed at current_date.
Args:
current_date: string representing the current date in YYYYMMDD format
Returns:
A dataframe containing all data extracted from Datastore
"""
db = store.Client(PROJECT_ID)
ancestor = db.key("processing_date", current_date)
query = db.query(kind="child_file", ancestor=ancestor)
results = list(query.fetch())
df = pd.DataFrame(results)
if "last_processed_timestamp" in df:
df["last_processed_timestamp"] = df["last_processed_timestamp"].astype(
np.int64) // 10**9
return df
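# Illustrative entity shape (an assumption inferred from the BigQuery schema
# below): each 'child_file' entity stored under Key('processing_date',
# 'YYYYMMDD') carries fields such as 'cid', 'child_file_name',
# 'child_num_rows', 'child_errors' and a datetime 'last_processed_timestamp'.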
def _write_to_bigquery(df: pd.DataFrame, table_name: str):
"""Writes the given dataframe into the BQ table.
Args:
df: A pandas dataframe representing the data to be written
table_name: A string representing the full path of the metadata BQ table
"""
client = bigquery.Client()
job_config = bigquery.LoadJobConfig()
job_config.write_disposition = "WRITE_TRUNCATE"
job_config.schema = _get_bq_schema()
job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON
job = client.load_table_from_json(
json.loads(df.to_json(orient="records")),
table_name,
job_config=job_config)
job.result()
def _get_bq_schema():
"""Returns the schema of the BigQuery table used by the solution.
Args:
None
Returns:
A list of BigQuery fields.
"""
return [
bigquery.SchemaField(name="cid", field_type="STRING", mode="REQUIRED"),
bigquery.SchemaField(
name="processing_date", field_type="STRING", mode="REQUIRED"),
bigquery.SchemaField(
name="target_platform", field_type="STRING", mode="REQUIRED"),
bigquery.SchemaField(
name="parent_file_name", field_type="STRING", mode="REQUIRED"),
bigquery.SchemaField(
name="parent_file_path", field_type="STRING", mode="REQUIRED"),
bigquery.SchemaField(
name="parent_file_date", field_type="STRING", mode="REQUIRED"),
bigquery.SchemaField(
name="parent_total_files", field_type="INT64", mode="REQUIRED"),
bigquery.SchemaField(
name="parent_total_rows", field_type="INT64", mode="REQUIRED"),
bigquery.SchemaField(
name="child_file_name", field_type="STRING", mode="REQUIRED"),
bigquery.SchemaField(
name="child_num_rows", field_type="INT64", mode="REQUIRED"),
bigquery.SchemaField(
name="child_num_errors", field_type="INT64", mode="REQUIRED"),
bigquery.SchemaField(
name="child_errors",
field_type="RECORD",
mode="REPEATED",
fields=[
bigquery.SchemaField(
name="code", field_type="STRING", mode="NULLABLE"),
bigquery.SchemaField(
name="message", field_type="STRING", mode="NULLABLE"),
bigquery.SchemaField(
name="count", field_type="INTEGER", mode="NULLABLE")
]),
bigquery.SchemaField(
name="last_processed_timestamp",
field_type="TIMESTAMP",
mode="REQUIRED")
]
def main(event: Dict[str, Any], context: Optional[Context] = None):
"""Triggers the message processing.
Args:
event (dict): The dictionary with data specific to this type of event. The
`data` field contains the PubsubMessage message. The `attributes` field
will contain custom attributes if there are any.
context (google.cloud.functions.Context): The Cloud Functions event
metadata. The `event_id` field contains the Pub/Sub message ID. The
`timestamp` field contains the publish time.
"""
del context
del event
date = datetime.date.today().strftime("%Y%m%d")
table_name = f"{PROJECT_ID}.{BQ_REPORTING_DATASET}.{BQ_REPORTING_TABLE}_{date}"
df = _get_data_from_datastore(date)
if not df.empty:
_write_to_bigquery(df, table_name)
def _test_main():
"""Function for optional testing of the main function from the command line.
Args:
None.
Returns:
None
"""
data = {}
  main(
      event={"data": base64.b64encode(json.dumps(data).encode("utf-8"))})
def _test_get_data_from_datastore():
"""Used for testing data extraction from Datastore using the command line.
Args:
None.
Returns:
None
"""
date = "YYYYMMDD"
table_name = f"{PROJECT_ID}.{BQ_REPORTING_DATASET}.{BQ_REPORTING_TABLE}_{date}"
df = _get_data_from_datastore(date)
if not df.empty:
_write_to_bigquery(df, table_name)
if __name__ == "__main__":
_test_get_data_from_datastore()
| google/centimani | cfs/reporting_data_extractor/main.py | Python | apache-2.0 | 5,698 |
# -*- coding: utf-8 -*-
# Copyright 2007-2021 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from hyperspy._components.expression import Expression
class Bleasdale(Expression):
r"""Bleasdale function component.
Also called the Bleasdale-Nelder function. Originates from the description of the yield-density relationship in crop growth.
.. math::
f(x) = \left(a+b\cdot x\right)^{-1/c}
Parameters
-----------
a : Float
b : Float
c : Float
**kwargs
Extra keyword arguments are passed to the ``Expression`` component.
For :math:`(a+b\cdot x)\leq0`, the component will be set to 0.
"""
def __init__(self, a=1., b=1., c=1., module="numexpr", **kwargs):
super().__init__(
expression="where((a + b * x) > 0, (a + b * x) ** (-1 / c), 0)",
name="Bleasdale",
a=a,
b=b,
c=c,
module=module,
autodoc=False,
compute_gradients=False,
**kwargs)
def grad_a(self, x):
"""
        Returns d(function)/d(a)
"""
a = self.a.value
b = self.b.value
c = self.c.value
return np.where((a + b * x) > 0, -(a + b * x) ** (-1 / c - 1) / c, 0)
def grad_b(self, x):
"""
        Returns d(function)/d(b)
"""
a = self.a.value
b = self.b.value
c = self.c.value
return np.where((a + b * x) > 0, -x * (a + b * x) ** (-1 / c - 1) / c
, 0)
def grad_c(self, x):
"""
        Returns d(function)/d(c)
"""
a = self.a.value
b = self.b.value
c = self.c.value
return np.where((a + b * x) > 0, np.log(a + b * x) / (c ** 2. *
(b * x + a) ** (1. / c)), 0)
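# Illustrative sketch (not part of the original file): evaluate the component
# and one analytic gradient on a small grid; the parameter values are
# arbitrary.
if __name__ == '__main__':
    comp = Bleasdale(a=1., b=2., c=3.)
    x = np.linspace(0., 5., 6)
    print(comp.function(x))  # (a + b*x)**(-1/c) where a + b*x > 0, else 0
    print(comp.grad_a(x))    # matches the analytic grad_a above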
| erh3cq/hyperspy | hyperspy/_components/bleasdale.py | Python | gpl-3.0 | 2,498 |
import cgi
import datetime
import time
from tempfile import NamedTemporaryFile
from fabric.api import *
from fabric import colors
@task
def update():
"""Requires code_root env variable. Does a git pull and restarts the web server"""
require('code_root')
git_pull()
restart_web_server()
@task
def git_pull():
"""Does a git stash then a git pull on the project"""
run('cd %s; git stash; git pull' % (env.code_root))
@task
def restart_web_server():
"""Restart the web server"""
run('%s/apache2/bin/restart' % env.code_root_parent)
@task
def migrate():
"""Runs python manage.py migrate"""
run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))
@task
def collect_static():
"""Runs python manage.py collect_static --noinput"""
run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))
@task
def pip_install():
"""Runs pip install -r requirements/frozen.txt (for example site)"""
run('cd %s; pip install -r requirements/frozen.txt' % (env.code_root))
@task
def publish_changes():
"""Runs these functions in order (git_pull, pip_install, migrate, collect_static, restart_web_server)"""
git_pull()
pip_install()
migrate()
collect_static()
restart_web_server()
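# Illustrative shell invocation for the task above (host and user are
# assumptions):
#   fab -H deploy@example.com publish_changes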
@task
def do_nothing():
for x in range(0, 20):
print 'nothing {}'.format(x)
time.sleep(0.2)
input = prompt('Enter something:')
for x in range(0, 20):
print 'nothing {} - {}'.format(x, input)
time.sleep(0.2)
@task
def color_test():
for x in range(0, 2):
print colors.blue('Blue text', bold=False) + '\n'
time.sleep(0.2)
print colors.cyan('cyan text', bold=False)
time.sleep(0.2)
print colors.green('green text', bold=False)
time.sleep(0.2)
print colors.magenta('magenta text', bold=False)
time.sleep(0.2)
print colors.red('red text', bold=False)
time.sleep(0.2)
print colors.white('white text', bold=False)
time.sleep(0.2)
print colors.yellow('yellow text', bold=False)
time.sleep(0.2)
print colors.blue('Blue text bold', bold=True)
time.sleep(0.2)
print colors.cyan('cyan text bold', bold=True)
time.sleep(0.2)
print colors.green('green text bold', bold=True)
time.sleep(0.2)
print colors.magenta('magenta text bold', bold=True)
time.sleep(0.2)
print colors.red('red text bold', bold=True)
time.sleep(0.2)
print colors.white('white text bold', bold=True)
time.sleep(0.2)
print colors.yellow('yellow text bold', bold=True)
time.sleep(0.2)
@task
def test_env(argument="nothing"):
print("Task Arguments:")
print argument
print
print("Task Env:")
for x, y in env.iteritems():
print '{}: {}'.format(x, y)
@task
def update_sandbox_site(comment_text):
"""put's a text file on the server"""
env.user = 'root'
env.key = '''-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEArBsTE4MxG/x5PVcL4bNgvcIVJPdS2xWzmZkMZMeOkKx5y+Ew
DWT0nxU6iZn59M4p5B+xcxCzilnu0ljzeooxUbFRR/2jdvuAVEQRXcAFi17WtHP3
Up7U/t+vBmv2ZNLAlOn6U10rw36+wPmiG/yKTp9kYNfCE9B0+RS/5DPHbR2FS84D
49vxruJKMH9zW0A0Me+PskOQtKCcDxT966Z2MsNZiPBti5ZK4yrjDYBM8E9PfXzJ
tlL/j6G8Z8ALm/MA4VypVMUEaRuRu8TfoMoTAeB2ixRYjoTJMi6r8s7WLOXaGmuV
g3Z1Gj20sdo2Fi5ordBSm+DDxVHxIBE4iRWkAQIDAQABAoIBAB1u4+xKW3O10eYz
pMyMqNbLAmK4CWt+YqC6E+yIVFFZrdq4QEeKJGuwbbpqotzDBVcGNIrBKHNYvgcr
PziNubGG6aeuMO6ARIokufOWi1wyc/WYf4uZrkOIbZ5jiFfl0xmkijMHlBxy6JyI
FLlEj0Ky76/ANmi9FcQjUE3urQRz6CaDHTjNGAK9qlh61GknGSb99E7BhGMigYkY
tVDw3RdQtaVJD0XcTpT/e6H3u6IelL3GS+F+JYpgHUC2gEMRTX8qluuLk8zjzNV8
qS0CXmOXNcCKbJjxWVxXubAlclHfLnpAU9s+Dvp3X+qs1N2LdGyNE+LpGvQe9MDp
Mg0pKaECgYEA2jbW7QGA7mpsUqdzdXLbVTihq5EqjPSLSVbI1h/z+DGVtsgCDwEd
+7EeB4tU/RNb5Ky+DGWjGEm2Rw+ntjhwO58lMh45eE2Z4vGnfkKo9uOMlWi/yH+b
tcB8t78cmazZXYK3dZgsjWsXf4FyQ6aJHIvwhN2xQRmGu2hX36S1IRMCgYEAyehO
KinudGEE7PrEbxt6MoYGL9pupELCE2oKC292PMRharPN7C2+7E+DbsDEfd4t6nEc
EzOWWADnFNh3VEGrsDwAgjgh8j2vgf35mLIaRXYXXcJlEtBIOVI9yOYOo8GdA/XV
yiOGVjjHrwVbWXCNULNKUsSmVN67HCOUXmlUHRsCgYBGF5Vj3bbHXkHbLtRkZndT
YXR0wpVTX32aGhk6xlq8X1kCtC4NGcPCw/qsW7H59Izw4BfPrZn8xDibjMjHPEu4
qv7soU6+eNa0UgEGCm1xmFfg6huoUGz4rZKiBu4t4pqTcdhyGmY9KqgKmc7VMhoa
pEymsPstuQBRFEwdly9jJwKBgEcvci+HbRz2/8eVeiA6LdEWU6QXfR7IsqgpoLT7
bVJrYnU+Q4Hbdw7V0d8Ac8Z0yPd5PY6/h2grmU1OLHQ2WxPdc8h1hfJkMTbBlnhx
grWutvpFiWEisfQTvNjR06OEpZk52VBVSg2oIy7f0p8sAYbMT43y6znM9WcsXCkV
NaS1AoGBAM8jX1zQP/jo1PWuwjzX38xyoDeludCmT09YRsC4C41LbI1Wj8mfxrf5
bQiPkbIZOeTG6ivvPbKbiwF5W+i92W+uUag5kotG+xCQUrWDrj2ELxRbAetT4dRh
vv+QoeHlCNRQ+2lsHIoqcAPFCXw6HTT5O/0MIAcEZICT7nf6znXX
-----END RSA PRIVATE KEY-----'''
file_to_deliever = NamedTemporaryFile(delete=False)
file_text = "Deployed at: {} <br /> Comment: {}".format(datetime.datetime.now().strftime('%c'), cgi.escape(comment_text))
file_to_deliever.write(file_text)
file_to_deliever.close()
put(file_to_deliever.name, '/var/www/html/index.html', use_sudo=True)
| paperreduction/fabric-bolt | fabric_bolt/fabfile.py | Python | mit | 5,177 |
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from airflow.contrib.hooks.ssh_hook import SSHHook
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class SSHOperator(BaseOperator):
"""
SSHOperator to execute commands on given remote host using the ssh_hook.
:param ssh_hook: predefined ssh_hook to use for remote execution
:type ssh_hook: :class:`SSHHook`
:param ssh_conn_id: connection id from airflow Connections
:type ssh_conn_id: str
:param remote_host: remote host to connect
:type remote_host: str
:param command: command to execute on remote host
:type command: str
:param timeout: timeout for executing the command.
:type timeout: int
    :param do_xcom_push: return the stdout, which also gets set in XCom by the Airflow platform
:type do_xcom_push: bool
"""
template_fields = ('command',)
@apply_defaults
def __init__(self,
ssh_hook=None,
ssh_conn_id=None,
remote_host=None,
command=None,
timeout=10,
do_xcom_push=False,
*args,
**kwargs):
super(SSHOperator, self).__init__(*args, **kwargs)
self.ssh_hook = ssh_hook
self.ssh_conn_id = ssh_conn_id
self.remote_host = remote_host
self.command = command
self.timeout = timeout
self.do_xcom_push = do_xcom_push
def execute(self, context):
try:
if self.ssh_conn_id and not self.ssh_hook:
self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)
if not self.ssh_hook:
raise AirflowException("can not operate without ssh_hook or ssh_conn_id")
if self.remote_host is not None:
self.ssh_hook.remote_host = self.remote_host
ssh_client = self.ssh_hook.get_conn()
if not self.command:
raise AirflowException("no command specified so nothing to execute here.")
            # Automatically request a TTY when it's required (e.g. for sudo commands)
get_pty = False
if self.command.startswith('sudo'):
get_pty = True
# set timeout taken as params
stdin, stdout, stderr = ssh_client.exec_command(command=self.command,
get_pty=get_pty,
timeout=self.timeout
)
exit_status = stdout.channel.recv_exit_status()
            if exit_status == 0:
                # only return the output if do_xcom_push is set;
                # otherwise it's not supposed to be disclosed
if self.do_xcom_push:
return stdout.read()
else:
error_msg = stderr.read()
raise AirflowException("error running cmd: {0}, error: {1}"
.format(self.command, error_msg))
except Exception as e:
raise AirflowException("SSH operator error: {0}".format(str(e)))
return True
def tunnel(self):
ssh_client = self.ssh_hook.get_conn()
ssh_client.get_transport()
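
# Minimal usage sketch (added example, not part of the original module): how
# this operator might be wired into a DAG. The dag_id, connection id and
# command below are illustrative assumptions, not values defined in Airflow.
if __name__ == '__main__':
    from datetime import datetime
    from airflow.models import DAG

    example_dag = DAG('ssh_operator_example',
                      start_date=datetime(2017, 1, 1),
                      schedule_interval=None)
    run_uptime = SSHOperator(task_id='run_uptime',
                             ssh_conn_id='ssh_default',  # assumed connection id
                             command='uptime',
                             do_xcom_push=True,  # expose stdout via XCom
                             dag=example_dag)
    assert run_uptime.command == 'uptime'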
| holygits/incubator-airflow | airflow/contrib/operators/ssh_operator.py | Python | apache-2.0 | 3,889 |
from base import Plugin
class Graph(Plugin):
"Make pretty graphs of your requests"
active = False
def __init__(self):
super(Graph, self).__init__()
self.request_graph = self.data['request_graph'] = {}
        # imported here so the dependency stays optional for users of other plugins
        import pygraphviz
self.graph = pygraphviz.AGraph(directed=True)
    def urls_parsed(self, sender, fro, returned_urls, **kwargs):
        # pygraphviz's add_node() returns None; nodes are addressed by name
        self.graph.add_node(str(fro), shape='tripleoctagon')
        for url in returned_urls:
            if not self.graph.has_node(str(url)):
                self.graph.add_node(str(url))
                self.graph.add_edge(str(fro), str(url))
def finish_run(self, sender, **kwargs):
print "Making graph of your URLs, this may take a while"
self.graph.layout(prog='fdp')
self.graph.draw('my_urls.png')
| frac/django-test-utils | test_utils/crawler/plugins/graph.py | Python | mit | 830 |
# -*- coding: utf-8 -*-
#
# Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironicclient.common import utils
from ironicclient.openstack.common import cliutils
def _print_port_show(port):
fields = ['address', 'created_at', 'extra', 'node_uuid', 'updated_at',
'uuid']
data = dict([(f, getattr(port, f, '')) for f in fields])
cliutils.print_dict(data, wrap=72)
@cliutils.arg('port', metavar='<port id>', help="UUID of port")
def do_port_show(cc, args):
"""Show a port."""
port = cc.port.get(args.port)
_print_port_show(port)
@cliutils.arg(
'--limit',
metavar='<limit>',
type=int,
help='Maximum number of ports to return per request, '
'0 for no limit. Default is the maximum number used '
'by the Ironic API Service.')
@cliutils.arg(
'--marker',
metavar='<marker>',
    help='Port UUID (e.g. of the last port in the list from '
'a previous request). Returns the list of ports '
'after this UUID.')
def do_port_list(cc, args):
"""List ports."""
params = {}
if args.marker is not None:
params['marker'] = args.marker
if args.limit is not None:
params['limit'] = args.limit
port = cc.port.list(**params)
field_labels = ['UUID', 'Address']
fields = ['uuid', 'address']
cliutils.print_list(port, fields, field_labels, sortby_index=None)
@cliutils.arg(
'-a', '--address',
metavar='<address>',
required=True,
help='MAC Address for this port')
@cliutils.arg(
'-n', '--node_uuid',
metavar='<node uuid>',
required=True,
help='UUID of the node that this port belongs to')
@cliutils.arg(
'-e', '--extra',
metavar="<key=value>",
action='append',
help="Record arbitrary key/value metadata. "
"Can be specified multiple times")
def do_port_create(cc, args):
"""Create a new port."""
field_list = ['address', 'extra', 'node_uuid']
fields = dict((k, v) for (k, v) in vars(args).items()
if k in field_list and not (v is None))
fields = utils.args_array_to_dict(fields, 'extra')
port = cc.port.create(**fields)
field_list.append('uuid')
data = dict([(f, getattr(port, f, '')) for f in field_list])
cliutils.print_dict(data, wrap=72)
@cliutils.arg('port', metavar='<port id>', nargs='+', help="UUID of port")
def do_port_delete(cc, args):
"""Delete a port."""
for p in args.port:
cc.port.delete(p)
print ('Deleted port %s' % p)
@cliutils.arg('port', metavar='<port id>', help="UUID of port")
@cliutils.arg(
'op',
metavar='<op>',
choices=['add', 'replace', 'remove'],
help="Operations: 'add', 'replace' or 'remove'")
@cliutils.arg(
'attributes',
metavar='<path=value>',
nargs='+',
action='append',
default=[],
help="Attributes to add/replace or remove "
"(only PATH is necessary on remove)")
def do_port_update(cc, args):
"""Update a port."""
patch = utils.args_array_to_patch(args.op, args.attributes[0])
port = cc.port.update(args.port, patch)
_print_port_show(port)
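
# Usage sketch (added note, not part of the original module): these do_port_*
# functions back the `ironic` CLI, where underscores map to dashes, roughly:
#   ironic port-show <port uuid>
#   ironic port-create -a AA:BB:CC:DD:EE:FF -n <node uuid>
#   ironic port-update <port uuid> replace extra/key=value
# The argument values above are illustrative placeholders.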
| ramineni/myclient | ironicclient/v1/port_shell.py | Python | apache-2.0 | 3,685 |
import unittest
import accepton
class ClientTest(unittest.TestCase):
def test_configurable_environment(self):
client = accepton.Client(environment="development")
self.assertEqual("development", client.environment)
def test_defaults_to_production_environment(self):
client = accepton.Client()
self.assertEqual("production", client.environment)
def test_has_api_key_is_true_when_configured(self):
client = accepton.Client(api_key="abc123")
self.assertEqual(True, client.has_api_key())
def test_has_api_key_is_false_when_not_configured(self):
client = accepton.Client()
self.assertEqual(False, client.has_api_key())
def test_user_agent(self):
expected = "accepton-python/{0}".format(accepton.__version__)
actual = accepton.Client().user_agent
self.assertEqual(expected, actual)
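
# Added runner guard (an assumption, not in the original file): lets the suite
# be run directly with `python test_client.py` as well as via a test runner.
if __name__ == "__main__":
    unittest.main()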
| accepton/accepton-python | tests/test_client.py | Python | mit | 890 |
# Python - 3.6.0
def last_digit(n1, n2):
    """Return the last digit of n1 ** n2 via modular exponentiation."""
    if not n2:
        return 1  # n ** 0 == 1 (0 ** 0 treated as 1 by the kata)
    n1 %= 10  # only the last digit of the base matters
    r = 1
    # Square-and-multiply: halve the exponent each step and fold the current
    # base into r whenever the bit shifted out was set.
    while n2 > 1:
        n2, m = n2 >> 1, n2 & 1
        if m:
            r = (r * n1) % 10
        n1 = (n1 * n1) % 10
    return (n1 * r) % 10
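
# Worked examples (added, not part of the original solution) exercising the
# square-and-multiply loop above:
if __name__ == "__main__":
    assert last_digit(4, 1) == 4            # exponent 1: loop never runs
    assert last_digit(2, 3) == 8            # 2 ** 3 = 8
    assert last_digit(9, 7) == 9            # 9 ** 7 = 4782969
    assert last_digit(10, 10 ** 10) == 0    # powers of 10 end in 0
    assert last_digit(0, 0) == 1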
| RevansChen/online-judge | Codewars/5kyu/last-digit-of-a-large-number/Python/solutions1.py | Python | mit | 244 |
"""
Simulation of Game of Life with pygame
Instructions:
Press ESC or F4 to quit the game
Press RETURN to restart the game
Press SPACE to stop or resume the game
Press "p" or "+" to zoom in
Press "m" or "-" to zoom out
Press one of the letter below to change the color of the alive cells:
- r: red
- b: blue
- g: green
- c: cyan
- w: white
When the game is stopped, you can click and move the mouse to select new cells
"""
import numpy as np
import pygame
import random
from pygame.locals import *
__author__ = "Davide Tonin"
game_ended = False
game_stop = False
board_changed = False
CELL_SIZE = 20
FPS = 60
total_cells, alive_cells = 0, 0
game_board = None
color = "red"
mouse_clicked = False
def init_board():
"""Initialize the game board with random alive and dead cells"""
global game_board
game_board = np.random.randint(2, size=(HEIGHT // CELL_SIZE, WIDTH // CELL_SIZE))
def game_board_transition():
"""Parse the game board, count neighbours and do the transition to the next step"""
global game_board, alive_cells
previous_game_board = np.copy(game_board)
alive_cells = 0
for row in range(game_board.shape[0]):
for column in range(game_board[row].shape[0]):
alive_neighbours = 0
if row > 0:
if column > 0 and previous_game_board[row - 1][column - 1] > 0:
alive_neighbours += 1
if previous_game_board[row - 1][column] > 0:
alive_neighbours += 1
if column < game_board[row].shape[0]-1 and previous_game_board[row - 1][column + 1] > 0:
alive_neighbours += 1
if column > 0 and previous_game_board[row][column-1] > 0:
alive_neighbours += 1
if column < game_board[row].shape[0]-1 and previous_game_board[row][column + 1] > 0:
alive_neighbours += 1
if row < game_board.shape[0]-1:
if column > 0 and previous_game_board[row + 1][column - 1] > 0:
alive_neighbours += 1
if previous_game_board[row + 1][column] > 0:
alive_neighbours += 1
if column < game_board[row].shape[0]-1 and previous_game_board[row + 1][column + 1] > 0:
alive_neighbours += 1
if game_board[row][column] > 0:
if alive_neighbours == 2 or alive_neighbours == 3:
if game_board[row][column] < 6:
game_board[row][column] += 1
else:
game_board[row][column] = 0
else:
if alive_neighbours == 3:
game_board[row][column] = 1
if game_board[row][column] > 0:
alive_cells += 1
def resize_board(action):
""" Resize the game board """
global game_board, CELL_SIZE
CELL_SIZE += 1 if action == "+" else -1
new_game_board = np.zeros((HEIGHT // CELL_SIZE, WIDTH // CELL_SIZE), dtype=int)
for row in range(new_game_board.shape[0]):
for column in range(new_game_board[row].shape[0]):
            try:
                new_game_board[row][column] = game_board[row][column]
            except IndexError:
                # zooming exposes cells outside the old board: seed them randomly
                new_game_board[row][column] = random.randint(0, 1)
game_board = np.copy(new_game_board)
def draw_game_board():
"""Draw the game board"""
global game_window, game_board, color
for row in range(game_board.shape[0]):
for column in range(game_board[row].shape[0]):
if game_board[row][column] > 0:
if color == "red":
alive_color = (game_board[row][column] * 40, 0, 0)
elif color == "green":
alive_color = (0, game_board[row][column] * 40, 0)
elif color == "blue":
alive_color = (0, 0, game_board[row][column] * 40)
elif color == "cyan":
alive_color = (0, game_board[row][column] * 40, game_board[row][column] * 40)
elif color == "white":
alive_color = (
game_board[row][column] * 40, game_board[row][column] * 40, game_board[row][column] * 40)
pygame.draw.rect(game_window, alive_color, [column * CELL_SIZE, row * CELL_SIZE, CELL_SIZE, CELL_SIZE])
def select_cells():
global alive_cells
row, col = pygame.mouse.get_pos()[1] // CELL_SIZE, pygame.mouse.get_pos()[0] // CELL_SIZE
    try:
        if game_board[row][col] < 6:
            game_board[row][col] += 1
        if game_board[row][col] == 1:
            alive_cells += 1
    except IndexError:
        # clicks that land outside the board are simply ignored
        pass
if __name__ == '__main__':
pygame.init()
GAME_RESOLUTION = WIDTH, HEIGHT = pygame.display.Info().current_w, pygame.display.Info().current_h
game_window = pygame.display.set_mode(GAME_RESOLUTION, FULLSCREEN | HWSURFACE | DOUBLEBUF | HWACCEL)
pygame.display.set_caption("PyGameOfLife, " + __author__)
clock = pygame.time.Clock()
pygame.font.init()
text_settings = pygame.font.SysFont("Open Sans", 25)
init_board()
stop_transition = True
while not game_ended:
# Event handler
for event in pygame.event.get():
if event.type == pygame.QUIT: game_ended = True
if event.type == KEYDOWN:
if event.key == K_ESCAPE or event.key == K_F4: game_ended = True
if event.key == K_RETURN: init_board(); game_stop = False; board_changed = True
if event.key == K_SPACE: game_stop = not game_stop
if event.key == K_r: color = "red"; board_changed = True
if event.key == K_g: color = "green"; board_changed = True
if event.key == K_b: color = "blue"; board_changed = True
if event.key == K_c: color = "cyan"; board_changed = True
if event.key == K_w: color = "white"; board_changed = True
if event.key == K_p or event.key == K_PLUS: resize_board("+"); game_stop = False; board_changed = True
if event.key == K_m or event.key == K_MINUS: resize_board("-"); game_stop = False; board_changed = True
if event.type == MOUSEBUTTONDOWN:
mouse_clicked = True
if event.type == MOUSEBUTTONUP:
mouse_clicked = False
pygame.Surface.fill(game_window, (0, 0, 0))
if not game_stop or board_changed:
if board_changed:
board_changed = False
else:
game_board_transition()
elif game_stop:
if mouse_clicked:
select_cells()
draw_game_board()
total_cells = (WIDTH // CELL_SIZE) * (HEIGHT // CELL_SIZE)
game_window.blit(text_settings.render("FPS: " + str(round(clock.get_fps(), 2)), True, (255, 255, 255)),
(20, 20))
game_window.blit(text_settings.render("Total cells: " + str(total_cells), True, (255, 255, 255)), (20, 50))
game_window.blit(text_settings.render(
"Alive cells: " + str(alive_cells) + ", " + str(round(alive_cells * 100 / total_cells, 2)) + "%", True,
(255, 255, 255)), (20, 80))
pygame.display.flip()
clock.tick(FPS)
pygame.quit()
exit()
| DavideTonin99/pygameoflife | main.py | Python | mit | 7,534 |
#!/usr/bin/python3
# -*- coding: UTF-8 -*-
#
# __init__.py
#
# This file is part of Squilla
#
# Copyright (C) 2014 Thibault Cohen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
__version__ = "0.2"
import sys
import os
import grp
embedded_libs_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'../embedded_libs/')
sys.path.append(embedded_libs_path)
from squilla.application import Application
from squilla.lib.logger import logger
try:
import pyotherside
except ImportError:
    # Allow testing Python backend alone.
logger.debug("PyOtherSide not found, continuing anyway!")
class pyotherside:
def atexit(*args): pass
def send(*args): pass
sys.modules["pyotherside"] = pyotherside()
def main():
"""Initialize application."""
global app
try:
os.setgid(grp.getgrnam("privileged").gr_gid)
except Exception as e:
logger.debug("Can't set privileged group: %s" % str(e))
app = Application(interval=3)
app.start()
def shutdown():
global app
app.stop()
| titilambert/harbour-squilla | squilla/__init__.py | Python | gpl-3.0 | 1,805 |
# Script to make "simple" geothermal models to show effects of shallow structures.
import numpy as np, sys, os, time, gzip, cPickle as pickle, scipy, gc
from glob import glob
#sys.path.append('/tera_raid/gudni/gitCodes/simpeg')
#sys.path.append('/tera_raid/gudni/gitCodes/simpegem')
import SimPEG as simpeg
import SimPEG
from SimPEG import NSEM
# Load the solver
#sys.path.append('/tera_raid/gudni')
from pymatsolver import MumpsSolver
# Open files
freqList = np.load('MTfrequencies.npy')
locs = np.load('MTlocations.npy')
# Load the model
mesh, modDict = simpeg.Mesh.TensorMesh.readVTK('nsmesh_CoarseHKPK1_NoExtension.vtr')
sigma = modDict['S/m']
bgsigma = np.ones_like(sigma)*1e-8
bgsigma[sigma > 9.999e-7] = 0.01
# for loc in locs:
# # NOTE: loc has to be a (1,3) np.ndarray, otherwise errors occur
# for rxType in ['zxxr','zxxi','zxyr','zxyi','zyxr','zyxi','zyyr','zyyi']:
# rxList.append(simpegNSEM.SurveyNSEM.RxMT(simpeg.mkvc(loc,2).T,rxType))
# Make a receiver list
rxList = []
for rxType in ['zxxr','zxxi','zxyr','zxyi','zyxr','zyxi','zyyr','zyyi','tzxr','tzxi','tzyr','tzyi']:
rxList.append(NSEM.Rx(locs,rxType))
# Source list
srcList =[]
for freq in freqList:
srcList.append(NSEM.SrcNSEM.polxy_1Dprimary(rxList,freq))
# Survey MT
survey = NSEM.Survey(srcList)
# Background 1D model
sigma1d = mesh.r(bgsigma,'CC','CC','M')[0,0,:]
## Setup the problem object
problem = NSEM.Problem3D_ePrimSec(mesh,sigmaPrimary = sigma1d)
problem.verbose = True
problem.Solver = MumpsSolver
problem.pair(survey)
## Calculate the fields
stTime = time.time()
print 'Starting calculating field solution at ' + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
sys.stdout.flush()
FmtSer = problem.fields(sigma)
print 'Ended calculation field at ' + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
print 'Ran for {:f}'.format(time.time()-stTime)
## Project data
stTime = time.time()
print 'Starting projecting fields to data at ' + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
sys.stdout.flush()
mtData = NSEM.Data(survey,survey.eval(FmtSer))
print 'Ended projection of fields at ' + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
print 'Ran for {:f}'.format(time.time()-stTime)
mtStArr = mtData.toRecArray('Complex')
SimPEG.np.save('MTdataStArr_nsmesh_HKPK1Coarse_noExtension',mtStArr)
try:
    pickle.dump(FmtSer, open('MTfields_HKPK1Coarse.pkl', 'wb'))
except Exception:
fieldsDict = {}
for freq in survey.freqs:
src = survey.getSrcByFreq(freq)
fieldsDict[freq] = {'e_pxSolution':FmtSer[src,'e_pxSolution'],'e_pySolution':FmtSer[src,'e_pySolution']}
with open('MTfields_HKPK1Coarse.pkl','wb') as out:
pickle.dump(fieldsDict,out,2)
del FmtSer, mtStArr, mtData
gc.collect()
# Read in the fields dicts
if False:
FmtSer = problem.fieldsPair()
for freq, fD in fieldsDict.iteritems():
src = survey.getSrcByFreq(freq)
FmtSer[src,'e_pxSolution'] = fD['e_pxSolution']
FmtSer[src,'e_pySolution'] = fD['e_pySolution']
| simpeg/presentations | SciPy2016/MTwork/ForwardModeling_noExtension_Coarse/findDiam_MTforward_HKPK1.py | Python | mit | 3,007 |
class ConnectableRegistry():
    """Singleton registry mapping a connectable's `ttype` key to its class."""

    def __init__(self):
        self.registry = {}

    @classmethod
    def instance(cls):
        # Lazily create the shared instance on first access.
        try:
            cls._instance
        except AttributeError:
            cls._instance = cls()
        return cls._instance

    @classmethod
    def register(cls, klass_to_register):
        """Class decorator: file the class under its `ttype` attribute."""
        inst = cls.instance()
        inst.registry[klass_to_register.ttype] = klass_to_register
        return klass_to_register
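
# Minimal usage sketch (added example, not part of the original module): the
# decorated class below is hypothetical; register() keys it by its `ttype`.
if __name__ == '__main__':
    @ConnectableRegistry.register
    class Wire(object):
        ttype = 'wire'

    assert ConnectableRegistry.instance().registry['wire'] is Wire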
| Mause/circuitry | circuitry/connectable_registry.py | Python | mit | 447 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name = "acpi",
version = "1.0.0",
url = 'https://github.com/ondrejsika/python-acpi',
license = 'MIT',
description = "Python acpi parser library",
author = 'Ondrej Sika',
author_email = 'ondrejsika@ondrejsika.com',
py_modules = ("acpi", ),
install_requires = (),
include_package_data = True,
)
| robertmuil/python-acpi | setup.py | Python | mit | 431 |
#! /usr/bin/env python
# scapy.contrib.description = Skinny Call Control Protocol (SCCP)
# scapy.contrib.status = loads
#############################################################################
# #
# scapy-skinny.py --- Skinny Call Control Protocol (SCCP) extension #
# #
# Copyright (C) 2006 Nicolas Bareil <nicolas.bareil@ eads.net> #
# EADS/CRC security team #
# #
# This file is part of Scapy #
# Scapy is free software: you can redistribute it and/or modify #
# under the terms of the GNU General Public License version 2 as #
# published by the Free Software Foundation; version 2. #
# #
# This program is distributed in the hope that it will be useful, but #
# WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU #
# General Public License for more details. #
# #
#############################################################################
from __future__ import absolute_import
from scapy.packet import *
from scapy.fields import *
from scapy.layers.inet import TCP
from scapy.modules.six.moves import range
#####################################################################
# Helpers and constants
#####################################################################
skinny_messages_cls = {
# Station -> Callmanager
0x0000: "SkinnyMessageKeepAlive",
0x0001: "SkinnyMessageRegister",
0x0002: "SkinnyMessageIpPort",
0x0003: "SkinnyMessageKeypadButton",
0x0004: "SkinnyMessageEnblocCall",
0x0005: "SkinnyMessageStimulus",
0x0006: "SkinnyMessageOffHook",
0x0007: "SkinnyMessageOnHook",
0x0008: "SkinnyMessageHookFlash",
0x0009: "SkinnyMessageForwardStatReq",
0x000A: "SkinnyMessageSpeedDialStatReq",
0x000B: "SkinnyMessageLineStatReq",
0x000C: "SkinnyMessageConfigStatReq",
0x000D: "SkinnyMessageTimeDateReq",
0x000E: "SkinnyMessageButtonTemplateReq",
0x000F: "SkinnyMessageVersionReq",
0x0010: "SkinnyMessageCapabilitiesRes",
0x0011: "SkinnyMessageMediaPortList",
0x0012: "SkinnyMessageServerReq",
0x0020: "SkinnyMessageAlarm",
0x0021: "SkinnyMessageMulticastMediaReceptionAck",
0x0022: "SkinnyMessageOpenReceiveChannelAck",
0x0023: "SkinnyMessageConnectionStatisticsRes",
0x0024: "SkinnyMessageOffHookWithCgpn",
0x0025: "SkinnyMessageSoftKeySetReq",
0x0026: "SkinnyMessageSoftKeyEvent",
0x0027: "SkinnyMessageUnregister",
0x0028: "SkinnyMessageSoftKeyTemplateReq",
0x0029: "SkinnyMessageRegisterTokenReq",
0x002A: "SkinnyMessageMediaTransmissionFailure",
0x002B: "SkinnyMessageHeadsetStatus",
0x002C: "SkinnyMessageMediaResourceNotification",
0x002D: "SkinnyMessageRegisterAvailableLines",
0x002E: "SkinnyMessageDeviceToUserData",
0x002F: "SkinnyMessageDeviceToUserDataResponse",
0x0030: "SkinnyMessageUpdateCapabilities",
0x0031: "SkinnyMessageOpenMultiMediaReceiveChannelAck",
0x0032: "SkinnyMessageClearConference",
0x0033: "SkinnyMessageServiceURLStatReq",
0x0034: "SkinnyMessageFeatureStatReq",
0x0035: "SkinnyMessageCreateConferenceRes",
0x0036: "SkinnyMessageDeleteConferenceRes",
0x0037: "SkinnyMessageModifyConferenceRes",
0x0038: "SkinnyMessageAddParticipantRes",
0x0039: "SkinnyMessageAuditConferenceRes",
0x0040: "SkinnyMessageAuditParticipantRes",
0x0041: "SkinnyMessageDeviceToUserDataVersion1",
# Callmanager -> Station */
0x0081: "SkinnyMessageRegisterAck",
0x0082: "SkinnyMessageStartTone",
0x0083: "SkinnyMessageStopTone",
0x0085: "SkinnyMessageSetRinger",
0x0086: "SkinnyMessageSetLamp",
0x0087: "SkinnyMessageSetHkFDetect",
0x0088: "SkinnyMessageSpeakerMode",
0x0089: "SkinnyMessageSetMicroMode",
0x008A: "SkinnyMessageStartMediaTransmission",
0x008B: "SkinnyMessageStopMediaTransmission",
0x008C: "SkinnyMessageStartMediaReception",
0x008D: "SkinnyMessageStopMediaReception",
0x008F: "SkinnyMessageCallInfo",
0x0090: "SkinnyMessageForwardStat",
0x0091: "SkinnyMessageSpeedDialStat",
0x0092: "SkinnyMessageLineStat",
0x0093: "SkinnyMessageConfigStat",
0x0094: "SkinnyMessageTimeDate",
0x0095: "SkinnyMessageStartSessionTransmission",
0x0096: "SkinnyMessageStopSessionTransmission",
0x0097: "SkinnyMessageButtonTemplate",
0x0098: "SkinnyMessageVersion",
0x0099: "SkinnyMessageDisplayText",
0x009A: "SkinnyMessageClearDisplay",
0x009B: "SkinnyMessageCapabilitiesReq",
0x009C: "SkinnyMessageEnunciatorCommand",
0x009D: "SkinnyMessageRegisterReject",
0x009E: "SkinnyMessageServerRes",
0x009F: "SkinnyMessageReset",
0x0100: "SkinnyMessageKeepAliveAck",
0x0101: "SkinnyMessageStartMulticastMediaReception",
0x0102: "SkinnyMessageStartMulticastMediaTransmission",
0x0103: "SkinnyMessageStopMulticastMediaReception",
0x0104: "SkinnyMessageStopMulticastMediaTransmission",
0x0105: "SkinnyMessageOpenReceiveChannel",
0x0106: "SkinnyMessageCloseReceiveChannel",
0x0107: "SkinnyMessageConnectionStatisticsReq",
0x0108: "SkinnyMessageSoftKeyTemplateRes",
0x0109: "SkinnyMessageSoftKeySetRes",
0x0110: "SkinnyMessageStationSelectSoftKeysMessage",
0x0111: "SkinnyMessageCallState",
0x0112: "SkinnyMessagePromptStatus",
0x0113: "SkinnyMessageClearPromptStatus",
0x0114: "SkinnyMessageDisplayNotify",
0x0115: "SkinnyMessageClearNotify",
0x0116: "SkinnyMessageCallPlane",
0x0117: "SkinnyMessageCallPlane",
0x0118: "SkinnyMessageUnregisterAck",
0x0119: "SkinnyMessageBackSpaceReq",
0x011A: "SkinnyMessageRegisterTokenAck",
0x011B: "SkinnyMessageRegisterTokenReject",
0x0042: "SkinnyMessageDeviceToUserDataResponseVersion1",
0x011C: "SkinnyMessageStartMediaFailureDetection",
0x011D: "SkinnyMessageDialedNumber",
0x011E: "SkinnyMessageUserToDeviceData",
0x011F: "SkinnyMessageFeatureStat",
0x0120: "SkinnyMessageDisplayPriNotify",
0x0121: "SkinnyMessageClearPriNotify",
0x0122: "SkinnyMessageStartAnnouncement",
0x0123: "SkinnyMessageStopAnnouncement",
0x0124: "SkinnyMessageAnnouncementFinish",
0x0127: "SkinnyMessageNotifyDtmfTone",
0x0128: "SkinnyMessageSendDtmfTone",
0x0129: "SkinnyMessageSubscribeDtmfPayloadReq",
0x012A: "SkinnyMessageSubscribeDtmfPayloadRes",
0x012B: "SkinnyMessageSubscribeDtmfPayloadErr",
0x012C: "SkinnyMessageUnSubscribeDtmfPayloadReq",
0x012D: "SkinnyMessageUnSubscribeDtmfPayloadRes",
0x012E: "SkinnyMessageUnSubscribeDtmfPayloadErr",
0x012F: "SkinnyMessageServiceURLStat",
0x0130: "SkinnyMessageCallSelectStat",
0x0131: "SkinnyMessageOpenMultiMediaChannel",
0x0132: "SkinnyMessageStartMultiMediaTransmission",
0x0133: "SkinnyMessageStopMultiMediaTransmission",
0x0134: "SkinnyMessageMiscellaneousCommand",
0x0135: "SkinnyMessageFlowControlCommand",
0x0136: "SkinnyMessageCloseMultiMediaReceiveChannel",
0x0137: "SkinnyMessageCreateConferenceReq",
0x0138: "SkinnyMessageDeleteConferenceReq",
0x0139: "SkinnyMessageModifyConferenceReq",
0x013A: "SkinnyMessageAddParticipantReq",
0x013B: "SkinnyMessageDropParticipantReq",
0x013C: "SkinnyMessageAuditConferenceReq",
0x013D: "SkinnyMessageAuditParticipantReq",
0x013F: "SkinnyMessageUserToDeviceDataVersion1",
}
skinny_callstates = {
0x1: "Off Hook",
0x2: "On Hook",
0x3: "Ring out",
0xc: "Proceeding",
}
skinny_ring_type = {
0x1: "Ring off"
}
skinny_speaker_modes = {
0x1: "Speaker on",
0x2: "Speaker off"
}
skinny_lamp_mode = {
0x1: "Off (?)",
0x2: "On",
}
skinny_stimulus = {
0x9: "Line"
}
############
# Fields #
############
class SkinnyDateTimeField(StrFixedLenField):
def __init__(self, name, default):
StrFixedLenField.__init__(self, name, default, 32)
def m2i(self, pkt, s):
year, month, dow, day, hour, min, sec, millisecond = struct.unpack('<8I', s) # noqa: E501
return (year, month, day, hour, min, sec)
def i2m(self, pkt, val):
if isinstance(val, str):
val = self.h2i(pkt, val)
l = val[:2] + (0,) + val[2:7] + (0,)
return struct.pack('<8I', *l)
def i2h(self, pkt, x):
if isinstance(x, str):
return x
else:
return time.ctime(time.mktime(x + (0, 0, 0)))
def i2repr(self, pkt, x):
return self.i2h(pkt, x)
def h2i(self, pkt, s):
t = ()
if isinstance(s, str):
t = time.strptime(s)
t = t[:2] + t[2:-3]
else:
if not s:
y, m, d, h, min, sec, rest, rest, rest = time.gmtime(time.time()) # noqa: E501
t = (y, m, d, h, min, sec)
else:
t = s
return t
###########################
# Packet abstract class #
###########################
class SkinnyMessageGeneric(Packet):
name = 'Generic message'
class SkinnyMessageKeepAlive(Packet):
name = 'keep alive'
class SkinnyMessageKeepAliveAck(Packet):
name = 'keep alive ack'
class SkinnyMessageOffHook(Packet):
name = 'Off Hook'
fields_desc = [LEIntField("unknown1", 0),
LEIntField("unknown2", 0), ]
class SkinnyMessageOnHook(SkinnyMessageOffHook):
name = 'On Hook'
class SkinnyMessageCallState(Packet):
name = 'Skinny Call state message'
fields_desc = [LEIntEnumField("state", 1, skinny_callstates),
LEIntField("instance", 1),
LEIntField("callid", 0),
LEIntField("unknown1", 4),
LEIntField("unknown2", 0),
LEIntField("unknown3", 0)]
class SkinnyMessageSoftKeyEvent(Packet):
name = 'Soft Key Event'
fields_desc = [LEIntField("key", 0),
LEIntField("instance", 1),
LEIntField("callid", 0)]
class SkinnyMessageSetRinger(Packet):
name = 'Ring message'
fields_desc = [LEIntEnumField("ring", 0x1, skinny_ring_type),
LEIntField("unknown1", 0),
LEIntField("unknown2", 0),
LEIntField("unknown3", 0)]
_skinny_tones = {
0x21: 'Inside dial tone',
0x22: 'xxx',
0x23: 'xxx',
0x24: 'Alerting tone',
0x25: 'Reorder Tone'
}
class SkinnyMessageStartTone(Packet):
name = 'Start tone'
fields_desc = [LEIntEnumField("tone", 0x21, _skinny_tones),
LEIntField("unknown1", 0),
LEIntField("instance", 1),
LEIntField("callid", 0)]
class SkinnyMessageStopTone(SkinnyMessageGeneric):
name = 'stop tone'
fields_desc = [LEIntField("instance", 1),
LEIntField("callid", 0)]
class SkinnyMessageSpeakerMode(Packet):
    name = 'Speaker mode'
fields_desc = [LEIntEnumField("ring", 0x1, skinny_speaker_modes)]
class SkinnyMessageSetLamp(Packet):
name = 'Lamp message (light of the phone)'
fields_desc = [LEIntEnumField("stimulus", 0x5, skinny_stimulus),
LEIntField("instance", 1),
LEIntEnumField("mode", 2, skinny_lamp_mode)]
class SkinnyMessageStationSelectSoftKeysMessage(Packet):
name = 'Station Select Soft Keys Message'
fields_desc = [LEIntField("instance", 1),
LEIntField("callid", 0),
LEIntField("set", 0),
LEIntField("map", 0xffff)]
class SkinnyMessagePromptStatus(Packet):
name = 'Prompt status'
fields_desc = [LEIntField("timeout", 0),
StrFixedLenField("text", b"\0" * 32, 32),
LEIntField("instance", 1),
LEIntField("callid", 0)]
class SkinnyMessageCallPlane(Packet):
name = 'Activate/Deactivate Call Plane Message'
fields_desc = [LEIntField("instance", 1)]
class SkinnyMessageTimeDate(Packet):
name = 'Setting date and time'
fields_desc = [SkinnyDateTimeField("settime", None),
LEIntField("timestamp", 0)]
class SkinnyMessageClearPromptStatus(Packet):
name = 'clear prompt status'
fields_desc = [LEIntField("instance", 1),
LEIntField("callid", 0)]
class SkinnyMessageKeypadButton(Packet):
name = 'keypad button'
fields_desc = [LEIntField("key", 0),
LEIntField("instance", 1),
LEIntField("callid", 0)]
class SkinnyMessageDialedNumber(Packet):
name = 'dialed number'
fields_desc = [StrFixedLenField("number", "1337", 24),
LEIntField("instance", 1),
LEIntField("callid", 0)]
_skinny_message_callinfo_restrictions = ['CallerName', 'CallerNumber', 'CalledName', 'CalledNumber', 'OriginalCalledName', 'OriginalCalledNumber', 'LastRedirectName', 'LastRedirectNumber'] + ['Bit%d' % i for i in range(8, 15)] # noqa: E501
class SkinnyMessageCallInfo(Packet):
name = 'call information'
fields_desc = [StrFixedLenField("callername", "Jean Valjean", 40),
StrFixedLenField("callernum", "1337", 24),
StrFixedLenField("calledname", "Causette", 40),
StrFixedLenField("callednum", "1034", 24),
LEIntField("lineinstance", 1),
LEIntField("callid", 0),
StrFixedLenField("originalcalledname", "Causette", 40),
StrFixedLenField("originalcallednum", "1034", 24),
StrFixedLenField("lastredirectingname", "Causette", 40),
StrFixedLenField("lastredirectingnum", "1034", 24),
LEIntField("originalredirectreason", 0),
LEIntField("lastredirectreason", 0),
StrFixedLenField('voicemailboxG', b'\0' * 24, 24),
StrFixedLenField('voicemailboxD', b'\0' * 24, 24),
StrFixedLenField('originalvoicemailboxD', b'\0' * 24, 24),
StrFixedLenField('lastvoicemailboxD', b'\0' * 24, 24),
LEIntField('security', 0),
FlagsField('restriction', 0, 16, _skinny_message_callinfo_restrictions), # noqa: E501
LEIntField('unknown', 0)]
class SkinnyRateField(LEIntField):
def i2repr(self, pkt, x):
if x is None:
x = 0
return '%d ms/pkt' % x
_skinny_codecs = {
0x0: 'xxx',
0x1: 'xxx',
0x2: 'xxx',
0x3: 'xxx',
0x4: 'G711 ulaw 64k'
}
_skinny_echo = {
0x0: 'echo cancelation off',
0x1: 'echo cancelation on'
}
class SkinnyMessageOpenReceiveChannel(Packet):
name = 'open receive channel'
fields_desc = [LEIntField('conference', 0),
LEIntField('passthru', 0),
SkinnyRateField('rate', 20),
LEIntEnumField('codec', 4, _skinny_codecs),
LEIntEnumField('echo', 0, _skinny_echo),
LEIntField('unknown1', 0),
LEIntField('callid', 0)]
def guess_payload_class(self, p):
return conf.padding_layer
_skinny_receive_channel_status = {
0x0: 'ok',
0x1: 'ko'
}
class SkinnyMessageOpenReceiveChannelAck(Packet):
name = 'open receive channel'
fields_desc = [LEIntEnumField('status', 0, _skinny_receive_channel_status),
IPField('remote', '0.0.0.0'),
LEIntField('port', RandShort()),
LEIntField('passthru', 0),
LEIntField('callid', 0)]
_skinny_silence = {
0x0: 'silence suppression off',
0x1: 'silence suppression on',
}
class SkinnyFramePerPacketField(LEIntField):
def i2repr(self, pkt, x):
if x is None:
x = 0
return '%d frames/pkt' % x
class SkinnyMessageStartMediaTransmission(Packet):
name = 'start multimedia transmission'
fields_desc = [LEIntField('conference', 0),
LEIntField('passthru', 0),
IPField('remote', '0.0.0.0'),
LEIntField('port', RandShort()),
SkinnyRateField('rate', 20),
LEIntEnumField('codec', 4, _skinny_codecs),
LEIntField('precedence', 200),
LEIntEnumField('silence', 0, _skinny_silence),
SkinnyFramePerPacketField('maxframes', 0),
LEIntField('unknown1', 0),
LEIntField('callid', 0)]
def guess_payload_class(self, p):
return conf.padding_layer
class SkinnyMessageCloseReceiveChannel(Packet):
name = 'close receive channel'
fields_desc = [LEIntField('conference', 0),
LEIntField('passthru', 0),
IPField('remote', '0.0.0.0'),
LEIntField('port', RandShort()),
SkinnyRateField('rate', 20),
LEIntEnumField('codec', 4, _skinny_codecs),
LEIntField('precedence', 200),
LEIntEnumField('silence', 0, _skinny_silence),
LEIntField('callid', 0)]
class SkinnyMessageStopMultiMediaTransmission(Packet):
name = 'stop multimedia transmission'
fields_desc = [LEIntField('conference', 0),
LEIntField('passthru', 0),
LEIntField('callid', 0)]
class Skinny(Packet):
name = "Skinny"
fields_desc = [LEIntField("len", None),
LEIntField("res", 0),
LEIntEnumField("msg", 0, skinny_messages_cls)]
def post_build(self, pkt, p):
if self.len is None:
            l = len(p) + len(pkt) - 8  # the len and reserved headers are not counted  # noqa: E501
pkt = struct.pack('@I', l) + pkt[4:]
return pkt + p
# An helper
def get_cls(name, fallback_cls):
return globals().get(name, fallback_cls)
# return __builtin__.__dict__.get(name, fallback_cls)
for msgid, strcls in skinny_messages_cls.items():
cls = get_cls(strcls, SkinnyMessageGeneric)
bind_layers(Skinny, cls, {"msg": msgid})
bind_layers(TCP, Skinny, {"dport": 2000})
bind_layers(TCP, Skinny, {"sport": 2000})
if __name__ == "__main__":
from scapy.main import interact
interact(mydict=globals(), mybanner="Welcome to Skinny add-on")
| smainand/scapy | scapy/contrib/skinny.py | Python | gpl-2.0 | 18,715 |
#python svm_train.py {--ID VEC_FILE.pkl ...} --ans TRAIN_ANSWERS --dev DEVNUM --output FILE
# trains an SVM to assign regression weights to tree-wise similarity vectors
# trains/tests on the splits denoted by DEVNUM {1-8}
# TRAIN_ANSWERS needs to be an X-by-1 numpy array of similarity training answers
# model is output to the FILE specified by --output
import numpy
import cPickle as pickle
from sklearn import svm
import sys
OPTS = {}
for aix in range(1,len(sys.argv)):
if len(sys.argv[aix]) < 2 or sys.argv[aix][:2] != '--':
#filename or malformed arg
continue
elif aix < len(sys.argv) - 1 and len(sys.argv[aix+1]) > 2 and sys.argv[aix+1][:2] == '--':
#missing filename, so simple arg
OPTS[sys.argv[aix][2:]] = True
else:
OPTS[sys.argv[aix][2:]] = sys.argv[aix+1]
if 'output' not in OPTS or 'dev' not in OPTS:
    # need someplace to dump the model or this is a waste of time
    raise ValueError('the --output and --dev arguments are required')
inputlist = [label for label in OPTS if label not in ['ans','output','dev']]
inputlist = sorted(inputlist) #arrange systems alphabetically according to cli identifier
Xlist = None
ylist = None
with open(OPTS['ans'],'rb') as f:
#snag the training answers
ylist = pickle.load(f)
#ylist = numpy.ravel(ylist) #put ylist in a flattened format
for fileid in inputlist:
#for each composition system, grab the similarity cross-product vector
print "Incorporating info from %s" % (fileid)
with open(OPTS[fileid],'rb') as f:
newfile = pickle.load(f).astype('float64')
        if Xlist is None:
            #if we haven't seen trained output yet, save it
            Xlist = newfile
else:
#concatenate each system's training output to the others
Xlist = numpy.concatenate( (Xlist,newfile), axis=1)
print 'X',Xlist.shape
print 'y',ylist.shape
#remove bad training examples
Xlist = Xlist[ylist != -1]
ylist = ylist[ylist != -1]
Xlist = numpy.nan_to_num(Xlist)
devnum = int(OPTS['dev'])
if 'mod' in OPTS:
#this will ensure that no single domain is left out, but it won't be trivial to eval against the gold standard anymore.
devX = numpy.delete(Xlist,range(devnum-1,Xlist.shape[0],8),axis=0)
devY = numpy.delete(ylist,range(devnum-1,ylist.shape[0],8),axis=0)
testX = Xlist[devnum-1::8]
else:
#tests on relatively unseen domains
devX = numpy.concatenate((Xlist[:(devnum-1)*1000],Xlist[devnum*1000:]),axis=0)
devY = numpy.concatenate((ylist[:(devnum-1)*1000],ylist[devnum*1000:]),axis=0)
testX = Xlist[(devnum-1)*1000:devnum*1000]
#myobs_scaled = sklearn.preprocessing.scale(myobs_a) #less memory efficient, but centers and scales all features/columns
print 'X',devX.shape
print 'y',devY.shape
#train the SVM regressor based on our training data
model = svm.SVR(kernel='linear')
model.fit(devX, devY)
predictions = model.predict(testX)
predictions = predictions.astype('string',copy=False)
with open(OPTS['output']+OPTS['dev'],'w') as f:
f.write('\n'.join(predictions) + '\n')
| vansky/azmat | scripts/svm_dev.py | Python | gpl-3.0 | 2,918 |
# Copyright 2010-2016, Damian Johnson and The Tor Project
# See LICENSE for licensing information
"""
Panel presenting the configuration state for tor or nyx. Options can be edited
and the resulting configuration files saved.
"""
import curses
import os
import nyx.controller
import nyx.curses
import nyx.panel
import nyx.popups
import stem.control
import stem.manual
from nyx.curses import WHITE, NORMAL, BOLD, HIGHLIGHT
from nyx import DATA_DIR, tor_controller
from stem.util import conf, enum, log, str_tools
SortAttr = enum.Enum('NAME', 'VALUE', 'VALUE_TYPE', 'CATEGORY', 'USAGE', 'SUMMARY', 'DESCRIPTION', 'MAN_PAGE_ENTRY', 'IS_SET')
DETAILS_HEIGHT = 8
NAME_WIDTH = 25
VALUE_WIDTH = 15
def conf_handler(key, value):
if key == 'features.config.order':
return conf.parse_enum_csv(key, value[0], SortAttr, 3)
CONFIG = conf.config_dict('nyx', {
'attr.config.category_color': {},
'attr.config.sort_color': {},
'features.config.order': [SortAttr.MAN_PAGE_ENTRY, SortAttr.NAME, SortAttr.IS_SET],
'features.config.state.showPrivateOptions': False,
'features.config.state.showVirtualOptions': False,
}, conf_handler)
class ConfigEntry(object):
"""
Configuration option presented in the panel.
:var str name: name of the configuration option
:var str value_type: type of value
:var stem.manual.ConfigOption manual: manual information about the option
"""
def __init__(self, name, value_type, manual):
self.name = name
self.value_type = value_type
self.manual = manual.config_options.get(name, stem.manual.ConfigOption(name))
self._index = manual.config_options.keys().index(name) if name in manual.config_options else 99999
def value(self):
"""
Provides the value of this configuration option.
:returns: **str** representation of the current config value
"""
values = tor_controller().get_conf(self.name, [], True)
if not values:
return '<none>'
elif self.value_type == 'Boolean' and values[0] in ('0', '1'):
return 'False' if values[0] == '0' else 'True'
elif self.value_type == 'DataSize' and values[0].isdigit():
return str_tools.size_label(int(values[0]))
elif self.value_type == 'TimeInterval' and values[0].isdigit():
return str_tools.time_label(int(values[0]), is_long = True)
else:
return ', '.join(values)
def is_set(self):
"""
Checks if the configuration option has a custom value.
:returns: **True** if the option has a custom value, **False** otherwise
"""
return tor_controller().is_set(self.name, False)
def sort_value(self, attr):
"""
Provides a heuristic for sorting by a given value.
:param SortAttr attr: sort attribute to provide a heuristic for
:returns: comparable value for sorting
"""
if attr == SortAttr.CATEGORY:
return self.manual.category
elif attr == SortAttr.NAME:
return self.name
elif attr == SortAttr.VALUE:
return self.value()
elif attr == SortAttr.VALUE_TYPE:
return self.value_type
elif attr == SortAttr.USAGE:
return self.manual.usage
elif attr == SortAttr.SUMMARY:
return self.manual.summary
elif attr == SortAttr.DESCRIPTION:
return self.manual.description
elif attr == SortAttr.MAN_PAGE_ENTRY:
return self._index
elif attr == SortAttr.IS_SET:
return not self.is_set()
class ConfigPanel(nyx.panel.Panel):
"""
Editor for tor's configuration.
"""
def __init__(self):
nyx.panel.Panel.__init__(self)
self._contents = []
self._scroller = nyx.curses.CursorScroller()
self._sort_order = CONFIG['features.config.order']
self._show_all = False # show all options, or just the important ones
cached_manual_path = os.path.join(DATA_DIR, 'manual')
if os.path.exists(cached_manual_path):
manual = stem.manual.Manual.from_cache(cached_manual_path)
else:
try:
manual = stem.manual.Manual.from_man()
try:
manual.save(cached_manual_path)
except IOError as exc:
log.debug("Unable to cache manual information to '%s'. This is fine, but means starting Nyx takes a little longer than usual: " % (cached_manual_path, exc))
except IOError as exc:
log.debug("Unable to use 'man tor' to get information about config options (%s), using bundled information instead" % exc)
manual = stem.manual.Manual.from_cache()
try:
for line in tor_controller().get_info('config/names').splitlines():
# Lines of the form "<option> <type>[ <documentation>]". Documentation
# was apparently only in old tor versions like 0.2.1.25.
if ' ' not in line:
continue
line_comp = line.split()
name, value_type = line_comp[0], line_comp[1]
# skips private and virtual entries if not configured to show them
if name.startswith('__') and not CONFIG['features.config.state.showPrivateOptions']:
continue
elif value_type == 'Virtual' and not CONFIG['features.config.state.showVirtualOptions']:
continue
self._contents.append(ConfigEntry(name, value_type, manual))
self._contents = sorted(self._contents, key = lambda entry: [entry.sort_value(field) for field in self._sort_order])
except stem.ControllerError as exc:
log.warn('Unable to determine the configuration options tor supports: %s' % exc)
def show_sort_dialog(self):
"""
Provides the dialog for sorting our configuration options.
"""
sort_colors = dict([(attr, CONFIG['attr.config.sort_color'].get(attr, WHITE)) for attr in SortAttr])
results = nyx.popups.select_sort_order('Config Option Ordering:', SortAttr, self._sort_order, sort_colors)
if results:
self._sort_order = results
self._contents = sorted(self._contents, key = lambda entry: [entry.sort_value(field) for field in self._sort_order])
def show_write_dialog(self):
"""
Confirmation dialog for saving tor's configuration.
"""
controller = tor_controller()
torrc = controller.get_info('config-text', None)
if nyx.popups.confirm_save_torrc(torrc):
try:
controller.save_conf()
nyx.controller.show_message('Saved configuration to %s' % controller.get_info('config-file', '<unknown>'), HIGHLIGHT, max_wait = 2)
except IOError as exc:
nyx.controller.show_message('Unable to save configuration (%s)' % exc.strerror, HIGHLIGHT, max_wait = 2)
self.redraw()
def key_handlers(self):
def _scroll(key):
page_height = self.get_height() - DETAILS_HEIGHT
is_changed = self._scroller.handle_key(key, self._get_config_options(), page_height)
if is_changed:
self.redraw()
def _edit_selected_value():
selected = self._scroller.selection(self._get_config_options())
initial_value = selected.value() if selected.is_set() else ''
new_value = nyx.controller.input_prompt('%s Value (esc to cancel): ' % selected.name, initial_value)
if new_value != initial_value:
try:
if selected.value_type == 'Boolean':
# if the value's a boolean then allow for 'true' and 'false' inputs
if new_value.lower() == 'true':
new_value = '1'
elif new_value.lower() == 'false':
new_value = '0'
elif selected.value_type == 'LineList':
new_value = new_value.split(',') # set_conf accepts list inputs
tor_controller().set_conf(selected.name, new_value)
self.redraw()
except Exception as exc:
nyx.controller.show_message('%s (press any key)' % exc, HIGHLIGHT, max_wait = 30)
def _toggle_show_all():
self._show_all = not self._show_all
self.redraw()
return (
nyx.panel.KeyHandler('arrows', 'scroll up and down', _scroll, key_func = lambda key: key.is_scroll()),
nyx.panel.KeyHandler('enter', 'edit configuration option', _edit_selected_value, key_func = lambda key: key.is_selection()),
nyx.panel.KeyHandler('w', 'write torrc', self.show_write_dialog),
nyx.panel.KeyHandler('a', 'toggle filtering', _toggle_show_all),
nyx.panel.KeyHandler('s', 'sort ordering', self.show_sort_dialog),
)
def _draw(self, subwindow):
contents = self._get_config_options()
selected, scroll = self._scroller.selection(contents, subwindow.height - DETAILS_HEIGHT)
is_scrollbar_visible = len(contents) > subwindow.height - DETAILS_HEIGHT
if selected is not None:
_draw_selection_details(subwindow, selected)
hidden_msg = "press 'a' to hide most options" if self._show_all else "press 'a' to show all options"
subwindow.addstr(0, 0, 'Tor Configuration (%s):' % hidden_msg, HIGHLIGHT)
scroll_offset = 1
if is_scrollbar_visible:
scroll_offset = 3
subwindow.scrollbar(DETAILS_HEIGHT, scroll, len(contents) - 1)
if selected is not None:
subwindow._addch(1, DETAILS_HEIGHT - 1, curses.ACS_TTEE)
# Description column can grow up to eighty characters. After that any extra
# space goes to the value.
description_width = max(0, subwindow.width - scroll_offset - NAME_WIDTH - VALUE_WIDTH - 2)
if description_width > 80:
value_width = VALUE_WIDTH + (description_width - 80)
description_width = 80
else:
value_width = VALUE_WIDTH
for i, entry in enumerate(contents[scroll:]):
_draw_line(subwindow, scroll_offset, DETAILS_HEIGHT + i, entry, entry == selected, value_width, description_width)
if DETAILS_HEIGHT + i >= subwindow.height:
break
def _get_config_options(self):
return self._contents if self._show_all else filter(lambda entry: stem.manual.is_important(entry.name) or entry.is_set(), self._contents)
def _draw_line(subwindow, x, y, entry, is_selected, value_width, description_width):
"""
Show an individual configuration line.
"""
attr = [CONFIG['attr.config.category_color'].get(entry.manual.category, WHITE)]
attr.append(BOLD if entry.is_set() else NORMAL)
attr.append(HIGHLIGHT if is_selected else NORMAL)
option_label = str_tools.crop(entry.name, NAME_WIDTH).ljust(NAME_WIDTH + 1)
value_label = str_tools.crop(entry.value(), value_width).ljust(value_width + 1)
summary_label = str_tools.crop(entry.manual.summary, description_width).ljust(description_width)
subwindow.addstr(x, y, option_label + value_label + summary_label, *attr)
def _draw_selection_details(subwindow, selected):
"""
Shows details of the currently selected option.
"""
attr = ', '.join(('custom' if selected.is_set() else 'default', selected.value_type, 'usage: %s' % selected.manual.usage))
selected_color = CONFIG['attr.config.category_color'].get(selected.manual.category, WHITE)
subwindow.box(0, 0, subwindow.width, DETAILS_HEIGHT)
subwindow.addstr(2, 1, '%s (%s Option)' % (selected.name, selected.manual.category), selected_color, BOLD)
subwindow.addstr(2, 2, 'Value: %s (%s)' % (selected.value(), str_tools.crop(attr, subwindow.width - len(selected.value()) - 13)), selected_color, BOLD)
description = 'Description: %s' % selected.manual.description
for i in range(DETAILS_HEIGHT - 4):
if not description:
break # done writing description
line, description = description.split('\n', 1) if '\n' in description else (description, '')
if i < DETAILS_HEIGHT - 5:
line, remainder = str_tools.crop(line, subwindow.width - 3, 4, 4, str_tools.Ending.HYPHEN, True)
description = ' ' + remainder.strip() + description
subwindow.addstr(2, 3 + i, line, selected_color, BOLD)
else:
subwindow.addstr(2, 3 + i, str_tools.crop(line, subwindow.width - 3, 4, 4), selected_color, BOLD)
| sammyshj/nyx | nyx/panel/config.py | Python | gpl-3.0 | 11,732 |
#!/usr/bin/env python
"""
Write an aggregation query to answer this question:
Of the users in the "Brasilia" timezone who have tweeted 100 times or more,
who has the largest number of followers?
The following hints will help you solve this problem:
- Time zone is found in the "time_zone" field of the user object in each tweet.
- The number of tweets for each user is found in the "statuses_count" field.
To access these fields you will need to use dot notation (from Lesson 4)
- Your aggregation query should return something like the following:
{u'ok': 1.0,
u'result': [{u'_id': ObjectId('52fd2490bac3fa1975477702'),
u'followers': 2597,
u'screen_name': u'marbles',
u'tweets': 12334}]}
Please modify only the 'make_pipeline' function so that it creates and returns an aggregation
pipeline that can be passed to the MongoDB aggregate function. As in our examples in this lesson,
the aggregation pipeline should be a list of one or more dictionary objects.
Please review the lesson examples if you are unsure of the syntax.
Your code will be run against a MongoDB instance that we have provided. If you want to run this code
locally on your machine, you have to install MongoDB, download and insert the dataset.
For instructions related to MongoDB setup and datasets please see Course Materials.
Please note that the dataset you are using here is a smaller version of the twitter dataset used
in examples in this lesson. If you attempt some of the same queries that we looked at in the lesson
examples, your results will be different.
"""
def get_db(db_name):
from pymongo import MongoClient
client = MongoClient('localhost:27017')
db = client[db_name]
return db
def make_pipeline():
    # complete the aggregation pipeline
    pipeline = [
        {"$match": {"user.time_zone": "Brasilia",
                    "user.statuses_count": {"$gte": 100}}},
        {"$project": {"followers": "$user.followers_count",
                      "screen_name": "$user.screen_name",
                      "tweets": "$user.statuses_count",
                      "timezone": "$user.time_zone"}},
        {"$sort": {"followers": -1}},
        {"$limit": 1}]
    return pipeline
def aggregate(db, pipeline):
result = db.tweets.aggregate(pipeline)
return result
if __name__ == '__main__':
db = get_db('twitter')
pipeline = make_pipeline()
result = aggregate(db, pipeline)
assert len(result["result"]) == 1
assert result["result"][0]["followers"] == 17209
import pprint
pprint.pprint(result)
| benjaminsoellner/2015_Data_Analyst_Project_3 | Lesson_5_Analyzing_Data/10-Using_match_and_project/followers.py | Python | agpl-3.0 | 2,533 |
#!/usr/bin/python
#
# If missing 'pika' read how to download it at:
# http://www.rabbitmq.com/tutorials/tutorial-one-python.html
#
import pika
connection = pika.BlockingConnection(pika.ConnectionParameters(
host='localhost'))
channel = connection.channel()
channel.exchange_declare(exchange='pmacct', type='direct')
channel.queue_declare(queue='acct_1')
channel.queue_bind(exchange='pmacct', routing_key='acct', queue='acct_1')
print ' [*] Example inspired from: http://www.rabbitmq.com/getstarted.html'
print ' [*] Waiting for messages on E=pmacct,direct RK=acct Q=acct_1 H=localhost. Edit code to change any parameter. To exit press CTRL+C'
def callback(ch, method, properties, body):
print " [x] Received %r" % (body,)
channel.basic_consume(callback,
queue='acct_1',
no_ack=True)
channel.start_consuming()
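
# Companion sketch (added note, not part of the original example): a test
# message can be pushed into this consumer with the matching exchange and
# routing key, e.g. from another pika connection:
#   channel.basic_publish(exchange='pmacct', routing_key='acct',
#                         body='{"bytes": 1024}')
# The JSON body above is illustrative; pmacct emits its own records.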
| ervteng/pmacct-bugfixes | examples/amqp/amqp_receiver.py | Python | gpl-2.0 | 875 |
# -*- coding: utf-8 -*-
import os
from datetime import datetime
import sys
try:
from hashlib import sha1
except ImportError:
sys.exit('ImportError: No module named hashlib\n'
'If you are on python2.4 this library is not part of python. '
'Please install it. Example: easy_install hashlib')
from sqlalchemy import Table, ForeignKey, Column
from sqlalchemy.types import Unicode, Integer, DateTime
from sqlalchemy.orm import relation, synonym
from vatsystem.model import DeclarativeBase, metadata, DBSession
__all__=['User', 'Group', 'Permission']
#{ Association tables
# This is the association table for the many-to-many relationship between
# groups and permissions. This is required by repoze.what.
group_permission_table=Table('tg_group_permission', metadata,
Column('group_id', Integer, ForeignKey('tg_group.group_id',
onupdate="CASCADE", ondelete="CASCADE"), primary_key=True),
Column('permission_id', Integer, ForeignKey('tg_permission.permission_id',
onupdate="CASCADE", ondelete="CASCADE"), primary_key=True)
)
# This is the association table for the many-to-many relationship between
# groups and members - this is, the memberships. It's required by repoze.what.
user_group_table=Table('tg_user_group', metadata,
Column('user_id', Integer, ForeignKey('tg_user.user_id',
onupdate="CASCADE", ondelete="CASCADE"), primary_key=True),
Column('group_id', Integer, ForeignKey('tg_group.group_id',
onupdate="CASCADE", ondelete="CASCADE"), primary_key=True)
)
#{ The auth* model itself
class Group(DeclarativeBase):
__tablename__='tg_group'
group_id=Column(Integer, autoincrement=True, primary_key=True)
group_name=Column(Unicode(16), unique=True, nullable=False)
display_name=Column(Unicode(255))
created=Column(DateTime, default=datetime.now)
users=relation('User', secondary=user_group_table, backref='groups')
def __repr__(self):
return '<Group: name=%s>'%self.group_name
def __unicode__(self):
return self.group_name
# The 'info' argument we're passing to the email_address and password columns
# contain metadata that Rum (http://python-rum.org/) can use generate an
# admin interface for your models.
class User(DeclarativeBase):
__tablename__='tg_user'
user_id=Column(Integer, autoincrement=True, primary_key=True)
user_name=Column(Unicode(16), unique=True, nullable=False)
email_address=Column(Unicode(255), nullable=True)
phone=Column(Unicode(255), nullable=True)
fax=Column(Unicode(255), nullable=True)
display_name=Column(Unicode(255))
password=Column('password', Unicode(80))
created=Column(DateTime, default=datetime.now)
def __repr__(self):
return '<User: email="%s", display name="%s">'%(
self.email_address, self.display_name)
def __unicode__(self):
return self.display_name or self.user_name
@property
def permissions(self):
"""Return a set of strings for the permissions granted."""
perms=set()
for g in self.groups:
perms=perms|set(g.permissions)
return perms
@classmethod
def by_email_address(cls, email):
"""Return the user object whose email address is ``email``."""
return DBSession.query(cls).filter(cls.email_address==email).first()
@classmethod
def by_user_name(cls, username):
"""Return the user object whose user name is ``username``."""
return DBSession.query(cls).filter(cls.user_name==username).first()
    def validate_password(self, password):
        """Check the given password against the stored (plain-text) value."""
        return self.password==password
@classmethod
def identify(cls, value):
return DBSession.query(cls).filter(cls.user_name.match(value)).one()
class Permission(DeclarativeBase):
__tablename__='tg_permission'
permission_id=Column(Integer, autoincrement=True, primary_key=True)
permission_name=Column(Unicode(16), unique=True, nullable=False)
description=Column(Unicode(255))
groups=relation(Group, secondary=group_permission_table, backref='permissions')
def __repr__(self):
return '<Permission: name=%s>'%self.permission_name
def __unicode__(self):
return self.permission_name
| LamCiuLoeng/vat | vatsystem/model/auth.py | Python | mit | 4,258 |