Column schema for the rows below; ⌀ marks columns that may be null:

| column | dtype | range / length | nullable |
|---|---|---|---|
| hexsha | string | length 40–40 | |
| size | int64 | 3–1.03M | |
| ext | string | 10 distinct values | |
| lang | string | 1 distinct value | |
| max_stars_repo_path | string | length 3–972 | |
| max_stars_repo_name | string | length 6–130 | |
| max_stars_repo_head_hexsha | string | length 40–78 | |
| max_stars_repo_licenses | sequence | length 1–10 | |
| max_stars_count | int64 | 1–191k | ⌀ |
| max_stars_repo_stars_event_min_datetime | string | length 24–24 | ⌀ |
| max_stars_repo_stars_event_max_datetime | string | length 24–24 | ⌀ |
| max_issues_repo_path | string | length 3–972 | |
| max_issues_repo_name | string | length 6–130 | |
| max_issues_repo_head_hexsha | string | length 40–78 | |
| max_issues_repo_licenses | sequence | length 1–10 | |
| max_issues_count | int64 | 1–116k | ⌀ |
| max_issues_repo_issues_event_min_datetime | string | length 24–24 | ⌀ |
| max_issues_repo_issues_event_max_datetime | string | length 24–24 | ⌀ |
| max_forks_repo_path | string | length 3–972 | |
| max_forks_repo_name | string | length 6–130 | |
| max_forks_repo_head_hexsha | string | length 40–78 | |
| max_forks_repo_licenses | sequence | length 1–10 | |
| max_forks_count | int64 | 1–105k | ⌀ |
| max_forks_repo_forks_event_min_datetime | string | length 24–24 | ⌀ |
| max_forks_repo_forks_event_max_datetime | string | length 24–24 | ⌀ |
| content | string | length 3–1.03M | |
| avg_line_length | float64 | 1.13–941k | |
| max_line_length | int64 | 2–941k | |
| alphanum_fraction | float64 | 0–1 | |
5bd21e77793736f8913d3aa19ac9d0ef524c25eb | 2,351 | py | Python | aliyun-python-sdk-rds/aliyunsdkrds/request/v20140815/CancelImportRequest.py | liumihust/aliyun-openapi-python-sdk | c7b5dd4befae4b9c59181654289f9272531207ef | ["Apache-2.0"] | null | null | null | aliyun-python-sdk-rds/aliyunsdkrds/request/v20140815/CancelImportRequest.py | liumihust/aliyun-openapi-python-sdk | c7b5dd4befae4b9c59181654289f9272531207ef | ["Apache-2.0"] | 1 | 2020-05-31T14:51:47.000Z | 2020-05-31T14:51:47.000Z | aliyun-python-sdk-rds/aliyunsdkrds/request/v20140815/CancelImportRequest.py | liumihust/aliyun-openapi-python-sdk | c7b5dd4befae4b9c59181654289f9272531207ef | ["Apache-2.0"] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
class CancelImportRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'CancelImport','rds')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_ResourceOwnerAccount(self):
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
def get_OwnerAccount(self):
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self,OwnerAccount):
self.add_query_param('OwnerAccount',OwnerAccount)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_ImportId(self):
return self.get_query_params().get('ImportId')
def set_ImportId(self,ImportId):
self.add_query_param('ImportId',ImportId)
def get_DBInstanceId(self):
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self,DBInstanceId):
        self.add_query_param('DBInstanceId',DBInstanceId)
| 35.089552 | 74 | 0.772437 |
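For context, a short usage sketch for the request class above, following the usual aliyun-python-sdk-core client pattern; credentials, region, and IDs are placeholders:

```python
from aliyunsdkcore.client import AcsClient
from aliyunsdkrds.request.v20140815.CancelImportRequest import CancelImportRequest

# Placeholder credentials and region.
client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")

request = CancelImportRequest()
request.set_DBInstanceId("rm-xxxxxxxxxxxx")  # placeholder instance ID
request.set_ImportId(12345)                  # placeholder import job ID

# Sends the signed RPC call and returns the raw response body.
response = client.do_action_with_exception(request)
print(response)
```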
284238a091a7eea0d5394b8a2e086f707d21c007 | 16,110 | py | Python | benchmarks/f3_wrong_hints/scaling_ltl_timed_transition_system/3-sender_receiver_7.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | ["MIT"] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/f3_wrong_hints/scaling_ltl_timed_transition_system/3-sender_receiver_7.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | ["MIT"] | null | null | null | benchmarks/f3_wrong_hints/scaling_ltl_timed_transition_system/3-sender_receiver_7.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | ["MIT"] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z |
from typing import FrozenSet
from collections.abc import Iterable  # collections.Iterable was removed in Python 3.10
from math import log, ceil
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or, msat_make_iff
from mathsat import msat_make_leq, msat_make_equal, msat_make_true
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
delta_name = "delta"
def decl_consts(menv: msat_env, name: str, c_type) -> tuple:
assert not name.startswith("_"), name
s = msat_declare_function(menv, name, c_type)
s = msat_make_constant(menv, s)
x_s = msat_declare_function(menv, name_next(name), c_type)
x_s = msat_make_constant(menv, x_s)
return s, x_s
def make_enum(menv, v_name: str, enum_size: int):
bool_type = msat_get_bool_type(menv)
num_bits = ceil(log(enum_size, 2))
b_vars = []
for idx in range(num_bits):
c_name = "{}{}".format(v_name, idx)
b_vars.append(tuple(decl_consts(menv, c_name, bool_type)))
vals = []
x_vals = []
for enum_val in range(enum_size):
bit_val = format(enum_val, '0{}b'.format(num_bits))
assert len(bit_val) == num_bits
assert all(c in {'0', '1'} for c in bit_val)
assign = [b_vars[idx] if c == '1' else
(msat_make_not(menv, b_vars[idx][0]),
msat_make_not(menv, b_vars[idx][1]))
for idx, c in enumerate(reversed(bit_val))]
pred = assign[0][0]
x_pred = assign[0][1]
for it in assign[1:]:
pred = msat_make_and(menv, pred, it[0])
x_pred = msat_make_and(menv, x_pred, it[1])
vals.append(pred)
x_vals.append(x_pred)
assert len(vals) == enum_size
assert len(x_vals) == enum_size
return b_vars, vals, x_vals
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
m_one = msat_make_number(menv, "-1")
arg1 = msat_make_times(menv, arg1, m_one)
return msat_make_plus(menv, arg0, arg1)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def diverging_symbs(menv: msat_env) -> frozenset:
real_type = msat_get_rational_type(menv)
delta = msat_declare_function(menv, delta_name, real_type)
delta = msat_make_constant(menv, delta)
return frozenset([delta])
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
msat_term, msat_term):
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
r2s, x_r2s = decl_consts(menv, "r2s", int_type)
s2r, x_s2r = decl_consts(menv, "s2r", int_type)
delta, x_delta = decl_consts(menv, delta_name, real_type)
sender = Sender("s", menv, enc, r2s, x_r2s, s2r, x_s2r, delta)
receiver = Receiver("r", menv, enc, s2r, x_s2r, r2s, x_r2s, delta)
curr2next = {r2s: x_r2s, s2r: x_s2r, delta: x_delta}
for comp in [sender, receiver]:
for s, x_s in comp.symb2next.items():
curr2next[s] = x_s
zero = msat_make_number(menv, "0")
init = msat_make_and(menv, receiver.init, sender.init)
trans = msat_make_and(menv, receiver.trans, sender.trans)
# invar delta >= 0
init = msat_make_and(menv, init,
msat_make_geq(menv, delta, zero))
trans = msat_make_and(menv, trans,
msat_make_geq(menv, x_delta, zero))
# delta > 0 -> (r2s' = r2s & s2r' = s2r)
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_equal(menv, x_r2s, r2s),
msat_make_equal(menv, x_s2r, s2r))
trans = msat_make_and(menv, trans,
msat_make_impl(menv, lhs, rhs))
# (G F !s.stutter) -> G (s.wait_ack -> F s.send)
lhs = enc.make_G(enc.make_F(msat_make_not(menv, sender.stutter)))
rhs = enc.make_G(msat_make_impl(menv, sender.wait_ack,
enc.make_F(sender.send)))
ltl = msat_make_impl(menv, lhs, rhs)
return TermMap(curr2next), init, trans, ltl
class Module:
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
*args, **kwargs):
self.name = name
self.menv = menv
self.enc = enc
self.symb2next = {}
true = msat_make_true(menv)
self.init = true
self.trans = true
def _symb(self, v_name, v_type):
v_name = "{}_{}".format(self.name, v_name)
return decl_consts(self.menv, v_name, v_type)
def _enum(self, v_name: str, enum_size: int):
c_name = "{}_{}".format(self.name, v_name)
return make_enum(self.menv, c_name, enum_size)
class Sender(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
loc, x_loc = self._symb("l", bool_type)
evt, x_evt = self._symb("evt", bool_type)
msg_id, x_msg_id = self._symb("msg_id", int_type)
timeout, x_timeout = self._symb("timeout", real_type)
c, x_c = self._symb("c", real_type)
self.move = evt
self.stutter = msat_make_not(menv, evt)
self.x_move = x_evt
self.x_stutter = msat_make_not(menv, x_evt)
self.send = loc
self.wait_ack = msat_make_not(menv, loc)
self.x_send = x_loc
self.x_wait_ack = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc, evt: x_evt, msg_id: x_msg_id,
timeout: x_timeout, c: x_c}
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
base_timeout = one
# send & c = 0 & msg_id = 0
self.init = msat_make_and(menv,
msat_make_and(menv, self.send,
msat_make_equal(menv, c,
zero)),
msat_make_equal(menv, msg_id, zero))
# invar: wait_ack -> c <= timeout
self.init = msat_make_and(
menv, self.init,
msat_make_impl(menv, self.wait_ack,
msat_make_leq(menv, c, timeout)))
self.trans = msat_make_impl(menv, self.x_wait_ack,
msat_make_leq(menv, x_c, x_timeout))
# delta > 0 | stutter -> l' = l & msg_id' = msg_id & timeout' = timeout &
# c' = c + delta & out_c' = out_c
lhs = msat_make_or(menv, msat_make_gt(menv, delta, zero), self.stutter)
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_msg_id, msg_id)),
msat_make_and(menv,
msat_make_equal(menv, x_timeout, timeout),
msat_make_equal(menv, x_c,
msat_make_plus(menv, c, delta))))
rhs = msat_make_and(menv, rhs,
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
disc_t = msat_make_and(menv, self.move,
msat_make_equal(menv, delta, zero))
# (send & send') ->
# (msg_id' = msg_id & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_send))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id, msg_id),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (send & wait_ack') ->
# (msg_id' = msg_id + 1 & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_wait_ack))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id,
msat_make_plus(menv, msg_id, one)),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack) -> (c' = 0 & out_c' = out_c &
    # (wait_ack' <-> (in_c != msg_id & c >= timeout))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs_iff = msat_make_and(menv,
msat_make_not(menv,
msat_make_equal(menv, in_c,
msg_id)),
msat_make_geq(menv, c, timeout))
rhs_iff = msat_make_iff(menv, self.x_wait_ack, rhs_iff)
rhs = msat_make_and(menv,
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c,
out_c)),
rhs_iff)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & wait_ack') -> (timeout' > timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack,
self.x_wait_ack))
rhs = msat_make_gt(menv, x_timeout, timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack) -> (send' <-> (in_c = msg_id & c < timeout))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs = msat_make_iff(menv, self.x_send,
msat_make_and(menv,
msat_make_equal(menv, in_c, msg_id),
msat_make_lt(menv, c, timeout)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & send') -> (timeout' = base_timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack, self.x_send))
rhs = msat_make_equal(menv, x_timeout, base_timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
class Receiver(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
loc, x_loc = self._symb("l", bool_type)
self.wait = loc
self.work = msat_make_not(menv, loc)
self.x_wait = x_loc
self.x_work = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc}
zero = msat_make_number(menv, "0")
# wait
self.init = self.wait
# delta > 0 -> loc' = loc & out_c' = out_c
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_impl(menv, lhs, rhs)
disc_t = msat_make_equal(menv, delta, zero)
# wait -> (wait' <-> in_c = out_c)
lhs = msat_make_and(menv, disc_t, self.wait)
rhs = msat_make_iff(menv, self.x_wait,
msat_make_equal(menv, in_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & wait') -> (out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_wait))
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & work') -> out_c' = in_c
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_work))
rhs = msat_make_equal(menv, x_out_c, in_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# work -> out_c' = out_c
lhs = msat_make_and(menv, disc_t, self.work)
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
delta = mgr.Symbol(delta_name, types.REAL)
r2s = mgr.Symbol("r2s", types.INT)
    s2r = mgr.Symbol("s2r", types.INT)  # was "r2s", an apparent copy-paste slip that aliased the two symbols
s_l = mgr.Symbol("s_l", types.BOOL)
s_evt = mgr.Symbol("s_evt", types.BOOL)
s_msg_id = mgr.Symbol("s_msg_id", types.INT)
s_timeout = mgr.Symbol("s_timeout", types.REAL)
s_c = mgr.Symbol("s_c", types.REAL)
r_l = mgr.Symbol("r_l", types.BOOL)
symbs = frozenset([delta, r2s, s2r, s_l, s_evt, s_msg_id, s_timeout, s_c,
r_l])
x_delta = symb_to_next(mgr, delta)
x_r2s = symb_to_next(mgr, r2s)
x_s2r = symb_to_next(mgr, s2r)
x_s_l = symb_to_next(mgr, s_l)
x_s_evt = symb_to_next(mgr, s_evt)
x_s_msg_id = symb_to_next(mgr, s_msg_id)
x_s_timeout = symb_to_next(mgr, s_timeout)
x_s_c = symb_to_next(mgr, s_c)
x_r_l = symb_to_next(mgr, r_l)
res = []
r0 = mgr.Real(0)
r1 = mgr.Real(1)
i0 = mgr.Int(0)
i1 = mgr.Int(1)
loc0 = Location(env, s_l)
loc0.set_progress(0, x_s_l)
hint = Hint("h_s_l0", env, frozenset([s_l]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, s_evt)
loc0.set_progress(1, mgr.Not(x_s_evt))
loc1 = Location(env, mgr.Not(s_evt))
loc1.set_progress(0, x_s_evt)
hint = Hint("h_s_evt1", env, frozenset([s_evt]), symbs)
hint.set_locs([loc0, loc1])
res.append(hint)
loc0 = Location(env, r_l)
loc0.set_progress(1, mgr.Not(x_r_l))
loc1 = Location(env, mgr.Not(r_l))
loc1.set_progress(0, x_r_l)
hint = Hint("h_r_l1", env, frozenset([r_l]), symbs)
hint.set_locs([loc0, loc1])
res.append(hint)
return frozenset(res)
| 40.275 | 89 | 0.567163 |
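The `make_enum` helper in the listing above encodes an n-valued enum with ceil(log2(n)) boolean variables. A dependency-free sketch of the same bit-assignment idea, using plain Python booleans in place of MathSAT terms:

```python
from math import ceil, log2

def enum_assignments(enum_size):
    """Map each enum value to a tuple of bit values, LSB first."""
    num_bits = ceil(log2(enum_size))
    table = {}
    for value in range(enum_size):
        bits = format(value, "0{}b".format(num_bits))
        # Reversed so that index 0 is the least-significant bit, as in make_enum.
        table[value] = tuple(c == "1" for c in reversed(bits))
    return table

print(enum_assignments(3))
# {0: (False, False), 1: (True, False), 2: (False, True)}
```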
a6da7aad4b9c9676545131a486c4275ce487a6e6 | 5,136 | py | Python | homeassistant/components/switcher_kis/switch.py | zalke/home-assistant | a31e49c857722c0723dc5297cd83cbce0f8716f6 | ["Apache-2.0"] | 4 | 2019-07-03T22:36:57.000Z | 2019-08-10T15:33:25.000Z | homeassistant/components/switcher_kis/switch.py | zalke/home-assistant | a31e49c857722c0723dc5297cd83cbce0f8716f6 | ["Apache-2.0"] | 7 | 2019-08-23T05:26:02.000Z | 2022-03-11T23:57:18.000Z | homeassistant/components/switcher_kis/switch.py | zalke/home-assistant | a31e49c857722c0723dc5297cd83cbce0f8716f6 | ["Apache-2.0"] | 2 | 2018-08-15T03:59:35.000Z | 2018-10-18T12:20:05.000Z |
"""Home Assistant Switcher Component Switch platform."""
from logging import getLogger
from typing import Callable, Dict, TYPE_CHECKING
from homeassistant.components.switch import ATTR_CURRENT_POWER_W, SwitchDevice
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.typing import HomeAssistantType
from . import (
ATTR_AUTO_OFF_SET, ATTR_ELECTRIC_CURRENT, ATTR_REMAINING_TIME,
DATA_DEVICE, DOMAIN, SIGNAL_SWITCHER_DEVICE_UPDATE)
if TYPE_CHECKING:
from aioswitcher.devices import SwitcherV2Device
from aioswitcher.api.messages import SwitcherV2ControlResponseMSG
_LOGGER = getLogger(__name__)
DEVICE_PROPERTIES_TO_HA_ATTRIBUTES = {
'power_consumption': ATTR_CURRENT_POWER_W,
'electric_current': ATTR_ELECTRIC_CURRENT,
'remaining_time': ATTR_REMAINING_TIME,
'auto_off_set': ATTR_AUTO_OFF_SET
}
async def async_setup_platform(hass: HomeAssistantType, config: Dict,
async_add_entities: Callable,
discovery_info: Dict) -> None:
"""Set up the switcher platform for the switch component."""
if discovery_info is None:
return
async_add_entities([SwitcherControl(hass.data[DOMAIN][DATA_DEVICE])])
class SwitcherControl(SwitchDevice):
"""Home Assistant switch entity."""
def __init__(self, device_data: 'SwitcherV2Device') -> None:
"""Initialize the entity."""
self._self_initiated = False
self._device_data = device_data
self._state = device_data.state
@property
def name(self) -> str:
"""Return the device's name."""
return self._device_data.name
@property
def should_poll(self) -> bool:
"""Return False, entity pushes its state to HA."""
return False
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return "{}-{}".format(
self._device_data.device_id, self._device_data.mac_addr)
@property
def is_on(self) -> bool:
"""Return True if entity is on."""
from aioswitcher.consts import STATE_ON as SWITCHER_STATE_ON
return self._state == SWITCHER_STATE_ON
@property
def current_power_w(self) -> int:
"""Return the current power usage in W."""
return self._device_data.power_consumption
@property
def device_state_attributes(self) -> Dict:
"""Return the optional state attributes."""
from aioswitcher.consts import WAITING_TEXT
attribs = {}
for prop, attr in DEVICE_PROPERTIES_TO_HA_ATTRIBUTES.items():
value = getattr(self._device_data, prop)
if value and value is not WAITING_TEXT:
attribs[attr] = value
return attribs
@property
def available(self) -> bool:
"""Return True if entity is available."""
from aioswitcher.consts import (STATE_OFF as SWITCHER_STATE_OFF,
STATE_ON as SWITCHER_STATE_ON)
return self._state in [SWITCHER_STATE_ON, SWITCHER_STATE_OFF]
async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
async_dispatcher_connect(
self.hass, SIGNAL_SWITCHER_DEVICE_UPDATE, self.async_update_data)
async def async_update_data(self, device_data: 'SwitcherV2Device') -> None:
"""Update the entity data."""
if device_data:
if self._self_initiated:
self._self_initiated = False
else:
self._device_data = device_data
self._state = self._device_data.state
self.async_schedule_update_ha_state()
async def async_turn_on(self, **kwargs: Dict) -> None:
"""Turn the entity on.
This method must be run in the event loop and returns a coroutine.
"""
await self._control_device(True)
async def async_turn_off(self, **kwargs: Dict) -> None:
"""Turn the entity off.
This method must be run in the event loop and returns a coroutine.
"""
await self._control_device(False)
async def _control_device(self, send_on: bool) -> None:
"""Turn the entity on or off."""
from aioswitcher.api import SwitcherV2Api
from aioswitcher.consts import (COMMAND_OFF, COMMAND_ON,
STATE_OFF as SWITCHER_STATE_OFF,
STATE_ON as SWITCHER_STATE_ON)
response = None # type: SwitcherV2ControlResponseMSG
async with SwitcherV2Api(
self.hass.loop, self._device_data.ip_addr,
self._device_data.phone_id, self._device_data.device_id,
self._device_data.device_password) as swapi:
response = await swapi.control_device(
COMMAND_ON if send_on else COMMAND_OFF)
if response and response.successful:
self._self_initiated = True
self._state = \
SWITCHER_STATE_ON if send_on else SWITCHER_STATE_OFF
self.async_schedule_update_ha_state()
| 35.666667 | 79 | 0.654206 |
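The `device_state_attributes` property above projects device fields onto entity attributes through a name map, skipping falsy values. A standalone sketch of that getattr-based projection pattern, with a made-up device:

```python
PROPERTY_TO_ATTRIBUTE = {"power_consumption": "current_power_w",
                         "remaining_time": "remaining_time"}

class FakeDevice:
    power_consumption = 1500
    remaining_time = None  # falsy values are skipped, as in the entity above

def project(device, mapping):
    # Copy each known device property into an attributes dict.
    return {attr: getattr(device, prop)
            for prop, attr in mapping.items()
            if getattr(device, prop)}

print(project(FakeDevice(), PROPERTY_TO_ATTRIBUTE))  # {'current_power_w': 1500}
```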
dadbed94367190a6c1df74a890c81d0eba76a9dd | 1,127 | py | Python | webpanel/urls.py | csk17k/WebPanel | fdb0ae1b2fd12d006fbca65c779369e2d3d62928 | ["Apache-2.0"] | null | null | null | webpanel/urls.py | csk17k/WebPanel | fdb0ae1b2fd12d006fbca65c779369e2d3d62928 | ["Apache-2.0"] | null | null | null | webpanel/urls.py | csk17k/WebPanel | fdb0ae1b2fd12d006fbca65c779369e2d3d62928 | ["Apache-2.0"] | 1 | 2021-06-24T13:38:23.000Z | 2021-06-24T13:38:23.000Z |
"""webpanel URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url,include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^', include('login.urls'), name = "Login"),
url(r'^register/',include('contestReg.urls'),name="register"),
url(r'^feedback/', include('feedbackform.urls'), name = 'FeedBack'),
url(r'^shortener/',include('shortener.urls',namespace='shortener')),
#url(r'^',include('contestReg.urls'))
# url(r'^/',redirect('/register/edit/'))
]
| 40.25 | 79 | 0.680568 |
7ab31a8b39cc868dbd8b7c379ccf1cc71d10ca0c | 6,992 | py | Python | .ipynb_checkpoints/opt-checkpoint.py | chuktuk/Alzheimers_Disease_Analysis | 1fd5ac72035c68117214beb38d2ba41e1e8699c3 | ["MIT"] | 8 | 2020-01-03T00:33:19.000Z | 2021-05-13T15:50:40.000Z | .ipynb_checkpoints/opt-checkpoint.py | chuktuk/Alzheimers_Disease_Analysis | 1fd5ac72035c68117214beb38d2ba41e1e8699c3 | ["MIT"] | null | null | null | .ipynb_checkpoints/opt-checkpoint.py | chuktuk/Alzheimers_Disease_Analysis | 1fd5ac72035c68117214beb38d2ba41e1e8699c3 | ["MIT"] | 6 | 2020-01-02T23:21:45.000Z | 2021-05-13T15:50:49.000Z |
if 'pd' not in globals():
import pandas as pd
if 'np' not in globals():
import numpy as np
if 'plt' not in globals():
import matplotlib.pyplot as plt
if 'sns' not in globals():
import seaborn as sns
if 'scipy' not in globals():  # note: 'import scipy.stats' binds the name 'scipy', not 'scipy.stats'
    import scipy.stats
sns.set()
def optimize(fe, biomarker, fp_rate, dt_rate, size, gender):
fp_rate = 100 - fp_rate
dt_rate = 100 - dt_rate
# divide data and use supplied gender
if gender == 'males':
df = fe[fe.PTGENDER == 'Male']
else:
df = fe[fe.PTGENDER == 'Female']
# divide the data by final diagnosis
ad = df[df.DX == 'AD']
non_ad = df[df.DX != 'AD']
    if np.mean(ad[biomarker]) > np.mean(non_ad[biomarker]):  # note: both branches below are currently identical, so this check has no effect
# create the bootstrap distribution for AD
th_value_at_fp_rate = pd.DataFrame({biomarker: [np.percentile(np.random.choice(ad[biomarker],
size=len(ad)),fp_rate) for i in range(size)]})
th_value_at_dt_rate = pd.DataFrame({biomarker: [np.percentile(np.random.choice(non_ad[biomarker],
size=len(non_ad)),dt_rate) for i in range(size)]})
else:
# create the bootstrap distribution for AD
th_value_at_fp_rate = pd.DataFrame({biomarker: [np.percentile(np.random.choice(ad[biomarker],
size=len(ad)),fp_rate) for i in range(size)]})
th_value_at_dt_rate = pd.DataFrame({biomarker: [np.percentile(np.random.choice(non_ad[biomarker],
size=len(non_ad)),dt_rate) for i in range(size)]})
fp_th_value = np.mean(th_value_at_dt_rate[biomarker])
dr_th_value = np.mean(th_value_at_fp_rate[biomarker])
# display further results
print(100-fp_rate, '% false positive threshold value: ', fp_th_value)
#print('Mean 95th percentile of bootstrap samples for non AD patients: ', np.mean(bs_non_95[biomarker]))
print(100-dt_rate, '% detection threshold value: ', dr_th_value)
#print('Mean 5th percentile of bootstrap samples for non AD patients: ', np.mean(bs_ad_5[biomarker]))
return th_value_at_dt_rate, th_value_at_fp_rate
def get_reverse(th_value_at_dt_rate, th_value_at_fp_rate, fp_rate, dt_rate, fe, biomarker, gender, increase=True):
if gender == 'males':
ad = fe[(fe.DX == 'AD') & (fe.PTGENDER == 'Male')]
non = fe[(fe.DX != 'AD') & (fe.PTGENDER == 'Male')]
else:
ad = fe[(fe.DX == 'AD') & (fe.PTGENDER == 'Female')]
non = fe[(fe.DX != 'AD') & (fe.PTGENDER == 'Female')]
bs1 = pd.DataFrame({'percentiles':
[scipy.stats.percentileofscore(ad.sample(len(ad),replace=True)[biomarker],
np.mean(th_value_at_dt_rate).values) for i in range(10000)]})
bs2 = pd.DataFrame({'percentiles':
[scipy.stats.percentileofscore(non.sample(len(non),replace=True)[biomarker],
np.mean(th_value_at_fp_rate).values) for i in range(10000)]})
ad = round(np.mean(bs1.percentiles),2)
non = round(np.mean(bs2.percentiles),2)
if increase:
print('The detection rate for AD at', fp_rate, '% false positive rate: ', round(100-ad,2), '%')
print('The false positive rate at', dt_rate, '% AD detection: ', round(100-non,2), '%')
return round(100-ad,2), round(100-non,2)
else:
print('The detection rate for AD at',fp_rate, '% false positive rate: ', ad, '%')
print('The false positive rate at', dt_rate, '% AD detection: ', non, '%')
return ad, non
def optimize_combo(fe, biomarker, fp_rate, dt_rate, size, gender, increase=True):
if increase == False:
fp_rate = 100 - fp_rate
dt_rate = 100 - dt_rate
# divide data and use supplied gender
if gender == 'males':
df = fe[fe.PTGENDER == 'Male']
else:
df = fe[fe.PTGENDER == 'Female']
# divide the data by final diagnosis
ad = df[df.DX == 'AD']
non_ad = df[df.DX != 'AD']
    if np.mean(ad[biomarker]) > np.mean(non_ad[biomarker]):  # note: both branches below are currently identical, so this check has no effect
# create the bootstrap distribution for AD
th_value_at_fp_rate = pd.DataFrame({biomarker: [np.percentile(np.random.choice(ad[biomarker],
size=len(ad)),fp_rate) for i in range(size)]})
th_value_at_dt_rate = pd.DataFrame({biomarker: [np.percentile(np.random.choice(non_ad[biomarker],
size=len(non_ad)),dt_rate) for i in range(size)]})
else:
# create the bootstrap distribution for AD
th_value_at_fp_rate = pd.DataFrame({biomarker: [np.percentile(np.random.choice(ad[biomarker],
size=len(ad)),fp_rate) for i in range(size)]})
th_value_at_dt_rate = pd.DataFrame({biomarker: [np.percentile(np.random.choice(non_ad[biomarker],
size=len(non_ad)),dt_rate) for i in range(size)]})
fp_th_value = np.mean(th_value_at_dt_rate[biomarker])
dr_th_value = np.mean(th_value_at_fp_rate[biomarker])
# display results
print(100-fp_rate, '% false positive threshold value: ', fp_th_value)
print(100-dt_rate, '% detection threshold value: ', dr_th_value)
# next function begins here
bs1 = pd.DataFrame({'percentiles':
[scipy.stats.percentileofscore(ad.sample(len(ad),replace=True)[biomarker],
np.mean(th_value_at_dt_rate).values) for i in range(10000)]})
bs2 = pd.DataFrame({'percentiles':
[scipy.stats.percentileofscore(non_ad.sample(len(non_ad),replace=True)[biomarker],
np.mean(th_value_at_fp_rate).values) for i in range(10000)]})
ad_score = round(np.mean(bs1.percentiles),2)
non_score = round(np.mean(bs2.percentiles),2)
if increase:
print('The detection rate for AD at', fp_rate, '% false positive rate: ', round(100-ad_score,2), '%')
print('The false positive rate at', dt_rate, '% AD detection: ', round(100-non_score,2), '%')
#return round(100-ad_score,2), round(100-non_score,2)
else:
print('The detection rate for AD at',100-fp_rate, '% false positive rate: ', ad_score, '%')
print('The false positive rate at', 100-dt_rate, '% AD detection: ', non_score, '%')
        # return ad_score, non_score
| 47.564626 | 118 | 0.562214 |
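At their core, the functions above repeat one step: resample a group with replacement, take a percentile, and average over resamples. A minimal standalone sketch of that bootstrap-percentile step on made-up data:

```python
import numpy as np

rng = np.random.default_rng(0)
sample = rng.normal(loc=1.0, scale=0.5, size=200)  # made-up biomarker values

# Bootstrap distribution of the 95th percentile (the threshold statistic).
boot = [np.percentile(rng.choice(sample, size=sample.size, replace=True), 95)
        for _ in range(1000)]
print(np.mean(boot), np.std(boot))  # point estimate and spread of the threshold
```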
187ccc6de8b965e0ebd13e80351ff738c503c026 | 9,154 | py | Python | model/LwFModel.py | gyeongmoon/CNN-DM | 5b71307fa41096bc439d480283c0c4c0200164be | ["MIT"] | 2 | 2021-05-31T04:43:12.000Z | 2021-10-06T07:48:21.000Z | model/LwFModel.py | gyeongmoon/CNN-DM | 5b71307fa41096bc439d480283c0c4c0200164be | ["MIT"] | null | null | null | model/LwFModel.py | gyeongmoon/CNN-DM | 5b71307fa41096bc439d480283c0c4c0200164be | ["MIT"] | null | null | null |
import time
import torch
import torch.nn as nn
from model import utils
from model import LwFLoss
from torchvision import models
from torch.autograd import Variable
#######################################################
# Defining the Learning without Forgetting (LwF) model.
# -----------------------------------------------------
class Model(nn.Module):
def __init__(self, model_name, dataset, num_classes, is_fine_tuning=True, pretrained=True,
network_name='LwFModel'):
super(Model, self).__init__()
prev_model = eval(model_name)(pretrained=True)
if not is_fine_tuning: # Feature-extraction.
for param in prev_model.parameters():
param.requires_grad = False
# Total number of classifiers.
self.num_classifiers = len(num_classes)
# Define the base model.
self.features = prev_model.features
self.fc6 = nn.Sequential(*list(prev_model.classifier.children())[:3])
self.fc7 = nn.Sequential(*list(prev_model.classifier.children())[3:6])
# self.classifier = nn.Linear(prev_model.classifier._modules['6'].in_features, num_classes).
for i, num_class in enumerate(num_classes):
classifier_name = 'classifier' + str(i)
setattr(self, classifier_name, nn.Linear(prev_model.classifier._modules['6'].in_features, num_class))
# If continual_learning & pretrained & before a new classifier, load the saved model.
if (self.num_classifiers > 1) and pretrained and (i == self.num_classifiers - 2):
if 'imagenet' in dataset[i]:
setattr(self, classifier_name, prev_model.classifier[6])
else:
self.load_model(dataset[0:-1], model_name, network_name)
# Load the saved model.
def load_model(self, dataset, model_name, network_name):
saved_model_name = network_name + '_'
for data_name in dataset:
saved_model_name = saved_model_name + data_name + '_'
if 'vgg16' in model_name: # vgg16 model.
saved_model_name = saved_model_name + 'vgg'
else: # alexnet model.
saved_model_name = saved_model_name + 'model'
checkpoint = torch.load(saved_model_name)
self.load_state_dict(checkpoint['state_dict']) # Containing ['bias', 'weight'].
# Define parameters to be trained.
def params(self, lr, is_fine_tuning=True):
if is_fine_tuning:
if self.num_classifiers > 1:
params = [{'params': self.features.parameters(), 'lr': 0.02 * lr},
{'params': self.fc6.parameters(), 'lr': 0.02 * lr},
{'params': self.fc7.parameters(), 'lr': 0.02 * lr}]
for i in range(self.num_classifiers):
classifier_name = 'classifier' + str(i)
if i != self.num_classifiers - 1:
params = params + [{'params': getattr(self, classifier_name).parameters(), 'lr': 0.02 * lr}]
else:
params = params + [{'params': getattr(self, classifier_name).parameters()}]
else:
params = self.parameters()
else: # Feature-Extraction.
classifier_name = 'classifier' + str(self.num_classifiers - 1)
params = [{'params': getattr(self, classifier_name).parameters()}]
return params
def forward(self, x):
features = self.features(x)
features = features.view(features.size(0), -1)
fc6 = self.fc6(features)
fc7 = self.fc7(fc6)
outputs = []
for i in range(self.num_classifiers):
classifier_name = 'classifier' + str(i)
output = getattr(self, classifier_name)(fc7)
outputs = outputs + [output]
return outputs
#####################
# Training the model.
def train_model(model, optimizer, scheduler, start_epoch, num_epochs, dataloaders, dataset_sizes, ld=0.02):
# Define dataloader & dataset_size
dataloader, dataset_size = dataloaders[model.num_classifiers-1], dataset_sizes[model.num_classifiers-1]
# Define Criterion for loss.
criterion = nn.CrossEntropyLoss()
LwF_criterion = LwFLoss.LwFLoss() # LwF_Loss
# Gen_output for LwFLoss.
prev_labels = {}
prev_labels = utils.gen_output(model, dataloader, prev_labels)
best_model_wts = model.state_dict()
torch.save({'model': best_model_wts}, 'curr_best_model_wts')
best_loss = 0.0
best_acc = 0.0
since = time.time()
for epoch in range(num_epochs):
print('Epoch {}/{}'.format(start_epoch + epoch, start_epoch + num_epochs - 1))
print('-' * 10)
# Each epoch has a training and validation phase
for phase in ['train', 'test']:
if phase == 'train':
scheduler.step()
model.train(True) # Set model to training mode
else:
model.train(False) # Set model to evaluate mode
running_loss = 0.0
running_corrects = 0
# Iterate over data.
for i, data in enumerate(dataloader[phase]):
# get the inputs
inputs, labels, _ = data
# wrap them in Variable
if torch.cuda.is_available():
inputs, labels = Variable(inputs.cuda()), Variable(labels.cuda())
else:
inputs, labels = Variable(inputs), Variable(labels)
# zero the parameter gradients
optimizer.zero_grad()
# forward
outputs = model(inputs)
_, preds = torch.max(outputs[-1].data, 1) # You can use "topk" function.
if phase == 'train':
LwF_Loss = 0
for k in range(model.num_classifiers - 1):
# wrap prev_labels in Variable for out of memory.
if torch.cuda.is_available():
prev_labels_i = Variable(prev_labels[k][i].cuda())
else:
prev_labels_i = prev_labels[k][i]
LwF_Loss = LwF_Loss + LwF_criterion(outputs[k], prev_labels_i)
# CrossEntropyLoss + Knowledge Distillation Loss.
loss = criterion(outputs[-1], labels) + ld * LwF_Loss
else:
loss = criterion(outputs[-1], labels)
# backward + optimize only if in training phase
if phase == 'train':
loss.backward()
optimizer.step()
# statistics
running_loss += loss.item()
running_corrects += torch.sum(preds == labels.data).item()
epoch_loss = running_loss / dataset_size[phase]
epoch_acc = running_corrects / dataset_size[phase]
print('{} Loss: {:.4f} Acc: {:.4f}'.format(phase, epoch_loss, epoch_acc))
# deep copy the model
if phase == 'test' and epoch_acc > best_acc:
best_loss = epoch_loss
best_acc = epoch_acc
best_model_wts = model.state_dict()
torch.save({'model': best_model_wts}, 'curr_best_model_wts')
# if model.num_classifiers > 1: # Continual Learning.
# if (epoch % 2 == 0 and epoch < 10) or (epoch % 10 == 0) or (epoch == num_epochs-1):
# test_model(model, dataloaders, dataset_sizes, num_task=0) # Test the model.
# print()
time_elapsed = time.time() - since
print('Training complete in {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60))
    print('Best test Loss: {:.4f} Acc: {:.4f}'.format(best_loss, best_acc))
# load the best model.
checkpoint = torch.load('curr_best_model_wts')
model.load_state_dict(checkpoint['model'])
return model
#################
# Test the model.
def test_model(model, dataloaders, dataset_sizes, num_task):
# Define dataloader & dataset_size
dataloader, dataset_size = dataloaders[num_task], dataset_sizes[num_task]
# Define Criterion for loss.
criterion = nn.CrossEntropyLoss()
model.train(False)
running_loss = 0.0
running_corrects = 0
for i, data in enumerate(dataloader['test']):
inputs, labels, _ = data
if torch.cuda.is_available():
inputs, labels = Variable(inputs.cuda()), Variable(labels.cuda())
else:
inputs, labels = Variable(inputs), Variable(labels)
# forward
outputs = model(inputs)
_, preds = torch.max(outputs[num_task].data, 1) # To check Ac (Accuracy of total model).
loss = criterion(outputs[num_task], labels)
# statistics
running_loss += loss.item()
running_corrects += torch.sum(preds == labels.data).item()
epoch_loss = running_loss / dataset_size['test']
epoch_acc = running_corrects / dataset_size['test']
print('Test Loss: {:.4f} Acc: {:.4f}'.format(epoch_loss, epoch_acc))
| 38.141667 | 116 | 0.575377 |
a395350e5fce83a5c07ea131f63660ac541e3517 | 1,144 | py | Python | FatherSon/HelloWorld2_source_code/Listing_23-9.py | axetang/AxePython | 3b517fa3123ce2e939680ad1ae14f7e602d446a6 | ["Apache-2.0"] | 1 | 2019-01-04T05:47:50.000Z | 2019-01-04T05:47:50.000Z | FatherSon/HelloWorld2_source_code/Listing_23-9.py | axetang/AxePython | 3b517fa3123ce2e939680ad1ae14f7e602d446a6 | ["Apache-2.0"] | null | null | null | FatherSon/HelloWorld2_source_code/Listing_23-9.py | axetang/AxePython | 3b517fa3123ce2e939680ad1ae14f7e602d446a6 | ["Apache-2.0"] | null | null | null |
# Listing_23-9.py
# Copyright Warren & Csrter Sande, 2013
# Released under MIT license http://www.opensource.org/licenses/mit-license.php
# Version $version ----------------------------
# Crazy Eights - getting the new suit when the player plays an 8
# Note that this is not a complete program. It needs to be put together
# with the other parts of Crazy Eights to make a working program.
def get_new_suit():
global active_suit
got_suit = False
while not got_suit: # keep trying until the player enters a valid suit
suit = raw_input("Pick a suit: ")
if suit.lower() == 'd':
active_suit = "Diamonds"
got_suit = True
elif suit.lower() == 's':
active_suit = "Spades"
got_suit = True
elif suit.lower() == 'h':
active_suit = "Hearts"
got_suit = True
elif suit.lower() == 'c':
active_suit = "Clubs"
got_suit = True
else:
print"Not a valid suit. Try again. ",
            print "Not a valid suit. Try again. ",
| 36.903226 | 120 | 0.546329 |
6d76028b0ae8fcee36c5a5cc36fbcb4f59179c46 | 222 | py | Python | inn/inn_hotels/doctype/inn_package_detail/test_inn_package_detail.py | vinhnguyent090/front-desk | 7384642e9206e30855986465a7ef63c8fd76ef2a | ["MIT"] | 4 | 2021-08-19T03:33:36.000Z | 2021-08-28T16:37:52.000Z | inn/inn_hotels/doctype/inn_package_detail/test_inn_package_detail.py | vinhnguyent090/front-desk | 7384642e9206e30855986465a7ef63c8fd76ef2a | ["MIT"] | 98 | 2020-02-24T08:12:47.000Z | 2021-08-21T07:54:03.000Z | inn/inn_hotels/doctype/inn_package_detail/test_inn_package_detail.py | vinhnguyent090/front-desk | 7384642e9206e30855986465a7ef63c8fd76ef2a | ["MIT"] | 13 | 2021-01-24T18:08:43.000Z | 2022-03-29T09:23:25.000Z |
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Core Initiative and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestInnPackageDetail(unittest.TestCase):
pass
| 20.181818 | 54 | 0.779279 |
bf1770ab5a06d4ec74e8ff38f6a57b98e345c8a4 | 6,557 | py | Python | Replication Codes/code/python/estimate_R/estimate_R_KF.py | 3dgiordano/TrackingR | 2cb43988f8598d519de341f48754132bf095b33e | ["MIT"] | 20 | 2021-02-19T01:47:41.000Z | 2022-03-07T05:58:38.000Z | Replication Codes/code/python/estimate_R/estimate_R_KF.py | 3dgiordano/TrackingR | 2cb43988f8598d519de341f48754132bf095b33e | ["MIT"] | 6 | 2021-02-15T09:06:12.000Z | 2022-01-31T02:24:29.000Z | Replication Codes/code/python/estimate_R/estimate_R_KF.py | 3dgiordano/TrackingR | 2cb43988f8598d519de341f48754132bf095b33e | ["MIT"] | 12 | 2021-04-01T20:38:03.000Z | 2022-02-14T21:22:05.000Z |
import os.path
import sys
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/' + '../..'))
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import statsmodels.api as sm
import statsmodels.formula.api as smf
import scipy
import warnings
from python.tools import (
clean_folder
)
def estimate_R(y, gamma, n_start_values_grid = 0, maxiter = 200):
"""Estimate basic reproduction number using
Kalman filtering techniques
Args:
y (np array): Time series of growth rate in infections
gamma (double): Rate of recoveries (gamma)
n_start_values_grid (int, optional): Number of starting values used in the optimization;
the effective number of starting values is (n_start_values_grid ** 2)
maxiter (int, optional): Maximum number of iterations
Returns:
dict: Dictionary containing the results
R (np array): Estimated series for R
se_R (np array): Estimated standard error for R
flag (int): Optimization flag (0 if successful)
sigma2_irregular (float): Estimated variance of the irregular component
sigma2_level (float): Estimated variance of the level component
gamma (float): Value of gamma used in the estimation
"""
assert isinstance(n_start_values_grid, int), \
"n_start_values_grid must be an integer"
assert isinstance(maxiter, int), \
"maxiter must be an integer"
assert n_start_values_grid >= 0 and maxiter > 0, \
"n_start_values_grid and max_iter must be positive"
assert isinstance(y, np.ndarray), \
"y must be a numpy array"
assert y.ndim == 1, \
"y must be a vector"
# Setup model instance
mod_ll = sm.tsa.UnobservedComponents(y, 'local level')
# Estimate model
if n_start_values_grid > 0:
# If requested, use multiple starting
# values for more robust optimization results
start_vals_grid = np.linspace(0.01, 2.0, n_start_values_grid) * pd.Series(y).var()
opt_res = []
for start_val_1 in start_vals_grid:
for start_val_2 in start_vals_grid:
res_ll = mod_ll.fit(start_params = np.array([start_val_1, start_val_2]),
disp = False, maxiter = maxiter)
opt_res.append({'obj_value': res_ll.mle_retvals['fopt'],
'start_val_1': start_val_1,
'start_val_2': start_val_2,
'flag': res_ll.mle_retvals['warnflag']})
# The optimizer minimizes the negative of
# the likelihood, so find the minimum value
opt_res = pd.DataFrame(opt_res)
opt_res.sort_values(by = 'obj_value', ascending = True, inplace = True)
res_ll = mod_ll.fit(start_params = np.array([opt_res['start_val_1'][0],
opt_res['start_val_2'][0]]),
maxiter = maxiter, disp = False)
else:
res_ll = mod_ll.fit(maxiter = maxiter, disp = False)
R = 1 + 1 / (gamma) * res_ll.smoothed_state[0]
se_R = (1 / gamma * (res_ll.smoothed_state_cov[0] ** 0.5))[0]
return {'R': R,
'se_R': se_R,
'flag': res_ll.mle_retvals['warnflag'],
'sigma2_irregular': res_ll.params[0],
'sigma2_level': res_ll.params[1],
'signal_to_noise': res_ll.params[1] / res_ll.params[0],
'gamma': gamma}
################
## Parameters ##
################
output_folder = './estimate_R/output/estimate_R_KF/'
input_folder = './estimate_R/input/estimate_R_KF/'
min_T = 20
gamma = 1 / 7.0
min_signal_to_noise = 0.01
max_signal_to_noise = 0.25
days_infectious = 7 # Baseline for of duration of infectiousness
###############
## Load data ##
###############
clean_folder(output_folder)
df = pd.read_csv('{}/dataset.csv'.format(input_folder))
df['Date'] = pd.to_datetime(df['Date'])
# Impose minimum time-series observations
df_temp = df.groupby('Country/Region').count()['gr_infected_{}'.format(days_infectious)].reset_index()
df_temp.rename(columns = {'gr_infected_{}'.format(days_infectious): 'no_obs'},
inplace = True)
df = pd.merge(df, df_temp, how = 'left')
mask = df['no_obs'] >= min_T
df = df.loc[mask, ]
################
## Estimate R ##
################
df['R'] = np.nan
df['se_R'] = np.nan
df_optim_res = []
with warnings.catch_warnings():
# Ignore warnings from statsmodels
# Instead, check later
warnings.filterwarnings("ignore", message = "Maximum Likelihood optimization failed to converge. Check mle_retvals")
for country in df['Country/Region'].unique():
mask = df['Country/Region'] == country
df_temp = df.loc[mask, ].copy()
y = df_temp['gr_infected_{}'.format(days_infectious)].values
res = estimate_R(y, gamma = gamma)
df.loc[mask, 'R'] = res['R']
df.loc[mask, 'se_R'] = res['se_R']
df_optim_res.append({'Country/Region': country,
'flag': res['flag'],
'sigma2_irregular': res['sigma2_irregular'],
'sigma2_level': res['sigma2_level'],
'signal_to_noise': res['signal_to_noise']})
df_optim_res = pd.DataFrame(df_optim_res)
# Merge in optimization results
df = pd.merge(df, df_optim_res, how = 'left')
###################################
## Filter out unreliable results ##
###################################
# Unsuccessful optimization
mask = df['flag'] != 0
df = df.loc[~mask, ]
# Filter out implausible signal-to-noise ratios
mask = (df['signal_to_noise'] <= min_signal_to_noise) | (df['signal_to_noise'] >= max_signal_to_noise)
df = df.loc[~mask, ]
# Collect optimization results
df_optim_res = df.groupby('Country/Region').first()[['flag', 'sigma2_irregular', 'sigma2_level', 'signal_to_noise']].reset_index()
df_optim_res.to_csv('{}/optim_res.csv'.format(output_folder), index = False)
####################
## Export results ##
####################
df = df[['Country/Region', 'Date', 'R', 'se_R']].copy()
df.reset_index(inplace = True)
del df['index']
df['days_infectious'] = 1 / gamma
# Calculate confidence intervals
alpha = [0.05, 0.35]
names = ['95', '65']
for aa, name in zip(alpha, names):
t_crit = scipy.stats.norm.ppf(1 - aa / 2)
df['ci_{}_u'.format(name)] = df['R'] + t_crit * df['se_R']
df['ci_{}_l'.format(name)] = df['R'] - t_crit * df['se_R']
# Save estimates
df.to_csv('{}/estimated_R.csv'.format(output_folder), index = False)
| 35.63587 | 130 | 0.613543 |
a1f337b69b98f2671185b496492ecba1e65d44dc | 4,353 | py | Python | src/pyjen/build.py | TheFriendlyCoder/pyjen | a3d7e8f69cb53f80f627300f8d3aa0d4302a5ac1 | ["Apache-2.0"] | 5 | 2017-12-14T13:39:04.000Z | 2020-07-06T09:46:02.000Z | src/pyjen/build.py | TheFriendlyCoder/pyjen | a3d7e8f69cb53f80f627300f8d3aa0d4302a5ac1 | ["Apache-2.0"] | 119 | 2016-09-13T01:39:31.000Z | 2020-08-31T03:06:19.000Z | src/pyjen/build.py | TheFriendlyCoder/pyjen | a3d7e8f69cb53f80f627300f8d3aa0d4302a5ac1 | ["Apache-2.0"] | 3 | 2015-03-17T18:49:22.000Z | 2019-07-03T14:10:27.000Z |
"""Primitives for interacting with Jenkins builds"""
from datetime import datetime
import logging
from urllib.parse import urljoin
from pyjen.changeset import Changeset
class Build:
"""information about a single build / run of a :class:`~.job.Job`"""
def __init__(self, api):
"""
Args:
api (JenkinsAPI):
Pre-initialized connection to the Jenkins REST API
"""
super().__init__()
self._api = api
self._log = logging.getLogger(__name__)
def __eq__(self, obj):
if not isinstance(obj, Build):
return False
if obj.uid != self.uid:
return False
return True
def __ne__(self, obj):
if not isinstance(obj, Build):
return True
if obj.uid != self.uid:
return True
return False
def __hash__(self):
return hash(self.uid)
@property
def url(self):
"""str: URL of this build"""
return self._api.url
@property
def number(self):
"""int: sequentially assigned numeric ID for the build"""
data = self._api.get_api_data()
return data['number']
@property
def start_time(self):
"""datetime.datetime: time stamp of when this build was started"""
data = self._api.get_api_data()
time_in_seconds = data['timestamp'] * 0.001
return datetime.fromtimestamp(time_in_seconds)
@property
def is_building(self):
"""bool: True if the build is currently executing otherwise False"""
data = self._api.get_api_data()
return data['building']
@property
def console_output(self):
"""str: raw console output for this build as plain text"""
return self._api.get_text("/consoleText")
@property
def result(self):
"""str: state of the associated job upon completion of this build.
Typically one of the following:
* "SUCCESS"
* "UNSTABLE"
* "FAILURE"
* "ABORTED"
"""
data = self._api.get_api_data()
return data['result']
@property
def changeset(self):
"""Changeset: Description of 0 or more SCM revisions associated with
/ included in this build"""
data = self._api.get_api_data()
return Changeset(self._api, data['changeSet'])
@property
def description(self):
"""str: Gets the descriptive text associated with this build. May be an
empty string if no description given."""
data = self._api.get_api_data()
retval = data["description"]
if retval is None:
return ""
return retval
@description.setter
def description(self, value):
args = {
'params': {
'description': value,
'Submit': "Submit"
}
}
self._api.post(self.url + '/submitDescription', args=args)
@property
def uid(self):
"""str: internal, unique identifier associated with this build"""
data = self._api.get_api_data()
return data["id"]
@property
def artifact_urls(self):
"""list (): list of 0 or more URLs to download published build artifacts
"""
data = self._api.get_api_data()
artifacts_node = data['artifacts']
retval = []
for node in artifacts_node:
url = urljoin(
self._api.url, "artifact/" + node['fileName'])
retval.append(url)
return retval
@property
def duration(self):
"""int: total runtime of the build, in milliseconds. Returns 0 if
build hasn't finished"""
data = self._api.get_api_data()
return data['duration']
@property
def estimated_duration(self):
"""int: Estimated runtime for a running build, in milliseconds.
Estimate is based off average duration of previous builds"""
data = self._api.get_api_data()
return data['estimatedDuration']
def abort(self):
"""Aborts this build before it completes"""
self._api.post(self._api.url + "stop")
def kill(self):
"""Performs hard kill on this build"""
self._api.post(self._api.url + "kill")
if __name__ == "__main__": # pragma: no cover
pass
| 26.705521 | 80 | 0.583735 |
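A short consumption sketch, assuming pyjen's usual entry point; the server URL, credentials, and job name are placeholders:

```python
from pyjen.jenkins import Jenkins

# Placeholder server URL and credentials; job name is hypothetical.
jenkins = Jenkins("http://localhost:8080", ("user", "api-token"))
job = jenkins.find_job("example-job")
build = job.last_good_build  # a Build instance like the class above
print(build.number, build.result, build.duration)
```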
abf5d732dfb9c002341fa69d085a75dc9ffe9e41 | 1,755 | py | Python | moonrock/sites/localhost/index.py | ToyokoLabs/moonrock | abd5de575713796d9b19a48fa447cdd7cef520cc | ["BSD-2-Clause"] | null | null | null | moonrock/sites/localhost/index.py | ToyokoLabs/moonrock | abd5de575713796d9b19a48fa447cdd7cef520cc | ["BSD-2-Clause"] | null | null | null | moonrock/sites/localhost/index.py | ToyokoLabs/moonrock | abd5de575713796d9b19a48fa447cdd7cef520cc | ["BSD-2-Clause"] | 1 | 2021-06-24T13:38:23.000Z | 2021-06-24T13:38:23.000Z |
from datetime import date
from flask import Flask, request, url_for, render_template
app = Flask(__name__)
@app.route('/explore')
def indexblog():
return render_template('indexblog.html', blog=True)
@app.route('/')
def carousel():
return render_template('carousel.html', home=True)
@app.route('/checkout/<string:plan>', methods=['GET'])
def checkout(plan):
price = {'k1': 15, 'k6': 72, 'k12':108}[plan]
ccdate = '{}-{}'.format(date.today().year, date.today().month)
return render_template('checkout.html',
checkout=True, plan=plan, price=price,
ccdate=ccdate)
@app.route('/checkout/<string:plan>', methods=['POST'])
def checkoutpost(plan):
if plan not in ('k1', 'k6', 'k12'):
exit()
ccnumber = request.form['ccnumber']
nameoncard = request.form['ccnameoncard'].upper()
fullname = request.form['firstname'] + ' ' + request.form['lastname']
if ccnumber != '1111111111111' or nameoncard != 'JOHN GALT':
return render_template('errorcard.html', fullname=fullname)
product = 'MoonRock Kit ' + plan[1:]
total = request.form['total']
return render_template('thankyou.html', fullname=fullname,
total=total, product=product)
@app.route('/promo/<string:promo>', methods=['POST'])
def promo(promo):
codes = {'MR10': '10', 'MR5':'5', 'MRHALFPRICE':'50%'}
if promo in codes:
return codes[promo]
else:
return 'N/A'
@app.route('/learn')
def album():
return render_template('learn.html', learn=True)
@app.route('/subscribe')
def pricing():
return render_template('pricing.html', subscribe=True)
@app.route('/old')
def old():
    return render_template('oldblogcode.html')
| 30.789474 | 73 | 0.630769 |
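A sketch of exercising the promo endpoint above with Flask's built-in test client; it assumes the module and its `app` object are importable:

```python
# Assuming the module above is importable, with `app` in scope.
client = app.test_client()

print(client.post('/promo/MR10').data)   # b'10'
print(client.post('/promo/BOGUS').data)  # b'N/A'
```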
10df35bb052357ccd0747e4660956b11d0ab43c5 | 4,062 | py | Python | tests/data/primitives/test_primitives/fail.py | tods-doc/axolotl | 6fc87bedb514677db09c039d492d1d3c7864913d | ["Apache-2.0"] | null | null | null | tests/data/primitives/test_primitives/fail.py | tods-doc/axolotl | 6fc87bedb514677db09c039d492d1d3c7864913d | ["Apache-2.0"] | null | null | null | tests/data/primitives/test_primitives/fail.py | tods-doc/axolotl | 6fc87bedb514677db09c039d492d1d3c7864913d | ["Apache-2.0"] | null | null | null |
import os.path
import typing
from d3m import container, exceptions, utils
from d3m.metadata import hyperparams, base as metadata_base
from d3m.primitive_interfaces import base, transformer
from . import __author__, __version__
__all__ = ('FailPrimitive',)
Inputs = container.DataFrame
Outputs = container.DataFrame
class Hyperparams(hyperparams.Hyperparams):
method_to_fail = hyperparams.Enumeration[str](
values=['__init__', 'set_training_data', 'fit', 'produce', 'none'],
default='produce',
semantic_types=['https://metadata.datadrivendiscovery.org/types/TuningParameter'],
description="The name of the method the user wants this primitive to fail on.",
)
class IntentionalError(Exception):
"""
Exception raised for testing purposes.
Parameters
----------
class_name : str
Name of the class where the error occurred.
method_name : str
Name of the method where the error occurred.
"""
def __init__(self, class_name: str, method_name: str) -> None:
message = f"This is an exception raised by a(n) {class_name} object in the {method_name} method"
super().__init__(message)
class FailPrimitive(transformer.TransformerPrimitiveBase[Inputs, Outputs, Hyperparams]):
"""
A primitive which fails on the requested method (given as hyper-parameter).
Moreover, primitive does not correctly preserve state so if you pickle
and unpickle it, it does not seen itself as fitted anymore.
"""
metadata: typing.ClassVar[metadata_base.PrimitiveMetadata] = metadata_base.PrimitiveMetadata({
'id': 'd6dfbefa-0fb8-11e9-ab14-d663bd873d93',
'version': __version__,
'name': "Failure Tester",
'keywords': ['test primitive'],
'source': {
'name': __author__,
'contact': 'mailto:author@example.com',
'uris': [
'https://gitlab.com/datadrivendiscovery/tests-data/blob/master/primitives/test_primitives/fail.py',
'https://gitlab.com/datadrivendiscovery/tests-data.git',
],
},
'installation': [{
'type': metadata_base.PrimitiveInstallationType.PIP,
'package_uri': 'git+https://gitlab.com/datadrivendiscovery/tests-data.git@{git_commit}#egg=test_primitives&subdirectory=primitives'.format(
git_commit=utils.current_git_commit(os.path.dirname(__file__)),
),
}],
'location_uris': [
'https://gitlab.com/datadrivendiscovery/tests-data/raw/{git_commit}/primitives/test_primitives/fail.py'.format(
git_commit=utils.current_git_commit(os.path.dirname(__file__)),
),
],
'python_path': 'd3m.primitives.operator.null.FailTest',
'algorithm_types': [
metadata_base.PrimitiveAlgorithmType.IDENTITY_FUNCTION,
],
'primitive_family': metadata_base.PrimitiveFamily.OPERATOR,
})
def __init__(self, *, hyperparams: Hyperparams) -> None:
super().__init__(hyperparams=hyperparams)
self._conditional_fail('__init__')
self._fitted = False
def _conditional_fail(self, method_name: str) -> None:
if self.hyperparams['method_to_fail'] == method_name:
raise IntentionalError(self.__class__.__name__, method_name)
def set_training_data(self) -> None: # type: ignore
self._conditional_fail('set_training_data')
self._fitted = False
super().set_training_data()
def fit(self, *, timeout: float = None, iterations: int = None) -> base.CallResult[None]:
self._conditional_fail('fit')
self._fitted = True
return super().fit(timeout=timeout, iterations=iterations)
def produce(self, *, inputs: Inputs, timeout: float = None, iterations: int = None) -> base.CallResult[Outputs]:
self._conditional_fail('produce')
if not self._fitted:
raise exceptions.PrimitiveNotFittedError("Primitive is not fitted.")
return base.CallResult(inputs)
| 37.962617 | 151 | 0.667159 |
8dc555f36672ba61d7e7d5609965298c09f72e12 | 1,123 | py | Python | test/test_day12.py | frangiz/AdventOfCode2017 | 5fc171d4a83bfb9a408b4647ded4cb3efd12247e | ["MIT"] | null | null | null | test/test_day12.py | frangiz/AdventOfCode2017 | 5fc171d4a83bfb9a408b4647ded4cb3efd12247e | ["MIT"] | null | null | null | test/test_day12.py | frangiz/AdventOfCode2017 | 5fc171d4a83bfb9a408b4647ded4cb3efd12247e | ["MIT"] | null | null | null |
from days import day12
from ddt import ddt, data, unpack
import unittest
import util
@ddt
class MyTestCase(unittest.TestCase):
@data([['0 <-> 2',
'1 <-> 1',
'2 <-> 0, 3, 4',
'3 <-> 2, 4',
'4 <-> 2, 3, 6',
'5 <-> 6',
'6 <-> 4, 5'], '6'])
@unpack
def test_example_a(self, test_input, expected):
result = day12.part_a(test_input)
self.assertEqual(result, expected)
def test_answer_part_a(self):
result = day12.part_a(util.get_file_contents('day12.txt'))
self.assertEqual(result, '175')
@data([['0 <-> 2',
'1 <-> 1',
'2 <-> 0, 3, 4',
'3 <-> 2, 4',
'4 <-> 2, 3, 6',
'5 <-> 6',
'6 <-> 4, 5'], '2'])
@unpack
def test_example_b(self, test_input, expected):
result = day12.part_b(test_input)
self.assertEqual(result, expected)
def test_answer_part_b(self):
result = day12.part_b(util.get_file_contents('day12.txt'))
self.assertEqual(result, '213')
| 28.075 | 67 | 0.485307 |
7bf77386b18141e7e942f86c953718dcfab057d7 | 399 | py | Python | api_courses/api_courses/wsgi.py | Bincha3000/API-Online-Courses | f8f23d8e2605d0b689b6548b02cd845de614c470 | [
"MIT"
] | null | null | null | api_courses/api_courses/wsgi.py | Bincha3000/API-Online-Courses | f8f23d8e2605d0b689b6548b02cd845de614c470 | [
"MIT"
] | 6 | 2020-02-12T00:52:25.000Z | 2022-01-13T01:36:21.000Z | api_courses/api_courses/wsgi.py | gladunvv/api-online-courses | f8f23d8e2605d0b689b6548b02cd845de614c470 | [
"MIT"
] | null | null | null | """
WSGI config for api_courses project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api_courses.settings')
application = get_wsgi_application()
| 23.470588 | 78 | 0.789474 |
e552907b1e2c2d5c81903811e96b9c1489bd40ab | 1,081 | bzl | Python | ubuntu1604_webtest/revisions.bzl | ash2k/container-definitions | 753fbbee3d26d63e500036d4c1ce6d95bc47031c | [
"Apache-2.0"
] | null | null | null | ubuntu1604_webtest/revisions.bzl | ash2k/container-definitions | 753fbbee3d26d63e500036d4c1ce6d95bc47031c | [
"Apache-2.0"
] | null | null | null | ubuntu1604_webtest/revisions.bzl | ash2k/container-definitions | 753fbbee3d26d63e500036d4c1ce6d95bc47031c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is autogenerated by the dependency update service and should not be modified directly.
# For more details, check the deps_spec.yaml file in the current folder.
RULES_DOCKER = struct(
commit = "faaa10a72fa9abde070e2a20d6046e9f9b849e9a",
sha256 = "feb53c560be2f97b7d02b23a1738a3154ba89fe630f09a7a838dcad38731b0b8",
)
LAYER_DEFINITIONS = struct(
commit = "eb43424ced6278d29fa68ac855a2ca2cf8b88af6",
sha256 = "b5c67806c4a3524d446ce4502649cf48594cbdb1dea75f9afce350d2aa98daf5",
)
| 40.037037 | 98 | 0.786309 |
e5aac79794358b0f420e2bb6615337bb72ba48e0 | 334 | py | Python | safergpy/code/bench/exps_config/methods/gpy_mle0132.py | johncoltrane1/saferGPMLE | b86fbd329eaad0b6374a1b28cae43b2a7f81eb61 | [
"BSD-3-Clause"
] | null | null | null | safergpy/code/bench/exps_config/methods/gpy_mle0132.py | johncoltrane1/saferGPMLE | b86fbd329eaad0b6374a1b28cae43b2a7f81eb61 | [
"BSD-3-Clause"
] | 10 | 2021-06-25T15:10:26.000Z | 2021-07-15T12:50:21.000Z | safergpy/code/bench/exps_config/methods/gpy_mle0132.py | johncoltrane1/saferGPMLE | b86fbd329eaad0b6374a1b28cae43b2a7f81eb61 | [
"BSD-3-Clause"
] | 3 | 2021-06-16T07:39:05.000Z | 2022-03-16T09:31:55.000Z | method_args = {
"param": "log",
"init": "classic_profiled",
"stopping_criterion": "soft",
"do_profiling": False,
"optim_scheme": [[1, 1.0]],
}
| 41.75 | 63 | 0.278443 |
4ccaf23c4cb15bca8adca39437c57ccfa3e80898 | 5,496 | py | Python | tests/test_states.py | meysam81/transitions | 02076d3f259efd1a2c066a823547ecd7083a49b7 | [
"MIT"
] | 3 | 2019-12-06T15:27:59.000Z | 2020-01-14T15:15:27.000Z | tests/test_states.py | v1k45/transitions | 02076d3f259efd1a2c066a823547ecd7083a49b7 | [
"MIT"
] | null | null | null | tests/test_states.py | v1k45/transitions | 02076d3f259efd1a2c066a823547ecd7083a49b7 | [
"MIT"
] | null | null | null | from transitions import Machine
from transitions.extensions.states import *
from transitions.extensions.factory import LockedHierarchicalGraphMachine
from time import sleep
from unittest import TestCase
from .test_graphviz import TestDiagramsLockedNested
try:
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
class TestTransitions(TestCase):
def test_tags(self):
@add_state_features(Tags)
class CustomMachine(Machine):
pass
states = [{"name": "A", "tags": ["initial", "success", "error_state"]}]
m = CustomMachine(states=states, initial='A')
s = m.get_state(m.state)
self.assertTrue(s.is_initial)
self.assertTrue(s.is_success)
self.assertTrue(s.is_error_state)
self.assertFalse(s.is_not_available)
def test_error(self):
@add_state_features(Error)
class CustomMachine(Machine):
pass
states = ['A', 'B', 'F',
{'name': 'S1', 'tags': ['accepted']},
{'name': 'S2', 'accepted': True}]
transitions = [['to_B', ['S1', 'S2'], 'B'], ['go', 'A', 'B'], ['fail', 'B', 'F'],
['success1', 'B', 'S2'], ['success2', 'B', 'S2']]
m = CustomMachine(states=states, transitions=transitions, auto_transitions=False, initial='A')
m.go()
m.success1()
self.assertTrue(m.get_state(m.state).is_accepted)
m.to_B()
m.success2()
self.assertTrue(m.get_state(m.state).is_accepted)
m.to_B()
with self.assertRaises(MachineError):
m.fail()
def test_error_callback(self):
@add_state_features(Error)
class CustomMachine(Machine):
pass
mock_callback = MagicMock()
states = ['A', {"name": "B", "on_enter": mock_callback}, 'C']
transitions = [
["to_B", "A", "B"],
["to_C", "B", "C"],
]
m = CustomMachine(states=states, transitions=transitions, auto_transitions=False, initial='A')
m.to_B()
self.assertEqual(m.state, "B")
self.assertTrue(mock_callback.called)
def test_timeout(self):
mock = MagicMock()
@add_state_features(Timeout)
class CustomMachine(Machine):
def timeout(self):
mock()
states = ['A',
{'name': 'B', 'timeout': 0.3, 'on_timeout': 'timeout'},
{'name': 'C', 'timeout': 0.3, 'on_timeout': mock}]
m = CustomMachine(states=states)
m.to_B()
m.to_A()
sleep(0.4)
self.assertFalse(mock.called)
m.to_B()
sleep(0.4)
self.assertTrue(mock.called)
m.to_C()
sleep(0.4)
self.assertEqual(mock.call_count, 2)
with self.assertRaises(AttributeError):
m.add_state({'name': 'D', 'timeout': 0.3})
def test_timeout_callbacks(self):
timeout = MagicMock()
notification = MagicMock()
counter = MagicMock()
@add_state_features(Timeout)
class CustomMachine(Machine):
pass
class Model(object):
def on_timeout_B(self):
counter()
def timeout(self):
timeout()
def notification(self):
notification()
def another_notification(self):
notification()
states = ['A', {'name': 'B', 'timeout': 0.05, 'on_timeout': 'timeout'}]
model = Model()
machine = CustomMachine(model=model, states=states, initial='A')
model.to_B()
sleep(0.1)
self.assertTrue(timeout.called)
self.assertTrue(counter.called)
machine.get_state('B').add_callback('timeout', 'notification')
machine.on_timeout_B('another_notification')
model.to_B()
sleep(0.1)
self.assertEqual(timeout.call_count, 2)
self.assertEqual(counter.call_count, 2)
self.assertTrue(notification.called)
machine.get_state('B').on_timeout = []
model.to_B()
sleep(0.1)
self.assertEqual(timeout.call_count, 2)
self.assertEqual(notification.call_count, 2)
def test_volatile(self):
class TemporalState(object):
def __init__(self):
self.value = 5
def increase(self):
self.value += 1
@add_state_features(Volatile)
class CustomMachine(Machine):
pass
states = ['A', {'name': 'B', 'volatile': TemporalState}]
m = CustomMachine(states=states, initial='A')
m.to_B()
self.assertEqual(m.scope.value, 5)
# should call method of TemporalState
m.scope.increase()
self.assertEqual(m.scope.value, 6)
# re-entering state should reset default volatile object
m.to_A()
self.assertFalse(hasattr(m.scope, 'value'))
m.scope.foo = 'bar'
m.to_B()
# custom attribute of A should be gone
self.assertFalse(hasattr(m.scope, 'foo'))
# value should be reset
self.assertEqual(m.scope.value, 5)
class TestStatesDiagramsLockedNested(TestDiagramsLockedNested):
def setUp(self):
@add_state_features(Error, Timeout, Volatile)
class CustomMachine(LockedHierarchicalGraphMachine):
pass
super(TestStatesDiagramsLockedNested, self).setUp()
self.machine_cls = CustomMachine
| 29.234043 | 102 | 0.576237 |
0f2afdca8183c0212d764254f2faa8ce4ed0cc98 | 1,301 | py | Python | tests/unit/test_main.py | depop/sloc_report | 0e65ece3f5c95e6ab38313b38c110fea275326b2 | [
"MIT"
] | 1 | 2016-11-08T13:57:42.000Z | 2016-11-08T13:57:42.000Z | tests/unit/test_main.py | depop/sloc_report | 0e65ece3f5c95e6ab38313b38c110fea275326b2 | [
"MIT"
] | null | null | null | tests/unit/test_main.py | depop/sloc_report | 0e65ece3f5c95e6ab38313b38c110fea275326b2 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from pytest import raises
# The parametrize function is generated, so this doesn't work:
#
# from pytest.mark import parametrize
#
import pytest
parametrize = pytest.mark.parametrize
from sloc_report import metadata
from sloc_report.main import main
class TestMain(object):
@parametrize('helparg', ['-h', '--help'])
def test_help(self, helparg, capsys):
with raises(SystemExit) as exc_info:
main(['progname', helparg])
out, err = capsys.readouterr()
# Should have printed some sort of usage message. We don't
# need to explicitly test the content of the message.
assert 'usage' in out
# Should have used the program name from the argument
# vector.
assert 'progname' in out
# Should exit with zero return code.
assert exc_info.value.code == 0
@parametrize('versionarg', ['-V', '--version'])
def test_version(self, versionarg, capsys):
with raises(SystemExit) as exc_info:
main(['progname', versionarg])
out, err = capsys.readouterr()
# Should print out version.
assert err == '{0} {1}\n'.format(metadata.project, metadata.version)
# Should exit with zero return code.
assert exc_info.value.code == 0
| 33.358974 | 76 | 0.64412 |
6d0c94b3675e72c080039399c68f3d9925d92af2 | 2,316 | py | Python | load_data.py | ckyeungac/DeepIRT | 7749767d30adb4ef12889ea47e0bcfefedafb046 | [
"MIT"
] | 38 | 2019-10-09T12:34:37.000Z | 2022-03-31T02:39:56.000Z | load_data.py | ckyeungac/DeepIRT | 7749767d30adb4ef12889ea47e0bcfefedafb046 | [
"MIT"
] | 4 | 2020-01-28T23:01:19.000Z | 2022-02-10T00:32:22.000Z | load_data.py | ckyeungac/DeepIRT | 7749767d30adb4ef12889ea47e0bcfefedafb046 | [
"MIT"
] | 15 | 2019-12-16T15:28:51.000Z | 2022-03-31T02:40:01.000Z | import numpy as np
from utils import getLogger
class DataLoader():
def __init__(self, n_questions, seq_len, separate_char):
self.separate_char = separate_char
self.n_questions = n_questions
self.seq_len = seq_len
def load_data(self, path):
q_data = []
qa_data = []
with open(path, 'r') as f:
for line_idx, line in enumerate(f):
line = line.strip()
# skip the number of sequence
if line_idx%3 == 0:
continue
# handle question_line
elif line_idx%3 == 1:
q_tag_list = line.split(self.separate_char)
# handle answer-line
elif line_idx%3 == 2:
a_tag_list = line.split(self.separate_char)
# find the number of split for this sequence
n_split = len(q_tag_list) // self.seq_len
if len(q_tag_list) % self.seq_len != 0:
n_split += 1
for k in range(n_split):
# temporary container for each sequence
q_container = list()
qa_container = list()
start_idx = k*self.seq_len
end_idx = min((k+1)*self.seq_len, len(a_tag_list))
for i in range(start_idx, end_idx):
q_value = int(q_tag_list[i])
a_value = int(a_tag_list[i]) # either be 0 or 1
qa_value = q_value + a_value * self.n_questions
q_container.append(q_value)
qa_container.append(qa_value)
q_data.append(q_container)
qa_data.append(qa_container)
# convert it to numpy array
q_data_array = np.zeros((len(q_data), self.seq_len))
qa_data_array = np.zeros((len(q_data), self.seq_len))
for i in range(len(q_data)):
_q_data = q_data[i]
_qa_data = qa_data[i]
q_data_array[i, :len(_q_data)] = _q_data
qa_data_array[i, :len(_qa_data)] = _qa_data
return q_data_array, qa_data_array
| 39.931034 | 76 | 0.49266 |
b1067eb8e20a059de2883214f2e61fd6115496cd | 1,284 | py | Python | checkers_engine/competition.py | Nikita2305/checkersAI | aee26e3573c60d9ce19bb7af8c8af86f19593f0c | [
"MIT"
] | null | null | null | checkers_engine/competition.py | Nikita2305/checkersAI | aee26e3573c60d9ce19bb7af8c8af86f19593f0c | [
"MIT"
] | null | null | null | checkers_engine/competition.py | Nikita2305/checkersAI | aee26e3573c60d9ce19bb7af8c8af86f19593f0c | [
"MIT"
] | null | null | null | class Competition:
def __init__(self, player1, player2):
player1.set_player_id(1)
player2.set_player_id(2)
self.players = [None, player1, player2]
self.player_index = 1
self.moves = []
def draw_check(self):
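        # Interpretive comment (an assumption from the indexing below): a draw
        # is declared when the last four moves have repeated three times in a
        # row, i.e. a threefold repetition of a four-move cycle.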
n = len(self.moves)
max_eq = 3
for r in range(1, 5):
for i in range(0, max_eq):
a = n - 4 * i - r
b = n - r
if (a < 0 or b < 0 or self.moves[a] != self.moves[b]):
return False
return True
def make_move(self):
current_board = self.players[self.player_index].board
self.player_index = current_board.player_turn
other_index = 3 - self.player_index
        if not current_board.get_possible_moves():
            return other_index
        if self.draw_check():
            return 0
move = self.players[self.player_index].choose_move()
self.moves += [move]
print(move)
self.players[self.player_index].handle_move(move)
self.players[other_index].handle_move(move)
return None
def process_game(self):
        while True:
            winner = self.make_move()
            if winner is not None:
return winner
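# Illustrative usage sketch (an assumption, not part of the original file);
# it presumes two player objects implementing set_player_id(), choose_move()
# and handle_move(), and exposing a `board` with player_turn and
# get_possible_moves():
#
#   game = Competition(player1, player2)
#   winner = game.process_game()  # 1 or 2 for a winner, 0 for a draw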
| 31.317073 | 70 | 0.543614 |
e5223b13b01b238edc15d0e1334f32cf5aefacd8 | 2,926 | py | Python | homeassistant/components/twitch/sensor.py | zalke/home-assistant | a31e49c857722c0723dc5297cd83cbce0f8716f6 | [
"Apache-2.0"
] | 4 | 2019-07-03T22:36:57.000Z | 2019-08-10T15:33:25.000Z | homeassistant/components/twitch/sensor.py | zalke/home-assistant | a31e49c857722c0723dc5297cd83cbce0f8716f6 | [
"Apache-2.0"
] | 7 | 2019-08-23T05:26:02.000Z | 2022-03-11T23:57:18.000Z | homeassistant/components/twitch/sensor.py | zalke/home-assistant | a31e49c857722c0723dc5297cd83cbce0f8716f6 | [
"Apache-2.0"
] | 3 | 2016-08-26T12:32:49.000Z | 2020-02-26T21:01:35.000Z | """Support for the Twitch stream status."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_GAME = 'game'
ATTR_TITLE = 'title'
CONF_CHANNELS = 'channels'
CONF_CLIENT_ID = 'client_id'
ICON = 'mdi:twitch'
STATE_OFFLINE = 'offline'
STATE_STREAMING = 'streaming'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CHANNELS, default=[]):
vol.All(cv.ensure_list, [cv.string]),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Twitch platform."""
from twitch import TwitchClient
from requests.exceptions import HTTPError
channels = config.get(CONF_CHANNELS, [])
client = TwitchClient(client_id=config.get(CONF_CLIENT_ID))
try:
client.ingests.get_server_list()
except HTTPError:
_LOGGER.error("Client ID is not valid")
return
users = client.users.translate_usernames_to_ids(channels)
add_entities([TwitchSensor(user, client) for user in users], True)
class TwitchSensor(Entity):
"""Representation of an Twitch channel."""
def __init__(self, user, client):
"""Initialize the sensor."""
self._client = client
self._user = user
self._channel = self._user.name
self._id = self._user.id
self._state = STATE_OFFLINE
self._preview = self._game = self._title = None
@property
def should_poll(self):
"""Device should be polled."""
return True
@property
def name(self):
"""Return the name of the sensor."""
return self._channel
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def entity_picture(self):
"""Return preview of current game."""
return self._preview
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._state == STATE_STREAMING:
return {
ATTR_GAME: self._game,
ATTR_TITLE: self._title,
}
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
# pylint: disable=no-member
def update(self):
"""Update device state."""
stream = self._client.streams.get_stream_by_user(self._id)
if stream:
self._game = stream.get('channel').get('game')
self._title = stream.get('channel').get('status')
self._preview = stream.get('preview').get('medium')
self._state = STATE_STREAMING
else:
self._preview = self._client.users.get_by_id(self._id).get('logo')
self._state = STATE_OFFLINE
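# Illustrative configuration sketch (an assumption, not part of the original
# file). Based on PLATFORM_SCHEMA above, a configuration.yaml entry would look
# roughly like:
#
#   sensor:
#     - platform: twitch
#       client_id: YOUR_CLIENT_ID
#       channels:
#         - some_channel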
| 27.603774 | 78 | 0.644908 |
ff52f9893f037aaa3d57fe0ef957b09fc3532f6d | 828 | py | Python | var/spack/repos/builtin/packages/r-rlang/package.py | nkianggiss/spack | 3477d3375142a30f5714bb5966a6d8bb22c33c06 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1 | 2018-08-20T06:55:11.000Z | 2018-08-20T06:55:11.000Z | var/spack/repos/builtin/packages/r-rlang/package.py | nkianggiss/spack | 3477d3375142a30f5714bb5966a6d8bb22c33c06 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1 | 2019-04-29T22:36:27.000Z | 2019-04-30T12:51:38.000Z | var/spack/repos/builtin/packages/r-rlang/package.py | nkianggiss/spack | 3477d3375142a30f5714bb5966a6d8bb22c33c06 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1 | 2020-03-12T19:27:17.000Z | 2020-03-12T19:27:17.000Z | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RRlang(RPackage):
"""A toolbox for working with base types, core R features like the
condition system, and core 'Tidyverse' features like tidy evaluation."""
homepage = "https://cran.r-project.org/package=rlang"
url = "https://cran.r-project.org/src/contrib/rlang_0.2.2.tar.gz"
list_url = "https://cran.r-project.org/src/contrib/Archive/rlang"
version('0.2.2', 'df2abf3a1936c503ed1edd4350ffb5f0')
version('0.1.4', 'daed5104d557c0cbfb4a654ec8ffb579')
version('0.1.2', '170f8cf7b61898040643515a1746a53a')
version('0.1.1', '38a51a0b8f8487eb52b4f3d986313682')
| 39.428571 | 79 | 0.725845 |
0609c3b21f5e6444494c60d18bb8321d8de0bdfc | 8,514 | py | Python | tensorflow_federated/python/core/impl/wrappers/computation_wrapper_instances.py | ESWZY/federated | 4e5738666cfe5498ab1538e9dd6dc886917acf46 | [
"Apache-2.0"
] | null | null | null | tensorflow_federated/python/core/impl/wrappers/computation_wrapper_instances.py | ESWZY/federated | 4e5738666cfe5498ab1538e9dd6dc886917acf46 | [
"Apache-2.0"
] | null | null | null | tensorflow_federated/python/core/impl/wrappers/computation_wrapper_instances.py | ESWZY/federated | 4e5738666cfe5498ab1538e9dd6dc886917acf46 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definitions of specific computation wrapper instances."""
import functools
from tensorflow_federated.python.common_libs import py_typecheck
from tensorflow_federated.python.core.api import computation_types
from tensorflow_federated.python.core.impl.compiler import building_blocks
from tensorflow_federated.python.core.impl.computation import computation_impl
from tensorflow_federated.python.core.impl.context_stack import context_stack_impl
from tensorflow_federated.python.core.impl.federated_context import federated_computation_utils
from tensorflow_federated.python.core.impl.tensorflow_context import tensorflow_serialization
from tensorflow_federated.python.core.impl.types import type_analysis
from tensorflow_federated.python.core.impl.types import type_conversions
from tensorflow_federated.python.core.impl.wrappers import computation_wrapper
# The documentation of the arguments and return values from the wrapper_fns
# is quite detailed and can be found in `computation_wrapper.py` along with
# the definitions of `_wrap` and `ComputationWrapper`. In order to avoid having
# to repeat those descriptions (and make any relevant changes in four separate
# places) the documentation here simply forwards readers over.
#
# pylint:disable=g-doc-args,g-doc-return-or-yield
def _tf_wrapper_fn(parameter_type, name):
"""Wrapper function to plug Tensorflow logic into the TFF framework.
This function is passed through `computation_wrapper.ComputationWrapper`.
  Documentation of its arguments can be found inside the definition of that class.
"""
del name # Unused.
if not type_analysis.is_tensorflow_compatible_type(parameter_type):
raise TypeError('`tf_computation`s can accept only parameter types with '
'constituents `SequenceType`, `StructType` '
'and `TensorType`; you have attempted to create one '
'with the type {}.'.format(parameter_type))
ctx_stack = context_stack_impl.context_stack
tf_serializer = tensorflow_serialization.tf_computation_serializer(
parameter_type, ctx_stack)
arg = next(tf_serializer)
try:
result = yield arg
except Exception as e: # pylint: disable=broad-except
tf_serializer.throw(e)
comp_pb, extra_type_spec = tf_serializer.send(result)
tf_serializer.close()
yield computation_impl.ComputationImpl(comp_pb, ctx_stack, extra_type_spec)
tensorflow_wrapper = computation_wrapper.ComputationWrapper(
computation_wrapper.PythonTracingStrategy(_tf_wrapper_fn))
def _federated_computation_wrapper_fn(parameter_type, name):
"""Wrapper function to plug orchestration logic into the TFF framework.
This function is passed through `computation_wrapper.ComputationWrapper`.
  Documentation of its arguments can be found inside the definition of that class.
"""
ctx_stack = context_stack_impl.context_stack
if parameter_type is None:
parameter_name = None
else:
parameter_name = 'arg'
fn_generator = federated_computation_utils.federated_computation_serializer(
parameter_name=parameter_name,
parameter_type=parameter_type,
context_stack=ctx_stack,
suggested_name=name)
arg = next(fn_generator)
try:
result = yield arg
except Exception as e: # pylint: disable=broad-except
fn_generator.throw(e)
target_lambda, extra_type_spec = fn_generator.send(result)
fn_generator.close()
yield computation_impl.ComputationImpl(target_lambda.proto, ctx_stack,
extra_type_spec)
federated_computation_wrapper = computation_wrapper.ComputationWrapper(
computation_wrapper.PythonTracingStrategy(
_federated_computation_wrapper_fn))
# pylint:enable=g-doc-args,g-doc-return-or-yield
def building_block_to_computation(building_block):
"""Converts a computation building block to a computation impl."""
py_typecheck.check_type(building_block,
building_blocks.ComputationBuildingBlock)
return computation_impl.ComputationImpl(building_block.proto,
context_stack_impl.context_stack)
def _check_returns_type_helper(fn, expected_return_type):
"""Helper for `check_returns_type`."""
if not computation_wrapper.is_function(fn):
    raise ValueError(f'`check_returns_type` expected a function, but found {fn}.')
@functools.wraps(fn)
def wrapped_func(*args, **kwargs):
result = fn(*args, **kwargs)
if result is None:
raise ValueError('TFF computations may not return `None`. '
'Consider instead returning `()`.')
result_type = type_conversions.infer_type(result)
if not result_type.is_identical_to(expected_return_type):
raise TypeError(
f'Value returned from `{fn.__name__}` did not match asserted type.\n'
+ computation_types.type_mismatch_error_message(
result_type,
expected_return_type,
computation_types.TypeRelation.IDENTICAL,
second_is_expected=True))
return result
return wrapped_func
def check_returns_type(*args):
"""Checks that the decorated function returns values of the provided type.
This decorator can be used to ensure that a TFF computation returns a value
of the expected type. For example:
```
@tff.tf_computation(tf.int32, tf.int32)
@tff.check_returns_type(tf.int32)
def add(a, b):
return a + b
```
It can also be applied to non-TFF (Python) functions to ensure that the values
they return conform to the expected type.
Note that this assertion is run whenever the function is called. In the case
of `@tff.tf_computation` and `@tff.federated_computation`s, this means that
the assertion will run when the computation is traced. To enable this,
`@tff.check_returns_type` should be applied *inside* the `tff.tf_computation`:
```
# YES:
@tff.tf_computation(...)
@tff.check_returns_type(...)
...
# NO:
@tff.check_returns_type(...) # Don't put this before the line below
@tff.tf_computation(...)
...
```
Args:
*args: Either a Python function, or TFF type spec, or both (function first).
Returns:
If invoked with a function as an argument, returns an instance of a TFF
computation constructed based on this function. If called without one, as
in the typical decorator style of usage, returns a callable that expects
to be called with the function definition supplied as a parameter. See
also `tff.tf_computation` for an extended documentation.
"""
if not args:
    raise ValueError('`check_returns_type` called without a return type')
if computation_wrapper.is_function(args[0]):
# If the first argument on the list is a Python function or a
# tf.function, this is the one that's being wrapped. This is the case of
# either a decorator invocation without arguments as "@xyz" applied to a
# function definition, of an inline invocation as "... = xyz(lambda....).
if len(args) != 2:
raise ValueError(
f'`check_returns_type` expected two arguments: a function to decorate '
f'and an expected return type. Found {len(args)} arguments: {args}')
return _check_returns_type_helper(args[0],
computation_types.to_type(args[1]))
else:
# The function is being invoked as a decorator with arguments.
# The arguments come first, then the returned value is applied to
# the function to be wrapped.
if len(args) != 1:
raise ValueError(
f'`check_returns_type` expected a single argument specifying the '
f'return type. Found {len(args)} arguments: {args}')
return_type = computation_types.to_type(args[0])
if return_type is None:
raise ValueError('Asserted return type may not be `None`. '
'Consider instead a return type of `()`')
return lambda fn: _check_returns_type_helper(fn, return_type)
| 41.940887 | 95 | 0.738431 |
b9badfac338c558f75221a1234ac33caa554e6d2 | 1,508 | py | Python | learning/algorithms/nb/naive_bayse.py | dmytroKarataiev/MachineLearningIntro | a115a04b7313ef1550fa77f2233b659cae414cf4 | [
"MIT"
] | 1 | 2016-09-27T10:17:29.000Z | 2016-09-27T10:17:29.000Z | learning/algorithms/nb/naive_bayse.py | dmytroKarataiev/MachineLearningIntro | a115a04b7313ef1550fa77f2233b659cae414cf4 | [
"MIT"
] | null | null | null | learning/algorithms/nb/naive_bayse.py | dmytroKarataiev/MachineLearningIntro | a115a04b7313ef1550fa77f2233b659cae414cf4 | [
"MIT"
] | 1 | 2018-09-08T14:41:27.000Z | 2018-09-08T14:41:27.000Z | #!/usr/bin/python
""" Complete the code in ClassifyNB.py with the sklearn
Naive Bayes classifier to classify the terrain data.
The objective of this exercise is to recreate the decision
boundary found in the lesson video, and make a plot that
visually shows the decision boundary """
from class_vis import prettyPicture, output_image
from ClassifyNB import classify
from learning.algorithms.prep_terrain_data import makeTerrainData
features_train, labels_train, features_test, labels_test = makeTerrainData()
### the training data (features_train, labels_train) have both "fast" and "slow" points mixed
### in together--separate them so we can give them different colors in the scatterplot,
### and visually identify them
grade_fast = [features_train[ii][0] for ii in range(0, len(features_train)) if labels_train[ii]==0]
bumpy_fast = [features_train[ii][1] for ii in range(0, len(features_train)) if labels_train[ii]==0]
grade_slow = [features_train[ii][0] for ii in range(0, len(features_train)) if labels_train[ii]==1]
bumpy_slow = [features_train[ii][1] for ii in range(0, len(features_train)) if labels_train[ii]==1]
# You will need to complete this function imported from the ClassifyNB script.
# Be sure to change to that code tab to complete this quiz.
clf = classify(features_train, labels_train)
### draw the decision boundary with the test points overlaid
prettyPicture(clf, features_test, labels_test)
output_image("test.png", "png", open("test.png", "rb").read())
| 38.666667 | 99 | 0.764589 |
59d948ba2f4eb3ee8f7cdcf6e1217ed7fd4f8278 | 2,163 | py | Python | demo/configs/config_template_rna_atac_ka_knn.py | mukamel-lab/SingleCellFusion_EnhancerPaper | acbfa5184667ca57c333c04c310b0712a0e8e15e | [
"MIT"
] | null | null | null | demo/configs/config_template_rna_atac_ka_knn.py | mukamel-lab/SingleCellFusion_EnhancerPaper | acbfa5184667ca57c333c04c310b0712a0e8e15e | [
"MIT"
] | null | null | null | demo/configs/config_template_rna_atac_ka_knn.py | mukamel-lab/SingleCellFusion_EnhancerPaper | acbfa5184667ca57c333c04c310b0712a0e8e15e | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""An example configuration file
"""
import sys
import os
# Assuming the cell order in the metadata tables is the same as in the gene-level matrices.
# The output knn matrices follow the same order as well.
dir_path = os.path.dirname(os.path.realpath(__file__))
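# Template placeholders: the three TOBEFILLED values below must be replaced
# with concrete settings before this config can be imported.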
ka_smooth = TOBEFILLED
knn = TOBEFILLED
date = TOBEFILLED
# Configs
name = 'mop_rna_atac_ka{}_knn{}_{}'.format(ka_smooth, knn, date,)
outdir = os.path.join(dir_path, '../results')
output_pcX_all = outdir + '/pcX_all_{}.npy'.format(name)
output_cells_all = outdir + '/cells_all_{}.npy'.format(name)
output_imputed_data_format = outdir + '/imputed_data_{}_{{}}.npy'.format(name)
output_clst_and_umap = outdir + '/intg_summary_{}.tsv'.format(name)
output_figures = outdir + '/figures/{}_{{}}.{{}}'.format(name)
output_cluster_centroids = outdir + '/centroids_{}.pkl'.format(name)
save_knn = True # new required arguments (7/27/2020)
output_knn_within = outdir + "/knn_within_{}_{{}}.npz".format(name)
output_knn_across = outdir + "/knn_across_{}_{{}}_{{}}.npz".format(name)
# end of new required arguments (7/27/2020)
# required for downsamp (8/7/2020)
output_cells = outdir + "/cells_{{}}_{}.npy".format(name)
DATA_DIR = os.path.join(dir_path, '../data')
# fixed dataset configs
sys.path.insert(0, DATA_DIR)
from __init__datasets import *
meta_f = os.path.join(DATA_DIR, '{0}_metadata.tsv')
hvftrs_f = os.path.join(DATA_DIR, '{0}_hvfeatures.{1}')
hvftrs_gene = os.path.join(DATA_DIR, '{0}_hvfeatures.gene')
hvftrs_cell = os.path.join(DATA_DIR, '{0}_hvfeatures.cell')
mods_selected = [
'atac',
'rna',
]
features_selected = ['atac']
# check features
for features_modality in features_selected:
assert (features_modality in mods_selected)
# within modality
ps = {'mc': 0.9,
'atac': 0.1,
'rna': 0.7,
}
drop_npcs = {
'mc': 0,
'atac': 0,
'rna': 0,
}
ka_smooth = ka_smooth # default: 5
# across modality
cross_mod_distance_measure = 'correlation' # cca
knn = knn
relaxation = 3
n_cca = 30
# PCA
npc = 50
# clustering
k = 30 # default: 30
resolutions = [0.1, 1, 2, 4]
# umap
umap_neighbors = 60
min_dist = 0.5 | 27.730769 | 97 | 0.691632 |
ca9b9cfbf03f699456378c577a676b9b1d44db2b | 2,567 | py | Python | tests/test_run.py | blacktanktop/vivid | e85837bcd86575f8a275517250dd026aac3e451f | [
"BSD-2-Clause-FreeBSD"
] | 39 | 2020-05-13T18:13:25.000Z | 2022-03-02T10:46:53.000Z | tests/test_run.py | blacktanktop/vivid | e85837bcd86575f8a275517250dd026aac3e451f | [
"BSD-2-Clause-FreeBSD"
] | 29 | 2020-05-13T18:04:09.000Z | 2022-02-27T04:43:18.000Z | tests/test_run.py | blacktanktop/vivid | e85837bcd86575f8a275517250dd026aac3e451f | [
"BSD-2-Clause-FreeBSD"
] | 3 | 2020-05-13T19:17:01.000Z | 2020-10-28T21:29:42.000Z | import os
import shutil
import pandas as pd
import pytest
from vivid.core import BaseBlock
from vivid.runner import Runner
from .conftest import CounterBlock
def test_block_transform_count(regression_set):
a = CounterBlock(name='a')
b = CounterBlock(name='b')
c = CounterBlock(name='c')
d = CounterBlock(name='d', parent=[a, b, c])
e = CounterBlock(name='e', parent=[a, c])
f = CounterBlock(name='f', parent=[a, d])
g = CounterBlock(name='g', parent=[e, f])
input_df, y = regression_set
runner = Runner(blocks=f)
runner.fit(input_df, y)
for block in [a, b, c, d, f]:
assert block.counter == 1, block
# 学習時の予測値は取り出せる
out = runner.load_output(block, is_fit_context=True)
assert out is not None
assert isinstance(out, pd.DataFrame)
assert block.name in out.columns[0]
# predict はしていないので予測値を取り出そうとするとエラーになる
with pytest.raises(FileNotFoundError):
runner.load_output(block, is_fit_context=False)
for block in [g, e]:
assert block.counter == 0, block
with pytest.raises(ValueError):
runner.load_output(block, is_fit_context=True)
runner.fit(input_df, y)
for block in [a, b, c, d, f]:
assert block.counter == 1, block
dirpath = os.path.join(runner.experiment.output_dir, d.runtime_env)
shutil.rmtree(dirpath)
runner.fit(input_df, y)
for block in [d, f]:
assert block.counter == 2, block
runner.predict(input_df, cache=False)
for block in [a, b, c, d, f]:
out = runner.load_output(block, is_fit_context=False)
assert out is not None
assert isinstance(out, pd.DataFrame)
def test_re_fit():
input_df1 = pd.DataFrame({'a': [1, 2, 3]})
input_df2 = pd.DataFrame({'a': [1, 2, 2]})
class Count(BaseBlock):
def fit(self, source_df: pd.DataFrame, y, experiment) -> pd.DataFrame:
self.vc = source_df['a'].value_counts().to_dict()
return self.transform(source_df)
def transform(self, source_df):
x = source_df['a'].map(self.vc)
out_df = pd.DataFrame()
out_df['a_count'] = x.values
return out_df
a = Count('a')
runner = Runner(a)
runner.fit(input_df1)
runner.predict(input_df1)
y_trans = runner.load_output(a, is_fit_context=False)
print(y_trans)
runner.fit(input_df2)
runner.predict(input_df2)
y_trans2 = runner.load_output(a, is_fit_context=False)
print(y_trans2)
assert y_trans.equals(y_trans2)
| 28.208791 | 78 | 0.634982 |
4d06a0043605d2bc0d2c76d247fd92667dbd2811 | 7,410 | py | Python | objsamples/sample_folder.py | jmartinezbernet/FuelSDK-Python | bc0f3f61e20d84e8d0600e8adb13e121d2cea047 | [
"MIT"
] | 91 | 2015-12-04T23:33:50.000Z | 2022-03-18T15:53:02.000Z | objsamples/sample_folder.py | jmartinezbernet/FuelSDK-Python | bc0f3f61e20d84e8d0600e8adb13e121d2cea047 | [
"MIT"
] | 70 | 2015-11-30T21:33:32.000Z | 2022-03-17T09:10:14.000Z | objsamples/sample_folder.py | jmartinezbernet/FuelSDK-Python | bc0f3f61e20d84e8d0600e8adb13e121d2cea047 | [
"MIT"
] | 155 | 2015-10-23T00:25:21.000Z | 2022-03-30T09:42:36.000Z | import ET_Client
try:
debug = False
stubObj = ET_Client.ET_Client(False, debug)
# Retrieve All Folder with GetMoreResults
print('>>> Retrieve All Folder with GetMoreResults')
getFolder = ET_Client.ET_Folder()
getFolder.auth_stub = stubObj
getFolder.props = ["ID", "Client.ID", "ParentFolder.ID", "ParentFolder.CustomerKey", "ParentFolder.ObjectID", "ParentFolder.Name", "ParentFolder.Description", "ParentFolder.ContentType", "ParentFolder.IsActive", "ParentFolder.IsEditable", "ParentFolder.AllowChildren", "Name", "Description", "ContentType", "IsActive", "IsEditable", "AllowChildren", "CreatedDate", "ModifiedDate", "Client.ModifiedBy", "ObjectID", "CustomerKey", "Client.EnterpriseID", "Client.CreatedBy"]
getResponse = getFolder.get()
print('Retrieve Status: ' + str(getResponse.status))
print('Code: ' + str(getResponse.code))
print('Message: ' + str(getResponse.message))
print('MoreResults: ' + str(getResponse.more_results))
print('Results Length: ' + str(len(getResponse.results)))
#print 'Results: ' + str(getResponse.results)
while getResponse.more_results:
print('>>> Continue Retrieve All Folder with GetMoreResults')
getResponse = getFolder.getMoreResults()
print('Retrieve Status: ' + str(getResponse.status))
print('Code: ' + str(getResponse.code))
print('Message: ' + str(getResponse.message))
print('MoreResults: ' + str(getResponse.more_results))
print('RequestID: ' + str(getResponse.request_id))
print('Results Length: ' + str(len(getResponse.results)))
NameOfTestFolder = "PythonSDKFolder"
# Retrieve Specific Folder for Email Folder ParentID
print('>>> Retrieve Specific Folder for Email Folder ParentID')
getFolder = ET_Client.ET_Folder()
getFolder.auth_stub = stubObj
getFolder.props = ["ID"]
#getFolder.search_filter = {'Property' : 'ContentType','SimpleOperator' : 'equals','Value' : "email"}
getFolder.search_filter = {'LeftOperand' : {'Property' : 'ContentType','SimpleOperator' : 'equals','Value' : "email"}, 'RightOperand' : {'Property' : 'ParentFolder.ID','SimpleOperator' : 'equals','Value' : "0"}, 'LogicalOperator' : 'AND'}
getResponse = getFolder.get()
print('Retrieve Status: ' + str(getResponse.status))
print('Code: ' + str(getResponse.code))
print('Message: ' + str(getResponse.message))
print('MoreResults: ' + str(getResponse.more_results))
print('Results Length: ' + str(len(getResponse.results)))
print('Results: ' + str(getResponse.results))
ParentIDForEmail = getResponse.results[0].ID
print('Parent Folder for Email: ' + str(ParentIDForEmail))
# Create Folder
print('>>> Create Folder')
postFolder = ET_Client.ET_Folder()
postFolder.auth_stub = stubObj
postFolder.props = {"CustomerKey" : NameOfTestFolder, "Name" : NameOfTestFolder, "Description" : NameOfTestFolder, "ContentType": "EMAIL", "ParentFolder" : {"ID" : ParentIDForEmail}}
postResponse = postFolder.post()
print('Post Status: ' + str(postResponse.status))
print('Code: ' + str(postResponse.code))
print('Message: ' + str(postResponse.message))
print('Result Count: ' + str(len(postResponse.results)))
print('Results: ' + str(postResponse.results))
# Retrieve newly created Folder
print('>>> Retrieve newly created Folder')
getFolder = ET_Client.ET_Folder()
getFolder.auth_stub = stubObj
getFolder.props = ["ID", "Client.ID", "ParentFolder.ID", "ParentFolder.CustomerKey", "ParentFolder.ObjectID", "ParentFolder.Name", "ParentFolder.Description", "ParentFolder.ContentType", "ParentFolder.IsActive", "ParentFolder.IsEditable", "ParentFolder.AllowChildren", "Name", "Description", "ContentType", "IsActive", "IsEditable", "AllowChildren", "CreatedDate", "ModifiedDate", "Client.ModifiedBy", "ObjectID", "CustomerKey", "Client.EnterpriseID", "Client.CreatedBy"]
getFolder.search_filter = {'Property' : 'CustomerKey','SimpleOperator' : 'equals','Value' : NameOfTestFolder}
getResponse = getFolder.get()
print('Retrieve Status: ' + str(getResponse.status))
print('Code: ' + str(getResponse.code))
print('Message: ' + str(getResponse.message))
print('MoreResults: ' + str(getResponse.more_results))
print('Results Length: ' + str(len(getResponse.results)))
print('Results: ' + str(getResponse.results))
# Update Folder
print('>>> Update Folder')
patchFolder = ET_Client.ET_Folder()
patchFolder.auth_stub = stubObj
patchFolder.props = {"CustomerKey" : NameOfTestFolder, "Name" : NameOfTestFolder, "Description" : "Updated Description"}
patchResponse = patchFolder.patch()
print('Patch Status: ' + str(patchResponse.status))
print('Code: ' + str(patchResponse.code))
print('Message: ' + str(patchResponse.message))
print('Result Count: ' + str(len(patchResponse.results)))
print('Results: ' + str(patchResponse.results))
# Retrieve updated Folder
print('>>> Retrieve updated Folder')
getFolder = ET_Client.ET_Folder()
getFolder.auth_stub = stubObj
getFolder.props = ["ID", "Client.ID", "ParentFolder.ID", "ParentFolder.CustomerKey", "ParentFolder.ObjectID", "ParentFolder.Name", "ParentFolder.Description", "ParentFolder.ContentType", "ParentFolder.IsActive", "ParentFolder.IsEditable", "ParentFolder.AllowChildren", "Name", "Description", "ContentType", "IsActive", "IsEditable", "AllowChildren", "CreatedDate", "ModifiedDate", "Client.ModifiedBy", "ObjectID", "CustomerKey", "Client.EnterpriseID", "Client.CreatedBy"]
getFolder.search_filter = {'Property' : 'CustomerKey','SimpleOperator' : 'equals','Value' : NameOfTestFolder}
getResponse = getFolder.get()
print('Retrieve Status: ' + str(getResponse.status))
print('Code: ' + str(getResponse.code))
print('Message: ' + str(getResponse.message))
print('MoreResults: ' + str(getResponse.more_results))
print('Results Length: ' + str(len(getResponse.results)))
print('Results: ' + str(getResponse.results))
# Delete Folder
print('>>> Delete Folder')
deleteFolder = ET_Client.ET_Folder()
deleteFolder.auth_stub = stubObj
deleteFolder.props = {"CustomerKey" : NameOfTestFolder}
deleteResponse = deleteFolder.delete()
print('Delete Status: ' + str(deleteResponse.status))
print('Code: ' + str(deleteResponse.code))
print('Message: ' + str(deleteResponse.message))
print('Result Count: ' + str(len(deleteResponse.results)))
print('Results: ' + str(deleteResponse.results))
# Retrieve Folder to confirm deletion
print('>>> Retrieve Folder to confirm deletion')
getFolder = ET_Client.ET_Folder()
getFolder.auth_stub = stubObj
getFolder.props = ["ID"]
getFolder.search_filter = {'Property' : 'CustomerKey','SimpleOperator' : 'equals','Value' : NameOfTestFolder}
getResponse = getFolder.get()
print('Retrieve Status: ' + str(getResponse.status))
print('Code: ' + str(getResponse.code))
print('Message: ' + str(getResponse.message))
print('MoreResults: ' + str(getResponse.more_results))
print('Results Length: ' + str(len(getResponse.results)))
print('Results: ' + str(getResponse.results))
except Exception as e:
    print('Caught exception: ' + str(e))  # exceptions carry no .message attribute in Python 3
print(e) | 57 | 477 | 0.68664 |
1f885dd99be1eac6cf9d37c48fe924c57a79662e | 399 | py | Python | {{cookiecutter.project_name}}/{{cookiecutter.project_name}}/core/helpers.py | jacebrowning/template-django | bc284d7f7df0c3ff897fc5312cbcaf8396cdce5d | [
"Unlicense"
] | 8 | 2018-03-23T10:15:35.000Z | 2021-03-23T18:41:04.000Z | {{cookiecutter.project_name}}/{{cookiecutter.project_name}}/core/helpers.py | jacebrowning/template-django | bc284d7f7df0c3ff897fc5312cbcaf8396cdce5d | [
"Unlicense"
] | 28 | 2017-02-16T03:51:45.000Z | 2022-02-18T16:09:35.000Z | {{cookiecutter.project_name}}/{{cookiecutter.project_name}}/core/helpers.py | jacebrowning/template-django | bc284d7f7df0c3ff897fc5312cbcaf8396cdce5d | [
"Unlicense"
] | 1 | 2021-03-23T18:41:05.000Z | 2021-03-23T18:41:05.000Z | from django.conf import settings
def build_url(path: str) -> str:
assert settings.BASE_URL
assert path.startswith('/')
return settings.BASE_URL + path
def allow_debug(request) -> bool:
if not settings.ALLOW_DEBUG:
return False
if request.GET.get("debug") == "false":
return False
if request.GET.get("debug"):
return True
return settings.DEBUG
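# Illustrative usage sketch (an assumption, not part of the original file):
#
#   build_url('/api/items/')   # -> settings.BASE_URL + '/api/items/'
#   allow_debug(request)       # False when ?debug=false is passed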
| 22.166667 | 43 | 0.659148 |
dbf3192d13bce59bda94086f38aed5ea6907cfa3 | 7,835 | py | Python | pattern/web/pdf/latin_enc.py | PenTesting/pattern | 820cccf33c6ac4a4f1564a273137171cfa6ab7cb | [
"BSD-3-Clause"
] | 1,103 | 2018-04-20T14:08:11.000Z | 2022-03-29T06:22:43.000Z | pattern/web/pdf/latin_enc.py | PenTesting/pattern | 820cccf33c6ac4a4f1564a273137171cfa6ab7cb | [
"BSD-3-Clause"
] | 29 | 2019-04-03T14:52:38.000Z | 2022-03-24T12:33:05.000Z | pattern/web/pdf/latin_enc.py | PenTesting/pattern | 820cccf33c6ac4a4f1564a273137171cfa6ab7cb | [
"BSD-3-Clause"
] | 262 | 2017-09-16T22:15:50.000Z | 2022-03-31T00:38:42.000Z | #!/usr/bin/env python2
""" Standard encoding tables used in PDF.
This table is extracted from PDF Reference Manual 1.6, pp.925
"D.1 Latin Character Set and Encodings"
"""
ENCODING = [
# (name, std, mac, win, pdf)
('A', 65, 65, 65, 65),
('AE', 225, 174, 198, 198),
('Aacute', None, 231, 193, 193),
('Acircumflex', None, 229, 194, 194),
('Adieresis', None, 128, 196, 196),
('Agrave', None, 203, 192, 192),
('Aring', None, 129, 197, 197),
('Atilde', None, 204, 195, 195),
('B', 66, 66, 66, 66),
('C', 67, 67, 67, 67),
('Ccedilla', None, 130, 199, 199),
('D', 68, 68, 68, 68),
('E', 69, 69, 69, 69),
('Eacute', None, 131, 201, 201),
('Ecircumflex', None, 230, 202, 202),
('Edieresis', None, 232, 203, 203),
('Egrave', None, 233, 200, 200),
('Eth', None, None, 208, 208),
('Euro', None, None, 128, 160),
('F', 70, 70, 70, 70),
('G', 71, 71, 71, 71),
('H', 72, 72, 72, 72),
('I', 73, 73, 73, 73),
('Iacute', None, 234, 205, 205),
('Icircumflex', None, 235, 206, 206),
('Idieresis', None, 236, 207, 207),
('Igrave', None, 237, 204, 204),
('J', 74, 74, 74, 74),
('K', 75, 75, 75, 75),
('L', 76, 76, 76, 76),
('Lslash', 232, None, None, 149),
('M', 77, 77, 77, 77),
('N', 78, 78, 78, 78),
('Ntilde', None, 132, 209, 209),
('O', 79, 79, 79, 79),
('OE', 234, 206, 140, 150),
('Oacute', None, 238, 211, 211),
('Ocircumflex', None, 239, 212, 212),
('Odieresis', None, 133, 214, 214),
('Ograve', None, 241, 210, 210),
('Oslash', 233, 175, 216, 216),
('Otilde', None, 205, 213, 213),
('P', 80, 80, 80, 80),
('Q', 81, 81, 81, 81),
('R', 82, 82, 82, 82),
('S', 83, 83, 83, 83),
('Scaron', None, None, 138, 151),
('T', 84, 84, 84, 84),
('Thorn', None, None, 222, 222),
('U', 85, 85, 85, 85),
('Uacute', None, 242, 218, 218),
('Ucircumflex', None, 243, 219, 219),
('Udieresis', None, 134, 220, 220),
('Ugrave', None, 244, 217, 217),
('V', 86, 86, 86, 86),
('W', 87, 87, 87, 87),
('X', 88, 88, 88, 88),
('Y', 89, 89, 89, 89),
('Yacute', None, None, 221, 221),
('Ydieresis', None, 217, 159, 152),
('Z', 90, 90, 90, 90),
('Zcaron', None, None, 142, 153),
('a', 97, 97, 97, 97),
('aacute', None, 135, 225, 225),
('acircumflex', None, 137, 226, 226),
('acute', 194, 171, 180, 180),
('adieresis', None, 138, 228, 228),
('ae', 241, 190, 230, 230),
('agrave', None, 136, 224, 224),
('ampersand', 38, 38, 38, 38),
('aring', None, 140, 229, 229),
('asciicircum', 94, 94, 94, 94),
('asciitilde', 126, 126, 126, 126),
('asterisk', 42, 42, 42, 42),
('at', 64, 64, 64, 64),
('atilde', None, 139, 227, 227),
('b', 98, 98, 98, 98),
('backslash', 92, 92, 92, 92),
('bar', 124, 124, 124, 124),
('braceleft', 123, 123, 123, 123),
('braceright', 125, 125, 125, 125),
('bracketleft', 91, 91, 91, 91),
('bracketright', 93, 93, 93, 93),
('breve', 198, 249, None, 24),
('brokenbar', None, None, 166, 166),
('bullet', 183, 165, 149, 128),
('c', 99, 99, 99, 99),
('caron', 207, 255, None, 25),
('ccedilla', None, 141, 231, 231),
('cedilla', 203, 252, 184, 184),
('cent', 162, 162, 162, 162),
('circumflex', 195, 246, 136, 26),
('colon', 58, 58, 58, 58),
('comma', 44, 44, 44, 44),
('copyright', None, 169, 169, 169),
('currency', 168, 219, 164, 164),
('d', 100, 100, 100, 100),
('dagger', 178, 160, 134, 129),
('daggerdbl', 179, 224, 135, 130),
('degree', None, 161, 176, 176),
('dieresis', 200, 172, 168, 168),
('divide', None, 214, 247, 247),
('dollar', 36, 36, 36, 36),
('dotaccent', 199, 250, None, 27),
('dotlessi', 245, 245, None, 154),
('e', 101, 101, 101, 101),
('eacute', None, 142, 233, 233),
('ecircumflex', None, 144, 234, 234),
('edieresis', None, 145, 235, 235),
('egrave', None, 143, 232, 232),
('eight', 56, 56, 56, 56),
('ellipsis', 188, 201, 133, 131),
('emdash', 208, 209, 151, 132),
('endash', 177, 208, 150, 133),
('equal', 61, 61, 61, 61),
('eth', None, None, 240, 240),
('exclam', 33, 33, 33, 33),
('exclamdown', 161, 193, 161, 161),
('f', 102, 102, 102, 102),
('fi', 174, 222, None, 147),
('five', 53, 53, 53, 53),
('fl', 175, 223, None, 148),
('florin', 166, 196, 131, 134),
('four', 52, 52, 52, 52),
('fraction', 164, 218, None, 135),
('g', 103, 103, 103, 103),
('germandbls', 251, 167, 223, 223),
('grave', 193, 96, 96, 96),
('greater', 62, 62, 62, 62),
('guillemotleft', 171, 199, 171, 171),
('guillemotright', 187, 200, 187, 187),
('guilsinglleft', 172, 220, 139, 136),
('guilsinglright', 173, 221, 155, 137),
('h', 104, 104, 104, 104),
('hungarumlaut', 205, 253, None, 28),
('hyphen', 45, 45, 45, 45),
('i', 105, 105, 105, 105),
('iacute', None, 146, 237, 237),
('icircumflex', None, 148, 238, 238),
('idieresis', None, 149, 239, 239),
('igrave', None, 147, 236, 236),
('j', 106, 106, 106, 106),
('k', 107, 107, 107, 107),
('l', 108, 108, 108, 108),
('less', 60, 60, 60, 60),
('logicalnot', None, 194, 172, 172),
('lslash', 248, None, None, 155),
('m', 109, 109, 109, 109),
('macron', 197, 248, 175, 175),
('minus', None, None, None, 138),
('mu', None, 181, 181, 181),
('multiply', None, None, 215, 215),
('n', 110, 110, 110, 110),
('nine', 57, 57, 57, 57),
('ntilde', None, 150, 241, 241),
('numbersign', 35, 35, 35, 35),
('o', 111, 111, 111, 111),
('oacute', None, 151, 243, 243),
('ocircumflex', None, 153, 244, 244),
('odieresis', None, 154, 246, 246),
('oe', 250, 207, 156, 156),
('ogonek', 206, 254, None, 29),
('ograve', None, 152, 242, 242),
('one', 49, 49, 49, 49),
('onehalf', None, None, 189, 189),
('onequarter', None, None, 188, 188),
('onesuperior', None, None, 185, 185),
('ordfeminine', 227, 187, 170, 170),
('ordmasculine', 235, 188, 186, 186),
('oslash', 249, 191, 248, 248),
('otilde', None, 155, 245, 245),
('p', 112, 112, 112, 112),
('paragraph', 182, 166, 182, 182),
('parenleft', 40, 40, 40, 40),
('parenright', 41, 41, 41, 41),
('percent', 37, 37, 37, 37),
('period', 46, 46, 46, 46),
('periodcentered', 180, 225, 183, 183),
('perthousand', 189, 228, 137, 139),
('plus', 43, 43, 43, 43),
('plusminus', None, 177, 177, 177),
('q', 113, 113, 113, 113),
('question', 63, 63, 63, 63),
('questiondown', 191, 192, 191, 191),
('quotedbl', 34, 34, 34, 34),
('quotedblbase', 185, 227, 132, 140),
('quotedblleft', 170, 210, 147, 141),
('quotedblright', 186, 211, 148, 142),
('quoteleft', 96, 212, 145, 143),
('quoteright', 39, 213, 146, 144),
('quotesinglbase', 184, 226, 130, 145),
('quotesingle', 169, 39, 39, 39),
('r', 114, 114, 114, 114),
('registered', None, 168, 174, 174),
('ring', 202, 251, None, 30),
('s', 115, 115, 115, 115),
('scaron', None, None, 154, 157),
('section', 167, 164, 167, 167),
('semicolon', 59, 59, 59, 59),
('seven', 55, 55, 55, 55),
('six', 54, 54, 54, 54),
('slash', 47, 47, 47, 47),
('space', 32, 32, 32, 32),
('sterling', 163, 163, 163, 163),
('t', 116, 116, 116, 116),
('thorn', None, None, 254, 254),
('three', 51, 51, 51, 51),
('threequarters', None, None, 190, 190),
('threesuperior', None, None, 179, 179),
('tilde', 196, 247, 152, 31),
('trademark', None, 170, 153, 146),
('two', 50, 50, 50, 50),
('twosuperior', None, None, 178, 178),
('u', 117, 117, 117, 117),
('uacute', None, 156, 250, 250),
('ucircumflex', None, 158, 251, 251),
('udieresis', None, 159, 252, 252),
('ugrave', None, 157, 249, 249),
('underscore', 95, 95, 95, 95),
('v', 118, 118, 118, 118),
('w', 119, 119, 119, 119),
('x', 120, 120, 120, 120),
('y', 121, 121, 121, 121),
('yacute', None, None, 253, 253),
('ydieresis', None, 216, 255, 255),
('yen', 165, 180, 165, 165),
('z', 122, 122, 122, 122),
('zcaron', None, None, 158, 158),
('zero', 48, 48, 48, 48),
]
| 32.376033 | 61 | 0.523165 |
b9b6c62c212642794e62bbfe399359333f13c1d4 | 420 | py | Python | script popolamento DB/env/lib/python3.7/site-packages/setuptools/_vendor/packaging/utils.py | 2dadsgn/Smart-vase-webapp-flask- | 0714d960ec21c77be069dd07b1bc8407f33e0b72 | [
"Apache-2.0"
] | 1 | 2020-10-21T04:51:46.000Z | 2020-10-21T04:51:46.000Z | script popolamento DB/env/lib/python3.7/site-packages/setuptools/_vendor/packaging/utils.py | 2dadsgn/Smart-vase-webapp-flask- | 0714d960ec21c77be069dd07b1bc8407f33e0b72 | [
"Apache-2.0"
] | 2 | 2020-10-23T06:51:04.000Z | 2020-11-12T07:03:37.000Z | script popolamento DB/env/lib/python3.7/site-packages/setuptools/_vendor/packaging/utils.py | 2dadsgn/Smart-vase-webapp-flask- | 0714d960ec21c77be069dd07b1bc8407f33e0b72 | [
"Apache-2.0"
] | 1 | 2020-10-24T05:21:20.000Z | 2020-10-24T05:21:20.000Z | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import re
_canonicalize_regex = re.compile(r"[-_.]+")
def canonicalize_name(name):
# This is taken from PEP 503.
return _canonicalize_regex.sub("-", name).lower()
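# For example (PEP 503 normalization, as implemented above):
#   canonicalize_name("Foo__Bar.baz")  # -> "foo-bar-baz"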
| 30 | 79 | 0.75 |
090df65b7b555f4dd970592b82c45e38057681af | 8,013 | py | Python | tools/random_graph.py | rujiewu/pose_lcn | 70f2f76a1c3c94510483c30ee33d8917386d002b | [
"MIT"
] | 25 | 2020-03-31T05:36:50.000Z | 2022-03-03T03:20:16.000Z | tools/random_graph.py | rujiewu/pose_lcn | 70f2f76a1c3c94510483c30ee33d8917386d002b | [
"MIT"
] | 3 | 2020-05-08T03:17:14.000Z | 2021-11-10T07:48:36.000Z | tools/random_graph.py | rujiewu/pose_lcn | 70f2f76a1c3c94510483c30ee33d8917386d002b | [
"MIT"
] | 4 | 2020-07-07T09:41:17.000Z | 2021-01-30T09:09:14.000Z | import random
import argparse
from pprint import pprint
class Graph(object):
def __init__(self, nodes, edges=None, loops=False, multigraph=False,
digraph=False):
self.nodes = nodes
if edges:
self.edges = edges
self.edge_set = self._compute_edge_set()
else:
self.edges = []
self.edge_set = set()
self.loops = loops
self.multigraph = multigraph
self.digraph = digraph
def _compute_edge_set(self):
raise NotImplementedError()
def add_edge(self, edge):
"""Add the edge if the graph type allows it."""
if self.multigraph or edge not in self.edge_set:
self.edges.append(edge)
self.edge_set.add(edge)
if not self.digraph:
self.edge_set.add(edge[::-1]) # add other direction to set.
return True
return False
def make_random_edge(self):
"""Generate a random edge between any two nodes in the graph."""
if self.loops:
# With replacement.
random_edge = (random.choice(self.nodes), random.choice(self.nodes))
else:
# Without replacement.
random_edge = tuple(random.sample(self.nodes, 2))
return random_edge
def add_random_edges(self, total_edges):
"""Add random edges until the number of desired edges is reached."""
while len(self.edges) < total_edges:
self.add_edge(self.make_random_edge())
def sort_edges(self):
"""If undirected, sort order that the nodes are listed in the edge."""
if not self.digraph:
self.edges = [((t, s) if t < s else (s, t)) for s, t in self.edges]
self.edges.sort()
def generate_gml(self):
# Inspiration:
# http://networkx.lanl.gov/_modules/networkx/readwrite/gml.html#generate_gml
indent = ' ' * 4
yield 'graph ['
if self.digraph:
yield indent + 'directed 1'
# Write nodes
for index, node in enumerate(self.nodes):
yield indent + 'node ['
yield indent * 2 + 'id {}'.format(index)
yield indent * 2 + 'label "{}"'.format(str(node))
yield indent + ']'
# Write edges
for source, target in self.edges:
yield indent + 'edge ['
yield indent * 2 + 'source {}'.format(self.nodes.index(source))
yield indent * 2 + 'target {}'.format(self.nodes.index(target))
yield indent + ']'
yield ']'
def write_gml(self, fname):
        # open in binary mode: the GML lines are encoded to latin-1 bytes below
        with open(fname, mode='wb') as f:
for line in self.generate_gml():
line += '\n'
f.write(line.encode('latin-1'))
def check_num_edges(nodes, num_edges, loops, multigraph, digraph):
"""Checks that the number of requested edges is acceptable."""
num_nodes = len(nodes)
# Check min edges
min_edges = num_nodes - 1
if num_edges < min_edges:
raise ValueError('num_edges less than minimum (%i)' % min_edges)
# Check max edges
max_edges = num_nodes * (num_nodes - 1)
if not digraph:
        max_edges //= 2  # n*(n-1) is always even; keep the count an integer
if loops:
max_edges += num_nodes
if not multigraph and num_edges > max_edges:
raise ValueError('num_edges greater than maximum (%i)' % max_edges)
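# For example, 5 nodes in a simple undirected graph without loops admit
# between 4 edges (a spanning tree) and 10 edges (the complete graph).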
def naive(nodes, num_edges, loops=False, multigraph=False, digraph=False):
# Idea:
# Each node starts off in its own component.
# Keep track of the components, combining them when an edge merges two.
# While there are less edges than requested:
# Randomly select two nodes, and create an edge between them.
# If there is more than one component remaining, repeat the process.
check_num_edges(nodes, num_edges, loops, multigraph, digraph)
def update_components(components, edge):
# Update the component list.
comp_index = [None] * 2
for index, comp in enumerate(components):
for i in (0, 1):
if edge[i] in comp:
comp_index[i] = index
# Break early once we have found both sets.
if all(x is not None for x in comp_index):
break
# Combine components if the nodes aren't already in the same one.
if comp_index[0] != comp_index[1]:
components[comp_index[0]] |= components[comp_index[1]]
del components[comp_index[1]]
finished = False
while not finished:
graph = Graph(nodes, loops=loops, multigraph=multigraph, digraph=digraph)
# Start with each node in its own component.
components = [set([x]) for x in nodes]
while len(graph.edges) < num_edges:
# Generate a random edge.
edge = graph.make_random_edge()
if graph.add_edge(edge):
# Update the component list.
update_components(components, edge)
if len(components) == 1:
finished = True
return graph
def partition(nodes, num_edges, loops=False, multigraph=False, digraph=False):
# Algorithm inspiration:
# http://stackoverflow.com/questions/2041517/random-simple-connected-graph-generation-with-given-sparseness
# Idea:
# Create a random connected graph by adding edges between nodes from
# different partitions.
# Add random edges until the number of desired edges is reached.
check_num_edges(nodes, num_edges, loops, multigraph, digraph)
graph = Graph(nodes, loops=loops, multigraph=multigraph, digraph=digraph)
# Create two partitions, S and T. Initially store all nodes in S.
S, T = set(nodes), set()
# Randomly select a first node, and place it in T.
    node_s = random.choice(tuple(S))  # random.sample on a set fails on Python 3.11+
S.remove(node_s)
T.add(node_s)
# Create a random connected graph.
while S:
# Select random node from S, and another in T.
        node_s, node_t = random.choice(tuple(S)), random.choice(tuple(T))
# Create an edge between the nodes, and move the node from S to T.
edge = (node_s, node_t)
        assert graph.add_edge(edge)  # node_s is new, so the edge cannot be a duplicate
S.remove(node_s)
T.add(node_s)
# Add random edges until the number of desired edges is reached.
graph.add_random_edges(num_edges)
return graph
def random_walk(nodes, num_edges, loops=False, multigraph=False, digraph=False):
# Algorithm inspiration:
# https://en.wikipedia.org/wiki/Uniform_spanning_tree#The_uniform_spanning_tree
# Idea:
# Create a uniform spanning tree (UST) using a random walk.
# Add random edges until the number of desired edges is reached.
check_num_edges(nodes, num_edges, loops, multigraph, digraph)
# Create two partitions, S and T. Initially store all nodes in S.
S, T = set(nodes), set()
# Pick a random node, and mark it as visited and the current node.
    current_node = random.choice(tuple(S))  # set -> tuple for Python 3.11+ compatibility
S.remove(current_node)
T.add(current_node)
graph = Graph(nodes, loops=loops, multigraph=multigraph, digraph=digraph)
# Create a random connected graph.
while S:
# Randomly pick the next node from the neighbors of the current node.
# As we are generating a connected graph, we assume a complete graph.
neighbor_node = random.sample(nodes, 1).pop()
# If the new node hasn't been visited, add the edge from current to new.
if neighbor_node not in T:
edge = (current_node, neighbor_node)
graph.add_edge(edge)
S.remove(neighbor_node)
T.add(neighbor_node)
# Set the new node as the current node.
current_node = neighbor_node
# Add random edges until the number of desired edges is reached.
graph.add_random_edges(num_edges)
return graph
def generate_random_graph(num_nodes, num_edges):
    nodes = list(range(int(num_nodes)))
graph = random_walk(nodes, num_edges, False, False, False)
return graph
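# Minimal usage sketch (added for illustration; the file name is an example):
#
#     graph = generate_random_graph(10, 15)   # 10 nodes, 15 edges
#     graph.sort_edges()                      # canonical edge ordering
#     graph.write_gml('random_graph.gml')     # serialize to GML on disk
#
# Any generator can also be called directly, e.g.
# partition(list(range(10)), 15) or naive(list(range(10)), 15).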
503ce8f2a996a180d9ec40b534afaf375b6e30c3 | 792 | py | Python | backend/api/views/annotation.py | 12xiaoni/text-label | 7456c5e73d32bcfc81a02be7e0d748f162934d35 | ["MIT"] | null | null | null | backend/api/views/annotation.py | 12xiaoni/text-label | 7456c5e73d32bcfc81a02be7e0d748f162934d35 | ["MIT"] | null | null | null | backend/api/views/annotation.py | 12xiaoni/text-label | 7456c5e73d32bcfc81a02be7e0d748f162934d35 | ["MIT"] | null | null | null |
from django.shortcuts import get_object_or_404
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from members.permissions import IsAnnotationApprover, IsProjectAdmin
from ..models import Example
from ..serializers import ApproverSerializer
class ApprovalAPI(APIView):
permission_classes = [IsAuthenticated & (IsAnnotationApprover | IsProjectAdmin)]
def post(self, request, *args, **kwargs):
approved = self.request.data.get('approved', True)
example = get_object_or_404(Example, pk=self.kwargs['example_id'])
example.annotations_approved_by = self.request.user if approved else None
example.save()
return Response(ApproverSerializer(example).data)
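# Hypothetical wiring sketch (added; route, name, and module layout are
# assumptions for illustration, not part of this file):
#
# from django.urls import path
# from .views.annotation import ApprovalAPI
#
# urlpatterns = [
#     path('examples/<int:example_id>/approval', ApprovalAPI.as_view(),
#          name='example-approval'),
# ]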
78164a00dc6c8569957941a8c8b534a193f02f71 | 11,534 | py | Python | tensorflow_probability/python/math/scan_associative.py | mrksr/probability | 242731a9b2b42d4eb676539658a8d5e8267c0720 | ["Apache-2.0"] | null | null | null | tensorflow_probability/python/math/scan_associative.py | mrksr/probability | 242731a9b2b42d4eb676539658a8d5e8267c0720 | ["Apache-2.0"] | null | null | null | tensorflow_probability/python/math/scan_associative.py | mrksr/probability | 242731a9b2b42d4eb676539658a8d5e8267c0720 | ["Apache-2.0"] | null | null | null |
# Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Utilities for parallel calculation of prefix sums."""
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.internal import prefer_static
def _interleave(a, b):
"""Interleaves two `Tensor`s along their first axis."""
# [a b c ...] [d e f ...] -> [a d b e c f ...]
num_elems_a = prefer_static.shape(a)[0]
num_elems_b = prefer_static.shape(b)[0]
def _interleave_with_b(a):
return tf.reshape(
tf.stack([a, b], axis=1),
prefer_static.concat([[2 * num_elems_b],
prefer_static.shape(a)[1:]], axis=0))
return prefer_static.cond(
prefer_static.equal(num_elems_a, num_elems_b + 1),
lambda: tf.concat([_interleave_with_b(a[:-1]), a[-1:]], axis=0),
lambda: _interleave_with_b(a))
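# Example (added for clarity): _interleave on equal-length inputs
# [a, b] and [c, d] gives [a, c, b, d]; on [a, b, c] and [d, e]
# (first input one element longer) it gives [a, d, b, e, c].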
def _validate_elem_length(elems_flat):
"""Checks that elems all have the same length, and returns that length."""
assertions = []
elem_length = prefer_static.shape(elems_flat[0])[0]
is_consistent = prefer_static.reduce_all([
prefer_static.equal(
prefer_static.shape(elem)[0], elem_length)
for elem in elems_flat[1:]])
is_consistent_ = tf.get_static_value(is_consistent)
if is_consistent_ is None:
assertions.append(
tf.debugging.assert_equal(
is_consistent, True,
message='Input `Tensor`s must have the same first dimension.'
' (saw: {})'.format([elem.shape for elem in elems_flat])))
elif not is_consistent_:
raise ValueError(
'Input `Tensor`s must have the same first dimension.'
' (saw: {})'.format([elem.shape for elem in elems_flat]))
return elem_length, assertions
def scan_associative(fn, elems, validate_args=False, name=None):
"""Perform a scan with an associative binary operation, in parallel.
The associative scan operation computes the cumulative sum, or
[all-prefix sum](https://en.wikipedia.org/wiki/Prefix_sum), of a set of
elements under an associative binary operation [1]. For example, using the
ordinary addition operator `fn = lambda a, b: a + b`, this is equivalent to
the ordinary cumulative sum `tf.math.cumsum` along axis 0. This method
supports the general case of arbitrary associative binary operations operating
on `Tensor`s or structures of `Tensor`s:
```python
associative_scan(fn, elems) = tf.stack([
elems[0],
fn(elems[0], elems[1]),
fn(elems[0], fn(elems[1], elems[2])),
...
fn(elems[0], fn(elems[1], fn(..., fn(elems[-2], elems[-1]))),
], axis=0)
```
The associative structure allows the computation to be decomposed
and executed by parallel reduction. Where a naive sequential
implementation would loop over all `N` elements, this method requires
only a logarithmic number (`2 * ceil(log_2 N)`) of sequential steps, and
can thus yield substantial performance speedups from hardware-accelerated
vectorization. The total number of invocations of the binary operation
(including those performed in parallel) is
`2 * (N / 2 + N / 4 + ... + 1) = 2N - 2`
--- i.e., approximately twice as many as a naive approach.
[1] Blelloch, Guy E.
[Prefix sums and their applications](
https://www.cs.cmu.edu/~guyb/papers/Ble93.pdf)
Technical Report CMU-CS-90-190,
School of Computer Science,
Carnegie Mellon University, 1990.
Args:
fn: Python callable implementing an associative binary operation with
signature `r = fn(a, b)`. This must satisfy associativity:
`fn(a, fn(b, c)) == fn(fn(a, b), c)`. The inputs and result are
(possibly nested structures of) `Tensor`(s), matching `elems`. Each
`Tensor` has a leading batch dimension in place of `elem_length`; the `fn`
is expected to map over this dimension. The result `r` has the same shape
(and structure) as the two inputs `a` and `b`.
elems: A (possibly nested structure of) `Tensor`(s), each with leading
dimension `elem_length`. Note that `elem_length` determines the number
of recursive steps required to perform the scan: if, in graph mode,
this is not statically available, then ops will be created to
handle any `elem_length` up to the maximum dimension of a `Tensor`.
validate_args: Python `bool`. When `True`, runtime checks
for invalid inputs are performed. This may carry a performance cost.
Default value: `False`.
name: Python `str` name prefixed to ops created by this function.
Returns:
result: A (possibly nested structure of) `Tensor`(s) of the same shape
and structure as `elems`, in which the `k`th element is the result of
recursively applying `fn` to combine the first `k` elements of
`elems`. For example, given `elems = [a, b, c, ...]`, the result
would be `[a, fn(a, b), fn(fn(a, b), c), ...]`.
#### Examples
```python
import tensorflow as tf
import tensorflow_probability as tfp
import operator
# Example 1: Partials sums of numbers.
tfp.math.associative_scan(operator.add, tf.range(0, 4))
# ==> [ 0, 1, 3, 6]
# Example 2: Partial products of random matrices.
dist = tfp.distributions.Normal(loc=0., scale=1.)
matrices = dist.sample(sample_shape=[100, 2, 2])
tfp.math.associative_scan(tf.matmul, matrices)
```
"""
def lowered_fn(a, b):
# Lower `fn` to operate on flattened sequences of elems.
with tf.name_scope('fn'):
return tf.nest.flatten(
fn(tf.nest.pack_sequence_as(elems, a),
tf.nest.pack_sequence_as(elems, b)))
elems_flat = [tf.convert_to_tensor(elem) for elem in tf.nest.flatten(elems)]
# Summary of algorithm:
#
# Consider elements of `_scan(elems)` at odd indices. That's the same as first
# summing successive pairs of elements of `elems` and performing a scan on
# that half sized tensor. We perform the latter scan by recursion.
#
# Now consider the even elements of `_scan(elems)`. These can be computed
# from the odd elements of `_scan(elems)` by adding each odd element of
# `_scan(elems)` to the matching even element in the original `elems`.
#
# We return the odd and even elements interleaved.
#
# For the base case of the recursion we return the first element
# of `elems` followed by the sum of the first two elements computed as
# a (small two-down-to-one) reduction step.
# The following is a pictorial representation of the algorithm using the
# variables in the code below. The operator '+' is used to represent
# the binary operation.
# Note how the recursive call to `_scan` operates on a reduced form of the
# input array in which successive pairs have already been summed.
# elems x0 x1 x2 x3 x4 x5 ...
# |\ / | \ / | \ /
# | \ / | \ / | \ /
# | \ / | \ / | \ /
# | \ / | \ / | \ /
# reduced | x0+x1 | x2+x3 | x4+x5 ...
# _elems | | | | | |
# | | | | | |
# | | | | | |
# _scan(..) | | | | | |
# +--|----+----------|-----+----------|-------+---- ...
# | | | |
# | | | |
# +--|----+----------|-----+----------|-------+---- ...
# | | | | | |
# odd | x0+x1 | x0+...+x3 | x0+..+x5 ...
# _elems | | \ | | \ | |
# | | \ | | \ | |
# even | | \ | | \ | |
# _elems x0 | x0+...+x2 | x0+...+x4 | ...
# | | | | | |
# inter | | | | | |
# leave(..) | | | | | |
# x0 x0+x1 x0+...+x2 x0+...+x3 x0+...+x4 x0+...+x5 ...
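  # Concrete example (added for clarity): scanning [1, 2, 3, 4] with addition.
  # reduced_elems = [1+2, 3+4] = [3, 7]; the recursive scan of that gives
  # odd_elems = [3, 10]; even_elems = [1, 3+3] = [1, 6]; interleaving yields
  # [1, 3, 6, 10], i.e. the cumulative sum.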
def _scan(level, elems):
"""Perform scan on `elems`."""
elem_length = prefer_static.shape(elems[0])[0]
# Apply `fn` to reduce adjacent pairs to a single entry.
a = [elem[0:-1:2] for elem in elems]
b = [elem[1::2] for elem in elems]
reduced_elems = lowered_fn(a, b)
def handle_base_case_elem_length_two():
return [tf.concat([elem[0:1], reduced_elem], axis=0)
for (reduced_elem, elem) in zip(reduced_elems, elems)]
def handle_base_case_elem_length_three():
reduced_reduced_elems = lowered_fn(
reduced_elems, [elem[2:3] for elem in elems])
return [
tf.concat([elem[0:1], reduced_elem, reduced_reduced_elem], axis=0)
for (reduced_reduced_elem, reduced_elem, elem)
in zip(reduced_reduced_elems, reduced_elems, elems)]
# Base case of recursion: assumes `elem_length` is 2 or 3.
at_base_case = prefer_static.logical_or(
prefer_static.equal(elem_length, 2),
prefer_static.equal(elem_length, 3))
base_value = lambda: prefer_static.cond( # pylint: disable=g-long-lambda
prefer_static.equal(elem_length, 2),
handle_base_case_elem_length_two,
handle_base_case_elem_length_three)
if level == 0:
return base_value()
def recursive_case():
"""Evaluate the next step of the recursion."""
odd_elems = _scan(level - 1, reduced_elems)
def even_length_case():
return lowered_fn([odd_elem[:-1] for odd_elem in odd_elems],
[elem[2::2] for elem in elems])
def odd_length_case():
return lowered_fn([odd_elem for odd_elem in odd_elems],
[elem[2::2] for elem in elems])
results = prefer_static.cond(
prefer_static.equal(elem_length % 2, 0),
even_length_case,
odd_length_case)
# The first element of a scan is the same as the first element
# of the original `elems`.
even_elems = [tf.concat([elem[0:1], result], axis=0)
for (elem, result) in zip(elems, results)]
return list(map(_interleave, even_elems, odd_elems))
return prefer_static.cond(at_base_case, base_value, recursive_case)
with tf.name_scope(name if name else 'scan_associative'):
elem_length, assertions = _validate_elem_length(elems_flat)
max_num_levels = 64 # If input length is not static, we build a graph
# that supports tensors of length up to 2**64.
with tf.control_dependencies(assertions if validate_args else []):
return prefer_static.cond(
elem_length < 2,
lambda: elems,
lambda: (tf.nest.pack_sequence_as(elems, # pylint: disable=g-long-lambda
_scan(max_num_levels, elems_flat))))
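# Minimal pure-NumPy sketch of the same odd/even recursion (added for
# illustration only; it mirrors the algorithm above without the TF machinery,
# name scopes, or the multi-element base cases):
#
#   import numpy as np
#
#   def _toy_scan(fn, x):
#       if len(x) == 1:
#           return x
#       reduced = fn(x[0:-1:2], x[1::2])      # combine adjacent pairs
#       odd = _toy_scan(fn, reduced)          # scan the half-sized array
#       if len(x) % 2 == 0:
#           even = fn(odd[:-1], x[2::2])      # fill in even positions
#       else:
#           even = fn(odd, x[2::2])
#       even = np.concatenate([x[:1], even])  # a scan starts with x[0]
#       out = np.empty(len(x), dtype=x.dtype)
#       out[0::2], out[1::2] = even, odd
#       return out
#
#   _toy_scan(np.add, np.arange(1, 5))  # -> array([ 1,  3,  6, 10])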
1b5575fd137c632d4cc1e21106e58dc620816422 | 3,494 | py | Python | tests/test_ymlmd.py | cisagov/log4j-md-yml | ed00ae7765f6c2af6c0b3b609651ef29aef8f09c | ["CC0-1.0"] | 2 | 2021-12-31T16:29:53.000Z | 2022-01-07T12:21:52.000Z | tests/test_ymlmd.py | cisagov/md-table-to-yml | ed00ae7765f6c2af6c0b3b609651ef29aef8f09c | ["CC0-1.0"] | 18 | 2021-12-28T17:10:16.000Z | 2022-03-21T20:17:12.000Z | tests/test_ymlmd.py | cisagov/md-table-to-yml | ed00ae7765f6c2af6c0b3b609651ef29aef8f09c | ["CC0-1.0"] | null | null | null |
#!/usr/bin/env pytest -vs
"""Tests for ymlmd."""
# Standard Python Libraries
import os
import sys
from unittest.mock import patch
# Third-Party Libraries
import pytest
# cisagov Libraries
import ymlmd
import ymlmd.yml2md
log_levels = (
"debug",
"info",
"warning",
"error",
"critical",
)
# define sources of version strings
RELEASE_TAG = os.getenv("RELEASE_TAG")
PROJECT_VERSION = ymlmd.__version__
def test_stdout_version(capsys):
"""Verify that version string sent to stdout agrees with the module version."""
with pytest.raises(SystemExit):
with patch.object(sys, "argv", ["bogus", "--version"]):
ymlmd.yml2md.main()
captured = capsys.readouterr()
assert (
captured.out == f"{PROJECT_VERSION}\n"
), "standard output by '--version' should agree with module.__version__"
def test_running_as_module(capsys):
"""Verify that the __main__.py file loads correctly."""
with pytest.raises(SystemExit):
with patch.object(sys, "argv", ["bogus", "--version"]):
# F401 is a "Module imported but unused" warning. This import
# emulates how this project would be run as a module. The only thing
# being done by __main__ is importing the main entrypoint of the
# package and running it, so there is nothing to use from this
# import. As a result, we can safely ignore this warning.
# cisagov Libraries
import ymlmd.__main__ # noqa: F401
captured = capsys.readouterr()
assert (
captured.out == f"{PROJECT_VERSION}\n"
), "standard output by '--version' should agree with module.__version__"
@pytest.mark.skipif(
RELEASE_TAG in [None, ""], reason="this is not a release (RELEASE_TAG not set)"
)
def test_release_version():
"""Verify that release tag version agrees with the module version."""
assert (
RELEASE_TAG == f"v{PROJECT_VERSION}"
), "RELEASE_TAG does not match the project version"
# @pytest.mark.parametrize("level", log_levels)
# def test_log_levels(level):
# """Validate commandline log-level arguments."""
# with patch.object(
# sys, "argv", ["bogus", f"--log-level={level}", "data/test_file.yml"]
# ):
# with patch.object(logging.root, "handlers", []):
# assert (
# logging.root.hasHandlers() is False
# ), "root logger should not have handlers yet"
# return_code = None
# try:
# ymlmd.yml2md.main()
# except SystemExit as sys_exit:
# return_code = sys_exit.code
# assert return_code is None, "main() should return success"
# assert (
# logging.root.hasHandlers() is True
# ), "root logger should now have a handler"
# assert (
# logging.getLevelName(logging.root.getEffectiveLevel()) == level.upper()
# ), f"root logger level should be set to {level.upper()}"
# assert return_code is None, "main() should return success"
def test_bad_log_level():
"""Validate bad log-level argument returns error."""
with patch.object(
sys, "argv", ["bogus", "--log-level=emergency", "data/test_file.yml"]
):
return_code = None
try:
ymlmd.yml2md.main()
except SystemExit as sys_exit:
return_code = sys_exit.code
assert return_code == 1, "main() should exit with error"
86e17153cbaffb37aa948a16ead0b7d9d6a80a53 | 1,338 | py | Python | corehq/apps/hqcase/management/commands/migrate_case_export_tags.py | dslowikowski/commcare-hq | ad8885cf8dab69dc85cb64f37aeaf06106124797 | ["BSD-3-Clause"] | 1 | 2015-02-10T23:26:39.000Z | 2015-02-10T23:26:39.000Z | corehq/apps/hqcase/management/commands/migrate_case_export_tags.py | SEL-Columbia/commcare-hq | 992ee34a679c37f063f86200e6df5a197d5e3ff6 | ["BSD-3-Clause"] | null | null | null | corehq/apps/hqcase/management/commands/migrate_case_export_tags.py | SEL-Columbia/commcare-hq | 992ee34a679c37f063f86200e6df5a197d5e3ff6 | ["BSD-3-Clause"] | null | null | null |
from django.core.management.base import LabelCommand, CommandError
from casexml.apps.case.models import CommCareCase
from optparse import make_option
from couchexport.models import SavedExportSchema
from dimagi.utils.couch.database import get_db
class Command(LabelCommand):
help = "."
args = ""
label = ""
option_list = LabelCommand.option_list + \
(make_option('--dryrun', action='store_true', dest='dryrun', default=False,
help="Don't do the actual migration, just print the output"),)
def handle(self, *args, **options):
if len(args) != 0: raise CommandError("This command doesn't expect arguments!")
count = 0
for line in get_db().view("case/by_user", reduce=False):
case = CommCareCase.get(line["id"])
if hasattr(case, 'domain') and hasattr(case, 'type'):
if not "#export_tag" in case or case['#export_tag'] != ["domain", "type"]:
print "migrating case %s in domain %s" % (case.get_id, case.domain)
case['#export_tag'] = ["domain", "type"]
count += 1
if not options["dryrun"]:
case.save()
prefix = "would have " if options["dryrun"] else ""
print "%smigrated %s cases" % (prefix, count)
85b4be756730e6b009199e8eb678ae0646e7263e | 113 | py | Python | src_3d/__init__.py | xzluo97/MvMM-RegNet | c08d5df14b4a9c4a98c66973ff4950aba7f416e4 | ["MIT"] | 19 | 2020-07-14T02:23:58.000Z | 2022-03-15T12:22:49.000Z | src_3d/__init__.py | xzluo97/MvMM-RegNet | c08d5df14b4a9c4a98c66973ff4950aba7f416e4 | ["MIT"] | 4 | 2020-09-25T22:42:40.000Z | 2021-08-25T15:03:29.000Z | src_3d/__init__.py | xzluo97/MvMM-RegNet | c08d5df14b4a9c4a98c66973ff4950aba7f416e4 | ["MIT"] | 7 | 2020-08-29T15:46:13.000Z | 2021-07-16T01:51:28.000Z |
__author__ = 'Xinzhe Luo, Fudan University'
__version__ = '0.1'
__credits__ = 'Xiahai Zhuang, Fudan University'
eb1a6c102fd32f083388381afbd5fd399563199e | 391 | py | Python | petl/config.py | OptionMetrics/petl | ee0a196f40c07218249be0d279b72e57d177a7fd | ["MIT"] | null | null | null | petl/config.py | OptionMetrics/petl | ee0a196f40c07218249be0d279b72e57d177a7fd | ["MIT"] | null | null | null | petl/config.py | OptionMetrics/petl | ee0a196f40c07218249be0d279b72e57d177a7fd | ["MIT"] | null | null | null |
from __future__ import division, print_function, absolute_import
from petl.compat import text_type
look_style = 'grid' # alternatives: 'simple', 'minimal'
look_limit = 5
look_index_header = False
look_vrepr = repr
look_width = None
see_limit = 5
see_index_header = False
see_vrepr = repr
display_limit = 5
display_index_header = False
display_vrepr = text_type
sort_buffersize = 100000
| 20.578947 | 64 | 0.797954 |
72219e324749d8d230c665528f452145d6dba287 | 678 | py | Python | maddrive_adas/sign_det/base.py | lsd-maddrive/adas_system | 0352d59a500aebbd68fbf45f416fb98d1b850e13 | ["MIT"] | null | null | null | maddrive_adas/sign_det/base.py | lsd-maddrive/adas_system | 0352d59a500aebbd68fbf45f416fb98d1b850e13 | ["MIT"] | 14 | 2021-10-02T10:10:45.000Z | 2022-03-26T08:32:48.000Z | maddrive_adas/sign_det/base.py | lsd-maddrive/adas_system | 0352d59a500aebbd68fbf45f416fb98d1b850e13 | ["MIT"] | null | null | null |
import numpy as np
from typing import List
class DetectedSign:
"""
    Class to control detected-sign operations.

    It defines a single point of contract for sign-detection results so that
    components communicate in one common format.
"""
def __init__(self, bbox: List[float]) -> None:
self._bbox = np.array(bbox, dtype=np.float32)
def as_dict(self) -> dict:
return {"bbox": self._bbox.tolist()}
class BaseSignsDetector:
def __init__(self) -> None:
pass
    def detect(self, img: np.ndarray) -> dict:  # np.ndarray is the type; np.array is a factory
predictions = self.detect_batch([img])
return predictions[0]
def detect_batch(self, imgs: List[np.array]) -> List[dict]:
raise NotImplementedError()
| 23.37931 | 69 | 0.647493 |
649eef8e1623d5f6b11c11e3ae0b36b840745758 | 638 | py | Python | Support/Fuego/Pythia/pythia-0.4/packages/pyre/pyre/applications/Resource.py | marient/PelePhysics | e6ad1839d77b194e09ab44ff850c9489652e5d81 | ["BSD-3-Clause-LBNL"] | 1 | 2019-04-24T13:32:23.000Z | 2019-04-24T13:32:23.000Z | Support/Fuego/Pythia/pythia-0.4/packages/pyre/pyre/applications/Resource.py | marient/PelePhysics | e6ad1839d77b194e09ab44ff850c9489652e5d81 | ["BSD-3-Clause-LBNL"] | null | null | null | Support/Fuego/Pythia/pythia-0.4/packages/pyre/pyre/applications/Resource.py | marient/PelePhysics | e6ad1839d77b194e09ab44ff850c9489652e5d81 | ["BSD-3-Clause-LBNL"] | null | null | null |
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Michael A.G. Aivazis
# California Institute of Technology
# (C) 1998-2003 All Rights Reserved
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
class Resource(object):
def name(self):
return self._name
def __init__(self, name):
self._name = name
return
# version
__id__ = "$Id: Resource.py,v 1.1 2003/06/02 19:30:20 aivazis Exp $"
# End of file
| 20.580645 | 83 | 0.380878 |
58a5e346a811e18c758bc542d14f40e0a59027bb | 56,837 | py | Python | astropy/coordinates/tests/test_representation.py | fardal/astropy | 642a7768b4ec2e3ab9fd9acdf53632a3c7eca372 | ["BSD-3-Clause"] | 8 | 2019-04-27T01:19:45.000Z | 2020-09-21T03:31:01.000Z | astropy/coordinates/tests/test_representation.py | fardal/astropy | 642a7768b4ec2e3ab9fd9acdf53632a3c7eca372 | ["BSD-3-Clause"] | null | null | null | astropy/coordinates/tests/test_representation.py | fardal/astropy | 642a7768b4ec2e3ab9fd9acdf53632a3c7eca372 | ["BSD-3-Clause"] | 5 | 2019-04-27T01:19:47.000Z | 2020-09-20T15:15:19.000Z |
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from copy import deepcopy
from collections import OrderedDict
import pytest
import numpy as np
from numpy.testing import assert_allclose
from astropy import units as u
from astropy.tests.helper import (assert_quantity_allclose as
assert_allclose_quantity, catch_warnings)
from astropy.utils import isiterable
from astropy.utils.compat import NUMPY_LT_1_14
from astropy.utils.exceptions import AstropyDeprecationWarning
from astropy.coordinates.angles import Longitude, Latitude, Angle
from astropy.coordinates.distances import Distance
from astropy.coordinates.representation import (REPRESENTATION_CLASSES,
DIFFERENTIAL_CLASSES,
BaseRepresentation,
SphericalRepresentation,
UnitSphericalRepresentation,
SphericalCosLatDifferential,
CartesianRepresentation,
CylindricalRepresentation,
PhysicsSphericalRepresentation,
CartesianDifferential,
SphericalDifferential,
_combine_xyz)
# Preserve the original REPRESENTATION_CLASSES dict so that importing
# the test file doesn't add a persistent test subclass (LogDRepresentation)
def setup_function(func):
func.REPRESENTATION_CLASSES_ORIG = deepcopy(REPRESENTATION_CLASSES)
def teardown_function(func):
REPRESENTATION_CLASSES.clear()
REPRESENTATION_CLASSES.update(func.REPRESENTATION_CLASSES_ORIG)
class TestSphericalRepresentation:
def test_name(self):
assert SphericalRepresentation.get_name() == 'spherical'
assert SphericalRepresentation.get_name() in REPRESENTATION_CLASSES
def test_empty_init(self):
with pytest.raises(TypeError) as exc:
s = SphericalRepresentation()
def test_init_quantity(self):
s3 = SphericalRepresentation(lon=8 * u.hourangle, lat=5 * u.deg, distance=10 * u.kpc)
assert s3.lon == 8. * u.hourangle
assert s3.lat == 5. * u.deg
assert s3.distance == 10 * u.kpc
assert isinstance(s3.lon, Longitude)
assert isinstance(s3.lat, Latitude)
assert isinstance(s3.distance, Distance)
def test_init_lonlat(self):
s2 = SphericalRepresentation(Longitude(8, u.hour),
Latitude(5, u.deg),
Distance(10, u.kpc))
assert s2.lon == 8. * u.hourangle
assert s2.lat == 5. * u.deg
assert s2.distance == 10. * u.kpc
assert isinstance(s2.lon, Longitude)
assert isinstance(s2.lat, Latitude)
assert isinstance(s2.distance, Distance)
# also test that wrap_angle is preserved
s3 = SphericalRepresentation(Longitude(-90, u.degree,
wrap_angle=180*u.degree),
Latitude(-45, u.degree),
Distance(1., u.Rsun))
assert s3.lon == -90. * u.degree
assert s3.lon.wrap_angle == 180 * u.degree
def test_init_array(self):
s1 = SphericalRepresentation(lon=[8, 9] * u.hourangle,
lat=[5, 6] * u.deg,
distance=[1, 2] * u.kpc)
assert_allclose(s1.lon.degree, [120, 135])
assert_allclose(s1.lat.degree, [5, 6])
assert_allclose(s1.distance.kpc, [1, 2])
assert isinstance(s1.lon, Longitude)
assert isinstance(s1.lat, Latitude)
assert isinstance(s1.distance, Distance)
def test_init_array_nocopy(self):
lon = Longitude([8, 9] * u.hourangle)
lat = Latitude([5, 6] * u.deg)
distance = Distance([1, 2] * u.kpc)
s1 = SphericalRepresentation(lon=lon, lat=lat, distance=distance, copy=False)
lon[:] = [1, 2] * u.rad
lat[:] = [3, 4] * u.arcmin
distance[:] = [8, 9] * u.Mpc
assert_allclose_quantity(lon, s1.lon)
assert_allclose_quantity(lat, s1.lat)
assert_allclose_quantity(distance, s1.distance)
def test_init_float32_array(self):
"""Regression test against #2983"""
lon = Longitude(np.float32([1., 2.]), u.degree)
lat = Latitude(np.float32([3., 4.]), u.degree)
s1 = UnitSphericalRepresentation(lon=lon, lat=lat, copy=False)
assert s1.lon.dtype == np.float32
assert s1.lat.dtype == np.float32
assert s1._values['lon'].dtype == np.float32
assert s1._values['lat'].dtype == np.float32
def test_reprobj(self):
s1 = SphericalRepresentation(lon=8 * u.hourangle, lat=5 * u.deg, distance=10 * u.kpc)
s2 = SphericalRepresentation.from_representation(s1)
assert_allclose_quantity(s2.lon, 8. * u.hourangle)
assert_allclose_quantity(s2.lat, 5. * u.deg)
assert_allclose_quantity(s2.distance, 10 * u.kpc)
def test_broadcasting(self):
s1 = SphericalRepresentation(lon=[8, 9] * u.hourangle,
lat=[5, 6] * u.deg,
distance=10 * u.kpc)
assert_allclose_quantity(s1.lon, [120, 135] * u.degree)
assert_allclose_quantity(s1.lat, [5, 6] * u.degree)
assert_allclose_quantity(s1.distance, [10, 10] * u.kpc)
def test_broadcasting_mismatch(self):
with pytest.raises(ValueError) as exc:
s1 = SphericalRepresentation(lon=[8, 9, 10] * u.hourangle,
lat=[5, 6] * u.deg,
distance=[1, 2] * u.kpc)
assert exc.value.args[0] == "Input parameters lon, lat, and distance cannot be broadcast"
def test_readonly(self):
s1 = SphericalRepresentation(lon=8 * u.hourangle,
lat=5 * u.deg,
distance=1. * u.kpc)
with pytest.raises(AttributeError):
s1.lon = 1. * u.deg
with pytest.raises(AttributeError):
s1.lat = 1. * u.deg
with pytest.raises(AttributeError):
s1.distance = 1. * u.kpc
def test_getitem_len_iterable(self):
s = SphericalRepresentation(lon=np.arange(10) * u.deg,
lat=-np.arange(10) * u.deg,
distance=1 * u.kpc)
s_slc = s[2:8:2]
assert_allclose_quantity(s_slc.lon, [2, 4, 6] * u.deg)
assert_allclose_quantity(s_slc.lat, [-2, -4, -6] * u.deg)
assert_allclose_quantity(s_slc.distance, [1, 1, 1] * u.kpc)
assert len(s) == 10
assert isiterable(s)
def test_getitem_len_iterable_scalar(self):
s = SphericalRepresentation(lon=1 * u.deg,
lat=-2 * u.deg,
distance=3 * u.kpc)
with pytest.raises(TypeError):
s_slc = s[0]
with pytest.raises(TypeError):
len(s)
assert not isiterable(s)
def test_nan_distance(self):
""" This is a regression test: calling represent_as() and passing in the
same class as the object shouldn't round-trip through cartesian.
"""
sph = SphericalRepresentation(1*u.deg, 2*u.deg, np.nan*u.kpc)
new_sph = sph.represent_as(SphericalRepresentation)
assert_allclose_quantity(new_sph.lon, sph.lon)
assert_allclose_quantity(new_sph.lat, sph.lat)
dif = SphericalCosLatDifferential(1*u.mas/u.yr, 2*u.mas/u.yr,
3*u.km/u.s)
sph = sph.with_differentials(dif)
new_sph = sph.represent_as(SphericalRepresentation)
assert_allclose_quantity(new_sph.lon, sph.lon)
assert_allclose_quantity(new_sph.lat, sph.lat)
class TestUnitSphericalRepresentation:
def test_name(self):
assert UnitSphericalRepresentation.get_name() == 'unitspherical'
assert UnitSphericalRepresentation.get_name() in REPRESENTATION_CLASSES
def test_empty_init(self):
with pytest.raises(TypeError) as exc:
s = UnitSphericalRepresentation()
def test_init_quantity(self):
s3 = UnitSphericalRepresentation(lon=8 * u.hourangle, lat=5 * u.deg)
assert s3.lon == 8. * u.hourangle
assert s3.lat == 5. * u.deg
assert isinstance(s3.lon, Longitude)
assert isinstance(s3.lat, Latitude)
def test_init_lonlat(self):
s2 = UnitSphericalRepresentation(Longitude(8, u.hour),
Latitude(5, u.deg))
assert s2.lon == 8. * u.hourangle
assert s2.lat == 5. * u.deg
assert isinstance(s2.lon, Longitude)
assert isinstance(s2.lat, Latitude)
def test_init_array(self):
s1 = UnitSphericalRepresentation(lon=[8, 9] * u.hourangle,
lat=[5, 6] * u.deg)
assert_allclose(s1.lon.degree, [120, 135])
assert_allclose(s1.lat.degree, [5, 6])
assert isinstance(s1.lon, Longitude)
assert isinstance(s1.lat, Latitude)
def test_init_array_nocopy(self):
lon = Longitude([8, 9] * u.hourangle)
lat = Latitude([5, 6] * u.deg)
s1 = UnitSphericalRepresentation(lon=lon, lat=lat, copy=False)
lon[:] = [1, 2] * u.rad
lat[:] = [3, 4] * u.arcmin
assert_allclose_quantity(lon, s1.lon)
assert_allclose_quantity(lat, s1.lat)
def test_reprobj(self):
s1 = UnitSphericalRepresentation(lon=8 * u.hourangle, lat=5 * u.deg)
s2 = UnitSphericalRepresentation.from_representation(s1)
assert_allclose_quantity(s2.lon, 8. * u.hourangle)
assert_allclose_quantity(s2.lat, 5. * u.deg)
def test_broadcasting(self):
s1 = UnitSphericalRepresentation(lon=[8, 9] * u.hourangle,
lat=[5, 6] * u.deg)
assert_allclose_quantity(s1.lon, [120, 135] * u.degree)
assert_allclose_quantity(s1.lat, [5, 6] * u.degree)
def test_broadcasting_mismatch(self):
with pytest.raises(ValueError) as exc:
s1 = UnitSphericalRepresentation(lon=[8, 9, 10] * u.hourangle,
lat=[5, 6] * u.deg)
assert exc.value.args[0] == "Input parameters lon and lat cannot be broadcast"
def test_readonly(self):
s1 = UnitSphericalRepresentation(lon=8 * u.hourangle,
lat=5 * u.deg)
with pytest.raises(AttributeError):
s1.lon = 1. * u.deg
with pytest.raises(AttributeError):
s1.lat = 1. * u.deg
def test_getitem(self):
s = UnitSphericalRepresentation(lon=np.arange(10) * u.deg,
lat=-np.arange(10) * u.deg)
s_slc = s[2:8:2]
assert_allclose_quantity(s_slc.lon, [2, 4, 6] * u.deg)
assert_allclose_quantity(s_slc.lat, [-2, -4, -6] * u.deg)
def test_getitem_scalar(self):
s = UnitSphericalRepresentation(lon=1 * u.deg,
lat=-2 * u.deg)
with pytest.raises(TypeError):
s_slc = s[0]
class TestPhysicsSphericalRepresentation:
def test_name(self):
assert PhysicsSphericalRepresentation.get_name() == 'physicsspherical'
assert PhysicsSphericalRepresentation.get_name() in REPRESENTATION_CLASSES
def test_empty_init(self):
with pytest.raises(TypeError) as exc:
s = PhysicsSphericalRepresentation()
def test_init_quantity(self):
s3 = PhysicsSphericalRepresentation(phi=8 * u.hourangle, theta=5 * u.deg, r=10 * u.kpc)
assert s3.phi == 8. * u.hourangle
assert s3.theta == 5. * u.deg
assert s3.r == 10 * u.kpc
assert isinstance(s3.phi, Angle)
assert isinstance(s3.theta, Angle)
assert isinstance(s3.r, Distance)
def test_init_phitheta(self):
s2 = PhysicsSphericalRepresentation(Angle(8, u.hour),
Angle(5, u.deg),
Distance(10, u.kpc))
assert s2.phi == 8. * u.hourangle
assert s2.theta == 5. * u.deg
assert s2.r == 10. * u.kpc
assert isinstance(s2.phi, Angle)
assert isinstance(s2.theta, Angle)
assert isinstance(s2.r, Distance)
def test_init_array(self):
s1 = PhysicsSphericalRepresentation(phi=[8, 9] * u.hourangle,
theta=[5, 6] * u.deg,
r=[1, 2] * u.kpc)
assert_allclose(s1.phi.degree, [120, 135])
assert_allclose(s1.theta.degree, [5, 6])
assert_allclose(s1.r.kpc, [1, 2])
assert isinstance(s1.phi, Angle)
assert isinstance(s1.theta, Angle)
assert isinstance(s1.r, Distance)
def test_init_array_nocopy(self):
phi = Angle([8, 9] * u.hourangle)
theta = Angle([5, 6] * u.deg)
r = Distance([1, 2] * u.kpc)
s1 = PhysicsSphericalRepresentation(phi=phi, theta=theta, r=r, copy=False)
phi[:] = [1, 2] * u.rad
theta[:] = [3, 4] * u.arcmin
r[:] = [8, 9] * u.Mpc
assert_allclose_quantity(phi, s1.phi)
assert_allclose_quantity(theta, s1.theta)
assert_allclose_quantity(r, s1.r)
def test_reprobj(self):
s1 = PhysicsSphericalRepresentation(phi=8 * u.hourangle, theta=5 * u.deg, r=10 * u.kpc)
s2 = PhysicsSphericalRepresentation.from_representation(s1)
assert_allclose_quantity(s2.phi, 8. * u.hourangle)
assert_allclose_quantity(s2.theta, 5. * u.deg)
assert_allclose_quantity(s2.r, 10 * u.kpc)
def test_broadcasting(self):
s1 = PhysicsSphericalRepresentation(phi=[8, 9] * u.hourangle,
theta=[5, 6] * u.deg,
r=10 * u.kpc)
assert_allclose_quantity(s1.phi, [120, 135] * u.degree)
assert_allclose_quantity(s1.theta, [5, 6] * u.degree)
assert_allclose_quantity(s1.r, [10, 10] * u.kpc)
def test_broadcasting_mismatch(self):
with pytest.raises(ValueError) as exc:
s1 = PhysicsSphericalRepresentation(phi=[8, 9, 10] * u.hourangle,
theta=[5, 6] * u.deg,
r=[1, 2] * u.kpc)
assert exc.value.args[0] == "Input parameters phi, theta, and r cannot be broadcast"
def test_readonly(self):
s1 = PhysicsSphericalRepresentation(phi=[8, 9] * u.hourangle,
theta=[5, 6] * u.deg,
r=[10, 20] * u.kpc)
with pytest.raises(AttributeError):
s1.phi = 1. * u.deg
with pytest.raises(AttributeError):
s1.theta = 1. * u.deg
with pytest.raises(AttributeError):
s1.r = 1. * u.kpc
def test_getitem(self):
s = PhysicsSphericalRepresentation(phi=np.arange(10) * u.deg,
theta=np.arange(5, 15) * u.deg,
r=1 * u.kpc)
s_slc = s[2:8:2]
assert_allclose_quantity(s_slc.phi, [2, 4, 6] * u.deg)
assert_allclose_quantity(s_slc.theta, [7, 9, 11] * u.deg)
assert_allclose_quantity(s_slc.r, [1, 1, 1] * u.kpc)
def test_getitem_scalar(self):
s = PhysicsSphericalRepresentation(phi=1 * u.deg,
theta=2 * u.deg,
r=3 * u.kpc)
with pytest.raises(TypeError):
s_slc = s[0]
class TestCartesianRepresentation:
def test_name(self):
assert CartesianRepresentation.get_name() == 'cartesian'
assert CartesianRepresentation.get_name() in REPRESENTATION_CLASSES
def test_empty_init(self):
with pytest.raises(TypeError) as exc:
s = CartesianRepresentation()
def test_init_quantity(self):
s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc)
assert s1.x.unit is u.kpc
assert s1.y.unit is u.kpc
assert s1.z.unit is u.kpc
assert_allclose(s1.x.value, 1)
assert_allclose(s1.y.value, 2)
assert_allclose(s1.z.value, 3)
def test_init_singleunit(self):
s1 = CartesianRepresentation(x=1, y=2, z=3, unit=u.kpc)
assert s1.x.unit is u.kpc
assert s1.y.unit is u.kpc
assert s1.z.unit is u.kpc
assert_allclose(s1.x.value, 1)
assert_allclose(s1.y.value, 2)
assert_allclose(s1.z.value, 3)
def test_init_array(self):
s1 = CartesianRepresentation(x=[1, 2, 3] * u.pc,
y=[2, 3, 4] * u.Mpc,
z=[3, 4, 5] * u.kpc)
assert s1.x.unit is u.pc
assert s1.y.unit is u.Mpc
assert s1.z.unit is u.kpc
assert_allclose(s1.x.value, [1, 2, 3])
assert_allclose(s1.y.value, [2, 3, 4])
assert_allclose(s1.z.value, [3, 4, 5])
def test_init_one_array(self):
s1 = CartesianRepresentation(x=[1, 2, 3] * u.pc)
assert s1.x.unit is u.pc
assert s1.y.unit is u.pc
assert s1.z.unit is u.pc
assert_allclose(s1.x.value, 1)
assert_allclose(s1.y.value, 2)
assert_allclose(s1.z.value, 3)
r = np.arange(27.).reshape(3, 3, 3) * u.kpc
s2 = CartesianRepresentation(r, xyz_axis=0)
assert s2.shape == (3, 3)
assert s2.x.unit == u.kpc
assert np.all(s2.x == r[0])
assert np.all(s2.xyz == r)
assert np.all(s2.get_xyz(xyz_axis=0) == r)
s3 = CartesianRepresentation(r, xyz_axis=1)
assert s3.shape == (3, 3)
assert np.all(s3.x == r[:, 0])
assert np.all(s3.y == r[:, 1])
assert np.all(s3.z == r[:, 2])
assert np.all(s3.get_xyz(xyz_axis=1) == r)
s4 = CartesianRepresentation(r, xyz_axis=2)
assert s4.shape == (3, 3)
assert np.all(s4.x == r[:, :, 0])
assert np.all(s4.get_xyz(xyz_axis=2) == r)
s5 = CartesianRepresentation(r, unit=u.pc)
assert s5.x.unit == u.pc
assert np.all(s5.xyz == r)
s6 = CartesianRepresentation(r.value, unit=u.pc, xyz_axis=2)
assert s6.x.unit == u.pc
assert np.all(s6.get_xyz(xyz_axis=2).value == r.value)
def test_init_one_array_size_fail(self):
with pytest.raises(ValueError) as exc:
CartesianRepresentation(x=[1, 2, 3, 4] * u.pc)
assert exc.value.args[0].startswith("too many values to unpack")
def test_init_xyz_but_more_than_one_array_fail(self):
with pytest.raises(ValueError) as exc:
CartesianRepresentation(x=[1, 2, 3] * u.pc, y=[2, 3, 4] * u.pc,
z=[3, 4, 5] * u.pc, xyz_axis=0)
        assert 'xyz_axis should only be set' in str(exc.value)
def test_init_one_array_yz_fail(self):
with pytest.raises(ValueError) as exc:
CartesianRepresentation(x=[1, 2, 3, 4] * u.pc, y=[1, 2] * u.pc)
assert exc.value.args[0] == ("x, y, and z are required to instantiate "
"CartesianRepresentation")
def test_init_array_nocopy(self):
x = [8, 9, 10] * u.pc
y = [5, 6, 7] * u.Mpc
z = [2, 3, 4] * u.kpc
s1 = CartesianRepresentation(x=x, y=y, z=z, copy=False)
x[:] = [1, 2, 3] * u.kpc
y[:] = [9, 9, 8] * u.kpc
z[:] = [1, 2, 1] * u.kpc
assert_allclose_quantity(x, s1.x)
assert_allclose_quantity(y, s1.y)
assert_allclose_quantity(z, s1.z)
def test_xyz_is_view_if_possible(self):
xyz = np.arange(1., 10.).reshape(3, 3)
s1 = CartesianRepresentation(xyz, unit=u.kpc, copy=False)
s1_xyz = s1.xyz
assert s1_xyz.value[0, 0] == 1.
xyz[0, 0] = 0.
assert s1.x[0] == 0.
assert s1_xyz.value[0, 0] == 0.
# Not possible: we don't check that tuples are from the same array
xyz = np.arange(1., 10.).reshape(3, 3)
s2 = CartesianRepresentation(*xyz, unit=u.kpc, copy=False)
s2_xyz = s2.xyz
assert s2_xyz.value[0, 0] == 1.
xyz[0, 0] = 0.
assert s2.x[0] == 0.
assert s2_xyz.value[0, 0] == 1.
def test_reprobj(self):
s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc)
s2 = CartesianRepresentation.from_representation(s1)
assert s2.x == 1 * u.kpc
assert s2.y == 2 * u.kpc
assert s2.z == 3 * u.kpc
def test_broadcasting(self):
s1 = CartesianRepresentation(x=[1, 2] * u.kpc, y=[3, 4] * u.kpc, z=5 * u.kpc)
assert s1.x.unit == u.kpc
assert s1.y.unit == u.kpc
assert s1.z.unit == u.kpc
assert_allclose(s1.x.value, [1, 2])
assert_allclose(s1.y.value, [3, 4])
assert_allclose(s1.z.value, [5, 5])
def test_broadcasting_mismatch(self):
with pytest.raises(ValueError) as exc:
s1 = CartesianRepresentation(x=[1, 2] * u.kpc, y=[3, 4] * u.kpc, z=[5, 6, 7] * u.kpc)
assert exc.value.args[0] == "Input parameters x, y, and z cannot be broadcast"
def test_readonly(self):
s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc)
with pytest.raises(AttributeError):
s1.x = 1. * u.kpc
with pytest.raises(AttributeError):
s1.y = 1. * u.kpc
with pytest.raises(AttributeError):
s1.z = 1. * u.kpc
def test_xyz(self):
s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc)
assert isinstance(s1.xyz, u.Quantity)
assert s1.xyz.unit is u.kpc
assert_allclose(s1.xyz.value, [1, 2, 3])
def test_unit_mismatch(self):
q_len = u.Quantity([1], u.km)
q_nonlen = u.Quantity([1], u.kg)
with pytest.raises(u.UnitsError) as exc:
s1 = CartesianRepresentation(x=q_nonlen, y=q_len, z=q_len)
assert exc.value.args[0] == "x, y, and z should have matching physical types"
with pytest.raises(u.UnitsError) as exc:
s1 = CartesianRepresentation(x=q_len, y=q_nonlen, z=q_len)
assert exc.value.args[0] == "x, y, and z should have matching physical types"
with pytest.raises(u.UnitsError) as exc:
s1 = CartesianRepresentation(x=q_len, y=q_len, z=q_nonlen)
assert exc.value.args[0] == "x, y, and z should have matching physical types"
def test_unit_non_length(self):
s1 = CartesianRepresentation(x=1 * u.kg, y=2 * u.kg, z=3 * u.kg)
s2 = CartesianRepresentation(x=1 * u.km / u.s, y=2 * u.km / u.s, z=3 * u.km / u.s)
banana = u.def_unit('banana')
s3 = CartesianRepresentation(x=1 * banana, y=2 * banana, z=3 * banana)
def test_getitem(self):
s = CartesianRepresentation(x=np.arange(10) * u.m,
y=-np.arange(10) * u.m,
z=3 * u.km)
s_slc = s[2:8:2]
assert_allclose_quantity(s_slc.x, [2, 4, 6] * u.m)
assert_allclose_quantity(s_slc.y, [-2, -4, -6] * u.m)
assert_allclose_quantity(s_slc.z, [3, 3, 3] * u.km)
def test_getitem_scalar(self):
s = CartesianRepresentation(x=1 * u.m,
y=-2 * u.m,
z=3 * u.km)
with pytest.raises(TypeError):
s_slc = s[0]
def test_transform(self):
s1 = CartesianRepresentation(x=[1, 2] * u.kpc, y=[3, 4] * u.kpc, z=[5, 6] * u.kpc)
matrix = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
s2 = s1.transform(matrix)
assert_allclose(s2.x.value, [1 * 1 + 2 * 3 + 3 * 5, 1 * 2 + 2 * 4 + 3 * 6])
assert_allclose(s2.y.value, [4 * 1 + 5 * 3 + 6 * 5, 4 * 2 + 5 * 4 + 6 * 6])
assert_allclose(s2.z.value, [7 * 1 + 8 * 3 + 9 * 5, 7 * 2 + 8 * 4 + 9 * 6])
assert s2.x.unit is u.kpc
assert s2.y.unit is u.kpc
assert s2.z.unit is u.kpc
class TestCylindricalRepresentation:
def test_name(self):
assert CylindricalRepresentation.get_name() == 'cylindrical'
assert CylindricalRepresentation.get_name() in REPRESENTATION_CLASSES
def test_empty_init(self):
with pytest.raises(TypeError) as exc:
s = CylindricalRepresentation()
def test_init_quantity(self):
s1 = CylindricalRepresentation(rho=1 * u.kpc, phi=2 * u.deg, z=3 * u.kpc)
assert s1.rho.unit is u.kpc
assert s1.phi.unit is u.deg
assert s1.z.unit is u.kpc
assert_allclose(s1.rho.value, 1)
assert_allclose(s1.phi.value, 2)
assert_allclose(s1.z.value, 3)
def test_init_array(self):
s1 = CylindricalRepresentation(rho=[1, 2, 3] * u.pc,
phi=[2, 3, 4] * u.deg,
z=[3, 4, 5] * u.kpc)
assert s1.rho.unit is u.pc
assert s1.phi.unit is u.deg
assert s1.z.unit is u.kpc
assert_allclose(s1.rho.value, [1, 2, 3])
assert_allclose(s1.phi.value, [2, 3, 4])
assert_allclose(s1.z.value, [3, 4, 5])
def test_init_array_nocopy(self):
rho = [8, 9, 10] * u.pc
phi = [5, 6, 7] * u.deg
z = [2, 3, 4] * u.kpc
s1 = CylindricalRepresentation(rho=rho, phi=phi, z=z, copy=False)
rho[:] = [9, 2, 3] * u.kpc
phi[:] = [1, 2, 3] * u.arcmin
z[:] = [-2, 3, 8] * u.kpc
assert_allclose_quantity(rho, s1.rho)
assert_allclose_quantity(phi, s1.phi)
assert_allclose_quantity(z, s1.z)
def test_reprobj(self):
s1 = CylindricalRepresentation(rho=1 * u.kpc, phi=2 * u.deg, z=3 * u.kpc)
s2 = CylindricalRepresentation.from_representation(s1)
assert s2.rho == 1 * u.kpc
assert s2.phi == 2 * u.deg
assert s2.z == 3 * u.kpc
def test_broadcasting(self):
s1 = CylindricalRepresentation(rho=[1, 2] * u.kpc, phi=[3, 4] * u.deg, z=5 * u.kpc)
assert s1.rho.unit == u.kpc
assert s1.phi.unit == u.deg
assert s1.z.unit == u.kpc
assert_allclose(s1.rho.value, [1, 2])
assert_allclose(s1.phi.value, [3, 4])
assert_allclose(s1.z.value, [5, 5])
def test_broadcasting_mismatch(self):
with pytest.raises(ValueError) as exc:
s1 = CylindricalRepresentation(rho=[1, 2] * u.kpc, phi=[3, 4] * u.deg, z=[5, 6, 7] * u.kpc)
assert exc.value.args[0] == "Input parameters rho, phi, and z cannot be broadcast"
def test_readonly(self):
s1 = CylindricalRepresentation(rho=1 * u.kpc,
phi=20 * u.deg,
z=3 * u.kpc)
with pytest.raises(AttributeError):
s1.rho = 1. * u.kpc
with pytest.raises(AttributeError):
s1.phi = 20 * u.deg
with pytest.raises(AttributeError):
s1.z = 1. * u.kpc
    def test_unit_mismatch(self):  # renamed with the test_ prefix so pytest collects it
q_len = u.Quantity([1], u.kpc)
q_nonlen = u.Quantity([1], u.kg)
with pytest.raises(u.UnitsError) as exc:
s1 = CylindricalRepresentation(rho=q_nonlen, phi=10 * u.deg, z=q_len)
assert exc.value.args[0] == "rho and z should have matching physical types"
with pytest.raises(u.UnitsError) as exc:
s1 = CylindricalRepresentation(rho=q_len, phi=10 * u.deg, z=q_nonlen)
assert exc.value.args[0] == "rho and z should have matching physical types"
def test_getitem(self):
s = CylindricalRepresentation(rho=np.arange(10) * u.pc,
phi=-np.arange(10) * u.deg,
z=1 * u.kpc)
s_slc = s[2:8:2]
assert_allclose_quantity(s_slc.rho, [2, 4, 6] * u.pc)
assert_allclose_quantity(s_slc.phi, [-2, -4, -6] * u.deg)
assert_allclose_quantity(s_slc.z, [1, 1, 1] * u.kpc)
def test_getitem_scalar(self):
s = CylindricalRepresentation(rho=1 * u.pc,
phi=-2 * u.deg,
z=3 * u.kpc)
with pytest.raises(TypeError):
s_slc = s[0]
def test_cartesian_spherical_roundtrip():
s1 = CartesianRepresentation(x=[1, 2000.] * u.kpc,
y=[3000., 4.] * u.pc,
z=[5., 6000.] * u.pc)
s2 = SphericalRepresentation.from_representation(s1)
s3 = CartesianRepresentation.from_representation(s2)
s4 = SphericalRepresentation.from_representation(s3)
assert_allclose_quantity(s1.x, s3.x)
assert_allclose_quantity(s1.y, s3.y)
assert_allclose_quantity(s1.z, s3.z)
assert_allclose_quantity(s2.lon, s4.lon)
assert_allclose_quantity(s2.lat, s4.lat)
assert_allclose_quantity(s2.distance, s4.distance)
def test_cartesian_physics_spherical_roundtrip():
s1 = CartesianRepresentation(x=[1, 2000.] * u.kpc,
y=[3000., 4.] * u.pc,
z=[5., 6000.] * u.pc)
s2 = PhysicsSphericalRepresentation.from_representation(s1)
s3 = CartesianRepresentation.from_representation(s2)
s4 = PhysicsSphericalRepresentation.from_representation(s3)
assert_allclose_quantity(s1.x, s3.x)
assert_allclose_quantity(s1.y, s3.y)
assert_allclose_quantity(s1.z, s3.z)
assert_allclose_quantity(s2.phi, s4.phi)
assert_allclose_quantity(s2.theta, s4.theta)
assert_allclose_quantity(s2.r, s4.r)
def test_spherical_physics_spherical_roundtrip():
s1 = SphericalRepresentation(lon=3 * u.deg, lat=4 * u.deg, distance=3 * u.kpc)
s2 = PhysicsSphericalRepresentation.from_representation(s1)
s3 = SphericalRepresentation.from_representation(s2)
s4 = PhysicsSphericalRepresentation.from_representation(s3)
assert_allclose_quantity(s1.lon, s3.lon)
assert_allclose_quantity(s1.lat, s3.lat)
assert_allclose_quantity(s1.distance, s3.distance)
assert_allclose_quantity(s2.phi, s4.phi)
assert_allclose_quantity(s2.theta, s4.theta)
assert_allclose_quantity(s2.r, s4.r)
assert_allclose_quantity(s1.lon, s4.phi)
assert_allclose_quantity(s1.lat, 90. * u.deg - s4.theta)
assert_allclose_quantity(s1.distance, s4.r)
def test_cartesian_cylindrical_roundtrip():
s1 = CartesianRepresentation(x=np.array([1., 2000.]) * u.kpc,
y=np.array([3000., 4.]) * u.pc,
z=np.array([5., 600.]) * u.cm)
s2 = CylindricalRepresentation.from_representation(s1)
s3 = CartesianRepresentation.from_representation(s2)
s4 = CylindricalRepresentation.from_representation(s3)
assert_allclose_quantity(s1.x, s3.x)
assert_allclose_quantity(s1.y, s3.y)
assert_allclose_quantity(s1.z, s3.z)
assert_allclose_quantity(s2.rho, s4.rho)
assert_allclose_quantity(s2.phi, s4.phi)
assert_allclose_quantity(s2.z, s4.z)
def test_unit_spherical_roundtrip():
s1 = UnitSphericalRepresentation(lon=[10., 30.] * u.deg,
lat=[5., 6.] * u.arcmin)
s2 = CartesianRepresentation.from_representation(s1)
s3 = SphericalRepresentation.from_representation(s2)
s4 = UnitSphericalRepresentation.from_representation(s3)
assert_allclose_quantity(s1.lon, s4.lon)
assert_allclose_quantity(s1.lat, s4.lat)
def test_no_unnecessary_copies():
s1 = UnitSphericalRepresentation(lon=[10., 30.] * u.deg,
lat=[5., 6.] * u.arcmin)
s2 = s1.represent_as(UnitSphericalRepresentation)
assert s2 is s1
assert np.may_share_memory(s1.lon, s2.lon)
assert np.may_share_memory(s1.lat, s2.lat)
s3 = s1.represent_as(SphericalRepresentation)
assert np.may_share_memory(s1.lon, s3.lon)
assert np.may_share_memory(s1.lat, s3.lat)
s4 = s1.represent_as(CartesianRepresentation)
s5 = s4.represent_as(CylindricalRepresentation)
assert np.may_share_memory(s5.z, s4.z)
def test_representation_repr():
r1 = SphericalRepresentation(lon=1 * u.deg, lat=2.5 * u.deg, distance=1 * u.kpc)
assert repr(r1) == ('<SphericalRepresentation (lon, lat, distance) in (deg, deg, kpc)\n'
' ({})>').format(' 1., 2.5, 1.' if NUMPY_LT_1_14
else '1., 2.5, 1.')
r2 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc)
assert repr(r2) == ('<CartesianRepresentation (x, y, z) in kpc\n'
' ({})>').format(' 1., 2., 3.' if NUMPY_LT_1_14
else '1., 2., 3.')
r3 = CartesianRepresentation(x=[1, 2, 3] * u.kpc, y=4 * u.kpc, z=[9, 10, 11] * u.kpc)
if NUMPY_LT_1_14:
assert repr(r3) == ('<CartesianRepresentation (x, y, z) in kpc\n'
' [( 1., 4., 9.), ( 2., 4., 10.), ( 3., 4., 11.)]>')
else:
assert repr(r3) == ('<CartesianRepresentation (x, y, z) in kpc\n'
' [(1., 4., 9.), (2., 4., 10.), (3., 4., 11.)]>')
def test_representation_repr_multi_d():
"""Regression test for #5889."""
cr = CartesianRepresentation(np.arange(27).reshape(3, 3, 3), unit='m')
if NUMPY_LT_1_14:
assert repr(cr) == (
'<CartesianRepresentation (x, y, z) in m\n'
' [[( 0., 9., 18.), ( 1., 10., 19.), ( 2., 11., 20.)],\n'
' [( 3., 12., 21.), ( 4., 13., 22.), ( 5., 14., 23.)],\n'
' [( 6., 15., 24.), ( 7., 16., 25.), ( 8., 17., 26.)]]>')
else:
assert repr(cr) == (
'<CartesianRepresentation (x, y, z) in m\n'
' [[(0., 9., 18.), (1., 10., 19.), (2., 11., 20.)],\n'
' [(3., 12., 21.), (4., 13., 22.), (5., 14., 23.)],\n'
' [(6., 15., 24.), (7., 16., 25.), (8., 17., 26.)]]>')
# This was broken before.
if NUMPY_LT_1_14:
assert repr(cr.T) == (
'<CartesianRepresentation (x, y, z) in m\n'
' [[( 0., 9., 18.), ( 3., 12., 21.), ( 6., 15., 24.)],\n'
' [( 1., 10., 19.), ( 4., 13., 22.), ( 7., 16., 25.)],\n'
' [( 2., 11., 20.), ( 5., 14., 23.), ( 8., 17., 26.)]]>')
else:
assert repr(cr.T) == (
'<CartesianRepresentation (x, y, z) in m\n'
' [[(0., 9., 18.), (3., 12., 21.), (6., 15., 24.)],\n'
' [(1., 10., 19.), (4., 13., 22.), (7., 16., 25.)],\n'
' [(2., 11., 20.), (5., 14., 23.), (8., 17., 26.)]]>')
def test_representation_str():
r1 = SphericalRepresentation(lon=1 * u.deg, lat=2.5 * u.deg, distance=1 * u.kpc)
assert str(r1) == ('( 1., 2.5, 1.) (deg, deg, kpc)' if NUMPY_LT_1_14 else
'(1., 2.5, 1.) (deg, deg, kpc)')
r2 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc)
assert str(r2) == ('( 1., 2., 3.) kpc' if NUMPY_LT_1_14 else
'(1., 2., 3.) kpc')
r3 = CartesianRepresentation(x=[1, 2, 3] * u.kpc, y=4 * u.kpc, z=[9, 10, 11] * u.kpc)
assert str(r3) == ('[( 1., 4., 9.), ( 2., 4., 10.), ( 3., 4., 11.)] kpc'
if NUMPY_LT_1_14 else
'[(1., 4., 9.), (2., 4., 10.), (3., 4., 11.)] kpc')
def test_representation_str_multi_d():
"""Regression test for #5889."""
cr = CartesianRepresentation(np.arange(27).reshape(3, 3, 3), unit='m')
if NUMPY_LT_1_14:
assert str(cr) == (
'[[( 0., 9., 18.), ( 1., 10., 19.), ( 2., 11., 20.)],\n'
' [( 3., 12., 21.), ( 4., 13., 22.), ( 5., 14., 23.)],\n'
' [( 6., 15., 24.), ( 7., 16., 25.), ( 8., 17., 26.)]] m')
else:
assert str(cr) == (
'[[(0., 9., 18.), (1., 10., 19.), (2., 11., 20.)],\n'
' [(3., 12., 21.), (4., 13., 22.), (5., 14., 23.)],\n'
' [(6., 15., 24.), (7., 16., 25.), (8., 17., 26.)]] m')
# This was broken before.
if NUMPY_LT_1_14:
assert str(cr.T) == (
'[[( 0., 9., 18.), ( 3., 12., 21.), ( 6., 15., 24.)],\n'
' [( 1., 10., 19.), ( 4., 13., 22.), ( 7., 16., 25.)],\n'
' [( 2., 11., 20.), ( 5., 14., 23.), ( 8., 17., 26.)]] m')
else:
assert str(cr.T) == (
'[[(0., 9., 18.), (3., 12., 21.), (6., 15., 24.)],\n'
' [(1., 10., 19.), (4., 13., 22.), (7., 16., 25.)],\n'
' [(2., 11., 20.), (5., 14., 23.), (8., 17., 26.)]] m')
@pytest.mark.remote_data
def test_subclass_representation():
from astropy.coordinates.builtin_frames import ICRS
class Longitude180(Longitude):
def __new__(cls, angle, unit=None, wrap_angle=180 * u.deg, **kwargs):
self = super().__new__(cls, angle, unit=unit, wrap_angle=wrap_angle,
**kwargs)
return self
class SphericalWrap180Representation(SphericalRepresentation):
attr_classes = OrderedDict([('lon', Longitude180),
('lat', Latitude),
('distance', u.Quantity)])
class ICRSWrap180(ICRS):
frame_specific_representation_info = ICRS._frame_specific_representation_info.copy()
frame_specific_representation_info[SphericalWrap180Representation] = \
frame_specific_representation_info[SphericalRepresentation]
default_representation = SphericalWrap180Representation
c = ICRSWrap180(ra=-1 * u.deg, dec=-2 * u.deg, distance=1 * u.m)
assert c.ra.value == -1
assert c.ra.unit is u.deg
assert c.dec.value == -2
assert c.dec.unit is u.deg
def test_minimal_subclass():
# Basically to check what we document works;
# see doc/coordinates/representations.rst
class LogDRepresentation(BaseRepresentation):
attr_classes = OrderedDict([('lon', Longitude),
('lat', Latitude),
('logd', u.Dex)])
def to_cartesian(self):
d = self.logd.physical
x = d * np.cos(self.lat) * np.cos(self.lon)
y = d * np.cos(self.lat) * np.sin(self.lon)
z = d * np.sin(self.lat)
return CartesianRepresentation(x=x, y=y, z=z, copy=False)
@classmethod
def from_cartesian(cls, cart):
s = np.hypot(cart.x, cart.y)
r = np.hypot(s, cart.z)
lon = np.arctan2(cart.y, cart.x)
lat = np.arctan2(cart.z, s)
return cls(lon=lon, lat=lat, logd=u.Dex(r), copy=False)
ld1 = LogDRepresentation(90.*u.deg, 0.*u.deg, 1.*u.dex(u.kpc))
ld2 = LogDRepresentation(lon=90.*u.deg, lat=0.*u.deg, logd=1.*u.dex(u.kpc))
assert np.all(ld1.lon == ld2.lon)
assert np.all(ld1.lat == ld2.lat)
assert np.all(ld1.logd == ld2.logd)
c = ld1.to_cartesian()
assert_allclose_quantity(c.xyz, [0., 10., 0.] * u.kpc, atol=1.*u.npc)
ld3 = LogDRepresentation.from_cartesian(c)
assert np.all(ld3.lon == ld2.lon)
assert np.all(ld3.lat == ld2.lat)
assert np.all(ld3.logd == ld2.logd)
s = ld1.represent_as(SphericalRepresentation)
assert_allclose_quantity(s.lon, ld1.lon)
assert_allclose_quantity(s.distance, 10.*u.kpc)
assert_allclose_quantity(s.lat, ld1.lat)
with pytest.raises(TypeError):
LogDRepresentation(0.*u.deg, 1.*u.deg)
with pytest.raises(TypeError):
LogDRepresentation(0.*u.deg, 1.*u.deg, 1.*u.dex(u.kpc), lon=1.*u.deg)
with pytest.raises(TypeError):
LogDRepresentation(0.*u.deg, 1.*u.deg, 1.*u.dex(u.kpc), True, False)
with pytest.raises(TypeError):
LogDRepresentation(0.*u.deg, 1.*u.deg, 1.*u.dex(u.kpc), foo='bar')
with pytest.raises(ValueError):
# check we cannot redefine an existing class.
class LogDRepresentation(BaseRepresentation):
attr_classes = OrderedDict([('lon', Longitude),
('lat', Latitude),
('logr', u.Dex)])
def test_combine_xyz():
x, y, z = np.arange(27).reshape(3, 9) * u.kpc
xyz = _combine_xyz(x, y, z, xyz_axis=0)
assert xyz.shape == (3, 9)
assert np.all(xyz[0] == x)
assert np.all(xyz[1] == y)
assert np.all(xyz[2] == z)
x, y, z = np.arange(27).reshape(3, 3, 3) * u.kpc
xyz = _combine_xyz(x, y, z, xyz_axis=0)
assert xyz.ndim == 3
assert np.all(xyz[0] == x)
assert np.all(xyz[1] == y)
assert np.all(xyz[2] == z)
xyz = _combine_xyz(x, y, z, xyz_axis=1)
assert xyz.ndim == 3
assert np.all(xyz[:, 0] == x)
assert np.all(xyz[:, 1] == y)
assert np.all(xyz[:, 2] == z)
xyz = _combine_xyz(x, y, z, xyz_axis=-1)
assert xyz.ndim == 3
assert np.all(xyz[..., 0] == x)
assert np.all(xyz[..., 1] == y)
assert np.all(xyz[..., 2] == z)
class TestCartesianRepresentationWithDifferential:
def test_init_differential(self):
diff = CartesianDifferential(d_x=1 * u.km/u.s,
d_y=2 * u.km/u.s,
d_z=3 * u.km/u.s)
# Check that a single differential gets turned into a 1-item dict.
s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc,
differentials=diff)
assert s1.x.unit is u.kpc
assert s1.y.unit is u.kpc
assert s1.z.unit is u.kpc
assert len(s1.differentials) == 1
assert s1.differentials['s'] is diff
# can also pass in an explicit dictionary
s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc,
differentials={'s': diff})
assert len(s1.differentials) == 1
assert s1.differentials['s'] is diff
# using the wrong key will cause it to fail
with pytest.raises(ValueError):
s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc,
differentials={'1 / s2': diff})
# make sure other kwargs are handled properly
s1 = CartesianRepresentation(x=1, y=2, z=3,
differentials=diff, copy=False, unit=u.kpc)
assert len(s1.differentials) == 1
assert s1.differentials['s'] is diff
with pytest.raises(TypeError): # invalid type passed to differentials
CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc,
differentials='garmonbozia')
# make sure differentials can't accept differentials
with pytest.raises(TypeError):
CartesianDifferential(d_x=1 * u.km/u.s, d_y=2 * u.km/u.s,
d_z=3 * u.km/u.s, differentials=diff)
def test_init_differential_compatible(self):
# TODO: more extensive checking of this
# should fail - representation and differential not compatible
diff = SphericalDifferential(d_lon=1 * u.mas/u.yr,
d_lat=2 * u.mas/u.yr,
d_distance=3 * u.km/u.s)
with pytest.raises(TypeError):
CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc,
differentials=diff)
# should succeed - representation and differential are compatible
diff = SphericalCosLatDifferential(d_lon_coslat=1 * u.mas/u.yr,
d_lat=2 * u.mas/u.yr,
d_distance=3 * u.km/u.s)
r1 = SphericalRepresentation(lon=15*u.deg, lat=21*u.deg,
distance=1*u.pc,
differentials=diff)
def test_init_differential_multiple_equivalent_keys(self):
d1 = CartesianDifferential(*[1, 2, 3] * u.km/u.s)
d2 = CartesianDifferential(*[4, 5, 6] * u.km/u.s)
        # verify that the check against expected_unit rejects passing in two
        # different but equivalent differential keys
with pytest.raises(ValueError):
r1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc,
differentials={'s': d1, 'yr': d2})
def test_init_array_broadcasting(self):
arr1 = np.arange(8).reshape(4, 2) * u.km/u.s
diff = CartesianDifferential(d_x=arr1, d_y=arr1, d_z=arr1)
# shapes aren't compatible
arr2 = np.arange(27).reshape(3, 9) * u.kpc
with pytest.raises(ValueError):
rep = CartesianRepresentation(x=arr2, y=arr2, z=arr2,
differentials=diff)
arr2 = np.arange(8).reshape(4, 2) * u.kpc
rep = CartesianRepresentation(x=arr2, y=arr2, z=arr2,
differentials=diff)
assert rep.x.unit is u.kpc
assert rep.y.unit is u.kpc
assert rep.z.unit is u.kpc
assert len(rep.differentials) == 1
assert rep.differentials['s'] is diff
assert rep.xyz.shape == rep.differentials['s'].d_xyz.shape
def test_reprobj(self):
# should succeed - representation and differential are compatible
diff = SphericalCosLatDifferential(d_lon_coslat=1 * u.mas/u.yr,
d_lat=2 * u.mas/u.yr,
d_distance=3 * u.km/u.s)
r1 = SphericalRepresentation(lon=15*u.deg, lat=21*u.deg,
distance=1*u.pc,
differentials=diff)
r2 = CartesianRepresentation.from_representation(r1)
assert r2.get_name() == 'cartesian'
assert not r2.differentials
def test_readonly(self):
s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc)
with pytest.raises(AttributeError): # attribute is not settable
s1.differentials = 'thing'
def test_represent_as(self):
diff = CartesianDifferential(d_x=1 * u.km/u.s,
d_y=2 * u.km/u.s,
d_z=3 * u.km/u.s)
rep1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc,
differentials=diff)
# Only change the representation, drop the differential
new_rep = rep1.represent_as(SphericalRepresentation)
assert new_rep.get_name() == 'spherical'
assert not new_rep.differentials # dropped
# Pass in separate classes for representation, differential
new_rep = rep1.represent_as(SphericalRepresentation,
SphericalCosLatDifferential)
assert new_rep.get_name() == 'spherical'
assert new_rep.differentials['s'].get_name() == 'sphericalcoslat'
# Pass in a dictionary for the differential classes
new_rep = rep1.represent_as(SphericalRepresentation,
{'s': SphericalCosLatDifferential})
assert new_rep.get_name() == 'spherical'
assert new_rep.differentials['s'].get_name() == 'sphericalcoslat'
# make sure represent_as() passes through the differentials
for name in REPRESENTATION_CLASSES:
if name == 'radial':
# TODO: Converting a CartesianDifferential to a
# RadialDifferential fails, even on `master`
continue
new_rep = rep1.represent_as(REPRESENTATION_CLASSES[name],
DIFFERENTIAL_CLASSES[name])
assert new_rep.get_name() == name
assert len(new_rep.differentials) == 1
assert new_rep.differentials['s'].get_name() == name
with pytest.raises(ValueError) as excinfo:
rep1.represent_as('name')
assert 'use frame object' in str(excinfo.value)
def test_getitem(self):
d = CartesianDifferential(d_x=np.arange(10) * u.m/u.s,
d_y=-np.arange(10) * u.m/u.s,
d_z=1. * u.m/u.s)
s = CartesianRepresentation(x=np.arange(10) * u.m,
y=-np.arange(10) * u.m,
z=3 * u.km,
differentials=d)
s_slc = s[2:8:2]
s_dif = s_slc.differentials['s']
assert_allclose_quantity(s_slc.x, [2, 4, 6] * u.m)
assert_allclose_quantity(s_slc.y, [-2, -4, -6] * u.m)
assert_allclose_quantity(s_slc.z, [3, 3, 3] * u.km)
assert_allclose_quantity(s_dif.d_x, [2, 4, 6] * u.m/u.s)
assert_allclose_quantity(s_dif.d_y, [-2, -4, -6] * u.m/u.s)
assert_allclose_quantity(s_dif.d_z, [1, 1, 1] * u.m/u.s)
def test_transform(self):
d1 = CartesianDifferential(d_x=[1, 2] * u.km/u.s,
d_y=[3, 4] * u.km/u.s,
d_z=[5, 6] * u.km/u.s)
r1 = CartesianRepresentation(x=[1, 2] * u.kpc,
y=[3, 4] * u.kpc,
z=[5, 6] * u.kpc,
differentials=d1)
matrix = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
r2 = r1.transform(matrix)
d2 = r2.differentials['s']
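        # Editor-added note: the expected values are matrix @ [d_x, d_y, d_z],
        # e.g. for the first point d_x' = 1*1 + 2*3 + 3*5 = 22.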
assert_allclose_quantity(d2.d_x, [22., 28]*u.km/u.s)
assert_allclose_quantity(d2.d_y, [49, 64]*u.km/u.s)
assert_allclose_quantity(d2.d_z, [76, 100.]*u.km/u.s)
def test_with_differentials(self):
        # make sure with_differentials correctly creates a new copy with the
        # same differential
cr = CartesianRepresentation([1, 2, 3]*u.kpc)
diff = CartesianDifferential([.1, .2, .3]*u.km/u.s)
cr2 = cr.with_differentials(diff)
assert cr.differentials != cr2.differentials
assert cr2.differentials['s'] is diff
# make sure it works even if a differential is present already
diff2 = CartesianDifferential([.1, .2, .3]*u.m/u.s)
cr3 = CartesianRepresentation([1, 2, 3]*u.kpc, differentials=diff)
cr4 = cr3.with_differentials(diff2)
assert cr4.differentials['s'] != cr3.differentials['s']
assert cr4.differentials['s'] == diff2
        # also ensure a *scalar* differential works
cr5 = cr.with_differentials(diff)
assert len(cr5.differentials) == 1
assert cr5.differentials['s'] == diff
# make sure we don't update the original representation's dict
d1 = CartesianDifferential(*np.random.random((3, 5)), unit=u.km/u.s)
d2 = CartesianDifferential(*np.random.random((3, 5)), unit=u.km/u.s**2)
r1 = CartesianRepresentation(*np.random.random((3, 5)), unit=u.pc,
differentials=d1)
r2 = r1.with_differentials(d2)
assert r1.differentials['s'] is r2.differentials['s']
assert 's2' not in r1.differentials
assert 's2' in r2.differentials
def test_repr_with_differentials():
diff = CartesianDifferential([.1, .2, .3]*u.km/u.s)
cr = CartesianRepresentation([1, 2, 3]*u.kpc, differentials=diff)
assert "has differentials w.r.t.: 's'" in repr(cr)
def test_to_cartesian():
"""
Test that to_cartesian drops the differential.
"""
sd = SphericalDifferential(d_lat=1*u.deg, d_lon=2*u.deg, d_distance=10*u.m)
sr = SphericalRepresentation(lat=1*u.deg, lon=2*u.deg, distance=10*u.m,
differentials=sd)
cart = sr.to_cartesian()
assert cart.get_name() == 'cartesian'
assert not cart.differentials
def test_recommended_units_deprecation():
sr = SphericalRepresentation(lat=1*u.deg, lon=2*u.deg, distance=10*u.m)
with catch_warnings(AstropyDeprecationWarning) as w:
sr.recommended_units
assert 'recommended_units' in str(w[0].message)
with catch_warnings(AstropyDeprecationWarning) as w:
class MyClass(SphericalRepresentation):
attr_classes = SphericalRepresentation.attr_classes
recommended_units = {}
assert 'recommended_units' in str(w[0].message)
@pytest.fixture
def unitphysics():
"""
This fixture is used
"""
had_unit = False
if hasattr(PhysicsSphericalRepresentation, '_unit_representation'):
orig = PhysicsSphericalRepresentation._unit_representation
had_unit = True
class UnitPhysicsSphericalRepresentation(BaseRepresentation):
attr_classes = OrderedDict([('phi', Angle),
('theta', Angle)])
def __init__(self, phi, theta, differentials=None, copy=True):
super().__init__(phi, theta, copy=copy, differentials=differentials)
# Wrap/validate phi/theta
if copy:
self._phi = self._phi.wrap_at(360 * u.deg)
else:
# necessary because the above version of `wrap_at` has to be a copy
self._phi.wrap_at(360 * u.deg, inplace=True)
if np.any(self._theta < 0.*u.deg) or np.any(self._theta > 180.*u.deg):
raise ValueError('Inclination angle(s) must be within '
'0 deg <= angle <= 180 deg, '
'got {0}'.format(theta.to(u.degree)))
@property
def phi(self):
return self._phi
@property
def theta(self):
return self._theta
def unit_vectors(self):
sinphi, cosphi = np.sin(self.phi), np.cos(self.phi)
sintheta, costheta = np.sin(self.theta), np.cos(self.theta)
return OrderedDict(
(('phi', CartesianRepresentation(-sinphi, cosphi, 0., copy=False)),
('theta', CartesianRepresentation(costheta*cosphi,
costheta*sinphi,
-sintheta, copy=False))))
def scale_factors(self):
sintheta = np.sin(self.theta)
l = np.broadcast_to(1.*u.one, self.shape, subok=True)
return OrderedDict((('phi', sintheta),
('theta', l)))
def to_cartesian(self):
x = np.sin(self.theta) * np.cos(self.phi)
y = np.sin(self.theta) * np.sin(self.phi)
z = np.cos(self.theta)
return CartesianRepresentation(x=x, y=y, z=z, copy=False)
@classmethod
def from_cartesian(cls, cart):
"""
Converts 3D rectangular cartesian coordinates to spherical polar
coordinates.
"""
s = np.hypot(cart.x, cart.y)
phi = np.arctan2(cart.y, cart.x)
theta = np.arctan2(s, cart.z)
return cls(phi=phi, theta=theta, copy=False)
def norm(self):
return u.Quantity(np.ones(self.shape), u.dimensionless_unscaled,
copy=False)
PhysicsSphericalRepresentation._unit_representation = UnitPhysicsSphericalRepresentation
yield UnitPhysicsSphericalRepresentation
if had_unit:
PhysicsSphericalRepresentation._unit_representation = orig
else:
del PhysicsSphericalRepresentation._unit_representation
# remove from the module-level representations, if present
REPRESENTATION_CLASSES.pop(UnitPhysicsSphericalRepresentation.get_name(), None)
def test_unitphysics(unitphysics):
obj = unitphysics(phi=0*u.deg, theta=10*u.deg)
objkw = unitphysics(phi=0*u.deg, theta=10*u.deg)
assert objkw.phi == obj.phi
assert objkw.theta == obj.theta
asphys = obj.represent_as(PhysicsSphericalRepresentation)
assert asphys.phi == obj.phi
assert asphys.theta == obj.theta
assert_allclose_quantity(asphys.r, 1*u.dimensionless_unscaled)
assph = obj.represent_as(SphericalRepresentation)
assert assph.lon == obj.phi
assert assph.lat == 80*u.deg
assert_allclose_quantity(assph.distance, 1*u.dimensionless_unscaled)
def test_distance_warning(recwarn):
SphericalRepresentation(1*u.deg, 2*u.deg, 1*u.kpc)
with pytest.raises(ValueError) as excinfo:
SphericalRepresentation(1*u.deg, 2*u.deg, -1*u.kpc)
assert 'Distance must be >= 0' in str(excinfo.value)
# second check is because the "originating" ValueError says the above,
# while the representation one includes the below
assert 'you must explicitly pass' in str(excinfo.value)
| 37.221349 | 103 | 0.558685 |
9bf7f3ef7516df730b96af06883e1cbd6e0a51c7 | 9,356 | py | Python | tests/sentry/search/django/tests.py | Munyola/sentry | ab8923b2801d7d72d6903e0d9180584817bb1b9a | ["BSD-3-Clause"] | 1 | 2017-10-18T19:40:14.000Z | 2017-10-18T19:40:14.000Z | tests/sentry/search/django/tests.py | Munyola/sentry | ab8923b2801d7d72d6903e0d9180584817bb1b9a | ["BSD-3-Clause"] | null | null | null | tests/sentry/search/django/tests.py | Munyola/sentry | ab8923b2801d7d72d6903e0d9180584817bb1b9a | ["BSD-3-Clause"] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from datetime import datetime, timedelta
from sentry import tagstore
from sentry.models import (
GroupAssignee, GroupBookmark, GroupStatus, GroupSubscription
)
from sentry.search.base import ANY
from sentry.search.django.backend import DjangoSearchBackend
from sentry.testutils import TestCase
class DjangoSearchBackendTest(TestCase):
def create_backend(self):
return DjangoSearchBackend()
def setUp(self):
self.backend = self.create_backend()
self.project1 = self.create_project(name='foo')
self.project2 = self.create_project(name='bar')
self.group1 = self.create_group(
project=self.project1,
checksum='a' * 32,
message='foo',
times_seen=5,
status=GroupStatus.UNRESOLVED,
last_seen=datetime(2013, 8, 13, 3, 8, 24, 880386),
first_seen=datetime(2013, 7, 13, 3, 8, 24, 880386),
)
self.event1 = self.create_event(
event_id='a' * 32,
group=self.group1,
datetime=datetime(2013, 7, 13, 3, 8, 24, 880386),
tags={
'server': 'example.com',
'env': 'production',
}
)
self.event3 = self.create_event(
event_id='c' * 32,
group=self.group1,
datetime=datetime(2013, 8, 13, 3, 8, 24, 880386),
tags={
'server': 'example.com',
'env': 'production',
}
)
self.group2 = self.create_group(
project=self.project1,
checksum='b' * 32,
message='bar',
times_seen=10,
status=GroupStatus.RESOLVED,
last_seen=datetime(2013, 7, 14, 3, 8, 24, 880386),
first_seen=datetime(2013, 7, 14, 3, 8, 24, 880386),
)
self.event2 = self.create_event(
event_id='b' * 32,
group=self.group2,
datetime=datetime(2013, 7, 14, 3, 8, 24, 880386),
tags={
'server': 'example.com',
'env': 'staging',
'url': 'http://example.com',
}
)
for key, value in self.event1.data['tags']:
tagstore.create_group_tag_value(
project_id=self.group1.project_id,
group_id=self.group1.id,
key=key,
value=value,
)
for key, value in self.event2.data['tags']:
tagstore.create_group_tag_value(
project_id=self.group2.project_id,
group_id=self.group2.id,
key=key,
value=value,
)
GroupBookmark.objects.create(
user=self.user,
group=self.group2,
project=self.group2.project,
)
GroupAssignee.objects.create(
user=self.user,
group=self.group2,
project=self.group2.project,
)
GroupSubscription.objects.create(
user=self.user,
group=self.group1,
project=self.group1.project,
is_active=True,
)
GroupSubscription.objects.create(
user=self.user,
group=self.group2,
project=self.group2.project,
is_active=False,
)
def test_query(self):
results = self.backend.query(self.project1, query='foo')
assert len(results) == 1
assert results[0] == self.group1
results = self.backend.query(self.project1, query='bar')
assert len(results) == 1
assert results[0] == self.group2
def test_sort(self):
results = self.backend.query(self.project1, sort_by='date')
assert len(results) == 2
assert results[0] == self.group1
assert results[1] == self.group2
results = self.backend.query(self.project1, sort_by='new')
assert len(results) == 2
assert results[0] == self.group2
assert results[1] == self.group1
results = self.backend.query(self.project1, sort_by='freq')
assert len(results) == 2
assert results[0] == self.group2
assert results[1] == self.group1
def test_status(self):
results = self.backend.query(self.project1, status=GroupStatus.UNRESOLVED)
assert len(results) == 1
assert results[0] == self.group1
results = self.backend.query(self.project1, status=GroupStatus.RESOLVED)
assert len(results) == 1
assert results[0] == self.group2
def test_tags(self):
results = self.backend.query(self.project1, tags={'env': 'staging'})
assert len(results) == 1
assert results[0] == self.group2
results = self.backend.query(self.project1, tags={'env': 'example.com'})
assert len(results) == 0
results = self.backend.query(self.project1, tags={'env': ANY})
assert len(results) == 2
results = self.backend.query(
self.project1, tags={'env': 'staging',
'server': 'example.com'}
)
assert len(results) == 1
assert results[0] == self.group2
results = self.backend.query(self.project1, tags={'env': 'staging', 'server': ANY})
assert len(results) == 1
assert results[0] == self.group2
results = self.backend.query(
self.project1, tags={'env': 'staging',
'server': 'bar.example.com'}
)
assert len(results) == 0
def test_bookmarked_by(self):
results = self.backend.query(self.project1, bookmarked_by=self.user)
assert len(results) == 1
assert results[0] == self.group2
def test_project(self):
results = self.backend.query(self.project2)
assert len(results) == 0
def test_pagination(self):
results = self.backend.query(self.project1, limit=1, sort_by='date')
assert len(results) == 1
assert results[0] == self.group1
results = self.backend.query(self.project1, cursor=results.next, limit=1, sort_by='date')
assert len(results) == 1
assert results[0] == self.group2
results = self.backend.query(self.project1, cursor=results.next, limit=1, sort_by='date')
assert len(results) == 0
def test_age_filter(self):
results = self.backend.query(
self.project1,
age_from=self.group2.first_seen,
)
assert len(results) == 1
assert results[0] == self.group2
results = self.backend.query(
self.project1,
age_to=self.group1.first_seen + timedelta(minutes=1),
)
assert len(results) == 1
assert results[0] == self.group1
results = self.backend.query(
self.project1,
age_from=self.group1.first_seen,
age_to=self.group1.first_seen + timedelta(minutes=1),
)
assert len(results) == 1
assert results[0] == self.group1
def test_last_seen_filter(self):
results = self.backend.query(
self.project1,
last_seen_from=self.group1.last_seen,
)
assert len(results) == 1
assert results[0] == self.group1
results = self.backend.query(
self.project1,
last_seen_to=self.group2.last_seen + timedelta(minutes=1),
)
assert len(results) == 1
assert results[0] == self.group2
results = self.backend.query(
self.project1,
last_seen_from=self.group1.last_seen,
last_seen_to=self.group1.last_seen + timedelta(minutes=1),
)
assert len(results) == 1
assert results[0] == self.group1
def test_date_filter(self):
results = self.backend.query(
self.project1,
date_from=self.event2.datetime,
)
assert len(results) == 2
assert results[0] == self.group1
assert results[1] == self.group2
results = self.backend.query(
self.project1,
date_to=self.event1.datetime + timedelta(minutes=1),
)
assert len(results) == 1
assert results[0] == self.group1
results = self.backend.query(
self.project1,
date_from=self.event1.datetime,
date_to=self.event2.datetime + timedelta(minutes=1),
)
assert len(results) == 2
assert results[0] == self.group1
assert results[1] == self.group2
def test_unassigned(self):
results = self.backend.query(self.project1, unassigned=True)
assert len(results) == 1
assert results[0] == self.group1
results = self.backend.query(self.project1, unassigned=False)
assert len(results) == 1
assert results[0] == self.group2
def test_assigned_to(self):
results = self.backend.query(self.project1, assigned_to=self.user)
assert len(results) == 1
assert results[0] == self.group2
def test_subscribed_by(self):
results = self.backend.query(
self.group1.project,
subscribed_by=self.user,
)
assert len(results) == 1
assert results[0] == self.group1
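    # Editor-added sketch (hedged, not collected by pytest): the filters
    # exercised individually above can also be combined in a single query()
    # call; the parameter values here are illustrative.
    def _example_combined_query(self):
        return self.backend.query(
            self.project1,
            status=GroupStatus.UNRESOLVED,
            tags={'env': 'production'},
            sort_by='date',
        )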
| 32.262069 | 97 | 0.564771 |
dd42b7a9c898a075f7f75f3652de2f58685137af | 4,642 | py | Python | game.py | ricardombrodriguez/Tetris-AI-Bot | ab1ccc37946b815ab87e4bb553a44938941ad511 | ["MIT"] | null | null | null | game.py | ricardombrodriguez/Tetris-AI-Bot | ab1ccc37946b815ab87e4bb553a44938941ad511 | ["MIT"] | null | null | null | game.py | ricardombrodriguez/Tetris-AI-Bot | ab1ccc37946b815ab87e4bb553a44938941ad511 | ["MIT"] | 2 | 2022-02-19T21:16:38.000Z | 2022-02-20T01:28:04.000Z |
from asyncio.queues import Queue
import logging
import random
import asyncio
from common import Dimensions
from copy import deepcopy
from shape import SHAPES
from collections import Counter
logger = logging.getLogger("Game")
logger.setLevel(logging.DEBUG)
GAME_SPEED = 10
SPEED_STEP = 10 # points
class Game:
def __init__(self, x=10, y=30) -> None:
logger.info("Game")
self.dimensions = Dimensions(x, y)
self.current_piece = None
self.next_pieces = [deepcopy(random.choice(SHAPES)) for _ in range(3)]
self._bottom = [(i, y) for i in range(x)] # bottom
self._lateral = [(0, i) for i in range(y)] # left
self._lateral.extend([(x - 1, i) for i in range(y)]) # right
self.grid = self._bottom + self._lateral
self.game = []
self.score = 0
self.speed = 1
self.game_speed = 10
self._lastkeypress = None
self.running = True
def info(self):
return {
"dimensions": self.dimensions,
"grid": self.grid,
"game_speed": self.game_speed,
"score": self.score
}
def clear_rows(self):
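        # Editor-added note: a row is full when it holds len(self._bottom) - 2
        # cells (the two lateral wall columns are excluded); cleared rows are
        # removed, every cell above drops one line, and scoring is quadratic
        # in the number of lines cleared at once.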
lines = 0
for item, count in sorted(Counter(y for _, y in self.game).most_common()):
if count == len(self._bottom) - 2:
self.game = [
(x, y + 1) if y < item else (x, y)
for (x, y) in self.game
if y != item
] # remove row and drop lines
lines += 1
logger.debug("Clear line %s", item)
self.score += lines ** 2
self.game_speed = GAME_SPEED + self.score // SPEED_STEP
most_common = Counter(y for _, y in self.game).most_common(1)
if most_common != []:
(_, count) = most_common[0]
assert count != len(self._bottom) - 2, f"please create an issue https://github.com/dgomes/ia-tetris/issues sharing:\n {self.game}"
def keypress(self, key):
"""Update locally last key pressed."""
self._lastkeypress = key
async def loop(self):
logger.info("Loop - score: %s - speed: %s", self.score, self.game_speed)
await asyncio.sleep(1.0 / self.game_speed)
if self.current_piece is None:
self.current_piece = self.next_pieces.pop(0)
self.next_pieces.append(deepcopy(random.choice(SHAPES)))
logger.debug("New piece: %s", self.current_piece)
self.current_piece.set_pos(
(self.dimensions.x - self.current_piece.dimensions.x) / 2, 0
)
if not self.valid(self.current_piece):
logger.info("GAME OVER")
self.running = False
self.current_piece.y += 1
if self.valid(self.current_piece):
if self._lastkeypress == "s":
while self.valid(self.current_piece):
self.current_piece.y += 1
self.current_piece.y -= 1
elif self._lastkeypress == "w":
self.current_piece.rotate()
if not self.valid(self.current_piece):
self.current_piece.rotate(-1)
elif self._lastkeypress == "a":
shift = -1
elif self._lastkeypress == "d":
shift = +1
if self._lastkeypress in ["a", "d"]:
self.current_piece.translate(shift, 0)
if self.collide_lateral(self.current_piece):
logger.debug("Hitting the wall")
self.current_piece.translate(-shift, 0)
elif not self.valid(self.current_piece):
self.current_piece.translate(-shift, 0)
else:
self.current_piece.y -= 1
self.game.extend(self.current_piece.positions)
self.clear_rows()
self.current_piece = None
self._lastkeypress = None
logger.debug("Current piece: %s", self.current_piece)
return {
"game": self.game,
"piece": self.current_piece.positions if self.current_piece else None,
"next_pieces": [n.positions for n in self.next_pieces],
"game_speed": self.game_speed,
"score": self.score,
}
def valid(self, piece):
return not any(
[piece_part in self.grid for piece_part in piece.positions]
) and not any([piece_part in self.game for piece_part in piece.positions])
def collide_lateral(self, piece):
        return any([piece_part in self._lateral for piece_part in piece.positions])
| 33.883212 | 142 | 0.562688 |
50fbe764637461f46f93df552766f99b8770a87e | 18,329 | py | Python | test/functional/wallet_multiwallet.py | rebitcoin/rebitcoin | 87a39edad2362c3f196a98d4703f6f23cde9c13e | ["MIT"] | 1 | 2019-12-05T08:16:41.000Z | 2019-12-05T08:16:41.000Z | test/functional/wallet_multiwallet.py | rebitcoin/rebitcoin | 87a39edad2362c3f196a98d4703f6f23cde9c13e | ["MIT"] | null | null | null | test/functional/wallet_multiwallet.py | rebitcoin/rebitcoin | 87a39edad2362c3f196a98d4703f6f23cde9c13e | ["MIT"] | 1 | 2020-11-04T06:59:41.000Z | 2020-11-04T06:59:41.000Z |
#!/usr/bin/env python3
# Copyright (c) 2017-2019 The ReBitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multiwallet.
Verify that a rebitcoind node can load multiple wallet files
"""
import os
import shutil
import time
from test_framework.test_framework import ReBitcoinTestFramework
from test_framework.test_node import ErrorMatch
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
FEATURE_LATEST = 169900
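# Editor-added note (hedged): 169900 mirrors Bitcoin Core's FEATURE_LATEST
# wallet version; it is used below to exercise downgrade protection via
# -upgradewallet.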
class MultiWalletTest(ReBitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.supports_cli = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def add_options(self, parser):
parser.add_argument(
'--data_wallets_dir',
default=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/wallets/'),
help='Test data with wallet directories (default: %(default)s)',
)
def run_test(self):
node = self.nodes[0]
data_dir = lambda *p: os.path.join(node.datadir, 'regtest', *p)
wallet_dir = lambda *p: data_dir('wallets', *p)
wallet = lambda name: node.get_wallet_rpc(name)
def wallet_file(name):
if os.path.isdir(wallet_dir(name)):
return wallet_dir(name, "wallet.dat")
return wallet_dir(name)
assert_equal(self.nodes[0].listwalletdir(), { 'wallets': [{ 'name': '' }] })
# check wallet.dat is created
self.stop_nodes()
assert_equal(os.path.isfile(wallet_dir('wallet.dat')), True)
# create symlink to verify wallet directory path can be referenced
# through symlink
os.mkdir(wallet_dir('w7'))
os.symlink('w7', wallet_dir('w7_symlink'))
# rename wallet.dat to make sure plain wallet file paths (as opposed to
# directory paths) can be loaded
os.rename(wallet_dir("wallet.dat"), wallet_dir("w8"))
# create another dummy wallet for use in testing backups later
self.start_node(0, [])
self.stop_nodes()
empty_wallet = os.path.join(self.options.tmpdir, 'empty.dat')
os.rename(wallet_dir("wallet.dat"), empty_wallet)
# restart node with a mix of wallet names:
# w1, w2, w3 - to verify new wallets created when non-existing paths specified
# w - to verify wallet name matching works when one wallet path is prefix of another
# sub/w5 - to verify relative wallet path is created correctly
# extern/w6 - to verify absolute wallet path is created correctly
# w7_symlink - to verify symlinked wallet path is initialized correctly
# w8 - to verify existing wallet file is loaded correctly
# '' - to verify default wallet file is created correctly
wallet_names = ['w1', 'w2', 'w3', 'w', 'sub/w5', os.path.join(self.options.tmpdir, 'extern/w6'), 'w7_symlink', 'w8', '']
extra_args = ['-wallet={}'.format(n) for n in wallet_names]
self.start_node(0, extra_args)
assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8'])
assert_equal(set(node.listwallets()), set(wallet_names))
# check that all requested wallets were created
self.stop_node(0)
for wallet_name in wallet_names:
assert_equal(os.path.isfile(wallet_file(wallet_name)), True)
# should not initialize if wallet path can't be created
exp_stderr = "boost::filesystem::create_directory:"
self.nodes[0].assert_start_raises_init_error(['-wallet=wallet.dat/bad'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" does not exist')
self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" is a relative path', cwd=data_dir())
self.nodes[0].assert_start_raises_init_error(['-walletdir=debug.log'], 'Error: Specified -walletdir "debug.log" is not a directory', cwd=data_dir())
# should not initialize if there are duplicate wallets
self.nodes[0].assert_start_raises_init_error(['-wallet=w1', '-wallet=w1'], 'Error: Error loading wallet w1. Duplicate -wallet filename specified.')
# should not initialize if one wallet is a copy of another
shutil.copyfile(wallet_dir('w8'), wallet_dir('w8_copy'))
exp_stderr = r"BerkeleyBatch: Can't open database w8_copy \(duplicates fileid \w+ from w8\)"
self.nodes[0].assert_start_raises_init_error(['-wallet=w8', '-wallet=w8_copy'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
# should not initialize if wallet file is a symlink
os.symlink('w8', wallet_dir('w8_symlink'))
self.nodes[0].assert_start_raises_init_error(['-wallet=w8_symlink'], r'Error: Invalid -wallet path \'w8_symlink\'\. .*', match=ErrorMatch.FULL_REGEX)
# should not initialize if the specified walletdir does not exist
self.nodes[0].assert_start_raises_init_error(['-walletdir=bad'], 'Error: Specified -walletdir "bad" does not exist')
# should not initialize if the specified walletdir is not a directory
not_a_dir = wallet_dir('notadir')
open(not_a_dir, 'a', encoding="utf8").close()
self.nodes[0].assert_start_raises_init_error(['-walletdir=' + not_a_dir], 'Error: Specified -walletdir "' + not_a_dir + '" is not a directory')
self.log.info("Do not allow -zapwallettxes with multiwallet")
self.nodes[0].assert_start_raises_init_error(['-zapwallettxes', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=1', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=2', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
self.log.info("Do not allow -salvagewallet with multiwallet")
self.nodes[0].assert_start_raises_init_error(['-salvagewallet', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-salvagewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file")
self.log.info("Do not allow -upgradewallet with multiwallet")
self.nodes[0].assert_start_raises_init_error(['-upgradewallet', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-upgradewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file")
# if wallets/ doesn't exist, datadir should be the default wallet dir
wallet_dir2 = data_dir('walletdir')
os.rename(wallet_dir(), wallet_dir2)
self.start_node(0, ['-wallet=w4', '-wallet=w5'])
assert_equal(set(node.listwallets()), {"w4", "w5"})
w5 = wallet("w5")
node.generatetoaddress(nblocks=1, address=w5.getnewaddress())
# now if wallets/ exists again, but the rootdir is specified as the walletdir, w4 and w5 should still be loaded
os.rename(wallet_dir2, wallet_dir())
self.restart_node(0, ['-wallet=w4', '-wallet=w5', '-walletdir=' + data_dir()])
assert_equal(set(node.listwallets()), {"w4", "w5"})
w5 = wallet("w5")
w5_info = w5.getwalletinfo()
assert_equal(w5_info['immature_balance'], 50)
competing_wallet_dir = os.path.join(self.options.tmpdir, 'competing_walletdir')
os.mkdir(competing_wallet_dir)
self.restart_node(0, ['-walletdir=' + competing_wallet_dir])
exp_stderr = r"Error: Error initializing wallet database environment \"\S+competing_walletdir\"!"
self.nodes[1].assert_start_raises_init_error(['-walletdir=' + competing_wallet_dir], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
self.restart_node(0, extra_args)
assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy'])
wallets = [wallet(w) for w in wallet_names]
wallet_bad = wallet("bad")
# check wallet names and balances
node.generatetoaddress(nblocks=1, address=wallets[0].getnewaddress())
for wallet_name, wallet in zip(wallet_names, wallets):
info = wallet.getwalletinfo()
assert_equal(info['immature_balance'], 50 if wallet is wallets[0] else 0)
assert_equal(info['walletname'], wallet_name)
# accessing invalid wallet fails
assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", wallet_bad.getwalletinfo)
# accessing wallet RPC without using wallet endpoint fails
assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo)
w1, w2, w3, w4, *_ = wallets
node.generatetoaddress(nblocks=101, address=w1.getnewaddress())
assert_equal(w1.getbalance(), 100)
assert_equal(w2.getbalance(), 0)
assert_equal(w3.getbalance(), 0)
assert_equal(w4.getbalance(), 0)
w1.sendtoaddress(w2.getnewaddress(), 1)
w1.sendtoaddress(w3.getnewaddress(), 2)
w1.sendtoaddress(w4.getnewaddress(), 3)
node.generatetoaddress(nblocks=1, address=w1.getnewaddress())
assert_equal(w2.getbalance(), 1)
assert_equal(w3.getbalance(), 2)
assert_equal(w4.getbalance(), 3)
batch = w1.batch([w1.getblockchaininfo.get_request(), w1.getwalletinfo.get_request()])
assert_equal(batch[0]["result"]["chain"], "regtest")
assert_equal(batch[1]["result"]["walletname"], "w1")
self.log.info('Check for per-wallet settxfee call')
assert_equal(w1.getwalletinfo()['paytxfee'], 0)
assert_equal(w2.getwalletinfo()['paytxfee'], 0)
w2.settxfee(4.0)
assert_equal(w1.getwalletinfo()['paytxfee'], 0)
assert_equal(w2.getwalletinfo()['paytxfee'], 4.0)
self.log.info("Test dynamic wallet loading")
self.restart_node(0, ['-nowallet'])
assert_equal(node.listwallets(), [])
assert_raises_rpc_error(-32601, "Method not found", node.getwalletinfo)
self.log.info("Load first wallet")
loadwallet_name = node.loadwallet(wallet_names[0])
assert_equal(loadwallet_name['name'], wallet_names[0])
assert_equal(node.listwallets(), wallet_names[0:1])
node.getwalletinfo()
w1 = node.get_wallet_rpc(wallet_names[0])
w1.getwalletinfo()
self.log.info("Load second wallet")
loadwallet_name = node.loadwallet(wallet_names[1])
assert_equal(loadwallet_name['name'], wallet_names[1])
assert_equal(node.listwallets(), wallet_names[0:2])
assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo)
w2 = node.get_wallet_rpc(wallet_names[1])
w2.getwalletinfo()
self.log.info("Load remaining wallets")
for wallet_name in wallet_names[2:]:
loadwallet_name = self.nodes[0].loadwallet(wallet_name)
assert_equal(loadwallet_name['name'], wallet_name)
assert_equal(set(self.nodes[0].listwallets()), set(wallet_names))
# Fail to load if wallet doesn't exist
assert_raises_rpc_error(-18, 'Wallet wallets not found.', self.nodes[0].loadwallet, 'wallets')
# Fail to load duplicate wallets
assert_raises_rpc_error(-4, 'Wallet file verification failed: Error loading wallet w1. Duplicate -wallet filename specified.', self.nodes[0].loadwallet, wallet_names[0])
# Fail to load duplicate wallets by different ways (directory and filepath)
assert_raises_rpc_error(-4, "Wallet file verification failed: Error loading wallet wallet.dat. Duplicate -wallet filename specified.", self.nodes[0].loadwallet, 'wallet.dat')
# Fail to load if one wallet is a copy of another
assert_raises_rpc_error(-1, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
# Fail to load if one wallet is a copy of another, test this twice to make sure that we don't re-introduce #14304
assert_raises_rpc_error(-1, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
# Fail to load if wallet file is a symlink
assert_raises_rpc_error(-4, "Wallet file verification failed: Invalid -wallet path 'w8_symlink'", self.nodes[0].loadwallet, 'w8_symlink')
# Fail to load if a directory is specified that doesn't contain a wallet
os.mkdir(wallet_dir('empty_wallet_dir'))
assert_raises_rpc_error(-18, "Directory empty_wallet_dir does not contain a wallet.dat file", self.nodes[0].loadwallet, 'empty_wallet_dir')
self.log.info("Test dynamic wallet creation.")
# Fail to create a wallet if it already exists.
assert_raises_rpc_error(-4, "Wallet w2 already exists.", self.nodes[0].createwallet, 'w2')
# Successfully create a wallet with a new name
loadwallet_name = self.nodes[0].createwallet('w9')
assert_equal(loadwallet_name['name'], 'w9')
w9 = node.get_wallet_rpc('w9')
assert_equal(w9.getwalletinfo()['walletname'], 'w9')
assert 'w9' in self.nodes[0].listwallets()
# Successfully create a wallet using a full path
new_wallet_dir = os.path.join(self.options.tmpdir, 'new_walletdir')
new_wallet_name = os.path.join(new_wallet_dir, 'w10')
loadwallet_name = self.nodes[0].createwallet(new_wallet_name)
assert_equal(loadwallet_name['name'], new_wallet_name)
w10 = node.get_wallet_rpc(new_wallet_name)
assert_equal(w10.getwalletinfo()['walletname'], new_wallet_name)
assert new_wallet_name in self.nodes[0].listwallets()
self.log.info("Test dynamic wallet unloading")
# Test `unloadwallet` errors
assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].unloadwallet)
assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", self.nodes[0].unloadwallet, "dummy")
assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", node.get_wallet_rpc("dummy").unloadwallet)
        assert_raises_rpc_error(-8, "Cannot unload the requested wallet", w1.unloadwallet, "w2")
# Successfully unload the specified wallet name
self.nodes[0].unloadwallet("w1")
assert 'w1' not in self.nodes[0].listwallets()
# Successfully unload the wallet referenced by the request endpoint
# Also ensure unload works during walletpassphrase timeout
w2.encryptwallet('test')
w2.walletpassphrase('test', 1)
w2.unloadwallet()
time.sleep(1.1)
assert 'w2' not in self.nodes[0].listwallets()
# Successfully unload all wallets
for wallet_name in self.nodes[0].listwallets():
self.nodes[0].unloadwallet(wallet_name)
assert_equal(self.nodes[0].listwallets(), [])
assert_raises_rpc_error(-32601, "Method not found (wallet method is disabled because no wallet is loaded)", self.nodes[0].getwalletinfo)
# Successfully load a previously unloaded wallet
self.nodes[0].loadwallet('w1')
assert_equal(self.nodes[0].listwallets(), ['w1'])
assert_equal(w1.getwalletinfo()['walletname'], 'w1')
assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy', 'w9'])
# Test backing up and restoring wallets
self.log.info("Test wallet backup")
self.restart_node(0, ['-nowallet'])
for wallet_name in wallet_names:
self.nodes[0].loadwallet(wallet_name)
for wallet_name in wallet_names:
rpc = self.nodes[0].get_wallet_rpc(wallet_name)
addr = rpc.getnewaddress()
backup = os.path.join(self.options.tmpdir, 'backup.dat')
rpc.backupwallet(backup)
self.nodes[0].unloadwallet(wallet_name)
shutil.copyfile(empty_wallet, wallet_file(wallet_name))
self.nodes[0].loadwallet(wallet_name)
assert_equal(rpc.getaddressinfo(addr)['ismine'], False)
self.nodes[0].unloadwallet(wallet_name)
shutil.copyfile(backup, wallet_file(wallet_name))
self.nodes[0].loadwallet(wallet_name)
assert_equal(rpc.getaddressinfo(addr)['ismine'], True)
# Test .walletlock file is closed
self.start_node(1)
wallet = os.path.join(self.options.tmpdir, 'my_wallet')
self.nodes[0].createwallet(wallet)
assert_raises_rpc_error(-4, "Error initializing wallet database environment", self.nodes[1].loadwallet, wallet)
self.nodes[0].unloadwallet(wallet)
self.nodes[1].loadwallet(wallet)
# Fail to load if wallet is downgraded
shutil.copytree(os.path.join(self.options.data_wallets_dir, 'high_minversion'), wallet_dir('high_minversion'))
self.restart_node(0, extra_args=['-upgradewallet={}'.format(FEATURE_LATEST)])
assert {'name': 'high_minversion'} in self.nodes[0].listwalletdir()['wallets']
self.log.info("Fail -upgradewallet that results in downgrade")
assert_raises_rpc_error(
-4,
'Wallet loading failed: Error loading {}: Wallet requires newer version of {}'.format(
wallet_dir('high_minversion', 'wallet.dat'), self.config['environment']['PACKAGE_NAME']),
lambda: self.nodes[0].loadwallet(filename='high_minversion'),
)
if __name__ == '__main__':
MultiWalletTest().main()
| 52.368571 | 195 | 0.672268 |
c953274553e9d5c453365fba6904e6134aba5e13 | 4,812 | py | Python | extensions.py | brenden17/Realtime-Poll-with-Flask-on-Redis | f259ec65797b643ef59ff509f4d701ba4cbb9e3a | ["MIT"] | null | null | null | extensions.py | brenden17/Realtime-Poll-with-Flask-on-Redis | f259ec65797b643ef59ff509f4d701ba4cbb9e3a | ["MIT"] | null | null | null | extensions.py | brenden17/Realtime-Poll-with-Flask-on-Redis | f259ec65797b643ef59ff509f4d701ba4cbb9e3a | ["MIT"] | null | null | null |
import json
from random import getrandbits
from datetime import datetime, timedelta
from redis import Redis
from flask import session
from flask import request
from flask import Blueprint
from flask import render_template
from flask import Markup
from flask.views import View
redis = Redis()
class PollSession(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.add_url_rule('/poll', view_func=PollAjax.as_view('poll'))
class PollAjax(View):
methods = ['POST']
def dispatch_request(self):
if request.method == 'POST':
item = request.form.get('item')
poll_name = request.form.get('name')
session_id = session.sid
user_id = redis.get(session_id)
if not user_id:
                user_id = getrandbits(24)  # random 24-bit number
redis.set(session_id, user_id)
# vote
poll_item(user_id, item, poll_name)
return '{"result":"ok"}'
def poll_item(user_id, item='none', poll_name='poll'):
key = create_key(item, poll_name)
try:
redis.setbit(key, user_id, 1)
    except Exception as e:
        print(e)
def create_key(item='none', poll_name='poll', target_time=None):
if not target_time:
target_time = datetime.now()
created_time = target_time.strftime('%Y-%m-%d')
return '{0}:{1}:{2}'.format(poll_name, item, created_time)
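# Editor-added note: create_key() names one Redis bitmap per item per day,
# e.g. create_key('yes', 'lunch') -> 'lunch:yes:<YYYY-MM-DD>'; poll_item()
# sets the bit at the voter's user_id, so BITCOUNT yields the daily tally.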
class PollAnalytics(object):
def __init__(self, items, poll_name='poll'):
self.items = [item for item, caption in items]
self.item_captions = {item:caption for item, caption in items}
self.poll_name = poll_name
def fetch_daily(self, last=30):
items = ','.join([self.get_item(item, day) for item in self.items for day in range(last)])
return '[{}]'.format(items)
def get_item(self, item, last=30):
d = datetime.today() - timedelta(days=last)
key = create_key(item, self.poll_name, d)
count = redis.bitcount(key)
return '{"date":"%s", "item":"%s", "count":"%s"}' % \
(key.split(':')[2], self.item_captions[item], count)
def delete_all_events(self, action):
events = redis.keys('{}:*:*'.format(self.poll_name))
if events:
try:
redis.delete(*events)
            except Exception as e:
                print(e)
class PollRender(object):
def __init__(self,
title='',
description='',
items=[],
poll_name='poll-name',
poll_type='radio'):
self.title = title
self.description = description
self.items = items
self.poll_name = poll_name
self.poll_type = poll_type
def render(self, *args, **kwargs):
return render_template(*args, **kwargs)
@property
def html(self):
if self.poll_type == 'button':
return Markup(self.render('PollRedis/inline_button.html', pb=self))
else:
return Markup(self.render('PollRedis/inline.html', pb=self))
def poll(*args, **kwargs):
poll_render = PollRender(*args, **kwargs)
return poll_render.html
class PollResultRender(object):
def __init__(self,
items=[],
poll_name='Poll',
graph='bar',
width='600',
height='350',
last=7,
x='date',
y='count'):
self.items = items
self.poll_name = poll_name
self.graph = graph
self.width = width
self.height = height
self.x = x
self.y = y
self.PA = PollAnalytics(self.items, self.poll_name)
self.data = self.PA.fetch_daily(last)
def render(self, *args, **kwargs):
return render_template(*args, **kwargs)
@property
def html(self):
base_html = 'PollRedis/{}.html'.format(self.graph)
return Markup(self.render(base_html, pr=self))
def poll_analytics(*args, **kwargs):
pollresult_render = PollResultRender(*args, **kwargs)
return pollresult_render.html
class Poll(object):
def __init__(self, app):
self.init_app(app)
def init_app(self, app):
self.register_blueprint(app)
app.add_template_global(poll_analytics)
app.add_template_global(poll)
def register_blueprint(self, app):
module = Blueprint(
'PollRedis',
__name__,
template_folder='templates'
)
app.register_blueprint(module)
return module
class PollRedis(object):
def __init__(self, app):
PollSession(app)
        Poll(app)
| 28.305882 | 98 | 0.58084 |
e17f7152a308383234182044311a06b382bf0127 | 1,483 | py | Python | api/endpoints/user.py | saffist3r/Achba7-Back | 2edd18375d792179aa3ce0940e633f5d08f8a9c0 | ["MIT"] | 1 | 2021-02-07T23:32:07.000Z | 2021-02-07T23:32:07.000Z | api/endpoints/user.py | saffist3r/Achba7-Back | 2edd18375d792179aa3ce0940e633f5d08f8a9c0 | ["MIT"] | null | null | null | api/endpoints/user.py | saffist3r/Achba7-Back | 2edd18375d792179aa3ce0940e633f5d08f8a9c0 | ["MIT"] | null | null | null |
from typing import List
from fastapi import APIRouter, Depends, HTTPException
from starlette.requests import Request
from starlette.responses import PlainTextResponse, JSONResponse
from sqlalchemy.orm import Session
from db import crud, models, schemas
from db.database import SessionLocal, engine
models.Base.metadata.create_all(bind=engine)
router = APIRouter()
# Dependency
def get_db():
db = SessionLocal()
try:
yield db
finally:
db.close()
@router.post("/")
async def create_user(user: schemas.User, db: Session = Depends(get_db)):
    # The session must be injected via Depends; build the ORM row from the
    # payload, persist it, and refresh to pick up the generated primary key.
    db_user = models.User(password=user.password, name=user.name, surname=user.surname, photo=user.photo,
                          fonction=user.fonction, date=user.date, email=user.email, telephone=user.telephone,
                          city=user.city, country=user.country)
    db.add(db_user)
    db.commit()
    db.refresh(db_user)
    return PlainTextResponse("Create")
@router.get("/users/", response_model=List[schemas.User])
async def get_users(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
users = crud.get_users(db, skip=skip, limit=limit)
return users
@router.get("/{user_id}", response_model=schemas.User)
async def get_user_by_id(user_id: int, db: Session = Depends(get_db)):
db_user = crud.get_user(db, user_id=user_id)
if db_user is None:
raise HTTPException(status_code=404, detail="User not found")
return db_user
@router.delete("/")
async def delete_user(request: Request):
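    # Editor-added note: placeholder endpoint; no record is deleted yet.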
return PlainTextResponse("delete")
| 29.66 | 109 | 0.712744 |
33ca0a4261c6d0f4647ff383255f02c6767cd5a2 | 1,829 | py | Python | ask-smapi-model/ask_smapi_model/v1/isp/editable_state.py | Signal-Kinetics/alexa-apis-for-python | abb8d3dce18a5510c48b215406ed36c024f01495 | ["Apache-2.0"] | 2 | 2021-10-30T06:52:48.000Z | 2021-11-16T12:34:16.000Z | ask-smapi-model/ask_smapi_model/v1/isp/editable_state.py | Signal-Kinetics/alexa-apis-for-python | abb8d3dce18a5510c48b215406ed36c024f01495 | ["Apache-2.0"] | null | null | null | ask-smapi-model/ask_smapi_model/v1/isp/editable_state.py | Signal-Kinetics/alexa-apis-for-python | abb8d3dce18a5510c48b215406ed36c024f01495 | ["Apache-2.0"] | null | null | null |
# coding: utf-8
#
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the License.
#
import pprint
import re # noqa: F401
import six
import typing
from enum import Enum
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional, Union
from datetime import datetime
class EditableState(Enum):
"""
Whether or not the in-skill product is editable.
Allowed enum values: [EDITABLE, NOT_EDITABLE]
"""
EDITABLE = "EDITABLE"
NOT_EDITABLE = "NOT_EDITABLE"
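    # Editor-added note: to_dict() below maps the member name to its value,
    # e.g. EditableState.EDITABLE.to_dict() == {"EDITABLE": "EDITABLE"}.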
def to_dict(self):
# type: () -> Dict[str, object]
"""Returns the model properties as a dict"""
result = {self.name: self.value}
return result
def to_str(self):
# type: () -> str
"""Returns the string representation of the model"""
return pprint.pformat(self.value)
def __repr__(self):
# type: () -> str
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
# type: (object) -> bool
"""Returns true if both objects are equal"""
if not isinstance(other, EditableState):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
# type: (object) -> bool
"""Returns true if both objects are not equal"""
return not self == other
| 27.298507 | 96 | 0.650082 |
d842ff7b305887fed5ef118d90076798b55b7090 | 796 | py | Python | migrations/versions/47c2999a1506_update_user_model.py | kiza054/woodhall-scout-blog-prototype | bc7dc0b766263bb7a1a4d342d27c57d7989ff152 | ["MIT"] | null | null | null | migrations/versions/47c2999a1506_update_user_model.py | kiza054/woodhall-scout-blog-prototype | bc7dc0b766263bb7a1a4d342d27c57d7989ff152 | ["MIT"] | null | null | null | migrations/versions/47c2999a1506_update_user_model.py | kiza054/woodhall-scout-blog-prototype | bc7dc0b766263bb7a1a4d342d27c57d7989ff152 | ["MIT"] | null | null | null |
"""update user model
Revision ID: 47c2999a1506
Revises: 6a93be8d57c4
Create Date: 2019-11-24 17:48:00.972670
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '47c2999a1506'
down_revision = '6a93be8d57c4'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('childs_section', sa.String(length=20), nullable=True))
op.add_column('user', sa.Column('leader', sa.String(length=6), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'leader')
op.drop_column('user', 'childs_section')
# ### end Alembic commands ###
| 25.677419 | 91 | 0.693467 |
643fb328cce7daac23bc3063087987c96d97bf96 | 724 | py | Python | raven/contrib/django/middleware/wsgi.py | opengovt/openroads-geostore | 336bdc352252ae34a66746e632ae0b8df66c04c0 | [
"MIT"
] | 1 | 2019-10-11T14:43:53.000Z | 2019-10-11T14:43:53.000Z | raven/contrib/django/middleware/wsgi.py | opengovt/openroads-geostore | 336bdc352252ae34a66746e632ae0b8df66c04c0 | [
"MIT"
] | null | null | null | raven/contrib/django/middleware/wsgi.py | opengovt/openroads-geostore | 336bdc352252ae34a66746e632ae0b8df66c04c0 | [
"MIT"
] | null | null | null | """
raven.contrib.django.middleware.wsgi
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from raven.middleware import Sentry
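# Editor-added note: the class below intentionally shadows the imported
# Sentry middleware, overriding only how the client is resolved.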
class Sentry(Sentry):
"""
Identical to the default WSGI middleware except that
    the client comes dynamically via ``get_client``.
>>> from raven.contrib.django.middleware.wsgi import Sentry
>>> application = Sentry(application)
"""
def __init__(self, application):
self.application = application
@property
def client(self):
from raven.contrib.django.models import client
return client
| 25.857143 | 75 | 0.676796 |
ccaac54e97357e4384ceba4e5effa30879132670 | 767 | py | Python | Visualization/hillshade.py | cclauss/qgis-earthengine-examples | 02a417f524875f5e6033b65cca8417ff2cfb03e6 | ["MIT"] | null | null | null | Visualization/hillshade.py | cclauss/qgis-earthengine-examples | 02a417f524875f5e6033b65cca8417ff2cfb03e6 | ["MIT"] | null | null | null | Visualization/hillshade.py | cclauss/qgis-earthengine-examples | 02a417f524875f5e6033b65cca8417ff2cfb03e6 | ["MIT"] | null | null | null |
import math
import ee
from ee_plugin import Map
def Radians(img):
return img.toFloat().multiply(math.pi).divide(180)
def Hillshade(az, ze, slope, aspect):
"""Compute hillshade for the given illumination az, el."""
azimuth = Radians(ee.Image(az))
zenith = Radians(ee.Image(ze))
# Hillshade = cos(Azimuth - Aspect) * sin(Slope) * sin(Zenith) +
# cos(Zenith) * cos(Slope)
return (azimuth.subtract(aspect).cos()
.multiply(slope.sin())
.multiply(zenith.sin())
.add(
zenith.cos().multiply(slope.cos())))
terrain = ee.Algorithms.Terrain(ee.Image('srtm90_v4'))
slope_img = Radians(terrain.select('slope'))
aspect_img = Radians(terrain.select('aspect'))
Map.addLayer(Hillshade(0, 60, slope_img, aspect_img))
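# Editor-added sketch (hedged): the helper generalizes to any sun position;
# the azimuth/zenith values below are illustrative, not from the original.
# Map.addLayer(Hillshade(315, 45, slope_img, aspect_img))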
| 28.407407 | 66 | 0.666232 |
d7508907df985f87eb678b2150f55889e5a76a24 | 4,264 | py | Python | scripts/src/link.py | 9999years/dotfiles | 763c2ca5f8aeb3b64eb28262e6708135e6cd2005 | ["MIT"] | 1 | 2020-09-09T15:06:43.000Z | 2020-09-09T15:06:43.000Z | scripts/src/link.py | 9999years/dotfiles | 763c2ca5f8aeb3b64eb28262e6708135e6cd2005 | ["MIT"] | 2 | 2020-09-09T14:16:21.000Z | 2020-09-29T17:31:15.000Z | scripts/src/link.py | 9999years/dotfiles | 763c2ca5f8aeb3b64eb28262e6708135e6cd2005 | ["MIT"] | 2 | 2020-09-04T14:55:57.000Z | 2020-10-30T19:08:58.000Z |
"""Linking the dotfiles themselves.
"""
import enum
import filecmp
import os
from dataclasses import dataclass
from enum import Enum
from typing import List
from . import actions
from . import color as co
from . import log, prompt
from .actions import ActionResult, mklink
from .schema import ResolvedDotfile, Status
from .util import Unreachable
class LinkStatus(Enum):
"""The result of linking a dotfile. In short, did any work happen?
"""
# Link was already OK
OK = enum.auto()
# Link was newly-created or fixed
FIXED = enum.auto()
# This dotfile was skipped or another error occured.
ERROR = enum.auto()
@dataclass
class Linker:
"""Manages the context around linking a set of dotfiles.
"""
# If True, don't actually link anything.
dry_run: bool = False
# Should we collapse multiple ok links in output to one line?
verbose: bool = False
def link_all(self, dotfiles: List[ResolvedDotfile]) -> None:
"""Link a list of dotfiles from configuration.
"""
# Count of already ok links, collapsed in output to one line
num_ok = 0
for resolved in dotfiles:
is_ok = _link_dotfile(resolved) is LinkStatus.OK
if self.verbose:
if is_ok:
print(log.ok_link(resolved))
else:
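                # Editor-added note: consecutive OK links overwrite the same
                # terminal row, so N unchanged links collapse into one line.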
if is_ok:
if num_ok != 0:
print(co.move_cursor_up_beginning(1) + co.CLEAR_LINE, end="")
num_ok += 1
print(log.links_already_ok(resolved, num_ok))
else:
num_ok = 0
def _link_dotfile(resolved: ResolvedDotfile) -> LinkStatus:
"""Link a dotfile from configuration.
"""
status = resolved.status
link_str = log.ln(str(resolved.installed.disp), str(resolved.link_dest))
if status is Status.OK:
# no action needed
return LinkStatus.OK
elif status is Status.MISSING:
# create unless dry run
mklink(resolved.installed.abs, resolved.link_dest)
print(log.created_link(resolved))
return LinkStatus.FIXED
elif status is Status.DIFF_DEST:
# a link, but with a different destination
print(
co.RED + log.NOT_OK, link_str + co.RESET,
)
return _fix_link(resolved, status)
elif status is Status.NOT_LINK:
print(co.RED + log.NOT_OK, resolved.installed.disp, "is not a link" + co.RESET)
return _fix_link(resolved, status)
else:
raise Unreachable
def _fix_link(resolved: ResolvedDotfile, status: Status) -> LinkStatus:
if status is not Status.NOT_LINK and status is not Status.DIFF_DEST:
raise Unreachable
if not resolved.repo.abs.exists():
log.fatal(f"{resolved.repo.abs} doesn't exist!")
if filecmp.cmp(resolved.installed.abs, resolved.repo.abs, shallow=False):
log.info(
log.path(resolved.installed.disp)
+ " and "
+ log.path(resolved.repo.disp)
+ " have the same contents; replacing with a link"
)
# The files are the same! Just fix them up.
if status is Status.DIFF_DEST:
# TODO: oh my g-d test this
installed_dest = resolved.installed.abs.parent / os.readlink(
resolved.installed.abs
)
os.remove(installed_dest)
res = actions.fix(resolved)
if res is not ActionResult.OK:
log.error(
f"Unexpected result {res} while fixing {log.path(resolved.installed.disp)}"
)
return LinkStatus.ERROR
return LinkStatus.FIXED
else:
# show stat-diff summary, etc
print(actions.files_summary(resolved))
choices = (
prompt.NOT_LINK_CHOICES
if status is Status.NOT_LINK
else prompt.DIFF_DEST_CHOICES
)
while True:
choice = prompt.ask(choices)
res = choice.invoke(resolved)
if res is ActionResult.OK:
return LinkStatus.FIXED
elif res is ActionResult.SKIPPED:
return LinkStatus.ERROR
else:
# ask again
pass
| 30.028169 | 91 | 0.601782 |
768925fa7ccda3ea42f15dd665dcf39fdf6bb12c | 5,646 | py | Python | train_caption.py | keio-smilab22/RelationalFutureCaptioningModel | 447980d1c1d6fd16d54b05feedebdef974a66c08 | ["Apache-2.0"] | 2 | 2022-03-20T09:50:40.000Z | 2022-03-27T18:27:54.000Z | train_caption.py | keio-smilab22/RelationalFutureCaptioningModel | 447980d1c1d6fd16d54b05feedebdef974a66c08 | ["Apache-2.0"] | null | null | null | train_caption.py | keio-smilab22/RelationalFutureCaptioningModel | 447980d1c1d6fd16d54b05feedebdef974a66c08 | ["Apache-2.0"] | null | null | null |
"""
Train captioning with MART.
Originally published by https://github.com/jayleicn/recurrent-transformer under MIT license
Reworked by https://github.com/gingsi/coot-videotext under Apache 2 license
"""
import numpy as np
from coot.configs_retrieval import ExperimentTypesConst
from mart import arguments_mart
from mart.configs_mart import MartConfig as Config
from mart.model import create_mart_model
from mart.recursive_caption_dataset import create_mart_datasets_and_loaders
from mart.trainer_caption import MartTrainer
from nntrainer import arguments, utils
from nntrainer.utils_torch import set_seed
from nntrainer.utils_yaml import load_yaml_config_file
import datetime
from nntrainer.metric import TEXT_METRICS
from nntrainer.view_results import collect_results_data, output_results, update_performance_profile
EXP_TYPE = ExperimentTypesConst.CAPTION
def main():
# ---------- Setup script arguments. ----------
parser = utils.ArgParser(description=__doc__)
arguments.add_default_args(parser) # logging level etc.
arguments.add_exp_identifier_args(parser) # arguments to identify the experiment to run
arguments.add_trainer_args(parser, dataset_path=False) # general trainer arguments
parser.add_argument("--preload", action="store_true", help="Preload everything.") # feature preloading
arguments_mart.add_mart_args(parser) # some more paths for mart
parser.add_argument("--load_model", type=str, default=None, help="Load model from file.")
parser.add_argument("--print_model", action="store_true", help=f"Print model")
args = parser.parse_args()
# load repository config yaml file to dict
exp_group, exp_name, config_file = arguments.setup_experiment_identifier_from_args(args, EXP_TYPE)
config = load_yaml_config_file(config_file)
# update experiment config given the script arguments
config = arguments.update_config_from_args(config, args)
config = arguments_mart.update_mart_config_from_args(config, args)
# read experiment config dict
cfg = Config(config)
if args.print_config:
print(cfg)
# set seed
verb = "Set seed"
if cfg.random_seed is None:
cfg.random_seed = np.random.randint(0, 2 ** 15, dtype=np.int32)
verb = "Randomly generated seed"
print(f"{verb} {cfg.random_seed} deterministic {cfg.cudnn_deterministic} "
f"benchmark {cfg.cudnn_benchmark}")
set_seed(cfg.random_seed, cudnn_deterministic=cfg.cudnn_deterministic, cudnn_benchmark=cfg.cudnn_benchmark)
# create dataset
train_set, val_set, train_loader, val_loader, test_set, test_loader = create_mart_datasets_and_loaders(
cfg, args.coot_feat_dir, args.annotations_dir, args.video_feature_dir)
# run_number: run name
# for i, run_number in enumerate(range(args.start_run, args.start_run + args.num_runs)):
for i in range(args.start_run):
run_number = datetime.datetime.now()
run_name = f"{args.run_name}{run_number}"
# create model from config
model = create_mart_model(cfg, len(train_set.word2idx), cache_dir=args.cache_dir)
# print model for debug if requested
if args.print_model and i == 0:
print(model)
# always load best epoch during validation
load_best = args.load_best or args.validate
# create trainer
trainer = MartTrainer(
cfg, model, exp_group, exp_name, run_name, len(train_loader), log_dir=args.log_dir,
log_level=args.log_level, logger=None, print_graph=args.print_graph, reset=args.reset, load_best=load_best,
load_epoch=args.load_epoch, load_model=args.load_model, inference_only=args.validate,
annotations_dir=args.annotations_dir)
if args.validate:
# run validation
if not trainer.load and not args.ignore_untrained:
raise ValueError("Validating an untrained model! No checkpoints were loaded. Add --ignore_untrained "
"to ignore this error.")
trainer.validate_epoch(val_loader)
else:
# run training
trainer.train_model(train_loader, val_loader, test_loader)
# run_name = "yc2_100m_coot_clip_mart_" + run_name
# print("RUN_NAME*******************************")
# print(run_name)
# print("***************************************")
# exp_groups_names = {"default": []}
# exp_groups_names["default"].append(run_name)
# # exp_groups_names = utils.match_folder(args.log_dir, EXP_TYPE, args.exp_group, args.exp_list, args.search)
# collector = collect_results_data(
# EXP_TYPE, exp_groups_names, log_dir="experiments", read_last_epoch=False, add_group=False)
# print(collector)
# collector = update_performance_profile(collector)
# # ---------- Define which metrics to print ----------
# default_metrics = []
# default_fields = ["bleu4", "meteo", "rougl", "cider", "re4"]
# output_results(collector, custom_metrics=TEXT_METRICS, metrics="", default_metrics=default_metrics,
# fields="", default_fields=default_fields, mean=False, mean_all=False,
# sort="score", sort_asc=False,
# compact=False)
# done with this round
trainer.close()
del model
del trainer
if __name__ == "__main__":
main()
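# Example invocation (hypothetical paths/flags; the exact options come from
# arguments.add_* and arguments_mart.add_mart_args registered above):
#   python train_caption.py EXP_GROUP EXP_NAME --preload --print_model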
| 44.809524 | 122 | 0.666667 |
79ca253ecc8cb15fe7e971090b2393d0a99ef47c | 1,257 | py | Python | TimeSeriesTools/Regime_detection/pattern_matching_detection.py | Psicowired87/TimeSeriesTools | de42dbcc5371ee576df6c9521b1c79a47c147dd1 | [
"MIT"
] | 1 | 2015-05-01T14:14:02.000Z | 2015-05-01T14:14:02.000Z | TimeSeriesTools/Regime_detection/pattern_matching_detection.py | Psicowired87/TimeSeriesTools | de42dbcc5371ee576df6c9521b1c79a47c147dd1 | [
"MIT"
] | null | null | null | TimeSeriesTools/Regime_detection/pattern_matching_detection.py | Psicowired87/TimeSeriesTools | de42dbcc5371ee576df6c9521b1c79a47c147dd1 | [
"MIT"
] | 1 | 2015-05-01T14:15:03.000Z | 2015-05-01T14:15:03.000Z |
"""
Pattern matching detection.
"""
import numpy as np
import pandas as pd
def pattern_matching_detection(activation, patterns, method, **kwargs):
"""General function to perform pattern matching based detection.
Parameters
----------
activation: array_like
description of the activity of the elements of the system.
patterns: array_like
patterns we want to match in order to detect a wanted regime.
    method: str
the method used to perform pattern matching.
kwargs: dict
variables needed to call the method selected.
Returns
-------
spks: pd.DataFrame
spikes detected.
"""
possible = ['dtw']
method = method if method in possible else 'dtw'
if method == 'dtw':
spks = dtw_based_detection(activation, patterns, **kwargs)
return spks
def dtw_based_detection(activation, patterns, **kwargs):
    """This function is based on dynamic time warping and uses examples to
    determine the parameters for dtw and the actual pattern shape in order to
    detect the parts of the time series that have this kind of shape.
    """
    # Infer parameters from patterns
    # Transformation
    # Dynamic time warping detection
    # TODO: implement; the empty frame below keeps the stub runnable and
    # matches the pd.DataFrame return type the caller documents.
    times_spks = pd.DataFrame()
    return times_spks
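# --- Illustrative sketch (not the module's implementation) ---
# A minimal dynamic-time-warping distance between two 1-D sequences, assuming
# a squared-difference local cost; shown only to clarify the core matching
# step that dtw_based_detection would build on.
def _dtw_distance_sketch(a, b):
    import numpy as np
    n, m = len(a), len(b)
    acc = np.full((n + 1, m + 1), np.inf)
    acc[0, 0] = 0.0
    for i in range(1, n + 1):
        for j in range(1, m + 1):
            cost = (a[i - 1] - b[j - 1]) ** 2
            # extend the cheapest of the three admissible warping moves
            acc[i, j] = cost + min(acc[i - 1, j], acc[i, j - 1], acc[i - 1, j - 1])
    return acc[n, m]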
| 24.173077 | 77 | 0.682578 |
7da1fc50cc66eebf39e17f406c23e0d25c18afae | 291 | py | Python | docs/examples/models_signature_custom_init.py | jasujm/pydantic | cc1cb4826c74ac5b651ef2d80c3478428a9950ca | [
"MIT"
] | 6 | 2021-08-11T11:37:59.000Z | 2021-11-12T01:33:11.000Z | docs/examples/models_signature_custom_init.py | jasujm/pydantic | cc1cb4826c74ac5b651ef2d80c3478428a9950ca | [
"MIT"
] | 197 | 2020-08-31T06:20:39.000Z | 2022-03-29T10:04:22.000Z | docs/examples/models_signature_custom_init.py | jasujm/pydantic | cc1cb4826c74ac5b651ef2d80c3478428a9950ca | [
"MIT"
] | 2 | 2021-11-23T16:28:21.000Z | 2021-11-23T16:28:33.000Z | import inspect
from pydantic import BaseModel
class MyModel(BaseModel):
id: int
info: str = 'Foo'
def __init__(self, id: int = 1, *, bar: str, **data) -> None:
"""My custom init!"""
super().__init__(id=id, bar=bar, **data)
print(inspect.signature(MyModel))
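# Expected output (a sketch; assuming pydantic derives the signature from the
# custom __init__ plus the remaining model fields):
#   (id: int = 1, *, bar: str, info: str = 'Foo') -> None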
| 18.1875 | 65 | 0.611684 |
007fc6df764d4a621202600a8b4bcc52e7ac38be | 445 | py | Python | SimGeneral/MixingModule/test/read_Input_PCFcfg.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | SimGeneral/MixingModule/test/read_Input_PCFcfg.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | SimGeneral/MixingModule/test/read_Input_PCFcfg.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | import FWCore.ParameterSet.Config as cms
process = cms.Process("PROInputA")
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring('file:/tmp/ebecheva/PCFwriter2.root')
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.Analyzer = cms.EDAnalyzer("InputAnalyzer",
dataStep2 = cms.bool(True),
collPCF = cms.InputTag("CFWriter")
)
process.p = cms.Path(process.Analyzer)
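# Run with the standard CMSSW driver (assumption: a CMSSW environment is set up):
#   cmsRun read_Input_PCFcfg.py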
| 22.25 | 74 | 0.723596 |
bc89576ff01cbc78729d572ef4081fe88e17e673 | 5,529 | py | Python | azure-mgmt-iothub/azure/mgmt/iothub/models/iot_hub_properties_py3.py | NMijat1024/azure-sdk-for-python | c49e1d6d797dceaca81813cafb1a486d67185182 | [
"MIT"
] | 1 | 2018-07-23T08:59:24.000Z | 2018-07-23T08:59:24.000Z | azure-mgmt-iothub/azure/mgmt/iothub/models/iot_hub_properties_py3.py | NMijat1024/azure-sdk-for-python | c49e1d6d797dceaca81813cafb1a486d67185182 | [
"MIT"
] | 1 | 2021-06-02T00:24:51.000Z | 2021-06-02T00:24:51.000Z | azure-mgmt-iothub/azure/mgmt/iothub/models/iot_hub_properties_py3.py | NMijat1024/azure-sdk-for-python | c49e1d6d797dceaca81813cafb1a486d67185182 | [
"MIT"
] | 1 | 2020-07-25T20:36:02.000Z | 2020-07-25T20:36:02.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class IotHubProperties(Model):
"""The properties of an IoT hub.
Variables are only populated by the server, and will be ignored when
sending a request.
:param authorization_policies: The shared access policies you can use to
secure a connection to the IoT hub.
:type authorization_policies:
list[~azure.mgmt.iothub.models.SharedAccessSignatureAuthorizationRule]
:param ip_filter_rules: The IP filter rules.
:type ip_filter_rules: list[~azure.mgmt.iothub.models.IpFilterRule]
:ivar provisioning_state: The provisioning state.
:vartype provisioning_state: str
    :ivar state: The hub state.
:vartype state: str
:ivar host_name: The name of the host.
:vartype host_name: str
:param event_hub_endpoints: The Event Hub-compatible endpoint properties.
The possible keys to this dictionary are events and
operationsMonitoringEvents. Both of these keys have to be present in the
dictionary while making create or update calls for the IoT hub.
:type event_hub_endpoints: dict[str,
~azure.mgmt.iothub.models.EventHubProperties]
:param routing:
:type routing: ~azure.mgmt.iothub.models.RoutingProperties
:param storage_endpoints: The list of Azure Storage endpoints where you
can upload files. Currently you can configure only one Azure Storage
account and that MUST have its key as $default. Specifying more than one
storage account causes an error to be thrown. Not specifying a value for
this property when the enableFileUploadNotifications property is set to
True, causes an error to be thrown.
:type storage_endpoints: dict[str,
~azure.mgmt.iothub.models.StorageEndpointProperties]
:param messaging_endpoints: The messaging endpoint properties for the file
upload notification queue.
:type messaging_endpoints: dict[str,
~azure.mgmt.iothub.models.MessagingEndpointProperties]
:param enable_file_upload_notifications: If True, file upload
notifications are enabled.
:type enable_file_upload_notifications: bool
:param cloud_to_device:
:type cloud_to_device: ~azure.mgmt.iothub.models.CloudToDeviceProperties
:param comments: IoT hub comments.
:type comments: str
:param operations_monitoring_properties:
:type operations_monitoring_properties:
~azure.mgmt.iothub.models.OperationsMonitoringProperties
:param features: The capabilities and features enabled for the IoT hub.
Possible values include: 'None', 'DeviceManagement'
:type features: str or ~azure.mgmt.iothub.models.Capabilities
"""
_validation = {
'provisioning_state': {'readonly': True},
'state': {'readonly': True},
'host_name': {'readonly': True},
}
_attribute_map = {
'authorization_policies': {'key': 'authorizationPolicies', 'type': '[SharedAccessSignatureAuthorizationRule]'},
'ip_filter_rules': {'key': 'ipFilterRules', 'type': '[IpFilterRule]'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'host_name': {'key': 'hostName', 'type': 'str'},
'event_hub_endpoints': {'key': 'eventHubEndpoints', 'type': '{EventHubProperties}'},
'routing': {'key': 'routing', 'type': 'RoutingProperties'},
'storage_endpoints': {'key': 'storageEndpoints', 'type': '{StorageEndpointProperties}'},
'messaging_endpoints': {'key': 'messagingEndpoints', 'type': '{MessagingEndpointProperties}'},
'enable_file_upload_notifications': {'key': 'enableFileUploadNotifications', 'type': 'bool'},
'cloud_to_device': {'key': 'cloudToDevice', 'type': 'CloudToDeviceProperties'},
'comments': {'key': 'comments', 'type': 'str'},
'operations_monitoring_properties': {'key': 'operationsMonitoringProperties', 'type': 'OperationsMonitoringProperties'},
'features': {'key': 'features', 'type': 'str'},
}
def __init__(self, *, authorization_policies=None, ip_filter_rules=None, event_hub_endpoints=None, routing=None, storage_endpoints=None, messaging_endpoints=None, enable_file_upload_notifications: bool=None, cloud_to_device=None, comments: str=None, operations_monitoring_properties=None, features=None, **kwargs) -> None:
super(IotHubProperties, self).__init__(**kwargs)
self.authorization_policies = authorization_policies
self.ip_filter_rules = ip_filter_rules
self.provisioning_state = None
self.state = None
self.host_name = None
self.event_hub_endpoints = event_hub_endpoints
self.routing = routing
self.storage_endpoints = storage_endpoints
self.messaging_endpoints = messaging_endpoints
self.enable_file_upload_notifications = enable_file_upload_notifications
self.cloud_to_device = cloud_to_device
self.comments = comments
self.operations_monitoring_properties = operations_monitoring_properties
self.features = features
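# --- Illustrative usage (not part of the generated SDK file) ---
# Keyword-only construction per the __init__ above; values are hypothetical:
#   props = IotHubProperties(enable_file_upload_notifications=True,
#                            comments="example hub")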
| 51.672897 | 326 | 0.70302 |
7a6f00ad1ed295042614f8ffaa9f2eb61bc6b8b5 | 214 | py | Python | 5-smallest +ve.py | svamja/learning_assignments | fa1b2279250a6a7f9dbfee86fdacfe8609e6ad7d | [
"MIT"
] | null | null | null | 5-smallest +ve.py | svamja/learning_assignments | fa1b2279250a6a7f9dbfee86fdacfe8609e6ad7d | [
"MIT"
] | null | null | null | 5-smallest +ve.py | svamja/learning_assignments | fa1b2279250a6a7f9dbfee86fdacfe8609e6ad7d | [
"MIT"
] | null | null | null | def is_divisible(n):
for divisor in range(2, 21):
if n % divisor != 0:
return False
return True
number = 1
while not is_divisible(number):
number += 1
print(number)
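# --- Illustrative alternative (not in the original) ---
# The loop above brute-forces the smallest number divisible by 2..20; the same
# value is the least common multiple of 1..20 (assumes Python 3.9+ for math.lcm):
import math
print(math.lcm(*range(1, 21)))  # 232792560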
| 17.833333 | 33 | 0.556075 |
2d112de2e42886eae4c49ecd1fd828431a61718a | 38,726 | py | Python | .venv/Lib/site-packages/pyparsing/helpers.py | mehfoos/Nuclear-Fusion | 9540997fcfc0768b9e2ce16e0a091ea38997d32a | [
"BSD-3-Clause"
] | null | null | null | .venv/Lib/site-packages/pyparsing/helpers.py | mehfoos/Nuclear-Fusion | 9540997fcfc0768b9e2ce16e0a091ea38997d32a | [
"BSD-3-Clause"
] | null | null | null | .venv/Lib/site-packages/pyparsing/helpers.py | mehfoos/Nuclear-Fusion | 9540997fcfc0768b9e2ce16e0a091ea38997d32a | [
"BSD-3-Clause"
] | null | null | null | # helpers.py
import html.entities
from . import __diag__
from .core import *
from .util import _bslash, _flatten, _escapeRegexRangeChars
#
# global helpers
#
def delimited_list(
expr: ParserElement,
delim: Union[str, ParserElement] = ",",
combine: bool = False,
*,
allow_trailing_delim: bool = False,
) -> ParserElement:
"""Helper to define a delimited list of expressions - the delimiter
defaults to ','. By default, the list elements and delimiters can
have intervening whitespace, and comments, but this can be
overridden by passing ``combine=True`` in the constructor. If
``combine`` is set to ``True``, the matching tokens are
returned as a single token string, with the delimiters included;
otherwise, the matching tokens are returned as a list of tokens,
with the delimiters suppressed.
If ``allow_trailing_delim`` is set to True, then the list may end with
a delimiter.
Example::
delimited_list(Word(alphas)).parse_string("aa,bb,cc") # -> ['aa', 'bb', 'cc']
delimited_list(Word(hexnums), delim=':', combine=True).parse_string("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
"""
dlName = "{expr} [{delim} {expr}]...{end}".format(
expr=str(expr),
delim=str(delim),
end=" [{}]".format(str(delim)) if allow_trailing_delim else "",
)
if not combine:
delim = Suppress(delim)
delimited_list_expr = expr + ZeroOrMore(delim + expr)
if allow_trailing_delim:
delimited_list_expr += Opt(delim)
if combine:
return Combine(delimited_list_expr).set_name(dlName)
else:
return delimited_list_expr.set_name(dlName)
def counted_array(
expr: ParserElement,
int_expr: OptionalType[ParserElement] = None,
*,
intExpr: OptionalType[ParserElement] = None,
) -> ParserElement:
"""Helper to define a counted list of expressions.
This helper defines a pattern of the form::
integer expr expr expr...
where the leading integer tells how many expr expressions follow.
The matched tokens returns the array of expr tokens as a list - the
leading count token is suppressed.
If ``int_expr`` is specified, it should be a pyparsing expression
that produces an integer value.
Example::
counted_array(Word(alphas)).parse_string('2 ab cd ef') # -> ['ab', 'cd']
# in this parser, the leading integer value is given in binary,
# '10' indicating that 2 values are in the array
binary_constant = Word('01').set_parse_action(lambda t: int(t[0], 2))
counted_array(Word(alphas), int_expr=binary_constant).parse_string('10 ab cd ef') # -> ['ab', 'cd']
# if other fields must be parsed after the count but before the
# list items, give the fields results names and they will
# be preserved in the returned ParseResults:
count_with_metadata = integer + Word(alphas)("type")
typed_array = counted_array(Word(alphanums), int_expr=count_with_metadata)("items")
result = typed_array.parse_string("3 bool True True False")
print(result.dump())
# prints
# ['True', 'True', 'False']
# - items: ['True', 'True', 'False']
# - type: 'bool'
"""
intExpr = intExpr or int_expr
array_expr = Forward()
def count_field_parse_action(s, l, t):
nonlocal array_expr
n = t[0]
array_expr <<= (expr * n) if n else Empty()
# clear list contents, but keep any named results
del t[:]
if intExpr is None:
intExpr = Word(nums).set_parse_action(lambda t: int(t[0]))
else:
intExpr = intExpr.copy()
intExpr.set_name("arrayLen")
intExpr.add_parse_action(count_field_parse_action, call_during_try=True)
return (intExpr + array_expr).set_name("(len) " + str(expr) + "...")
def match_previous_literal(expr: ParserElement) -> ParserElement:
"""Helper to define an expression that is indirectly defined from
the tokens matched in a previous expression, that is, it looks for
a 'repeat' of a previous expression. For example::
first = Word(nums)
second = match_previous_literal(first)
match_expr = first + ":" + second
will match ``"1:1"``, but not ``"1:2"``. Because this
matches a previous literal, will also match the leading
``"1:1"`` in ``"1:10"``. If this is not desired, use
:class:`match_previous_expr`. Do *not* use with packrat parsing
enabled.
"""
rep = Forward()
def copy_token_to_repeater(s, l, t):
if t:
if len(t) == 1:
rep << t[0]
else:
# flatten t tokens
tflat = _flatten(t.as_list())
rep << And(Literal(tt) for tt in tflat)
else:
rep << Empty()
expr.add_parse_action(copy_token_to_repeater, callDuringTry=True)
rep.set_name("(prev) " + str(expr))
return rep
def match_previous_expr(expr: ParserElement) -> ParserElement:
"""Helper to define an expression that is indirectly defined from
the tokens matched in a previous expression, that is, it looks for
a 'repeat' of a previous expression. For example::
first = Word(nums)
second = match_previous_expr(first)
match_expr = first + ":" + second
will match ``"1:1"``, but not ``"1:2"``. Because this
matches by expressions, will *not* match the leading ``"1:1"``
in ``"1:10"``; the expressions are evaluated first, and then
compared, so ``"1"`` is compared with ``"10"``. Do *not* use
with packrat parsing enabled.
"""
rep = Forward()
e2 = expr.copy()
rep <<= e2
def copy_token_to_repeater(s, l, t):
matchTokens = _flatten(t.as_list())
def must_match_these_tokens(s, l, t):
theseTokens = _flatten(t.as_list())
if theseTokens != matchTokens:
raise ParseException("", 0, "")
rep.set_parse_action(must_match_these_tokens, callDuringTry=True)
expr.add_parse_action(copy_token_to_repeater, callDuringTry=True)
rep.set_name("(prev) " + str(expr))
return rep
def one_of(
strs: Union[IterableType[str], str],
caseless: bool = False,
use_regex: bool = True,
as_keyword: bool = False,
*,
useRegex: bool = True,
asKeyword: bool = False,
) -> ParserElement:
"""Helper to quickly define a set of alternative :class:`Literal` s,
and makes sure to do longest-first testing when there is a conflict,
regardless of the input order, but returns
a :class:`MatchFirst` for best performance.
Parameters:
- ``strs`` - a string of space-delimited literals, or a collection of
string literals
- ``caseless`` - treat all literals as caseless - (default= ``False``)
- ``use_regex`` - as an optimization, will
generate a :class:`Regex` object; otherwise, will generate
a :class:`MatchFirst` object (if ``caseless=True`` or ``asKeyword=True``, or if
creating a :class:`Regex` raises an exception) - (default= ``True``)
- ``as_keyword`` - enforce :class:`Keyword`-style matching on the
generated expressions - (default= ``False``)
- ``asKeyword`` and ``useRegex`` are retained for pre-PEP8 compatibility,
but will be removed in a future release
Example::
comp_oper = one_of("< = > <= >= !=")
var = Word(alphas)
number = Word(nums)
term = var | number
comparison_expr = term + comp_oper + term
print(comparison_expr.search_string("B = 12 AA=23 B<=AA AA>12"))
prints::
[['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
"""
asKeyword = asKeyword or as_keyword
useRegex = useRegex and use_regex
if (
isinstance(caseless, str_type)
and __diag__.warn_on_multiple_string_args_to_oneof
):
warnings.warn(
"More than one string argument passed to one_of, pass"
" choices as a list or space-delimited string",
stacklevel=2,
)
if caseless:
isequal = lambda a, b: a.upper() == b.upper()
masks = lambda a, b: b.upper().startswith(a.upper())
parseElementClass = CaselessKeyword if asKeyword else CaselessLiteral
else:
isequal = lambda a, b: a == b
masks = lambda a, b: b.startswith(a)
parseElementClass = Keyword if asKeyword else Literal
symbols = []
if isinstance(strs, str_type):
symbols = strs.split()
elif isinstance(strs, Iterable):
symbols = list(strs)
else:
raise TypeError("Invalid argument to one_of, expected string or iterable")
if not symbols:
return NoMatch()
if not asKeyword:
# if not producing keywords, need to reorder to take care to avoid masking
# longer choices with shorter ones
i = 0
while i < len(symbols) - 1:
cur = symbols[i]
for j, other in enumerate(symbols[i + 1 :]):
if isequal(other, cur):
del symbols[i + j + 1]
break
elif masks(cur, other):
del symbols[i + j + 1]
symbols.insert(i, other)
break
else:
i += 1
if not (caseless or asKeyword) and useRegex:
# ~ print(strs, "->", "|".join([_escapeRegexChars(sym) for sym in symbols]))
try:
if len(symbols) == len("".join(symbols)):
return Regex(
"[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols)
).set_name(" | ".join(symbols))
else:
return Regex("|".join(re.escape(sym) for sym in symbols)).set_name(
" | ".join(symbols)
)
except sre_constants.error:
warnings.warn(
"Exception creating Regex for one_of, building MatchFirst", stacklevel=2
)
# last resort, just use MatchFirst
return MatchFirst(parseElementClass(sym) for sym in symbols).set_name(
" | ".join(symbols)
)
def dict_of(key: ParserElement, value: ParserElement) -> ParserElement:
"""Helper to easily and clearly define a dictionary by specifying
the respective patterns for the key and value. Takes care of
defining the :class:`Dict`, :class:`ZeroOrMore`, and
:class:`Group` tokens in the proper order. The key pattern
can include delimiting markers or punctuation, as long as they are
suppressed, thereby leaving the significant key text. The value
pattern can include named results, so that the :class:`Dict` results
can include named token fields.
Example::
text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
attr_expr = (label + Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join))
print(OneOrMore(attr_expr).parse_string(text).dump())
attr_label = label
attr_value = Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join)
# similar to Dict, but simpler call format
result = dict_of(attr_label, attr_value).parse_string(text)
print(result.dump())
print(result['shape'])
print(result.shape) # object attribute access works too
print(result.as_dict())
prints::
[['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
- color: light blue
- posn: upper left
- shape: SQUARE
- texture: burlap
SQUARE
SQUARE
{'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
"""
return Dict(OneOrMore(Group(key + value)))
def original_text_for(
expr: ParserElement, as_string: bool = True, *, asString: bool = True
) -> ParserElement:
"""Helper to return the original, untokenized text for a given
expression. Useful to restore the parsed fields of an HTML start
tag into the raw tag text itself, or to revert separate tokens with
intervening whitespace back to the original matching input text. By
    default, returns a string containing the original parsed text.
If the optional ``as_string`` argument is passed as
``False``, then the return value is
a :class:`ParseResults` containing any results names that
were originally matched, and a single token containing the original
matched text from the input string. So if the expression passed to
:class:`original_text_for` contains expressions with defined
results names, you must set ``as_string`` to ``False`` if you
want to preserve those results name values.
The ``asString`` pre-PEP8 argument is retained for compatibility,
but will be removed in a future release.
Example::
src = "this is test <b> bold <i>text</i> </b> normal text "
for tag in ("b", "i"):
opener, closer = make_html_tags(tag)
patt = original_text_for(opener + SkipTo(closer) + closer)
print(patt.search_string(src)[0])
prints::
['<b> bold <i>text</i> </b>']
['<i>text</i>']
"""
asString = asString and as_string
locMarker = Empty().set_parse_action(lambda s, loc, t: loc)
endlocMarker = locMarker.copy()
endlocMarker.callPreparse = False
matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
if asString:
extractText = lambda s, l, t: s[t._original_start : t._original_end]
else:
def extractText(s, l, t):
t[:] = [s[t.pop("_original_start") : t.pop("_original_end")]]
matchExpr.set_parse_action(extractText)
matchExpr.ignoreExprs = expr.ignoreExprs
return matchExpr
def ungroup(expr: ParserElement) -> ParserElement:
"""Helper to undo pyparsing's default grouping of And expressions,
even if all but one are non-empty.
"""
return TokenConverter(expr).add_parse_action(lambda t: t[0])
def locatedExpr(expr: ParserElement) -> ParserElement:
"""
(DEPRECATED - future code should use the Located class)
Helper to decorate a returned token with its starting and ending
locations in the input string.
This helper adds the following results names:
- ``locn_start`` - location where matched expression begins
- ``locn_end`` - location where matched expression ends
- ``value`` - the actual parsed results
    Be careful if the input text contains ``<TAB>`` characters; you
may want to call :class:`ParserElement.parseWithTabs`
Example::
wd = Word(alphas)
for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
print(match)
prints::
[[0, 'ljsdf', 5]]
[[8, 'lksdjjf', 15]]
[[18, 'lkkjj', 23]]
"""
locator = Empty().set_parse_action(lambda ss, ll, tt: ll)
return Group(
locator("locn_start")
+ expr("value")
+ locator.copy().leaveWhitespace()("locn_end")
)
def nested_expr(
opener: Union[str, ParserElement] = "(",
closer: Union[str, ParserElement] = ")",
content: OptionalType[ParserElement] = None,
ignore_expr: ParserElement = quoted_string(),
*,
ignoreExpr: ParserElement = quoted_string(),
) -> ParserElement:
"""Helper method for defining nested lists enclosed in opening and
closing delimiters (``"("`` and ``")"`` are the default).
Parameters:
- ``opener`` - opening character for a nested list
(default= ``"("``); can also be a pyparsing expression
- ``closer`` - closing character for a nested list
(default= ``")"``); can also be a pyparsing expression
- ``content`` - expression for items within the nested lists
(default= ``None``)
- ``ignore_expr`` - expression for ignoring opening and closing delimiters
(default= :class:`quoted_string`)
- ``ignoreExpr`` - this pre-PEP8 argument is retained for compatibility
but will be removed in a future release
If an expression is not provided for the content argument, the
nested expression will capture all whitespace-delimited content
between delimiters as a list of separate values.
Use the ``ignore_expr`` argument to define expressions that may
contain opening or closing characters that should not be treated as
opening or closing characters for nesting, such as quoted_string or
a comment expression. Specify multiple expressions using an
:class:`Or` or :class:`MatchFirst`. The default is
:class:`quoted_string`, but if no expressions are to be ignored, then
pass ``None`` for this argument.
Example::
data_type = one_of("void int short long char float double")
decl_data_type = Combine(data_type + Opt(Word('*')))
ident = Word(alphas+'_', alphanums+'_')
number = pyparsing_common.number
arg = Group(decl_data_type + ident)
LPAR, RPAR = map(Suppress, "()")
code_body = nested_expr('{', '}', ignore_expr=(quoted_string | c_style_comment))
c_function = (decl_data_type("type")
+ ident("name")
+ LPAR + Opt(delimited_list(arg), [])("args") + RPAR
+ code_body("body"))
c_function.ignore(c_style_comment)
source_code = '''
int is_odd(int x) {
return (x%2);
}
int dec_to_hex(char hchar) {
if (hchar >= '0' && hchar <= '9') {
return (ord(hchar)-ord('0'));
} else {
return (10+ord(hchar)-ord('A'));
}
}
'''
for func in c_function.search_string(source_code):
print("%(name)s (%(type)s) args: %(args)s" % func)
prints::
is_odd (int) args: [['int', 'x']]
dec_to_hex (int) args: [['char', 'hchar']]
"""
if ignoreExpr != ignore_expr:
ignoreExpr = ignore_expr if ignoreExpr == quoted_string() else ignoreExpr
if opener == closer:
raise ValueError("opening and closing strings cannot be the same")
if content is None:
if isinstance(opener, str_type) and isinstance(closer, str_type):
if len(opener) == 1 and len(closer) == 1:
if ignoreExpr is not None:
content = Combine(
OneOrMore(
~ignoreExpr
+ CharsNotIn(
opener + closer + ParserElement.DEFAULT_WHITE_CHARS,
exact=1,
)
)
).set_parse_action(lambda t: t[0].strip())
else:
content = empty.copy() + CharsNotIn(
opener + closer + ParserElement.DEFAULT_WHITE_CHARS
).set_parse_action(lambda t: t[0].strip())
else:
if ignoreExpr is not None:
content = Combine(
OneOrMore(
~ignoreExpr
+ ~Literal(opener)
+ ~Literal(closer)
+ CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)
)
).set_parse_action(lambda t: t[0].strip())
else:
content = Combine(
OneOrMore(
~Literal(opener)
+ ~Literal(closer)
+ CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)
)
).set_parse_action(lambda t: t[0].strip())
else:
raise ValueError(
"opening and closing arguments must be strings if no content expression is given"
)
ret = Forward()
if ignoreExpr is not None:
ret <<= Group(
Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer)
)
else:
ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer))
ret.set_name("nested %s%s expression" % (opener, closer))
return ret
def _makeTags(tagStr, xml, suppress_LT=Suppress("<"), suppress_GT=Suppress(">")):
"""Internal helper to construct opening and closing tag expressions, given a tag name"""
if isinstance(tagStr, str_type):
resname = tagStr
tagStr = Keyword(tagStr, caseless=not xml)
else:
resname = tagStr.name
tagAttrName = Word(alphas, alphanums + "_-:")
if xml:
tagAttrValue = dbl_quoted_string.copy().set_parse_action(remove_quotes)
openTag = (
suppress_LT
+ tagStr("tag")
+ Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue)))
+ Opt("/", default=[False])("empty").set_parse_action(
lambda s, l, t: t[0] == "/"
)
+ suppress_GT
)
else:
tagAttrValue = quoted_string.copy().set_parse_action(remove_quotes) | Word(
printables, exclude_chars=">"
)
openTag = (
suppress_LT
+ tagStr("tag")
+ Dict(
ZeroOrMore(
Group(
tagAttrName.set_parse_action(lambda t: t[0].lower())
+ Opt(Suppress("=") + tagAttrValue)
)
)
)
+ Opt("/", default=[False])("empty").set_parse_action(
lambda s, l, t: t[0] == "/"
)
+ suppress_GT
)
closeTag = Combine(Literal("</") + tagStr + ">", adjacent=False)
openTag.set_name("<%s>" % resname)
# add start<tagname> results name in parse action now that ungrouped names are not reported at two levels
openTag.add_parse_action(
lambda t: t.__setitem__(
"start" + "".join(resname.replace(":", " ").title().split()), t.copy()
)
)
closeTag = closeTag(
"end" + "".join(resname.replace(":", " ").title().split())
).set_name("</%s>" % resname)
openTag.tag = resname
closeTag.tag = resname
openTag.tag_body = SkipTo(closeTag())
return openTag, closeTag
def make_html_tags(
tag_str: Union[str, ParserElement]
) -> Tuple[ParserElement, ParserElement]:
"""Helper to construct opening and closing tag expressions for HTML,
given a tag name. Matches tags in either upper or lower case,
attributes with namespaces and with quoted or unquoted values.
Example::
text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>'
# make_html_tags returns pyparsing expressions for the opening and
# closing tags as a 2-tuple
a, a_end = make_html_tags("A")
link_expr = a + SkipTo(a_end)("link_text") + a_end
for link in link_expr.search_string(text):
# attributes in the <A> tag (like "href" shown here) are
# also accessible as named results
print(link.link_text, '->', link.href)
prints::
pyparsing -> https://github.com/pyparsing/pyparsing/wiki
"""
return _makeTags(tag_str, False)
def make_xml_tags(
tag_str: Union[str, ParserElement]
) -> Tuple[ParserElement, ParserElement]:
"""Helper to construct opening and closing tag expressions for XML,
given a tag name. Matches tags only in the given upper/lower case.
Example: similar to :class:`make_html_tags`
"""
return _makeTags(tag_str, True)
any_open_tag, any_close_tag = make_html_tags(
Word(alphas, alphanums + "_:").set_name("any tag")
)
_htmlEntityMap = {k.rstrip(";"): v for k, v in html.entities.html5.items()}
common_html_entity = Regex("&(?P<entity>" + "|".join(_htmlEntityMap) + ");").set_name(
"common HTML entity"
)
def replace_html_entity(t):
"""Helper parser action to replace common HTML entities with their special characters"""
return _htmlEntityMap.get(t.entity)
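# Usage sketch (not in the original; assumes the pyparsing 3 transform_string API):
#   common_html_entity.set_parse_action(replace_html_entity)
#   print(common_html_entity.transform_string("x &lt; y &amp;&amp; y &gt; 0"))
#   # -> "x < y && y > 0"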
class OpAssoc(Enum):
LEFT = 1
RIGHT = 2
InfixNotationOperatorArgType = Union[
ParserElement, str, Tuple[Union[ParserElement, str], Union[ParserElement, str]]
]
InfixNotationOperatorSpec = Union[
Tuple[
InfixNotationOperatorArgType,
int,
OpAssoc,
OptionalType[ParseAction],
],
Tuple[
InfixNotationOperatorArgType,
int,
OpAssoc,
],
]
def infix_notation(
base_expr: ParserElement,
op_list: List[InfixNotationOperatorSpec],
lpar: Union[str, ParserElement] = Suppress("("),
rpar: Union[str, ParserElement] = Suppress(")"),
) -> ParserElement:
"""Helper method for constructing grammars of expressions made up of
operators working in a precedence hierarchy. Operators may be unary
or binary, left- or right-associative. Parse actions can also be
attached to operator expressions. The generated parser will also
recognize the use of parentheses to override operator precedences
(see example below).
Note: if you define a deep operator list, you may see performance
issues when using infix_notation. See
:class:`ParserElement.enable_packrat` for a mechanism to potentially
improve your parser performance.
Parameters:
- ``base_expr`` - expression representing the most basic operand to
be used in the expression
- ``op_list`` - list of tuples, one for each operator precedence level
in the expression grammar; each tuple is of the form ``(op_expr,
num_operands, right_left_assoc, (optional)parse_action)``, where:
- ``op_expr`` is the pyparsing expression for the operator; may also
be a string, which will be converted to a Literal; if ``num_operands``
is 3, ``op_expr`` is a tuple of two expressions, for the two
operators separating the 3 terms
- ``num_operands`` is the number of terms for this operator (must be 1,
2, or 3)
- ``right_left_assoc`` is the indicator whether the operator is right
or left associative, using the pyparsing-defined constants
``OpAssoc.RIGHT`` and ``OpAssoc.LEFT``.
- ``parse_action`` is the parse action to be associated with
expressions matching this operator expression (the parse action
tuple member may be omitted); if the parse action is passed
a tuple or list of functions, this is equivalent to calling
``set_parse_action(*fn)``
(:class:`ParserElement.set_parse_action`)
- ``lpar`` - expression for matching left-parentheses
(default= ``Suppress('(')``)
- ``rpar`` - expression for matching right-parentheses
(default= ``Suppress(')')``)
Example::
# simple example of four-function arithmetic with ints and
# variable names
integer = pyparsing_common.signed_integer
varname = pyparsing_common.identifier
arith_expr = infix_notation(integer | varname,
[
('-', 1, OpAssoc.RIGHT),
(one_of('* /'), 2, OpAssoc.LEFT),
(one_of('+ -'), 2, OpAssoc.LEFT),
])
arith_expr.run_tests('''
5+3*6
(5+3)*6
-2--11
''', full_dump=False)
prints::
5+3*6
[[5, '+', [3, '*', 6]]]
(5+3)*6
[[[5, '+', 3], '*', 6]]
-2--11
[[['-', 2], '-', ['-', 11]]]
"""
# captive version of FollowedBy that does not do parse actions or capture results names
class _FB(FollowedBy):
def parseImpl(self, instring, loc, doActions=True):
self.expr.try_parse(instring, loc)
return loc, []
_FB.__name__ = "FollowedBy>"
ret = Forward()
lpar = Suppress(lpar)
rpar = Suppress(rpar)
lastExpr = base_expr | (lpar + ret + rpar)
for i, operDef in enumerate(op_list):
opExpr, arity, rightLeftAssoc, pa = (operDef + (None,))[:4]
if isinstance(opExpr, str_type):
opExpr = ParserElement._literalStringClass(opExpr)
if arity == 3:
if not isinstance(opExpr, (tuple, list)) or len(opExpr) != 2:
raise ValueError(
"if numterms=3, opExpr must be a tuple or list of two expressions"
)
opExpr1, opExpr2 = opExpr
term_name = "{}{} term".format(opExpr1, opExpr2)
else:
term_name = "{} term".format(opExpr)
if not 1 <= arity <= 3:
raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
if rightLeftAssoc not in (OpAssoc.LEFT, OpAssoc.RIGHT):
raise ValueError("operator must indicate right or left associativity")
thisExpr = Forward().set_name(term_name)
if rightLeftAssoc is OpAssoc.LEFT:
if arity == 1:
matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + opExpr[1, ...])
elif arity == 2:
if opExpr is not None:
matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group(
lastExpr + (opExpr + lastExpr)[1, ...]
)
else:
matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr[2, ...])
elif arity == 3:
matchExpr = _FB(
lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr
) + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr))
elif rightLeftAssoc is OpAssoc.RIGHT:
if arity == 1:
# try to avoid LR with this extra test
if not isinstance(opExpr, Opt):
opExpr = Opt(opExpr)
matchExpr = _FB(opExpr.expr + thisExpr) + Group(opExpr + thisExpr)
elif arity == 2:
if opExpr is not None:
matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group(
lastExpr + (opExpr + thisExpr)[1, ...]
)
else:
matchExpr = _FB(lastExpr + thisExpr) + Group(
lastExpr + thisExpr[1, ...]
)
elif arity == 3:
matchExpr = _FB(
lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr
) + Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr)
if pa:
if isinstance(pa, (tuple, list)):
matchExpr.set_parse_action(*pa)
else:
matchExpr.set_parse_action(pa)
thisExpr <<= (matchExpr | lastExpr).setName(term_name)
lastExpr = thisExpr
ret <<= lastExpr
return ret
def indentedBlock(blockStatementExpr, indentStack, indent=True, backup_stacks=[]):
"""
(DEPRECATED - use IndentedBlock class instead)
Helper method for defining space-delimited indentation blocks,
such as those used to define block statements in Python source code.
Parameters:
- ``blockStatementExpr`` - expression defining syntax of statement that
is repeated within the indented block
- ``indentStack`` - list created by caller to manage indentation stack
(multiple ``statementWithIndentedBlock`` expressions within a single
grammar should share a common ``indentStack``)
- ``indent`` - boolean indicating whether block must be indented beyond
the current level; set to ``False`` for block of left-most statements
(default= ``True``)
A valid block must contain at least one ``blockStatement``.
(Note that indentedBlock uses internal parse actions which make it
incompatible with packrat parsing.)
Example::
data = '''
def A(z):
A1
B = 100
G = A2
A2
A3
B
def BB(a,b,c):
BB1
def BBA():
bba1
bba2
bba3
C
D
def spam(x,y):
def eggs(z):
pass
'''
indentStack = [1]
stmt = Forward()
identifier = Word(alphas, alphanums)
funcDecl = ("def" + identifier + Group("(" + Opt(delimitedList(identifier)) + ")") + ":")
func_body = indentedBlock(stmt, indentStack)
funcDef = Group(funcDecl + func_body)
rvalue = Forward()
funcCall = Group(identifier + "(" + Opt(delimitedList(rvalue)) + ")")
rvalue << (funcCall | identifier | Word(nums))
assignment = Group(identifier + "=" + rvalue)
stmt << (funcDef | assignment | identifier)
module_body = OneOrMore(stmt)
parseTree = module_body.parseString(data)
parseTree.pprint()
prints::
[['def',
'A',
['(', 'z', ')'],
':',
[['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],
'B',
['def',
'BB',
['(', 'a', 'b', 'c', ')'],
':',
[['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]],
'C',
'D',
['def',
'spam',
['(', 'x', 'y', ')'],
':',
[[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]]
"""
backup_stacks.append(indentStack[:])
def reset_stack():
indentStack[:] = backup_stacks[-1]
def checkPeerIndent(s, l, t):
if l >= len(s):
return
curCol = col(l, s)
if curCol != indentStack[-1]:
if curCol > indentStack[-1]:
raise ParseException(s, l, "illegal nesting")
raise ParseException(s, l, "not a peer entry")
def checkSubIndent(s, l, t):
curCol = col(l, s)
if curCol > indentStack[-1]:
indentStack.append(curCol)
else:
raise ParseException(s, l, "not a subentry")
def checkUnindent(s, l, t):
if l >= len(s):
return
curCol = col(l, s)
if not (indentStack and curCol in indentStack):
raise ParseException(s, l, "not an unindent")
if curCol < indentStack[-1]:
indentStack.pop()
NL = OneOrMore(LineEnd().set_whitespace_chars("\t ").suppress())
INDENT = (Empty() + Empty().set_parse_action(checkSubIndent)).set_name("INDENT")
PEER = Empty().set_parse_action(checkPeerIndent).set_name("")
UNDENT = Empty().set_parse_action(checkUnindent).set_name("UNINDENT")
if indent:
smExpr = Group(
Opt(NL)
+ INDENT
+ OneOrMore(PEER + Group(blockStatementExpr) + Opt(NL))
+ UNDENT
)
else:
smExpr = Group(
Opt(NL)
+ OneOrMore(PEER + Group(blockStatementExpr) + Opt(NL))
+ Opt(UNDENT)
)
# add a parse action to remove backup_stack from list of backups
smExpr.add_parse_action(
lambda: backup_stacks.pop(-1) and None if backup_stacks else None
)
smExpr.set_fail_action(lambda a, b, c, d: reset_stack())
blockStatementExpr.ignore(_bslash + LineEnd())
return smExpr.set_name("indented block")
class IndentedBlock(ParseElementEnhance):
"""
Expression to match one or more expressions at a given indentation level.
Useful for parsing text where structure is implied by indentation (like Python source code).
"""
def __init__(self, expr: ParserElement, recursive: bool = True):
super().__init__(expr, savelist=True)
self._recursive = recursive
def parseImpl(self, instring, loc, doActions=True):
# advance parse position to non-whitespace by using an Empty()
# this should be the column to be used for all subsequent indented lines
anchor_loc = Empty().preParse(instring, loc)
# see if self.expr matches at the current location - if not it will raise an exception
# and no further work is necessary
self.expr.try_parse(instring, anchor_loc, doActions)
indent_col = col(anchor_loc, instring)
peer_parse_action = match_only_at_col(indent_col)
peer_detect_expr = Empty().add_parse_action(peer_parse_action)
inner_expr = Empty() + peer_detect_expr + self.expr
inner_expr.set_name(f"inner {hex(id(inner_expr))[-4:].upper()}@{indent_col}")
if self._recursive:
indent_parse_action = condition_as_parse_action(
lambda s, l, t, relative_to_col=indent_col: col(l, s) > relative_to_col
)
indent_expr = FollowedBy(self.expr).add_parse_action(indent_parse_action)
inner_expr += Opt(Group(indent_expr + self.copy()))
return OneOrMore(inner_expr).parseImpl(instring, loc, doActions)
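# Usage sketch for IndentedBlock (not in the original; assumes the class as
# defined above):
#   title = Word(alphas) + ":"
#   item = Word(alphanums)
#   outline = OneOrMore(Group(title + IndentedBlock(item)))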
# it's easy to get these comment structures wrong - they're very common, so may as well make them available
c_style_comment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + "*/").set_name(
"C style comment"
)
"Comment of the form ``/* ... */``"
html_comment = Regex(r"<!--[\s\S]*?-->").set_name("HTML comment")
"Comment of the form ``<!-- ... -->``"
rest_of_line = Regex(r".*").leave_whitespace().set_name("rest of line")
dbl_slash_comment = Regex(r"//(?:\\\n|[^\n])*").set_name("// comment")
"Comment of the form ``// ... (to end of line)``"
cpp_style_comment = Combine(
Regex(r"/\*(?:[^*]|\*(?!/))*") + "*/" | dbl_slash_comment
).set_name("C++ style comment")
"Comment of either form :class:`c_style_comment` or :class:`dbl_slash_comment`"
java_style_comment = cpp_style_comment
"Same as :class:`cpp_style_comment`"
python_style_comment = Regex(r"#.*").set_name("Python style comment")
"Comment of the form ``# ... (to end of line)``"
# build list of built-in expressions, for future reference if a global default value
# gets updated
_builtin_exprs = [v for v in vars().values() if isinstance(v, ParserElement)]
# pre-PEP8 compatible names
delimitedList = delimited_list
countedArray = counted_array
matchPreviousLiteral = match_previous_literal
matchPreviousExpr = match_previous_expr
oneOf = one_of
dictOf = dict_of
originalTextFor = original_text_for
nestedExpr = nested_expr
makeHTMLTags = make_html_tags
makeXMLTags = make_xml_tags
anyOpenTag, anyCloseTag = any_open_tag, any_close_tag
commonHTMLEntity = common_html_entity
replaceHTMLEntity = replace_html_entity
opAssoc = OpAssoc
infixNotation = infix_notation
cStyleComment = c_style_comment
htmlComment = html_comment
restOfLine = rest_of_line
dblSlashComment = dbl_slash_comment
cppStyleComment = cpp_style_comment
javaStyleComment = java_style_comment
pythonStyleComment = python_style_comment
| 36.024186 | 120 | 0.598668 |
ce6ebc630ce8df77202baa85d46a2c2fb1b8c57f | 3,392 | py | Python | tests/edit_data/data_management/test_row_create.py | DaeunYim/pgtoolsservice | b7e548718d797883027b2caee2d4722810b33c0f | [
"MIT"
] | 33 | 2019-05-27T13:04:35.000Z | 2022-03-17T13:33:05.000Z | tests/edit_data/data_management/test_row_create.py | DaeunYim/pgtoolsservice | b7e548718d797883027b2caee2d4722810b33c0f | [
"MIT"
] | 31 | 2019-06-10T01:55:47.000Z | 2022-03-09T07:27:49.000Z | tests/edit_data/data_management/test_row_create.py | DaeunYim/pgtoolsservice | b7e548718d797883027b2caee2d4722810b33c0f | [
"MIT"
] | 25 | 2019-05-13T18:39:24.000Z | 2021-11-16T03:07:33.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import unittest
from unittest import mock
from ossdbtoolsservice.edit_data.update_management import RowCreate, CellUpdate
from ossdbtoolsservice.query import create_result_set, ResultSetStorageType
from ossdbtoolsservice.query.contracts import DbColumn
from ossdbtoolsservice.edit_data.contracts import EditRowState
from ossdbtoolsservice.edit_data import EditTableMetadata
from tests.utils import MockCursor
class TestRowCreate(unittest.TestCase):
def setUp(self):
self._row_id = 1
self._rows = [("False"), ("True")]
self._result_set = create_result_set(ResultSetStorageType.IN_MEMORY, 0, 0)
self._cursor = MockCursor(self._rows, ['IsTrue'])
with mock.patch('ossdbtoolsservice.query.in_memory_result_set.get_columns_info', new=mock.Mock()):
self._result_set.read_result_to_end(self._cursor)
db_column = DbColumn()
db_column.data_type = 'bool'
db_column.column_name = 'IsValid'
db_column.is_updatable = True
self._result_set.columns_info = [db_column]
self._table_metadata = EditTableMetadata('public', 'TestTable', [])
self._row_create = RowCreate(self._row_id, self._result_set, self._table_metadata)
def test_set_cell_value_returns_edit_cell_response(self):
column_index = 0
new_value = 'True'
response = self._row_create.set_cell_value(column_index, new_value)
self.assertEqual(response.cell.display_value, new_value)
self.assertTrue(response.cell.is_dirty)
cell_update = self._row_create.new_cells[column_index]
self.assertEqual(cell_update.value, True)
def test_revert_cell_value(self):
column_index = 0
self._row_create.new_cells[column_index] = 'Some cell update'
self._row_create.revert_cell_value(column_index)
self.assertIsNone(self._row_create.new_cells[column_index])
def test_get_edit_row(self):
cached_row = []
edit_row = self._row_create.get_edit_row(cached_row)
self.assertEqual(edit_row.id, self._row_id)
self.assertEqual(edit_row.state, EditRowState.DIRTY_INSERT)
self.assertTrue(edit_row.cells[0].display_value == '')
def test_get_script(self):
column_index = 0
db_column = DbColumn()
db_column.data_type = 'bool'
new_cell_value = '0'
self._row_create.new_cells[column_index] = CellUpdate(db_column, new_cell_value)
script = self._row_create.get_script()
self.assertEqual(script.query_template, 'INSERT INTO "public"."TestTable"("IsValid") VALUES(%s) RETURNING *')
        self.assertEqual(script.query_paramters[0], False)
def test_apply_changes(self):
self.assertTrue(len(self._result_set.rows) == 2)
cursor = MockCursor([('True',)], ['IsTrue'])
self._row_create.apply_changes(cursor)
self.assertTrue(len(self._result_set.rows) == 3)
self.assertTrue(self._result_set.rows[2][0] == 'True')
if __name__ == '__main__':
unittest.main()
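# Run directly (python test_row_create.py) or via the unittest runner:
#   python -m unittest tests.edit_data.data_management.test_row_create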
| 35.333333 | 117 | 0.67158 |
baa196b0cef6d4241e1847d0cced1b7e59467133 | 3,754 | py | Python | clip/ViT-B-32-cpu/code/__torch__/torch/nn/modules/activation/___torch_mangle_9450.py | shawwn/CLIP | ba33b4eb956e6f507b4b39468b3b7336ac2260a1 | [
"MIT"
] | 6 | 2021-01-09T14:34:17.000Z | 2021-06-13T06:46:43.000Z | clip/ViT-B-32-cpu/code/__torch__/torch/nn/modules/activation/___torch_mangle_9450.py | shawwn/CLIP | ba33b4eb956e6f507b4b39468b3b7336ac2260a1 | [
"MIT"
] | null | null | null | clip/ViT-B-32-cpu/code/__torch__/torch/nn/modules/activation/___torch_mangle_9450.py | shawwn/CLIP | ba33b4eb956e6f507b4b39468b3b7336ac2260a1 | [
"MIT"
] | 2 | 2021-01-09T10:30:04.000Z | 2021-01-09T18:43:29.000Z | class MultiheadAttention(Module):
__parameters__ = ["in_proj_weight", "in_proj_bias", ]
__buffers__ = []
in_proj_weight : Tensor
in_proj_bias : Tensor
training : bool
out_proj : __torch__.torch.nn.modules.linear.___torch_mangle_9449._LinearWithBias
def forward(self: __torch__.torch.nn.modules.activation.___torch_mangle_9450.MultiheadAttention,
argument_1: Tensor) -> Tensor:
_0 = self.out_proj.bias
_1 = self.out_proj.weight
_2 = self.in_proj_bias
_3 = self.in_proj_weight
tgt_len = ops.prim.NumToTensor(torch.size(argument_1, 0))
_4 = int(tgt_len)
_5 = int(tgt_len)
bsz = ops.prim.NumToTensor(torch.size(argument_1, 1))
_6 = int(bsz)
embed_dim = ops.prim.NumToTensor(torch.size(argument_1, 2))
_7 = int(embed_dim)
head_dim = torch.floor_divide(embed_dim, CONSTANTS.c0)
_8 = int(head_dim)
_9 = int(head_dim)
_10 = int(head_dim)
output = torch.matmul(argument_1.float(), torch.t(_3).float())
_11 = torch.chunk(torch.add_(output, _2, alpha=1), 3, -1)
q, k, v, = _11
q0 = torch.mul(q, CONSTANTS.c1)
q1 = torch.contiguous(q0, memory_format=0)
_12 = [_5, int(torch.mul(bsz, CONSTANTS.c0)), _10]
q2 = torch.transpose(torch.view(q1, _12), 0, 1)
_13 = torch.contiguous(k, memory_format=0)
_14 = [-1, int(torch.mul(bsz, CONSTANTS.c0)), _9]
k0 = torch.transpose(torch.view(_13, _14), 0, 1)
_15 = torch.contiguous(v, memory_format=0)
_16 = [-1, int(torch.mul(bsz, CONSTANTS.c0)), _8]
v0 = torch.transpose(torch.view(_15, _16), 0, 1)
attn_output_weights = torch.bmm(q2, torch.transpose(k0, 1, 2))
input = torch.softmax(attn_output_weights, -1, None)
attn_output_weights0 = torch.dropout(input, 0., True)
attn_output = torch.bmm(attn_output_weights0, v0)
_17 = torch.contiguous(torch.transpose(attn_output, 0, 1), memory_format=0)
input0 = torch.view(_17, [_4, _6, _7])
output0 = torch.matmul(input0, torch.t(_1))
return torch.add_(output0, _0, alpha=1)
def forward1(self: __torch__.torch.nn.modules.activation.___torch_mangle_9450.MultiheadAttention,
argument_1: Tensor) -> Tensor:
_18 = self.out_proj.bias
_19 = self.out_proj.weight
_20 = self.in_proj_bias
_21 = self.in_proj_weight
tgt_len = ops.prim.NumToTensor(torch.size(argument_1, 0))
_22 = int(tgt_len)
_23 = int(tgt_len)
bsz = ops.prim.NumToTensor(torch.size(argument_1, 1))
_24 = int(bsz)
embed_dim = ops.prim.NumToTensor(torch.size(argument_1, 2))
_25 = int(embed_dim)
head_dim = torch.floor_divide(embed_dim, CONSTANTS.c0)
_26 = int(head_dim)
_27 = int(head_dim)
_28 = int(head_dim)
output = torch.matmul(argument_1, torch.t(_21))
_29 = torch.chunk(torch.add_(output, _20, alpha=1), 3, -1)
q, k, v, = _29
q3 = torch.mul(q, CONSTANTS.c1)
q4 = torch.contiguous(q3, memory_format=0)
_30 = [_23, int(torch.mul(bsz, CONSTANTS.c0)), _28]
q5 = torch.transpose(torch.view(q4, _30), 0, 1)
_31 = torch.contiguous(k, memory_format=0)
_32 = [-1, int(torch.mul(bsz, CONSTANTS.c0)), _27]
k1 = torch.transpose(torch.view(_31, _32), 0, 1)
_33 = torch.contiguous(v, memory_format=0)
_34 = [-1, int(torch.mul(bsz, CONSTANTS.c0)), _26]
v1 = torch.transpose(torch.view(_33, _34), 0, 1)
attn_output_weights = torch.bmm(q5, torch.transpose(k1, 1, 2))
input = torch.softmax(attn_output_weights, -1, None)
attn_output_weights1 = torch.dropout(input, 0., True)
attn_output = torch.bmm(attn_output_weights1, v1)
_35 = torch.contiguous(torch.transpose(attn_output, 0, 1), memory_format=0)
input1 = torch.view(_35, [_22, _24, _25])
output1 = torch.matmul(input1, torch.t(_19))
return torch.add_(output1, _18, alpha=1)
| 44.690476 | 99 | 0.677944 |
4fb6ee8e0ad7c316cae9dae3c0f7669096d93038 | 2,339 | py | Python | Basenji/dataloader.py | jeffmylife/models | 30f398518ea1709245a9de445fcff8071528e400 | [
"MIT"
] | null | null | null | Basenji/dataloader.py | jeffmylife/models | 30f398518ea1709245a9de445fcff8071528e400 | [
"MIT"
] | null | null | null | Basenji/dataloader.py | jeffmylife/models | 30f398518ea1709245a9de445fcff8071528e400 | [
"MIT"
] | null | null | null | """Basenji dataloader
"""
# python2, 3 compatibility
from __future__ import absolute_import, division, print_function
import numpy as np
import pandas as pd
import pybedtools
from pybedtools import BedTool
from genomelake.extractors import FastaExtractor
from kipoi.data import Dataset
from kipoi.metadata import GenomicRanges
import linecache
# --------------------------------------------
class BedToolLinecache(BedTool):
"""Faster BedTool accessor by Ziga Avsec
Normal BedTools loops through the whole file to get the
line of interest. Hence the access it o(n)
Note: this might load the whole bedfile into memory
"""
def __getitem__(self, idx):
line = linecache.getline(self.fn, idx + 1)
return pybedtools.create_interval_from_list(line.strip().split("\t"))
class SeqDataset(Dataset):
"""
Args:
intervals_file: bed3 file containing intervals
fasta_file: file path; Genome sequence
target_file: file path; path to the targets in the csv format
"""
SEQ_WIDTH = 131072
def __init__(self, intervals_file, fasta_file,
use_linecache=True):
# intervals
if use_linecache:
self.bt = BedToolLinecache(intervals_file)
else:
self.bt = BedTool(intervals_file)
self.fasta_file = fasta_file
self.fasta_extractor = None
if len(self.bt) % 2 == 1:
raise ValueError("Basenji strictly requires batch_size=2," +
" hence the bed file should have an od length")
def __len__(self):
return len(self.bt)
def __getitem__(self, idx):
if self.fasta_extractor is None:
self.fasta_extractor = FastaExtractor(self.fasta_file)
interval = self.bt[idx]
if interval.stop - interval.start != self.SEQ_WIDTH:
raise ValueError("Expected the interval to be {0} wide. Recieved stop - start = {1}".
format(self.SEQ_WIDTH, interval.stop - interval.start))
# Run the fasta extractor
seq = np.squeeze(self.fasta_extractor([interval]), axis=0)
return {
"inputs": seq,
"targets": {}, # No Targets
"metadata": {
"ranges": GenomicRanges.from_interval(interval)
}
}
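# --- Illustrative usage (not part of the original; file paths are hypothetical) ---
# ds = SeqDataset("intervals.bed", "genome.fa")
# sample = ds[0]
# sample["inputs"].shape   # -> (131072, 4) one-hot sequence from FastaExtractor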
| 30.376623 | 97 | 0.624198 |
e32735f98015c97efbbfebfeedf39cdaa4bfe867 | 5,827 | py | Python | app/auth.py | HyperBCS/RedditNotify | 11a67b41ef46d002a0a2de03b303a4d9118470f9 | ["MIT"] | null | null | null | app/auth.py | HyperBCS/RedditNotify | 11a67b41ef46d002a0a2de03b303a4d9118470f9 | ["MIT"] | null | null | null | app/auth.py | HyperBCS/RedditNotify | 11a67b41ef46d002a0a2de03b303a4d9118470f9 | ["MIT"] | 1 | 2020-10-06T10:33:30.000Z | 2020-10-06T10:33:30.000Z |
import app.models as models
import redis
import pickle
import requests
import flask_login
import config
from flask import Blueprint, render_template, url_for, redirect, request, flash
from app import LM as login_manager
from flask_login import UserMixin
from datetime import timedelta
from urllib.parse import urlencode
from functools import wraps
login_page = Blueprint('login_page', __name__, template_folder="./views/templates")
logged_in_users = {}
r = redis.Redis(
host=config.REDIS_HOST,
port=config.REDIS_PORT,
password=config.REDIS_PASSWORD)
class User(UserMixin):
'''A User model for who will be using the software. Users have different levels of access with different roles
Current active roles:
- user
- admin
'''
def __init__(self,id, token, username, avatar):
self.id = id
self.username = username
self.avatar = avatar
self.token = token
self.role = 'user'
def get_id(self):
return self.id
def exchange_code(code):
data = {
'client_id': config.DISCORD_CLIENT_ID,
'client_secret': config.DISCORD_CLIENT_SECRET,
'grant_type': 'authorization_code',
'code': code,
'redirect_uri': config.OAUTH2_REDIRECT_URL,
'scope': 'identify email connections'
}
headers = {
'Content-Type': 'application/x-www-form-urlencoded'
}
r = requests.post('%s/oauth2/token' % config.DISCORD_API_ENDPOINT, data=data, headers=headers)
r.raise_for_status()
return r.json()
def get_discord_user(token):
url = 'https://discordapp.com/api/users/@me'
headers = {'Authorization': 'Bearer '+token}
r = requests.get(url,headers=headers)
r.raise_for_status()
return r.json()
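# Illustrative sketch (defined only for documentation, never called): how
# the two helpers above chain together in the OAuth2 callback. The `code`
# value is hypothetical; in the real flow it arrives as the ?code= query
# parameter on Discord's redirect back to OAUTH2_REDIRECT_URL.
def _example_oauth_flow(code):
    token_data = exchange_code(code)                     # code -> tokens
    profile = get_discord_user(token_data['access_token'])
    return profile['id'], profile['username']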
@login_manager.user_loader
def user_loader(id):
    '''Loads the user from the Redis session store
    Args:
        id (str): The user id to load
    Returns:
        User: The user object corresponding to the id passed, or None if it doesn't exist
    '''
user_sess = r.get(id)
if user_sess != None:
return pickle.loads(user_sess)
else:
return
@login_manager.unauthorized_handler
def unauth():
'''Function to handle requests to resources that are not authorized or authenticated.'''
if flask_login.current_user.is_authenticated:
user = flask_login.current_user
return render_template('index.html', logged_in=True, DISCORD_CLIENT_ID = config.DISCORD_CLIENT_ID, REDIRECT_URI=urlencode({'redirect_uri': config.OAUTH2_REDIRECT_URL}), data=user), 403
else:
return render_template('index.html', DISCORD_CLIENT_ID = config.DISCORD_CLIENT_ID, REDIRECT_URI=urlencode({'redirect_uri': config.OAUTH2_REDIRECT_URL})), 200
def require_login(func):
'''Wrapper around the login_required wrapper from flask-login
This allows us to keep the same style and also not have to have multiple imports for
roles and require_login
'''
@wraps(func)
@flask_login.login_required
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
def require_role(role, **kwargs):
'''Decorate a function with this in order to require a specific role(s), to access a view.
Also decorates a function, so that you must pass the current user's role into it's first
argument if it's needed.
    By decorating a function with @require_role you are implicitly forcing @login_required as well.
Example:
.. code-block:: python
@APP.route('/admin-dashboard')
@require_role('admin')
def view_dash():
# Something here
@APP.route('/reservationH')
@require_role('admin','host',getrole=True)
def view_dash(role):
...
Args:
role(list or str): A single role name or list of role names for which users are allowed to access the specified resource
If a user is not authorized then the flask_login.unauthorized handler is called.
'''
def real_wrap(func):
@wraps(func)
@flask_login.login_required
def wrapper(*args, **kwargs):
user = flask_login.current_user
kwargs['user'] = user
if isinstance(role, list) and user.role in role:
return func(*args, **kwargs)
elif user.role == role:
return func(*args, **kwargs)
else:
return login_manager.unauthorized()
return wrapper
return real_wrap
@login_page.route('/callback', methods=['GET'])
def callback():
    '''Callback for the OAuth2 code exchange'''
try:
data = exchange_code(request.args.get('code'))
user_info = get_discord_user(data['access_token'])
user = User(user_info['id'],data['access_token'], user_info['username'],user_info['avatar'])
user_pickle = pickle.dumps(user)
r.setex(user_info['id'],timedelta(minutes=10080), user_pickle)
flask_login.login_user(user)
return redirect(url_for('servers.show'))
except Exception as e:
print(e)
flash(u'Unable to login user', 'danger')
return render_template('index.html',error='Unable to login user', DISCORD_CLIENT_ID = config.DISCORD_CLIENT_ID, REDIRECT_URI=urlencode({'redirect_uri': config.OAUTH2_REDIRECT_URL})), 401
@login_page.route('/logout')
def logout():
    '''Logs a user out and renders the login template with a message'''
if flask_login.current_user.is_authenticated:
r.delete(flask_login.current_user.id)
flask_login.logout_user()
flash(u'Successfully logged out', 'success')
    return render_template('index.html', DISCORD_CLIENT_ID = config.DISCORD_CLIENT_ID, REDIRECT_URI=urlencode({'redirect_uri': config.OAUTH2_REDIRECT_URL}))
| 37.837662 | 196 | 0.666209 |
1e7da334cedc33f6961ffdbe2ba6d4d45ba57612 | 122,978 | py | Python | pypeit/core/trace_slits.py | baileyji/PypeIt | eea71304f4a4bcf70148ea686967ed699dc36dfb | ["BSD-3-Clause"] | null | null | null | pypeit/core/trace_slits.py | baileyji/PypeIt | eea71304f4a4bcf70148ea686967ed699dc36dfb | ["BSD-3-Clause"] | null | null | null | pypeit/core/trace_slits.py | baileyji/PypeIt | eea71304f4a4bcf70148ea686967ed699dc36dfb | ["BSD-3-Clause"] | null | null | null |
""" Module for core algorithms related to tracing slits/orders
These should primarily be called by the TraceSlits class
"""
import inspect
import copy
from collections import Counter
import numpy as np
from scipy import ndimage
from scipy.special import erf
from scipy import signal
import matplotlib.pyplot as plt
from matplotlib import cm, font_manager
from pypeit import msgs
from pypeit.core import qa
from pypeit.core import plot
from pypeit import utils
from pypeit.core import pca
from pypeit.core import pixels
from pypeit.core import procimg
from pypeit import debugger
from pypeit.utils import calc_ivar
from pypeit.core import extract
from pypeit.core import arc
from pypeit.core import pydl
from astropy.stats import sigma_clipped_stats
try:
from pypeit import ginga
except ImportError:
pass
# Testing
import time
def add_user_edges(lcen, rcen, add_slits):
"""
Add user-defined slit(s)
Warning: There is no real error checking here.
The user is assumed to know what they are doing!
tc_dict is updated in place
Args:
lcen (np.ndarray): Left traces of slit/orders
rcen (np.ndarray): Right traces of slit/orders
add_slits (list): List of slit info for adding
y_spec, x_spat0, x_spat1 (int)
Returns:
np.ndarray, np.ndarray: new lcen, rcen arrays
"""
nspec = lcen.shape[0]
ycen = nspec//2
# Loop me
for new_slit in add_slits:
msgs.info("Adding a user-defined slit [x0, x1, yrow]: {}".format(new_slit))
# Parse
y_spec, x_spat0, x_spat1 = new_slit
for xx, side in zip([x_spat0,x_spat1], ['left', 'right']):
# Left
ref_t = lcen if side == 'left' else rcen
ref_x = ref_t[y_spec,:]
# Find the closest
idx = np.argmin(np.abs(xx-ref_x))
dx = ref_x[idx]-xx
# New trace
new_trace = ref_t[:,idx] - dx
if side == 'left':
lcen = np.append(ref_t, new_trace.reshape(nspec,1), axis=1)
else:
rcen = np.append(ref_t, new_trace.reshape(nspec,1), axis=1)
# Sort me
for side in ['left', 'right']:
ref_t = lcen if side == 'left' else rcen
allx = ref_t[ycen,:]
isrt = np.argsort(allx)
# Do it
if side == 'left':
lcen = ref_t[:,isrt]
else:
rcen = ref_t[:,isrt]
# Done
return lcen, rcen
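# Minimal synthetic sketch of add_user_edges (illustrative values only;
# not part of the PypeIt API). One existing slit, then a user-requested
# slit spanning x=400..500 anchored at spectral row 1024 is appended.
def _demo_add_user_edges():
    nspec = 2048
    lcen = np.full((nspec, 1), 100.)
    rcen = np.full((nspec, 1), 300.)
    new_l, new_r = add_user_edges(lcen, rcen, [[1024, 400, 500]])
    # Each side gains one trace column -> shapes (2048, 2)
    return new_l.shape, new_r.shape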
def rm_user_edges(lcen, rcen, rm_slits):
"""
Remove one or more slits, as applicable
    Code compares existing slits (which must be synchronized)
against the input request and removes any that match.
Args:
lcen (np.ndarray): Left traces of slit/orders
rcen (np.ndarray): Right traces of slit/orders
rm_slits: list
y_spec, x_spat pairs
Returns:
np.ndarray, np.ndarray: new lcen, rcen arrays
"""
# Mask me
good_left = np.ones(lcen.shape[1], dtype=bool)
good_right = np.ones(rcen.shape[1], dtype=bool)
# Check
if good_left.size != good_right.size:
msgs.error("Slits need to be sync'd to use this method!")
# Loop me
for rm_slit in rm_slits:
# Deconstruct
y_spec, xcen = rm_slit
# Edges
lefts = lcen[y_spec,:]
rights = rcen[y_spec,:]
# Match?
bad_slit = (lefts < xcen) & (rights > xcen)
if np.any(bad_slit):
# Double check
if np.sum(bad_slit) != 1:
msgs.error("Something went horribly wrong in edge tracing")
#
idx = np.where(bad_slit)[0]
msgs.info("Removing user-supplied slit at {},{}".format(xcen,y_spec))
good_right[idx] = False
good_left[idx] = False
# Finish
lcen = lcen[:,good_left]
rcen = rcen[:,good_right]
return lcen, rcen
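# Companion sketch for rm_user_edges (illustrative values only): removing
# the single synchronized slit that contains x=200 at spectral row 1024.
def _demo_rm_user_edges():
    nspec = 2048
    lcen = np.full((nspec, 1), 100.)
    rcen = np.full((nspec, 1), 300.)
    new_l, new_r = rm_user_edges(lcen, rcen, [[1024, 200]])
    # The matching slit is dropped -> both arrays end up with 0 columns
    return new_l.shape[1], new_r.shape[1]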
def orig_add_user_edges(edgearr, siglev, tc_dict, add_slits):
"""Add user-defined slit(s)
Warning: There is no real error checking here.
The user is assumed to know what they are doing!
Parameters
----------
edgearr
siglev
tc_dict
add_slits
Returns
-------
"""
# Indices
lmin = np.min(edgearr)
rmax = np.max(edgearr)
new_l = lmin-1
new_r = rmax+1
# Grab the edge indexes and xval's
left_idx, left_xval, right_idx, right_xval = tc_indices(tc_dict)
# Loop me
nrow = edgearr.shape[0]
ycen = nrow//2
for new_slit in add_slits:
msgs.info("Adding a user-defined slit [x0, x1, yrow]: {}".format(new_slit))
# Parse
xleft, xright, yrow = new_slit
# Left or right
for side in ['left','right']:
# Trace crude and setup
if side == 'left':
xset, xerr = trace_crude_init(np.maximum(siglev, -0.1), np.array([xleft]), yrow, maxshift0=0.5, maxshift=0.15, maxerr=0.2)
#
new_i = new_l
ref_x = left_xval
ref_i = left_idx
else:
xset, xerr = trace_crude_init(np.maximum(-1*siglev, -0.1), np.array([xright]), yrow,maxshift0=0.5, maxshift=0.15, maxerr=0.2)
#
new_i = new_r
ref_x = right_xval
ref_i = right_idx
# Was the trace good enough?
ygd = np.where(xerr[:,0] != 999.)[0]
new_xval = int(np.round(xset[ycen, 0])) # Always defined at the 1/2 point
if len(ygd) > nrow//2: # Use the trace if it was primarily successful
xvals = np.round(xset[:, 0]).astype(int)
edgearr[ygd, xvals[ygd]] = new_i
else: # Otherwise, find the closest left edge and use that
# Find the closest
idx = np.argmin(np.abs(ref_x-new_xval))
ref_slit = ref_i[idx]
dx = ref_x[idx]-new_xval
# Grab its pixels
i_pix = np.where(edgearr == ref_slit)
new_pix = (i_pix[0], i_pix[1]-dx)
# And use them
edgearr[new_pix] = new_i
# Update
tc_dict[side]['xval'][str(new_i)] = new_xval
if side == 'left':
new_l -= 1
else:
new_r += 1
# Return
return edgearr
def assign_slits(binarr, edgearr, ednum=100000, lor=-1, function='legendre', polyorder=3):
"""
This routine will trace the locations of the slit edges. Putative
edges come in with |values| > 200000 (in the edgearr) and leave
(ideally) with values near ednum.
Args:
binarr (numpy.ndarray):
Calibration frame that will be used to identify slit traces
(in most cases, the slit edge). Typically previously
processed by a uniform filter
edgearr (numpy.ndarray):
An array of negative/positive numbers (left/right edges
respectively) and zeros (no edge).
ednum (:obj:`int`, optional):
A dummy number given to define slit edges.
lor (:obj:`int`, optional):
A flag that indicates if the left edge (-1) or right edge
(+1) should be assigned.
function (:obj:`str`, optional):
The type of function used to trace the slit edges. Used by
:func:`utils.robust_polyfit` and :func:`utils.func_val`.
polyorder (:obj:`int`, optional):
The order of the function used for the fit. Used by
:func:`utils.robust_polyfit`.
Returns:
numpy.ndarray: An array of negative/positive numbers (left/right
edges respectively) and zeros (no edge). The input
        ``edgearr`` is actually modified in place and returned.
"""
# if settings is None:
# settings = dict(trace={'slits': {'polyorder': 3, 'function': 'legendre'}})
if lor == -1:
lortxt = "left"
else:
lortxt = "right"
outitnm = 1
oldedgearr = edgearr.copy()
prevedgearr = edgearr.copy()
while True:
msgs.prindent("Outer {0:s} edge loop, Iteration {1:d}".format(lortxt, outitnm))
labnum = lor*ednum
itnm = 0
nslit = 0
cmnold = None
firstpass = True
while True:
# Array to hold edge information
edgehist = np.zeros(binarr.shape[1]*2, dtype=np.int)
itnm += 1
# Locate edges relative to the most common edge
if lor == -1:
wl = np.where(edgearr <= -2*ednum)
else:
wl = np.where(edgearr >= 2*ednum)
if wl[0].size == 0:
break
cl = Counter(edg for edg in edgearr[wl])
comml = cl.most_common(1)
# Pixels of the most common edge
common_pix = np.where(edgearr == comml[0][0])
if not firstpass:
if (cmnold[0] == comml[0][0]) and (cmnold[1] == comml[0][1]):
# Nothing has changed since the previous iteration, so end the loop
break
if comml[0][1] < binarr.shape[0]/100.0:
# Now considering an edge that spans less than 1 per cent of the detector ---> insignificant
break
# Save
cmnold = comml[0]
# Extract just these elements
tedgearr = edgearr[common_pix[0], :]
# Set the offset
offs = binarr.shape[1]
# Add these into edgehist
edgehist[offs] = np.sum(binarr[common_pix])#common_pix[0].size
# And a fudge to give this edge detection some width (for peak finding, below)
edgehist[offs-1] = 1 + common_pix[0].size/2
edgehist[offs+1] = 1 + common_pix[0].size/2
# Find the difference between unknown edges
if lor == -1: # Left edges
www = np.where(tedgearr <= -2*ednum)
else:
www = np.where(tedgearr >= 2*ednum)
if www[0].size == 0:
break
shft = www[1] - common_pix[1][www[0]] # Calculate the shift between right edges
shft += offs # Apply the offset to the edgehist arr
#np.add.at(edgehist, shft, 1)
# Insert other edges into edgehist
np.add.at(edgehist, shft, binarr[common_pix[0], :][www])
# Smooth the histogram with a Gaussian of standard deviation 1 pixel to reduce noise
smedgehist = ndimage.uniform_filter1d(edgehist, 3)
# Identify peaks (which indicate the locations of the right slit edges)
# Might consider another peak-finding algorithm, e.g. the one used for arc lines
arrlfr = smedgehist[0:-4]
arrlft = smedgehist[1:-3]
arrcen = smedgehist[2:-2]
arrrgt = smedgehist[3:-1]
arrrfr = smedgehist[4:]
wpk = np.where((arrcen >= arrlft) & (arrcen > arrrgt) & # Exactly one of these should be >=
((arrlft > arrlfr) | (arrrgt > arrrfr)))[0]
if wpk.size == 0:
# No more peaks
break
if wpk.size != 1:
try:
wpkmsk = prune_peaks(smedgehist, wpk, np.where(wpk+2 == offs)[0][0])
except:
debugger.set_trace()
wpk = wpk[np.where(wpkmsk == 1)]
if wpk.size == 0:
# After pruning, there are no more peaks
break
pks = wpk+2 # Shifted by 2 because of the peak finding algorithm above
pedges = find_peak_limits(smedgehist, pks)
if np.all(pedges[:, 1]-pedges[:, 0] == 0):
# Remaining peaks have no width
break
# Label all edge ids (in the original edgearr) that are located in each peak with the same number
for ii in range(pks.size):
shbad = np.zeros(edgearr.shape)
wp = np.where((shft >= pedges[ii, 0]) & (shft <= pedges[ii, 1]))
vals = np.unique(tedgearr[(www[0][wp], www[1][wp])])
# Fit the edge detections in this edge and calculate the offsets
strev = "np.where("
for vv in vals:
strev += "(edgearr=={0:d})|".format(vv)
strev = strev[:-1] + ")"
widx = eval(strev)
if widx[0].size < 2*polyorder:
continue
badmsk, fitcof = utils.robust_polyfit(widx[0], widx[1], polyorder,
function=function, minx=0,
maxx=binarr.shape[0]-1)
shbad[widx] = badmsk
smallhist = np.zeros(101, dtype=np.int)
meddiff = np.zeros(vals.size)
for vv in range(vals.size):
widx = np.where((edgearr == vals[vv]) & (shbad == 0))
if widx[0].size == 0:
# These pixels were deemed to be bad
continue
diff = widx[1] - utils.func_val(fitcof, widx[0], function, minx=0,
maxx=binarr.shape[0]-1)
diff = 50 + np.round(diff).astype(np.int)
np.add.at(smallhist, diff, 1)
meddiff[vv] = np.median(diff)
# Find the peaks of this distribution
wspk = np.where((smallhist[1:-1] >= smallhist[2:]) & (smallhist[1:-1] > smallhist[:-2]))[0]
wspk += 1 # Add one here to account for peak finding
# if False:
# plt.clf()
# plt.plot(smallhist, 'k-', drawstyle='steps')
# plt.show()
for pp in range(wspk.size): # For all small peaks identified
for vv in range(vals.size):
if lor == -1 and vals[vv] > -2*ednum:
continue
elif lor == 1 and vals[vv] < 2*ednum:
continue
# Make sure this value is within 1 pixel of the peak
if meddiff[vv] < wspk[pp]-1:
continue
if meddiff[vv] > wspk[pp]+1:
continue
edgearr[np.where(edgearr == vals[vv])] = labnum
meddiff[vv] = -1 # Flag this val as done
labnum += lor*1
# Find any vals that weren't applied
for vv in range(vals.size):
if meddiff[vv] == -1:
continue
edgearr[np.where(edgearr == vals[vv])] = 0
nslit += pks.size
msgs.prindent(" Inner loop, Iteration {0:d}, {1:d} {2:s} edges assigned ({3:d} total)".format(itnm, pks.size, lortxt, nslit))
firstpass = False
outitnm += 1
if lor == -1:
edgearr[np.where(edgearr <= -2*ednum)] = 0
else:
edgearr[np.where(edgearr >= 2*ednum)] = 0
if np.array_equal(edgearr, oldedgearr) or np.array_equal(edgearr, prevedgearr):
break
elif outitnm > 10:
msgs.warn("Edge assignment may not have converged")
msgs.info("Please check the slit edge traces")
#debugger.set_trace()
break
else:
oldedgearr = prevedgearr.copy()
prevedgearr = edgearr.copy()
if lor == -1:
edgearr[np.where(edgearr <= -ednum)] -= ednum
else:
edgearr[np.where(edgearr >= ednum)] += ednum
# Ignore any order detections that weren't identified in the loop
if lor == -1:
edgearr[np.where(edgearr <= -2*ednum)] = 0
else:
edgearr[np.where(edgearr >= 2*ednum)] = 0
# Sort vals by increasing spatial position on the detector
# First, determine the model for the most common slit edge
if lor == -1:
wcm = np.where(edgearr <= -ednum)
else:
wcm = np.where(edgearr >= ednum)
if wcm[0].size != 0:
cntr = Counter(edg for edg in edgearr[wcm])
commn = cntr.most_common(1)
wedx, wedy = np.where(edgearr == commn[0][0])
msk, cf = utils.robust_polyfit(wedx, wedy, polyorder, function=function,
minx=0, maxx=binarr.shape[0]-1)
cenmodl = utils.func_val(cf, np.arange(binarr.shape[0]), function,
minx=0, maxx=binarr.shape[0]-1)
if lor == -1:
vals = np.unique(edgearr[np.where(edgearr < 0)])
else:
vals = np.unique(edgearr[np.where(edgearr > 0)])
diffarr = np.zeros(vals.size)
diffstd = 0.0
for jj in range(vals.size):
wedx, wedy = np.where(edgearr == vals[jj])
diffarr[jj] = np.mean(wedy-cenmodl[wedx])
diffstd += np.std(wedy-cenmodl[wedx])
diffstd /= vals.size
dasrt = np.argsort(diffarr)
# Relabel the edges from left to right
edgearr[wcm] += lor*ednum
labnum = lor*ednum
diffarrsrt = diffarr[dasrt]
diffs = diffarrsrt[1:] - diffarrsrt[:-1]
for jj in range(vals.size):
wrplc = np.where(edgearr == lor*ednum + vals[dasrt[jj]])
edgearr[wrplc] = labnum
if jj != vals.size-1:
if diffs[jj] > 3.0*diffstd:
# The next edge must be a different edge
labnum += lor*1
return
def add_edge(ref_slit, insert_offset, earr, t_dict, final_left, final_right, left=True):
""" Add a new edge using a reference slit
    ref_slit : Reference slit for the new edge
    insert_offset : int
        Offset from the reference slit for the new edge
"""
    # New edge index
    if left:
new_e = np.min(earr)-1
else:
new_e = np.max(earr)+1
# Use the reference edge for the shape
i_pix = np.where(earr == ref_slit)
new_pix = (i_pix[0], i_pix[1]+insert_offset)
# Add it in
earr[new_pix] = new_e
if left:
t_dict['left']['xval'][str(new_e)] = t_dict['right']['xval'][str(ref_slit)]+insert_offset
else:
t_dict['right']['xval'][str(new_e)] = t_dict['left']['xval'][str(ref_slit)]+insert_offset
# Lists
if left:
final_left.append(new_e)
final_right.append(ref_slit)
else:
final_right.append(new_e)
final_left.append(ref_slit)
def new_add_edge(ref_slit, insert_offset, t_dict, left=True):
""" Add a new edge using a reference slit
Args:
ref_slit: int
insert_offset: int
Offset from the right slit for the new left slit
or vice-versa
t_dict: dict
left: bool, optional
Returns:
Fills tdict in-place
"""
# Current indices (book-keeping)
if left:
use = 'right'
fill = 'left'
else:
use = 'left'
fill = 'right'
traces = t_dict[use]['traces']
# TODO - Use the PCA
# Use the reference edge for the shape
new_trace = traces[:,ref_slit] + insert_offset
ypos = new_trace.shape[0]//2
# Add it in
t_dict[fill]['new_xval'].append(new_trace[ypos])
t_dict[fill]['new_traces'].append(new_trace)
# Return
return
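# Minimal sketch of new_add_edge (illustrative, hypothetical tc_dict): a
# single right trace at x=90 is mirrored 35 pixels to the left to book-keep
# a new left edge. Only the keys this function touches are populated.
def _demo_new_add_edge():
    nspec = 100
    tc_dict = {'left': {'traces': np.full((nspec, 1), 50.),
                        'new_xval': [], 'new_traces': []},
               'right': {'traces': np.full((nspec, 1), 90.),
                         'new_xval': [], 'new_traces': []}}
    new_add_edge(0, -35, tc_dict, left=True)
    return tc_dict['left']['new_xval']   # -> [55.0]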
def sync_edges(tc_dict, nspat, insert_buff=5, verbose=False):
""" Method to synchronize the slit edges
Adds in extra edges according to a few criteria
Developed for ARMLSD
Parameters
----------
tc_dict : dict
For book-keeping
ednum : int
nspat : int
insert_buff : int, optional
Offset from existing edge for any edge added in
Returns
-------
"""
# TODO - Should avoid adding a slit at the edge if those columns are masked in the BPM
# Init
for key in ['left', 'right']:
tc_dict[key]['new_xval'] = []
tc_dict[key]['new_traces'] = []
# Grab the edge indexes and xval's
#left_idx, left_xval, right_idx, right_xval = tc_indices(tc_dict)
left_xval = tc_dict['left']['xval']
right_xval = tc_dict['right']['xval']
# Only one slit?
if (len(left_xval) == 1) and (len(right_xval)==1):
if left_xval[0] < right_xval[0]: # Ok slit, otherwise continue
return
# Masks: True is a good edge, False is bad
good_left = np.ones_like(left_xval, dtype=bool)
good_right = np.ones_like(right_xval, dtype=bool)
# Deal with missing left edges first (at left edge of detector)
rights_missing_lefts = np.where(right_xval < left_xval[0])[0]
for kk in rights_missing_lefts:
# Grab the trace
right_pix = tc_dict['right']['traces'][:,kk] #np.where(edgearr == right_idx[0])
mn_rp = np.min(right_pix)
if mn_rp <= insert_buff:
good_right[kk] = False
msgs.warn("Partial or too small right edge at start of detector. Skipping it.")
else:
if kk == 0:
ioff = -1*mn_rp + insert_buff
else:
ioff = right_xval[kk-1] - right_xval[kk] + insert_buff
msgs.warn("Adding in a left edge near start of detector which mirrors the first right edge")
new_add_edge(kk, ioff, tc_dict, left=True)
# Loop on left edges
for kk,left in enumerate(left_xval):
# Grab location of the next left edge
if kk < len(left_xval)-1:
next_left = left_xval[kk+1]
else:
next_left = nspat-1
# Search for a proper right edge
# Should be right of the current left and left of the next left
gd_right = np.where((right_xval < next_left) & (right_xval > left))[0]
if len(gd_right) == 0: # None found?
# Last slit?
if kk == len(left_xval)-1:
msgs.warn("Last slit has no right edge. Adding one in which will not touch the detector edge")
left_pix = tc_dict['left']['traces'][:, kk] #np.where(edgearr == left_idx[kk])
                mx_lp = np.max(left_pix)  # max spatial position of the trace
if mx_lp >= nspat-1:
msgs.warn("Partial left edge at end of detector. Skipping it.")
good_left[kk] = False
else:
# Stay on the detector!
ioff = nspat - mx_lp - insert_buff
# Add
new_add_edge(-1, ioff, tc_dict, left=False)
continue
else: # Not the last slit, add one in!
msgs.warn("Missing a right edge for slit with left edge at {}".format(left))
msgs.warn("Adding in a corresponding right edge!")
# Offset from the next left edge
ioff = next_left-left-insert_buff
# Add
new_add_edge(kk, ioff, tc_dict, left=False)
else:
# Check for multiple right edges between the two lefts (i.e. missing Left)
# Will only add in one missing left
if len(gd_right) > 1:
msgs.warn("Missing a left edge for slit with right edge(s) at {}".format(
right_xval[gd_right[1:]]))
msgs.warn("Adding one (and only one)")
# Offset is difference between the two right slits + a buffer
ioff = right_xval[gd_right[0]] - right_xval[gd_right[1]] + insert_buff
# Add
new_add_edge(gd_right[1], ioff, tc_dict, left=True)
#add_edge(right_idx[gd_right[1]], ioff, edgearr, tc_dict, final_left, final_right, left=True)
# Deal with good
tc_dict['right']['xval'] = tc_dict['right']['xval'][good_right]
tc_dict['right']['traces'] = tc_dict['right']['traces'][:,good_right]
tc_dict['left']['xval'] = tc_dict['left']['xval'][good_left]
tc_dict['left']['traces'] = tc_dict['left']['traces'][:,good_left]
# Add em in and then sort
for side in ['left', 'right']:
for kk, xval in enumerate(tc_dict[side]['new_xval']):
tc_dict[side]['xval'] = np.append(tc_dict[side]['xval'], xval)
tmp = tc_dict[side]['new_traces'][kk]
tc_dict[side]['traces'] = np.append(tc_dict[side]['traces'], np.resize(tmp, (tmp.size,1)), axis=1)
# Sort
isrt = np.argsort(tc_dict[side]['xval'])
tc_dict[side]['xval'] = tc_dict[side]['xval'][isrt]
tc_dict[side]['traces'] = tc_dict[side]['traces'][:,isrt]
# Return
return
'''
def edgearr_mslit_sync(edgearr, tc_dict, ednum, insert_buff=5, add_left_edge_slit=True, verbose=False):
""" Method to synchronize the slit edges
Adds in extra edges according to a few criteria
Developed for ARMLSD
Parameters
----------
edgearr : ndarray
tc_dict : dict
For book-keeping
ednum : int
insert_buff : int, optional
Offset from existing edge for any edge added in
add_left_edge_slit : bool, optional
Allow the method to add in a left slit at the edge of the detector
Returns
-------
new_edgearr : ndarray
Updated edgearr
"""
# Init
new_edgearr = np.zeros_like(edgearr, dtype=int)
final_left = []
final_right = []
# Grab the edge indexes and xval's
left_idx, left_xval, right_idx, right_xval = tc_indices(tc_dict)
# Only one slit?
if (len(left_xval) == 1) and (len(right_xval)==1):
if left_xval[0] < right_xval[0]: # Ok slit, otherwise continue
return edgearr
# First slit (often up against the detector)
if (right_xval[0] < left_xval[0]) and add_left_edge_slit:
right_pix = np.where(edgearr == right_idx[0])
mn_rp = np.min(right_pix[1])
if mn_rp <= insert_buff:
msgs.warn("Partial or too small right edge at start of detector. Skipping it.")
else:
ioff = -1*mn_rp + insert_buff
msgs.warn("Adding in a left edge at start of detector which mirrors the first right edge")
add_edge(right_idx[0], ioff, edgearr, tc_dict, final_left, final_right, left=True)
# Loop on left edges
for kk,left in enumerate(left_xval):
# Grab location of the next left edge
if kk < len(left_idx)-1:
next_left = left_xval[kk+1]
else:
next_left = edgearr.shape[1]-1
# Search for a proper right edge
# Should be right of the current left and left of the next left
gd_right = np.where((right_xval < next_left) & (right_xval > left))[0]
if len(gd_right) == 0: # None found?
# Last slit?
if kk == len(left_idx)-1:
msgs.warn("Last slit has no right edge. Adding one in which will not touch the detector edge")
left_pix = np.where(edgearr == left_idx[kk])
mx_lp = np.max(left_pix[1])
if mx_lp >= edgearr.shape[1]:
msgs.warn("Partial left edge at end of detector. Skipping it.")
else:
# Stay on the detector!
ioff = edgearr.shape[1] - mx_lp - insert_buff
# Add
add_edge(left_idx[kk], ioff, edgearr, tc_dict, final_left, final_right, left=False)
continue
else: # Not the last slit, add one in!
msgs.warn("Missing a right edge for slit with left edge at {}".format(left))
msgs.warn("Adding in a corresponding right edge!")
# Offset from the next left edge
ioff = next_left-left-insert_buff
# Add
add_edge(left_idx[kk], ioff, edgearr, tc_dict, final_left, final_right, left=False)
else:
# Add in the first right edge
final_left.append(left_idx[kk])
iright = np.min(gd_right[0])
final_right.append(right_idx[iright])
# Check for multiple right edges between the two lefts (i.e. missing Left)
# Will only add in one missing left
if len(gd_right) > 1:
msgs.warn("Missing a left edge for slit with right edge(s) at {}".format(
right_xval[gd_right[1:]]))
msgs.warn("Adding one (and only one) in unless you turn off the setting [blah]")
# Offset is difference between the two right slits + a buffer
ioff = right_xval[gd_right[0]] - right_xval[gd_right[1]] + insert_buff
# Add
add_edge(right_idx[gd_right[1]], ioff, edgearr, tc_dict, final_left, final_right, left=True)
# Finish by remaking the edgearr
# And update the book-keeping dict
ldict, rdict = {}, {}
# Left
for ss, left_i in enumerate(final_left):
newval = -1*ednum - ss
# Edge
pix = edgearr == left_i
new_edgearr[pix] = newval
# Dict
ldict[str(newval)] = tc_dict['left']['xval'][str(left_i)]
tc_dict['left']['xval'] = ldict
# Right
for ss, right_i in enumerate(final_right):
newval = ednum + ss
# Edge
pix = edgearr == right_i
new_edgearr[pix] = newval
# Dict
rdict[str(newval)] = tc_dict['right']['xval'][str(right_i)]
tc_dict['right']['xval'] = rdict
if verbose:
print(tc_dict['left']['xval'])
print(tc_dict['right']['xval'])
# Return
return new_edgearr
'''
def edgearr_tcrude(edgearr, siglev, ednum, TOL=3., tfrac=0.33, verbose=False,
maxshift=0.15, bpm=None, skip_bad=True):
""" Use trace_crude to refine slit edges
It is also used to remove bad slit edges and merge slit edges
Parameters
----------
edgearr : ndarray
Edge array
siglev : ndarray
Sigma level image
ednum : int
TOL : float (optional)
Tolerance for matching 2 edges (and ignoring one)
tfrac : float (optional)
Fraction of the slit edge that must be traced to keep
There are exceptions, however (e.g. single slits)
maxshift : float
Maximum shift in trace crude
Returns
-------
new_edgarr : ndarray
A new version of the edgearr
tc_dict : dict
A dict that has book-keeping on the edges
left/right
xval -- Position of edge at ycen (1/2 point on the detector); most useful parameter recorded
uni_idx -- Unique edge numbers
flags -- Internal book-keeping on edge analysis
xset -- trace set values
xerr -- trace set errors
"""
msgs.info("Crude tracing the edges")
# Init
nspec = edgearr.shape[0]
ycen = nspec//2
# Items to return
new_edgarr = np.zeros_like(edgearr, dtype=int)
tc_dict = {}
# Loop on side
for side in ['left', 'right']:
tc_dict[side] = {}
tc_dict[side]['xval'] = {}
# Unique edge values
if side == 'left':
uni_e = np.unique(edgearr[edgearr < 0])
else:
uni_e = np.unique(edgearr[edgearr > 0])
# Save sorted (in absolute value) -- This is necessary
uni_e = uni_e[np.argsort(np.abs(uni_e))]
tc_dict[side]['uni_idx'] = uni_e
# Flag: 0=not traced; 1=traced; -1=duplicate
tc_dict[side]['flags'] = np.zeros(len(uni_e), dtype=int)
tc_dict[side]['xset'] = np.zeros((nspec,len(uni_e)))
tc_dict[side]['xerr'] = np.zeros((nspec,len(uni_e))) + 999.
# Loop on edges to trace
niter = 0
while np.any(tc_dict[side]['flags'] == 0):
# Most common yrow to speed up trace_crude
if side == 'left':
all_e = np.where(edgearr < 0)
else:
all_e = np.where(edgearr > 0)
cnt = Counter(all_e[0])
yrow = cnt.most_common(1)[0][0]
# Grab the x values on that row
xinit = all_e[1][all_e[0] == yrow]
# Unique..
_, uidx = np.unique(edgearr[yrow, xinit], return_index=True)
xinit = xinit[uidx] # Yes, this is needed..
#
msk = np.ones_like(xinit, dtype=bool)
# If not first pass, look for duplicates
if niter > 0:
# Check if one of the existing slits is consistent
# with this new, offset edge
for ii,xx in enumerate(xinit):
# Clip (this doesn't catch them all, but there is another check below)
if np.min(np.abs(xx-tc_dict[side]['xset'][yrow])) < TOL:
msk[ii] = False
eval = edgearr[yrow,xx]
msgs.warn("Duplicate {} edge at x={} and y={}. Clipping..".format(side, xx,yrow))
tc_dict[side]['flags'][uni_e==eval] = -1 # Clip me
# Zero out edgearr
edgearr[edgearr==eval] = 0
# Any of these new? If not continue
if not np.any(msk):
# Next iteration
niter += 1
continue
else:
xinit = xinit[msk]
pass
# Trace crude
if side == 'left':
xset, xerr = trace_crude_init(np.maximum(siglev, -0.1), np.array(xinit), yrow, maxshift=maxshift,maxshift0=0.5, maxerr=0.2)
else:
xset, xerr = trace_crude_init(np.maximum(-1*siglev, -0.1), np.array(xinit), yrow, maxshift=maxshift, maxshift0=0.5, maxerr=0.2)
# Fill it up
for kk,x in enumerate(xinit):
# Annoying index
idx = np.where(uni_e == edgearr[yrow,x])[0]
#
tc_dict[side]['xset'][:,idx[0]] = xset[:,kk]
tc_dict[side]['xerr'][:,idx[0]] = xerr[:,kk]
# Good values allowing for edge of detector
goodx = np.any([(xerr != 999.), (xset==0.), (xset==edgearr.shape[1]-1.)], axis=0)
# Fill in
for kk, x in enumerate(xinit):
yval = np.where(goodx[:,kk])[0]
eval = edgearr[yrow,x]
new_xval = int(np.round(xset[ycen, kk]))
                # Check whether the trace is well determined over at least tfrac of the detector
# If only one trace, this check is ignored
if (len(yval) < int(tfrac*edgearr.shape[0])) and (len(uni_e) > 1):
msgs.warn("Edge at x={}, y={} traced less than {} of the detector. Removing".format(
x,yrow,tfrac))
tc_dict[side]['flags'][uni_e==eval] = -1 # Clip me
# Zero out edgearr
edgearr[edgearr==eval] = 0
continue
# Do not allow a right edge at x=0
if (side == 'right') and (new_xval==0):
msgs.warn("Right edge at x=0 removed")
tc_dict[side]['flags'][uni_e==eval] = -1 # Clip me
edgearr[edgearr==eval] = 0
continue
# or a left edge at x=end
if (side == 'left') and (new_xval==edgearr.shape[1]-1):
msgs.warn("Left edge at detector right edge removed")
tc_dict[side]['flags'][uni_e==eval] = -1 # Clip me
edgearr[edgearr==eval] = 0
continue
# Check it really is a new xval (within TOL)
if niter > 0:
curr_xvals = np.array([tc_dict[side]['xval'][key] for key in tc_dict[side]['xval'].keys()])
if np.min(np.abs(new_xval-curr_xvals)) < TOL:
msgs.warn("Edge matched exiting xval within TOL, clipping")
tc_dict[side]['flags'][uni_e==eval] = -1 # Clip me
edgearr[edgearr==eval] = 0
continue
# All bad trace_crude?
if not np.any(goodx[:,kk]):
msgs.warn("No good trace values. Rejecting")
tc_dict[side]['flags'][uni_e==eval] = -1 # Clip me
edgearr[edgearr==eval] = 0
continue
# Edge is ok, keep it
xvals = np.round(xset[:, kk]).astype(int)
# Single edge requires a bit more care (it is so precious!)
if len(uni_e) == 1:
if np.sum(edgearr==eval)>len(yval):
new_edgarr[edgearr==eval] = edgearr[edgearr==eval]
else:
new_edgarr[yval, xvals[yval]] = eval
else:
# Traces can disappear and then the crude trace can wind up hitting a neighbor
# Therefore, take only the continuous good piece from the starting point
if skip_bad:
ybad_xerr = np.array([])
else:
ybad_xerr = np.where(~goodx[:,kk])[0]
# Ignore bad pixels in BPM -- Somewhat kludgy
if bpm is not None:
keep_bad = []
for ibad in ybad_xerr:
xval = int(np.round(xset[ibad,kk]))
if bpm[ibad, xval] == 0:
keep_bad.append(ibad)
ybad_xerr = np.array(keep_bad)
# Lower point
ylow = ybad_xerr < yrow
if np.any(ylow):
y0 = np.max(ybad_xerr[ylow])+1 # Avoid the bad one
else:
y0 = 0
# Upper
yhi = ybad_xerr > yrow
if np.any(yhi):
y1 = np.min(ybad_xerr[yhi])
else:
y1 = edgearr.shape[0]
new_yval = np.arange(y0,y1).astype(int) # Yes, this is necessary; slicing fails..
#
new_edgarr[new_yval, xvals[new_yval]] = eval
#new_edgarr[yval, xvals[yval]] = eval
# Flag
tc_dict[side]['flags'][uni_e == eval] = 1
# Save new_xval
tc_dict[side]['xval'][str(eval)] = new_xval
# Zero out edgearr
edgearr[edgearr==eval] = 0
# Next pass
niter += 1
# Reset edgearr values to run sequentially and update the dict
for side in ['left', 'right']:
if np.any(tc_dict[side]['flags'] == -1):
# Loop on good edges
gde = np.where(tc_dict[side]['flags'] == 1)[0]
for ss,igde in enumerate(gde):
if side == 'left':
newval = -1*ednum - ss
else:
newval = ednum + ss
oldval = tc_dict[side]['uni_idx'][igde]
pix = new_edgarr == oldval
new_edgarr[pix] = newval
# Reset the dict too..
if newval == 0:
debugger.set_trace()
tc_dict[side]['xval'][str(newval)] = tc_dict[side]['xval'].pop(str(oldval))
# Remove bad traces
if len(gde) == 0:
msgs.warn("Side {} had no good edges; Keeping one!".format(side))
# Keep 1 (this is mainly for Longslit)
xval = tc_dict[side]['xset'][ycen,:]
if side == 'left':
idx = np.argmin(xval)
tc_dict[side]['xval'][str(-1*ednum)] = xval[idx]
else:
idx = np.argmax(xval)
tc_dict[side]['xval'][str(ednum)] = xval[idx]
idx = np.array([idx]) # Needs to be an array for the shape
#
tc_dict[side]['xset'] = tc_dict[side]['xset'][:,idx]
tc_dict[side]['xerr'] = tc_dict[side]['xerr'][:,idx]
else:
tc_dict[side]['xset'] = tc_dict[side]['xset'][:,gde]
tc_dict[side]['xerr'] = tc_dict[side]['xerr'][:,gde]
# Remove uni_idx
for side in ['left', 'right']:
for key in ['uni_idx']:
tc_dict[side].pop(key)
if verbose:
print(tc_dict['left']['xval'])
print(tc_dict['right']['xval'])
# Return
return new_edgarr, tc_dict.copy()
def edgearr_from_binarr(binarr, binbpx, medrep=0, min_sqm=30.,
sobel_mode='nearest', sigdetect=30.):
""" Generate the edge array from an input, trace image (likely slightly filtered)
    Primary algorithm is to run a Sobel filter on the image and then
trigger on all significant features.
The bad pixel mask is also used to fuss with bad columns, etc.
Parameters
----------
binarr : numpy ndarray
Calibration frame that will be used to identify slit traces (in most cases, the slit edge)
Lightly filtered
binbpx : ndarray
        Bad pixel mask image
medrep : int, optional
Number of times to perform median smoothing on the mstrace
One uniform filter is always done
medrep = 0 is recommended for ARMLSD
sobel_mode : str, optional
ndimage.sobel mode; default is 'nearest'
sigdetect : float, optional
threshold for edge detection
min_sqm : float, optional
Minimum error used when detecting a slit edge
Returns
-------
siglev : ndarray
edgearr : ndarray
"""
# Specify how many times to repeat the median filter
# Even better would be to fit the filt/sqrt(abs(binarr)) array with a Gaussian near the maximum in each column
msgs.info("Detecting slit edges in the mstrace image")
# Replace bad columns
# TODO -- Should consider replacing bad 'rows' for rotated detectors (e.g. GMOS)
bad_cols = np.sum(binbpx, axis=0) > (binbpx.shape[0]//2)
if np.any(bad_cols):
ms2 = procimg.replace_columns(binarr, bad_cols)
else:
ms2 = binarr.copy()
# Generate sqrt image
sqmstrace = np.sqrt(np.abs(ms2))
# Median filter, as desired
# TODO -- Try size=(7,3) to bring up the edges instead of (3,7)
for ii in range(medrep):
#sqmstrace = ndimage.median_filter(sqmstrace, size=(3, 7))
sqmstrace = ndimage.median_filter(sqmstrace, size=(7, 3))
# Make sure there are no spuriously low pixels
sqmstrace[(sqmstrace < 1.0) & (sqmstrace >= 0.0)] = 1.0
sqmstrace[(sqmstrace > -1.0) & (sqmstrace <= 0.0)] = -1.0
# Filter with a Sobel
filt = ndimage.sobel(sqmstrace, axis=1, mode=sobel_mode)
filt *= (1.0 - binbpx) # Apply to the bad pixel mask
# siglev
siglev = np.sign(filt)*(filt**2)/np.maximum(sqmstrace, min_sqm)
# First edges assigned according to S/N
tedges = np.zeros(binarr.shape, dtype=np.float)
wl = np.where(siglev > + sigdetect) # A positive gradient is a left edge
wr = np.where(siglev < - sigdetect) # A negative gradient is a right edge
tedges[wl] = -1.0
tedges[wr] = +1.0
# Clean the edges
wcl = np.where((ndimage.maximum_filter1d(siglev, 10, axis=1) == siglev) & (tedges == -1))
wcr = np.where((ndimage.minimum_filter1d(siglev, 10, axis=1) == siglev) & (tedges == +1))
nedgear = np.zeros(siglev.shape, dtype=np.int)
nedgear[wcl] = -1
nedgear[wcr] = +1
######
msgs.info("Applying bad pixel mask")
nedgear *= (1-binbpx.astype(np.int)) # Apply the bad pixel mask
siglev *= (1-binbpx.astype(np.int)) # Apply the bad pixel mask
# Return
return siglev, nedgear
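# Toy sketch of edgearr_from_binarr (illustrative numbers): a flat image
# with one bright slit. The Sobel filter fires with opposite signs on the
# rising (left, -1) and falling (right, +1) slit edges, so the returned
# edge array is expected to contain -1, 0 and +1.
def _demo_edgearr_from_binarr():
    trace_img = np.full((100, 50), 10.)
    trace_img[:, 15:35] = 2000.           # one illuminated slit
    bpm = np.zeros_like(trace_img)
    siglev, edgearr = edgearr_from_binarr(trace_img, bpm, sigdetect=20.)
    return np.unique(edgearr)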
def edgearr_add_left_right(edgearr, binarr, binbpx, lcnt, rcnt, ednum):
""" Add left/right edges in the event that none were found thus far
This is especially useful for long slits that fill the full detector,
e.g. Kast
Parameters
----------
edgearr : ndarray
binarr : ndarray
binbpx : ndarray
Bad pixel mask
lcnt : int
Number of left edges
rcnt : int
Number of right edges
ednum : int
Returns
-------
edgearrcp : ndarray
New edgearr
lcnt : int
Updated count
rcnt : int
Updated count
If 0 is returned for both counts, this detector will be skipped
"""
if lcnt == 1:
letxt = "edge"
else:
letxt = "edges"
if rcnt == 1:
retxt = "edge"
else:
retxt = "edges"
msgs.info("{0:d} left {1:s} and {2:d} right {3:s} were found in the trace".format(lcnt, letxt, rcnt, retxt))
if (lcnt == 0) and (rcnt == 0):
if np.median(binarr) > 500:
msgs.warn("Found flux but no edges. Assuming they go to the edge of the detector.")
edgearr[:, -1] = 2*ednum
rcnt = 1
edgearr[:, 0] = -2*ednum
lcnt = 1
else:
msgs.warn("Unable to trace any edges"+msgs.newline()+"try a different method to trace the order edges")
return None, 0, 0
elif rcnt == 0:
msgs.warn("Unable to find a right edge. Adding one in.")
        # Respecting the BPM (using the last column where there is no mask)
sum_bpm = np.sum(binbpx, axis=0)
gdi1 = np.max(np.where(sum_bpm == 0)[0])
# Apply
edgearr[:, gdi1] = 2*ednum
rcnt = 1
elif lcnt == 0:
msgs.warn("Unable to find a left edge. Adding one in.")
# Respecting the BPM (using first column where there is no mask)
sum_bpm = np.sum(binbpx, axis=0)
gdi0 = np.min(np.where(sum_bpm == 0)[0])
# Apply
edgearr[:, gdi0] = -2*ednum
lcnt = 1
msgs.info("Assigning slit edge traces")
# Find the most common set of edges
edgearrcp = edgearr.copy()
return edgearrcp, lcnt, rcnt
def edgearr_final_left_right(edgearr, ednum, siglev):
""" Final fussing with left/right edges, as needed
Adds in missing ones, truncates when there are too many of
one type versus the other
Parameters
----------
edgearr : ndarray
ednum : int
siglev : ndarray
Returns
-------
edgearr : ndarray
lcnt : int
rcnt: int
"""
nspec, nspat = edgearr.shape
eaunq = np.unique(edgearr)
lcnt = np.where(eaunq < 0)[0].size
rcnt = np.where(eaunq > 0)[0].size
if lcnt == 0:
msgs.warn("Unable to find a left edge. Adding one in.")
edgearr[:, 0] = -2 * ednum
lcnt = 1
if rcnt == 0:
msgs.warn("Unable to find a right edge. Adding one in.")
edgearr[:, -1] = 2 * ednum
rcnt = 1
if (lcnt == 1) & (rcnt > 1): # This is mainly in here for LRISb which is a real pain..
msgs.warn("Only one left edge, and multiple right edges.")
msgs.info("Restricting right edge detection to the most significantly detected edge.")
wtst = np.where(eaunq > 0)[0]
bval, bidx = -np.median(siglev[np.where(edgearr == eaunq[wtst[0]])]), 0
for r in range(1, rcnt):
wed = np.where(edgearr == eaunq[wtst[r]])
tstv = -np.median(siglev[wed])
if tstv > bval:
bval = tstv
bidx = r
edgearr[np.where((edgearr > 0) & (edgearr != eaunq[wtst[bidx]]))] = 0
edgearr[np.where(edgearr > 0)] = +ednum # Reset the edge value
rcnt = 1
if (lcnt > 1) & (rcnt == 1):
msgs.warn("Only one right edge, and multiple left edges.")
msgs.info("Restricting left edge detection to the most significantly detected edge.")
wtst = np.where(eaunq < 0)[0]
bval, bidx = np.median(siglev[np.where(edgearr == eaunq[wtst[0]])]), 0
for r in range(1, lcnt):
wed = np.where(edgearr == eaunq[wtst[r]])
tstv = np.median(siglev[wed])
if tstv > bval:
bval = tstv
bidx = r
edgearr[np.where((edgearr < 0) & (edgearr != eaunq[wtst[bidx]]))] = 0
edgearr[np.where(edgearr < 0)] = -ednum # Reset the edge value
lcnt = 1
if lcnt == 1:
letxt = "edge"
else:
letxt = "edges"
if rcnt == 1:
retxt = "edge"
else:
retxt = "edges"
msgs.info("{0:d} left {1:s} and {2:d} right {3:s} were found in the trace".format(lcnt, letxt, rcnt, retxt))
return edgearr, lcnt, rcnt
def fit_edges(edgearr, lmin, lmax, plxbin, plybin, left=True, polyorder=3, function='legendre'):
""" Fit the edges, either left or right ones
Parameters
----------
edgearr : ndarray
lmin : int
Minimum edge to fit
lmax : int
Maximum edge to fit
plxbin : ndarray
plybin : ndarray
left : bool
Fitting left (or right) edges?
polyorder : int, optional
function : str, optional
Returns
-------
coeff : ndarray
Fit coefficients (nedge x polyorder)
nmbrarr : ndarray
Indexing of the edges
diffarr : ndarray
wghtarr : ndarray
"""
minvf, maxvf = plxbin[0, 0], plxbin[-1, 0]
# First, determine the model for the most common left slit edge
if left:
wcm = np.where(edgearr < 0)
else:
wcm = np.where(edgearr > 0)
cntr = Counter(edg for edg in edgearr[wcm])
commn = cntr.most_common(1)
wedx, wedy = np.where(edgearr == commn[0][0])
msk, cf = utils.robust_polyfit(wedx, wedy,
polyorder,
function=function,
minx=0, maxx=edgearr.shape[0] - 1)
cenmodl = utils.func_val(cf, np.arange(edgearr.shape[0]), function,
minx=0, maxx=edgearr.shape[0] - 1)
if left:
msgs.info("Fitting left slit traces")
else:
msgs.info("Fitting right slit traces")
coeff = np.zeros((1 + polyorder, lmax - lmin + 1))
diffarr = np.zeros(lmax - lmin + 1)
wghtarr = np.zeros(lmax - lmin + 1)
nmbrarr = np.zeros(lmax - lmin + 1)
offs = cenmodl[int(edgearr.shape[0] / 2)]
for i in range(lmin, lmax + 1):
if left:
w = np.where(edgearr == -i)
else:
w = np.where(edgearr == i)
if np.size(w[0]) <= polyorder + 2:
# lfail = np.append(lfail,i-lmin)
continue
tlfitx = plxbin[w]
tlfity = plybin[w]
diffarr[i - lmin] = np.mean(w[1] - cenmodl[w[0]]) + offs
wghtarr[i - lmin] = np.size(w[0]) / float(edgearr.shape[0])
if left:
nmbrarr[i - lmin] = -i
else:
nmbrarr[i - lmin] = i
msk, coeff[:, i - lmin] = utils.robust_polyfit(tlfitx, tlfity, polyorder,
function=function,
minx=minvf, maxx=maxvf)
# Return
return coeff, nmbrarr, diffarr, wghtarr
def get_slitid(shape, lordloc, rordloc, islit, ypos=0.5):
""" Convert slit position to a slitid
Parameters
----------
    shape : tuple
        Shape of the trace image
    lordloc : ndarray
        Left slit edge traces
    rordloc : ndarray
        Right slit edge traces
    islit : int
ypos : float, optional
Returns
-------
slitid : int
Slit center position on the detector normalized to range from 0-10000
slitcen : float
Slitcenter relative to the detector ranging from 0-1
xslit : tuple
left, right positions of the slit edges
"""
#if isinstance(slf, tuple):
# shape, lordloc, rordloc = slf
#else:
# shape = slf._mstrace[det-1].shape
# lordloc = slf._lordloc[det-1]
# rordloc = slf._rordloc[det-1]
# Index at ypos
yidx = int(np.round(ypos*lordloc.shape[0]))
# Slit at yidx
pixl_slit = lordloc[yidx, islit]
pixr_slit = rordloc[yidx, islit]
# Relative to full image
xl_slit = pixl_slit/shape[1]
xr_slit = pixr_slit/shape[1]
# Center
slitcen = np.mean([xl_slit, xr_slit])
slitid = int(np.round(slitcen*1e4))
# Return them all
return slitid, slitcen, (xl_slit, xr_slit)
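# Worked sketch of the slitid convention (hypothetical single slit): the
# slit centre at pixel 200 of a 1024-pixel-wide detector maps to
# slitcen ~0.195 and hence slitid ~1953 on the 0-10000 scale.
def _demo_get_slitid():
    shape = (2048, 1024)
    lordloc = np.full((2048, 1), 100.)
    rordloc = np.full((2048, 1), 300.)
    slitid, slitcen, (xl, xr) = get_slitid(shape, lordloc, rordloc, 0)
    return slitid   # -> 1953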
# TODO: Need a better name for this function
def base_expand_slits(msedge, ordcen, extord):
    t = time.time()  # leftover timing hook; time.clock() was removed in Python 3.8
sz_x, sz_y = msedge.shape
sz_o = ordcen.shape[1]
# Get the pixels at the mid-point between orders
mid_order = (ordcen[:,:-1] + ordcen[:,1:])//2
# Instantiate the output
pordwid = np.zeros(ordcen.shape, dtype=int)
mordwid = np.zeros(ordcen.shape, dtype=int)
# Ignore extracted orders
mordwid[:,extord.astype(bool)] = -1
pordwid[:,extord.astype(bool)] = -1
# Set left edges to ignore
lindx = (mid_order < 0) | (msedge[np.arange(sz_x)[:,None],ordcen[:,1:]] \
< msedge[np.arange(sz_x)[:,None],mid_order])
lindx = np.append(np.ones(sz_x, dtype=bool).reshape(-1,1), lindx, axis=1)
mordwid[lindx] = -1
# Set right edges to ignore
rindx = (mid_order >= sz_y) | (msedge[np.arange(sz_x)[:,None],ordcen[:,:-1]] \
< msedge[np.arange(sz_x)[:,None],mid_order])
rindx = np.append(rindx, np.ones(sz_x, dtype=bool).reshape(-1,1), axis=1)
pordwid[rindx] = -1
# Find the separation between orders
medgv = 0.5*(msedge[np.arange(sz_x)[:,None],ordcen[:,1:]] \
+ msedge[np.arange(sz_x)[:,None],mid_order])
pedgv = 0.5*(msedge[np.arange(sz_x)[:,None],ordcen[:,:-1]] \
+ msedge[np.arange(sz_x)[:,None],mid_order])
for o in range(sz_o):
for x in range(sz_x):
# Trace from centre to left
if mordwid[x,o] != -1:
mordwid[x,o] = -1
for y in range(mid_order[x,o-1], ordcen[x, o]):
if msedge[x,y] > medgv[x,o-1]:
mordwid[x,o] = ordcen[x,o] - y
break
# Trace from centre to right
if pordwid[x,o] != -1:
pordwid[x,o] = -1
for y in range(mid_order[x,o], ordcen[x, o], -1):
if msedge[x,y] > pedgv[x,o]:
pordwid[x,o] = y-ordcen[x, o]
break
return mordwid, pordwid
def find_between(edgdet, ledgem, ledgep, dirc):
"""
.. todo::
Document this!
"""
if len(edgdet.shape) != 2:
msgs.error('Edge pixels array must be 2D.')
if len(ledgem.shape) != 1 or len(ledgep.shape) !=1:
msgs.error('Input must be 1D.')
sz_x, sz_y = edgdet.shape
# Setup the coefficient arrays
edgbtwn = np.full(3, -1, dtype=int)
for x in range(0,sz_x):
rng = np.sort([ledgem[x],ledgep[x]])
if not np.any(edgdet[x,slice(*rng)] > 0):
continue
e = edgdet[x,slice(*rng)][edgdet[x,slice(*rng)] > 0]
if edgbtwn[0] == -1:
edgbtwn[0] = e[0]
indx = e != edgbtwn[0]
if edgbtwn[1] == -1 and np.any(indx):
edgbtwn[1] = e[indx][0]
# If no right order edges were found between these two left order
# edges, find the next right order edge
if edgbtwn[0] == -1 and edgbtwn[1] == -1:
for x in range(0,sz_x):
ystrt = np.max([ledgem[x],ledgep[x]])
if dirc == 1:
emin = np.min(edgdet[x,ystrt:][edgdet[x,ystrt:] > 0])
if edgbtwn[2] == -1 or emin < edgbtwn[2]:
edgbtwn[2] = emin
else:
emax = np.max(edgdet[x,:ystrt+1][edgdet[x,:ystrt+1] > 0])
if edgbtwn[2] == -1 or emax > edgbtwn[2]:
edgbtwn[2] = emax
# Now return the array
return edgbtwn
def find_shift(mstrace, minarr, lopos, diffarr, numsrch):
"""
.. todo::
Document this!
"""
sz_y = mstrace.shape[1]
maxcnts = -999999.9
shift = 0
d = mstrace - minarr[:,None]
for s in range(0,numsrch):
cnts = 0.0
ymin = lopos + s
ymin[ymin < 0] = 0
ymax = ymin + diffarr
ymax[ymax > sz_y] = sz_y
indx = ymax > ymin
if np.sum(indx) == 0:
continue
cnts = np.sum([ np.sum(t[l:h]) for t,l,h in zip(d[indx], ymin[indx], ymax[indx]) ]) \
/ np.sum(ymax[indx]-ymin[indx])
if cnts > maxcnts:
maxcnts = cnts
shift = s
return shift
def ignore_orders(edgdet, fracpix, lmin, lmax, rmin, rmax):
"""
.. warning::
        edgdet is altered by the function.
"""
sz_x, sz_y = edgdet.shape
lsize = lmax-lmin+1
larr = np.zeros((2,lsize), dtype=int)
larr[0,:] = sz_x
rsize = rmax-rmin+1
rarr = np.zeros((2,rsize), dtype=int)
rarr[0,:] = sz_x
# TODO: Can I remove the loop? Or maybe just iterate through the
# smallest dimension of edgdet?
for x in range(sz_x):
indx = edgdet[x,:] < 0
if np.any(indx):
larr[0,-edgdet[x,indx]-lmin] = np.clip(larr[0,-edgdet[x,indx]-lmin], None, x)
larr[1,-edgdet[x,indx]-lmin] = np.clip(larr[1,-edgdet[x,indx]-lmin], x, None)
indx = edgdet[x,:] > 0
if np.any(indx):
rarr[0,edgdet[x,indx]-rmin] = np.clip(rarr[0,edgdet[x,indx]-rmin], None, x)
rarr[1,edgdet[x,indx]-rmin] = np.clip(rarr[1,edgdet[x,indx]-rmin], x, None)
# Go through the array once more to remove pixels that do not cover fracpix
edgdet = edgdet.ravel()
lt_zero = np.arange(edgdet.size)[edgdet < 0]
if len(lt_zero) > 0:
edgdet[lt_zero[larr[1,-edgdet[lt_zero]-lmin]-larr[0,-edgdet[lt_zero]-lmin] < fracpix]] = 0
gt_zero = np.arange(edgdet.size)[edgdet > 0]
if len(gt_zero) > 0:
edgdet[gt_zero[rarr[1,edgdet[gt_zero]-rmin]-rarr[0,edgdet[gt_zero]-rmin] < fracpix]] = 0
edgdet = edgdet.reshape(sz_x,sz_y)
# Check if lmin, lmax, rmin, and rmax need to be changed
lindx = np.arange(lsize)[larr[1,:]-larr[0,:] > fracpix]
lnc = lindx[0]
lxc = lsize-1-lindx[-1]
rindx = np.arange(rsize)[rarr[1,:]-rarr[0,:] > fracpix]
rnc = rindx[0]
rxc = rsize-1-rindx[-1]
return lnc, lxc, rnc, rxc, larr, rarr
def limit_yval(yc, maxv):
    """Return lower/upper search offsets about column yc, clipped so that
    the window [yc+yn, yc+yx) stays within [0, maxv]."""
    yn = 0 if yc == 0 else (-yc if yc < 3 else -3)
    yx = maxv-yc if yc > maxv-4 and yc < maxv else 4
    return yn, yx
def match_edges(edgdet, ednum, mr=50):
""" Label groups of edge pixels and give them
a unique identifier.
Parameters
----------
edgdet : ndarray
Modified in place
ednum : int
a large dummy number used for slit edge assignment.
ednum should be larger than the number of edges detected
mr : int, optional
minimum number of acceptable pixels required to form the detection of an order edge
JXP increased the default value from 5 to 50
50 is probably best for
Returns
-------
lcnt-2*ednum
rcnt-2*ednum
"""
mrxarr = np.zeros(mr, dtype=int) -1 # -1 so as to be off the chip
mryarr = np.zeros(mr, dtype=int) -1 # -1 so as to be off the chip
sz_x, sz_y = edgdet.shape
lcnt = 2*ednum
rcnt = 2*ednum
# TODO -- Consider starting at sz_x/2
# Note: x=rows and y=columns in the following
for y in range(sz_y):
for x in range(sz_x):
if edgdet[x,y] != -1 and edgdet[x,y] != 1:
# No edge at this pixel
continue
anyt = 0
left = edgdet[x,y] == -1
# Search upwards from x,y
#xs = x + 1 (was x+1)
xs = x
yt = y
while xs <= sz_x-1:
xr = 10 if xs + 10 < sz_x else sz_x - xs - 1
yn, yx = limit_yval(yt, sz_y)
suc = 0
for s in range(xs, xs+xr):
suc = 0
for t in range(yt + yn, yt + yx):
if edgdet[s, t] == -1 and left:
edgdet[s, t] = -lcnt
elif edgdet[s, t] == 1 and not left:
edgdet[s, t] = rcnt
else:
continue
suc = 1
if anyt < mr:
mrxarr[anyt] = s
mryarr[anyt] = t
anyt += 1
yt = t
break
if suc == 1:
xs = s + 1
break
if suc == 0: # The trace is lost!
break
# Search downwards from x,y
xs = x - 1
yt = y
while xs >= 0:
xr = xs if xs-10 < 0 else 10
yn, yx = limit_yval(yt, sz_y)
suc = 0
for s in range(0, xr):
suc = 0
for t in range(yt+yn, yt+yx):
if edgdet[xs-s, t] == -1 and left:
edgdet[xs-s, t] = -lcnt
elif edgdet[xs-s, t] == 1 and not left:
edgdet[xs-s, t] = rcnt
else:
continue
suc = 1
if anyt < mr:
mrxarr[anyt] = xs-s
mryarr[anyt] = t
anyt += 1
yt = t
break
if suc == 1:
xs = xs - s - 1
break
if suc == 0: # The trace is lost!
break
if anyt > mr and left:
edgdet[x, y] = -lcnt
lcnt = lcnt + 1
elif anyt > mr and not left:
edgdet[x, y] = rcnt
rcnt = rcnt + 1
else:
edgdet[x, y] = 0
for s in range(anyt):
if mrxarr[s] != -1 and mryarr[s] != -1:
edgdet[mrxarr[s], mryarr[s]] = 0
return lcnt-2*ednum, rcnt-2*ednum
# TODO: This pure python function was translated from the cython
# function above but was never fully tested; compare with function in
# pypeit/arcytrace.pyx!
def close_edges(edgdet, dets, npix):
sz_x, sz_y = edgdet.shape
sz_d = dets.size
hasedge = np.zeros(sz_d, dtype=int)
for d in range(sz_d):
for x in range(sz_x):
for y in range(sz_y):
if edgdet[x,y] != dets[d]:
continue
else:
# Check if there's an edge nearby
mgap = sz_y if y+npix+1 > sz_y else y+npix+1
for s in range(y+1, mgap):
if edgdet[x,s] == dets[d]:
hasedge[d] = 1
break
if hasedge[d] != 0:
break
if hasedge[d] != 0:
break
return hasedge
# TODO: This pure python function was translated from the cython
# function above but was never fully tested; compare with function in
# pypeit/arcytrace.pyx! The code is gnarly and may need some attention!
def close_slits(trframe, edgdet, dets, npix, ednum):
sz_x, sz_y = edgdet.shape
sz_d = dets.size
edgearr = np.zeros(edgdet.shape, dtype=int)
hasedge = np.zeros(sz_d, dtype=int)
for d in range(sz_d):
tmp = sz_y
for x in range(sz_x):
for y in range(sz_y):
if edgdet[x, y] != dets[d]:
continue
else:
# Check if there's an edge nearby
mgap = sz_y if y+npix+1 > sz_y else y+npix+1
for s in range(y+1, mgap):
if edgdet[x,s] != 0:
if s-y < tmp:
tmp = s-y
tix = edgdet[x,s]
hasedge[d] = edgdet[x,s]
break
if tmp != sz_y:
hasedge[d] = tix
# Now, if there's an edge in hasedge, mark the corresponding index
# in hasedge with -1
for d in range(sz_d):
if hasedge[d] == dets[d]:
# Close slits have caused a left/right edge to be labelled
# as one edge. Find only instances where there is a left and
# right edge detection. Then, take their average and set
            # hasedge to be zero
tmp = 0
diff = 0
for x in range(sz_x):
for y in range(sz_y):
if edgdet[x,y] != dets[d]:
continue
else:
# Check if there's an edge nearby
mgap = sz_y if y+npix+1 > sz_y else y+npix+1
flg = 0
for s in range(y+1, mgap):
if edgdet[x,s] == edgdet[x,y]:
edgdet[x,s] = 0
edgdet[x,y] = 0
# +0.5 for rounding
tix = y + int(0.5*(s-y) + 0.5)
edgdet[x,tix] = dets[d]
flg = 1
tmp += 1
diff += (s-y)
break
if flg == 0:
# If there isn't a common left/right edge
# for this pixel, ignore this single edge
# detection
edgdet[x,y] = 0
hasedge[d] = diff/tmp
continue
if hasedge[d] > 0:
for s in range(sz_d):
if hasedge[d] == dets[s]:
hasedge[s] = -1
break
# Introduce an edge in cases where no edge exists, and redefine an
# edge where one does exist.
enum = ednum
for d in range(sz_d):
tmp = 0
for x in range(sz_x):
for y in range(sz_y):
if edgdet[x,y] != dets[d]:
continue
if hasedge[d] >= ednum:
edgearr[x,y] = enum
# Relabel the appropriate hasedge
if tmp == 0:
for s in range(sz_d):
if hasedge[d] == dets[s]:
# Label hasedge as negative, to avoid
# confusion with the positive hasedge
# numbers
hasedge[s] = -enum
tmp = 1
break
elif hasedge[d] < -1:
edgearr[x, y] = hasedge[d]
elif hasedge[d] >= 0:
# Create a new edge
edgearr[x, y-(1+hasedge[d])] = enum
edgearr[x, y+(1+hasedge[d])] = -enum
else:
msgs.bug('Check slit traces in close_slits!')
if hasedge[d] >= 0:
enum += 1
# Finally return the new slit edges array
return edgearr
# TODO: This pure python function was translated from the cython
# function above but was never fully tested; compare with function in
# pypeit/arcytrace.pyx! This edits everything in place. Do we want to
# do that?
def dual_edge(edgearr, edgearrcp, wx, wy, wl, wr, shft, npix, newval):
sz_x, sz_y = edgearr.shape
sz_a = wl.shape[0]
sz_b = wr.shape[0]
sz_e = wx.shape[0]
# First go through the leftmost edge (suffix a)
for x in range(sz_a):
for ee in range(sz_e):
if edgearr[wx[ee], wy[ee]] == wl[x]:
# Update the value given to this edge
edgearrcp[wx[ee], wy[ee]] = newval
# Determine if an edge can be placed in this row
maxy = npix
if wy[ee] + maxy >= sz_y:
maxy = sz_y - wy[ee] - 1
flg = 0
for y in range(1, maxy):
if edgearrcp[wx[ee], wy[ee]+y] != 0:
flg = 1
if flg == 0:
edgearrcp[wx[ee], wy[ee]+shft] = newval+1
# Now go through the rightmost edge (suffix b)
for x in range(sz_b):
for ee in range(sz_e):
if edgearr[wx[ee], wy[ee]] == wr[x]:
# Update the value given to this edge
edgearrcp[wx[ee], wy[ee]] = newval + 1
# Determine if an edge can be placed in this row
maxy = npix
if wy[ee] - maxy < 0:
maxy = wy[ee] + 1
flg = 0
for y in range(1, maxy):
if edgearrcp[wx[ee], wy[ee]-y] != 0:
flg = 1
if flg == 0:
edgearrcp[wx[ee], wy[ee]-shft] = newval
def minbetween(mstrace, loord, hiord):
"""
.. todo::
Document this!
"""
# TODO: Check shapes
ymin = np.clip(loord, 0, mstrace.shape[1])
ymax = np.clip(hiord, 0, mstrace.shape[1])
minarr = np.zeros(mstrace.shape[0])
indx = ymax > ymin
minarr[indx] = np.array([ np.amin(t[l:h])
for t,l,h in zip(mstrace[indx], ymin[indx], ymax[indx]) ])
return minarr
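# Illustrative sketch (not part of the original module): a tiny call to
# minbetween with made-up values, showing the per-row column windows.
# >>> mstrace = np.array([[5., 1., 7., 3.], [9., 2., 4., 8.]])
# >>> minbetween(mstrace, np.array([1, 0]), np.array([3, 2]))
# array([1., 2.])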
def find_peak_limits(hist, pks):
"""
    Find the left and right limits of each peak in hist, bracketed by
    the nearest zero-valued histogram bins on either side
Parameters
----------
    hist : ndarray
        1D vector; histogram of edge detections
    pks : ndarray
        1D vector of peak locations (indices into hist)
"""
if len(hist.shape) != 1 or len(pks.shape) != 1:
msgs.error('Arrays provided to find_peak_limits must be vectors.')
# Pixel indices in hist for each peak
hn = np.arange(hist.shape[0])
indx = np.ma.MaskedArray(np.array([hn]*pks.shape[0]))
# Instantiate output
edges = np.zeros((pks.shape[0],2), dtype=int)
# Find the left edges
indx.mask = (hist != 0)[None,:] | (hn[None,:] > pks[:,None])
edges[:,0] = np.ma.amax(indx, axis=1)
# Find the right edges
indx.mask = (hist != 0)[None,:] | (hn[None,:] < pks[:,None])
edges[:,1] = np.ma.amin(indx, axis=1)
return edges
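# Illustrative sketch (not part of the original module): find_peak_limits
# brackets each peak with the nearest zero-valued histogram bins. The
# histogram below is made up.
# >>> hist = np.array([0, 2, 5, 2, 0, 0, 1, 4, 1, 0])
# >>> find_peak_limits(hist, np.array([2, 7]))
# array([[0, 4],
#        [5, 9]])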
def parse_user_slits(add_slits, this_det, rm=False):
"""
Parse the parset syntax for adding slits
Args:
add_slits: str, list
Taken from the parset
this_det: int
current detector
rm: bool, optional
Remove instead of add?
Returns:
user_slits: list or None
if list, [[x0,x1,yrow]] for add with one or more entries
if list, [[xcen,yrow]] for rm with one or more entries
"""
# Might not be a list yet (only a str)
if not isinstance(add_slits, list):
add_slits = [add_slits]
#
user_slits = []
for islit in add_slits:
if not rm:
det, x0, x1, yrow = [int(ii) for ii in islit.split(':')]
if det == this_det:
user_slits.append([x0,x1,yrow])
else:
det, xcen, yrow = [int(ii) for ii in islit.split(':')]
if det == this_det:
user_slits.append([xcen,yrow])
# Finish
if len(user_slits) == 0:
return None
else:
return user_slits
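# Illustrative sketch (not part of the original module): the parset syntax
# is 'det:x0:x1:yrow' for adding a slit and 'det:xcen:yrow' for removing
# one. The numbers below are arbitrary.
# >>> parse_user_slits('2:10:200:2000', this_det=2)
# [[10, 200, 2000]]
# >>> parse_user_slits(['1:40:60:1000', '2:40:60:1000'], this_det=1)
# [[40, 60, 1000]]
# >>> parse_user_slits('2:100:2000', this_det=2, rm=True)
# [[100, 2000]]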
def pca_order_slit_edges(binarr, edgearr, lcent, rcent, gord, lcoeff, rcoeff, plxbin, slitcen,
                         pixlocn, function='legendre', polyorder=3, diffpolyorder=2,
ofit=[3,2,1,0,0,0], extrapolate=[0,0], doqa=True):
""" Perform a PCA analyis on the order edges
Primarily for extrapolation
KBW: extrapolate neg was never used
Parameters
----------
binarr : ndarray
edgearr : ndarray
lcent : ndarray
Left edges
rcent : ndarray
Right edges
gord : ndarray
Orders detected on both the left and right edge
lcoeff : ndarray
Fit coefficients for left edges
rcoeff : ndarray
Fit coefficients for right edges
plxbin
slitcen : ndarray
pixlocn
    Returns
    -------
    lcen : ndarray
        Left edges (including extrapolated orders)
    rcen : ndarray
        Right edges (including extrapolated orders)
    extrapord : ndarray
        Boolean array flagging the extrapolated orders
    """
# Init
wl = np.where(edgearr < 0)
wr = np.where(edgearr > 0)
##############
xv = plxbin[:, 0]
minvf, maxvf = plxbin[0, 0], plxbin[-1, 0]
# min and max switched because left edges have negative values
almin, almax = -np.max(edgearr[wl]), -np.min(edgearr[wl])
armin, armax = np.min(edgearr[wr]), np.max(edgearr[wr])
# maskord = np.where((np.all(lcoeff[:,lg],axis=0)==False)
# | (np.all(rcoeff[:,rg],axis=0)==False))[0]
maskord = np.where((np.all(lcoeff, axis=0) == False) | (np.all(rcoeff, axis=0) == False))[0]
ordsnd = np.arange(min(almin, armin), max(almax, armax) + 1)
totord = ordsnd[-1] + extrapolate[1]
# Identify the orders to be extrapolated during reconstruction
extrapord = (1.0 - np.in1d(np.linspace(1.0, totord, totord), gord).astype(np.int)).astype(np.bool)
msgs.info("Performing a PCA on the order edges")
lnpc = len(ofit) - 1
msgs.work("May need to do a check here to make sure ofit is reasonable")
coeffs = utils.func_fit(xv, slitcen, function, polyorder, minx=minvf, maxx=maxvf)
for i in range(ordsnd.size):
if i in maskord:
if (i>=ordsnd[0]) and (i<ordsnd[-1]-1): # JXP: Don't add orders that are already in there
continue
coeffs = np.insert(coeffs, i, 0.0, axis=1)
slitcen = np.insert(slitcen, i, 0.0, axis=1)
lcent = np.insert(lcent, i, 0.0, axis=0)
rcent = np.insert(rcent, i, 0.0, axis=0)
xcen = xv[:, np.newaxis].repeat(ordsnd.size, axis=1)
fitted, outpar = pca.basis(xcen, slitcen, coeffs, lnpc, ofit, x0in=ordsnd, mask=maskord,
skipx0=False, function=function)
# if doqa:
# debugger.set_trace() # NEED TO REMOVE slf
# # pca.pca_plot(slf, outpar, ofit, "Slit_Trace", pcadesc=pcadesc)
# Extrapolate the remaining orders requested
orders = 1 + np.arange(totord)
extrap_cent, outpar = pca.extrapolate(outpar, orders, function=function)
# Fit a function for the difference between left and right edges.
diff_coeff, diff_fit = utils.polyfitter2d(rcent - lcent, mask=maskord, order=diffpolyorder)
# Now extrapolate the order difference
ydet = np.linspace(0.0, 1.0, lcent.shape[0])
ydetd = ydet[1] - ydet[0]
lnum = ordsnd[0] - 1.0
ydet = np.append(-ydetd * np.arange(1.0, 1.0 + lnum)[::-1], ydet)
ydet = np.append(ydet, 1.0 + ydetd * np.arange(1.0, 1.0 + extrapolate[1]))
xde, yde = np.meshgrid(np.linspace(0.0, 1.0, lcent.shape[1]), ydet)
extrap_diff = utils.polyval2d(xde, yde, diff_coeff).T
msgs.info("Refining the trace for reconstructed and predicted orders")
# NOTE:: MIGHT NEED TO APPLY THE BAD PIXEL MASK HERE TO BINARR
msgs.work("Should the bad pixel mask be applied to the frame here?")
refine_cent, outpar = refine_traces(binarr, outpar, extrap_cent, extrap_diff,
[gord[0] - orders[0], orders[-1] - gord[-1]], orders,
ofit[0], pixlocn, function=function)
# Generate the left and right edges
lcen = refine_cent - 0.5 * extrap_diff
rcen = refine_cent + 0.5 * extrap_diff
# Return
return lcen, rcen, extrapord
def pca_pixel_slit_edges(binarr, edgearr, lcoeff, rcoeff, ldiffarr, rdiffarr,
lnmbrarr, rnmbrarr, lwghtarr, rwghtarr, lcent, rcent, plxbin,
                         function='legendre', polyorder=3, ofit=[3,2,1,0,0,0], doqa=True):
""" PCA analysis for slit edges
Parameters
----------
binarr : ndarray
edgearr : ndarray
lcoeff : ndarray
Fit coefficients for left edges
rcoeff : ndarray
Fit coefficients for right edges
ldiffarr : ndarray
rdiffarr : ndarray
lnmbrarr : ndarray
rnmbrarr : ndarray
lwghtarr : ndarray
rwghtarr : ndarray
lcent : ndarray
rcent : ndarray
    plxbin
Returns
-------
lcen : ndarray
Left edges
rcen : ndarray
Right edges
extrapord
"""
minvf, maxvf = plxbin[0, 0], plxbin[-1, 0]
maskord = np.where((np.all(lcoeff, axis=0) == False) | (np.all(rcoeff, axis=0) == False))[0]
allord = np.arange(ldiffarr.shape[0])
ww = np.where(np.in1d(allord, maskord) == False)[0]
# Unmask where an order edge is located
maskrows = np.ones(binarr.shape[1], dtype=np.int)
# ldiffarr = np.round(ldiffarr[ww]).astype(np.int)
# rdiffarr = np.round(rdiffarr[ww]).astype(np.int)
ldiffarr = np.fmax(np.fmin(np.round(ldiffarr[ww]).astype(np.int), binarr.shape[1] - 1), 0)
rdiffarr = np.fmax(np.fmin(np.round(rdiffarr[ww]).astype(np.int), binarr.shape[1] - 1), 0)
maskrows[ldiffarr] = 0
maskrows[rdiffarr] = 0
# Extract the slit edge ID numbers associated with the acceptable traces
lnmbrarr = lnmbrarr[ww]
rnmbrarr = rnmbrarr[ww]
# Fill in left/right coefficients
tcoeff = np.ones((polyorder + 1, binarr.shape[1]))
tcoeff[:, ldiffarr] = lcoeff[:, ww]
tcoeff[:, rdiffarr] = rcoeff[:, ww]
# Weight the PCA fit by the number of detections in each slit edge
pxwght = np.zeros(binarr.shape[1])
pxwght[ldiffarr] = lwghtarr[ww]
pxwght[rdiffarr] = rwghtarr[ww]
maskrw = np.where(maskrows == 1)[0]
maskrw.sort()
extrap_row = maskrows.copy()
xv = np.arange(binarr.shape[0])
# trace values
trcval = utils.func_val(tcoeff, xv, function, minx=minvf, maxx=maxvf).T
msgs.work("May need to do a check here to make sure ofit is reasonable")
lnpc = len(ofit) - 1
# Only do a PCA if there are enough good slits
if np.sum(1.0 - extrap_row) > ofit[0] + 1:
# Perform a PCA on the locations of the slits
msgs.info("Performing a PCA on the slit traces")
ordsnd = np.arange(binarr.shape[1])
xcen = xv[:, np.newaxis].repeat(binarr.shape[1], axis=1)
fitted, outpar = pca.basis(xcen, trcval, tcoeff, lnpc, ofit, weights=pxwght, x0in=ordsnd, mask=maskrw, skipx0=False, function=function)
# if doqa:
# # JXP -- NEED TO REMOVE SLF FROM THE NEXT BIT
# msgs.warn("NEED TO REMOVE SLF FROM THE NEXT BIT")
# # pca.pca_plot(slf, outpar, ofit, "Slit_Trace", pcadesc=pcadesc, addOne=False)
# Now extrapolate to the whole detector
pixpos = np.arange(binarr.shape[1])
extrap_trc, outpar = pca.extrapolate(outpar, pixpos, function=function)
# Extract the resulting edge traces
lcen = extrap_trc[:, ldiffarr]
rcen = extrap_trc[:, rdiffarr]
# Perform a final shift fit to ensure the traces closely follow the edge detections
for ii in range(lnmbrarr.size):
wedx, wedy = np.where(edgearr == lnmbrarr[ii])
shft = np.mean(lcen[wedx, ii] - wedy)
lcen[:, ii] -= shft
for ii in range(rnmbrarr.size):
wedx, wedy = np.where(edgearr == rnmbrarr[ii])
shft = np.mean(rcen[wedx, ii] - wedy)
rcen[:, ii] -= shft
else:
allord = np.arange(lcent.shape[0])
maskord = np.where((np.all(lcent, axis=1) == False) | (np.all(rcent, axis=1) == False))[0]
ww = np.where(np.in1d(allord, maskord) == False)[0]
lcen = lcent[ww, :].T.copy()
rcen = rcent[ww, :].T.copy()
extrapord = np.zeros(lcen.shape[1], dtype=np.bool)
# Return
return lcen, rcen, extrapord
def prune_peaks(hist, pks, pkidx, debug=False):
""" Identify the most well defined peaks
And prune peaks too close to one another
Parameters
----------
hist : ndarray
Histogram of detections
pks : ndarray
Indices of candidate peak locations
pkidx : int
Index of highest peak
Returns
-------
msk : ndarray
        A mask of good peaks (1) and bad peaks (0)
"""
sz_i = pks.shape[0]
msk = np.zeros(sz_i, dtype=np.int)
lgd = 1 # Was the previously inspected peak a good one?
for ii in range(0, sz_i-1):
cnt = 0
for jj in range(pks[ii], pks[ii+1]):
if hist[jj] == 0:
cnt += 1
if cnt < 2: # Two peaks too close to each other. Should we really eliminate both??
msk[ii] = 1 # JXP modifies this from 0 to 1
msk[ii+1] = 0
lgd = 0
else:
# If the difference is acceptable, the right peak is acceptable,
# the left peak is acceptable if it was not previously labelled as unacceptable
if lgd == 1:
msk[ii] = 1
msk[ii+1] = 1
lgd = 1
if debug:
debugger.set_trace()
# Now only consider the peaks closest to the highest peak
lgd = 1
# If the highest peak was zeroed out, this will zero out everyone!
for ii in range(pkidx, sz_i):
if msk[ii] == 0:
lgd = 0
elif lgd == 0:
msk[ii] = 0
lgd = 1
# If the highest peak was zeroed out, this will zero out everyone!
for ii in range(0, pkidx):
if msk[pkidx-ii] == 0:
lgd = 0
elif lgd == 0:
msk[pkidx-ii] = 0
if debug:
debugger.set_trace()
return msk
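# Illustrative sketch (not part of the original module): well-separated
# peaks survive pruning; peaks without at least 2 empty bins between them
# do not. The histograms are made up.
# >>> prune_peaks(np.array([0, 0, 5, 0, 0, 0, 4, 0, 0]), np.array([2, 6]), 0)
# array([1, 1])
# >>> prune_peaks(np.array([0, 5, 4, 0]), np.array([1, 2]), 0)
# array([1, 0])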
def refine_traces(binarr, outpar, extrap_cent, extrap_diff, extord, orders,
fitord, locations, function='polynomial'):
"""
Parameters
----------
binarr
outpar
extrap_cent
extrap_diff
extord
orders
fitord
locations
function : str, optional
    Returns
    -------
    extfit : ndarray
        Refined order centers
    outpar : dict
        Updated PCA parameters (the inputs are returned unchanged if the
        refinement fails)
    """
# Refine the orders in the positive direction
i = extord[1]
hiord = pixels.phys_to_pix(extrap_cent[:, -i-2], locations, 1)
nxord = pixels.phys_to_pix(extrap_cent[:, -i-1], locations, 1)
mask = np.ones(orders.size)
mask[0:extord[0]] = 0.0
mask[-extord[1]:] = 0.0
extfit = extrap_cent.copy()
outparcopy = copy.deepcopy(outpar)
while i > 0:
loord = hiord
hiord = nxord
nxord = pixels.phys_to_pix(extrap_cent[:,-i], locations, 1)
# Minimum counts between loord and hiord
minarrL = minbetween(binarr, loord, hiord)
minarrR = minbetween(binarr, hiord, nxord)
minarr = 0.5*(minarrL+minarrR)
srchz = np.abs(extfit[:,-i]-extfit[:,-i-1])/3.0
lopos = pixels.phys_to_pix(extfit[:,-i]-srchz, locations, 1) # The pixel indices for the bottom of the search window
numsrch = np.int(np.max(np.round(2.0*srchz-extrap_diff[:,-i])))
diffarr = np.round(extrap_diff[:,-i]).astype(np.int)
shift = find_shift(binarr, minarr, lopos, diffarr, numsrch)
relshift = np.mean(shift+extrap_diff[:,-i]/2-srchz)
if shift == -1:
msgs.info(" Refining order {0:d}: NO relative shift applied".format(int(orders[-i])))
relshift = 0.0
else:
msgs.info(" Refining order {0:d}: relative shift = {1:+f}".format(int(orders[-i]), relshift))
# Renew guess for the next order
mask[-i] = 1.0
extfit, outpar, fail = pca.refine_iter(outpar, orders, mask, -i, relshift, fitord, function=function)
if fail:
msgs.warn("Order refinement has large residuals -- check order traces")
return extrap_cent, outparcopy
i -= 1
# Refine the orders in the negative direction
i = extord[0]
loord = pixels.phys_to_pix(extrap_cent[:,i+1], locations, 1)
extrap_cent = extfit.copy()
outparcopy = copy.deepcopy(outpar)
while i > 0:
hiord = loord
loord = pixels.phys_to_pix(extfit[:,i], locations, 1)
minarr = minbetween(binarr,loord, hiord)
srchz = np.abs(extfit[:,i]-extfit[:,i-1])/3.0
lopos = pixels.phys_to_pix(extfit[:,i-1]-srchz, locations, 1)
numsrch = np.int(np.max(np.round(2.0*srchz-extrap_diff[:,i-1])))
diffarr = np.round(extrap_diff[:,i-1]).astype(np.int)
shift = find_shift(binarr, minarr, lopos, diffarr, numsrch)
relshift = np.mean(shift+extrap_diff[:,i-1]/2-srchz)
if shift == -1:
msgs.info(" Refining order {0:d}: NO relative shift applied".format(int(orders[i-1])))
relshift = 0.0
else:
msgs.info(" Refining order {0:d}: relative shift = {1:+f}".format(int(orders[i-1]),relshift))
# Renew guess for the next order
mask[i-1] = 1.0
extfit, outpar, fail = pca.refine_iter(outpar, orders, mask, i-1, relshift, fitord, function=function)
if fail:
msgs.warn("Order refinement has large residuals -- check order traces")
return extrap_cent, outparcopy
i -= 1
return extfit, outpar
def remove_slit(edgearr, lcen, rcen, tc_dict, rm_slits, TOL=3.):
""" Remove slit
Parameters
----------
edgearr : ndarray
lcen : ndarray
rcen : ndarray
tc_dict : dict
rm_slits : list
List of slits to remove
[[left0, right0], [left1, right1]]
Specified at ycen = nrows//2
TOL : float
Tolerance for specifying the left/right edge
Returns
-------
edgearr : ndarray
lcen
rcen
tc_dict
"""
# Grab the edge indexes and xval's
left_idx, left_xval, right_idx, right_xval = tc_indices(tc_dict)
# Final edges
ycen = lcen.shape[0] // 2
lcen_yc = lcen[ycen,:]
rcen_yc = rcen[ycen,:]
msk = np.ones(lcen.shape[1], dtype=bool)
# Loop on the slits to remove
for rm_slit in rm_slits:
left, right = rm_slit
# Left check
if (np.min(np.abs(left_xval-left)) < TOL) & (np.min(np.abs(lcen_yc-left)) < TOL):
ileft = np.argmin(np.abs(left_xval-left))
ilcen = np.argmin(np.abs(lcen_yc-left))
else:
msgs.warn("Could not find a left slit corresponding to {}".format(left))
return
# Right check
if (np.min(np.abs(right_xval-right)) < TOL) & (np.min(np.abs(rcen_yc-right)) < TOL):
iright = np.argmin(np.abs(right_xval-right))
ircen = np.argmin(np.abs(rcen_yc-right))
if ilcen != ircen:
msgs.warn("lcen not in sync with rcen or you misdefined the slit to remove")
return
else:
msgs.warn("Could not find a right slit corresponding to {}".format(right))
return
# Remove from final edges -- Am not sure these will be indexed identically to tc_dict..
msk[ilcen] = False
# Remove from edgearr
edgearr[edgearr == left_idx[ileft]] = 0.
edgearr[edgearr == right_idx[iright]] = 0.
tc_dict['left']['xval'].pop(str(left_idx[ileft]))
tc_dict['right']['xval'].pop(str(right_idx[iright]))
msgs.info("Removed the slit at [left,right]: {}".format(rm_slit))
# Do I need to reindex everything??
lcen = lcen[:,msk]
rcen = rcen[:,msk]
# Return
return edgearr, lcen, rcen, tc_dict
def synchronize_edges(binarr, edgearr, plxbin, lmin, lmax, lcoeff, rmin, rcoeff, lnmbrarr,
ldiffarr, lwghtarr, rnmbrarr, rdiffarr, rwghtarr, function='legendre',
polyorder=3, extrapolate=[0,0]):
""" Synchrnoizes the existing edges
For ARMLSD, this step is largely unnecessary given multi_sync()
@Ryan :: Could use help making this method less onerous..
KBW: extrapolate pos is never used
Parameters
----------
binarr
edgearr
plxbin
lmin
lmax
lcoeff
rmin
rcoeff
lnmbrarr
ldiffarr
lwghtarr
rnmbrarr
rdiffarr
rwghtarr
    Returns
    -------
    lcent : ndarray
        Left edge centroids
    rcent : ndarray
        Right edge centroids
    gord : ndarray
        Orders detected on both the left and right edges
    lcoeff, ldiffarr, lnmbrarr, lwghtarr : ndarray
        Updated left-edge fit arrays
    rcoeff, rdiffarr, rnmbrarr, rwghtarr : ndarray
        Updated right-edge fit arrays
    """
minvf, maxvf = plxbin[0, 0], plxbin[-1, 0]
# Define the array of pixel values along the dispersion direction
xv = plxbin[:, 0]
num = (lmax - lmin) // 2
lval = lmin + num # Pick an order, somewhere in between lmin and lmax
lv = (utils.func_val(lcoeff[:, lval - lmin], xv, function, minx=minvf, maxx=maxvf) \
+ 0.5).astype(np.int)
if np.any(lv < 0) or np.any(lv + 1 >= binarr.shape[1]):
msgs.warn("At least one slit is poorly traced")
msgs.info("Refer to the manual, and adjust the input trace parameters")
debugger.set_trace()
msgs.error("Cannot continue without a successful trace")
mnvalp = np.median(binarr[:, lv + 1]) # Go one row above and one row below an order edge,
mnvalm = np.median(binarr[:, lv - 1]) # then see which mean value is greater.
"""
lvp = (utils.func_val(lcoeff[:,lval+1-lmin],xv,function,min=minvf,max=maxvf)+0.5).astype(np.int)
edgbtwn = arcytrace.find_between(edgearr,lv,lvp,1)
print lval, edgbtwn
# edgbtwn is a 3 element array that determines what is between two adjacent left edges
# edgbtwn[0] is the next right order along, from left order lval
# edgbtwn[1] is only !=-1 when there's an order overlap.
# edgebtwn[2] is only used when a left order is found before a right order
if edgbtwn[0] == -1 and edgbtwn[1] == -1:
rsub = edgbtwn[2]-(lval) # There's an order overlap
elif edgbtwn[1] == -1: # No overlap
rsub = edgbtwn[0]-(lval)
else: # There's an order overlap
rsub = edgbtwn[1]-(lval)
"""
if mnvalp > mnvalm:
lvp = (utils.func_val(lcoeff[:, lval + 1 - lmin], xv, function, minx=minvf, maxx=maxvf) \
+ 0.5).astype(np.int)
edgbtwn = find_between(edgearr, lv, lvp, 1)
# edgbtwn is a 3 element array that determines what is between two adjacent left edges
# edgbtwn[0] is the next right order along, from left order lval
# edgbtwn[1] is only !=-1 when there's an order overlap.
# edgebtwn[2] is only used when a left order is found before a right order
if edgbtwn[0] == -1 and edgbtwn[1] == -1:
rsub = edgbtwn[2] - lval # There's an order overlap
elif edgbtwn[1] == -1: # No overlap
rsub = edgbtwn[0] - lval
else: # There's an order overlap
rsub = edgbtwn[1] - lval
else:
lvp = (utils.func_val(lcoeff[:, lval - 1 - lmin], xv, function,
minx=minvf, maxx=maxvf) + 0.5).astype(np.int)
edgbtwn = find_between(edgearr, lvp, lv, -1)
if edgbtwn[0] == -1 and edgbtwn[1] == -1:
rsub = edgbtwn[2] - (lval - 1) # There's an order overlap
elif edgbtwn[1] == -1: # No overlap
rsub = edgbtwn[0] - (lval - 1)
else: # There's an order overlap
rsub = edgbtwn[1] - (lval - 1)
msgs.info("Relabelling slit edges")
rsub = int(round(rsub))
if lmin < rmin - rsub:
esub = lmin - (extrapolate[0] + 1)
else:
esub = (rmin - rsub) - (extrapolate[0] + 1)
wl = np.where(edgearr < 0)
wr = np.where(edgearr > 0)
edgearr[wl] += esub
edgearr[wr] -= (esub + rsub)
lnmbrarr += esub
rnmbrarr -= (esub + rsub)
# Insert new rows into coefficients arrays if rsub != 0 (if orders were not labelled correctly, there will be a mismatch for the lcoeff and rcoeff)
almin, almax = -np.max(edgearr[wl]), -np.min(
edgearr[wl]) # min and max switched because left edges have negative values
armin, armax = np.min(edgearr[wr]), np.max(edgearr[wr])
nmord = polyorder + 1
if armin != almin:
if armin < almin:
lcoeff = np.append(np.zeros((nmord, almin - armin)), lcoeff, axis=1)
ldiffarr = np.append(np.zeros(almin - armin), ldiffarr)
lnmbrarr = np.append(np.zeros(almin - armin), lnmbrarr)
lwghtarr = np.append(np.zeros(almin - armin), lwghtarr)
else:
rcoeff = np.append(np.zeros((nmord, armin - almin)), rcoeff, axis=1)
rdiffarr = np.append(np.zeros(armin - almin), rdiffarr)
rnmbrarr = np.append(np.zeros(armin - almin), rnmbrarr)
rwghtarr = np.append(np.zeros(armin - almin), rwghtarr)
if armax != almax:
if armax < almax:
rcoeff = np.append(rcoeff, np.zeros((nmord, almax - armax)), axis=1)
rdiffarr = np.append(rdiffarr, np.zeros(almax - armax))
rnmbrarr = np.append(rnmbrarr, np.zeros(almax - armax))
rwghtarr = np.append(rwghtarr, np.zeros(almax - armax))
else:
lcoeff = np.append(lcoeff, np.zeros((nmord, armax - almax)), axis=1)
ldiffarr = np.append(ldiffarr, np.zeros(armax - almax))
lnmbrarr = np.append(lnmbrarr, np.zeros(armax - almax))
lwghtarr = np.append(lwghtarr, np.zeros(armax - almax))
    # import astropy.io.fits as pyfits
# hdu = pyfits.PrimaryHDU(edgearr)
# hdu.writeto("edgearr_{0:02d}.fits".format(det))
# Now consider traces where both the left and right edges are detected
ordunq = np.unique(edgearr)
lunqt = ordunq[np.where(ordunq < 0)[0]]
runqt = ordunq[np.where(ordunq > 0)[0]]
lunq = np.arange(lunqt.min(), lunqt.max() + 1)
runq = np.arange(runqt.min(), runqt.max() + 1)
# Determine which orders are detected on both the left and right edge
gord = np.intersect1d(-lunq, runq, assume_unique=True)
# We need to ignore the orders labelled rfail and lfail.
lg = np.where(np.in1d(-lunq, gord))[0]
rg = np.where(np.in1d(runq, gord))[0]
lgm = np.where(np.in1d(-lunq, gord, invert=True))[0]
rgm = np.where(np.in1d(runq, gord, invert=True))[0]
maxord = np.max(np.append(gord, np.append(-lunq[lgm], runq[rgm])))
lcent = utils.func_val(lcoeff[:, -lunq[lg][::-1] - 1 - extrapolate[0]], xv, function,
minx=minvf, maxx=maxvf)
rcent = utils.func_val(rcoeff[:, runq[rg] - 1 - extrapolate[0]], xv, function, minx=minvf,
maxx=maxvf)
# Return
return lcent, rcent, gord, lcoeff, ldiffarr, lnmbrarr, lwghtarr, \
rcoeff, rdiffarr, rnmbrarr, rwghtarr
# TODO Make this a proper trace_crude, rename consistently with IDL
def trace_crude_init(image, xinit0, ypass, invvar=None, nave=5, radius=3.0, maxshift0=0.5, maxshift=0.1, maxerr=0.2):
"""Python port of trace_crude_idl.pro from IDLUTILS
Modified for initial guess
Parameters
----------
image : 2D ndarray, shape (nspec, nspat)
Image for tracing
    xinit0 : ndarray
        Initial guesses for trace peak at ypass
ypass : int
Row for initial guesses
Optional Parameters
-------------------
    radius : float, default = 3.0
        Radius for centroiding
    nmed : int, default = None [NOT YET IMPLEMENTED!]
        Median filtering size down the nspec direction before performing the trace
    nave : int, default = 5
        Boxcar averaging size down the nspec direction before performing the trace.
        If set to None, no averaging will be performed.
    maxerr : float, default = 0.2
        Maximum error in centroid allowed for valid recentering.
    maxshift : float, default = 0.1
        Maximum shift in centroid allowed for valid recentering.
    maxshift0 : float, default = 0.5
        Maximum shift in centroid allowed for the initial row.
Returns
-------
xset : Trace for each fiber
xerr : Estimated error in that trace
"""
# JFH TODO add error checking on input parameters
# Init
xinit = xinit0.astype(float)
#xinit = xinit[0:3]
ntrace = xinit.size
ny = image.shape[0]
xset = np.zeros((ny,ntrace))
xerr = np.zeros((ny,ntrace))
# Make copies of the image and the inverse variance image
imgtemp = image.copy()
if invvar is None:
invtemp = np.zeros_like(image) + 1.
else:
invtemp = invvar.copy()
# ToDo implement median filtering!
# Boxcar-sum the entire image along columns by NAVE rows
if nave is not None:
nave = np.fmin(nave,ny)
# Boxcar sum the entire image weighted by inverse variance over nave spectral pixels
kernel = np.ones((nave, 1))/float(nave)
imgconv = ndimage.convolve(imgtemp*invtemp, kernel, mode='nearest')
# Add the weights
invtemp = ndimage.convolve(invtemp, kernel, mode='nearest')
# Look for pixels with infinite errors - replace with original values
ibad = invtemp == 0.0
invtemp[ibad] = 1.0
imgconv[ibad] = imgtemp[ibad]
# Renormalize the summed image by the weights
imgtemp = imgconv/invtemp
# JFH It seems odd to me that one is passing invtemp to trace_fweight, i.e. this is not correct
# error propagation. While the image should be smoothed with inverse variance weights, the new noise
# of the smoothed image has changed, and proper error propagation would then give:
# var_convol = ndimage.convolve(1/invvar, kernel**2, mode='nearest')
# invvar_convol = 1.0/var_convol
# I have not implemented this for fear of breaking the behavior, and furthermore I think the desire was not
# to have trace_fweight operate on formally correct errors.
# Recenter INITIAL Row for all traces simultaneously
#
iy = ypass * np.ones(ntrace,dtype=int)
xfit,xfiterr = trace_fweight(imgtemp, xinit, ycen = iy, invvar=invtemp, radius=radius)
# Shift
xshift = np.clip(xfit-xinit, -1*maxshift0, maxshift0) * (xfiterr < maxerr)
xset[ypass,:] = xinit + xshift
xerr[ypass,:] = xfiterr * (xfiterr < maxerr) + 999.0 * (xfiterr >= maxerr)
    # Loop from the initial row to larger row numbers
for iy in range(ypass+1, ny):
xinit = xset[iy-1, :]
ycen = iy * np.ones(ntrace,dtype=int)
xfit,xfiterr = trace_fweight(imgtemp, xinit, ycen = ycen, invvar=invtemp, radius=radius)
# Shift
xshift = np.clip(xfit-xinit, -1*maxshift, maxshift) * (xfiterr < maxerr)
# Save
xset[iy,:] = xinit + xshift
xerr[iy,:] = xfiterr * (xfiterr < maxerr) + 999.0 * (xfiterr >= maxerr)
    # Loop from the initial row to smaller row numbers
for iy in range(ypass-1, -1,-1):
xinit = xset[iy+1, :]
ycen = iy * np.ones(ntrace,dtype=int)
xfit,xfiterr = trace_fweight(imgtemp, xinit, ycen = ycen, invvar=invtemp, radius=radius)
# Shift
xshift = np.clip(xfit-xinit, -1*maxshift, maxshift) * (xfiterr < maxerr)
# Save
xset[iy,:] = xinit + xshift
xerr[iy,:] = xfiterr * (xfiterr < maxerr) + 999.0 * (xfiterr >= maxerr)
return xset, xerr
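# Illustrative usage sketch (synthetic data, not from the original source):
# trace a single bright column through a fake image, starting at row 50.
# >>> img = np.zeros((100, 50)); img[:, 24] = 1.0
# >>> xset, xerr = trace_crude_init(img, np.array([24.0]), 50)
# xset has shape (100, 1) and stays near column 24; rows whose centroid
# error exceeds maxerr are flagged with xerr = 999.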
def trace_fweight(fimage, xinit_in, radius = 3.0, ycen=None, invvar=None):
''' Routine to recenter a trace using flux-weighted centroiding.
Python port of trace_fweight.pro from IDLUTILS
Parameters
----------
    fimage: 2D ndarray
        Image for tracing with shape (nspec, nspat)
    xinit_in: ndarray
        Initial guesses for the spatial direction trace. This can either be a 2-d array with shape
        (nspec, nTrace), or a 1-d array with shape (nspec) for the case of a single trace.
Optional Parameters:
--------------------
ycen: ndarray, default = None
Optionally y-position of trace can be provided. It should be an integer array the same size as x-trace (nspec, nTrace). If
not provided np.arange(nspec) will be assumed for each trace
invvar: ndarray, default = None
Inverse variance array for the image. Array with shape (nspec, nspat) matching fimage
    radius : float or ndarray, default = 3.0
        Radius for centroiding in floating point pixels. This can either be input as a scalar or as an array to perform
        centroiding with a variable radius. If an array is input it must have the same size and shape as xinit_in, i.e.
        a 2-d array with shape (nspec, nTrace), or a 1-d array with shape (nspec) for the case of a single trace.
Returns
-------
    xnew: ndarray
        Recentroided trace. The output will have the same shape as xinit, i.e. a 2-d array with shape (nspec, nTrace)
        if multiple traces were input, or a 1-d array with shape (nspec) for
        the case of a single trace.
xerr: ndarray
Formal propagated error on recentroided trace. These errors will only make sense if invvar is passed in, since
        otherwise invvar is set to 1.0. The output will have the same shape as xinit, i.e. a 2-d array with shape
        (nspec, nTrace) if multiple traces were input, or a 1-d array with shape (nspec) for the case of a single
trace. Locations will have this error set to 999 if any of the following conditions are true: 1) the flux weighted
centroid deviates from the input guess by > radius, or 2) the centering window falls off the image, or 3) where any masked
pixels (invvar == 0.0) contribute to the centroiding. The xnew values for the pixels which have xerr = 999 are reset to
that of the input trace. These should thus be masked in any fit using the condition (xerr < 999)
TODO we should probably either output a mask or set this 999 to something else, since I could imagine this causing problems.
Revision History
----------------
Python port of trace_fweight.pro from IDLUTILS
24-Mar-1999 Written by David Schlegel, Princeton.
27-Jun-2018 Ported to python by X. Prochaska and J. Hennawi
"""
'''
# Init
nx = fimage.shape[1]
# Checks on radius
if isinstance(radius,(int, float)):
radius_out = radius
elif ((np.size(radius)==np.size(xinit_in)) & (np.shape(radius) == np.shape(xinit_in))):
radius_out = radius
else:
        raise ValueError('Boxcar radius must be either an integer, a floating point number, or an ndarray '
                         'with the same shape and size as xinit_in')
# Figure out dimensions of xinit
dim = xinit_in.shape
npix = dim[0]
ndim = xinit_in.ndim
if (ndim == 1):
nTrace = 1
else:
nTrace = dim[1]
ncen = xinit_in.size
xinit = xinit_in.flatten()
# Create xnew, xerr
xnew = xinit.astype(float)
xerr = np.full(ncen,999.0)
if npix > fimage.shape[0]:
        raise ValueError('The number of pixels in xinit npix={:d} will run off the image nspec={:d}'.format(npix,fimage.shape[0]))
if ycen is None:
if ndim == 1:
ycen = np.arange(npix, dtype='int')
elif ndim == 2:
ycen = np.outer(np.arange(npix, dtype='int'), np.ones(nTrace, dtype='int'))
else:
raise ValueError('xinit is not 1 or 2 dimensional')
else: # check values of input ycen
if (ycen.min() < 0) | (ycen.max() > (fimage.shape[0] - 1)):
raise ValueError('Input ycen values will run off the fimage')
ycen_out = ycen.astype(int)
ycen_out = ycen_out.flatten()
if np.size(xinit) != np.size(ycen_out):
raise ValueError('Number of elements in xinit and ycen must be equal')
# if npix != fimage.shape[0]:
# raise ValueError('Number of elements in xinit npix = {:d} does not match spectral dimension of '
# 'input image {:d}'.format(npix,fimage.shape[0]))
if invvar is None:
invvar = np.zeros_like(fimage) + 1.
x1 = xinit - radius_out + 0.5
x2 = xinit + radius_out + 0.5
ix1 = np.floor(x1).astype(int)
ix2 = np.floor(x2).astype(int)
fullpix = int(np.maximum(np.min(ix2-ix1)-1,0))
sumw = np.zeros_like(xinit)
sumxw = np.zeros_like(xinit)
sumwt = np.zeros_like(xinit)
sumsx1 = np.zeros_like(xinit)
sumsx2 = np.zeros_like(xinit)
qbad = np.zeros_like(xinit,dtype=bool)
# Compute
for ii in range(0,fullpix+3):
spot = ix1 - 1 + ii
ih = np.clip(spot,0,nx-1)
xdiff = spot - xinit
#
wt = np.clip(radius_out - np.abs(xdiff) + 0.5,0,1) * ((spot >= 0) & (spot < nx))
sumw = sumw + fimage[ycen_out,ih] * wt
sumwt = sumwt + wt
sumxw = sumxw + fimage[ycen_out,ih] * xdiff * wt
var_term = wt**2 / (invvar[ycen_out,ih] + (invvar[ycen_out,ih] == 0))
sumsx2 = sumsx2 + var_term
sumsx1 = sumsx1 + xdiff**2 * var_term
#qbad = qbad or (invvar[ycen_out,ih] <= 0)
#qbad = np.any([qbad, invvar[ycen_out,ih] <= 0], axis=0)
qbad = qbad | (invvar[ycen_out,ih] <= 0)
# Fill up
good = (sumw > 0) & (~qbad)
if np.sum(good) > 0:
delta_x = sumxw[good]/sumw[good]
xnew[good] = delta_x + xinit[good]
xerr[good] = np.sqrt(sumsx1[good] + sumsx2[good]*delta_x**2)/sumw[good]
bad = np.any([np.abs(xnew-xinit) > radius_out + 0.5,xinit < radius_out - 0.5,xinit > nx - 0.5 - radius_out],axis=0)
if np.sum(bad) > 0:
xnew[bad] = xinit[bad]
xerr[bad] = 999.0
# Reshape to the right size for output if more than one trace was input
if ndim > 1:
xnew = xnew.reshape(npix,nTrace)
xerr = xerr.reshape(npix,nTrace)
# Return
return xnew, xerr
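# Illustrative usage sketch (synthetic data, not from the original source):
# recenter a deliberately offset guess onto a symmetric flux profile.
# >>> img = np.zeros((5, 11)); img[:, 4:7] = [1., 3., 1.]
# >>> xnew, xerr = trace_fweight(img, np.full(5, 5.5), radius=2.0)
# xnew pulls to 5.0 (the flux-weighted centroid) in every row, and xerr
# stays below 999 because no centering window falls off the image.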
def trace_gweight(fimage, xinit_in, sigma = 1.0, ycen = None, invvar=None, maskval=-999999.9):
''' Routine to recenter a trace using gaussian-weighted centroiding. Specifically the flux in the image is weighted
by the integral of a Gaussian over a pixel. Port of idlutils trace_gweight.pro algorithm
Parameters
----------
    fimage: 2D ndarray
        Image for tracing with shape (nspec, nspat)
    xinit_in: ndarray
        Initial guesses for the spatial direction trace. This can either be a 2-d array with shape
        (nspec, nTrace), or a 1-d array with shape (nspec) for the case of a single trace.
Optional Parameters:
--------------------
    sigma : float or ndarray, default = 1.0
        Sigma of the Gaussian for centroiding in floating point pixels. This can either be input as a scalar or as an array to perform
        centroiding with a variable sigma. If an array is input it must have the same size and shape as xinit_in, i.e.
        a 2-d array with shape (nspec, nTrace), or a 1-d array with shape (nspec) for the case of a single trace.
ycen: ndarray, default = None
Optionally y-position of trace can be provided. It should be an integer array the same size as x-trace (nspec, nTrace). If
not provided np.arange(nspec) will be assumed for each trace.
invvar: ndarray, default = None
Inverse variance array for the image. Array with shape (nspec, nspat) matching fimage
Returns
-------
    xnew: ndarray
        Recentroided trace. The output will have the same shape as xinit, i.e. a 2-d array with shape (nspec, nTrace)
        if multiple traces were input, or a 1-d array with shape (nspec) for
        the case of a single trace.
    xerr: ndarray
        Formal propagated error on the recentroided trace. These errors will only make sense if invvar is passed in, since
        otherwise invvar is set to 1.0. The output will have the same shape as xinit, i.e. a 2-d array with shape
        (nspec, nTrace) if multiple traces were input, or a 1-d array with shape (nspec) for the case of a single
        trace. Locations where the gaussian weighted centroid falls off the image will have this error set to 999 and
        will have their xnew values reset to that of the input trace. These should thus be masked in any fit via a condition like (xerr < 999)
Revision History
----------------
    Python port of trace_gweight.pro from IDLUTILS
17-Jan-2000 Written by Scott Burles, Chicago
27-Jun-2018 Ported to python by X. Prochaska and J. Hennawi
'''
# Init
nx = fimage.shape[1]
    # Checks on sigma
if isinstance(sigma,(int,float)):
sigma_out = sigma
elif ((np.size(sigma)==np.size(xinit_in)) & (np.shape(sigma) == np.shape(xinit_in))):
sigma_out = sigma
else:
raise ValueError('Gaussian sigma must a be either an integer, a floating point number, or an ndarray '
'with the same shape and size as xinit_in')
# Figure out dimensions of xinit
dim = xinit_in.shape
npix = dim[0]
ndim = xinit_in.ndim
if (ndim == 1):
nTrace = 1
else:
nTrace = dim[1]
ncen = xinit_in.size
xinit = xinit_in.flatten()
# Create xnew, xerr
xnew = xinit.astype(float)
xerr = np.full(ncen,999.)
if npix > fimage.shape[0]:
raise ValueError(
            'The number of pixels in xinit npix={:d} will run off the image nspec={:d}'.format(npix, fimage.shape[0]))
if ycen is None:
if ndim == 1:
ycen = np.arange(npix, dtype=int)
elif ndim == 2:
ycen = np.outer(np.arange(npix, dtype='int'), np.ones(nTrace, dtype='int'))
else:
raise ValueError('xinit is not 1 or 2 dimensional')
else: # check value of input ycen
if (ycen.min() < 0) | (ycen.max() > (fimage.shape[0] - 1)):
raise ValueError('Input ycen values will run off the fimage')
ycen_out = ycen.astype(int)
ycen_out = ycen_out.flatten()
if np.size(xinit) != np.size(ycen_out):
raise ValueError('Number of elements in xinit and ycen must be equal')
# if npix != fimage.shape[0]:
# raise ValueError('Number of elements in xinit npix = {:d} does not match spectral dimension of '
# 'input image {:d}'.format(npix,fimage.shape[0]))
if invvar is None:
invvar = np.zeros_like(fimage) + 1.
    var = calc_ivar(invvar)
# More setting up
x_int = np.rint(xinit).astype(int)
nstep = 2*int(3.0*np.max(sigma_out)) - 1
weight = np.zeros_like(xinit)
numer = np.zeros_like(xinit)
numer_var = np.zeros_like(xinit)
meanvar = np.zeros_like(xinit)
qbad = np.zeros_like(xinit).astype(bool)
nby2 = nstep//2
for i in range(nstep):
xh = x_int - nby2 + i
xtemp = (xh - xinit - 0.5)/sigma_out/np.sqrt(2.0)
g_int = (erf(xtemp+1./sigma_out/np.sqrt(2.0)) - erf(xtemp))/2.
xs = np.fmin(np.fmax(xh,0),(nx-1))
cur_weight = fimage[ycen_out, xs] * (invvar[ycen_out, xs] > 0) * g_int * ((xh >= 0) & (xh < nx))
weight += cur_weight
numer += cur_weight * xh
numer_var += var[ycen_out,xs]*(invvar[ycen_out, xs] > 0) * (g_int**2) *((xh >= 0) & (xh < nx))
# Below is Burles calculation of the error which I'm not following
meanvar += cur_weight * cur_weight * (xinit-xh)**2/(invvar[ycen_out, xs] + (invvar[ycen_out, xs] == 0))
qbad = qbad | (xh < 0) | (xh >= nx)
# bad = np.any([bad, xh < 0, xh >= nx], axis=0)
# Masking
good = (~qbad) & (weight > 0)
if np.sum(good) > 0:
xnew[good] = numer[good]/weight[good]
xerr[good] = np.sqrt(numer_var[good])/weight[good]
# xerr[good] = np.sqrt(meanvar[good])/weight[good] # Burles error which I don't follow
# For pixels with large deviations, simply reset to initial values and set large error as with trace_fweight
bad = np.any([np.abs(xnew-xinit) > 2*sigma_out + 0.5,xinit < 2*sigma_out - 0.5,xinit > nx - 0.5 - 2*sigma_out],axis=0)
if np.sum(bad) > 0:
xnew[bad] = xinit[bad]
xerr[bad] = 999.0
# Reshape to the right size for output if more than one trace was input
if ndim > 1:
xnew = xnew.reshape(npix,nTrace)
xerr = xerr.reshape(npix,nTrace)
# Return
return xnew, xerr
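# Illustrative usage sketch (synthetic data, not from the original source):
# the Gaussian-weighted recenter behaves like trace_fweight, but weights
# the flux by the integral of a Gaussian over each pixel.
# >>> img = np.zeros((5, 11)); img[:, 4:7] = [1., 3., 1.]
# >>> xnew, xerr = trace_gweight(img, np.full(5, 5.5), sigma=1.0)
# xnew moves toward column 5.0, the center of the symmetric profile.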
def tc_indices(tc_dict):
""" Quick parser of tc_dict
Parameters
----------
tc_dict : dict
Returns
-------
left_idx : list
left_xval : ndarray
right_idx : list
right_xval : ndarray
"""
# Grab the existing edges (code is duplicated in mslit_sync)
left_idx = [int(key) for key in tc_dict['left']['xval']] # These match to the edge values in edgearr
left_idx.sort(reverse=True)
left_xval = np.array([tc_dict['left']['xval'][str(idx)] for idx in left_idx])
right_idx = [int(key) for key in tc_dict['right']['xval'].keys()] # These match to the edge values in edgearr
right_idx.sort()
right_xval = np.array([tc_dict['right']['xval'][str(idx)] for idx in right_idx])
# Return
return left_idx, left_xval, right_idx, right_xval
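# Illustrative sketch (not part of the original module): tc_dict stores
# left-edge IDs as negative keys and right-edge IDs as positive keys; the
# dictionary below is made up.
# >>> tcd = {'left': {'xval': {'-2': 10.5, '-1': 80.0}},
# ...        'right': {'xval': {'1': 60.0, '2': 130.0}}}
# >>> left_idx, left_xval, right_idx, right_xval = tc_indices(tcd)
# left_idx == [-1, -2] (sorted descending) with matching left_xval, and
# right_idx == [1, 2] (sorted ascending) with matching right_xval.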
# ToDo 1) Add an option where the user specifies the number of slits, and so it takes only the highest peaks
# from detect_lines
def trace_refine(filt_image, edges, edges_mask, ncoeff=5, npca = None, pca_explained_var = 99.8, coeff_npoly_pca = 3,
fwhm=3.0, sigthresh=100.0, trc_thresh=10.0, trc_median_frac=0.01, upper=2.0, lower=2.0, debug=False, fweight_boost=1.,
maxrej=1, smash_range=(0, 1)):
"""
Refines input trace using a PCA analysis
Args:
filt_image: ndarray
            Filtered image (usually Sobel-filtered)
edges: ndarray
Current set of edges
edges_mask: ndarray
Mask, of edges; 1 = Good
ncoeff: int, optional
Order of polynomial for fits
npca: int, optional
If provided, restrict the PCA analysis to this order
pca_explained_var: float, optional
If npca=None, the PCA will add coefficients until explaining this amount of the variance
coeff_npoly_pca: int, optional
fwhm: float, optional
Size used for tracing (fweight and gweight)
trc_thresh: float, optional
Threshold for masking pixels when the tracing is done with iter_tracefit. Basically we extract the filt_image
with a boxcar extraction, median filter it with a kernel that is trc_median_frac*nspec, and then mask all pixels
which have an extracted value < trc_thresh in the fitting.
        sigthresh: float, optional
            Threshold for an edge detection to be included
        upper: float, optional
            Upper rejection threshold (sigma) for the PCA fit
        lower: float, optional
            Lower rejection threshold (sigma) for the PCA fit
        debug:
fweight_boost: float, optional
Boost on fwhm for fweight. This was 3.0 at one point (and that may be preferred for echelle instruments)
maxrej: int, optional
Rejection parameter for PCA. 1 makes the rejection go slowly (preferred)
smash_range: tuple, optional
Spectral range to smash (in fraction of nspec) when finding slit edges, e.g. (0.5, 1.0)
If not provided, all rows are smashed
Returns:
trace_dict: dict
dict containing the edge output
"""
# edges_mask True = Good, Bad = False
# filt image has left as positive, right as negative
nedges = edges.shape[1]
nspec = filt_image.shape[0]
nspat = filt_image.shape[1]
spec_vec = np.arange(nspec)
spat_vec = np.arange(nspat)
# ToDo I would take this stuff out of this routine and put it in the calling routine. For the iterations, we've
# already done fits, so need to re-fit
edge_spec = np.outer(np.ones(nedges), spec_vec)
tset = pydl.xy2traceset(edge_spec, edges.T, ncoeff=ncoeff, maxdev=5.0, maxiter=25, invvar=edges_mask.T.astype(float))
edges_fit = tset.yfit.T
# ToDO I think this is part is still okay
spat_not_junk = np.sum(edges_mask, 1)
iref = int(np.round(np.sum(spat_not_junk * spec_vec)/np.sum(spat_not_junk)))
edges_ref = edges_fit[iref, :]
msgs.info('PCA modeling {:d} slit edges'.format(nedges))
pca_fit, poly_fit_dict, pca_mean, pca_vectors = extract.pca_trace(
edges_fit, npca=npca, pca_explained_var = pca_explained_var,coeff_npoly=coeff_npoly_pca, order_vec=edges_ref,
xinit_mean=edges_ref, upper = upper, lower = lower, minv = 0.0, maxv = float(nspat-1), debug= debug,
maxrej=maxrej)
# pca_poly_fit is list
npca_out = len(poly_fit_dict)
pca_coeff_spat = np.zeros((nspat, npca_out))
for idim in range(npca_out):
pca_coeff_spat[:, idim] = utils.func_val(poly_fit_dict[str(idim)]['coeffs'], spat_vec, 'polynomial',
minx=poly_fit_dict[str(idim)]['minv'],maxx=poly_fit_dict[str(idim)]['maxv'])
trace_model = np.outer(pca_mean, np.ones(nspat)) + (np.dot(pca_coeff_spat, pca_vectors)).T + np.arange(nspat)
# JFH What should this aperture size be? I think fwhm=3.0 since that is the width of the sobel filter
trace_model_left = trace_model - fwhm/2.0
trace_model_righ = trace_model + fwhm/2.0
# trace_model_left = trace_model - 0.5 #fwhm/2.0
# trace_model_righ = trace_model + 0.5 #fwhm/2.0
msgs.info('Extracting filt_image along curved edge traces')
filt_extract = extract.extract_asymbox2(filt_image, trace_model_left, trace_model_righ)
if debug:
ginga.show_image(filt_extract, chname ='rectified filt_image')
# Smash the filtered image
    # For instruments where light falls on only a portion of the detector,
    # it is recommended to smash only that portion
smash_spec = (int(smash_range[0]*nspec), int(smash_range[1]*nspec))
filt_smash_mean, filt_smash_median, filt_smash_sig = sigma_clipped_stats(
filt_extract[smash_spec[0]:smash_spec[1],:], axis=0, sigma=4.0)
# Perform initial finding with a very liberal threshold
# Put in Gaussian smoothing here?
    kernel_size = int(np.ceil(nspec*trc_median_frac)//2 * 2 + 1) # This ensures kernel_size is odd
trace_dict = {}
for key,sign in zip(['left','right'], [1., -1.]):
ypeak, _, edge_start, sigma_pk, _, igd, _, _ = arc.detect_lines(
sign*filt_smash_mean, cont_subtract=False, fwhm=fwhm, input_thresh=sigthresh,
max_frac_fwhm = 4.0, min_pkdist_frac_fwhm=5.0, debug=debug)
# ToDO add error catching here if there are no peaks found!
trace_dict[key] = {}
trace_dict[key]['start'] = edge_start[igd]
trace_dict[key]['nstart'] = len(edge_start[igd])
msgs.info('Found {:d} {:s} slit edges'.format(len(edge_start[igd]),key))
trace_crutch = trace_model[:, np.round(edge_start[igd]).astype(int)]
msgs.info('Iteratively tracing {:s} edges'.format(key))
# Extract a flux about the trace_crutch to mask out pixels that have no signal
flux_fw = extract.extract_boxcar(np.fmax(sign*filt_image, -1.0*sign), trace_crutch, fwhm)
flux_fw_med = signal.medfilt(flux_fw, kernel_size=(1,kernel_size))
trc_inmask_fw = (flux_fw_med.T > trc_thresh) & (trace_crutch > 0) & (trace_crutch < (nspat-1))
trace_fweight, _, _, _ = extract.iter_tracefit(np.fmax(sign*filt_image, -1.0*sign), trace_crutch, ncoeff,
trc_inmask = trc_inmask_fw, fwhm=fweight_boost*fwhm, niter=9)
# Extract a flux about the trace_fweight to mask out pixels that have no signal
flux_gw = extract.extract_boxcar(np.fmax(sign*filt_image, -1.0*sign), trace_fweight, fwhm)
flux_gw_med = signal.medfilt(flux_gw, kernel_size=(1,kernel_size))
trc_inmask_gw = (flux_gw_med.T > trc_thresh) & (trace_fweight > 0) & (trace_fweight < (nspat-1))
trace_gweight, _, _, _ = extract.iter_tracefit(np.fmax(sign*filt_image, -1.0*sign), trace_fweight, ncoeff,
trc_inmask = trc_inmask_gw, fwhm=fwhm,gweight=True, niter=6)
trace_dict[key]['trace'] = trace_gweight
color = dict(left='green', right='red')
if debug:
viewer, ch = ginga.show_image(filt_image, chname='filt_image')
for key in trace_dict.keys():
for kk in range(trace_dict[key]['nstart']):
ginga.show_trace(viewer, ch, trace_dict[key]['trace'][:,kk],trc_name = key + '_' + str(kk), color = color[key])
return trace_dict
def slit_trace_qa(frame, ltrace, rtrace, slitmask, extslit, setup, desc="",
normalize=True, use_slitid=None, out_dir=None):
""" Generate a QA plot for the slit traces
Parameters
----------
frame : ndarray
trace image
ltrace : ndarray
Left slit edge traces
rtrace : ndarray
        Right slit edge traces
    slitmask : ndarray
        Image identifying the slit index of each pixel
    extslit : ndarray
        Mask of extrapolated slits (True = extrapolated)
    setup : str
    desc : str, optional
        A description to be used as a title for the page
    use_slitid : optional
        If not None, label each slit with its slit ID
    out_dir : str, optional
        Path for the output QA file
normalize: bool, optional
Normalize the flat? If not, use zscale for output
"""
plt.rcdefaults()
plt.rcParams['font.family']= 'times new roman'
ticks_font = font_manager.FontProperties(family='times new roman', style='normal', size=16,
weight='normal', stretch='normal')
# Outfile
nspec, nspat = frame.shape
method = inspect.stack()[0][3]
outfile = qa.set_qa_filename(setup, method, out_dir=out_dir)
nslits = ltrace.shape[1]
spec_vec = np.arange(nspec)
slitcen = (ltrace + rtrace)/2.0
# Normalize flux in the traces
if normalize:
sclmin, sclmax = 0.4, 1.1
nrm_frame = np.zeros_like(frame)
for islit in range(nslits):
# Extract the flux down this trace
flat_counts = extract.extract_boxcar(frame,slitcen[:,islit],1.5)/3.0
trc_norm = np.outer(flat_counts,np.ones(nspat))
slitind = slitmask == islit
nrm_frame[slitind] = frame[slitind]/(trc_norm[slitind] + (trc_norm[slitind] <= 0.0))
else:
nrm_frame = frame.copy()
nrm_frame[frame > 0.0] = np.sqrt(nrm_frame[frame > 0.0])
sclmin, sclmax = plot.zscale(nrm_frame)
# Plot
plt.clf()
ax = plt.gca()
# set_fonts(ax)
for label in ax.get_yticklabels():
label.set_fontproperties(ticks_font)
for label in ax.get_xticklabels():
label.set_fontproperties(ticks_font)
cmm = cm.Greys_r
mplt = plt.imshow(nrm_frame, origin='lower', cmap=cmm, interpolation=None,
extent=(0., frame.shape[1], 0., frame.shape[0]))
mplt.set_clim(vmin=sclmin, vmax=sclmax)
# Axes
plt.xlim(0., frame.shape[1])
plt.ylim(0., frame.shape[0])
plt.tick_params(axis='both', which='both', bottom='off', top='off', left='off', right='off',
labelbottom='off', labelleft='off')
# Traces
iy = int(frame.shape[0]/2.)
for islit in range(nslits):
if extslit[islit] is True:
ptyp = ':'
else:
ptyp = '--'
# Left
plt.plot(ltrace[:, islit]+0.5, spec_vec, 'r'+ptyp, linewidth=0.3, alpha=0.7)
# Right
plt.plot(rtrace[:, islit]+0.5, spec_vec, 'c'+ptyp, linewidth=0.3, alpha=0.7)
# Label
if use_slitid is not None:
slitid, _, _ = get_slitid(frame.shape, ltrace, rtrace, islit, ypos=0.5)
lbl = 'S{:04d}'.format(slitid)
else:
lbl = '{0:d}'.format(islit+1)
plt.text(0.5*(ltrace[iy, islit]+rtrace[iy, islit]), spec_vec[iy], lbl, color='green', ha='center', size='small')
# Title
tstamp = qa.gen_timestamp()
if desc == "":
plt.suptitle(tstamp)
else:
plt.suptitle(desc+'\n'+tstamp)
# Write
plt.savefig(outfile, dpi=800)
plt.close()
plt.rcdefaults()
| 37.75806 | 151 | 0.561613 |
a1486ae7953d56ee1a39d35fef2de50e71de273a | 4,222 | py | Python | library/inky/eeprom.py | mfraser/inky | 431b5a1fed119904cdb0fb472382949b2ab2725f | [
"MIT"
] | null | null | null | library/inky/eeprom.py | mfraser/inky | 431b5a1fed119904cdb0fb472382949b2ab2725f | [
"MIT"
] | null | null | null | library/inky/eeprom.py | mfraser/inky | 431b5a1fed119904cdb0fb472382949b2ab2725f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Inky display-type EEPROM tools."""
import datetime
import struct
EEP_ADRESS = 0x50
EEP_WP = 12
DISPLAY_VARIANT = [
None,
'Red pHAT (High-Temp)',
'Yellow wHAT',
'Black wHAT',
'Black pHAT',
'Yellow pHAT',
'Red wHAT',
'Red wHAT (High-Temp)',
'Red wHAT',
None,
'Black pHAT (SSD1608)',
'Red pHAT (SSD1608)',
'Yellow pHAT (SSD1608)',
None,
'7-Colour (UC8159)'
]
class EPDType:
"""Class to represent EPD EEPROM structure."""
valid_colors = [None, 'black', 'red', 'yellow', None, '7colour']
def __init__(self, width, height, color, pcb_variant, display_variant, write_time=None):
"""Initialise new EEPROM data structure."""
self.width = width
self.height = height
self.color = color
if type(color) == str:
self.set_color(color)
self.pcb_variant = pcb_variant
self.display_variant = display_variant
self.eeprom_write_time = str(datetime.datetime.now()) if write_time is None else write_time
def __repr__(self):
"""Return string representation of EEPROM data structure."""
return """Display: {}x{}
Color: {}
PCB Variant: {}
Display Variant: {}
Time: {}""".format(self.width,
self.height,
self.get_color(),
self.pcb_variant / 10.0,
self.display_variant,
self.eeprom_write_time)
@classmethod
def from_bytes(class_object, data):
"""Initialise new EEPROM data structure from a bytes-like object or list."""
data = bytearray(data)
data = struct.unpack('<HHBBB22p', data)
return class_object(*data)
def update_eeprom_write_time(self):
"""Update the stored write time."""
self.eeprom_write_time = str(datetime.datetime.now())
def encode(self):
"""Return a bytearray representing the EEPROM data structure."""
return struct.pack('<HHBBB22p',
self.width,
self.height,
self.color,
self.pcb_variant,
self.display_variant,
str(datetime.datetime.now()).encode("ASCII"))
def to_list(self):
"""Return a list of bytes representing the EEPROM data structure."""
return [ord(c) for c in self.encode()]
def set_color(self, color):
"""Set the stored colour value."""
try:
self.color = self.valid_colors.index(color)
        except ValueError:
raise ValueError('Invalid colour: {}'.format(color))
def get_color(self):
"""Get the stored colour value."""
try:
return self.valid_colors[self.color]
except IndexError:
return None
def get_variant(self):
"""Return text name of the display variant."""
try:
return DISPLAY_VARIANT[self.display_variant]
except IndexError:
return None
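# Layout sketch of the 29-byte EEPROM record implied by the '<HHBBB22p'
# format string above (little-endian):
#   bytes 0-1   width  (uint16)
#   bytes 2-3   height (uint16)
#   byte  4     colour index into EPDType.valid_colors
#   byte  5     pcb_variant (tenths, e.g. 12 -> rev 1.2)
#   byte  6     display_variant index into DISPLAY_VARIANT
#   bytes 7-28  Pascal-style write-time string (length byte + up to 21 chars)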
# Normal Yellow wHAT
yellow_what_1_E = EPDType(400, 300, color='yellow', pcb_variant=12, display_variant=2)
# Normal Black wHAT
black_what_1_E = EPDType(400, 300, color='black', pcb_variant=12, display_variant=3)
# Normal Black pHAT
black_phat_1_E = EPDType(212, 104, color='black', pcb_variant=12, display_variant=4)
# Hightemp Red pHAT
red_small_1_E = EPDType(212, 104, color='red', pcb_variant=12, display_variant=1)
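# Illustrative round-trip (a sketch, not in the original file): encode one
# of the presets above and decode it back with EPDType.from_bytes.
# >>> data = black_phat_1_E.encode()  # 29 bytes
# >>> EPDType.from_bytes(data).get_variant()
# 'Black pHAT'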
def read_eeprom(i2c_bus=None):
"""Return a class representing EEPROM contents, or none."""
try:
if i2c_bus is None:
try:
from smbus2 import SMBus
except ImportError:
raise ImportError('This library requires the smbus2 module\nInstall with: sudo pip install smbus2')
i2c_bus = SMBus(1)
i2c_bus.write_i2c_block_data(EEP_ADRESS, 0x00, [0x00])
        return EPDType.from_bytes(i2c_bus.read_i2c_block_data(EEP_ADRESS, 0, 29))
except IOError:
return None
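# Usage sketch (assumes a Raspberry Pi with an Inky display on I2C bus 1):
# >>> epd = read_eeprom()
# >>> if epd is not None:
# ...     print(epd.width, epd.height, epd.get_variant())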
def main(args):
"""EEPROM Test Function."""
print(read_eeprom())
return 0
if __name__ == '__main__':
import sys
sys.exit(main(sys.argv))
| 29.117241 | 115 | 0.600663 |
acc1ff7a925a3b0195196456b40980d6fbc47749 | 8,469 | py | Python | ionoscloud/models/kubernetes_node_metadata.py | ionos-cloud/ionos-cloud-sdk-python | 3c5804697c262898e6f6a438dc40e1b45a4bb5c9 | [
"Apache-2.0"
] | null | null | null | ionoscloud/models/kubernetes_node_metadata.py | ionos-cloud/ionos-cloud-sdk-python | 3c5804697c262898e6f6a438dc40e1b45a4bb5c9 | [
"Apache-2.0"
] | null | null | null | ionoscloud/models/kubernetes_node_metadata.py | ionos-cloud/ionos-cloud-sdk-python | 3c5804697c262898e6f6a438dc40e1b45a4bb5c9 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
CLOUD API
IONOS Enterprise-grade Infrastructure as a Service (IaaS) solutions can be managed through the Cloud API, in addition or as an alternative to the \"Data Center Designer\" (DCD) browser-based tool. Both methods employ consistent concepts and features, deliver similar power and flexibility, and can be used to perform a multitude of management tasks, including adding servers, volumes, configuring networks, and so on. # noqa: E501
The version of the OpenAPI document: 6.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from ionoscloud.configuration import Configuration
class KubernetesNodeMetadata(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'etag': 'str',
'created_date': 'datetime',
'last_modified_date': 'datetime',
'state': 'str',
'last_software_updated_date': 'datetime',
}
attribute_map = {
'etag': 'etag',
'created_date': 'createdDate',
'last_modified_date': 'lastModifiedDate',
'state': 'state',
'last_software_updated_date': 'lastSoftwareUpdatedDate',
}
def __init__(self, etag=None, created_date=None, last_modified_date=None, state=None, last_software_updated_date=None, local_vars_configuration=None): # noqa: E501
"""KubernetesNodeMetadata - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._etag = None
self._created_date = None
self._last_modified_date = None
self._state = None
self._last_software_updated_date = None
self.discriminator = None
if etag is not None:
self.etag = etag
if created_date is not None:
self.created_date = created_date
if last_modified_date is not None:
self.last_modified_date = last_modified_date
if state is not None:
self.state = state
if last_software_updated_date is not None:
self.last_software_updated_date = last_software_updated_date
@property
def etag(self):
"""Gets the etag of this KubernetesNodeMetadata. # noqa: E501
        Resource's Entity Tag as defined in http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.11 The Entity Tag is also added as an 'ETag' response header to requests which don't use the 'depth' parameter. # noqa: E501
:return: The etag of this KubernetesNodeMetadata. # noqa: E501
:rtype: str
"""
return self._etag
@etag.setter
def etag(self, etag):
"""Sets the etag of this KubernetesNodeMetadata.
        Resource's Entity Tag as defined in http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.11 The Entity Tag is also added as an 'ETag' response header to requests which don't use the 'depth' parameter. # noqa: E501
:param etag: The etag of this KubernetesNodeMetadata. # noqa: E501
:type etag: str
"""
self._etag = etag
@property
def created_date(self):
"""Gets the created_date of this KubernetesNodeMetadata. # noqa: E501
The last time the resource was created. # noqa: E501
:return: The created_date of this KubernetesNodeMetadata. # noqa: E501
:rtype: datetime
"""
return self._created_date
@created_date.setter
def created_date(self, created_date):
"""Sets the created_date of this KubernetesNodeMetadata.
The last time the resource was created. # noqa: E501
:param created_date: The created_date of this KubernetesNodeMetadata. # noqa: E501
:type created_date: datetime
"""
self._created_date = created_date
@property
def last_modified_date(self):
"""Gets the last_modified_date of this KubernetesNodeMetadata. # noqa: E501
The last time the resource was modified. # noqa: E501
:return: The last_modified_date of this KubernetesNodeMetadata. # noqa: E501
:rtype: datetime
"""
return self._last_modified_date
@last_modified_date.setter
def last_modified_date(self, last_modified_date):
"""Sets the last_modified_date of this KubernetesNodeMetadata.
The last time the resource was modified. # noqa: E501
:param last_modified_date: The last_modified_date of this KubernetesNodeMetadata. # noqa: E501
:type last_modified_date: datetime
"""
self._last_modified_date = last_modified_date
@property
def state(self):
"""Gets the state of this KubernetesNodeMetadata. # noqa: E501
State of the resource. # noqa: E501
:return: The state of this KubernetesNodeMetadata. # noqa: E501
:rtype: str
"""
return self._state
@state.setter
def state(self, state):
"""Sets the state of this KubernetesNodeMetadata.
State of the resource. # noqa: E501
:param state: The state of this KubernetesNodeMetadata. # noqa: E501
:type state: str
"""
allowed_values = ["PROVISIONING", "PROVISIONED", "READY", "TERMINATING", "REBUILDING", "BUSY"] # noqa: E501
if self.local_vars_configuration.client_side_validation and state not in allowed_values: # noqa: E501
raise ValueError(
"Invalid value for `state` ({0}), must be one of {1}" # noqa: E501
.format(state, allowed_values)
)
self._state = state
@property
def last_software_updated_date(self):
"""Gets the last_software_updated_date of this KubernetesNodeMetadata. # noqa: E501
The last time the software was updated on the node. # noqa: E501
:return: The last_software_updated_date of this KubernetesNodeMetadata. # noqa: E501
:rtype: datetime
"""
return self._last_software_updated_date
@last_software_updated_date.setter
def last_software_updated_date(self, last_software_updated_date):
"""Sets the last_software_updated_date of this KubernetesNodeMetadata.
The last time the software was updated on the node. # noqa: E501
:param last_software_updated_date: The last_software_updated_date of this KubernetesNodeMetadata. # noqa: E501
:type last_software_updated_date: datetime
"""
self._last_software_updated_date = last_software_updated_date
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, KubernetesNodeMetadata):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, KubernetesNodeMetadata):
return True
return self.to_dict() != other.to_dict()
| 33.741036 | 438 | 0.638092 |
0fd60c4a351dda166f14e7eaa72f76832dbbf683 | 7,358 | py | Python | samples/client/petstore/python/petstore_api/models/enum_test.py | netfarma/openapi-generator | 8ac80203ec557a7198e48adc66e9c1961c4cd6ce | [
"Apache-2.0"
] | 5 | 2019-12-03T13:50:09.000Z | 2021-11-14T12:59:48.000Z | samples/client/petstore/python/petstore_api/models/enum_test.py | netfarma/openapi-generator | 8ac80203ec557a7198e48adc66e9c1961c4cd6ce | [
"Apache-2.0"
] | 7 | 2021-03-01T21:26:03.000Z | 2022-02-27T10:10:20.000Z | samples/client/petstore/python/petstore_api/models/enum_test.py | netfarma/openapi-generator | 8ac80203ec557a7198e48adc66e9c1961c4cd6ce | [
"Apache-2.0"
] | 4 | 2019-04-08T17:06:09.000Z | 2020-06-09T18:16:08.000Z | # coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class EnumTest(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'enum_string': 'str',
'enum_string_required': 'str',
'enum_integer': 'int',
'enum_number': 'float',
'outer_enum': 'OuterEnum'
}
attribute_map = {
'enum_string': 'enum_string',
'enum_string_required': 'enum_string_required',
'enum_integer': 'enum_integer',
'enum_number': 'enum_number',
'outer_enum': 'outerEnum'
}
def __init__(self, enum_string=None, enum_string_required=None, enum_integer=None, enum_number=None, outer_enum=None): # noqa: E501
"""EnumTest - a model defined in OpenAPI""" # noqa: E501
self._enum_string = None
self._enum_string_required = None
self._enum_integer = None
self._enum_number = None
self._outer_enum = None
self.discriminator = None
if enum_string is not None:
self.enum_string = enum_string
self.enum_string_required = enum_string_required
if enum_integer is not None:
self.enum_integer = enum_integer
if enum_number is not None:
self.enum_number = enum_number
if outer_enum is not None:
self.outer_enum = outer_enum
@property
def enum_string(self):
"""Gets the enum_string of this EnumTest. # noqa: E501
:return: The enum_string of this EnumTest. # noqa: E501
:rtype: str
"""
return self._enum_string
@enum_string.setter
def enum_string(self, enum_string):
"""Sets the enum_string of this EnumTest.
:param enum_string: The enum_string of this EnumTest. # noqa: E501
:type: str
"""
allowed_values = ["UPPER", "lower", ""] # noqa: E501
if enum_string not in allowed_values:
raise ValueError(
"Invalid value for `enum_string` ({0}), must be one of {1}" # noqa: E501
.format(enum_string, allowed_values)
)
self._enum_string = enum_string
@property
def enum_string_required(self):
"""Gets the enum_string_required of this EnumTest. # noqa: E501
:return: The enum_string_required of this EnumTest. # noqa: E501
:rtype: str
"""
return self._enum_string_required
@enum_string_required.setter
def enum_string_required(self, enum_string_required):
"""Sets the enum_string_required of this EnumTest.
:param enum_string_required: The enum_string_required of this EnumTest. # noqa: E501
:type: str
"""
if enum_string_required is None:
raise ValueError("Invalid value for `enum_string_required`, must not be `None`") # noqa: E501
allowed_values = ["UPPER", "lower", ""] # noqa: E501
if enum_string_required not in allowed_values:
raise ValueError(
"Invalid value for `enum_string_required` ({0}), must be one of {1}" # noqa: E501
.format(enum_string_required, allowed_values)
)
self._enum_string_required = enum_string_required
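    # A short usage sketch of the validation above (illustrative only, assuming
    # the generated petstore_api package is importable):
    #
    #   from petstore_api.models.enum_test import EnumTest
    #
    #   m = EnumTest(enum_string_required="lower")   # accepted
    #   m.enum_string_required = "UPPER"             # accepted
    #   m.enum_string_required = "mixed"             # raises ValueError (not in allowed_values)
    #   m.enum_string_required = None                # raises ValueError (required field)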
@property
def enum_integer(self):
"""Gets the enum_integer of this EnumTest. # noqa: E501
:return: The enum_integer of this EnumTest. # noqa: E501
:rtype: int
"""
return self._enum_integer
@enum_integer.setter
def enum_integer(self, enum_integer):
"""Sets the enum_integer of this EnumTest.
:param enum_integer: The enum_integer of this EnumTest. # noqa: E501
:type: int
"""
allowed_values = [1, -1] # noqa: E501
if enum_integer not in allowed_values:
raise ValueError(
"Invalid value for `enum_integer` ({0}), must be one of {1}" # noqa: E501
.format(enum_integer, allowed_values)
)
self._enum_integer = enum_integer
@property
def enum_number(self):
"""Gets the enum_number of this EnumTest. # noqa: E501
:return: The enum_number of this EnumTest. # noqa: E501
:rtype: float
"""
return self._enum_number
@enum_number.setter
def enum_number(self, enum_number):
"""Sets the enum_number of this EnumTest.
:param enum_number: The enum_number of this EnumTest. # noqa: E501
:type: float
"""
allowed_values = [1.1, -1.2] # noqa: E501
if enum_number not in allowed_values:
raise ValueError(
"Invalid value for `enum_number` ({0}), must be one of {1}" # noqa: E501
.format(enum_number, allowed_values)
)
self._enum_number = enum_number
@property
def outer_enum(self):
"""Gets the outer_enum of this EnumTest. # noqa: E501
:return: The outer_enum of this EnumTest. # noqa: E501
:rtype: OuterEnum
"""
return self._outer_enum
@outer_enum.setter
def outer_enum(self, outer_enum):
"""Sets the outer_enum of this EnumTest.
:param outer_enum: The outer_enum of this EnumTest. # noqa: E501
:type: OuterEnum
"""
self._outer_enum = outer_enum
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, EnumTest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 30.404959 | 174 | 0.589155 |
3d247d93f6560c3eb371b439c37ac153ff2db1b4 | 5,535 | py | Python | models/uncertainty/unet_gaussian/train.py | seansegal/fastMRI | 44ebd517d792c5f6e66c64c004d0e0603057e7e1 | [
"MIT"
] | 2 | 2019-04-17T22:31:21.000Z | 2019-05-28T10:28:01.000Z | models/uncertainty/unet_gaussian/train.py | seansegal/fastMRI | 44ebd517d792c5f6e66c64c004d0e0603057e7e1 | [
"MIT"
] | null | null | null | models/uncertainty/unet_gaussian/train.py | seansegal/fastMRI | 44ebd517d792c5f6e66c64c004d0e0603057e7e1 | [
"MIT"
] | null | null | null | import logging
import numpy as np
import torch
from common.subsample import MaskFunc
from data import transforms
from models.uncertainty.unet_gaussian.unet_model import UnetModel
from models.mri_model import MRIModel
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class DataTransform:
"""
Data Transformer for training U-Net models.
"""
def __init__(self, mask_func, resolution, which_challenge, use_seed=True):
"""
Args:
mask_func (common.subsample.MaskFunc): A function that can create a mask of
appropriate shape.
resolution (int): Resolution of the image.
which_challenge (str): Either "singlecoil" or "multicoil" denoting the dataset.
use_seed (bool): If true, this class computes a pseudo random number generator seed
from the filename. This ensures that the same mask is used for all the slices of
a given volume every time.
"""
if which_challenge not in ('singlecoil', 'multicoil'):
raise ValueError(f'Challenge should either be "singlecoil" or "multicoil"')
self.mask_func = mask_func
self.resolution = resolution
self.which_challenge = which_challenge
self.use_seed = use_seed
def __call__(self, kspace, target, attrs, fname, slice):
"""
Args:
kspace (numpy.array): Input k-space of shape (num_coils, rows, cols, 2) for multi-coil
data or (rows, cols, 2) for single coil data.
target (numpy.array): Target image
attrs (dict): Acquisition related information stored in the HDF5 object.
fname (str): File name
slice (int): Serial number of the slice.
        Returns:
            (tuple): tuple containing:
                image (torch.Tensor): Zero-filled input image.
                target_clamped (torch.Tensor): Clamped target image (used for training).
                mean (float): Mean value used for normalization.
                std (float): Standard deviation value used for normalization.
                norm (float): L2 norm of the entire volume.
                target (torch.Tensor): Unclamped target image (kept for visualization).
"""
kspace = transforms.to_tensor(kspace)
# Apply mask
seed = None if not self.use_seed else tuple(map(ord, fname))
mask = transforms.get_mask(kspace, self.mask_func, seed)
masked_kspace = mask * kspace
# Inverse Fourier Transform to get zero filled solution
image = transforms.ifft2(masked_kspace)
# Crop input image
image = transforms.complex_center_crop(image, (self.resolution, self.resolution))
# Absolute value
image = transforms.complex_abs(image)
# Apply Root-Sum-of-Squares if multicoil data
if self.which_challenge == 'multicoil':
image = transforms.root_sum_of_squares(image)
# Normalize input
image, mean, std = transforms.normalize_instance(image, eps=1e-11)
image = image.clamp(-6, 6)
target = transforms.to_tensor(target)
# Normalize target
target = transforms.normalize(target, mean, std, eps=1e-11)
target_clamped = target.clamp(-6, 6) # Return target (for viz) and target_clamped (for training)
return image, target_clamped, mean, std, attrs['norm'].astype(np.float32), target
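# The transform above is the standard zero-filled baseline: keep only the
# sampled k-space entries, inverse-FFT, and take the magnitude image. A
# numpy-only sketch of the same idea (random data and a hypothetical 50%
# column mask, not the fastMRI sampling pattern):
#
#   import numpy as np
#   rng = np.random.RandomState(0)
#   kspace = np.fft.fft2(rng.randn(64, 64))       # stand-in for sampled k-space
#   mask = rng.rand(64) < 0.5                     # keep roughly half the columns
#   zero_filled = np.abs(np.fft.ifft2(kspace * mask[None, :]))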
class DensityNetwork(MRIModel):
def __init__(self, config=None):
pass
def get_transforms(self, args):
train_mask = MaskFunc(args.center_fractions, args.accelerations)
dev_mask = MaskFunc(args.center_fractions, args.accelerations)
train_transform = DataTransform(train_mask, args.resolution, args.challenge)
val_transform = DataTransform(dev_mask, args.resolution, args.challenge, use_seed=True)
return train_transform, val_transform, None
def train_step(self, model, data, device):
input, target, mean, std, norm, _ = data
input = input.unsqueeze(1).to(device)
target = target.to(device)
output = model(input).squeeze(1)
mu, sigma = output[:, 0, :, :], torch.exp(output[:, 1, :, :]) + 1e-4
dist = torch.distributions.normal.Normal(mu, sigma)
per_pixel_loss = dist.log_prob(target)
nan_mask = torch.isnan(per_pixel_loss)
        # keep only the finite log-probs; ~nan_mask works for both bool and
        # uint8 masks (newer PyTorch rejects `1 - mask` on bool tensors)
        loss = - torch.mean(per_pixel_loss[~nan_mask])
return loss
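    # train_step fits a heteroscedastic Gaussian per pixel: the network emits
    # (mu, log sigma) and the loss is the negative log-likelihood
    # -log N(target; mu, sigma). A self-contained sketch with dummy tensors:
    #
    #   import torch
    #   mu = torch.zeros(4, 8, 8)
    #   log_sigma = torch.zeros(4, 8, 8)
    #   target = torch.randn(4, 8, 8)
    #   dist = torch.distributions.normal.Normal(mu, torch.exp(log_sigma) + 1e-4)
    #   nll = -dist.log_prob(target).mean()   # same form as the loss above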
def inference(self, model, data, device):
input, _, mean, std, _, target = data
input = input.unsqueeze(1).to(device)
target = target.to(device)
output = model(input).squeeze(1)
output, sigmas = output[:, 0, :, :], torch.exp(output[:, 1, :, :]) + 1e-4
mean = mean.unsqueeze(1).unsqueeze(2).to(device)
std = std.unsqueeze(1).unsqueeze(2).to(device)
target = transforms.unnormalize(target, mean, std)
output = transforms.unnormalize(output, mean, std)
sigmas = transforms.unnormalize(sigmas, mean, std)
confidence = - (sigmas**2).sum(dim=2).sum(dim=1)
return output, target, confidence, sigmas
def build_model(self, args):
model = UnetModel(
in_chans=1,
out_chans=2,
chans=args.num_chans,
num_pool_layers=args.num_pools,
drop_prob=args.drop_prob
).to(args.device)
return model
def build_optim(self, args, model):
optimizer = torch.optim.RMSprop(model.parameters(), args.lr, weight_decay=args.weight_decay)
return optimizer
| 39.820144 | 105 | 0.639024 |
a925c75951a7fab660151bec6ed785d6b62e7bae | 1,226 | py | Python | 2_LoadingData/load_csv_exercise.py | felker/BigDataCourse | 4f3937e94623e4171016fb5e1f03a0fa254757f7 | [
"Apache-2.0"
] | 31 | 2015-12-04T20:44:48.000Z | 2021-08-31T16:33:51.000Z | 2_LoadingData/load_csv_exercise.py | felker/BigDataCourse | 4f3937e94623e4171016fb5e1f03a0fa254757f7 | [
"Apache-2.0"
] | 3 | 2016-04-04T16:42:11.000Z | 2017-07-06T01:23:47.000Z | 2_LoadingData/load_csv_exercise.py | felker/BigDataCourse | 4f3937e94623e4171016fb5e1f03a0fa254757f7 | [
"Apache-2.0"
] | 16 | 2015-12-06T18:21:47.000Z | 2021-04-22T16:50:12.000Z | from pyspark.sql import SparkSession
import sys
import time
import os
#Exercise: use what you have learned in the LoadingData.ipynb notebook to load a set of CSV datasets
#and find the movies Tom Hanks played in (one possible solution is sketched after main() below)
def main(args):
start = time.time()
spark = SparkSession.builder.appName("LoadCsv").getOrCreate()
delimiter = "|"
#Load 3 csv files into spark dataframe
person_df = spark.read.options(header='true', inferschema='true',delimiter=delimiter).csv('/scratch/network/alexeys/BigDataCourse/csv/person_nodes.csv')
movie_df =
relationships_df =
#Prepare a linked dataset of people, movies and the roles for people who played in those movies
df = person_df.join(...
combined_df = df.join(...
#Use where statement analogous to that in Pandas dataframes to find movies associated with name "Tom Hanks"
answer = combined_df.where(...
#Return only actor name, movie title and roles
print (answer.select(...).show())
#Save the answer in JSON format
answer.coalesce(1).select(...).write.json(os.environ.get('SCRATCH_PATH')+"/json/")
end = time.time()
print ("Elapsed time: ", (end-start))
if __name__ == "__main__":
main(sys.argv)
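# One possible way to fill in the blanks above, left as a hedged reference
# sketch: the movie/relationship CSV file names and their column names (a
# shared id, the "start"/"end" endpoints, a "roles" column) are assumptions
# about the course data, not part of the original exercise.
#
#   movie_df = spark.read.options(header='true', inferschema='true', delimiter=delimiter).csv('/scratch/network/alexeys/BigDataCourse/csv/movie_nodes.csv')
#   relationships_df = spark.read.options(header='true', inferschema='true', delimiter=delimiter).csv('/scratch/network/alexeys/BigDataCourse/csv/relationships.csv')
#   df = person_df.join(relationships_df, person_df['id'] == relationships_df['start'])
#   combined_df = df.join(movie_df, relationships_df['end'] == movie_df['id'])
#   answer = combined_df.where(person_df['name'] == "Tom Hanks")
#   answer.select(person_df['name'], movie_df['title'], relationships_df['roles']).show()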
| 30.65 | 156 | 0.699837 |
dcd53f4239a9d48b4a90d3e801bd511356730e86 | 427 | py | Python | common/migrations/0002_auto_20180506_0109.py | greenelabwustl/greenelabsite | f5c798540ab06fbee260d56ebb012990e6dee962 | [
"MIT"
] | null | null | null | common/migrations/0002_auto_20180506_0109.py | greenelabwustl/greenelabsite | f5c798540ab06fbee260d56ebb012990e6dee962 | [
"MIT"
] | null | null | null | common/migrations/0002_auto_20180506_0109.py | greenelabwustl/greenelabsite | f5c798540ab06fbee260d56ebb012990e6dee962 | [
"MIT"
] | null | null | null | # Generated by Django 2.0.5 on 2018-05-06 06:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='publication_link',
name='name',
field=models.CharField(blank=True, max_length=350, null=True, verbose_name='Name'),
),
]
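    # After this AlterField runs, the model field is equivalent to the
    # following (hypothetical reconstruction of the Publication_Link model,
    # shown only to make the migration concrete):
    #
    #   class Publication_Link(models.Model):
    #       name = models.CharField('Name', max_length=350, blank=True, null=True)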
| 22.473684 | 95 | 0.611241 |
3e9f06f5d5dcfbe43b00c7135a8653736c60b756 | 25,756 | py | Python | test/dialect/oracle/test_dialect.py | ewengillies/sqlalchemy | cbfa1363d7201848a56e7209146e81b9c51aa8af | [
"MIT"
] | 6 | 2019-02-18T12:42:44.000Z | 2020-11-11T23:10:17.000Z | test/dialect/oracle/test_dialect.py | oladimejiala/sqlalchemy | cbfa1363d7201848a56e7209146e81b9c51aa8af | [
"MIT"
] | null | null | null | test/dialect/oracle/test_dialect.py | oladimejiala/sqlalchemy | cbfa1363d7201848a56e7209146e81b9c51aa8af | [
"MIT"
] | 2 | 2021-06-12T01:38:00.000Z | 2021-09-05T21:18:29.000Z | # coding: utf-8
import re
from sqlalchemy import bindparam
from sqlalchemy import Computed
from sqlalchemy import create_engine
from sqlalchemy import exc
from sqlalchemy import Float
from sqlalchemy import Integer
from sqlalchemy import literal_column
from sqlalchemy import outparam
from sqlalchemy import select
from sqlalchemy import Sequence
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import text
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
from sqlalchemy.dialects.oracle import base as oracle
from sqlalchemy.dialects.oracle import cx_oracle
from sqlalchemy.engine import url
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import AssertsExecutionResults
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import mock
from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.util import u
from sqlalchemy.util import ue
class DialectTest(fixtures.TestBase):
def test_cx_oracle_version_parse(self):
dialect = cx_oracle.OracleDialect_cx_oracle()
eq_(dialect._parse_cx_oracle_ver("5.2"), (5, 2))
eq_(dialect._parse_cx_oracle_ver("5.0.1"), (5, 0, 1))
eq_(dialect._parse_cx_oracle_ver("6.0b1"), (6, 0))
def test_minimum_version(self):
with mock.patch(
"sqlalchemy.dialects.oracle.cx_oracle.OracleDialect_cx_oracle."
"_parse_cx_oracle_ver",
lambda self, vers: (5, 1, 5),
):
assert_raises_message(
exc.InvalidRequestError,
"cx_Oracle version 5.2 and above are supported",
cx_oracle.OracleDialect_cx_oracle,
dbapi=Mock(),
)
with mock.patch(
"sqlalchemy.dialects.oracle.cx_oracle.OracleDialect_cx_oracle."
"_parse_cx_oracle_ver",
lambda self, vers: (5, 3, 1),
):
cx_oracle.OracleDialect_cx_oracle(dbapi=Mock())
class EncodingErrorsTest(fixtures.TestBase):
"""mock test for encoding_errors.
While we tried to write a round trip test, I could only reproduce the
problem on Python 3 and only for STRING/CHAR. I couldn't get a CLOB to
come back with broken encoding and also under py2k cx_Oracle would always
return a bytestring with the correct encoding. Since the test barely
worked, it is not included here to avoid future problems. It's not clear
what other levels of encode/decode are going on such that explicitly
selecting for AL16UTF16 is still returning a utf-8 bytestring under py2k or
for CLOBs, nor is it really clear that this flag is useful, however, at
least for the Py3K case, cx_Oracle supports the flag and we did have one
user reporting that they had a (non-reproducible) database which
illustrated the problem so we will pass it in.
"""
# NOTE: these numbers are arbitrary, they are not the actual
# cx_Oracle constants
cx_Oracle_NUMBER = 0
cx_Oracle_STRING = 1
cx_Oracle_FIXED_CHAR = 2
cx_Oracle_CLOB = 3
cx_Oracle_NCLOB = 4
@testing.fixture
def cx_Oracle(self):
return mock.Mock(
NUMBER=self.cx_Oracle_NUMBER,
STRING=self.cx_Oracle_STRING,
FIXED_CHAR=self.cx_Oracle_FIXED_CHAR,
CLOB=self.cx_Oracle_CLOB,
NCLOB=self.cx_Oracle_NCLOB,
version="7.0.1",
__future__=mock.Mock(),
)
_oracle_char_combinations = testing.combinations(
("STRING", cx_Oracle_STRING,),
("FIXED_CHAR", cx_Oracle_FIXED_CHAR,),
("CLOB", cx_Oracle_CLOB,),
("NCLOB", cx_Oracle_NCLOB,),
argnames="cx_oracle_type",
id_="ia",
)
def _assert_errorhandler(self, outconverter, has_errorhandler):
data = ue("\uee2c\u9a66") # this is u"\uee2c\u9a66"
utf8_w_errors = data.encode("utf-16")
if has_errorhandler:
eq_(
outconverter(utf8_w_errors),
data.encode("utf-16").decode("utf-8", "ignore"),
)
else:
assert_raises(UnicodeDecodeError, outconverter, utf8_w_errors)
@_oracle_char_combinations
@testing.requires.python3
def test_older_cx_oracle_warning(self, cx_Oracle, cx_oracle_type):
cx_Oracle.version = "6.3"
ignore_dialect = cx_oracle.dialect(
dbapi=cx_Oracle, encoding_errors="ignore"
)
ignore_outputhandler = (
ignore_dialect._generate_connection_outputtype_handler()
)
cursor = mock.Mock()
with testing.expect_warnings(
r"cx_oracle version \(6, 3\) does not support encodingErrors"
):
ignore_outputhandler(
cursor, "foo", cx_oracle_type, None, None, None
)
@_oracle_char_combinations
@testing.requires.python2
def test_encoding_errors_sqla_py2k(
self, cx_Oracle, cx_oracle_type,
):
ignore_dialect = cx_oracle.dialect(
dbapi=cx_Oracle, encoding_errors="ignore"
)
ignore_outputhandler = (
ignore_dialect._generate_connection_outputtype_handler()
)
cursor = mock.Mock()
ignore_outputhandler(cursor, "foo", cx_oracle_type, None, None, None)
outconverter = cursor.mock_calls[0][2]["outconverter"]
self._assert_errorhandler(outconverter, True)
@_oracle_char_combinations
@testing.requires.python2
def test_no_encoding_errors_sqla_py2k(
self, cx_Oracle, cx_oracle_type,
):
plain_dialect = cx_oracle.dialect(dbapi=cx_Oracle)
plain_outputhandler = (
plain_dialect._generate_connection_outputtype_handler()
)
cursor = mock.Mock()
plain_outputhandler(cursor, "foo", cx_oracle_type, None, None, None)
outconverter = cursor.mock_calls[0][2]["outconverter"]
self._assert_errorhandler(outconverter, False)
@_oracle_char_combinations
@testing.requires.python3
def test_encoding_errors_cx_oracle_py3k(
self, cx_Oracle, cx_oracle_type,
):
ignore_dialect = cx_oracle.dialect(
dbapi=cx_Oracle, encoding_errors="ignore"
)
ignore_outputhandler = (
ignore_dialect._generate_connection_outputtype_handler()
)
cursor = mock.Mock()
ignore_outputhandler(cursor, "foo", cx_oracle_type, None, None, None)
eq_(
cursor.mock_calls,
[
mock.call.var(
mock.ANY, None, cursor.arraysize, encodingErrors="ignore",
)
],
)
@_oracle_char_combinations
@testing.requires.python3
def test_no_encoding_errors_cx_oracle_py3k(
self, cx_Oracle, cx_oracle_type,
):
plain_dialect = cx_oracle.dialect(dbapi=cx_Oracle)
plain_outputhandler = (
plain_dialect._generate_connection_outputtype_handler()
)
cursor = mock.Mock()
plain_outputhandler(cursor, "foo", cx_oracle_type, None, None, None)
eq_(
cursor.mock_calls,
[mock.call.var(mock.ANY, None, cursor.arraysize)],
)
class ComputedReturningTest(fixtures.TablesTest):
__only_on__ = "oracle"
__backend__ = True
@classmethod
def define_tables(cls, metadata):
Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
Column("foo", Integer),
Column("bar", Integer, Computed("foo + 42")),
)
Table(
"test_no_returning",
metadata,
Column("id", Integer, primary_key=True),
Column("foo", Integer),
Column("bar", Integer, Computed("foo + 42")),
implicit_returning=False,
)
def test_computed_insert(self):
test = self.tables.test
with testing.db.connect() as conn:
result = conn.execute(
test.insert().return_defaults(), {"id": 1, "foo": 5}
)
eq_(result.returned_defaults, (47,))
eq_(conn.scalar(select([test.c.bar])), 47)
def test_computed_update_warning(self):
test = self.tables.test
with testing.db.connect() as conn:
conn.execute(test.insert(), {"id": 1, "foo": 5})
with testing.expect_warnings(
"Computed columns don't work with Oracle UPDATE"
):
result = conn.execute(
test.update().values(foo=10).return_defaults()
)
# returns the *old* value
eq_(result.returned_defaults, (47,))
eq_(conn.scalar(select([test.c.bar])), 52)
def test_computed_update_no_warning(self):
test = self.tables.test_no_returning
with testing.db.connect() as conn:
conn.execute(test.insert(), {"id": 1, "foo": 5})
result = conn.execute(
test.update().values(foo=10).return_defaults()
)
# no returning
eq_(result.returned_defaults, None)
eq_(conn.scalar(select([test.c.bar])), 52)
class OutParamTest(fixtures.TestBase, AssertsExecutionResults):
__only_on__ = "oracle+cx_oracle"
__backend__ = True
@classmethod
def setup_class(cls):
with testing.db.connect() as c:
c.exec_driver_sql(
"""
create or replace procedure foo(x_in IN number, x_out OUT number,
y_out OUT number, z_out OUT varchar) IS
retval number;
begin
retval := 6;
x_out := 10;
y_out := x_in * 15;
z_out := NULL;
end;
"""
)
def test_out_params(self, connection):
result = connection.execute(
text(
"begin foo(:x_in, :x_out, :y_out, " ":z_out); end;"
).bindparams(
bindparam("x_in", Float),
outparam("x_out", Integer),
outparam("y_out", Float),
outparam("z_out", String),
),
x_in=5,
)
eq_(result.out_parameters, {"x_out": 10, "y_out": 75, "z_out": None})
assert isinstance(result.out_parameters["x_out"], int)
@classmethod
def teardown_class(cls):
with testing.db.connect() as conn:
conn.execute(text("DROP PROCEDURE foo"))
class QuotedBindRoundTripTest(fixtures.TestBase):
__only_on__ = "oracle"
__backend__ = True
@testing.provide_metadata
def test_table_round_trip(self, connection):
oracle.RESERVED_WORDS.remove("UNION")
metadata = self.metadata
table = Table(
"t1",
metadata,
Column("option", Integer),
Column("plain", Integer, quote=True),
# test that quote works for a reserved word
# that the dialect isn't aware of when quote
# is set
Column("union", Integer, quote=True),
)
metadata.create_all()
connection.execute(
table.insert(), {"option": 1, "plain": 1, "union": 1}
)
eq_(connection.execute(table.select()).first(), (1, 1, 1))
connection.execute(table.update().values(option=2, plain=2, union=2))
eq_(connection.execute(table.select()).first(), (2, 2, 2))
def test_numeric_bind_round_trip(self, connection):
eq_(
connection.scalar(
select(
[
literal_column("2", type_=Integer())
+ bindparam("2_1", value=2)
]
)
),
4,
)
@testing.provide_metadata
def test_numeric_bind_in_crud(self, connection):
t = Table("asfd", self.metadata, Column("100K", Integer))
t.create(connection)
connection.execute(t.insert(), {"100K": 10})
eq_(connection.scalar(t.select()), 10)
@testing.provide_metadata
def test_expanding_quote_roundtrip(self, connection):
t = Table("asfd", self.metadata, Column("foo", Integer))
t.create(connection)
connection.execute(
select([t]).where(t.c.foo.in_(bindparam("uid", expanding=True))),
uid=[1, 2, 3],
)
class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL):
def _dialect(self, server_version, **kw):
def server_version_info(conn):
return server_version
dialect = oracle.dialect(
dbapi=Mock(version="0.0.0", paramstyle="named"), **kw
)
dialect._get_server_version_info = server_version_info
dialect._check_unicode_returns = Mock()
dialect._check_unicode_description = Mock()
dialect._get_default_schema_name = Mock()
dialect._detect_decimal_char = Mock()
dialect.__check_max_identifier_length = Mock()
dialect._get_compat_server_version_info = Mock()
return dialect
def test_ora8_flags(self):
dialect = self._dialect((8, 2, 5))
# before connect, assume modern DB
assert dialect._supports_char_length
assert dialect.use_ansi
assert not dialect._use_nchar_for_unicode
dialect.initialize(Mock())
assert not dialect.implicit_returning
assert not dialect._supports_char_length
assert not dialect.use_ansi
self.assert_compile(String(50), "VARCHAR2(50)", dialect=dialect)
self.assert_compile(Unicode(50), "VARCHAR2(50)", dialect=dialect)
self.assert_compile(UnicodeText(), "CLOB", dialect=dialect)
dialect = self._dialect((8, 2, 5), implicit_returning=True)
dialect.initialize(testing.db.connect())
assert dialect.implicit_returning
def test_default_flags(self):
"""test with no initialization or server version info"""
dialect = self._dialect(None)
assert dialect._supports_char_length
assert not dialect._use_nchar_for_unicode
assert dialect.use_ansi
self.assert_compile(String(50), "VARCHAR2(50 CHAR)", dialect=dialect)
self.assert_compile(Unicode(50), "VARCHAR2(50 CHAR)", dialect=dialect)
self.assert_compile(UnicodeText(), "CLOB", dialect=dialect)
def test_ora10_flags(self):
dialect = self._dialect((10, 2, 5))
dialect.initialize(Mock())
assert dialect._supports_char_length
assert not dialect._use_nchar_for_unicode
assert dialect.use_ansi
self.assert_compile(String(50), "VARCHAR2(50 CHAR)", dialect=dialect)
self.assert_compile(Unicode(50), "VARCHAR2(50 CHAR)", dialect=dialect)
self.assert_compile(UnicodeText(), "CLOB", dialect=dialect)
def test_use_nchar(self):
dialect = self._dialect((10, 2, 5), use_nchar_for_unicode=True)
dialect.initialize(Mock())
assert dialect._use_nchar_for_unicode
self.assert_compile(String(50), "VARCHAR2(50 CHAR)", dialect=dialect)
self.assert_compile(Unicode(50), "NVARCHAR2(50)", dialect=dialect)
self.assert_compile(UnicodeText(), "NCLOB", dialect=dialect)
def test_ident_length_in_13_is_30(self):
from sqlalchemy import __version__
m = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", __version__)
version = tuple(int(x) for x in m.group(1, 2, 3) if x is not None)
if version >= (1, 4):
length = 128
else:
length = 30
eq_(oracle.OracleDialect.max_identifier_length, length)
dialect = self._dialect((12, 2, 0))
conn = mock.Mock(
exec_driver_sql=mock.Mock(
return_value=mock.Mock(scalar=lambda: "12.2.0")
)
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (12, 2, 0))
eq_(
dialect._get_effective_compat_server_version_info(conn), (12, 2, 0)
)
eq_(dialect.max_identifier_length, length)
def test_max_ident_122(self):
dialect = self._dialect((12, 2, 0))
conn = mock.Mock(
exec_driver_sql=mock.Mock(
return_value=mock.Mock(scalar=lambda: "12.2.0")
)
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (12, 2, 0))
eq_(
dialect._get_effective_compat_server_version_info(conn), (12, 2, 0)
)
eq_(
dialect.max_identifier_length,
oracle.OracleDialect.max_identifier_length,
)
def test_max_ident_112(self):
dialect = self._dialect((11, 2, 0))
conn = mock.Mock(
exec_driver_sql=mock.Mock(return_value=mock.Mock(scalar="11.0.0"))
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (11, 2, 0))
eq_(
dialect._get_effective_compat_server_version_info(conn), (11, 2, 0)
)
eq_(dialect.max_identifier_length, 30)
def test_max_ident_122_11compat(self):
dialect = self._dialect((12, 2, 0))
conn = mock.Mock(
exec_driver_sql=mock.Mock(
return_value=mock.Mock(scalar=lambda: "11.0.0")
)
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (12, 2, 0))
eq_(
dialect._get_effective_compat_server_version_info(conn), (11, 0, 0)
)
eq_(dialect.max_identifier_length, 30)
def test_max_ident_122_11compat_vparam_raises(self):
dialect = self._dialect((12, 2, 0))
def c122():
raise exc.DBAPIError(
"statement", None, "no such table", None, None
)
conn = mock.Mock(
exec_driver_sql=mock.Mock(return_value=mock.Mock(scalar=c122))
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (12, 2, 0))
eq_(
dialect._get_effective_compat_server_version_info(conn), (12, 2, 0)
)
eq_(
dialect.max_identifier_length,
oracle.OracleDialect.max_identifier_length,
)
def test_max_ident_122_11compat_vparam_cant_parse(self):
dialect = self._dialect((12, 2, 0))
def c122():
return "12.thisiscrap.0"
conn = mock.Mock(
exec_driver_sql=mock.Mock(return_value=mock.Mock(scalar=c122))
)
dialect.initialize(conn)
eq_(dialect.server_version_info, (12, 2, 0))
eq_(
dialect._get_effective_compat_server_version_info(conn), (12, 2, 0)
)
eq_(
dialect.max_identifier_length,
oracle.OracleDialect.max_identifier_length,
)
class ExecuteTest(fixtures.TestBase):
__only_on__ = "oracle"
__backend__ = True
def test_basic(self):
with testing.db.connect() as conn:
eq_(
conn.exec_driver_sql(
"/*+ this is a comment */ SELECT 1 FROM " "DUAL"
).fetchall(),
[(1,)],
)
def test_sequences_are_integers(self, connection):
seq = Sequence("foo_seq")
seq.create(connection)
try:
val = connection.execute(seq)
eq_(val, 1)
assert type(val) is int
finally:
seq.drop(connection)
@testing.provide_metadata
def test_limit_offset_for_update(self):
metadata = self.metadata
# oracle can't actually do the ROWNUM thing with FOR UPDATE
# very well.
t = Table(
"t1",
metadata,
Column("id", Integer, primary_key=True),
Column("data", Integer),
)
metadata.create_all()
t.insert().execute(
{"id": 1, "data": 1},
{"id": 2, "data": 7},
{"id": 3, "data": 12},
{"id": 4, "data": 15},
{"id": 5, "data": 32},
)
# here, we can't use ORDER BY.
eq_(
t.select().with_for_update().limit(2).execute().fetchall(),
[(1, 1), (2, 7)],
)
        # here, it's impossible. But we'd prefer it to raise ORA-02014
# instead of issuing a syntax error.
assert_raises_message(
exc.DatabaseError,
"ORA-02014",
t.select().with_for_update().limit(2).offset(3).execute,
)
class UnicodeSchemaTest(fixtures.TestBase):
__only_on__ = "oracle"
__backend__ = True
@testing.provide_metadata
def test_quoted_column_non_unicode(self, connection):
metadata = self.metadata
table = Table(
"atable",
metadata,
Column("_underscorecolumn", Unicode(255), primary_key=True),
)
metadata.create_all()
connection.execute(table.insert(), {"_underscorecolumn": u("’é")})
result = connection.execute(
table.select().where(table.c._underscorecolumn == u("’é"))
).scalar()
eq_(result, u("’é"))
@testing.provide_metadata
def test_quoted_column_unicode(self, connection):
metadata = self.metadata
table = Table(
"atable",
metadata,
Column(u("méil"), Unicode(255), primary_key=True),
)
metadata.create_all()
connection.execute(table.insert(), {u("méil"): u("’é")})
result = connection.execute(
table.select().where(table.c[u("méil")] == u("’é"))
).scalar()
eq_(result, u("’é"))
class CXOracleConnectArgsTest(fixtures.TestBase):
__only_on__ = "oracle+cx_oracle"
__backend__ = True
def test_cx_oracle_service_name(self):
url_string = "oracle+cx_oracle://scott:tiger@host/?service_name=hr"
eng = create_engine(url_string, _initialize=False)
cargs, cparams = eng.dialect.create_connect_args(eng.url)
assert "SERVICE_NAME=hr" in cparams["dsn"]
assert "SID=hr" not in cparams["dsn"]
def test_cx_oracle_service_name_bad(self):
url_string = "oracle+cx_oracle://scott:tiger@host/hr1?service_name=hr2"
assert_raises(
exc.InvalidRequestError,
create_engine,
url_string,
_initialize=False,
)
def _test_db_opt(self, url_string, key, value):
import cx_Oracle
url_obj = url.make_url(url_string)
dialect = cx_oracle.dialect(dbapi=cx_Oracle)
arg, kw = dialect.create_connect_args(url_obj)
eq_(kw[key], value)
def _test_db_opt_unpresent(self, url_string, key):
import cx_Oracle
url_obj = url.make_url(url_string)
dialect = cx_oracle.dialect(dbapi=cx_Oracle)
arg, kw = dialect.create_connect_args(url_obj)
assert key not in kw
def _test_dialect_param_from_url(self, url_string, key, value):
import cx_Oracle
url_obj = url.make_url(url_string)
dialect = cx_oracle.dialect(dbapi=cx_Oracle)
with testing.expect_deprecated(
"cx_oracle dialect option %r should" % key
):
arg, kw = dialect.create_connect_args(url_obj)
eq_(getattr(dialect, key), value)
# test setting it on the dialect normally
dialect = cx_oracle.dialect(dbapi=cx_Oracle, **{key: value})
eq_(getattr(dialect, key), value)
def test_mode(self):
import cx_Oracle
self._test_db_opt(
"oracle+cx_oracle://scott:tiger@host/?mode=sYsDBA",
"mode",
cx_Oracle.SYSDBA,
)
self._test_db_opt(
"oracle+cx_oracle://scott:tiger@host/?mode=SYSOPER",
"mode",
cx_Oracle.SYSOPER,
)
def test_int_mode(self):
self._test_db_opt(
"oracle+cx_oracle://scott:tiger@host/?mode=32767", "mode", 32767
)
@testing.requires.cxoracle6_or_greater
def test_purity(self):
import cx_Oracle
self._test_db_opt(
"oracle+cx_oracle://scott:tiger@host/?purity=attr_purity_new",
"purity",
cx_Oracle.ATTR_PURITY_NEW,
)
def test_encoding(self):
self._test_db_opt(
"oracle+cx_oracle://scott:tiger@host/"
"?encoding=AMERICAN_AMERICA.UTF8",
"encoding",
"AMERICAN_AMERICA.UTF8",
)
def test_threaded(self):
self._test_db_opt(
"oracle+cx_oracle://scott:tiger@host/?threaded=true",
"threaded",
True,
)
self._test_db_opt_unpresent(
"oracle+cx_oracle://scott:tiger@host/", "threaded"
)
def test_events(self):
self._test_db_opt(
"oracle+cx_oracle://scott:tiger@host/?events=true", "events", True
)
def test_threaded_deprecated_at_dialect_level(self):
with testing.expect_deprecated(
"The 'threaded' parameter to the cx_oracle dialect"
):
dialect = cx_oracle.dialect(threaded=False)
arg, kw = dialect.create_connect_args(
url.make_url("oracle+cx_oracle://scott:tiger@dsn")
)
eq_(kw["threaded"], False)
def test_deprecated_use_ansi(self):
self._test_dialect_param_from_url(
"oracle+cx_oracle://scott:tiger@host/?use_ansi=False",
"use_ansi",
False,
)
def test_deprecated_auto_convert_lobs(self):
self._test_dialect_param_from_url(
"oracle+cx_oracle://scott:tiger@host/?auto_convert_lobs=False",
"auto_convert_lobs",
False,
)
| 31.955335 | 79 | 0.607082 |
a22323ee475a6683d606d5551a5c4748c47a5237 | 637 | py | Python | config.py | yuanqili/ModernFlask | bf75ce9b6bafa9f1c1e942ab34ceb01a80e1c16d | [
"BSD-4-Clause-UC"
] | null | null | null | config.py | yuanqili/ModernFlask | bf75ce9b6bafa9f1c1e942ab34ceb01a80e1c16d | [
"BSD-4-Clause-UC"
] | null | null | null | config.py | yuanqili/ModernFlask | bf75ce9b6bafa9f1c1e942ab34ceb01a80e1c16d | [
"BSD-4-Clause-UC"
] | null | null | null | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess secret key'
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or 'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_TRACK_MODIFICATIONS = False
MAIL_SERVER = os.environ.get('MAIL_SERVER')
MAIL_PORT = int(os.environ.get('MAIL_PORT') or 25)
MAIL_USE_TLS = os.environ.get('MAIL_USE_TLS') is not None
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
ADMINS = ['your-email@example.com']
| 33.526316 | 110 | 0.704867 |
77090d497d4538fe78ddae36b6460a70e1f6d8bf | 429 | py | Python | todotxt-sort-priority-then-number.py | ChrisDavison/scripts | ebbe52626aac041f37a1143607dc0cc74515a198 | [
"MIT"
] | null | null | null | todotxt-sort-priority-then-number.py | ChrisDavison/scripts | ebbe52626aac041f37a1143607dc0cc74515a198 | [
"MIT"
] | null | null | null | todotxt-sort-priority-then-number.py | ChrisDavison/scripts | ebbe52626aac041f37a1143607dc0cc74515a198 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import re
import sys
# a prioritized line looks like "12 (A) buy milk": a leading line number,
# then a todo.txt-style "(A)"-"(Z)" priority; the raw string avoids the
# invalid-escape-sequence warning for "\d"
re_pri = re.compile(r"\d+ \(([A-Z])\)")
tasks_with_priority = []
tasks_no_priority = []
for line in sys.stdin:
pri = re_pri.match(line)
if pri:
tasks_with_priority.append((pri.group(1), line.strip()))
else:
tasks_no_priority.append(line.strip())
for _pri, task in sorted(tasks_with_priority):
print(task)
for task in tasks_no_priority:
print(task)
| 22.578947 | 64 | 0.671329 |
c726fa113a313d9a347f71e8121653a3c5c8edd0 | 4,619 | py | Python | tests/test_ref_coors.py | Gkdnz/SfePy | a3a39d4e087705e9e0e8884cbf63513a2ded2108 | [
"BSD-3-Clause"
] | null | null | null | tests/test_ref_coors.py | Gkdnz/SfePy | a3a39d4e087705e9e0e8884cbf63513a2ded2108 | [
"BSD-3-Clause"
] | null | null | null | tests/test_ref_coors.py | Gkdnz/SfePy | a3a39d4e087705e9e0e8884cbf63513a2ded2108 | [
"BSD-3-Clause"
] | null | null | null | import os.path as op
import numpy as nm
import sfepy
from sfepy.discrete.common import Field
import sfepy.discrete.common.global_interp as gi
from sfepy.base.testing import TestCommon
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
test = Test(conf=conf, options=options)
return test
def test_ref_coors_fem(self):
from sfepy.discrete.fem import Mesh, FEDomain
mesh = Mesh.from_file('meshes/3d/special/cross3d.mesh',
prefix_dir=sfepy.data_dir)
domain = FEDomain('domain', mesh)
omega = domain.create_region('Omega', 'all')
field = Field.from_args('linear', nm.float64, 'scalar', omega,
approx_order=1)
mcoors = field.domain.get_mesh_coors()
conn = field.domain.get_conn()
bbox = field.domain.get_mesh_bounding_box()
ray = nm.linspace(bbox[0, 0], bbox[1, 0], 7)
coors = nm.zeros((ray.shape[0], 3), dtype=nm.float64)
def gen_rays():
coors[:, 0] = ray
yield coors
coors.fill(0.0)
coors[:, 1] = ray
yield coors
coors.fill(0.0)
coors[:, 2] = ray
yield coors
ok = True
ctx = field.create_basis_context()._geo_ctx
for ir, coors in enumerate(gen_rays()):
self.report('ray %d' % ir)
ref_coors, cells, status = gi.get_ref_coors(field, coors,
strategy='general',
close_limit=0.0,
verbose=False)
self.report(ref_coors)
self.report(cells)
self.report(status)
# In the distorted cell 2, the Newton method finds a solution
# outside of the cell. This will be fixed when box constraints
# are applied.
_ok = nm.all((status == 0) | ((cells == 2) & (status == 3)))
if not _ok:
self.report('wrong status %s for ray %d!' % (status, ir))
ok = ok and _ok
for ic, cell in enumerate(cells):
ctx.iel = cell
bf = ctx.evaluate(ref_coors[ic:ic+1], check_errors=False)
cell_coors = mcoors[conn[cell]]
coor = nm.dot(bf, cell_coors).ravel()
_ok = nm.allclose(coor, coors[ic], atol=1e-14, rtol=0.0)
if not _ok:
self.report('ray %d point %d:' % (ir, ic))
self.report(' - wrong reference coordinates %s!'
% ref_coors[ic])
self.report(' - given point: %s' % coors[ic])
self.report(' - found point: %s' % coor)
ok = ok and _ok
return ok
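    # The dot-product check above uses the defining property of reference
    # coordinates: evaluating the element basis at the reference point and
    # dotting with the cell's node coordinates must reproduce the physical
    # point. A 1D sketch with the linear basis [1 - xi, xi] on a cell
    # spanning [2.0, 6.0]:
    #
    #   import numpy as np
    #   cell_coors = np.array([2.0, 6.0])
    #   xi = 0.25
    #   bf = np.array([1.0 - xi, xi])
    #   assert np.isclose(np.dot(bf, cell_coors), 3.0)   # 2 + 0.25 * (6 - 2)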
def test_ref_coors_iga(self):
from sfepy.discrete.iga.domain import IGDomain
domain = IGDomain.from_file(op.join(sfepy.data_dir,
'meshes/iga/block2d.iga'))
omega = domain.create_region('Omega', 'all')
field = Field.from_args('iga', nm.float64, 'scalar', omega,
approx_order='iga', poly_space_base='iga')
mcoors = field.nurbs.cps
conn = field.get_econn('volume', field.region)
bbox = domain.eval_mesh.get_bounding_box()
ray = nm.linspace(bbox[0, 0], bbox[1, 0], 11)
coors = nm.c_[ray, ray]
ref_coors, cells, status = gi.get_ref_coors(field, coors,
strategy='general',
close_limit=0.0,
verbose=False)
self.report(ref_coors)
self.report(cells)
self.report(status)
ok = nm.all(status == 0)
ctx = field.create_basis_context()
for ic, cell in enumerate(cells):
ctx.iel = cell
bf = ctx.evaluate(ref_coors[ic:ic+1])
cell_coors = mcoors[conn[cell]]
coor = nm.dot(bf, cell_coors).ravel()
_ok = nm.allclose(coor, coors[ic], atol=1e-14, rtol=0.0)
if not _ok:
self.report('point %d:' % ic)
self.report(' - wrong reference coordinates %s!'
% ref_coors[ic])
self.report(' - given point: %s' % coors[ic])
self.report(' - found point: %s' % coor)
ok = ok and _ok
return ok
| 33.471014 | 75 | 0.497294 |
bf24a75b96a00d53db2a9cb2ce48f7f90546ceab | 4,132 | py | Python | tests/test_her.py | DonsetPG/stable-baselines | 7048a63841a3808a14ae5bc8643ee8a83ae64a21 | [
"MIT"
] | 2 | 2021-04-20T18:04:53.000Z | 2021-04-20T18:04:55.000Z | tests/test_her.py | DonsetPG/stable-baselines | 7048a63841a3808a14ae5bc8643ee8a83ae64a21 | [
"MIT"
] | null | null | null | tests/test_her.py | DonsetPG/stable-baselines | 7048a63841a3808a14ae5bc8643ee8a83ae64a21 | [
"MIT"
] | 3 | 2020-05-21T11:36:54.000Z | 2022-01-10T12:08:36.000Z | import os
import pytest
from stable_baselines import HER, DQN, SAC, DDPG, TD3
from stable_baselines.her import GoalSelectionStrategy, HERGoalEnvWrapper
from stable_baselines.her.replay_buffer import KEY_TO_GOAL_STRATEGY
from stable_baselines.common.bit_flipping_env import BitFlippingEnv
from stable_baselines.common.vec_env import DummyVecEnv, VecNormalize
N_BITS = 10
def model_predict(model, env, n_steps, additional_check=None):
"""
Test helper
:param model: (rl model)
:param env: (gym.Env)
:param n_steps: (int)
:param additional_check: (callable)
"""
obs = env.reset()
for _ in range(n_steps):
action, _ = model.predict(obs)
obs, reward, done, _ = env.step(action)
if additional_check is not None:
additional_check(obs, action, reward, done)
if done:
obs = env.reset()
@pytest.mark.parametrize('goal_selection_strategy', list(GoalSelectionStrategy))
@pytest.mark.parametrize('model_class', [DQN, SAC, DDPG, TD3])
@pytest.mark.parametrize('discrete_obs_space', [False, True])
def test_her(model_class, goal_selection_strategy, discrete_obs_space):
env = BitFlippingEnv(N_BITS, continuous=model_class in [DDPG, SAC, TD3],
max_steps=N_BITS, discrete_obs_space=discrete_obs_space)
# Take random actions 10% of the time
kwargs = {'random_exploration': 0.1} if model_class in [DDPG, SAC, TD3] else {}
model = HER('MlpPolicy', env, model_class, n_sampled_goal=4, goal_selection_strategy=goal_selection_strategy,
verbose=0, **kwargs)
model.learn(1000)
@pytest.mark.parametrize('model_class', [DDPG, SAC, DQN, TD3])
def test_long_episode(model_class):
"""
Check that the model does not break when the replay buffer is still empty
after the first rollout (because the episode is not over).
"""
# n_bits > nb_rollout_steps
n_bits = 10
env = BitFlippingEnv(n_bits, continuous=model_class in [DDPG, SAC, TD3],
max_steps=n_bits)
kwargs = {}
if model_class == DDPG:
kwargs['nb_rollout_steps'] = 9 # < n_bits
elif model_class in [DQN, SAC, TD3]:
kwargs['batch_size'] = 8 # < n_bits
kwargs['learning_starts'] = 0
model = HER('MlpPolicy', env, model_class, n_sampled_goal=4, goal_selection_strategy='future',
verbose=0, **kwargs)
model.learn(200)
@pytest.mark.parametrize('goal_selection_strategy', [list(KEY_TO_GOAL_STRATEGY.keys())[0]])
@pytest.mark.parametrize('model_class', [DQN, SAC, DDPG, TD3])
def test_model_manipulation(model_class, goal_selection_strategy):
env = BitFlippingEnv(N_BITS, continuous=model_class in [DDPG, SAC, TD3], max_steps=N_BITS)
env = DummyVecEnv([lambda: env])
model = HER('MlpPolicy', env, model_class, n_sampled_goal=3, goal_selection_strategy=goal_selection_strategy,
verbose=0)
model.learn(1000)
model_predict(model, env, n_steps=100, additional_check=None)
model.save('./test_her')
del model
# NOTE: HER does not support VecEnvWrapper yet
with pytest.raises(AssertionError):
model = HER.load('./test_her', env=VecNormalize(env))
model = HER.load('./test_her')
# Check that the model raises an error when the env
# is not wrapped (or no env passed to the model)
with pytest.raises(ValueError):
model.predict(env.reset())
env_ = BitFlippingEnv(N_BITS, continuous=model_class in [DDPG, SAC, TD3], max_steps=N_BITS)
env_ = HERGoalEnvWrapper(env_)
model_predict(model, env_, n_steps=100, additional_check=None)
model.set_env(env)
model.learn(1000)
model_predict(model, env_, n_steps=100, additional_check=None)
assert model.n_sampled_goal == 3
del model
env = BitFlippingEnv(N_BITS, continuous=model_class in [DDPG, SAC, TD3], max_steps=N_BITS)
model = HER.load('./test_her', env=env)
model.learn(1000)
model_predict(model, env_, n_steps=100, additional_check=None)
assert model.n_sampled_goal == 3
if os.path.isfile('./test_her.pkl'):
os.remove('./test_her.pkl')
| 34.433333 | 113 | 0.693127 |
93993b527516dce8623e428779f704c153a40671 | 1,019 | py | Python | test/functional/rpc_bip38.py | MichaelHDesigns/HodlCash | c3ca85c436ba40afadfda11db207068a16527379 | [
"MIT"
] | null | null | null | test/functional/rpc_bip38.py | MichaelHDesigns/HodlCash | c3ca85c436ba40afadfda11db207068a16527379 | [
"MIT"
] | null | null | null | test/functional/rpc_bip38.py | MichaelHDesigns/HodlCash | c3ca85c436ba40afadfda11db207068a16527379 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2018-2019 The HodlCash developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for BIP38 encrypting and decrypting addresses."""
from test_framework.test_framework import HodlCashTestFramework
from test_framework.util import assert_equal
class Bip38Test(HodlCashTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def run_test(self):
password = 'test'
address = self.nodes[0].getnewaddress()
privkey = self.nodes[0].dumpprivkey(address)
self.log.info('encrypt address %s' % (address))
bip38key = self.nodes[0].bip38encrypt(address, password)['Encrypted Key']
self.log.info('decrypt bip38 key %s' % (bip38key))
assert_equal(self.nodes[1].bip38decrypt(bip38key, password)['Address'], address)
if __name__ == '__main__':
Bip38Test().main()
| 36.392857 | 88 | 0.712463 |
f731cba9a9650e31323b7b28e22fc1895735634d | 9,219 | py | Python | code/combine_sectors.py | jradavenport/helloTESS | 1bd4680640e7b92ae3b2eeba19cc63e8b834eead | [
"MIT"
] | null | null | null | code/combine_sectors.py | jradavenport/helloTESS | 1bd4680640e7b92ae3b2eeba19cc63e8b834eead | [
"MIT"
] | null | null | null | code/combine_sectors.py | jradavenport/helloTESS | 1bd4680640e7b92ae3b2eeba19cc63e8b834eead | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib
import os
from glob import glob
from matplotlib.colors import LogNorm
from scipy.optimize import curve_fit
from astropy.table import Table
import astropy.io.fits as fits
from astropy.stats import LombScargle, BoxLeastSquares
import exoplanet as xo
from stuff import FINDflare, EasyE
matplotlib.rcParams.update({'font.size':18})
matplotlib.rcParams.update({'font.family':'serif'})
ftype = '.pdf'
def RunSectors(tess_dir = '/Users/james/Desktop/tess/', run_dir = '/Users/james/Desktop/helloTESS/', clobber=True, Nsector=3):
'''
    Do some simpler things on stars that are observed in multiple sectors
should probably be combined with run_sector.py.... but oh well for now!
'''
sectors = ['sector001', 'sector002', 'sector003', 'sector004', 'sector005', 'sector006', 'sector007', 'sector008']
# just in case glob wants to re-order things, be sure grab them in Sector order
files = []
for k in range(len(sectors)):
files = files + glob(tess_dir + sectors[k] + '/*.fits', recursive=True)
# get the unique object IDs (NOT the simplest way, but matches the next step)
obj = pd.Series(files).str.split('-', expand=True).groupby(by=2).count().index
# get the count of unique object IDs
Nobj = pd.Series(files).str.split('-', expand=True).groupby(by=2).count()[0]
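    # (equivalent, somewhat simpler form of the two lines above, noted as a
    # sketch: value_counts gives the unique IDs and the per-ID file counts in
    # one pass, and sort_index matches the groupby ordering)
    #
    #   counts = pd.Series(files).str.split('-', expand=True)[2].value_counts().sort_index()
    #   obj, Nobj = counts.index, counts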
# for k in range(max(Nobj)):
# print(k+1, sum(Nobj > k))
# obj[0] # example Object ID (TIC #)
    o5 = np.where((Nobj > Nsector))[0]  # named "o5" because it originally selected objects with over 5 observations; now Nsector sets the threshold
    print(str(len(o5)) + ' objects with Nobs > ' + str(Nsector) + ' Sectors')
for k in range(0, len(o5)):
print(k, obj[o5][k])
files_k = pd.Series(files)[np.where((pd.Series(files).str.split('-', expand=True)[2] == obj[o5][k]))[0]].values
rot_out_k = MultiSector(files_k, clobber=clobber)
if k==0:
rot_out = rot_out_k
else:
rot_out = pd.concat([rot_out, rot_out_k], ignore_index=True, sort=False)
rot_out.to_csv(run_dir + '/outputs/longerP_rot_out.csv')
return
def MultiSector(TICs, tess_dir = '/Users/james/Desktop/tess/',
run_dir = '/Users/james/Desktop/helloTESS/',
clobber=False):
'''
Run the basic set of tools on every light curve -> NOW FOR MULTI-SECTOR DATA
Produce a diagnostic plot for each light curve
'''
if not os.path.isdir(run_dir + 'figures/longerP'):
os.makedirs(run_dir + 'figures/longerP')
tbit = False
for k in range(len(TICs)):
tbl = -1
try:
tbl = Table.read(TICs[k], format='fits')
tbl['PDCSAP_FLUX'] = tbl['PDCSAP_FLUX'] - np.nanmedian(tbl['PDCSAP_FLUX'])
if tbit == False:
df_tbl = tbl.to_pandas()
tbit = True
else:
df_tbl = pd.concat([df_tbl, tbl.to_pandas()], ignore_index=True, sort=False)
except:
tbl = -1
print('bad file: ' + TICs[k])
df_tbl['PDCSAP_FLUX'] = df_tbl['PDCSAP_FLUX'] + np.nanmedian(df_tbl['SAP_FLUX'])
# make harsh quality cuts, and chop out a known bad window of time (might add more later)
AOK = (df_tbl['QUALITY'] == 0) & ((df_tbl['TIME'] < 1347) | (df_tbl['TIME'] > 1350))
# do a running median for a basic smooth
smo = df_tbl['PDCSAP_FLUX'][AOK].rolling(128, center=True).median().values
med = np.nanmedian(smo)
# make an output plot for every file
figname = run_dir + 'figures/longerP/' + TICs[0].split('-')[2] + '.jpeg'
makefig = ((not os.path.exists(figname)) | clobber)
if makefig:
plt.figure(figsize=(14,6))
plt.errorbar(df_tbl['TIME'][AOK], df_tbl['PDCSAP_FLUX'][AOK]/med, yerr=df_tbl['PDCSAP_FLUX_ERR'][AOK]/med,
linestyle=None, alpha=0.25, label='PDC_FLUX')
plt.plot(df_tbl['TIME'][AOK], smo/med, label='128pt MED', c='orange')
# Smed = np.nanmedian(df_tbl['SAP_FLUX'][AOK])
# plt.errorbar(df_tbl['TIME'][AOK], df_tbl['SAP_FLUX'][AOK]/Smed, yerr=df_tbl['SAP_FLUX_ERR'][AOK]/Smed,
# linestyle=None, alpha=0.25, label='SAP_FLUX')
# require at least 1000 good datapoints for analysis
if sum(AOK) > 1000:
# find OK points in the smoothed LC
SOK = np.isfinite(smo)
# Lomb Scargle
LS = LombScargle(df_tbl['TIME'][AOK][SOK], smo[SOK]/med, dy=df_tbl['PDCSAP_FLUX_ERR'][AOK][SOK]/med)
frequency, power = LS.autopower(minimum_frequency=1./40.,
maximum_frequency=1./0.1,
samples_per_peak=7)
best_frequency = frequency[np.argmax(power)]
per_out = 1./best_frequency
per_amp = np.nanmax(power)
per_med = np.nanmedian(power)
per_std = np.nanstd(smo[SOK]/med)
if np.nanmax(power) > 0.2:
LSmodel = LS.model(df_tbl['TIME'][AOK][SOK], best_frequency)
if makefig:
plt.plot(df_tbl['TIME'][AOK][SOK], LSmodel,
label='L-S P='+format(1./best_frequency, '6.3f')+'d, pk='+format(np.nanmax(power), '6.3f'),
c='green')
# ACF w/ Exoplanet package
acf = xo.autocorr_estimator(df_tbl['TIME'][AOK][SOK].values, smo[SOK]/med,
yerr=df_tbl['PDCSAP_FLUX_ERR'][AOK][SOK].values/med,
min_period=0.1, max_period=40, max_peaks=2)
ACF_1pk = -1
ACF_1dt = -1
if len(acf['peaks']) > 0:
ACF_1dt = acf['peaks'][0]['period']
ACF_1pk = acf['autocorr'][1][np.where((acf['autocorr'][0] == acf['peaks'][0]['period']))[0]][0]
if makefig:
plt.plot(df_tbl['TIME'][AOK][SOK],
np.nanstd(smo[SOK]/med) * ACF_1pk * np.sin(df_tbl['TIME'][AOK][SOK] / ACF_1dt * 2 * np.pi) + 1,
label = 'ACF=' + format(ACF_1dt, '6.3f') + 'd, pk=' + format(ACF_1pk, '6.3f'), lw=2,
alpha=0.7, c='FireBrick')
# here is where a simple Eclipse (EB) finder goes
EE = EasyE(smo[SOK]/med, df_tbl['PDCSAP_FLUX_ERR'][AOK][SOK]/med, N1=5, N2=3, N3=2)
EclFlg = 0
if np.size(EE) > 0:
EclFlg = 1
if makefig:
for j in range(len(EE[0])):
plt.scatter(df_tbl['TIME'][AOK][SOK][(EE[0][j]):(EE[1][j]+1)],
smo[SOK] [(EE[0][j]):(EE[1][j]+1)] / med,
color='k', marker='s', s=5, alpha=0.75, label='_nolegend_')
plt.scatter([],[], color='k', marker='s', s=5, alpha=0.75, label='Ecl?')
# add BLS
# bls = BoxLeastSquares(df_tbl['TIME'][AOK][SOK], smo[SOK]/med, dy=df_tbl['PDCSAP_FLUX_ERR'][AOK][SOK]/med)
# blsP = bls.autopower(0.1, method='fast', objective='snr')
# blsPer = blsP['period'][np.argmax(blsP['power'])]
# if ((4*np.nanstd(blsP['power']) + np.nanmedian(blsP['power']) < np.nanmax(blsP['power'])) &
# (np.nanmax(blsP['power']) > 50.) &
# (blsPer < 0.95 * np.nanmax(blsP['period']))
# ):
# blsPeriod = blsPer
# blsAmpl = np.nanmax(blsP['power'])
# plt.plot([],[], ' ', label='BLS='+format(blsPer, '6.3f')+'d')
if makefig:
plt.title(TICs[0].split('-')[2], fontsize=12)
plt.ylabel('Flux')
plt.xlabel('BJD - 2457000 (days)')
plt.legend(fontsize=10)
plt.savefig(figname, bbox_inches='tight', pad_inches=0.25, dpi=100)
plt.close()
# # write per-sector output files
# ALL_TIC = pd.Series(files_i).str.split('-', expand=True).iloc[:,-3].astype('int')
# flare_out = pd.DataFrame(data={'TIC':ALL_TIC[FL_id], 'i0':FL_t0, 'i1':FL_t1, 'med':FL_f0, 'peak':FL_f1})
# flare_out.to_csv(run_dir + sector + '_flare_out.csv')
rot_out = pd.DataFrame(data={'TIC':TICs[0].split('-')[2],
'per':per_out, 'Pamp':per_amp, 'Pmed':per_med, 'StdLC':per_std,
'acf_pk':ACF_1pk, 'acf_per':ACF_1dt, 'ecl_flg':EclFlg}, index=[0])
# 'bls_period':blsPeriod, 'bls_ampl':blsAmpl, )
# rot_out.to_csv(run_dir + sector + '_rot_out.csv')
return rot_out
if __name__ == "__main__":
'''
let this file be called from the terminal directly. e.g.:
$ python analysis.py
'''
RunSectors()
### junk code i probably dont need
# sect1 = glob(tess_dir + sectors[0] + '/*.fits', recursive=True)
# sect2 = glob(tess_dir + sectors[1] + '/*.fits', recursive=True)
# sect3 = glob(tess_dir + sectors[2] + '/*.fits', recursive=True)
# sect4 = glob(tess_dir + sectors[3] + '/*.fits', recursive=True)
# sect5 = glob(tess_dir + sectors[4] + '/*.fits', recursive=True)
# sect6 = glob(tess_dir + sectors[5] + '/*.fits', recursive=True)
#
# files = sect1 + sect2 + sect3 + sect4 + sect5 + sect6
# # make into an array for looping later!
# s_lens = [len(sect1), len(sect2), len(sect3), len(sect4), len(sect5), len(sect6)]
# print(s_lens, len(files))
| 38.735294 | 126 | 0.574574 |
14373c503399e2dd76bab8076de19f6ed6a033d1 | 598 | py | Python | Arase/MEPe/RebuildDataIndex.py | mattkjames7/Arase | 996167be35a13bbb1fdddfbe75e3a06d124b1d25 | [
"MIT"
] | null | null | null | Arase/MEPe/RebuildDataIndex.py | mattkjames7/Arase | 996167be35a13bbb1fdddfbe75e3a06d124b1d25 | [
"MIT"
] | 1 | 2021-06-10T22:51:09.000Z | 2021-06-10T22:51:09.000Z | Arase/MEPe/RebuildDataIndex.py | mattkjames7/Arase | 996167be35a13bbb1fdddfbe75e3a06d124b1d25 | [
"MIT"
] | null | null | null | import numpy as np
from .. import Globals
from ..Tools.Downloading._RebuildDataIndex import _RebuildDataIndex
def RebuildDataIndex(L,prod):
'''
Rebuilds the data index for a data product.
Inputs
======
L : int
Level of data to download
prod : str
Data product to download
Available data products
=======================
L prod
2 'omniflux'
2 '3dflux'
3 '3dflux'
'''
vfmt = ['v','_']
idxfname = Globals.DataPath + 'MEPe/Index-L{:01d}-{:s}.dat'.format(L,prod)
datapath = Globals.DataPath + 'MEPe/l{:01d}/{:s}/'.format(L,prod)
_RebuildDataIndex(datapath,idxfname,vfmt)
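# Usage sketch, following the product table in the docstring (assuming the
# Arase package exposes this function, e.g. via Arase.MEPe):
#
#   from Arase.MEPe import RebuildDataIndex
#   RebuildDataIndex(2, 'omniflux')   # rebuild the index for L2 omniflux data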
| 19.933333 | 75 | 0.662207 |
df1913bce449880ea8b822e4852bafc04c84f649 | 63,176 | py | Python | atom/nucleus/python/nucleus_api/api/utils_api.py | sumit4-ttn/SDK | b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff | [
"Apache-2.0"
] | null | null | null | atom/nucleus/python/nucleus_api/api/utils_api.py | sumit4-ttn/SDK | b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff | [
"Apache-2.0"
] | null | null | null | atom/nucleus/python/nucleus_api/api/utils_api.py | sumit4-ttn/SDK | b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Hydrogen Atom API
The Hydrogen Atom API # noqa: E501
OpenAPI spec version: 1.7.0
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from nucleus_api.api_client import ApiClient
class UtilsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_account_status_using_post(self, account_status_request, **kwargs): # noqa: E501
"""Create an account status # noqa: E501
Create an account status record for an account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_account_status_using_post(account_status_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AccountStatus account_status_request: accountStatusRequest (required)
:return: AccountStatus
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_account_status_using_post_with_http_info(account_status_request, **kwargs) # noqa: E501
else:
(data) = self.create_account_status_using_post_with_http_info(account_status_request, **kwargs) # noqa: E501
return data
def create_account_status_using_post_with_http_info(self, account_status_request, **kwargs): # noqa: E501
"""Create an account status # noqa: E501
Create an account status record for an account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_account_status_using_post_with_http_info(account_status_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AccountStatus account_status_request: accountStatusRequest (required)
:return: AccountStatus
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['account_status_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_account_status_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'account_status_request' is set
if ('account_status_request' not in params or
params['account_status_request'] is None):
raise ValueError("Missing the required parameter `account_status_request` when calling `create_account_status_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'account_status_request' in params:
body_params = params['account_status_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/account_status', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AccountStatus', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_stage_using_post(self, stage_request, **kwargs): # noqa: E501
"""Create an account stage # noqa: E501
Create a new account stage # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_stage_using_post(stage_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Stage stage_request: stageRequest (required)
:return: Stage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_stage_using_post_with_http_info(stage_request, **kwargs) # noqa: E501
else:
(data) = self.create_stage_using_post_with_http_info(stage_request, **kwargs) # noqa: E501
return data
def create_stage_using_post_with_http_info(self, stage_request, **kwargs): # noqa: E501
"""Create an account stage # noqa: E501
Create a new account stage # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_stage_using_post_with_http_info(stage_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Stage stage_request: stageRequest (required)
:return: Stage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['stage_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_stage_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'stage_request' is set
if ('stage_request' not in params or
params['stage_request'] is None):
raise ValueError("Missing the required parameter `stage_request` when calling `create_stage_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'stage_request' in params:
body_params = params['stage_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/stage', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Stage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_transaction_code_using_post(self, transaction_request, **kwargs): # noqa: E501
"""Create a transaction code # noqa: E501
Create a new transaction code for your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_transaction_code_using_post(transaction_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TransactionCode transaction_request: transactionRequest (required)
:return: TransactionCode
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_transaction_code_using_post_with_http_info(transaction_request, **kwargs) # noqa: E501
else:
(data) = self.create_transaction_code_using_post_with_http_info(transaction_request, **kwargs) # noqa: E501
return data
def create_transaction_code_using_post_with_http_info(self, transaction_request, **kwargs): # noqa: E501
"""Create a transaction code # noqa: E501
Create a new transaction code for your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_transaction_code_using_post_with_http_info(transaction_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TransactionCode transaction_request: transactionRequest (required)
:return: TransactionCode
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['transaction_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_transaction_code_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'transaction_request' is set
if ('transaction_request' not in params or
params['transaction_request'] is None):
raise ValueError("Missing the required parameter `transaction_request` when calling `create_transaction_code_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'transaction_request' in params:
body_params = params['transaction_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/transaction_code', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TransactionCode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_account_status_using_delete(self, account_status_id, **kwargs): # noqa: E501
"""Delete an account status # noqa: E501
Permanently delete an account status record from an account’s history. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_account_status_using_delete(account_status_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str account_status_id: UUID account_status_id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_account_status_using_delete_with_http_info(account_status_id, **kwargs) # noqa: E501
else:
(data) = self.delete_account_status_using_delete_with_http_info(account_status_id, **kwargs) # noqa: E501
return data
def delete_account_status_using_delete_with_http_info(self, account_status_id, **kwargs): # noqa: E501
"""Delete an account status # noqa: E501
Permanently delete an account status record from an account’s history. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_account_status_using_delete_with_http_info(account_status_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str account_status_id: UUID account_status_id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['account_status_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_account_status_using_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'account_status_id' is set
if ('account_status_id' not in params or
params['account_status_id'] is None):
raise ValueError("Missing the required parameter `account_status_id` when calling `delete_account_status_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'account_status_id' in params:
path_params['account_status_id'] = params['account_status_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/account_status/{account_status_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_stage_using_delete(self, stage_id, **kwargs): # noqa: E501
"""Delete an account stage # noqa: E501
Permanently delete an account stage. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_stage_using_delete(stage_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str stage_id: UUID stage_id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_stage_using_delete_with_http_info(stage_id, **kwargs) # noqa: E501
else:
(data) = self.delete_stage_using_delete_with_http_info(stage_id, **kwargs) # noqa: E501
return data
def delete_stage_using_delete_with_http_info(self, stage_id, **kwargs): # noqa: E501
"""Delete an account stage # noqa: E501
Permanently delete an account stage. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_stage_using_delete_with_http_info(stage_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str stage_id: UUID stage_id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['stage_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_stage_using_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'stage_id' is set
if ('stage_id' not in params or
params['stage_id'] is None):
raise ValueError("Missing the required parameter `stage_id` when calling `delete_stage_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'stage_id' in params:
path_params['stage_id'] = params['stage_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/stage/{stage_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_transaction_code_using_delete(self, transaction_code_id, **kwargs): # noqa: E501
"""Delete a transaction code # noqa: E501
Permanently delete a transaction code for your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_transaction_code_using_delete(transaction_code_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str transaction_code_id: UUID transaction_code_id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_transaction_code_using_delete_with_http_info(transaction_code_id, **kwargs) # noqa: E501
else:
(data) = self.delete_transaction_code_using_delete_with_http_info(transaction_code_id, **kwargs) # noqa: E501
return data
def delete_transaction_code_using_delete_with_http_info(self, transaction_code_id, **kwargs): # noqa: E501
"""Delete a transaction code # noqa: E501
Permanently delete a transaction code for your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_transaction_code_using_delete_with_http_info(transaction_code_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str transaction_code_id: UUID transaction_code_id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['transaction_code_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_transaction_code_using_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'transaction_code_id' is set
if ('transaction_code_id' not in params or
params['transaction_code_id'] is None):
raise ValueError("Missing the required parameter `transaction_code_id` when calling `delete_transaction_code_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'transaction_code_id' in params:
path_params['transaction_code_id'] = params['transaction_code_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/transaction_code/{transaction_code_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_account_status_all_using_get(self, **kwargs): # noqa: E501
"""List all account statuses # noqa: E501
Get the account status history information for all accounts. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_account_status_all_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool ascending: ascending
:param str filter: filter
:param str order_by: order_by
:param int page: page
:param int size: size
:return: PageAccountStatus
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_account_status_all_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_account_status_all_using_get_with_http_info(**kwargs) # noqa: E501
return data
def get_account_status_all_using_get_with_http_info(self, **kwargs): # noqa: E501
"""List all account statuses # noqa: E501
Get the account status history information for all accounts. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_account_status_all_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool ascending: ascending
:param str filter: filter
:param str order_by: order_by
:param int page: page
:param int size: size
:return: PageAccountStatus
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['ascending', 'filter', 'order_by', 'page', 'size'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_account_status_all_using_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'ascending' in params:
query_params.append(('ascending', params['ascending'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if 'order_by' in params:
query_params.append(('order_by', params['order_by'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/account_status', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageAccountStatus', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_account_status_using_get(self, account_status_id, **kwargs): # noqa: E501
"""Retrieve an account status # noqa: E501
Retrieve the information for a specific account status record for an account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_account_status_using_get(account_status_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str account_status_id: UUID account_status_id (required)
:return: AccountStatus
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_account_status_using_get_with_http_info(account_status_id, **kwargs) # noqa: E501
else:
(data) = self.get_account_status_using_get_with_http_info(account_status_id, **kwargs) # noqa: E501
return data
def get_account_status_using_get_with_http_info(self, account_status_id, **kwargs): # noqa: E501
"""Retrieve an account status # noqa: E501
Retrieve the information for a specific account status record for an account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_account_status_using_get_with_http_info(account_status_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str account_status_id: UUID account_status_id (required)
:return: AccountStatus
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['account_status_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_account_status_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'account_status_id' is set
if ('account_status_id' not in params or
params['account_status_id'] is None):
raise ValueError("Missing the required parameter `account_status_id` when calling `get_account_status_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'account_status_id' in params:
path_params['account_status_id'] = params['account_status_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/account_status/{account_status_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AccountStatus', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_stage_all_using_get(self, **kwargs): # noqa: E501
"""List all account stages # noqa: E501
Get the information for all possible account stages. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_stage_all_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool ascending: ascending
:param str filter: filter
:param str order_by: order_by
:param int page: page
:param int size: size
:return: PageStage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_stage_all_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_stage_all_using_get_with_http_info(**kwargs) # noqa: E501
return data
def get_stage_all_using_get_with_http_info(self, **kwargs): # noqa: E501
"""List all account stages # noqa: E501
Get the information for all possible account stages. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_stage_all_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool ascending: ascending
:param str filter: filter
:param str order_by: order_by
:param int page: page
:param int size: size
:return: PageStage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['ascending', 'filter', 'order_by', 'page', 'size'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_stage_all_using_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'ascending' in params:
query_params.append(('ascending', params['ascending'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if 'order_by' in params:
query_params.append(('order_by', params['order_by'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/stage', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageStage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_stage_using_get(self, stage_id, **kwargs): # noqa: E501
"""Retrieve an account stage # noqa: E501
Retrieve the information for a specific account stage. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_stage_using_get(stage_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str stage_id: UUID stage_id (required)
:return: Stage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_stage_using_get_with_http_info(stage_id, **kwargs) # noqa: E501
else:
(data) = self.get_stage_using_get_with_http_info(stage_id, **kwargs) # noqa: E501
return data
def get_stage_using_get_with_http_info(self, stage_id, **kwargs): # noqa: E501
"""Retrieve an account stage # noqa: E501
Retrieve the information for a specific account stage. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_stage_using_get_with_http_info(stage_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str stage_id: UUID stage_id (required)
:return: Stage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['stage_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_stage_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'stage_id' is set
if ('stage_id' not in params or
params['stage_id'] is None):
raise ValueError("Missing the required parameter `stage_id` when calling `get_stage_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'stage_id' in params:
path_params['stage_id'] = params['stage_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/stage/{stage_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Stage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_transaction_code_all_using_get(self, **kwargs): # noqa: E501
"""List all transaction codes # noqa: E501
Get the information for all transaction codes defined by your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transaction_code_all_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool ascending: ascending
:param str filter: filter
:param str order_by: order_by
:param int page: page
:param int size: size
:return: PageTransactionCode
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_transaction_code_all_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_transaction_code_all_using_get_with_http_info(**kwargs) # noqa: E501
return data
def get_transaction_code_all_using_get_with_http_info(self, **kwargs): # noqa: E501
"""List all transaction codes # noqa: E501
Get the information for all transaction codes defined by your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transaction_code_all_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool ascending: ascending
:param str filter: filter
:param str order_by: order_by
:param int page: page
:param int size: size
:return: PageTransactionCode
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['ascending', 'filter', 'order_by', 'page', 'size'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_transaction_code_all_using_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'ascending' in params:
query_params.append(('ascending', params['ascending'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if 'order_by' in params:
query_params.append(('order_by', params['order_by'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/transaction_code', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageTransactionCode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_transaction_code_using_get(self, transaction_code_id, **kwargs): # noqa: E501
"""Retrieve a transaction code # noqa: E501
Retrieve the information for a transaction code defined by your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transaction_code_using_get(transaction_code_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str transaction_code_id: UUID transaction_code_id (required)
:return: TransactionCode
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_transaction_code_using_get_with_http_info(transaction_code_id, **kwargs) # noqa: E501
else:
(data) = self.get_transaction_code_using_get_with_http_info(transaction_code_id, **kwargs) # noqa: E501
return data
def get_transaction_code_using_get_with_http_info(self, transaction_code_id, **kwargs): # noqa: E501
"""Retrieve a transaction code # noqa: E501
Retrieve the information for a transaction code defined by your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transaction_code_using_get_with_http_info(transaction_code_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str transaction_code_id: UUID transaction_code_id (required)
:return: TransactionCode
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['transaction_code_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_transaction_code_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'transaction_code_id' is set
if ('transaction_code_id' not in params or
params['transaction_code_id'] is None):
raise ValueError("Missing the required parameter `transaction_code_id` when calling `get_transaction_code_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'transaction_code_id' in params:
path_params['transaction_code_id'] = params['transaction_code_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/transaction_code/{transaction_code_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TransactionCode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_account_status_using_put(self, account_status, account_status_id, **kwargs): # noqa: E501
"""Update an account status # noqa: E501
Update an account status record for an account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_account_status_using_put(account_status, account_status_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AccountStatus account_status: account_status (required)
:param str account_status_id: UUID account_status_id (required)
:return: AccountStatus
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_account_status_using_put_with_http_info(account_status, account_status_id, **kwargs) # noqa: E501
else:
(data) = self.update_account_status_using_put_with_http_info(account_status, account_status_id, **kwargs) # noqa: E501
return data
def update_account_status_using_put_with_http_info(self, account_status, account_status_id, **kwargs): # noqa: E501
"""Update an account status # noqa: E501
Update an account status record for an account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_account_status_using_put_with_http_info(account_status, account_status_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AccountStatus account_status: account_status (required)
:param str account_status_id: UUID account_status_id (required)
:return: AccountStatus
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['account_status', 'account_status_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_account_status_using_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'account_status' is set
if ('account_status' not in params or
params['account_status'] is None):
raise ValueError("Missing the required parameter `account_status` when calling `update_account_status_using_put`") # noqa: E501
# verify the required parameter 'account_status_id' is set
if ('account_status_id' not in params or
params['account_status_id'] is None):
raise ValueError("Missing the required parameter `account_status_id` when calling `update_account_status_using_put`") # noqa: E501
collection_formats = {}
path_params = {}
if 'account_status_id' in params:
path_params['account_status_id'] = params['account_status_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'account_status' in params:
body_params = params['account_status']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/account_status/{account_status_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AccountStatus', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_stage_using_put(self, stage, stage_id, **kwargs): # noqa: E501
"""Update an account stage # noqa: E501
Update the information for an account stage. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_stage_using_put(stage, stage_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Stage stage: stage (required)
:param str stage_id: UUID stage_id (required)
:return: Stage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_stage_using_put_with_http_info(stage, stage_id, **kwargs) # noqa: E501
else:
(data) = self.update_stage_using_put_with_http_info(stage, stage_id, **kwargs) # noqa: E501
return data
def update_stage_using_put_with_http_info(self, stage, stage_id, **kwargs): # noqa: E501
"""Update an account stage # noqa: E501
Update the information for an account stage. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_stage_using_put_with_http_info(stage, stage_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Stage stage: stage (required)
:param str stage_id: UUID stage_id (required)
:return: Stage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['stage', 'stage_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_stage_using_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'stage' is set
if ('stage' not in params or
params['stage'] is None):
raise ValueError("Missing the required parameter `stage` when calling `update_stage_using_put`") # noqa: E501
# verify the required parameter 'stage_id' is set
if ('stage_id' not in params or
params['stage_id'] is None):
raise ValueError("Missing the required parameter `stage_id` when calling `update_stage_using_put`") # noqa: E501
collection_formats = {}
path_params = {}
if 'stage_id' in params:
path_params['stage_id'] = params['stage_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'stage' in params:
body_params = params['stage']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/stage/{stage_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Stage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_transaction_code_using_put(self, transaction_code, transaction_code_id, **kwargs): # noqa: E501
"""Update a transaction code # noqa: E501
Update a transaction code for your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_transaction_code_using_put(transaction_code, transaction_code_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TransactionCode transaction_code: transaction_code (required)
:param str transaction_code_id: UUID transaction_code_id (required)
:return: TransactionCode
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_transaction_code_using_put_with_http_info(transaction_code, transaction_code_id, **kwargs) # noqa: E501
else:
(data) = self.update_transaction_code_using_put_with_http_info(transaction_code, transaction_code_id, **kwargs) # noqa: E501
return data
def update_transaction_code_using_put_with_http_info(self, transaction_code, transaction_code_id, **kwargs): # noqa: E501
"""Update a transaction code # noqa: E501
Update a transaction code for your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_transaction_code_using_put_with_http_info(transaction_code, transaction_code_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TransactionCode transaction_code: transaction_code (required)
:param str transaction_code_id: UUID transaction_code_id (required)
:return: TransactionCode
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['transaction_code', 'transaction_code_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_transaction_code_using_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'transaction_code' is set
if ('transaction_code' not in params or
params['transaction_code'] is None):
raise ValueError("Missing the required parameter `transaction_code` when calling `update_transaction_code_using_put`") # noqa: E501
# verify the required parameter 'transaction_code_id' is set
if ('transaction_code_id' not in params or
params['transaction_code_id'] is None):
raise ValueError("Missing the required parameter `transaction_code_id` when calling `update_transaction_code_using_put`") # noqa: E501
collection_formats = {}
path_params = {}
if 'transaction_code_id' in params:
path_params['transaction_code_id'] = params['transaction_code_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'transaction_code' in params:
body_params = params['transaction_code']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/transaction_code/{transaction_code_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TransactionCode', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
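# --- Hedged usage sketch (added for illustration; not part of the generated
# client). Host and OAuth2 credential configuration are assumptions here; the
# method names, keyword arguments, and async_req behaviour all come from the
# class above:
#
#   from nucleus_api.api_client import ApiClient
#   api = UtilsApi(ApiClient())
#   stages = api.get_stage_all_using_get(page=0, size=25)   # synchronous call
#   thread = api.get_stage_all_using_get(async_req=True)    # asynchronous call
#   stages = thread.get()   # .get() blocks until the worker thread finishes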
| 40.917098 | 150 | 0.623987 |
7d5ea9d3d3a28ca6d1ba52cef1dd150e30ba098b | 3,126 | py | Python | oldVersion/main_2stage_enumeration.py | zhuzhich/13_CBM | 4fd98ce621b106a6db6b4849463261ebd19713c4 | ["FSFAP"] | null | null | null | oldVersion/main_2stage_enumeration.py | zhuzhich/13_CBM | 4fd98ce621b106a6db6b4849463261ebd19713c4 | ["FSFAP"] | null | null | null | oldVersion/main_2stage_enumeration.py | zhuzhich/13_CBM | 4fd98ce621b106a6db6b4849463261ebd19713c4 | ["FSFAP"] | null | null | null |
#Author: Zhicheng Zhu
#Email: zhicheng.zhu@ttu.edu, yisha.xiang@ttu.edu
#copyright @ 2019: Zhicheng Zhu. All right reserved.
#Info:
# enumeration of two-stage, no solver
#
#Last update: 02/17/2019
#!/usr/bin/python
from __future__ import print_function
import sys
import itertools
import time
import class_info
#######################################
#1. initialization, START FROM HERE!!!.
#######################################
#init system parameter
def main(sysInfo):
	start_time = time.perf_counter();	#fix: time.clock() was removed in Python 3.8
# 2.1 solve the last stage problem:
omega = [];
for i in itertools.product(list(range(sysInfo.comInfoAll[0].nStates)), \
repeat = sysInfo.nComponents):
omega.append(list(i));
objValue = float("inf");
solutionX = [];
solutionY = [];
solutionZ = [];
secondStageObj = [];
#get second-stage objective values.
for w in range(len(omega)):
scenState = omega[w];
tmpObj = 0;
z = 0;
for i in range(sysInfo.nComponents):
y = 0;
if scenState[i] >= sysInfo.nStates - 1:
y = 1;
z = 1;
tmpObj += sysInfo.comInfoAll[i].cCM * y;
tmpObj = tmpObj + sysInfo.cS * z;
secondStageObj.append(tmpObj);
	#get solution space:
	solutionSpace = [];	#bug fix: this list was appended to below without ever being initialized
for i in itertools.product([0, 1], repeat = sysInfo.nComponents):
solutionSpace.append(list(i));
#find out the optimal solution
for i in range(len(solutionSpace)):
soluitonSpaceI = solutionSpace[i];
objTmp = 0;
for w2 in range(len(omega)):
objTmp1 = 1;
infeasibile = False;
if secondStageObj[w2] == 0:
continue;
for j in range(sysInfo.nComponents):
#check feasibility first
solIX = soluitonSpaceI[j];
if (solIX == 0) and \
(sysInfo.comInfoAll[j].initState >= \
sysInfo.comInfoAll[j].nStates - 1):
infeasibile = True;
break;
prob1 = sysInfo.comInfoAll[j].currentToFail;
prob2 = sysInfo.comInfoAll[j].newToFail;
objTmp1 = objTmp1 * (prob1 * (1 - solIX) + prob2 * solIX);
if objTmp1 == 0:
break;
if infeasibile == True:
break;
objTmp = objTmp + objTmp1*secondStageObj[w2];
if infeasibile == True:
objTmp = float("inf");
else:
#add first stage
solutionZTmp = 0;
solutionYTmp = [];
for ii in range(sysInfo.nComponents):
objTmp = objTmp + sysInfo.comInfoAll[ii].cPM * soluitonSpaceI[ii];
if soluitonSpaceI[ii] == 1:
solutionZTmp = 1;
if sysInfo.comInfoAll[ii].initState >= \
sysInfo.comInfoAll[ii].nStates - 1:
solutionYTmp.append(1);
objTmp = objTmp + sysInfo.comInfoAll[ii].cCM - \
sysInfo.comInfoAll[ii].cPM;
else:
solutionYTmp.append(0);
objTmp = objTmp + solutionZTmp * sysInfo.cS;
if objTmp < objValue:
objValue = objTmp;
solutionX = soluitonSpaceI;
solutionZ = solutionZTmp;
solutionY = solutionYTmp;
########################################
#3. Result handling
########################################
	end_time = time.perf_counter();
time_elapsed = end_time - start_time;
N0 = [];
N1 = [];
for i in range(sysInfo.nComponents):
if solutionX[i] == 0:
N0.append(i);
else:
N1.append(i);
sysInfo.time = time_elapsed;
sysInfo.N0 = N0;
sysInfo.N1 = N1;
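#--- Hedged driver sketch (added for illustration). class_info is not shown in
#this file, so the stubs below only mirror the attributes that main() actually
#reads; every numeric value is an invented placeholder:
#
#	class _Com:		#stand-in for class_info component info
#		nStates = 4; initState = 0; cPM = 1.0; cCM = 5.0;
#		currentToFail = 0.2; newToFail = 0.05;
#	class _Sys:		#stand-in for class_info system info
#		nComponents = 3; nStates = 4; cS = 10.0;
#		comInfoAll = [_Com() for _ in range(3)];
#	sysInfo = _Sys();
#	main(sysInfo);
#	print(sysInfo.N0, sysInfo.N1, sysInfo.time);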
| 24.614173 | 73 | 0.618682 |
9554b3594f252bb33337b5ef928d39c9cd96cb45 | 2,492 | py | Python | src/pybel/manager/utils.py | cthoyt/pybel | ed66f013a77f9cbc513892b0dad1025b8f68bb46 | ["Apache-2.0"] | null | null | null | src/pybel/manager/utils.py | cthoyt/pybel | ed66f013a77f9cbc513892b0dad1025b8f68bb46 | ["Apache-2.0"] | 11 | 2017-12-28T08:03:14.000Z | 2019-01-15T02:13:58.000Z | src/pybel/manager/utils.py | cthoyt/pybel | ed66f013a77f9cbc513892b0dad1025b8f68bb46 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
"""Utilities for the PyBEL database manager."""
from typing import Dict, Mapping, Optional, Tuple, Union
from ..utils import parse_datetime
def extract_shared_required(config, definition_header: str = "Namespace"):
"""Get the required annotations shared by BEL namespace and annotation resource documents.
:param dict config: The configuration dictionary representing a BEL resource
:param definition_header: ``Namespace`` or ``AnnotationDefinition``
:rtype: dict
"""
return {
"keyword": config[definition_header]["Keyword"],
"created": parse_datetime(config[definition_header]["CreatedDateTime"]),
}
def extract_shared_optional(bel_resource, definition_header: str = "Namespace"):
"""Get the optional annotations shared by BEL namespace and annotation resource documents.
:param dict bel_resource: A configuration dictionary representing a BEL resource
:param definition_header: ``Namespace`` or ``AnnotationDefinition``
:rtype: dict
"""
shared_mapping = {
"description": (definition_header, "DescriptionString"),
"version": (definition_header, "VersionString"),
"author": ("Author", "NameString"),
"license": ("Author", "CopyrightString"),
"contact": ("Author", "ContactInfoString"),
"citation": ("Citation", "NameString"),
"citation_description": ("Citation", "DescriptionString"),
"citation_version": ("Citation", "PublishedVersionString"),
"citation_url": ("Citation", "ReferenceURL"),
}
result = {}
update_insert_values(bel_resource, shared_mapping, result)
if "PublishedDate" in bel_resource.get("Citation", {}):
result["citation_published"] = parse_datetime(bel_resource["Citation"]["PublishedDate"])
return result
def update_insert_values(
bel_resource: Mapping,
mapping: Mapping[str, Tuple[str, str]],
values: Dict[str, str],
) -> None:
"""Update the value dictionary with a BEL resource dictionary."""
for database_column, (section, key) in mapping.items():
if section in bel_resource and key in bel_resource[section]:
values[database_column] = bel_resource[section][key]
def int_or_str(v: Optional[str]) -> Union[None, int, str]:
"""Safe converts an string represent an integer to an integer or passes through ``None``."""
if v is None:
return
try:
return int(v)
except ValueError:
return v
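# --- Hedged usage sketch (added for illustration). The dictionary below is a
# minimal assumption of the BEL resource structure these helpers expect; the
# key names come from the functions above, while the timestamp format accepted
# by parse_datetime is an assumption:
#
#   config = {"Namespace": {"Keyword": "HGNC",
#                           "CreatedDateTime": "2017-01-01T00:00:00"}}
#   extract_shared_required(config)  # -> {"keyword": "HGNC", "created": <datetime>}
#   int_or_str("5")    # -> 5
#   int_or_str("5a")   # -> "5a"
#   int_or_str(None)   # -> None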
| 35.098592 | 96 | 0.680979 |
7039fd8778c3146409f383f84ad34a4da436bf6c | 1,109 | py | Python | Traffic Modeling Real Vision Based/tail_length.py | pb-10/Smart-Traffic-Signals-in-India-using-Deep-Reinforcement-Learning-and-Advanced-Computer-Vision | 511746246434f4c199df8118c66d318ac39a02c8 | ["MIT"] | 140 | 2019-05-16T11:15:14.000Z | 2022-03-28T18:29:12.000Z | Traffic Modeling Real Vision Based/tail_length.py | DaScientist/Smart-Traffic-Signals-in-India-using-Deep-Reinforcement-Learning-and-Advanced-Computer-Vision | 511746246434f4c199df8118c66d318ac39a02c8 | ["MIT"] | 2 | 2019-11-05T10:13:57.000Z | 2020-07-02T06:56:53.000Z | Traffic Modeling Real Vision Based/tail_length.py | DaScientist/Smart-Traffic-Signals-in-India-using-Deep-Reinforcement-Learning-and-Advanced-Computer-Vision | 511746246434f4c199df8118c66d318ac39a02c8 | ["MIT"] | 46 | 2019-05-25T09:42:56.000Z | 2022-03-21T06:01:07.000Z |
import numpy as np
import cv2
cap = cv2.VideoCapture('out-1.ogv')
fgbg = cv2.createBackgroundSubtractorMOG2()
checker = np.zeros((810),dtype=int)
while(1):
    ret, frame = cap.read()
    if not ret:     # bug fix: frame is None once the video ends; stop cleanly
        break
    # Region of interest: a 27-pixel-tall strip along the lane being measured.
    roi = frame[438:465,959:1770]
#print(checker)
start = 0
for i in range(810):
density = roi[:25,start:start+1]
d_gray = cv2.cvtColor(density, cv2.COLOR_BGR2GRAY)
white = cv2.countNonZero(d_gray)
print(" ",white)
if white>15:
checker[i] = 1
else:
checker[i] = 0
start += 1
print(checker)
    # Find the first gap of 28 consecutive empty columns; its start index marks
    # the tail (end) of the vehicle queue. Default: queue spans the whole ROI.
    tail = 810
    for i in range(782):
over = 1
for j in range(i,i+28):
if checker[j] == 1:
over = 0
break
if over == 1:
tail = i
break
print(tail)
cv2.imshow("roi",roi)
fgmask = fgbg.apply(roi)
cv2.imshow('roi_bg',fgmask)
if cv2.waitKey(100) & 0xFF == ord('q'):
break
cap.release()
cv2.destroyAllWindows() | 16.80303 | 58 | 0.481515 |
17ee41ae59ba810df4eea53a34c549e703beed04 | 489 | py | Python | sudolver_api/app/sudolver/image_binary.py | andrinmeier/sudolver | 3eed7688cd884bc0fd5cd88c7d93e1558f528f47 | [
"MIT"
] | null | null | null | sudolver_api/app/sudolver/image_binary.py | andrinmeier/sudolver | 3eed7688cd884bc0fd5cd88c7d93e1558f528f47 | [
"MIT"
] | 4 | 2022-03-26T08:58:24.000Z | 2022-03-30T20:45:35.000Z | sudolver_api/app/sudolver/image_binary.py | andrinmeier/sudolver | 3eed7688cd884bc0fd5cd88c7d93e1558f528f47 | [
"MIT"
] | null | null | null | from typing import Any
import cv2
import numpy as np
class ImageBinary:
def __init__(self, image_bytes) -> None:
self.image_bytes = image_bytes
def get_bytes(self) -> Any:
return self.image_bytes
def find_contours(self) -> Any:
contours, _ = cv2.findContours(
self.image_bytes, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE
)
return contours
def invert(self) -> Any:
return ImageBinary(np.invert(self.get_bytes()))
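# A minimal usage sketch (assumes `gray` is a single-channel uint8 image,
# e.g. loaded via cv2.imread(path, cv2.IMREAD_GRAYSCALE)):
#
#     _, binary = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY)
#     contours = ImageBinary(binary).find_contours()
#     inverted = ImageBinary(binary).invert()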
| 23.285714 | 68 | 0.652352 |
d4c3799b2d2da387f43e376d0f7a6e5ac97e1c40 | 2,901 | py | Python | tests/unit/admin/views/test_emails.py | leoz0610/warehouse | 58d569b864d0a9b0627d21a15f4ffe92540752f5 | [
"Apache-2.0"
] | 2 | 2015-04-08T20:48:40.000Z | 2016-04-23T00:14:49.000Z | tests/unit/admin/views/test_emails.py | leoz0610/warehouse | 58d569b864d0a9b0627d21a15f4ffe92540752f5 | [
"Apache-2.0"
] | 14 | 2018-10-11T01:00:52.000Z | 2018-10-16T23:01:23.000Z | tests/unit/admin/views/test_emails.py | leoz0610/warehouse | 58d569b864d0a9b0627d21a15f4ffe92540752f5 | [
"Apache-2.0"
] | 1 | 2015-04-08T20:48:41.000Z | 2015-04-08T20:48:41.000Z | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
import pytest
import pretend
from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest
from warehouse.admin.views import emails as views
from ....common.db.ses import EmailMessageFactory
class TestEmailList:
def test_no_query(self, db_request):
emails = sorted(
[EmailMessageFactory.create() for _ in range(30)],
key=lambda e: e.created,
reverse=True,
)
result = views.email_list(db_request)
assert result == {"emails": emails[:25], "query": None}
def test_with_page(self, db_request):
emails = sorted(
[EmailMessageFactory.create() for _ in range(30)],
key=lambda e: e.created,
reverse=True,
)
db_request.GET["page"] = "2"
result = views.email_list(db_request)
assert result == {"emails": emails[25:], "query": None}
def test_with_invalid_page(self):
request = pretend.stub(params={"page": "not an integer"})
with pytest.raises(HTTPBadRequest):
views.email_list(request)
def test_basic_query(self, db_request):
emails = sorted(
[EmailMessageFactory.create() for _ in range(30)],
key=lambda e: e.created,
reverse=True,
)
db_request.GET["q"] = emails[0].to
result = views.email_list(db_request)
assert result == {"emails": [emails[0]], "query": emails[0].to}
def test_wildcard_query(self, db_request):
emails = sorted(
[EmailMessageFactory.create() for _ in range(30)],
key=lambda e: e.created,
reverse=True,
)
db_request.GET["q"] = emails[0].to[:-1] + "%"
result = views.email_list(db_request)
assert result == {"emails": [emails[0]], "query": emails[0].to[:-1] + "%"}
class TestEmailDetail:
def test_existing_email(self, db_session):
em = EmailMessageFactory.create()
request = pretend.stub(matchdict={"email_id": em.id}, db=db_session)
assert views.email_detail(request) == {"email": em}
def test_nonexistent_email(self, db_session):
EmailMessageFactory.create()
request = pretend.stub(matchdict={"email_id": str(uuid.uuid4())}, db=db_session)
with pytest.raises(HTTPNotFound):
views.email_detail(request)
| 31.879121 | 88 | 0.640814 |
1a68fdc38be640382a35b56a5ca4a631e1954d3f | 5,727 | py | Python | ValidationQ.py | Arcadianlee/Deep-Learning-Design-Photonic-Crystals | 163090c9e25c160a1a994af6eea72fe28f2aa987 | [
"Apache-2.0"
] | 2 | 2021-12-22T05:23:18.000Z | 2022-03-23T19:32:06.000Z | ValidationQ.py | Arcadianlee/Deep-Learning-Design-Photonic-Crystals | 163090c9e25c160a1a994af6eea72fe28f2aa987 | [
"Apache-2.0"
] | null | null | null | ValidationQ.py | Arcadianlee/Deep-Learning-Design-Photonic-Crystals | 163090c9e25c160a1a994af6eea72fe28f2aa987 | [
"Apache-2.0"
] | 2 | 2022-02-06T18:22:26.000Z | 2022-03-08T16:53:12.000Z | #Code for the validation of trained model on Q
#Apr 2021 Renjie Li, NOEL @ CUHK SZ
import torch
import torchvision
import matplotlib.pyplot as plt
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import pandas as pd
import numpy as np
import h5py
import torchvision.transforms as transforms
from datetime import datetime
class TensorsDataset(torch.utils.data.Dataset):
'''
    A simple dataset wrapper - it serves the tensors that are passed in as input. This is the same as
    torch.utils.data.TensorDataset except that you can add transformations to your data and target tensors.
    The target tensor can also be None, in which case it is not returned.
'''
def __init__(self, data_tensor, target_tensor=None, transforms=None, target_transforms=None):
if target_tensor is not None:
assert data_tensor.size(0) == target_tensor.size(0)
self.data_tensor = data_tensor
self.target_tensor = target_tensor
if transforms is None:
transforms = []
if target_transforms is None:
target_transforms = []
if not isinstance(transforms, list):
transforms = [transforms]
if not isinstance(target_transforms, list):
target_transforms = [target_transforms]
self.transforms = transforms
self.target_transforms = target_transforms
def __getitem__(self, index):
data_tensor = self.data_tensor[index]
for transform in self.transforms:
data_tensor = transform(data_tensor)
if self.target_tensor is None:
return data_tensor
target_tensor = self.target_tensor[index]
for transform in self.target_transforms:
target_tensor = transform(target_tensor)
return data_tensor, target_tensor
def __len__(self):
return self.data_tensor.size(0)
#read data from mat file
print("loading the mat")
f = h5py.File('/Users/Renjee/Desktop/CUHK/NOEL/Deep learning proj/code/L3_dataset/Input_v.mat','r')
data = f['Input']
Input = np.array(data) # For converting to a NumPy array
f = h5py.File('/Users/Renjee/Desktop/CUHK/NOEL/Deep learning proj/code/L3_dataset/Output_v.mat','r')
data = f['QnV']
Output = np.array(data) # For converting to a NumPy array
print("converting to tensor")
input_tensor = torch.tensor(Input)
output_tensor = torch.tensor(Output)
#swap the axes
input_tensor = input_tensor.permute(3,2,1,0).float()
output_tensor = output_tensor.permute(1,0).float()
output_tensor = output_tensor[:,0] #do Q first
output_tensor = output_tensor.view(-1,1) #correct the dimension
print(output_tensor[-1])
print(input_tensor.shape)
print(output_tensor.shape)
#produce the full dataset
transformer=transforms.Normalize(mean=[-8.7270e-13,3.3969e-13,-1.6978e-12], std=[0.0000000005,0.0000000005,0.0000000005])
dataset=TensorsDataset(input_tensor, output_tensor,transforms=transformer)
#split into training and test datasets
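# train_size is deliberately 0: the entire dataset is held out for validation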
train_size = 0
test_size = len(output_tensor)
train_dataset, test_dataset = torch.utils.data.random_split(dataset, [train_size, test_size])
#load the data
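# batch_size equals the dataset size, so the loader yields everything in a single batch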
test_loader = torch.utils.data.DataLoader(dataset, batch_size=len(output_tensor), shuffle=False)
#set up the network
#create a class for the CNN
class Net(nn.Module):
#build the network (cnn+fc)
def __init__(self):
super(Net,self).__init__()
self.conv1 = nn.Conv2d(3,20, kernel_size=(3,3), padding = 1, bias=False)
self.bn1=nn.BatchNorm2d(20)
self.conv2 = nn.Conv2d(20,40,kernel_size=(3,3),bias=False)
self.bn2=nn.BatchNorm2d(40)
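        # fc1 expects 240 flattened features, which ties the network to the
        # input patch size produced by the conv/pool stages in forward()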
self.fc1 = nn.Linear(240,120)
self.fc2 = nn.Linear(120,50)
self.fc3 = nn.Linear(50,1)
#pass data to the CNN. x represents the data
def forward(self,x):
x = F.relu(F.avg_pool2d(self.conv1(x),(1,2)))
# print(x.shape)
x = F.relu(F.avg_pool2d(self.conv2(x),(1,2)))
# print(x.shape)
x = x.view(x.size(0),-1)
# print(x.shape)
x = F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x = self.fc3(x)
return x
network = Net()
network_state_dict = torch.load('/Users/Renjee/Desktop/CUHK/NOEL/Deep learning proj/code/L3_model.pt')
network.load_state_dict(network_state_dict)
test_losses = [] #for Q
testV_losses = [] #for V
test_output = []
testV_output = []
test_target = []
testV_target = []
pred_error = []
#test loop
def test():
#global test_output
network.eval()
with torch.no_grad(): #disable the gradient computation
for data, target in test_loader:
output = network(data)
#save the test result
#Q
test_output.append(output)
test_target.append(target)
pred_err = 100*torch.abs((output - target))/target
pred_error.append(pred_err)
#print('pred errors...')
#print(pred_err)
start=datetime.now()
for epoch in range(0,1):
test()
#print('Q predicted/true values...')
#print(test_output,test_target)
print((datetime.now()-start))
#convert from list to tensor
pred_errorT = torch.cat(pred_error,0)
pred_errorA = pred_errorT.numpy()
print(min(pred_errorA))
red_square = dict(markerfacecolor='r', marker='s')
fig, ax = plt.subplots()
ax.boxplot(pred_errorA, flierprops=red_square, vert=False)
plt.savefig('/Users/Renjee/Desktop/CUHK/NOEL/Deep learning proj/code/valid_boxplot.eps')
fig = plt.figure()
plt.hist(pred_errorA, 50, density=False)
plt.xlim(-0.05,0.80)
plt.xticks(np.arange(0, 0.85, 0.05))
plt.savefig('/Users/Renjee/Desktop/CUHK/NOEL/Deep learning proj/code/valid_hist.eps')
print(np.amin(pred_errorA),np.mean(pred_errorA), np.median(pred_errorA)) | 30.956757 | 121 | 0.688493 |
0a05791196aea740b7efe7f44cc4960d05a6883b | 15,444 | py | Python | pylistener/tests.py | CCallahanIV/CF401-Project-1---PyListener | 1c31d0b26e373dfac5c0a38fe554a835b71e75e8 | [
"MIT"
] | null | null | null | pylistener/tests.py | CCallahanIV/CF401-Project-1---PyListener | 1c31d0b26e373dfac5c0a38fe554a835b71e75e8 | [
"MIT"
] | null | null | null | pylistener/tests.py | CCallahanIV/CF401-Project-1---PyListener | 1c31d0b26e373dfac5c0a38fe554a835b71e75e8 | [
"MIT"
] | null | null | null | """Tests for Pylistener."""
import pytest
import transaction
import os
import cgi
from pyramid import testing
from pylistener.models import User, AddressBook, Category, Attribute, UserAttributeLink, get_tm_session
from pylistener.models.meta import Base
from passlib.apps import custom_app_context as pwd_context
TEST_DB = os.environ.get("TEST_DB", "test")
@pytest.fixture(scope="session")
def configuration(request):
"""Set up a Configurator instance.
This Configurator instance sets up a pointer to the location of the
database.
It also includes the models from your app's model package.
Finally it tears everything down, including the Postgres database.
This configuration will persist for the entire duration of your PyTest run.
"""
settings = {
'sqlalchemy.url': TEST_DB}
config = testing.setUp(settings=settings)
config.include('pylistener.models')
config.include('pylistener.routes')
def teardown():
testing.tearDown()
request.addfinalizer(teardown)
return config
@pytest.fixture
def db_session(configuration, request):
"""Create a session for interacting with the test database.
This uses the dbsession_factory on the configurator instance to create a
new database session. It binds that session to the available engine
and returns a new session for every call of the dummy_request object.
"""
SessionFactory = configuration.registry['dbsession_factory']
session = SessionFactory()
engine = session.bind
Base.metadata.drop_all(engine)
Base.metadata.create_all(engine)
def teardown():
session.transaction.rollback()
Base.metadata.drop_all(engine)
request.addfinalizer(teardown)
return session
@pytest.fixture
def dummy_request(db_session):
"""Instantiate a fake HTTP Request, complete with a database session.
This is a function-level fixture, so every new request will have a
new database session.
"""
return testing.DummyRequest(dbsession=db_session)
@pytest.fixture
def test_user(db_session):
"""Instantiate a test user account."""
new_user = User(username="test", password=pwd_context.hash("test"))
db_session.add(new_user)
# ======== UNIT TESTS ==========
def test_user_table_empty(db_session):
"""Test user table is initially empty."""
query = db_session.query(User).all()
assert not len(query)
def test_addresses_table_empty(db_session):
"""Test addresses table is initially empty."""
query = db_session.query(AddressBook).all()
assert not len(query)
def test_category_table_empty(db_session):
"""Test category table is initially empty."""
query = db_session.query(Category).all()
assert not len(query)
def test_attribute_table_empty(db_session):
"""Test attribute table is initially empty."""
query = db_session.query(AddressBook).all()
assert not len(query)
def test_new_user_is_added(db_session):
"""Test new user gets added to the database."""
new_user = User(username="test", password="test")
db_session.add(new_user)
query = db_session.query(User).all()
assert len(query) == 1
def test_new_user_username(db_session):
"""Test new user has correct data."""
new_user = User(username="test", password="test")
db_session.add(new_user)
user = db_session.query(User).filter(User.id == 1).first()
assert user.username == "test"
def test_new_contact_is_added(db_session):
"""Test new contact gets added to correct table."""
new_contact = AddressBook(
name="test_name",
phone="test_phone",
email="test_email"
)
db_session.add(new_contact)
query = db_session.query(AddressBook).all()
assert len(query) == 1
def test_new_contact_data(db_session):
"""Test new contact has correct data."""
new_contact = AddressBook(
name="test_name",
phone="test_phone",
email="test_email"
)
db_session.add(new_contact)
contact = db_session.query(AddressBook).all()
assert contact[0].name == "test_name"
assert contact[0].phone == "test_phone"
assert contact[0].email == "test_email"
def test_new_category_is_added(db_session):
"""Test new category is added to database."""
new_cat = Category(
label="test_label",
desc="test_desc"
)
db_session.add(new_cat)
query = db_session.query(Category).all()
assert len(query) == 1
def test_new_category_data(db_session):
"""Test new category has correct data."""
new_cat = Category(
label="test_label",
desc="test_desc"
)
db_session.add(new_cat)
category = db_session.query(Category).all()
assert category[0].label == "test_label"
assert category[0].desc == "test_desc"
def test_new_attribute_is_added(db_session):
"""Test new attribute is added to database."""
new_att = Attribute(
label="test_label",
desc="test_desc"
)
db_session.add(new_att)
query = db_session.query(Attribute).all()
assert len(query) == 1
def test_new_attribute_data(db_session):
"""Test new attribute has correct data."""
new_att = Attribute(
label="test_label",
desc="test_desc"
)
db_session.add(new_att)
att = db_session.query(Attribute).all()
assert att[0].label == "test_label"
assert att[0].desc == "test_desc"
def test_login_view_bad_credentials(dummy_request):
"""Test that when given bad credentials login doesn't happen."""
from .views.default import login_view
dummy_request.POST["username"] = "testme"
dummy_request.POST["password"] = "badpassword"
result = login_view(dummy_request)
assert result == {}
def test_login_view_get_request(dummy_request):
"""Test that you can see the login view."""
from .views.default import login_view
result = login_view(dummy_request)
assert result == {}
def test_login_view_good_credentials(dummy_request, test_user):
"""Test that when given good credentials login can be successful."""
from .views.default import login_view
from pyramid.httpexceptions import HTTPFound
dummy_request.POST["username"] = "test"
dummy_request.POST["password"] = "test"
result = login_view(dummy_request)
assert isinstance(result, HTTPFound)
def test_logout_view_redirects(dummy_request):
"""When logging out you get redirected to the home page."""
from .views.default import logout_view
from pyramid.httpexceptions import HTTPFound
result = logout_view(dummy_request)
assert isinstance(result, HTTPFound)
def test_register_view(dummy_request):
"""Test that you can see the register view."""
from .views.default import register_view
result = register_view(dummy_request)
assert result == {}
def test_not_found_view(dummy_request):
"""Test not found view."""
from .views.notfound import notfound_view
result = notfound_view(dummy_request)
assert result == {}
def test_home_view(dummy_request):
"""Test home view."""
from .views.default import home_view
result = home_view(dummy_request)
assert result == {}
def test_categories_view():
"""Test category view."""
from .views.default import categories_view
with pytest.raises(Exception):
categories_view(dummy_request)
def test_attributes_view():
"""Test attributes view."""
from .views.default import attributes_view
with pytest.raises(Exception):
attributes_view(dummy_request)
# # Unit test for initialize_db # #
def test_create_cat_object():
"""Test create_cat_object returns a Category model."""
from .scripts.initializedb import create_cat_object
cat_object = create_cat_object("a", "b", "c", "c")
assert isinstance(cat_object, Category)
def test_create_att_object():
"""Test create_att_object returns an Attribute model."""
from .scripts.initializedb import create_att_object
att_object = create_att_object("a", "b", "c", "d", "c")
assert isinstance(att_object, Attribute)
def test_create_user_object():
"""Test create_user_object returns a User model."""
from .scripts.initializedb import create_user_object
user_object = create_user_object("test", "test", "test")
assert isinstance(user_object, User)
def test_create_address_object():
"""Test create_address_object returns an AddressBook model."""
from .scripts.initializedb import create_address_object
address_object = create_address_object("a", "b", "c", "d", "e", "f")
assert isinstance(address_object, AddressBook)
def test_create_user_att_link_object():
"""Test create_user_att_link_object returns a UserAttributeLink model."""
from .scripts.initializedb import create_user_att_link_object
user_att_link_object = create_user_att_link_object("user", "attribute")
assert isinstance(user_att_link_object, UserAttributeLink)
def test_get_picture_binary():
"""Test get_picture_binary returns a bytes class."""
from .scripts.initializedb import get_picture_binary
import os
here = os.path.abspath(os.path.dirname(__file__))
path = os.path.join(here, 'scripts/img_questions/how.jpg')
rb = get_picture_binary(path)
assert isinstance(rb, bytes)
def test_handle_new_picture():
"""Test handle new picture function returns a bytes class."""
import os
from .views.default import handle_new_picture
here = os.path.abspath(os.path.dirname(__file__))
path = os.path.join(here, 'scripts/img_questions/how.jpg')
with open(path, 'rb') as ouput_file:
new_picture = handle_new_picture("name", ouput_file)
assert isinstance(new_picture, bytes)
# ======== FUNCTIONAL TESTS ===========
@pytest.fixture
def testapp(request):
"""Create an instance of webtests TestApp for testing routes.
With the alchemy scaffold we need to add to our test application the
setting for a database to be used for the models.
We have to then set up the database by starting a database session.
Finally we have to create all of the necessary tables that our app
normally uses to function.
The scope of the fixture is function-level, so every test will get a new
test application.
"""
from webtest import TestApp
from pyramid.config import Configurator
def main(global_config, **settings):
config = Configurator(settings=settings)
config.include('pyramid_jinja2')
config.include('.models')
config.include('.routes')
config.include('.security')
config.scan()
return config.make_wsgi_app()
app = main({}, **{'sqlalchemy.url': TEST_DB})
testapp = TestApp(app)
SessionFactory = app.registry["dbsession_factory"]
engine = SessionFactory().bind
Base.metadata.drop_all(engine)
Base.metadata.create_all(bind=engine)
return testapp
@pytest.fixture
def new_user(testapp):
"""Add a new user to the database."""
SessionFactory = testapp.app.registry["dbsession_factory"]
with transaction.manager:
dbsession = get_tm_session(SessionFactory, transaction.manager)
new_user = User(username="test", password=pwd_context.hash("test"))
dbsession.add(new_user)
@pytest.fixture
def login_fixture(testapp, new_user):
"""Test that logging redirects."""
resp = testapp.post('/login', params={'username': 'test', 'password': 'test'})
headers = resp.headers
return headers
@pytest.fixture
def fill_the_db(testapp, new_user):
"""Fill the database with a contact, category and attribute."""
from .scripts.initializedb import get_picture_binary
import os
    here = os.path.abspath(os.path.dirname(__file__))
SessionFactory = testapp.app.registry["dbsession_factory"]
with transaction.manager:
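        # the transaction manager commits on exit, so these rows are visible
        # to the app under test in its own database session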
dbsession = get_tm_session(SessionFactory, transaction.manager)
picture = get_picture_binary(os.path.join(here, "placeholder.jpg"))
new_user = AddressBook(
name="user name",
phone="user phone",
email="user email",
picture=picture,
pic_mime="image/jpeg",
user=1
)
dbsession.add(new_user)
new_category = Category(
label="category label",
desc="category",
picture=picture,
pic_mime="image/jpeg"
)
dbsession.add(new_category)
new_attribute = Attribute(
label="attribute label",
desc="attribute",
picture=picture,
cat_id=1,
pic_mime="image/jpeg"
)
dbsession.add(new_attribute)
def test_login_page_has_form(testapp):
"""Test that the login route brings up the login template."""
html = testapp.get('/login').html
assert len(html.find_all('input'))
def test_category_view_not_logged_in(testapp):
"""Test category route without logging in returns 403 error."""
from webtest.app import AppError
with pytest.raises(AppError, message="403 Forbidden"):
testapp.get('/category/1')
def test_category_view_logged_in(testapp, fill_the_db, login_fixture):
"""Test category view when logged in is accessible."""
response = testapp.get('/category/1', params=login_fixture)
assert response.status_code == 200
def test_404_view(testapp):
"""Test a non-registered route will raise a 404."""
from webtest.app import AppError
with pytest.raises(AppError, message="404 Not Found"):
testapp.get('/raise404')
def test_home_view_authenticated(testapp, login_fixture):
"""Test home view is accessible authenticated."""
response = testapp.get('/', params=login_fixture)
assert response.status_code == 200
def test_home_authenticated_has_contacts(testapp, fill_the_db, login_fixture):
"""Test home views renders contacts when authenticated."""
response = testapp.get('/', params=login_fixture).html
assert len(response.find_all("img")) == 1
def test_attribute_view_authenticated(testapp, fill_the_db, login_fixture):
"""Test attribute view with full db and authenticated user."""
response = testapp.get('/attribute/1/1', params=login_fixture)
assert response.status_code == 200
def test_attribute_authenticated_has_attributes(testapp, login_fixture, fill_the_db):
"""Test attribute view renders attributes when authenticated."""
response = testapp.get('/attribute/1/1', params=login_fixture)
assert len(response.html.find_all("img")) == 2
def test_display_view_authenticated(testapp, fill_the_db, login_fixture):
"""Test display view is accessible authenticated."""
response = testapp.get("/display/1/1/1", params=login_fixture)
assert response.status_code == 200
def test_display_authenticated_has_string(testapp, fill_the_db, login_fixture):
"""Test display view renders the string when authenticated."""
response = testapp.get("/display/1/1/1", params=login_fixture)
display_h1 = response.html.find_all('h1')[1]
assert "user name, category attribute" in display_h1
def test_manage_view_get_request(testapp, fill_the_db, login_fixture):
"""Test the manage view returns three forms."""
response = testapp.get('/manage/test', params=login_fixture)
assert response.status == '200 OK'
assert len(response.html.find_all('form')) == 3
assert len(response.html.find_all('li', class_='manage_list_item')) == 1
| 32.041494 | 103 | 0.702732 |
a5b41e605422b7568cdf7f609cb3cbb8651de56b | 3,464 | py | Python | graphql/core/language/printer.py | jhgg/graphql-py | 47ad2ca029954423e4b13f5b4ef84f788e865f6f | [
"MIT"
] | 1 | 2021-04-28T21:35:02.000Z | 2021-04-28T21:35:02.000Z | graphql/core/language/printer.py | jhgg/graphql-py | 47ad2ca029954423e4b13f5b4ef84f788e865f6f | [
"MIT"
] | null | null | null | graphql/core/language/printer.py | jhgg/graphql-py | 47ad2ca029954423e4b13f5b4ef84f788e865f6f | [
"MIT"
] | 1 | 2021-02-09T10:10:11.000Z | 2021-02-09T10:10:11.000Z | import json
from .visitor import visit, Visitor
__all__ = ['print_ast']
def print_ast(ast):
return visit(ast, PrintingVisitor())
class PrintingVisitor(Visitor):
def leave_Name(self, node, *args):
return node.value
def leave_Variable(self, node, *args):
return '$' + node.name
def leave_Document(self, node, *args):
return join(node.definitions, '\n\n') + '\n'
def leave_OperationDefinition(self, node, *args):
name = node.name
selection_set = node.selection_set
if not name:
return selection_set
op = node.operation
defs = wrap('(', join(node.variable_definitions, ', '), ')')
directives = join(node.directives, ' ')
return join([op, join([name, defs]), directives, selection_set], ' ')
def leave_VariableDefinition(self, node, *args):
return node.variable + ': ' + node.type + wrap(' = ', node.default_value)
def leave_SelectionSet(self, node, *args):
return block(node.selections)
def leave_Field(self, node, *args):
return join([
wrap('', node.alias, ': ') + node.name + wrap('(', join(node.arguments, ', '), ')'),
join(node.directives, ' '),
node.selection_set
], ' ')
def leave_Argument(self, node, *args):
return node.name + ': ' + node.value
# Fragments
def leave_FragmentSpread(self, node, *args):
return '...' + node.name + wrap(' ', join(node.directives, ' '))
def leave_InlineFragment(self, node, *args):
return ('... on ' + node.type_condition + ' ' +
wrap('', join(node.directives, ' '), ' ') +
node.selection_set)
def leave_FragmentDefinition(self, node, *args):
return ('fragment {} on {} '.format(node.name, node.type_condition) +
wrap('', join(node.directives, ' '), ' ') +
node.selection_set)
# Value
def leave_IntValue(self, node, *args):
return node.value
def leave_FloatValue(self, node, *args):
return node.value
def leave_StringValue(self, node, *args):
return json.dumps(node.value)
def leave_BooleanValue(self, node, *args):
return json.dumps(node.value)
def leave_EnumValue(self, node, *args):
return node.value
def leave_ListValue(self, node, *args):
return '[' + join(node.values, ', ') + ']'
def leave_ObjectValue(self, node, *args):
return '{' + join(node.fields, ', ') + '}'
def leave_ObjectField(self, node, *args):
return node.name + ': ' + node.value
# Directive
def leave_Directive(self, node, *args):
return '@' + node.name + wrap('(', join(node.arguments, ', '), ')')
# Type
def leave_NamedType(self, node, *args):
return node.name
def leave_ListType(self, node, *args):
return '[' + node.type + ']'
def leave_NonNullType(self, node, *args):
return node.type + '!'
def join(maybe_list, separator=''):
if maybe_list:
return separator.join(filter(None, maybe_list))
return ''
def block(maybe_list):
if maybe_list:
return indent('{\n' + join(maybe_list, '\n')) + '\n}'
return ''
def wrap(start, maybe_str, end=''):
if maybe_str:
return start + maybe_str + end
return ''
def indent(maybe_str):
if maybe_str:
return maybe_str.replace('\n', '\n ')
return maybe_str
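# A minimal usage sketch (the parser import path is an assumption about the
# companion module in this package):
#
#     from graphql.core.language.parser import parse
#     print(print_ast(parse('{ me { name } }')))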
| 27.275591 | 96 | 0.582564 |
d76729e7d0caa26dffb810b16c42b69beb9b0a0c | 2,237 | py | Python | meniscus/tests/personas/common/publish_stats_test.py | ProjectMeniscus/meniscus | 1df9efe33ead702d0f53dfc227b5da385ba9cf23 | [
"Apache-2.0"
] | 12 | 2015-01-14T03:40:05.000Z | 2018-08-20T13:19:07.000Z | meniscus/tests/personas/common/publish_stats_test.py | ProjectMeniscus/meniscus | 1df9efe33ead702d0f53dfc227b5da385ba9cf23 | [
"Apache-2.0"
] | 1 | 2015-07-02T17:03:47.000Z | 2015-07-02T17:03:47.000Z | meniscus/tests/personas/common/publish_stats_test.py | ProjectMeniscus/meniscus | 1df9efe33ead702d0f53dfc227b5da385ba9cf23 | [
"Apache-2.0"
] | 4 | 2015-05-12T12:04:44.000Z | 2020-11-17T19:08:43.000Z | import unittest
import requests
from mock import MagicMock
from mock import patch
from meniscus.openstack.common import jsonutils
from meniscus.personas.common.publish_stats import ConfigCache
from meniscus.personas.common.publish_stats import publish_worker_stats
from meniscus.data.model.worker import SystemInfo
from meniscus.data.model.worker import Worker
from meniscus.data.model.worker import WorkerConfiguration
def suite():
    # load every test method from the case; instantiating a TestCase with no
    # method name would look for a nonexistent 'runTest' method
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(WhenTestingPublishStats))
    return suite
class WhenTestingPublishStats(unittest.TestCase):
def setUp(self):
self.conf = MagicMock()
self.conf.status_update.worker_status_interval = 60
self.get_config = MagicMock(return_value=self.conf)
self.config = WorkerConfiguration(
personality='worker',
hostname='worker01',
coordinator_uri='http://192.168.1.2/v1')
self.system_info = SystemInfo().format()
self.request_uri = "{0}/worker/{1}/status".format(
self.config.coordinator_uri, self.config.hostname)
self.worker_status = {
'worker_status': Worker(personality='worker').format()
}
self.worker_status['worker_status']['system_info'] = self.system_info
self.req_body = jsonutils.dumps(self.worker_status)
self.get_config = MagicMock(return_value=self.config)
self.resp = requests.Response()
self.http_request = MagicMock(return_value=self.resp)
def test_http_request_called(self):
with patch.object(
ConfigCache, 'get_config', self.get_config), patch(
'meniscus.personas.common.publish_stats.http_request',
self.http_request), patch(
'meniscus.personas.common.publish_stats.get_config',
self.get_config), patch.object(
SystemInfo, 'format',
MagicMock(return_value=self.system_info)):
publish_worker_stats()
self.http_request.assert_called_once_with(
url=self.request_uri,
json_payload=self.req_body,
http_verb='PUT'
)
if __name__ == '__main__':
unittest.main()
| 33.893939 | 77 | 0.668306 |
73445f4dcb1f3761f4315ad29fc30d6df6729615 | 23,487 | py | Python | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/storage/netapp/netapp_e_host.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | [
"MIT"
] | null | null | null | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/storage/netapp/netapp_e_host.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | [
"MIT"
] | null | null | null | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/storage/netapp/netapp_e_host.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: netapp_e_host
short_description: NetApp E-Series manage eseries hosts
description: Create, update, remove hosts on NetApp E-series storage arrays
author:
- Kevin Hulquest (@hulquest)
- Nathan Swartz (@ndswartz)
extends_documentation_fragment:
- netapp.ontap.netapp.eseries
options:
name:
description:
- If the host doesn't yet exist, the label/name to assign at creation time.
      - If the host already exists, this will be used to uniquely identify the host and make any required changes
required: True
aliases:
- label
state:
description:
- Set to absent to remove an existing host
- Set to present to modify or create a new host definition
choices:
- absent
- present
default: present
host_type:
description:
- This is the type of host to be mapped
- Required when C(state=present)
      - Either one of the following names can be specified (Linux DM-MP, VMWare, Windows, Windows Clustered), or a
        host type index, which can be found in M(netapp_e_facts)
type: str
aliases:
- host_type_index
ports:
description:
- A list of host ports you wish to associate with the host.
- Host ports are uniquely identified by their WWN or IQN. Their assignments to a particular host are
uniquely identified by a label and these must be unique.
required: False
suboptions:
type:
description:
- The interface type of the port to define.
- Acceptable choices depend on the capabilities of the target hardware/software platform.
required: true
choices:
- iscsi
- sas
- fc
- ib
- nvmeof
- ethernet
label:
description:
- A unique label to assign to this port assignment.
required: true
port:
description:
- The WWN or IQN of the hostPort to assign to this port definition.
required: true
force_port:
description:
- Allow ports that are already assigned to be re-assigned to your current host
required: false
type: bool
group:
description:
- The unique identifier of the host-group you want the host to be a member of; this is used for clustering.
required: False
aliases:
- cluster
log_path:
description:
- A local path to a file to be used for debug logging
required: False
'''
EXAMPLES = """
- name: Define or update an existing host named 'Host1'
netapp_e_host:
ssid: "1"
api_url: "10.113.1.101:8443"
api_username: admin
api_password: myPassword
name: "Host1"
state: present
host_type_index: Linux DM-MP
ports:
- type: 'iscsi'
label: 'PORT_1'
port: 'iqn.1996-04.de.suse:01:56f86f9bd1fe'
- type: 'fc'
label: 'FC_1'
port: '10:00:FF:7C:FF:FF:FF:01'
- type: 'fc'
label: 'FC_2'
port: '10:00:FF:7C:FF:FF:FF:00'
- name: Ensure a host named 'Host2' doesn't exist
netapp_e_host:
ssid: "1"
api_url: "10.113.1.101:8443"
api_username: admin
api_password: myPassword
name: "Host2"
state: absent
"""
RETURN = """
msg:
description:
- A user-readable description of the actions performed.
returned: on success
type: str
sample: The host has been created.
id:
description:
- the unique identifier of the host on the E-Series storage-system
returned: on success when state=present
type: str
sample: 00000000600A098000AAC0C3003004700AD86A52
version_added: "2.6"
ssid:
description:
- the unique identifier of the E-Series storage-system with the current api
returned: on success
type: str
sample: 1
version_added: "2.6"
api_url:
description:
- the url of the API that this request was processed by
returned: on success
type: str
sample: https://webservices.example.com:8443
version_added: "2.6"
"""
import json
import logging
import re
from pprint import pformat
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.netapp.ontap.plugins.module_utils.netapp import request, eseries_host_argument_spec
from ansible.module_utils._text import to_native
HEADERS = {
"Content-Type": "application/json",
"Accept": "application/json",
}
class Host(object):
HOST_TYPE_INDEXES = {"linux dm-mp": 28, "vmware": 10, "windows": 1, "windows clustered": 8}
def __init__(self):
argument_spec = eseries_host_argument_spec()
argument_spec.update(dict(
state=dict(type='str', default='present', choices=['absent', 'present']),
group=dict(type='str', required=False, aliases=['cluster']),
ports=dict(type='list', required=False),
force_port=dict(type='bool', default=False),
name=dict(type='str', required=True, aliases=['label']),
host_type_index=dict(type='str', aliases=['host_type']),
log_path=dict(type='str', required=False),
))
self.module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
self.check_mode = self.module.check_mode
args = self.module.params
self.group = args['group']
self.ports = args['ports']
self.force_port = args['force_port']
self.name = args['name']
self.state = args['state']
self.ssid = args['ssid']
self.url = args['api_url']
self.user = args['api_username']
self.pwd = args['api_password']
self.certs = args['validate_certs']
self.post_body = dict()
self.all_hosts = list()
self.host_obj = dict()
self.newPorts = list()
self.portsForUpdate = list()
self.portsForRemoval = list()
# Update host type with the corresponding index
host_type = args['host_type_index']
if host_type:
host_type = host_type.lower()
if host_type in [key.lower() for key in list(self.HOST_TYPE_INDEXES.keys())]:
self.host_type_index = self.HOST_TYPE_INDEXES[host_type]
elif host_type.isdigit():
self.host_type_index = int(args['host_type_index'])
else:
self.module.fail_json(msg="host_type must be either a host type name or host type index found integer"
" the documentation.")
# logging setup
self._logger = logging.getLogger(self.__class__.__name__)
if args['log_path']:
logging.basicConfig(
level=logging.DEBUG, filename=args['log_path'], filemode='w',
format='%(relativeCreated)dms %(levelname)s %(module)s.%(funcName)s:%(lineno)d\n %(message)s')
if not self.url.endswith('/'):
self.url += '/'
# Ensure when state==present then host_type_index is defined
if self.state == "present" and self.host_type_index is None:
self.module.fail_json(msg="Host_type_index is required when state=='present'. Array Id: [%s]" % self.ssid)
# Fix port representation if they are provided with colons
if self.ports is not None:
for port in self.ports:
port['label'] = port['label'].lower()
port['type'] = port['type'].lower()
port['port'] = port['port'].lower()
# Determine whether address is 16-byte WWPN and, if so, remove
if re.match(r'^(0x)?[0-9a-f]{16}$', port['port'].replace(':', '')):
port['port'] = port['port'].replace(':', '').replace('0x', '')
def valid_host_type(self):
host_types = None
try:
(rc, host_types) = request(self.url + 'storage-systems/%s/host-types' % self.ssid, url_password=self.pwd,
url_username=self.user, validate_certs=self.certs, headers=HEADERS)
except Exception as err:
self.module.fail_json(
msg="Failed to get host types. Array Id [%s]. Error [%s]." % (self.ssid, to_native(err)))
try:
match = list(filter(lambda host_type: host_type['index'] == self.host_type_index, host_types))[0]
return True
except IndexError:
self.module.fail_json(msg="There is no host type with index %s" % self.host_type_index)
def assigned_host_ports(self, apply_unassigning=False):
"""Determine if the hostPorts requested have already been assigned and return list of required used ports."""
used_host_ports = {}
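        # maps hostRef -> list of hostPort/initiator references that must be
        # unassigned before they can be reassigned to this host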
for host in self.all_hosts:
if host['label'] != self.name:
for host_port in host['hostSidePorts']:
for port in self.ports:
if port['port'] == host_port["address"] or port['label'] == host_port['label']:
if not self.force_port:
self.module.fail_json(msg="There are no host ports available OR there are not enough"
" unassigned host ports")
else:
# Determine port reference
port_ref = [port["hostPortRef"] for port in host["ports"]
if port["hostPortName"] == host_port["address"]]
port_ref.extend([port["initiatorRef"] for port in host["initiators"]
if port["nodeName"]["iscsiNodeName"] == host_port["address"]])
# Create dictionary of hosts containing list of port references
if host["hostRef"] not in used_host_ports.keys():
used_host_ports.update({host["hostRef"]: port_ref})
else:
used_host_ports[host["hostRef"]].extend(port_ref)
else:
for host_port in host['hostSidePorts']:
for port in self.ports:
if ((host_port['label'] == port['label'] and host_port['address'] != port['port']) or
(host_port['label'] != port['label'] and host_port['address'] == port['port'])):
if not self.force_port:
self.module.fail_json(msg="There are no host ports available OR there are not enough"
" unassigned host ports")
else:
# Determine port reference
port_ref = [port["hostPortRef"] for port in host["ports"]
if port["hostPortName"] == host_port["address"]]
port_ref.extend([port["initiatorRef"] for port in host["initiators"]
if port["nodeName"]["iscsiNodeName"] == host_port["address"]])
# Create dictionary of hosts containing list of port references
if host["hostRef"] not in used_host_ports.keys():
used_host_ports.update({host["hostRef"]: port_ref})
else:
used_host_ports[host["hostRef"]].extend(port_ref)
# Unassign assigned ports
if apply_unassigning:
for host_ref in used_host_ports.keys():
try:
rc, resp = request(self.url + 'storage-systems/%s/hosts/%s' % (self.ssid, host_ref),
url_username=self.user, url_password=self.pwd, headers=HEADERS,
validate_certs=self.certs, method='POST',
data=json.dumps({"portsToRemove": used_host_ports[host_ref]}))
except Exception as err:
self.module.fail_json(msg="Failed to unassign host port. Host Id [%s]. Array Id [%s]. Ports [%s]."
" Error [%s]." % (self.host_obj['id'], self.ssid,
used_host_ports[host_ref], to_native(err)))
return used_host_ports
def group_id(self):
if self.group:
try:
(rc, all_groups) = request(self.url + 'storage-systems/%s/host-groups' % self.ssid,
url_password=self.pwd,
url_username=self.user, validate_certs=self.certs, headers=HEADERS)
except Exception as err:
self.module.fail_json(
msg="Failed to get host groups. Array Id [%s]. Error [%s]." % (self.ssid, to_native(err)))
try:
group_obj = list(filter(lambda group: group['name'] == self.group, all_groups))[0]
return group_obj['id']
except IndexError:
self.module.fail_json(msg="No group with the name: %s exists" % self.group)
else:
# Return the value equivalent of no group
return "0000000000000000000000000000000000000000"
def host_exists(self):
"""Determine if the requested host exists
As a side effect, set the full list of defined hosts in 'all_hosts', and the target host in 'host_obj'.
"""
match = False
all_hosts = list()
try:
(rc, all_hosts) = request(self.url + 'storage-systems/%s/hosts' % self.ssid, url_password=self.pwd,
url_username=self.user, validate_certs=self.certs, headers=HEADERS)
except Exception as err:
self.module.fail_json(
msg="Failed to determine host existence. Array Id [%s]. Error [%s]." % (self.ssid, to_native(err)))
# Augment the host objects
for host in all_hosts:
for port in host['hostSidePorts']:
port['type'] = port['type'].lower()
port['address'] = port['address'].lower()
port['label'] = port['label'].lower()
# Augment hostSidePorts with their ID (this is an omission in the API)
ports = dict((port['label'], port['id']) for port in host['ports'])
ports.update((port['label'], port['id']) for port in host['initiators'])
for host_side_port in host['hostSidePorts']:
if host_side_port['label'] in ports:
host_side_port['id'] = ports[host_side_port['label']]
if host['label'] == self.name:
self.host_obj = host
match = True
self.all_hosts = all_hosts
return match
def needs_update(self):
"""Determine whether we need to update the Host object
As a side effect, we will set the ports that we need to update (portsForUpdate), and the ports we need to add
(newPorts), on self.
"""
changed = False
if (self.host_obj["clusterRef"].lower() != self.group_id().lower() or
self.host_obj["hostTypeIndex"] != self.host_type_index):
self._logger.info("Either hostType or the clusterRef doesn't match, an update is required.")
changed = True
current_host_ports = dict((port["id"], {"type": port["type"], "port": port["address"], "label": port["label"]})
for port in self.host_obj["hostSidePorts"])
if self.ports:
for port in self.ports:
for current_host_port_id in current_host_ports.keys():
if port == current_host_ports[current_host_port_id]:
current_host_ports.pop(current_host_port_id)
break
elif port["port"] == current_host_ports[current_host_port_id]["port"]:
if self.port_on_diff_host(port) and not self.force_port:
self.module.fail_json(msg="The port you specified [%s] is associated with a different host."
" Specify force_port as True or try a different port spec" % port)
if (port["label"] != current_host_ports[current_host_port_id]["label"] or
port["type"] != current_host_ports[current_host_port_id]["type"]):
current_host_ports.pop(current_host_port_id)
self.portsForUpdate.append({"portRef": current_host_port_id, "port": port["port"],
"label": port["label"], "hostRef": self.host_obj["hostRef"]})
break
else:
self.newPorts.append(port)
self.portsForRemoval = list(current_host_ports.keys())
changed = any([self.newPorts, self.portsForUpdate, self.portsForRemoval, changed])
return changed
def port_on_diff_host(self, arg_port):
""" Checks to see if a passed in port arg is present on a different host """
for host in self.all_hosts:
# Only check 'other' hosts
if host['name'] != self.name:
for port in host['hostSidePorts']:
# Check if the port label is found in the port dict list of each host
if arg_port['label'] == port['label'] or arg_port['port'] == port['address']:
self.other_host = host
return True
return False
def update_host(self):
self._logger.info("Beginning the update for host=%s.", self.name)
if self.ports:
# Remove ports that need reassigning from their current host.
self.assigned_host_ports(apply_unassigning=True)
self.post_body["portsToUpdate"] = self.portsForUpdate
self.post_body["ports"] = self.newPorts
self._logger.info("Requested ports: %s", pformat(self.ports))
else:
self._logger.info("No host ports were defined.")
if self.group:
self.post_body['groupId'] = self.group_id()
self.post_body['hostType'] = dict(index=self.host_type_index)
api = self.url + 'storage-systems/%s/hosts/%s' % (self.ssid, self.host_obj['id'])
self._logger.info("POST => url=%s, body=%s.", api, pformat(self.post_body))
if not self.check_mode:
try:
(rc, self.host_obj) = request(api, url_username=self.user, url_password=self.pwd, headers=HEADERS,
validate_certs=self.certs, method='POST', data=json.dumps(self.post_body))
except Exception as err:
self.module.fail_json(
msg="Failed to update host. Array Id [%s]. Error [%s]." % (self.ssid, to_native(err)))
payload = self.build_success_payload(self.host_obj)
self.module.exit_json(changed=True, **payload)
def create_host(self):
self._logger.info("Creating host definition.")
# Remove ports that need reassigning from their current host.
self.assigned_host_ports(apply_unassigning=True)
# needs_reassignment = False
post_body = dict(
name=self.name,
hostType=dict(index=self.host_type_index),
groupId=self.group_id(),
)
if self.ports:
post_body.update(ports=self.ports)
api = self.url + "storage-systems/%s/hosts" % self.ssid
self._logger.info('POST => url=%s, body=%s', api, pformat(post_body))
if not self.check_mode:
if not self.host_exists():
try:
(rc, self.host_obj) = request(api, method='POST', url_username=self.user, url_password=self.pwd, validate_certs=self.certs,
data=json.dumps(post_body), headers=HEADERS)
except Exception as err:
self.module.fail_json(
msg="Failed to create host. Array Id [%s]. Error [%s]." % (self.ssid, to_native(err)))
else:
payload = self.build_success_payload(self.host_obj)
self.module.exit_json(changed=False, msg="Host already exists. Id [%s]. Host [%s]." % (self.ssid, self.name), **payload)
payload = self.build_success_payload(self.host_obj)
self.module.exit_json(changed=True, msg='Host created.', **payload)
def remove_host(self):
try:
(rc, resp) = request(self.url + "storage-systems/%s/hosts/%s" % (self.ssid, self.host_obj['id']),
method='DELETE',
url_username=self.user, url_password=self.pwd, validate_certs=self.certs)
except Exception as err:
self.module.fail_json(
msg="Failed to remove host. Host[%s]. Array Id [%s]. Error [%s]." % (self.host_obj['id'],
self.ssid,
to_native(err)))
def build_success_payload(self, host=None):
keys = ['id']
if host is not None:
result = dict((key, host[key]) for key in keys)
else:
result = dict()
result['ssid'] = self.ssid
result['api_url'] = self.url
return result
def apply(self):
if self.state == 'present':
if self.host_exists():
if self.needs_update() and self.valid_host_type():
self.update_host()
else:
payload = self.build_success_payload(self.host_obj)
self.module.exit_json(changed=False, msg="Host already present; no changes required.", **payload)
elif self.valid_host_type():
self.create_host()
else:
payload = self.build_success_payload()
if self.host_exists():
self.remove_host()
self.module.exit_json(changed=True, msg="Host removed.", **payload)
else:
self.module.exit_json(changed=False, msg="Host already absent.", **payload)
def main():
host = Host()
host.apply()
if __name__ == '__main__':
main()
| 43.73743 | 143 | 0.548346 |
f1bc07e7c72b177310cbabcbf43079cbda423418 | 209,116 | py | Python | tencentcloud/tsf/v20180326/tsf_client.py | HS-Gray/tencentcloud-sdk-python | b28b19c4beebc9f361aa3221afa36ad1ee047ccc | [
"Apache-2.0"
] | null | null | null | tencentcloud/tsf/v20180326/tsf_client.py | HS-Gray/tencentcloud-sdk-python | b28b19c4beebc9f361aa3221afa36ad1ee047ccc | [
"Apache-2.0"
] | null | null | null | tencentcloud/tsf/v20180326/tsf_client.py | HS-Gray/tencentcloud-sdk-python | b28b19c4beebc9f361aa3221afa36ad1ee047ccc | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.common.abstract_client import AbstractClient
from tencentcloud.tsf.v20180326 import models
class TsfClient(AbstractClient):
_apiVersion = '2018-03-26'
_endpoint = 'tsf.tencentcloudapi.com'
_service = 'tsf'
def AddClusterInstances(self, request):
"""添加云主机节点至TSF集群
:param request: Request instance for AddClusterInstances.
:type request: :class:`tencentcloud.tsf.v20180326.models.AddClusterInstancesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.AddClusterInstancesResponse`
"""
try:
params = request._serialize()
body = self.call("AddClusterInstances", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.AddClusterInstancesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
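    # A minimal call sketch (credential values are placeholders):
    #
    #     from tencentcloud.common import credential
    #     cred = credential.Credential("SECRET_ID", "SECRET_KEY")
    #     client = TsfClient(cred, "ap-guangzhou")
    #     resp = client.AddClusterInstances(models.AddClusterInstancesRequest())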
def AddInstances(self, request):
"""添加云主机节点至TSF集群
:param request: Request instance for AddInstances.
:type request: :class:`tencentcloud.tsf.v20180326.models.AddInstancesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.AddInstancesResponse`
"""
try:
params = request._serialize()
body = self.call("AddInstances", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.AddInstancesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def BindApiGroup(self, request):
"""网关与API分组批量绑定
:param request: Request instance for BindApiGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.BindApiGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.BindApiGroupResponse`
"""
try:
params = request._serialize()
body = self.call("BindApiGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.BindApiGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def BindPlugin(self, request):
"""插件与网关分组/API批量绑定
:param request: Request instance for BindPlugin.
:type request: :class:`tencentcloud.tsf.v20180326.models.BindPluginRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.BindPluginResponse`
"""
try:
params = request._serialize()
body = self.call("BindPlugin", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.BindPluginResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ChangeApiUsableStatus(self, request):
"""启用或禁用API
:param request: Request instance for ChangeApiUsableStatus.
:type request: :class:`tencentcloud.tsf.v20180326.models.ChangeApiUsableStatusRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ChangeApiUsableStatusResponse`
"""
try:
params = request._serialize()
body = self.call("ChangeApiUsableStatus", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ChangeApiUsableStatusResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ContinueRunFailedTaskBatch(self, request):
"""对执行失败的任务批次执行续跑
:param request: Request instance for ContinueRunFailedTaskBatch.
:type request: :class:`tencentcloud.tsf.v20180326.models.ContinueRunFailedTaskBatchRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ContinueRunFailedTaskBatchResponse`
"""
try:
params = request._serialize()
body = self.call("ContinueRunFailedTaskBatch", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ContinueRunFailedTaskBatchResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateAllGatewayApiAsync(self, request):
"""一键导入API分组
:param request: Request instance for CreateAllGatewayApiAsync.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateAllGatewayApiAsyncRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateAllGatewayApiAsyncResponse`
"""
try:
params = request._serialize()
body = self.call("CreateAllGatewayApiAsync", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateAllGatewayApiAsyncResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateApiGroup(self, request):
"""创建API分组
:param request: Request instance for CreateApiGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateApiGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateApiGroupResponse`
"""
try:
params = request._serialize()
body = self.call("CreateApiGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateApiGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateApiRateLimitRule(self, request):
"""创建API限流规则
:param request: Request instance for CreateApiRateLimitRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateApiRateLimitRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateApiRateLimitRuleResponse`
"""
try:
params = request._serialize()
body = self.call("CreateApiRateLimitRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateApiRateLimitRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateApplication(self, request):
        """Create an application.

:param request: Request instance for CreateApplication.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateApplicationRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateApplicationResponse`
"""
try:
params = request._serialize()
body = self.call("CreateApplication", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateApplicationResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateCluster(self, request):
        """Create a cluster.

:param request: Request instance for CreateCluster.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateClusterRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateClusterResponse`
"""
try:
params = request._serialize()
body = self.call("CreateCluster", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateClusterResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateConfig(self, request):
        """Create a configuration item.

:param request: Request instance for CreateConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateConfigResponse`
"""
try:
params = request._serialize()
body = self.call("CreateConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateContainGroup(self, request):
        """Create a container deployment group.

:param request: Request instance for CreateContainGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateContainGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateContainGroupResponse`
"""
try:
params = request._serialize()
body = self.call("CreateContainGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateContainGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateFileConfig(self, request):
        """Create a file configuration item.

:param request: Request instance for CreateFileConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateFileConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateFileConfigResponse`
"""
try:
params = request._serialize()
body = self.call("CreateFileConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateFileConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateGatewayApi(self, request):
        """Batch-import APIs into an API group (creating new APIs in the group is also supported).

:param request: Request instance for CreateGatewayApi.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateGatewayApiRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateGatewayApiResponse`
"""
try:
params = request._serialize()
body = self.call("CreateGatewayApi", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateGatewayApiResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateGroup(self, request):
        """Create a virtual machine deployment group.

:param request: Request instance for CreateGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateGroupResponse`
"""
try:
params = request._serialize()
body = self.call("CreateGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateLane(self, request):
        """Create a lane.

:param request: Request instance for CreateLane.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateLaneRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateLaneResponse`
"""
try:
params = request._serialize()
body = self.call("CreateLane", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateLaneResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateLaneRule(self, request):
        """Create a lane rule.

:param request: Request instance for CreateLaneRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateLaneRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateLaneRuleResponse`
"""
try:
params = request._serialize()
body = self.call("CreateLaneRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateLaneRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateMicroservice(self, request):
        """Create a microservice.

:param request: Request instance for CreateMicroservice.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateMicroserviceRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateMicroserviceResponse`
"""
try:
params = request._serialize()
body = self.call("CreateMicroservice", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateMicroserviceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateNamespace(self, request):
        """Create a namespace.

:param request: Request instance for CreateNamespace.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateNamespaceRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateNamespaceResponse`
"""
try:
params = request._serialize()
body = self.call("CreateNamespace", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateNamespaceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreatePathRewrites(self, request):
        """Create path rewrites.

:param request: Request instance for CreatePathRewrites.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreatePathRewritesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreatePathRewritesResponse`
"""
try:
params = request._serialize()
body = self.call("CreatePathRewrites", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreatePathRewritesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreatePublicConfig(self, request):
        """Create a public configuration item.

:param request: Request instance for CreatePublicConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreatePublicConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreatePublicConfigResponse`
"""
try:
params = request._serialize()
body = self.call("CreatePublicConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreatePublicConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateRepository(self, request):
        """Create a repository.

:param request: Request instance for CreateRepository.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateRepositoryRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateRepositoryResponse`
"""
try:
params = request._serialize()
body = self.call("CreateRepository", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateRepositoryResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateServerlessGroup(self, request):
        """Create a serverless deployment group.

:param request: Request instance for CreateServerlessGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateServerlessGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateServerlessGroupResponse`
"""
try:
params = request._serialize()
body = self.call("CreateServerlessGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateServerlessGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateTask(self, request):
        """Create a task.

:param request: Request instance for CreateTask.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateTaskRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateTaskResponse`
"""
try:
params = request._serialize()
body = self.call("CreateTask", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateTaskResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateTaskFlow(self, request):
        """Create a workflow.

:param request: Request instance for CreateTaskFlow.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateTaskFlowRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateTaskFlowResponse`
"""
try:
params = request._serialize()
body = self.call("CreateTaskFlow", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateTaskFlowResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def CreateUnitRule(self, request):
        """Create a unit rule.

:param request: Request instance for CreateUnitRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.CreateUnitRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.CreateUnitRuleResponse`
"""
try:
params = request._serialize()
body = self.call("CreateUnitRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateUnitRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteApiGroup(self, request):
        """Delete an API group.

:param request: Request instance for DeleteApiGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteApiGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteApiGroupResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteApiGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteApiGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteApplication(self, request):
        """Delete an application.

:param request: Request instance for DeleteApplication.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteApplicationRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteApplicationResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteApplication", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteApplicationResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteConfig(self, request):
        """Delete a configuration item.

:param request: Request instance for DeleteConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteConfigResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteContainerGroup(self, request):
        """Delete a container deployment group.

:param request: Request instance for DeleteContainerGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteContainerGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteContainerGroupResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteContainerGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteContainerGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteGroup(self, request):
        """Delete a virtual machine deployment group.

:param request: Request instance for DeleteGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteGroupResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteImageTags(self, request):
        """Batch-delete image tags.

:param request: Request instance for DeleteImageTags.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteImageTagsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteImageTagsResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteImageTags", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteImageTagsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteLane(self, request):
        """Delete a lane.

:param request: Request instance for DeleteLane.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteLaneRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteLaneResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteLane", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteLaneResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteLaneRule(self, request):
        """Delete a lane rule.

:param request: Request instance for DeleteLaneRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteLaneRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteLaneRuleResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteLaneRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteLaneRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteMicroservice(self, request):
        """Delete a microservice.

:param request: Request instance for DeleteMicroservice.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteMicroserviceRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteMicroserviceResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteMicroservice", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteMicroserviceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteNamespace(self, request):
        """Delete a namespace.

:param request: Request instance for DeleteNamespace.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteNamespaceRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteNamespaceResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteNamespace", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteNamespaceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeletePathRewrites(self, request):
        """Delete path rewrites.

:param request: Request instance for DeletePathRewrites.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeletePathRewritesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeletePathRewritesResponse`
"""
try:
params = request._serialize()
body = self.call("DeletePathRewrites", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeletePathRewritesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

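    # The DeletePkgs docstring below caps each request at 1,000 packages; a
    # hedged client-side chunking sketch (the RepositoryId/PkgIds request
    # fields are assumptions taken from the request model naming, and
    # "client"/"models" follow the usage sketch earlier in this file):
    #
    #     def delete_pkgs_in_chunks(client, repository_id, pkg_ids, size=1000):
    #         for i in range(0, len(pkg_ids), size):
    #             req = models.DeletePkgsRequest()
    #             req.RepositoryId = repository_id
    #             req.PkgIds = pkg_ids[i:i + size]
    #             client.DeletePkgs(req)
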
def DeletePkgs(self, request):
"""从软件仓库批量删除程序包。
一次最多支持删除1000个包,数量超过1000,返回UpperDeleteLimit错误。
:param request: Request instance for DeletePkgs.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeletePkgsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeletePkgsResponse`
"""
try:
params = request._serialize()
body = self.call("DeletePkgs", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeletePkgsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeletePublicConfig(self, request):
        """Delete a public configuration item.

:param request: Request instance for DeletePublicConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeletePublicConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeletePublicConfigResponse`
"""
try:
params = request._serialize()
body = self.call("DeletePublicConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeletePublicConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteRepository(self, request):
        """Delete a repository.

:param request: Request instance for DeleteRepository.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteRepositoryRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteRepositoryResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteRepository", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteRepositoryResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteServerlessGroup(self, request):
        """Delete a serverless deployment group.

:param request: Request instance for DeleteServerlessGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteServerlessGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteServerlessGroupResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteServerlessGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteServerlessGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteTask(self, request):
        """Delete a task.

:param request: Request instance for DeleteTask.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteTaskRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteTaskResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteTask", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteTaskResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteUnitNamespaces(self, request):
        """Delete unit namespaces.

:param request: Request instance for DeleteUnitNamespaces.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteUnitNamespacesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteUnitNamespacesResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteUnitNamespaces", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteUnitNamespacesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeleteUnitRule(self, request):
        """Delete a unit rule.

:param request: Request instance for DeleteUnitRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeleteUnitRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeleteUnitRuleResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteUnitRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteUnitRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeployContainerGroup(self, request):
        """Deploy (update) a container application.

:param request: Request instance for DeployContainerGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeployContainerGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeployContainerGroupResponse`
"""
try:
params = request._serialize()
body = self.call("DeployContainerGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeployContainerGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeployGroup(self, request):
        """Deploy an application to a virtual machine deployment group.

:param request: Request instance for DeployGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeployGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeployGroupResponse`
"""
try:
params = request._serialize()
body = self.call("DeployGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeployGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DeployServerlessGroup(self, request):
        """Deploy a serverless application.

:param request: Request instance for DeployServerlessGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.DeployServerlessGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DeployServerlessGroupResponse`
"""
try:
params = request._serialize()
body = self.call("DeployServerlessGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeployServerlessGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeApiDetail(self, request):
        """Query API details.

:param request: Request instance for DescribeApiDetail.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeApiDetailRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeApiDetailResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeApiDetail", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeApiDetailResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeApiGroup(self, request):
        """Query an API group.

:param request: Request instance for DescribeApiGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeApiGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeApiGroupResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeApiGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeApiGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeApiGroups(self, request):
        """Query the list of API group information.

:param request: Request instance for DescribeApiGroups.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeApiGroupsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeApiGroupsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeApiGroups", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeApiGroupsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeApiRateLimitRules(self, request):
        """Query API rate limit rules.

:param request: Request instance for DescribeApiRateLimitRules.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeApiRateLimitRulesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeApiRateLimitRulesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeApiRateLimitRules", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeApiRateLimitRulesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeApiUseDetail(self, request):
        """Query detailed monitoring data for a gateway API.

:param request: Request instance for DescribeApiUseDetail.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeApiUseDetailRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeApiUseDetailResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeApiUseDetail", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeApiUseDetailResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeApiVersions(self, request):
        """Query API versions.

:param request: Request instance for DescribeApiVersions.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeApiVersionsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeApiVersionsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeApiVersions", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeApiVersionsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeApplication(self, request):
        """Get application details.

:param request: Request instance for DescribeApplication.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeApplicationRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeApplicationResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeApplication", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeApplicationResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeApplicationAttribute(self, request):
        """Get additional fields for the application list, such as instance count information.

:param request: Request instance for DescribeApplicationAttribute.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeApplicationAttributeRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeApplicationAttributeResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeApplicationAttribute", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeApplicationAttributeResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

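    # Paging sketch for the list-style Describe* methods such as
    # DescribeApplications below (the Offset/Limit request fields and the
    # Result.TotalCount/Result.Content response fields follow the common TSF
    # paging convention; treat them as assumptions for any single API, and
    # "client"/"models" follow the usage sketch earlier in this file):
    #
    #     apps, offset, limit = [], 0, 20
    #     while True:
    #         req = models.DescribeApplicationsRequest()
    #         req.Offset, req.Limit = offset, limit
    #         page = client.DescribeApplications(req).Result
    #         apps.extend(page.Content)
    #         offset += limit
    #         if offset >= page.TotalCount:
    #             break
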
def DescribeApplications(self, request):
"""获取应用列表
:param request: Request instance for DescribeApplications.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeApplicationsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeApplicationsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeApplications", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeApplicationsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeBasicResourceUsage(self, request):
        """Overview of basic TSF resource information.

:param request: Request instance for DescribeBasicResourceUsage.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeBasicResourceUsageRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeBasicResourceUsageResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeBasicResourceUsage", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeBasicResourceUsageResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeClusterInstances(self, request):
        """Query cluster instances.

:param request: Request instance for DescribeClusterInstances.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeClusterInstancesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeClusterInstancesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeClusterInstances", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeClusterInstancesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeConfig(self, request):
        """Query a configuration item.

:param request: Request instance for DescribeConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeConfigResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeConfigReleaseLogs(self, request):
        """Query configuration release history.

:param request: Request instance for DescribeConfigReleaseLogs.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeConfigReleaseLogsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeConfigReleaseLogsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeConfigReleaseLogs", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeConfigReleaseLogsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeConfigReleases(self, request):
        """Query configuration release information.

:param request: Request instance for DescribeConfigReleases.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeConfigReleasesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeConfigReleasesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeConfigReleases", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeConfigReleasesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeConfigSummary(self, request):
        """Query the configuration summary list.

:param request: Request instance for DescribeConfigSummary.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeConfigSummaryRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeConfigSummaryResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeConfigSummary", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeConfigSummaryResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeConfigs(self, request):
        """Query the list of configuration items.

:param request: Request instance for DescribeConfigs.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeConfigsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeConfigsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeConfigs", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeConfigsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeContainerEvents(self, request):
        """Get the list of container events.

:param request: Request instance for DescribeContainerEvents.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeContainerEventsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeContainerEventsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeContainerEvents", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeContainerEventsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeContainerGroupDeployInfo(self, request):
        """Get deployment group details.

:param request: Request instance for DescribeContainerGroupDeployInfo.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeContainerGroupDeployInfoRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeContainerGroupDeployInfoResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeContainerGroupDeployInfo", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeContainerGroupDeployInfoResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeContainerGroupDetail(self, request):
        """Get container deployment group details (deprecated; use DescribeContainerGroupDeployInfo instead).

:param request: Request instance for DescribeContainerGroupDetail.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeContainerGroupDetailRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeContainerGroupDetailResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeContainerGroupDetail", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeContainerGroupDetailResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeContainerGroups(self, request):
        """List container deployment groups.

:param request: Request instance for DescribeContainerGroups.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeContainerGroupsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeContainerGroupsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeContainerGroups", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeContainerGroupsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

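    # CreateAllGatewayApiAsync starts the one-click import as a background
    # task, and DescribeCreateGatewayApiStatus below polls it; a hedged
    # polling sketch (the GroupId request field and the "processing" sentinel
    # value are assumptions, not verified against the API's response model):
    #
    #     import time
    #     status_req = models.DescribeCreateGatewayApiStatusRequest()
    #     status_req.GroupId = "grp-xxxxxxxx"  # hypothetical ID
    #     while client.DescribeCreateGatewayApiStatus(status_req).Result == "processing":
    #         time.sleep(2)
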
def DescribeCreateGatewayApiStatus(self, request):
"""查询一键导入API分组任务的状态
:param request: Request instance for DescribeCreateGatewayApiStatus.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeCreateGatewayApiStatusRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeCreateGatewayApiStatusResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeCreateGatewayApiStatus", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeCreateGatewayApiStatusResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

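    # Sketch of the COS download flow that the DescribeDownloadInfo docstring
    # below describes (the qcloud_cos usage and the response field names are
    # assumptions, not verified against a specific SDK/model version):
    #
    #     from qcloud_cos import CosConfig, CosS3Client
    #
    #     info = client.DescribeDownloadInfo(req).Result
    #     cos = CosS3Client(CosConfig(Region=info.Region,
    #                                 SecretId=info.Credentials.TmpSecretId,
    #                                 SecretKey=info.Credentials.TmpSecretKey,
    #                                 Token=info.Credentials.Token))
    #     cos.download_file(Bucket=info.Bucket, Key=info.Path,
    #                       DestFilePath="package.zip")
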
def DescribeDownloadInfo(self, request):
"""TSF上传的程序包存放在腾讯云对象存储(COS)中,通过该API可以获取从COS下载程序包需要的信息,包括包所在的桶、存储路径、鉴权信息等,之后使用COS API(或SDK)进行下载。
COS相关文档请查阅:https://cloud.tencent.com/document/product/436
:param request: Request instance for DescribeDownloadInfo.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeDownloadInfoRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeDownloadInfoResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDownloadInfo", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDownloadInfoResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeEnabledUnitRule(self, request):
        """Query the unit rule currently in effect.

:param request: Request instance for DescribeEnabledUnitRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeEnabledUnitRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeEnabledUnitRuleResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeEnabledUnitRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeEnabledUnitRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeFileConfigs(self, request):
        """Query the list of file configuration items.

:param request: Request instance for DescribeFileConfigs.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeFileConfigsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeFileConfigsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeFileConfigs", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeFileConfigsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeFlowLastBatchState(self, request):
        """Query status information for the most recent batch of a workflow.

:param request: Request instance for DescribeFlowLastBatchState.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeFlowLastBatchStateRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeFlowLastBatchStateResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeFlowLastBatchState", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeFlowLastBatchStateResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeGatewayAllGroupApis(self, request):
        """Query the list of APIs under all groups of a gateway.

:param request: Request instance for DescribeGatewayAllGroupApis.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGatewayAllGroupApisRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGatewayAllGroupApisResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGatewayAllGroupApis", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGatewayAllGroupApisResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(type(e).__name__, str(e))

    def DescribeGatewayApis(self, request):
        """Query the list of APIs under an API group.

:param request: Request instance for DescribeGatewayApis.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGatewayApisRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGatewayApisResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGatewayApis", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGatewayApisResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeGatewayMonitorOverview(self, request):
"""Query the gateway monitoring overview
:param request: Request instance for DescribeGatewayMonitorOverview.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGatewayMonitorOverviewRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGatewayMonitorOverviewResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGatewayMonitorOverview", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGatewayMonitorOverviewResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeGroup(self, request):
"""Query the details of a virtual machine deployment group
:param request: Request instance for DescribeGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeGroupAttribute(self, request):
"""Get other attributes of a deployment group
:param request: Request instance for DescribeGroupAttribute.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupAttributeRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupAttributeResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGroupAttribute", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGroupAttributeResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeGroupBindedGateways(self, request):
"""Query the list of gateway deployment groups bound to an API group
:param request: Request instance for DescribeGroupBindedGateways.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupBindedGatewaysRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupBindedGatewaysResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGroupBindedGateways", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGroupBindedGatewaysResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeGroupGateways(self, request):
"""Query the list of API groups bound to a gateway
:param request: Request instance for DescribeGroupGateways.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupGatewaysRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupGatewaysResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGroupGateways", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGroupGatewaysResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeGroupInstances(self, request):
"""Query the list of CVM instances in a virtual machine deployment group
:param request: Request instance for DescribeGroupInstances.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupInstancesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupInstancesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGroupInstances", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGroupInstancesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeGroupRelease(self, request):
"""Query the release information of a deployment group
:param request: Request instance for DescribeGroupRelease.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupReleaseRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupReleaseResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGroupRelease", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGroupReleaseResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeGroupUseDetail(self, request):
"""Query detailed monitoring data of a gateway group
:param request: Request instance for DescribeGroupUseDetail.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupUseDetailRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupUseDetailResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGroupUseDetail", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGroupUseDetailResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeGroups(self, request):
"""Get the list of virtual machine deployment groups
:param request: Request instance for DescribeGroups.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGroups", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGroupsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeGroupsWithPlugin(self, request):
"""Query the API groups bound or not bound to a plugin
:param request: Request instance for DescribeGroupsWithPlugin.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupsWithPluginRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeGroupsWithPluginResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeGroupsWithPlugin", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeGroupsWithPluginResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeImageRepository(self, request):
"""Query the list of image repositories
:param request: Request instance for DescribeImageRepository.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeImageRepositoryRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeImageRepositoryResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeImageRepository", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeImageRepositoryResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeImageTags(self, request):
"""Query the list of image tags
:param request: Request instance for DescribeImageTags.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeImageTagsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeImageTagsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeImageTags", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeImageTagsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeInovcationIndicators(self, request):
"""Query invocation monitoring metrics
:param request: Request instance for DescribeInovcationIndicators.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeInovcationIndicatorsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeInovcationIndicatorsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeInovcationIndicators", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeInovcationIndicatorsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeInvocationMetricDataCurve(self, request):
"""Query the trend curve of invocation metric data
:param request: Request instance for DescribeInvocationMetricDataCurve.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeInvocationMetricDataCurveRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeInvocationMetricDataCurveResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeInvocationMetricDataCurve", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeInvocationMetricDataCurveResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeInvocationMetricDataDimension(self, request):
"""Query metric dimensions
:param request: Request instance for DescribeInvocationMetricDataDimension.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeInvocationMetricDataDimensionRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeInvocationMetricDataDimensionResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeInvocationMetricDataDimension", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeInvocationMetricDataDimensionResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeInvocationMetricDataPoint(self, request):
"""Query single-value metric data points
:param request: Request instance for DescribeInvocationMetricDataPoint.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeInvocationMetricDataPointRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeInvocationMetricDataPointResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeInvocationMetricDataPoint", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeInvocationMetricDataPointResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeInvocationMetricScatterPlot(self, request):
"""Query the scatter plot of invocation metric data
:param request: Request instance for DescribeInvocationMetricScatterPlot.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeInvocationMetricScatterPlotRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeInvocationMetricScatterPlotResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeInvocationMetricScatterPlot", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeInvocationMetricScatterPlotResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeJvmMonitor(self, request):
"""Query the JVM monitoring data of a Java instance; the returned data items are selectable
:param request: Request instance for DescribeJvmMonitor.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeJvmMonitorRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeJvmMonitorResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeJvmMonitor", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeJvmMonitorResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeLaneRules(self, request):
"""Query the list of lane rules
:param request: Request instance for DescribeLaneRules.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeLaneRulesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeLaneRulesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeLaneRules", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeLaneRulesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeLanes(self, request):
"""Query the list of lanes
:param request: Request instance for DescribeLanes.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeLanesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeLanesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeLanes", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeLanesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeMicroservice(self, request):
"""Query the details of a microservice
:param request: Request instance for DescribeMicroservice.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeMicroserviceRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeMicroserviceResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeMicroservice", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeMicroserviceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeMicroservices(self, request):
"""Get the list of microservices
:param request: Request instance for DescribeMicroservices.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeMicroservicesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeMicroservicesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeMicroservices", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeMicroservicesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeMsApiList(self, request):
"""Query the list of APIs of a service
:param request: Request instance for DescribeMsApiList.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeMsApiListRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeMsApiListResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeMsApiList", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeMsApiListResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeOverviewInvocation(self, request):
"""Query the overview of service invocation monitoring statistics
:param request: Request instance for DescribeOverviewInvocation.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeOverviewInvocationRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeOverviewInvocationResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeOverviewInvocation", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeOverviewInvocationResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePathRewrite(self, request):
"""Query a path rewrite
:param request: Request instance for DescribePathRewrite.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribePathRewriteRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribePathRewriteResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePathRewrite", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePathRewriteResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePathRewrites(self, request):
"""Query the list of path rewrites
:param request: Request instance for DescribePathRewrites.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribePathRewritesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribePathRewritesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePathRewrites", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePathRewritesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePkgs(self, request):
"""No description provided
:param request: Request instance for DescribePkgs.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribePkgsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribePkgsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePkgs", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePkgsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePluginInstances(self, request):
"""Query, with pagination, the list of plugins bound (or not bound) to a gateway group/API
:param request: Request instance for DescribePluginInstances.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribePluginInstancesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribePluginInstancesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePluginInstances", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePluginInstancesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePodInstances(self, request):
"""Get the list of pod instances in a deployment group
:param request: Request instance for DescribePodInstances.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribePodInstancesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribePodInstancesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePodInstances", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePodInstancesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePublicConfig(self, request):
"""Query a public configuration (single item)
:param request: Request instance for DescribePublicConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePublicConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePublicConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePublicConfigReleaseLogs(self, request):
"""Query the release history of public configurations
:param request: Request instance for DescribePublicConfigReleaseLogs.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigReleaseLogsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigReleaseLogsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePublicConfigReleaseLogs", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePublicConfigReleaseLogsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePublicConfigReleases(self, request):
"""Query the release information of public configurations
:param request: Request instance for DescribePublicConfigReleases.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigReleasesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigReleasesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePublicConfigReleases", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePublicConfigReleasesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePublicConfigSummary(self, request):
"""Query the summary list of public configurations
:param request: Request instance for DescribePublicConfigSummary.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigSummaryRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigSummaryResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePublicConfigSummary", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePublicConfigSummaryResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePublicConfigs(self, request):
"""Query the list of public configuration items
:param request: Request instance for DescribePublicConfigs.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribePublicConfigsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribePublicConfigs", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribePublicConfigsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeReleasedConfig(self, request):
"""Query the configuration released to a group
:param request: Request instance for DescribeReleasedConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeReleasedConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeReleasedConfigResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeReleasedConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeReleasedConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeRepositories(self, request):
"""Query the list of repositories
:param request: Request instance for DescribeRepositories.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeRepositoriesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeRepositoriesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeRepositories", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeRepositoriesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeRepository(self, request):
"""Query repository information
:param request: Request instance for DescribeRepository.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeRepositoryRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeRepositoryResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeRepository", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeRepositoryResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeServerlessGroup(self, request):
"""Query the details of a serverless deployment group
:param request: Request instance for DescribeServerlessGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeServerlessGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeServerlessGroupResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeServerlessGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeServerlessGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeServerlessGroups(self, request):
"""Query the list of serverless deployment groups
:param request: Request instance for DescribeServerlessGroups.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeServerlessGroupsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeServerlessGroupsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeServerlessGroups", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeServerlessGroupsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeSimpleApplications(self, request):
"""Query the list of basic application information
:param request: Request instance for DescribeSimpleApplications.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeSimpleApplicationsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeSimpleApplicationsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeSimpleApplications", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeSimpleApplicationsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeSimpleClusters(self, request):
"""Query the list of basic cluster information
:param request: Request instance for DescribeSimpleClusters.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeSimpleClustersRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeSimpleClustersResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeSimpleClusters", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeSimpleClustersResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeSimpleGroups(self, request):
"""Query the list of basic deployment group information
:param request: Request instance for DescribeSimpleGroups.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeSimpleGroupsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeSimpleGroupsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeSimpleGroups", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeSimpleGroupsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeSimpleNamespaces(self, request):
"""Query the list of basic namespace information
:param request: Request instance for DescribeSimpleNamespaces.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeSimpleNamespacesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeSimpleNamespacesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeSimpleNamespaces", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeSimpleNamespacesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeStatistics(self, request):
"""Service statistics page: API and service dimensions
:param request: Request instance for DescribeStatistics.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeStatisticsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeStatisticsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeStatistics", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeStatisticsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeTaskDetail(self, request):
"""Query the details of a task
:param request: Request instance for DescribeTaskDetail.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeTaskDetailRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeTaskDetailResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeTaskDetail", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeTaskDetailResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeTaskLastStatus(self, request):
"""Query the status of the most recent execution of a task
:param request: Request instance for DescribeTaskLastStatus.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeTaskLastStatusRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeTaskLastStatusResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeTaskLastStatus", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeTaskLastStatusResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeTaskRecords(self, request):
"""Query the task list with pagination (a pagination sketch follows this method)
:param request: Request instance for DescribeTaskRecords.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeTaskRecordsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeTaskRecordsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeTaskRecords", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeTaskRecordsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
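# Pagination sketch (illustrative): the Offset/Limit request fields and the
# Result page with TotalCount/Content assumed below follow the usual TSF paging
# convention and are not guaranteed by this client.
#
#   offset, limit = 0, 20
#   while True:
#       req = models.DescribeTaskRecordsRequest()
#       req.Offset, req.Limit = offset, limit
#       page = client.DescribeTaskRecords(req).Result
#       for record in page.Content:
#           print(record.TaskId)                 # assumed field on a task record
#       offset += limit
#       if offset >= page.TotalCount:
#           break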
def DescribeUnitApiUseDetail(self, request):
"""查询网关API监控明细数据(仅单元化网关),非单元化网关使用DescribeApiUseDetail
:param request: Request instance for DescribeUnitApiUseDetail.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeUnitApiUseDetailRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeUnitApiUseDetailResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeUnitApiUseDetail", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeUnitApiUseDetailResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeUnitNamespaces(self, request):
"""Query the list of unitized namespaces
:param request: Request instance for DescribeUnitNamespaces.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeUnitNamespacesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeUnitNamespacesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeUnitNamespaces", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeUnitNamespacesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeUnitRule(self, request):
"""Query the details of a unit rule
:param request: Request instance for DescribeUnitRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeUnitRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeUnitRuleResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeUnitRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeUnitRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeUnitRules(self, request):
"""Query the list of unit rules
:param request: Request instance for DescribeUnitRules.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeUnitRulesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeUnitRulesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeUnitRules", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeUnitRulesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeUploadInfo(self, request):
"""TSF uploads application packages to Tencent Cloud Object Storage (COS). Call this API to obtain upload information such as the target region, bucket, package ID, storage path, and credentials, and then upload the package with the COS API or SDK (an upload sketch follows this method).
For COS documentation, see: https://cloud.tencent.com/document/product/436
:param request: Request instance for DescribeUploadInfo.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeUploadInfoRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeUploadInfoResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeUploadInfo", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeUploadInfoResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
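# Upload sketch (illustrative): per the docstring above, DescribeUploadInfo only
# returns upload metadata; the package itself is uploaded with the COS SDK
# (cos-python-sdk-v5). The response field names below are assumptions, not
# guaranteed by this client.
#
#   from qcloud_cos import CosConfig, CosS3Client
#
#   info = client.DescribeUploadInfo(req).Result   # assumed: region, bucket, path, credentials
#   cos_config = CosConfig(Region=info.Region,
#                          SecretId=info.Credentials.TmpSecretId,
#                          SecretKey=info.Credentials.TmpSecretKey,
#                          Token=info.Credentials.SessionToken)
#   CosS3Client(cos_config).upload_file(Bucket=info.Bucket, Key=info.PkgPath,
#                                       LocalFilePath="my-package.zip")  # local file is an example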
def DescribeUsableUnitNamespaces(self, request):
"""查询可用于被导入的命名空间列表
:param request: Request instance for DescribeUsableUnitNamespaces.
:type request: :class:`tencentcloud.tsf.v20180326.models.DescribeUsableUnitNamespacesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DescribeUsableUnitNamespacesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeUsableUnitNamespaces", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeUsableUnitNamespacesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DisableTask(self, request):
"""Disable a task
:param request: Request instance for DisableTask.
:type request: :class:`tencentcloud.tsf.v20180326.models.DisableTaskRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DisableTaskResponse`
"""
try:
params = request._serialize()
body = self.call("DisableTask", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DisableTaskResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DisableTaskFlow(self, request):
"""Disable a workflow
:param request: Request instance for DisableTaskFlow.
:type request: :class:`tencentcloud.tsf.v20180326.models.DisableTaskFlowRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DisableTaskFlowResponse`
"""
try:
params = request._serialize()
body = self.call("DisableTaskFlow", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DisableTaskFlowResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DisableUnitRoute(self, request):
"""Disable unit routing
:param request: Request instance for DisableUnitRoute.
:type request: :class:`tencentcloud.tsf.v20180326.models.DisableUnitRouteRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DisableUnitRouteResponse`
"""
try:
params = request._serialize()
body = self.call("DisableUnitRoute", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DisableUnitRouteResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DisableUnitRule(self, request):
"""Disable a unit rule
:param request: Request instance for DisableUnitRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.DisableUnitRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DisableUnitRuleResponse`
"""
try:
params = request._serialize()
body = self.call("DisableUnitRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DisableUnitRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def DraftApiGroup(self, request):
"""Take an API group offline
:param request: Request instance for DraftApiGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.DraftApiGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.DraftApiGroupResponse`
"""
try:
params = request._serialize()
body = self.call("DraftApiGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DraftApiGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def EnableTask(self, request):
"""Enable a task
:param request: Request instance for EnableTask.
:type request: :class:`tencentcloud.tsf.v20180326.models.EnableTaskRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.EnableTaskResponse`
"""
try:
params = request._serialize()
body = self.call("EnableTask", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.EnableTaskResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def EnableTaskFlow(self, request):
"""Enable a workflow
:param request: Request instance for EnableTaskFlow.
:type request: :class:`tencentcloud.tsf.v20180326.models.EnableTaskFlowRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.EnableTaskFlowResponse`
"""
try:
params = request._serialize()
body = self.call("EnableTaskFlow", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.EnableTaskFlowResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def EnableUnitRoute(self, request):
"""Enable unit routing
:param request: Request instance for EnableUnitRoute.
:type request: :class:`tencentcloud.tsf.v20180326.models.EnableUnitRouteRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.EnableUnitRouteResponse`
"""
try:
params = request._serialize()
body = self.call("EnableUnitRoute", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.EnableUnitRouteResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def EnableUnitRule(self, request):
"""Enable a unit rule
:param request: Request instance for EnableUnitRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.EnableUnitRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.EnableUnitRuleResponse`
"""
try:
params = request._serialize()
body = self.call("EnableUnitRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.EnableUnitRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def ExecuteTask(self, request):
"""Execute a task once manually.
:param request: Request instance for ExecuteTask.
:type request: :class:`tencentcloud.tsf.v20180326.models.ExecuteTaskRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ExecuteTaskResponse`
"""
try:
params = request._serialize()
body = self.call("ExecuteTask", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ExecuteTaskResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def ExecuteTaskFlow(self, request):
"""Execute a workflow once
:param request: Request instance for ExecuteTaskFlow.
:type request: :class:`tencentcloud.tsf.v20180326.models.ExecuteTaskFlowRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ExecuteTaskFlowResponse`
"""
try:
params = request._serialize()
body = self.call("ExecuteTaskFlow", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ExecuteTaskFlowResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def ExpandGroup(self, request):
"""Add instances to a virtual machine deployment group
:param request: Request instance for ExpandGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.ExpandGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ExpandGroupResponse`
"""
try:
params = request._serialize()
body = self.call("ExpandGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ExpandGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifyContainerGroup(self, request):
"""Modify a container deployment group
:param request: Request instance for ModifyContainerGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.ModifyContainerGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ModifyContainerGroupResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyContainerGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyContainerGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ModifyContainerReplicas(self, request):
        """Modify the instance count of a container deployment group.
:param request: Request instance for ModifyContainerReplicas.
:type request: :class:`tencentcloud.tsf.v20180326.models.ModifyContainerReplicasRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ModifyContainerReplicasResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyContainerReplicas", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyContainerReplicasResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ModifyLane(self, request):
        """Update lane information.
:param request: Request instance for ModifyLane.
:type request: :class:`tencentcloud.tsf.v20180326.models.ModifyLaneRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ModifyLaneResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyLane", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyLaneResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ModifyLaneRule(self, request):
        """Update a lane rule.
:param request: Request instance for ModifyLaneRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.ModifyLaneRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ModifyLaneRuleResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyLaneRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyLaneRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ModifyMicroservice(self, request):
        """Modify microservice details.
:param request: Request instance for ModifyMicroservice.
:type request: :class:`tencentcloud.tsf.v20180326.models.ModifyMicroserviceRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ModifyMicroserviceResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyMicroservice", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyMicroserviceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ModifyPathRewrite(self, request):
        """Modify a path rewrite.
:param request: Request instance for ModifyPathRewrite.
:type request: :class:`tencentcloud.tsf.v20180326.models.ModifyPathRewriteRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ModifyPathRewriteResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyPathRewrite", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyPathRewriteResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ModifyTask(self, request):
        """Modify a task.
:param request: Request instance for ModifyTask.
:type request: :class:`tencentcloud.tsf.v20180326.models.ModifyTaskRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ModifyTaskResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyTask", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyTaskResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ModifyUploadInfo(self, request):
        """After calling this API and the COS upload API, call this API to update the package status stored in TSF.
        The package upload flow is considered finished only after this call completes.
:param request: Request instance for ModifyUploadInfo.
:type request: :class:`tencentcloud.tsf.v20180326.models.ModifyUploadInfoRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ModifyUploadInfoResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyUploadInfo", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyUploadInfoResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
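    # Illustrative usage sketch (not part of the generated SDK): every method in
    # this client follows the same serialize -> self.call() -> deserialize pattern,
    # so callers differ only in the request model they build. The client/module
    # names below are assumptions inferred from the type hints in the docstrings.
    #
    #   from tencentcloud.common import credential
    #   from tencentcloud.tsf.v20180326 import tsf_client, models
    #
    #   cred = credential.Credential("SECRET_ID", "SECRET_KEY")  # hypothetical keys
    #   client = tsf_client.TsfClient(cred, "ap-guangzhou")
    #   req = models.ModifyUploadInfoRequest()
    #   resp = client.ModifyUploadInfo(req)
    #   print(resp.to_json_string())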
def OperateApplicationTcrBinding(self, request):
"""绑定解绑tcr仓库
:param request: Request instance for OperateApplicationTcrBinding.
:type request: :class:`tencentcloud.tsf.v20180326.models.OperateApplicationTcrBindingRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.OperateApplicationTcrBindingResponse`
"""
try:
params = request._serialize()
body = self.call("OperateApplicationTcrBinding", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.OperateApplicationTcrBindingResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def RedoTask(self, request):
        """Re-execute a task.
:param request: Request instance for RedoTask.
:type request: :class:`tencentcloud.tsf.v20180326.models.RedoTaskRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.RedoTaskResponse`
"""
try:
params = request._serialize()
body = self.call("RedoTask", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.RedoTaskResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def RedoTaskBatch(self, request):
        """Re-execute a task batch.
:param request: Request instance for RedoTaskBatch.
:type request: :class:`tencentcloud.tsf.v20180326.models.RedoTaskBatchRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.RedoTaskBatchResponse`
"""
try:
params = request._serialize()
body = self.call("RedoTaskBatch", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.RedoTaskBatchResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def RedoTaskExecute(self, request):
        """Re-execute a task execution on a specific node.
:param request: Request instance for RedoTaskExecute.
:type request: :class:`tencentcloud.tsf.v20180326.models.RedoTaskExecuteRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.RedoTaskExecuteResponse`
"""
try:
params = request._serialize()
body = self.call("RedoTaskExecute", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.RedoTaskExecuteResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def RedoTaskFlowBatch(self, request):
        """Re-execute a workflow batch.
:param request: Request instance for RedoTaskFlowBatch.
:type request: :class:`tencentcloud.tsf.v20180326.models.RedoTaskFlowBatchRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.RedoTaskFlowBatchResponse`
"""
try:
params = request._serialize()
body = self.call("RedoTaskFlowBatch", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.RedoTaskFlowBatchResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ReleaseApiGroup(self, request):
        """Release an API group.
:param request: Request instance for ReleaseApiGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.ReleaseApiGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ReleaseApiGroupResponse`
"""
try:
params = request._serialize()
body = self.call("ReleaseApiGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ReleaseApiGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ReleaseConfig(self, request):
        """Release a configuration.
:param request: Request instance for ReleaseConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.ReleaseConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ReleaseConfigResponse`
"""
try:
params = request._serialize()
body = self.call("ReleaseConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ReleaseConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ReleaseFileConfig(self, request):
        """Release a file configuration.
:param request: Request instance for ReleaseFileConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.ReleaseFileConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ReleaseFileConfigResponse`
"""
try:
params = request._serialize()
body = self.call("ReleaseFileConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ReleaseFileConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ReleasePublicConfig(self, request):
        """Release a public configuration.
:param request: Request instance for ReleasePublicConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.ReleasePublicConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ReleasePublicConfigResponse`
"""
try:
params = request._serialize()
body = self.call("ReleasePublicConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ReleasePublicConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def RemoveInstances(self, request):
        """Remove CVM instance nodes from a TSF cluster in batches.
:param request: Request instance for RemoveInstances.
:type request: :class:`tencentcloud.tsf.v20180326.models.RemoveInstancesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.RemoveInstancesResponse`
"""
try:
params = request._serialize()
body = self.call("RemoveInstances", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.RemoveInstancesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def RevocationConfig(self, request):
        """Revoke a released configuration.
:param request: Request instance for RevocationConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.RevocationConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.RevocationConfigResponse`
"""
try:
params = request._serialize()
body = self.call("RevocationConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.RevocationConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def RevocationPublicConfig(self, request):
        """Revoke a released public configuration.
:param request: Request instance for RevocationPublicConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.RevocationPublicConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.RevocationPublicConfigResponse`
"""
try:
params = request._serialize()
body = self.call("RevocationPublicConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.RevocationPublicConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def RollbackConfig(self, request):
        """Roll back a configuration.
:param request: Request instance for RollbackConfig.
:type request: :class:`tencentcloud.tsf.v20180326.models.RollbackConfigRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.RollbackConfigResponse`
"""
try:
params = request._serialize()
body = self.call("RollbackConfig", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.RollbackConfigResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def SearchBusinessLog(self, request):
        """Search business logs.
:param request: Request instance for SearchBusinessLog.
:type request: :class:`tencentcloud.tsf.v20180326.models.SearchBusinessLogRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.SearchBusinessLogResponse`
"""
try:
params = request._serialize()
body = self.call("SearchBusinessLog", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.SearchBusinessLogResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def SearchStdoutLog(self, request):
        """Search standard output (stdout) logs.
:param request: Request instance for SearchStdoutLog.
:type request: :class:`tencentcloud.tsf.v20180326.models.SearchStdoutLogRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.SearchStdoutLogResponse`
"""
try:
params = request._serialize()
body = self.call("SearchStdoutLog", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.SearchStdoutLogResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ShrinkGroup(self, request):
        """Take all machine instances of a deployment group offline.
:param request: Request instance for ShrinkGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.ShrinkGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ShrinkGroupResponse`
"""
try:
params = request._serialize()
body = self.call("ShrinkGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ShrinkGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def ShrinkInstances(self, request):
        """Take instances of a virtual machine deployment group offline.
:param request: Request instance for ShrinkInstances.
:type request: :class:`tencentcloud.tsf.v20180326.models.ShrinkInstancesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.ShrinkInstancesResponse`
"""
try:
params = request._serialize()
body = self.call("ShrinkInstances", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ShrinkInstancesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def StartContainerGroup(self, request):
        """Start a container deployment group.
:param request: Request instance for StartContainerGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.StartContainerGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.StartContainerGroupResponse`
"""
try:
params = request._serialize()
body = self.call("StartContainerGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.StartContainerGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def StartGroup(self, request):
        """Start a deployment group.
:param request: Request instance for StartGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.StartGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.StartGroupResponse`
"""
try:
params = request._serialize()
body = self.call("StartGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.StartGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def StopContainerGroup(self, request):
        """Stop a container deployment group.
:param request: Request instance for StopContainerGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.StopContainerGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.StopContainerGroupResponse`
"""
try:
params = request._serialize()
body = self.call("StopContainerGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.StopContainerGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def StopGroup(self, request):
        """Stop a virtual machine deployment group.
:param request: Request instance for StopGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.StopGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.StopGroupResponse`
"""
try:
params = request._serialize()
body = self.call("StopGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.StopGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def StopTaskBatch(self, request):
        """Stop a running task batch; must not be called for tasks that are not running.
:param request: Request instance for StopTaskBatch.
:type request: :class:`tencentcloud.tsf.v20180326.models.StopTaskBatchRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.StopTaskBatchResponse`
"""
try:
params = request._serialize()
body = self.call("StopTaskBatch", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.StopTaskBatchResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def StopTaskExecute(self, request):
        """Stop a task that is executing on a specific node.
:param request: Request instance for StopTaskExecute.
:type request: :class:`tencentcloud.tsf.v20180326.models.StopTaskExecuteRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.StopTaskExecuteResponse`
"""
try:
params = request._serialize()
body = self.call("StopTaskExecute", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.StopTaskExecuteResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def TerminateTaskFlowBatch(self, request):
        """Terminate a workflow batch.
:param request: Request instance for TerminateTaskFlowBatch.
:type request: :class:`tencentcloud.tsf.v20180326.models.TerminateTaskFlowBatchRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.TerminateTaskFlowBatchResponse`
"""
try:
params = request._serialize()
body = self.call("TerminateTaskFlowBatch", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.TerminateTaskFlowBatchResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def UnbindApiGroup(self, request):
        """Unbind API groups from gateways in batches.
:param request: Request instance for UnbindApiGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.UnbindApiGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.UnbindApiGroupResponse`
"""
try:
params = request._serialize()
body = self.call("UnbindApiGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UnbindApiGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def UpdateApiGroup(self, request):
        """Update an API group.
:param request: Request instance for UpdateApiGroup.
:type request: :class:`tencentcloud.tsf.v20180326.models.UpdateApiGroupRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.UpdateApiGroupResponse`
"""
try:
params = request._serialize()
body = self.call("UpdateApiGroup", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateApiGroupResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def UpdateApiRateLimitRule(self, request):
        """Update an API rate limit rule.
:param request: Request instance for UpdateApiRateLimitRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.UpdateApiRateLimitRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.UpdateApiRateLimitRuleResponse`
"""
try:
params = request._serialize()
body = self.call("UpdateApiRateLimitRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateApiRateLimitRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def UpdateApiRateLimitRules(self, request):
        """Update API rate limit rules in batches.
:param request: Request instance for UpdateApiRateLimitRules.
:type request: :class:`tencentcloud.tsf.v20180326.models.UpdateApiRateLimitRulesRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.UpdateApiRateLimitRulesResponse`
"""
try:
params = request._serialize()
body = self.call("UpdateApiRateLimitRules", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateApiRateLimitRulesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def UpdateApiTimeouts(self, request):
        """Update API timeouts in batches.
:param request: Request instance for UpdateApiTimeouts.
:type request: :class:`tencentcloud.tsf.v20180326.models.UpdateApiTimeoutsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.UpdateApiTimeoutsResponse`
"""
try:
params = request._serialize()
body = self.call("UpdateApiTimeouts", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateApiTimeoutsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def UpdateGatewayApi(self, request):
        """Update an API.
:param request: Request instance for UpdateGatewayApi.
:type request: :class:`tencentcloud.tsf.v20180326.models.UpdateGatewayApiRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.UpdateGatewayApiResponse`
"""
try:
params = request._serialize()
body = self.call("UpdateGatewayApi", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateGatewayApiResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def UpdateHealthCheckSettings(self, request):
        """Update health check settings.
:param request: Request instance for UpdateHealthCheckSettings.
:type request: :class:`tencentcloud.tsf.v20180326.models.UpdateHealthCheckSettingsRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.UpdateHealthCheckSettingsResponse`
"""
try:
params = request._serialize()
body = self.call("UpdateHealthCheckSettings", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateHealthCheckSettingsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def UpdateRepository(self, request):
        """Update repository information.
:param request: Request instance for UpdateRepository.
:type request: :class:`tencentcloud.tsf.v20180326.models.UpdateRepositoryRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.UpdateRepositoryResponse`
"""
try:
params = request._serialize()
body = self.call("UpdateRepository", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateRepositoryResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e))
    def UpdateUnitRule(self, request):
        """Update a unit rule.
:param request: Request instance for UpdateUnitRule.
:type request: :class:`tencentcloud.tsf.v20180326.models.UpdateUnitRuleRequest`
:rtype: :class:`tencentcloud.tsf.v20180326.models.UpdateUnitRuleResponse`
"""
try:
params = request._serialize()
body = self.call("UpdateUnitRule", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateUnitRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
                raise TencentCloudSDKException(str(e), str(e)) | 41.019223 | 110 | 0.588099
b61921c4dc9308ca53e6dc40ecb38223ddd48b4d | 1,753 | py | Python | baiduspider/core/_spider.py | samzhangjy/GSSpider | 344d9c9053a5d5bf08692e0c817d30763dbd8ab7 | [
"MIT"
] | 31 | 2020-07-17T08:26:37.000Z | 2021-08-24T02:28:50.000Z | baiduspider/core/_spider.py | samzhangjy/GSSpider | 344d9c9053a5d5bf08692e0c817d30763dbd8ab7 | [
"MIT"
] | 6 | 2020-07-14T17:13:17.000Z | 2020-09-12T06:02:01.000Z | baiduspider/core/_spider.py | samzhangjy/GSSpider | 344d9c9053a5d5bf08692e0c817d30763dbd8ab7 | [
"MIT"
] | 12 | 2020-07-27T08:38:26.000Z | 2021-07-28T16:05:58.000Z | import re
from htmlmin import minify
import requests
from baiduspider.errors import ParseError, UnknownError
class BaseSpider(object): # pragma: no cover
def __init__(self) -> None:
"""所有爬虫的基类
此类包括了常用的util和自定义方法,继承自`object`。
"""
super().__init__()
self.spider_name = 'BaseSpider'
self.headers = {}
def _format(self, s: str) -> str:
"""去除字符串中不必要的成分并返回
Args:
s (str): 要整理的字符串
Returns:
str: 处理后的字符串
"""
return s.strip()
def _remove_html(self, s: str) -> str:
"""从字符串中去除HTML标签
Args:
s (str): 要处理的字符串
Returns:
str: 处理完的去除了HTML标签的字符串
"""
pattern = re.compile(r'<[^*>]+>', re.S)
removed = pattern.sub('', s)
return removed
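    # Minimal sketch of the two string helpers above (illustrative only):
    #   BaseSpider()._format('  hi  ')                  -> 'hi'
    #   BaseSpider()._remove_html('<b>hello</b> world') -> 'hello world'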
def _minify(self, html: str) -> str:
"""压缩HTML代码
Args:
html (str): 要压缩的代码
Returns:
str: 压缩后的HTML代码
"""
return html.replace('\u00a0', '')
def _get_response(self, url: str) -> str:
"""获取网站响应,并返回源码
Args:
url (str): 要获取响应的链接
Returns:
str: 获取到的网站HTML代码
"""
response = requests.get(url, headers=self.headers)
content = bytes(response.text, response.encoding).decode('utf-8')
return content
def _handle_error(self, err: Exception) -> None:
if err is None:
return None
if type(err) in [ParseError]:
error = err
else:
error = UnknownError(str(err))
raise error
def __repr__(self) -> str:
return '<Spider %s>' % self.spider_name
def __str__(self) -> str:
return self.__repr__() | 22.766234 | 73 | 0.524815 |
c412a76e03f768da6abb21beeb5da4f138fe8c8c | 3,693 | py | Python | rowcounter.py | graeme-winter/rpi-pico | be497016c1151e9249bb8565223e52ce55e5864e | [
"BSD-3-Clause"
] | null | null | null | rowcounter.py | graeme-winter/rpi-pico | be497016c1151e9249bb8565223e52ce55e5864e | [
"BSD-3-Clause"
] | null | null | null | rowcounter.py | graeme-winter/rpi-pico | be497016c1151e9249bb8565223e52ce55e5864e | [
"BSD-3-Clause"
] | null | null | null | # rowcounter.py
#
# Counter for counting rows when knitting, which includes a time indicator
# which counts for ~ 12 minutes since count was last changed, to answer the
# question "did I just count that row?"
#
# (C) Graeme Winter, 2021
#
# UI:
#
# A X
# +------------------+
# | |
# +------------------+
# B Y
#
# A: reset
# X: increment counter
# Y: decrement counter
# B+X: increase counter brightness
# B+Y: decrease counter brightness
#
import time
import picoscroll as scroll
# constants - layout of numbers - all are in a 4x5 box so can encode as
# to binary byte strings
__NUMBERS = (
"01101011110110010110",
"00100110001000100010",
"01101001001001001111",
"11100001001000011110",
"10011001111100010001",
"11111000111000011110",
"01101000111010010110",
"11110001001000100010",
"01101001011010010110",
"01101001011100010110",
)
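# Decoding example: each entry is a 4x5 bitmap read row by row, 4 bits per row.
# __NUMBERS[0] = "0110 1011 1101 1001 0110" renders as the glyph for 0:
#   .##.
#   #.##
#   ##.#
#   #..#
#   .##.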
scroll.init()
__WIDTH = scroll.get_width()
__HEIGHT = scroll.get_height()
__INI = "count.ini"
t0 = time.time()
count = 0
brightness = 8
def load():
try:
global count
count = int(open(__INI, "r").read())
if count < 0:
count = 0
except:
count = 0
def save():
with open(__INI, "w") as f:
f.write(str(count))
def plot_digit(digit, x, y, b):
"""Write the digit at the offset starting at x, y with brightness b"""
code = __NUMBERS[digit]
assert x >= 0
assert x + 4 <= __WIDTH
assert y >= 0
assert y + 5 <= __HEIGHT
for _y in range(5):
for _x in range(4):
if code[_x + 4 * _y] == "1":
scroll.set_pixel(_x + x, _y + y, b)
else:
scroll.set_pixel(_x + x, _y + y, 0)
def plot_time():
"""Plot the time as a bar across the top - last pip will blink"""
dt = time.time() - t0
n = dt // 30 + 1
if n > 25:
n = 25
for j in range(n):
y, x = divmod(j, 5)
if j == n - 1 and dt % 2:
scroll.set_pixel(x + 1, y + 1, brightness)
else:
scroll.set_pixel(x + 1, y + 1, brightness // 2)
for j in range(n, 25):
y, x = divmod(j, 5)
scroll.set_pixel(x + 1, y + 1, 0)
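# Timing sketch: plot_time() lights one pip per 30 s elapsed, capped at 25 pips
# (a 5x5 block), so the bar saturates after 25 * 30 s = 750 s, the ~12 minutes
# mentioned in the header comment; the newest pip toggles each second via dt % 2.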
def plot_count():
"""Write the right-justified count"""
assert count < 1000
digits = map(int, reversed(str(count)))
scroll.clear()
for j, digit in enumerate(digits):
plot_digit(digit, __WIDTH - 5 * (j + 1), 1, brightness)
def update():
plot_time()
scroll.update()
def main():
global t0, count, brightness
load()
plot_count()
while True:
        # dumb switch debouncing - see the time.sleep(0.25) calls below
x = scroll.is_pressed(scroll.BUTTON_X)
y = scroll.is_pressed(scroll.BUTTON_Y)
a = scroll.is_pressed(scroll.BUTTON_A)
b = scroll.is_pressed(scroll.BUTTON_B)
if b and x:
if brightness < 128:
brightness *= 2
plot_count()
update()
time.sleep(0.25)
elif x:
count += 1
t0 = time.time()
save()
plot_count()
update()
time.sleep(0.25)
if b and y:
if brightness > 1:
brightness //= 2
plot_count()
update()
time.sleep(0.25)
elif y and count > 0:
count -= 1
t0 = time.time()
save()
plot_count()
update()
time.sleep(0.25)
if a:
count = 0
t0 = time.time()
plot_count()
save()
update()
time.sleep(0.02)
main()
| 21.723529 | 75 | 0.522069 |
681058e35e1e758457014ca95826f59c83fc978c | 3,884 | py | Python | OSMHelper.py | EsriDE/EsriDE-python-osm2arcgis- | c0ec5b8128688b1daa9863b9524088998346b6cf | [
"Apache-2.0"
] | 7 | 2017-10-26T10:46:46.000Z | 2020-03-06T17:56:35.000Z | OSMHelper.py | EsriDE/EsriDE-python-osm2arcgis- | c0ec5b8128688b1daa9863b9524088998346b6cf | [
"Apache-2.0"
] | null | null | null | OSMHelper.py | EsriDE/EsriDE-python-osm2arcgis- | c0ec5b8128688b1daa9863b9524088998346b6cf | [
"Apache-2.0"
] | 4 | 2017-11-15T07:51:59.000Z | 2021-05-22T05:41:39.000Z | __version__ = "1.4"
'''
__author__ = "Lukas Bug"
__copyright__ = "Copyright 2018, Esri Deutschland GmbH"
__license__ = "Apache-2.0"
__version__ = "1.4"
__email__ = "lukas.bug@aol.de"
'''
from osm_runner import gen_osm_sdf
from osm_runner_utils import Filters
from threading import Thread
from ExceptionHelper import OSMHelperExceptions as osmh_excps
import os,traceback
threadlist = []
sdflist = []
def requestOSMData(osmconfig, elem, sdflist):
'''
Function to prepare request of an OSM data item using osm-runner
@param osmconfig: A dictionary containing the OSM configuration defined in the file osmconfig.json.
@param elem: OSM-configuration item (category) defined in the file osmconfig.json
@param sdflist: List of spatial dataframes needed for upload to portal.
'''
if elem['isEnabled'] == 'yes':
geom = elem['geometryType']
bbox = osmconfig['boundingBox']
category = elem['categoryName']
excludedattributes = elem['attributeFieldsToExclude']
Filters[elem['categoryName']] = elem['categoryValues']
osmdata = fetchOSMData(geom, bbox, category, excludedattributes)
sdflist.append(osmdata)
def getDataFrameList(osmconfig):
'''
    Function to initiate simultaneous (thread-based) requests to OSM using osm-runner.
    @param osmconfig: A dictionary containing the OSM configuration defined in the file osmconfig.json.
'''
for elem in osmconfig['categories']:
t = Thread(target=requestOSMData, args=[osmconfig, elem, sdflist])
threadlist.append(t)
t.start()
for t in threadlist:
t.join()
return sdflist
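# Illustrative call (the config shape follows the keys read above; all values
# here are made-up assumptions, not a documented osmconfig.json sample):
#   osmconfig = {'boundingBox': '(48.10,11.50,48.20,11.60)',
#                'categories': [{'isEnabled': 'yes',
#                                'geometryType': 'point',
#                                'categoryName': 'amenity',
#                                'categoryValues': ['cafe'],
#                                'attributeFieldsToExclude': ''}]}
#   sdfs = getDataFrameList(osmconfig)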
def fetchOSMData(geom, bbox, category, excludedattributes):
'''
Function to create layer definitions for upload to portal.
@param geom: The geometry type of the requested data.
@param bbox: The extent of the requested data defined by a bounding box.
@param category: The category name of the requested data.
@param excludedattributes: The attributes to be excluded from the current layer.
'''
try:
print('Fetching '+geom+' data from OpenStreetMap on category: '+category+' . . .')
if geom != 'polygon':
sdf = gen_osm_sdf(geom, bbox, excludedattributes, category)
if not sdf.empty:
return sdf
            else:
                # NOTE: the original tested the exception classes themselves
                # ("if FileNotFoundError:"), which is always truthy, so an empty
                # result unconditionally raised FileNotFoundError; raise it directly.
                raise FileNotFoundError
else:
sdf = gen_osm_sdf(geom, bbox, excludedattributes, category, 1)
if not sdf.empty:
return sdf
            else:
                # As above: an empty result always raised FileNotFoundError.
                raise FileNotFoundError
except FileNotFoundError:
tb = traceback.format_exc()
        print('OSM request could not be completed. \n Cause: OSM returned empty result for geometry '+geom+' , \
         the script exits now. Additional configuration information: Category: '+category+', excluded attributes: \
         '+str(excludedattributes)+', \n Disable this configuration and try again. Detailed information: '+tb)
os._exit(-1)
except ConnectionRefusedError:
tb = traceback.format_exc()
print('OSM request could not be completed. \n Cause: OSM refused the connection due to too many requests, \
try again later. Detailed information: '+tb)
os._exit(-1)
except RuntimeError:
tb = traceback.format_exc()
print('OSM request could not be completed. \n Cause: OSM returned an unknown error. Detailed information: '+tb)
os._exit(-1) | 42.217391 | 119 | 0.655252 |
3096e62fc795b3664bbbb47928d04949ea162bc0 | 563 | py | Python | marco/portal/ocean_stories/migrations/0012_auto_20160225_0012.py | Ecotrust/marco-portal2 | 13bb1b444c7605e1de3c88313d36abc1f463d1f5 | [
"0BSD"
] | 4 | 2016-09-24T00:57:45.000Z | 2019-07-28T23:35:15.000Z | marco/portal/ocean_stories/migrations/0012_auto_20160225_0012.py | MidAtlanticPortal/marco-portal2 | b47e7bfa171e98a6cf499b2d411fc743caae91c2 | [
"0BSD"
] | 146 | 2016-09-27T23:16:52.000Z | 2022-03-09T16:55:32.000Z | marco/portal/ocean_stories/migrations/0012_auto_20160225_0012.py | Ecotrust/marco-portal2 | 13bb1b444c7605e1de3c88313d36abc1f463d1f5 | [
"0BSD"
] | 1 | 2019-07-03T23:42:05.000Z | 2019-07-03T23:42:05.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ocean_stories', '0011_oceanstory_display_home_page'),
]
operations = [
migrations.AlterField(
model_name='oceanstorysection',
name='media_embed_url',
field=models.URLField(help_text=b"The URL to a video that you'd like to embed, e.g., https://vimeo.com/121095661.", blank=True),
preserve_default=True,
),
]
| 26.809524 | 140 | 0.642984 |
13350a73da938487dbd5e166ca4e9e9026b8cafa | 7,944 | py | Python | problems/vrp/problem_vrp.py | fmxFranky/attention-learn-to-route | dd0d945c93f4a924244dd71edd9c1a836793adbb | [
"MIT"
] | null | null | null | problems/vrp/problem_vrp.py | fmxFranky/attention-learn-to-route | dd0d945c93f4a924244dd71edd9c1a836793adbb | [
"MIT"
] | null | null | null | problems/vrp/problem_vrp.py | fmxFranky/attention-learn-to-route | dd0d945c93f4a924244dd71edd9c1a836793adbb | [
"MIT"
] | null | null | null | import os
import pickle
import torch
from torch.utils.data import Dataset
from problems.vrp.state_cvrp import StateCVRP
from problems.vrp.state_sdvrp import StateSDVRP
from utils.beam_search import beam_search
class CVRP(object):
NAME = 'cvrp' # Capacitated Vehicle Routing Problem
VEHICLE_CAPACITY = 1.0 # (w.l.o.g. vehicle capacity is 1, demands should be scaled)
@staticmethod
def get_costs(dataset, pi):
batch_size, graph_size = dataset['demand'].size()
# Check that tours are valid, i.e. contain 0 to n -1
sorted_pi = pi.data.sort(1)[0]
# Sorting it should give all zeros at front and then 1...n
assert (torch.arange(1, graph_size + 1, out=pi.data.new()).view(
1, -1).expand(batch_size, graph_size)
== sorted_pi[:, -graph_size:]).all() and (
sorted_pi[:, :-graph_size] == 0).all(), "Invalid tour"
# Visiting depot resets capacity so we add demand = -capacity (we make sure it does not become negative)
demand_with_depot = torch.cat(
(torch.full_like(dataset['demand'][:, :1],
-CVRP.VEHICLE_CAPACITY), dataset['demand']), 1)
d = demand_with_depot.gather(1, pi)
used_cap = torch.zeros_like(dataset['demand'][:, 0])
for i in range(pi.size(1)):
used_cap += d[:,
i] # This will reset/make capacity negative if i == 0, e.g. depot visited
# Cannot use less than 0
used_cap[used_cap < 0] = 0
assert (used_cap <= CVRP.VEHICLE_CAPACITY +
1e-5).all(), "Used more than capacity"
# Gather dataset in order of tour
loc_with_depot = torch.cat(
(dataset['depot'][:, None, :], dataset['loc']), 1)
d = loc_with_depot.gather(
1, pi[..., None].expand(*pi.size(), loc_with_depot.size(-1)))
# Length is distance (L2-norm of difference) of each next location to its prev and of first and last to depot
return ((d[:, 1:] - d[:, :-1]).norm(p=2, dim=2).sum(1) +
(d[:, 0] - dataset['depot']).norm(p=2, dim=1) # Depot to first
+ (d[:, -1] - dataset['depot']).norm(
p=2, dim=1) # Last to depot, will be 0 if depot is last
), None
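    # Cost sketch (illustrative): for a single instance with pi = [1, 2, 0, 3],
    # index 0 is the depot, so the route is depot -> 1 -> 2 -> depot -> 3 -> depot;
    # gathering coordinates in tour order and summing consecutive L2 distances,
    # plus the first/last depot legs above, yields that route's total length.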
@staticmethod
def make_dataset(*args, **kwargs):
return VRPDataset(*args, **kwargs)
@staticmethod
def make_state(*args, **kwargs):
return StateCVRP.initialize(*args, **kwargs)
@staticmethod
def beam_search(input,
beam_size,
expand_size=None,
compress_mask=False,
model=None,
max_calc_batch_size=4096):
assert model is not None, "Provide model"
fixed = model.precompute_fixed(input)
def propose_expansions(beam):
return model.propose_expansions(
beam,
fixed,
expand_size,
normalize=True,
max_calc_batch_size=max_calc_batch_size)
state = CVRP.make_state(
input, visited_dtype=torch.int64 if compress_mask else torch.uint8)
return beam_search(state, beam_size, propose_expansions)
class SDVRP(object):
NAME = 'sdvrp' # Split Delivery Vehicle Routing Problem
VEHICLE_CAPACITY = 1.0 # (w.l.o.g. vehicle capacity is 1, demands should be scaled)
@staticmethod
def get_costs(dataset, pi):
batch_size, graph_size = dataset['demand'].size()
# Each node can be visited multiple times, but we always deliver as much demand as possible
# We check that at the end all demand has been satisfied
demands = torch.cat(
(torch.full_like(dataset['demand'][:, :1],
-SDVRP.VEHICLE_CAPACITY), dataset['demand']), 1)
rng = torch.arange(batch_size, out=demands.data.new().long())
used_cap = torch.zeros_like(dataset['demand'][:, 0])
a_prev = None
for a in pi.transpose(0, 1):
assert a_prev is None or (demands[((a_prev == 0) & (a == 0)), :] == 0).all(), \
"Cannot visit depot twice if any nonzero demand"
d = torch.min(demands[rng, a], SDVRP.VEHICLE_CAPACITY - used_cap)
demands[rng, a] -= d
used_cap += d
used_cap[a == 0] = 0
a_prev = a
assert (demands == 0).all(), "All demand must be satisfied"
# Gather dataset in order of tour
loc_with_depot = torch.cat(
(dataset['depot'][:, None, :], dataset['loc']), 1)
d = loc_with_depot.gather(
1, pi[..., None].expand(*pi.size(), loc_with_depot.size(-1)))
# Length is distance (L2-norm of difference) of each next location to its prev and of first and last to depot
return ((d[:, 1:] - d[:, :-1]).norm(p=2, dim=2).sum(1) +
(d[:, 0] - dataset['depot']).norm(p=2, dim=1) # Depot to first
+ (d[:, -1] - dataset['depot']).norm(
p=2, dim=1) # Last to depot, will be 0 if depot is last
), None
@staticmethod
def make_dataset(*args, **kwargs):
return VRPDataset(*args, **kwargs)
@staticmethod
def make_state(*args, **kwargs):
return StateSDVRP.initialize(*args, **kwargs)
@staticmethod
def beam_search(input,
beam_size,
expand_size=None,
compress_mask=False,
model=None,
max_calc_batch_size=4096):
assert model is not None, "Provide model"
assert not compress_mask, "SDVRP does not support compression of the mask"
fixed = model.precompute_fixed(input)
def propose_expansions(beam):
return model.propose_expansions(
beam,
fixed,
expand_size,
normalize=True,
max_calc_batch_size=max_calc_batch_size)
state = SDVRP.make_state(input)
return beam_search(state, beam_size, propose_expansions)
def make_instance(args):
depot, loc, demand, capacity, *args = args
grid_size = 1
if len(args) > 0:
depot_types, customer_types, grid_size = args
return {
'loc': torch.tensor(loc, dtype=torch.float) / grid_size,
'demand': torch.tensor(demand, dtype=torch.float) / capacity,
'depot': torch.tensor(depot, dtype=torch.float) / grid_size
}
class VRPDataset(Dataset):
def __init__(self,
filename=None,
size=50,
num_samples=1000000,
offset=0,
distribution=None):
super(VRPDataset, self).__init__()
self.data_set = []
if filename is not None:
assert os.path.splitext(filename)[1] == '.pkl'
with open(filename, 'rb') as f:
data = pickle.load(f)
self.data = [
make_instance(args)
for args in data[offset:offset + num_samples]
]
else:
# From VRP with RL paper https://arxiv.org/abs/1802.04240
CAPACITIES = {10: 20., 20: 30., 50: 40., 100: 50.}
self.data = [
{
'loc':
torch.FloatTensor(size, 2).uniform_(0, 1),
# Uniform 1 - 9, scaled by capacities
'demand':
(torch.FloatTensor(size).uniform_(0, 9).int() + 1).float()
/ CAPACITIES[size],
'depot':
torch.FloatTensor(2).uniform_(0, 1)
} for i in range(num_samples)
]
self.size = len(self.data)
def __len__(self):
return self.size
def __getitem__(self, idx):
return self.data[idx]
| 35.623318 | 117 | 0.549975 |
9feb994e715844950134a595ea2b9b779335f3bb | 9,581 | py | Python | tests/python/unittest/test_tir_schedule_tensorize_ldmatrix_mma.py | shengxinhu/tvm | 06c443e9959452c6da3a911fe0c11e08c5554477 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | tests/python/unittest/test_tir_schedule_tensorize_ldmatrix_mma.py | shengxinhu/tvm | 06c443e9959452c6da3a911fe0c11e08c5554477 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | tests/python/unittest/test_tir_schedule_tensorize_ldmatrix_mma.py | shengxinhu/tvm | 06c443e9959452c6da3a911fe0c11e08c5554477 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=missing-docstring
import tvm
from tvm import te
from tvm.tir.tensor_intrin.cuda import (
LDMATRIX_16x16_A_INTRIN,
LDMATRIX_16x16_B_INTRIN,
LDMATRIX_16x16_B_TRANS_INTRIN,
LDMATRIX_16x32_A_INTRIN,
LDMATRIX_32x16_B_INTRIN,
LDMATRIX_16x32_B_TRANS_INTRIN,
MMA_f16f16f32_INTRIN,
MMA_f16f16f32_TRANS_INTRIN,
MMA_f16f16f16_INTRIN,
MMA_f16f16f16_TRANS_INTRIN,
MMA_i8i8i32_INTRIN,
MMA_i8i8i32_TRANS_INTRIN,
MMA_fill_16x16_f32_INTRIN,
MMA_fill_16x16_f16_INTRIN,
MMA_fill_16x16_i32_INTRIN,
MMA_store_16x16_f32_global_INTRIN,
MMA_store_16x16_f16_global_INTRIN,
MMA_store_16x16_i32_global_INTRIN,
shared_16x16_to_ldmatrix_32x8_layout,
shared_32x16_to_ldmatrix_32x16_layout,
shared_16x32_to_ldmatrix_32x16_layout,
)
import tvm.testing
import numpy as np
from tvm.testing.tir import mma_schedule
M = 4096
N = 4096
K = 4096
measure_perf = False
gflops = (N * M * K) * 2 / 1e9
def matmul(m, n, k, in_dtype, out_dtype, b_transposed):
b_shape = (n, k) if b_transposed else (k, n)
a = te.placeholder((m, k), name="A", dtype=in_dtype)
b = te.placeholder(b_shape, name="B", dtype=in_dtype)
k = te.reduce_axis((0, k), name="k")
def maybe_cast(v):
if in_dtype != out_dtype:
return tvm.tir.Cast(out_dtype, v)
return v
def maybe_swap(i, j):
if b_transposed:
return j, i
return i, j
c = te.compute(
(m, n),
lambda i, j: te.sum(maybe_cast(a[i, k]) * maybe_cast(b[maybe_swap(k, j)]), axis=[k]),
name="C",
)
return (a, b, c)
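# Shape sketch (illustrative): matmul(16, 16, 16, "float16", "float32", True)
# declares A as (16, 16) and B as (n, k) = (16, 16), and computes
# C[i, j] = sum_k cast_f32(A[i, k]) * cast_f32(B[j, k]), i.e. maybe_swap
# flips the B index order whenever b_transposed is True.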
def is_ampere_or_newer():
arch = tvm.contrib.nvcc.get_target_compute_version()
major, _ = tvm.contrib.nvcc.parse_compute_version(arch)
return major >= 8
def run_test(
k_inner,
in_dtype,
out_dtype,
b_transposed,
i_factors,
j_factors,
k_factors,
index_map_A,
index_map_B,
index_map_C,
ldmatrix_a_intrin,
ldmatrix_b_intrin,
mma_intrin,
mma_fill_intrin,
mma_store_intrin,
):
sch = mma_schedule(
te.create_prim_func(matmul(M, N, K, in_dtype, out_dtype, b_transposed)),
k_inner,
in_dtype,
b_transposed,
i_factors,
j_factors,
k_factors,
index_map_A,
index_map_B,
index_map_C,
ldmatrix_a_intrin,
ldmatrix_b_intrin,
mma_intrin,
mma_fill_intrin,
mma_store_intrin,
)
if not is_ampere_or_newer():
return None
f = tvm.build(sch.mod["main"], target="cuda", name="dense")
dev = tvm.device("cuda", 0)
if in_dtype == "float16":
a_np = np.random.uniform(size=(M, K)).astype("float16")
if b_transposed:
b_np = np.random.uniform(size=(N, K)).astype("float16")
c_np = np.dot(a_np.astype("float32"), b_np.astype("float32").transpose()).astype(
out_dtype
)
else:
b_np = np.random.uniform(size=(K, N)).astype("float16")
c_np = np.dot(a_np.astype("float32"), b_np.astype("float32")).astype(out_dtype)
else:
a_np = np.random.randint(-128, 128, (M, K)).astype("int8")
if b_transposed:
b_np = np.random.randint(-128, 128, (N, K)).astype("int8")
c_np = np.dot(a_np.astype("float32"), b_np.astype("float32").transpose()).astype(
"int32"
)
else:
b_np = np.random.randint(-128, 128, (K, N)).astype("int8")
c_np = np.dot(a_np.astype("float32"), b_np.astype("float32")).astype("int32")
a = tvm.nd.array(a_np, dev)
b = tvm.nd.array(b_np, dev)
c = tvm.nd.array(np.zeros((M, N), dtype=out_dtype), dev)
f(a, b, c)
if out_dtype != "float16":
# The numpy reference is computed with fp32 precision (otherwise too slow).
# So there is non-trivial accuracy difference if TVM result is computed with fp16 accumulation.
tvm.testing.assert_allclose(c.numpy(), c_np, rtol=1e-3)
return lambda: f.time_evaluator(f.entry_name, dev, number=500)(a, b, c)
@tvm.testing.requires_cuda
def test_f16f16f32_m16n16k16():
def index_map(i, j):
return (
i // 16,
j // 16,
*shared_16x16_to_ldmatrix_32x8_layout(i % 16, j % 16),
)
k_inner = 16
in_dtype = "float16"
out_dtype = "float32"
i_factors, j_factors, k_factors = [4, 8, 2, 4, 1], [1, 64, 2, 1, 2], [128, 2, 1]
timer = run_test(
k_inner,
in_dtype,
out_dtype,
False, # b_transposed
i_factors,
j_factors,
k_factors,
index_map,
index_map,
index_map,
LDMATRIX_16x16_A_INTRIN,
LDMATRIX_16x16_B_INTRIN,
MMA_f16f16f32_INTRIN,
MMA_fill_16x16_f32_INTRIN,
MMA_store_16x16_f32_global_INTRIN,
)
if measure_perf and timer:
print("f16f16f32_m16n16k16: %f GFLOPS" % (gflops / (timer().mean)))
timer = run_test(
k_inner,
in_dtype,
out_dtype,
True, # b_transposed
i_factors,
j_factors,
k_factors,
index_map,
index_map,
index_map,
LDMATRIX_16x16_A_INTRIN,
LDMATRIX_16x16_B_TRANS_INTRIN,
MMA_f16f16f32_TRANS_INTRIN,
MMA_fill_16x16_f32_INTRIN,
MMA_store_16x16_f32_global_INTRIN,
)
if measure_perf and timer:
print("f16f16f32_m16n16k16_trans: %f GFLOPS" % (gflops / (timer().mean)))
@tvm.testing.requires_cuda
def test_f16f16f16_m16n16k16():
def index_map(i, j):
return (
i // 16,
j // 16,
*shared_16x16_to_ldmatrix_32x8_layout(i % 16, j % 16),
)
k_inner = 16
in_dtype = "float16"
out_dtype = "float16"
i_factors, j_factors, k_factors = [16, 2, 1, 4, 2], [16, 2, 2, 1, 4], [128, 2, 1]
timer = run_test(
k_inner,
in_dtype,
out_dtype,
False, # b_transposed
i_factors,
j_factors,
k_factors,
index_map,
index_map,
index_map,
LDMATRIX_16x16_A_INTRIN,
LDMATRIX_16x16_B_INTRIN,
MMA_f16f16f16_INTRIN,
MMA_fill_16x16_f16_INTRIN,
MMA_store_16x16_f16_global_INTRIN,
)
if measure_perf and timer:
print("f16f16f16_m16n16k16: %f GFLOPS" % (gflops / (timer().mean)))
timer = run_test(
k_inner,
in_dtype,
out_dtype,
True, # b_transposed
i_factors,
j_factors,
k_factors,
index_map,
index_map,
index_map,
LDMATRIX_16x16_A_INTRIN,
LDMATRIX_16x16_B_TRANS_INTRIN,
MMA_f16f16f16_TRANS_INTRIN,
MMA_fill_16x16_f16_INTRIN,
MMA_store_16x16_f16_global_INTRIN,
)
if measure_perf and timer:
print("f16f16f16_m16n16k16_trans: %f GFLOPS" % (gflops / (timer().mean)))
@tvm.testing.requires_cuda
def test_i8i8i32_m16n16k32():
def index_map_A(i, j):
return (
i // 16,
j // 32,
*shared_16x32_to_ldmatrix_32x16_layout(i % 16, j % 32),
)
def index_map_B(i, j):
return (
i // 32,
j // 16,
*shared_32x16_to_ldmatrix_32x16_layout(i % 32, j % 16),
)
def index_map_C(i, j):
return (
i // 16,
j // 16,
*shared_16x16_to_ldmatrix_32x8_layout(i % 16, j % 16),
)
k_inner = 32
in_dtype = "int8"
out_dtype = "int32"
i_factors, j_factors, k_factors = [1, 32, 1, 4, 2], [8, 4, 4, 2, 1], [32, 2, 2]
timer = run_test(
k_inner,
in_dtype,
out_dtype,
False, # b_transposed
i_factors,
j_factors,
k_factors,
index_map_A,
index_map_B,
index_map_C,
LDMATRIX_16x32_A_INTRIN,
LDMATRIX_32x16_B_INTRIN,
MMA_i8i8i32_INTRIN,
MMA_fill_16x16_i32_INTRIN,
MMA_store_16x16_i32_global_INTRIN,
)
if measure_perf and timer:
print("i8i8i32_m16n16k32: %f GOPS" % (gflops / (timer().mean)))
timer = run_test(
k_inner,
in_dtype,
out_dtype,
True, # b_transposed
i_factors,
j_factors,
k_factors,
index_map_A,
index_map_A,
index_map_C,
LDMATRIX_16x32_A_INTRIN,
LDMATRIX_16x32_B_TRANS_INTRIN,
MMA_i8i8i32_TRANS_INTRIN,
MMA_fill_16x16_i32_INTRIN,
MMA_store_16x16_i32_global_INTRIN,
)
if measure_perf and timer:
print("i8i8i32_m16n16k32_trans: %f GOPS" % (gflops / (timer().mean)))
if __name__ == "__main__":
test_f16f16f32_m16n16k16()
test_f16f16f16_m16n16k16()
test_i8i8i32_m16n16k32()
| 27.141643 | 103 | 0.614028 |
50b03b85adb9a91023a7de8a135b4a4877230876 | 3,258 | py | Python | selfdrive/car/toyota/tunes.py | KexianShen/openpilot | bf58e2f7edce20e4fed5bb8c147aca40cd1d91bc | ["MIT"] | null | null | null | selfdrive/car/toyota/tunes.py | KexianShen/openpilot | bf58e2f7edce20e4fed5bb8c147aca40cd1d91bc | ["MIT"] | null | null | null | selfdrive/car/toyota/tunes.py | KexianShen/openpilot | bf58e2f7edce20e4fed5bb8c147aca40cd1d91bc | ["MIT"] | null | null | null |
#!/usr/bin/env python3
from enum import Enum
class LongTunes(Enum):
PEDAL = 0
TSS2 = 1
TSS = 2
class LatTunes(Enum):
INDI_PRIUS = 0
LQR_RAV4 = 1
PID_A = 2
PID_B = 3
PID_C = 4
PID_D = 5
PID_E = 6
PID_F = 7
PID_G = 8
PID_I = 9
PID_H = 10
PID_J = 11
PID_K = 12
PID_L = 13
PID_M = 14
PID_N = 15
TORQUE = 16
###### LONG ######
def set_long_tune(tune, name):
# Improved longitudinal tune
if name == LongTunes.TSS2 or name == LongTunes.PEDAL:
tune.deadzoneBP = [0., 8.05]
tune.deadzoneV = [.0, .14]
tune.kpBP = [0., 5., 20.]
tune.kpV = [1.3, 1.0, 0.7]
tune.kiBP = [0., 5., 12., 20., 27.]
tune.kiV = [.35, .23, .20, .17, .1]
# Default longitudinal tune
elif name == LongTunes.TSS:
tune.deadzoneBP = [0., 9.]
tune.deadzoneV = [0., .15]
tune.kpBP = [0., 5., 35.]
tune.kiBP = [0., 35.]
tune.kpV = [3.6, 2.4, 1.5]
tune.kiV = [0.54, 0.36]
else:
raise NotImplementedError('This longitudinal tune does not exist')
###### LAT ######
def set_lat_tune(tune, name, MAX_LAT_ACCEL=2.5, FRICTION=.1):
if name == LatTunes.TORQUE:
tune.init('torque')
tune.torque.useSteeringAngle = True
tune.torque.kp = 1.0 / MAX_LAT_ACCEL
tune.torque.kf = 1.0 / MAX_LAT_ACCEL
tune.torque.ki = 0.25 / MAX_LAT_ACCEL
tune.torque.friction = FRICTION
elif name == LatTunes.INDI_PRIUS:
tune.init('indi')
tune.indi.innerLoopGainBP = [0.]
tune.indi.innerLoopGainV = [4.0]
tune.indi.outerLoopGainBP = [0.]
tune.indi.outerLoopGainV = [3.0]
tune.indi.timeConstantBP = [0.]
tune.indi.timeConstantV = [1.0]
tune.indi.actuatorEffectivenessBP = [0.]
tune.indi.actuatorEffectivenessV = [1.0]
elif 'PID' in str(name):
tune.init('pid')
tune.pid.kiBP = [0.0]
tune.pid.kpBP = [0.0]
if name == LatTunes.PID_A:
tune.pid.kpV = [0.2]
tune.pid.kiV = [0.05]
tune.pid.kf = 0.00003
elif name == LatTunes.PID_C:
tune.pid.kpV = [0.6]
tune.pid.kiV = [0.1]
tune.pid.kf = 0.00006
elif name == LatTunes.PID_D:
tune.pid.kpV = [0.6]
tune.pid.kiV = [0.1]
tune.pid.kf = 0.00007818594
elif name == LatTunes.PID_F:
tune.pid.kpV = [0.723]
tune.pid.kiV = [0.0428]
tune.pid.kf = 0.00006
elif name == LatTunes.PID_G:
tune.pid.kpV = [0.18]
tune.pid.kiV = [0.015]
tune.pid.kf = 0.00012
elif name == LatTunes.PID_H:
tune.pid.kpV = [0.17]
tune.pid.kiV = [0.03]
tune.pid.kf = 0.00006
elif name == LatTunes.PID_I:
tune.pid.kpV = [0.15]
tune.pid.kiV = [0.05]
tune.pid.kf = 0.00004
elif name == LatTunes.PID_J:
tune.pid.kpV = [0.19]
tune.pid.kiV = [0.02]
tune.pid.kf = 0.00007818594
elif name == LatTunes.PID_L:
tune.pid.kpV = [0.3]
tune.pid.kiV = [0.05]
tune.pid.kf = 0.00006
elif name == LatTunes.PID_M:
tune.pid.kpV = [0.3]
tune.pid.kiV = [0.05]
tune.pid.kf = 0.00007
elif name == LatTunes.PID_N:
tune.pid.kpV = [0.35]
tune.pid.kiV = [0.15]
tune.pid.kf = 0.00007818594
else:
raise NotImplementedError('This PID tune does not exist')
else:
raise NotImplementedError('This lateral tune does not exist')
| 26.487805 | 70 | 0.579497 |
41cb25771bb90e7e334f4d31d5c1a88cb4ee641a | 30,293 | py | Python | autograder/autograder/execution_environments/container_network.py | sballance/Submitty | 457fd3bf176960194985a067b9f32b3a653e3ff4 | [
"BSD-3-Clause"
] | null | null | null | autograder/autograder/execution_environments/container_network.py | sballance/Submitty | 457fd3bf176960194985a067b9f32b3a653e3ff4 | [
"BSD-3-Clause"
] | null | null | null | autograder/autograder/execution_environments/container_network.py | sballance/Submitty | 457fd3bf176960194985a067b9f32b3a653e3ff4 | [
"BSD-3-Clause"
] | null | null | null | import json
import os
import subprocess
import traceback
import time
from pwd import getpwnam
import shutil
import docker
from submitty_utils import dateutils
from . import secure_execution_environment, rlimit_utils
from .. import autograding_utils
class Container():
"""
Containers are the building blocks of a container network. Containers know how to
create, start, and cleanup after themselves. Note that a network of containers
can be made up of 1 or more containers.
"""
def __init__(self, container_info, untrusted_user, testcase_directory, more_than_one, is_test_environment, log_function):
self.name = container_info['container_name']
# If there are multiple containers, each gets its own directory under testcase_directory, otherwise,
# we can just use testcase_directory
self.directory = os.path.join(testcase_directory, self.name) if more_than_one else testcase_directory
# Check if we are the router in a container network
self.is_router = True if self.name == 'router' else False
self.image = container_info['container_image']
self.is_server = container_info['server']
self.outgoing_connections = container_info['outgoing_connections']
self.container_rlimits = container_info['container_rlimits']
# If we are in production, we need to run as an untrusted user inside of our docker container.
self.container_user_argument = str(getpwnam(untrusted_user).pw_uid)
self.full_name = f'{untrusted_user}_{self.name}'
self.tcp_port_range = container_info['tcp_port_range']
self.udp_port_range = container_info['udp_port_range']
# This will be populated later
self.return_code = None
self.log_function = log_function
self.container = None
# A socket for communication with the container.
self.socket = None
# Maps a network name to an ip address
self.ip_address_map = dict()
# Determine whether or not we need to pull the default submitty router into this container's directory.
need_router = container_info.get('import_default_router', False)
if self.name == 'router' and need_router:
self.import_router = True
else:
self.import_router = False
def create(self, execution_script, arguments, more_than_one):
""" Create (but don't start) this container. """
client = docker.from_env()
mount = {
self.directory : {
'bind' : self.directory,
'mode' : 'rw'
}
}
# Only pass container name to testcases with greater than one container. (Doing otherwise breaks compilation)
container_name_argument = ['--container_name', self.name] if more_than_one else list()
container_ulimits = rlimit_utils.build_ulimit_argument(self.container_rlimits)
# A server container does not run student code, but instead hosts a service (e.g. a database.)
try:
if self.is_server:
self.container = client.containers.create(self.image, stdin_open = True, tty = True, network = 'none',
volumes = mount, working_dir = self.directory, name = self.full_name)
else:
command = [execution_script,] + arguments + container_name_argument
self.container = client.containers.create(self.image, command = command, ulimits = container_ulimits, stdin_open = True,
tty = True, network = 'none', user = self.container_user_argument, volumes=mount,
working_dir = self.directory, hostname = self.name, name = self.full_name)
except docker.errors.ImageNotFound:
self.log_function(f'ERROR: The image {self.image} is not available on this worker')
raise
    dockerlaunch_done = dateutils.get_current_time()
    self.log_function(f'{dockerlaunch_done} docker container {self.container.short_id} created')
def start(self, logfile):
self.container.start()
self.socket = self.container.attach_socket(params={'stdin': 1, 'stream': 1})
def set_ip_address(self, network_name, ip_address):
self.ip_address_map[network_name] = ip_address
def get_ip_address(self, network_name):
return self.ip_address_map[network_name]
def cleanup_container(self):
""" Remove this container. """
if not self.is_server:
status = self.container.wait()
self.return_code = status['StatusCode']
self.container.remove(force=True)
self.log_function(f'{dateutils.get_current_time()} docker container {self.container.short_id} destroyed')
class ContainerNetwork(secure_execution_environment.SecureExecutionEnvironment):
"""
A Container Network ensures a secure execution environment by executing instances of student code
within a secure Docker Container. To add an extra layer of security, files and directories are carefully
permissioned, and code is executed as a limited-access, untrusted user. Therefore, code is effectively
run in a Jailed Sandbox within the container. Containers may be networked together to test networked
gradeables.
"""
def __init__(self, job_id, untrusted_user, testcase_directory, is_vcs, is_batch_job, complete_config_obj,
testcase_info, autograding_directory, log_path, stack_trace_log_path, is_test_environment):
super().__init__(job_id, untrusted_user, testcase_directory, is_vcs, is_batch_job, complete_config_obj,
testcase_info, autograding_directory, log_path, stack_trace_log_path, is_test_environment)
containers = list()
container_specs = testcase_info.get('containers', list())
solution_container_specs = testcase_info.get('solution_containers', list())
gradeable_rlimits = complete_config_obj.get('resource_limits', {})
# If there are container specifications in the complete_config, create objects for them,
# else, create a single default container.
if len(container_specs) > 0:
greater_than_one = True if len(container_specs) > 1 else False
current_tcp_port = 9000
current_udp_port = 15000
for container_spec in container_specs:
container_spec['container_rlimits'] = gradeable_rlimits
container_spec['tcp_port_range'] = (current_tcp_port, current_tcp_port + container_spec.get('number_of_ports', 1) - 1)
container_spec['udp_port_range'] = (current_udp_port, current_udp_port + container_spec.get('number_of_ports', 1) - 1)
current_udp_port += container_spec.get('number_of_ports', 1)
current_tcp_port += container_spec.get('number_of_ports', 1)
containers.append(Container(container_spec, untrusted_user, os.path.join(self.tmp_work, testcase_directory), greater_than_one, self.is_test_environment, self.log_message))
else:
container_spec = {
        'container_name' : 'temporary_container',
'container_image' : 'submitty/autograding-default:latest',
'server' : False,
'outgoing_connections' : [],
'container_rlimits': gradeable_rlimits,
'tcp_port_range' : (9000, 9000),
        'udp_port_range' : (15000, 15000)
}
containers.append(Container(container_spec, untrusted_user, os.path.join(self.tmp_work, testcase_directory), False, self.is_test_environment, self.log_message))
self.containers = containers
# Solution containers are a network of containers which run instructor code.
# We instantiate objects for them in a similar manner to the way that we instantiate
# execution containers (above), but do not add a default container if they are not present.
solution_containers = list()
greater_than_one_solution_container = True if len(solution_container_specs) > 1 else False
current_tcp_port = 9000
current_udp_port = 15000
for solution_container_spec in solution_container_specs:
solution_container_spec['container_rlimits'] = gradeable_rlimits
solution_container_spec['tcp_port_range'] = (current_tcp_port, current_tcp_port + solution_container_spec.get('number_of_ports', 1) - 1)
solution_container_spec['udp_port_range'] = (current_udp_port, current_udp_port + solution_container_spec.get('number_of_ports', 1) - 1)
current_udp_port += solution_container_spec.get('number_of_ports', 1)
current_tcp_port += solution_container_spec.get('number_of_ports', 1)
solution_containers.append(Container(solution_container_spec, untrusted_user, self.random_output_directory, greater_than_one_solution_container, self.is_test_environment, self.log_message))
self.solution_containers = solution_containers
# Check for dispatcher actions (standard input)
self.dispatcher_actions = testcase_info.get('dispatcher_actions', list())
# As new container networks are generated, they will be appended to this list.
self.networks = list()
###########################################################
#
# Container Network Functions
#
###########################################################
def get_router(self, containers):
""" Given a set of containers, return the router. """
for container in containers:
if container.name == 'router':
return container
return None
def get_server_containers(self, all_containers):
""" Given a set of containers, return any server containers. """
containers = list()
for container in all_containers:
if container.name != 'router' and container.is_server == True:
containers.append(container)
return containers
def get_standard_containers(self, all_containers):
""" Given a set of containers, return all non-router, non-server containers. """
containers = list()
for container in all_containers:
if container.name != 'router' and container.is_server == False:
containers.append(container)
return containers
def create_containers(self, containers, script, arguments):
""" Given a set of containers, create each of them. """
try:
self.verify_execution_status()
except Exception as e:
self.log_stack_trace(traceback.format_exc())
self.log("ERROR: Could not verify execution mode status.")
return
more_than_one = True if len(containers) > 1 else False
for container in containers:
my_script = os.path.join(container.directory, script)
container.create(my_script, arguments, more_than_one)
def network_containers(self, containers):
""" Given a set of containers, network them per their specifications. """
if len(containers) <= 1:
return
client = docker.from_env()
none_network = client.networks.get('none')
#remove all containers from the none network
for container in containers:
none_network.disconnect(container.container, force=True)
if self.get_router(containers) is not None:
self.network_containers_with_router(containers)
else:
self.network_containers_routerless(containers)
# Provide an initialization file to each container.
self.create_knownhosts_txt(containers)
self.create_knownhosts_json(containers)
def network_containers_routerless(self, containers):
""" If there is no router, all containers are added to the same network. """
client = docker.from_env()
network_name = f'{self.untrusted_user}_routerless_network'
# Assumes untrustedXX naming scheme, where XX is a number
untrusted_num = int(self.untrusted_user.replace('untrusted','')) + 100
subnet = 1
ip_address_start = f'10.{untrusted_num}.{subnet}'
ipam_pool = docker.types.IPAMPool(subnet=f'{ip_address_start}.0/24')
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
#create the global network
# TODO: Can fail on ip conflict.
network = client.networks.create(network_name, driver='bridge', ipam=ipam_config, internal=True)
host = 2
for container in containers:
ip_address = f'{ip_address_start}.{host}'
network.connect(container.container, ipv4_address=ip_address, aliases=[container.name,])
container.set_ip_address(network_name, ip_address)
host+=1
self.networks.append(network)
def network_containers_with_router(self, containers):
"""
If there is a router, all containers are added to their own network, on which the only other
endpoint is the router, which has been aliased to impersonate all other reachable endpoints.
"""
client = docker.from_env()
router = self.get_container_with_name('router', containers)
router_connections = dict()
network_num = 10
subnet = 1
# Assumes untrustedXX naming scheme, where XX is a number
untrusted_num = int(self.untrusted_user.replace('untrusted','')) + 100
container_to_subnet = dict()
for container in containers:
network_name = f"{container.full_name}_network"
if container.name == 'router':
continue
# We are creating a new subnet with a new subnet number
subnet += 1
# We maintain a map of container_name to subnet for use by the router.
container_to_subnet[container.name] = subnet
actual_name = '{0}_Actual'.format(container.name)
# Create the network with the appropriate iprange
ipam_pool = docker.types.IPAMPool( subnet=f'{network_num}.{untrusted_num}.{subnet}.0/24')
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
network = client.networks.create(network_name, ipam=ipam_config, driver='bridge', internal=True)
      # We connect the container at host address .2. Later we'll connect the router at .3.
container_ip = f'{network_num}.{untrusted_num}.{subnet}.2'
container.set_ip_address(network_name, container_ip)
network.connect(container.container, ipv4_address=container_ip, aliases=[actual_name,])
self.networks.append(network)
#The router pretends to be all dockers on this network.
if len(container.outgoing_connections) == 0:
connected_machines = [x.name for x in containers]
else:
connected_machines = container.outgoing_connections
for connected_machine in connected_machines:
if connected_machine == 'router':
continue
if connected_machine == container.name:
continue
if not container.name in router_connections:
router_connections[container.name] = list()
if not connected_machine in router_connections:
router_connections[connected_machine] = list()
#The router must be in both endpoints' network, and must connect to all endpoints on a network simultaneously,
# so we group together all connections here, and then connect later.
router_connections[container.name].append(connected_machine)
router_connections[connected_machine].append(container.name)
# Connect the router to all networks.
for startpoint, endpoints in router_connections.items():
full_startpoint_name = f'{self.untrusted_user}_{startpoint}'
network_name = f"{full_startpoint_name}_network"
# Store the ip address of the router on this network
router_ip = f'{network_num}.{untrusted_num}.{container_to_subnet[startpoint]}.3'
router.set_ip_address(network_name, router_ip)
aliases = []
for endpoint in endpoints:
if endpoint in aliases:
continue
aliases.append(endpoint)
network = self.get_network_with_name(network_name)
network.connect(router.container, ipv4_address=router_ip, aliases=aliases)
def cleanup_networks(self):
""" Destroy all created networks. """
for network in self.networks:
try:
network.remove()
self.log_message(f'{dateutils.get_current_time()} docker network {network} destroyed')
except Exception as e:
self.log_message(f'{dateutils.get_current_time()} ERROR: Could not remove docker network {network}')
self.networks.clear()
def create_knownhosts_json(self, containers):
"""
Given a set of containers, add initialization files to each
container's directory which specify how to connect to other endpoints
on the container's network (hostname, port).
"""
#writing complete knownhost JSON to the container directory
router = self.get_router(containers)
sorted_networked_containers = sorted(containers, key=lambda x: x.name)
for container in sorted_networked_containers:
knownhosts_location = os.path.join(container.directory, 'knownhosts.json')
container_knownhost = dict()
container_knownhost['hosts'] = dict()
if len(container.outgoing_connections) == 0:
connections = [x.name for x in containers]
else:
connections = container.outgoing_connections
if not container.name in connections:
connections.append(container.name)
sorted_connections = sorted(connections)
for connected_container_name in sorted_connections:
connected_container = self.get_container_with_name(connected_container_name, containers)
network_name = f"{container.full_name}_network"
# If there is a router, the router is impersonating all other
# containers, but has only one ip address.
if router is not None:
# Even if we are injecting the router, we know who WE are.
if container.name == 'router' and connected_container_name == 'router':
continue
elif container.name == connected_container_name:
network_name = f"{container.full_name}_network"
ip_address = container.get_ip_address(network_name)
# If this node is not the router, we must inject the router
elif container.name != 'router':
# Get the router's ip on the container's network
network_name = f"{container.full_name}_network"
ip_address = router.get_ip_address(network_name)
else:
# If we are the router, get the connected container's ip on its own network
network_name = f"{self.untrusted_user}_{connected_container_name}_network"
ip_address = connected_container.get_ip_address(network_name)
else:
ip_address = connected_container.get_ip_address(f'{self.untrusted_user}_routerless_network')
container_knownhost['hosts'][connected_container.name] = {
'tcp_start_port' : connected_container.tcp_port_range[0],
'tcp_end_port' : connected_container.tcp_port_range[1],
'udp_start_port' : connected_container.udp_port_range[0],
'udp_end_port' : connected_container.udp_port_range[1],
'ip_address' : ip_address
}
with open(knownhosts_location, 'w') as outfile:
json.dump(container_knownhost, outfile, indent=4)
autograding_utils.add_all_permissions(knownhosts_location)
def create_knownhosts_txt(self, containers):
"""
Given a set of containers, add initialization files to each
container's directory which specify how to connect to other endpoints
on the container's network (hostname, port).
"""
tcp_connection_list = list()
udp_connection_list = list()
sorted_containers = sorted(containers, key=lambda x: x.name)
for container in sorted_containers:
tcp_connection_list.append([container.name, container.tcp_port_range[0]])
udp_connection_list.append([container.name, container.udp_port_range[0]])
    # Write the complete knownhosts text files into each container's directory.
networked_containers = self.get_standard_containers(containers)
router = self.get_router(containers)
if router is not None:
networked_containers.append(router)
sorted_networked_containers = sorted(networked_containers, key=lambda x: x.name)
for container in sorted_networked_containers:
knownhosts_location = os.path.join(container.directory, 'knownhosts_tcp.txt')
with open(knownhosts_location, 'w') as outfile:
for tup in tcp_connection_list:
outfile.write(" ".join(map(str, tup)) + '\n')
outfile.flush()
autograding_utils.add_all_permissions(knownhosts_location)
knownhosts_location = os.path.join(container.directory, 'knownhosts_udp.txt')
with open(knownhosts_location, 'w') as outfile:
for tup in udp_connection_list:
outfile.write(" ".join(map(str, tup)) + '\n')
outfile.flush()
autograding_utils.add_all_permissions(knownhosts_location)
###########################################################
#
# Dispatcher Functions
#
###########################################################
def process_dispatcher_actions(self, containers):
"""
Deliver actions (stdin, delay, stop, start, kill)
to a set of containers per their testcase specification.
"""
for action_obj in self.dispatcher_actions:
action_type = action_obj["action"]
if action_type == "delay":
time_to_delay = float(action_obj["seconds"])
while time_to_delay > 0 and self.at_least_one_alive(containers):
if time_to_delay >= .1:
time.sleep(.1)
else:
time.sleep(time_to_delay)
# This can go negative (subtracts .1 even in the else case) but that's fine.
time_to_delay -= .1
elif action_type == "stdin":
self.send_message_to_processes(containers, action_obj["string"], action_obj["containers"])
elif action_type in ['stop', 'start', 'kill']:
self.send_message_to_processes(containers, f"SUBMITTY_SIGNAL:{action_type.upper()}\n", action_obj['containers'])
# A .1 second delay after each action to keep things flowing smoothly.
time.sleep(.1)
if len(self.dispatcher_actions) > 0:
names = [c.name for c in containers]
self.send_message_to_processes(containers, "SUBMITTY_SIGNAL:FINALMESSAGE\n", names)
def get_container_with_name(self, name, containers):
""" Given a name, grab the corresponding container. """
for container in containers:
if container.name == name:
return container
return None
def get_network_with_name(self, name):
""" Given a name, grab the corresponding container. """
for network in self.networks:
if network.name == name:
return network
return None
#targets must hold names/keys for the processes dictionary
def send_message_to_processes(self, containers, message, targets):
""" Given containers, targets, and a message, deliver the message to the target containers. """
for target in targets:
container = self.get_container_with_name(target, containers)
container.container.reload()
if container.container.status != 'exited':
os.write(container.socket.fileno(), message.encode('utf-8'))
else:
pass
def at_least_one_alive(self, containers):
""" Check that at least one of a set of containers is running. """
for container in self.get_standard_containers(containers):
# Update container variables so that status is accurate.
container.container.reload()
if container.container.status != 'exited':
return True
return False
###########################################################
#
# Overridden Secure Execution Environment Functions
#
###########################################################
def setup_for_compilation_testcase(self):
""" For every container, set up its directory for compilation. """
os.chdir(self.tmp_work)
for container in self.containers:
self._setup_single_directory_for_compilation(container.directory)
# Run any necessary pre_commands
self._run_pre_commands(self.directory)
def setup_for_execution_testcase(self, testcase_dependencies):
""" For every container, set up its directory for execution. """
os.chdir(self.tmp_work)
for container in self.containers:
self._setup_single_directory_for_execution(container.directory, testcase_dependencies)
# Copy in the submitty_router if necessary.
if container.import_router:
router_path = os.path.join(self.tmp_autograding, "bin", "submitty_router.py")
self.log_message(f"COPYING:\n\t{router_path}\n\t{container.directory}")
shutil.copy(router_path, container.directory)
autograding_utils.add_all_permissions(container.directory)
self._run_pre_commands(self.directory)
def setup_for_random_output(self, testcase_dependencies):
""" For every container, set up its directory for random output generation. """
os.chdir(self.tmp_work)
for container in self.solution_containers:
self._setup_single_directory_for_random_output(container.directory, testcase_dependencies)
if container.import_router:
router_path = os.path.join(self.tmp_autograding, "bin", "submitty_router.py")
self.log_message(f"COPYING:\n\t{router_path}\n\t{container.directory}")
shutil.copy(router_path, container.directory)
autograding_utils.add_all_permissions(container.directory)
self._run_pre_commands(self.random_output_directory)
def setup_for_archival(self, overall_log):
""" For every container, set up its directory for archival. """
self.setup_for_testcase_archival(overall_log)
test_input_path = os.path.join(self.tmp_autograding, 'test_input_path')
for container in self.containers:
if len(self.containers) > 1:
public_dir = os.path.join(self.tmp_results,"results_public", self.name, container.name)
details_dir = os.path.join(self.tmp_results, "details", self.name, container.name)
os.mkdir(public_dir)
os.mkdir(details_dir)
def execute_random_input(self, untrusted_user, executable, arguments, logfile, cwd):
""" Generate random input for this container using its testcase specification. """
    container_spec = {
      'container_name' : f'{untrusted_user}_temporary_container',
      'container_image' : 'submitty/autograding-default:latest',
      'server' : False,
      'outgoing_connections' : [],
      # Container.__init__ indexes these keys directly, so the temporary
      # container needs them too; default port ranges and no extra rlimits.
      'container_rlimits' : {},
      'tcp_port_range' : (9000, 9000),
      'udp_port_range' : (15000, 15000)
    }
# Create a container to generate random input inside of.
container = Container( container_spec, untrusted_user, self.random_input_directory, False, self.is_test_environment, self.log_message)
execution_script = os.path.join(container.directory, executable)
try:
container.create(execution_script, arguments, False)
container.start(logfile)
      # Block until the process inside the docker container finishes.
      container.container.wait()
except Exception as e:
self.log_message('ERROR generating random input using docker. See stack trace output for more details.')
self.log_stack_trace(traceback.format_exc())
finally:
container.cleanup_container()
return container.return_code
def execute_random_output(self, untrusted_user, script, arguments, logfile, cwd=None):
"""
Random output execution is analogous to execution, but with slightly different arguments
and a different network of containers.
"""
return self.execute_helper(self.solution_containers, script, arguments, logfile)
def execute(self, untrusted_user, script, arguments, logfile, cwd=None):
""" Run an execution step using our container network specification. """
return self.execute_helper(self.containers, script, arguments, logfile)
def execute_helper(self, containers, script, arguments, logfile):
""" Create, Start, Monitor/Deliver input to a network of containers. """
try:
# Make certain we are executing in the environment in which we say we are
# (e.g. test vs production environment).
self.verify_execution_status()
except Exception as e:
self.log_stack_trace(traceback.format_exc())
self.log_message("ERROR: Could not verify execution mode status.")
return
try:
self.create_containers( containers, script, arguments)
self.network_containers(containers)
except Exception as e:
self.log_message('ERROR: Could not create or network containers. See stack trace output for more details.')
self.log_stack_trace(traceback.format_exc())
return -1
    return_code = None
    try:
router = self.get_router(containers)
      # First start the router a couple of seconds before any other container, giving it time to initialize.
if router is not None:
router.start(logfile)
time.sleep(2)
# Next start any server containers, giving them time to initialize.
for container in self.get_server_containers(containers):
container.start(logfile)
# Finally, start the standard (assignment) containers.
for container in self.get_standard_containers(containers):
container.start(logfile)
# Deliver dispatcher actions.
self.process_dispatcher_actions(containers)
except Exception as e:
self.log_message('ERROR grading using docker. See stack trace output for more details.')
self.log_stack_trace(traceback.format_exc())
return_code = -1
try:
# Clean up all containers. (Cleanup waits until they are finished)
# Note: All containers should eventually terminate, as their executable will kill them for time.
for container in self.get_standard_containers(containers):
container.cleanup_container()
for container in self.get_server_containers(containers):
container.cleanup_container()
if router is not None:
self.get_router(containers).cleanup_container()
except Exception as e:
self.log_message('ERROR cleaning up docker containers. See stack trace output for more details.')
self.log_stack_trace(traceback.format_exc())
    # Clean up all the networks.
try:
self.cleanup_networks()
except Exception as e:
self.log_message('ERROR cleaning up docker networks. See stack trace output for more details.')
self.log_stack_trace(traceback.format_exc())
    # A zero return code means execution went smoothly; keep any error code
    # recorded while delivering dispatcher actions instead of overwriting it.
    if return_code is None:
      return_code = 0
# Check the return codes of the standard (non server/router) containers
# to see if they finished properly. Note that this return code is yielded by
# main runner/validator/compiler. We return the first non-zero return code we encounter.
for container in self.get_standard_containers(containers):
if container.return_code != 0:
return_code = container.return_code
break
return return_code
| 43.524425 | 195 | 0.706005 |
26f4bb098483f615774fc2ffc420209f29027904 | 993 | py | Python | examples/nova/v2/12_backup.py | hustbeta/openstack-juno-api-adventure | 0c62cdc33599256ca7063478bb1e2906a8a6c2a2 | [
"MIT"
] | 4 | 2015-08-27T08:39:15.000Z | 2020-06-24T01:47:30.000Z | examples/nova/v2/12_backup.py | hustbeta/openstack-juno-api-adventure | 0c62cdc33599256ca7063478bb1e2906a8a6c2a2 | [
"MIT"
] | null | null | null | examples/nova/v2/12_backup.py | hustbeta/openstack-juno-api-adventure | 0c62cdc33599256ca7063478bb1e2906a8a6c2a2 | [
"MIT"
] | 2 | 2015-08-27T08:39:20.000Z | 2018-11-20T08:48:49.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import keystoneclient
import keystoneclient.auth.identity.v3
import keystoneclient.session
import keystoneclient.v3.client
import novaclient.client
import local_settings
auth = keystoneclient.auth.identity.v3.Password(auth_url=local_settings.auth_url_v3,
username=local_settings.username,
password=local_settings.password,
user_domain_name='Default',
project_domain_name='Default',
project_name=local_settings.tenant_name)
session = keystoneclient.session.Session(auth=auth)
nova = novaclient.client.Client('2', session=session)
server = nova.servers.get('f893cbec-b8dc-4bb4-8ee1-baa43c91bc30')
print json.dumps(server.to_dict())
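# backup() positional args: backup name, backup type tag, and rotation (how
# many backups of this type Nova keeps before deleting the oldest).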
backup = server.backup('backup-1', 'daily', 1)
print backup
| 36.777778 | 88 | 0.6143 |
a7234f31e06aab8f678936abae6d4c1c2c41424d | 32,287 | py | Python | desktop/libs/notebook/src/notebook/api_tests.py | yetsun/hue | 2e48f0cc70e233ee0e1b40733d4b2a18d8836c66 | [
"Apache-2.0"
] | 1 | 2021-05-08T20:00:03.000Z | 2021-05-08T20:00:03.000Z | desktop/libs/notebook/src/notebook/api_tests.py | yetsun/hue | 2e48f0cc70e233ee0e1b40733d4b2a18d8836c66 | [
"Apache-2.0"
] | 1 | 2021-05-10T02:32:57.000Z | 2021-05-10T02:32:57.000Z | desktop/libs/notebook/src/notebook/api_tests.py | yetsun/hue | 2e48f0cc70e233ee0e1b40733d4b2a18d8836c66 | [
"Apache-2.0"
] | 1 | 2021-02-01T19:55:11.000Z | 2021-02-01T19:55:11.000Z | #!/usr/bin/env python
## -*- coding: utf-8 -*-
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import object
import json
import sys
from collections import OrderedDict
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
from nose.tools import assert_equal, assert_true, assert_false
from django.test.client import Client
from django.urls import reverse
from azure.conf import is_adls_enabled
from desktop import appmanager
from desktop.conf import APP_BLACKLIST, ENABLE_CONNECTORS, ENABLE_PROMETHEUS
from desktop.lib.connectors.models import Connector
from desktop.lib.django_test_util import make_logged_in_client
from desktop.lib.test_utils import grant_access, add_permission
from desktop.metrics import num_of_queries
from desktop.models import Directory, Document, Document2
from hadoop import cluster as originalCluster
from useradmin.models import User
import notebook.conf
import notebook.connectors.hiveserver2
from notebook.api import _historify
from notebook.connectors.base import Notebook, QueryError, Api, QueryExpired
from notebook.decorators import api_error_handler
from notebook.conf import get_ordered_interpreters, INTERPRETERS_SHOWN_ON_WHEEL, INTERPRETERS
if sys.version_info[0] > 2:
from unittest.mock import patch, Mock
else:
from mock import patch, Mock
class TestApi(object):
def setUp(self):
self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
self.client_not_me = make_logged_in_client(username="not_perm_user", groupname="default", recreate=True, is_superuser=False)
self.user = User.objects.get(username="test")
self.user_not_me = User.objects.get(username="not_perm_user")
self.notebook_json = """
{
"selectedSnippet": "hive",
"showHistory": false,
"description": "Test Hive Query",
"name": "Test Hive Query",
"sessions": [
{
"type": "hive",
"properties": [],
"id": null
}
],
"type": "query-hive",
"id": 50010,
"snippets": [{"id":"2b7d1f46-17a0-30af-efeb-33d4c29b1055","type":"hive","status":"running","statement_raw":""" \
""""select * from default.web_logs where app = '${app_name}';","variables":[{"name":"app_name","value":"metastore"}],""" \
""""statement":"select * from default.web_logs where app = 'metastore';","properties":{"settings":[],"files":[],""" \
""""functions":[]},"result":{"id":"b424befa-f4f5-8799-a0b4-79753f2552b1","type":"table","handle":{"log_context":null,""" \
""""statements_count":1,"end":{"column":21,"row":0},"statement_id":0,"has_more_statements":false,""" \
""""start":{"column":0,"row":0},"secret":"rVRWw7YPRGqPT7LZ/TeFaA==an","has_result_set":true,"statement":""" \
""""select * from default.web_logs where app = 'metastore';","operation_type":0,"modified_row_count":null,""" \
""""guid":"7xm6+epkRx6dyvYvGNYePA==an"}},"lastExecuted": 1462554843817,"database":"default"}],
"uuid": "5982a274-de78-083c-2efc-74f53dce744c",
"isSaved": false,
"parentUuid": null
}
"""
self.notebook = json.loads(self.notebook_json)
self.doc2 = Document2.objects.create(id=50010, name=self.notebook['name'], type=self.notebook['type'], owner=self.user)
self.doc1 = Document.objects.link(
self.doc2, owner=self.user, name=self.doc2.name, description=self.doc2.description, extra=self.doc2.type
)
def test_save_notebook(self):
# Test that saving a new document with a new parent will set the parent_directory
home_dir = Document2.objects.get_home_directory(self.user)
assert_equal(home_dir.uuid, self.doc2.parent_directory.uuid)
new_dir = Directory.objects.create(name='new_dir', owner=self.user, parent_directory=home_dir)
notebook_cp = self.notebook.copy()
notebook_cp.pop('id')
notebook_cp['directoryUuid'] = new_dir.uuid
notebook_json = json.dumps(notebook_cp)
response = self.client.post(reverse('notebook:save_notebook'), {'notebook': notebook_json})
data = json.loads(response.content)
assert_equal(0, data['status'], data)
doc = Document2.objects.get(pk=data['id'])
assert_equal(new_dir.uuid, doc.parent_directory.uuid)
# Test that saving a new document with a no parent will map it to its home dir
notebook_json = """
{
"selectedSnippet": "hive",
"showHistory": false,
"description": "Test Hive Query",
"name": "Test Hive Query",
"sessions": [
{
"type": "hive",
"properties": [],
"id": null
}
],
"type": "query-hive",
"id": null,
"snippets": [{"id":"2b7d1f46-17a0-30af-efeb-33d4c29b1055","type":"hive","status":"running","statement_raw":""" \
""""select * from default.web_logs where app = '${app_name}';","variables":""" \
"""[{"name":"app_name","value":"metastore"}],"statement":""" \
""""select * from default.web_logs where app = 'metastore';","properties":{"settings":[],"files":[],"functions":[]},""" \
""""result":{"id":"b424befa-f4f5-8799-a0b4-79753f2552b1","type":"table","handle":{"log_context":null,""" \
""""statements_count":1,"end":{"column":21,"row":0},"statement_id":0,"has_more_statements":false,""" \
""""start":{"column":0,"row":0},"secret":"rVRWw7YPRGqPT7LZ/TeFaA==an","has_result_set":true,""" \
""""statement":"select * from default.web_logs where app = 'metastore';","operation_type":0,""" \
""""modified_row_count":null,"guid":"7xm6+epkRx6dyvYvGNYePA==an"}},"lastExecuted": 1462554843817,"database":"default"}],
"uuid": "d9efdee1-ef25-4d43-b8f9-1a170f69a05a"
}
"""
response = self.client.post(reverse('notebook:save_notebook'), {'notebook': notebook_json})
data = json.loads(response.content)
assert_equal(0, data['status'], data)
doc = Document2.objects.get(pk=data['id'])
assert_equal(Document2.objects.get_home_directory(self.user).uuid, doc.parent_directory.uuid)
# Test that saving a notebook will save the search field to the first statement text
assert_equal(doc.search, "select * from default.web_logs where app = 'metastore';")
def test_save_notebook_with_connector_off(self):
reset = ENABLE_CONNECTORS.set_for_testing(False)
notebook_cp = self.notebook.copy()
notebook_cp.pop('id')
notebook_cp['snippets'][0]['connector'] = {
'name': 'MySql', # At some point even v1 should set those two
'dialect': 'mysql',
'optimizer': 'api',
}
notebook_json = json.dumps(notebook_cp)
try:
response = self.client.post(reverse('notebook:save_notebook'), {'notebook': notebook_json})
data = json.loads(response.content)
finally:
reset()
assert_equal(0, data['status'], data)
doc = Document2.objects.get(pk=data['id'])
assert_equal('query-mysql', doc.type)
def test_save_notebook_with_connector_on(self):
if not ENABLE_CONNECTORS.get():
raise SkipTest
notebook_cp = self.notebook.copy()
notebook_cp.pop('id')
connector = Connector.objects.create(
name='MySql',
dialect='mysql'
)
notebook_cp['snippets'][0]['connector'] = {
'name': 'MySql',
'dialect': 'mysql',
'type': str(connector.id),
'optimizer': 'api',
    }
    notebook_json = json.dumps(notebook_cp)
    try:
response = self.client.post(reverse('notebook:save_notebook'), {'notebook': notebook_json})
data = json.loads(response.content)
finally:
connector.delete()
assert_equal(0, data['status'], data)
doc = Document2.objects.get(pk=data['id'])
assert_equal('query-mysql', doc.type)
def test_historify(self):
# Starts with no history
assert_equal(0, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
assert_equal(1, Document.objects.filter(name__contains=self.notebook['name']).count())
history_doc = _historify(self.notebook, self.user)
assert_true(history_doc.id > 0)
# Test that historify creates new Doc2 and linked Doc1
assert_equal(1, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
assert_equal(2, Document.objects.filter(name__contains=self.notebook['name']).count())
# Historify again
history_doc = _historify(self.notebook, self.user)
assert_equal(2, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
assert_equal(3, Document.objects.filter(name__contains=self.notebook['name']).count())
def test_get_history(self):
assert_equal(0, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
_historify(self.notebook, self.user)
_historify(self.notebook, self.user)
_historify(self.notebook, self.user)
assert_equal(3, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
# History should not return history objects that don't have the given doc type
Document2.objects.create(name='Impala History', type='query-impala', data=self.notebook_json, owner=self.user, is_history=True)
# Verify that get_history API returns history objects for given type and current user
response = self.client.get(reverse('notebook:get_history'), {'doc_type': 'hive'})
data = json.loads(response.content)
assert_equal(0, data['status'], data)
assert_equal(3, len(data['history']), data)
assert_true(all(doc['type'] == 'query-hive' for doc in data['history']), data)
# TODO: test that query history for shared query only returns docs accessible by current user
def test_clear_history(self):
assert_equal(0, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
_historify(self.notebook, self.user)
_historify(self.notebook, self.user)
_historify(self.notebook, self.user)
assert_equal(3, Document2.objects.filter(name__contains=self.notebook['name'], is_history=True).count())
# Clear history should not clear history objects that don't have the given doc type
Document2.objects.create(name='Impala History', type='query-impala', owner=self.user, is_history=True)
# clear history should retain original document but wipe history
response = self.client.post(reverse('notebook:clear_history'), {'notebook': self.notebook_json, 'doc_type': 'hive'})
data = json.loads(response.content)
assert_equal(0, data['status'], data)
assert_false(Document2.objects.filter(type='query-hive', is_history=True).exists())
assert_true(Document2.objects.filter(type='query-hive', is_history=False).exists())
assert_true(Document2.objects.filter(type='query-impala', is_history=True).exists())
def test_delete_notebook(self):
trash_notebook_json = """
{
"selectedSnippet": "hive",
"showHistory": false,
"description": "Test Hive Query",
"name": "Test Hive Query",
"sessions": [
{
"type": "hive",
"properties": [],
"id": null
}
],
"type": "query-hive",
"id": null,
"snippets": [{"id": "e069ef32-5c95-4507-b961-e79c090b5abf","type":"hive","status":"ready","database":"default",""" \
""""statement":"select * from web_logs","statement_raw":"select * from web_logs","variables":[],"properties":""" \
"""{"settings":[],"files":[],"functions":[]},"result":{}}],
"uuid": "8a20da5f-b69c-4843-b17d-dea5c74c41d1"
}
"""
# Assert that the notebook is first saved
response = self.client.post(reverse('notebook:save_notebook'), {'notebook': trash_notebook_json})
data = json.loads(response.content)
assert_equal(0, data['status'], data)
# Test that deleting it moves it to the user's Trash folder
notebook_doc = Document2.objects.get(id=data['id'])
trash_notebooks = [Notebook(notebook_doc).get_data()]
response = self.client.post(reverse('notebook:delete'), {'notebooks': json.dumps(trash_notebooks)})
data = json.loads(response.content)
assert_equal(0, data['status'], data)
assert_equal('Trashed 1 notebook(s)', data['message'], data)
response = self.client.get('/desktop/api2/doc', {'path': '/.Trash'})
data = json.loads(response.content)
trash_uuids = [doc['uuid'] for doc in data['children']]
assert_true(notebook_doc.uuid in trash_uuids, data)
# Test that any errors are reported in the response
    nonexistent_doc = {
"id": 12345,
"uuid": "ea22da5f-b69c-4843-b17d-dea5c74c41d1",
"selectedSnippet": "hive",
"showHistory": False,
"description": "Test Hive Query",
"name": "Test Hive Query",
"sessions": [
{
"type": "hive",
"properties": [],
"id": None,
}
],
"type": "query-hive",
"snippets": [{
"id": "e069ef32-5c95-4507-b961-e79c090b5abf",
"type": "hive",
"status": "ready",
"database": "default",
"statement": "select * from web_logs",
"statement_raw": "select * from web_logs",
"variables": [],
"properties": {"settings": [], "files": [], "functions": []},
"result": {}
}]
}
    trash_notebooks = [nonexistent_doc]
response = self.client.post(reverse('notebook:delete'), {'notebooks': json.dumps(trash_notebooks)})
data = json.loads(response.content)
assert_equal(0, data['status'], data)
assert_equal('Trashed 0 notebook(s) and failed to delete 1 notebook(s).', data['message'], data)
assert_equal(['ea22da5f-b69c-4843-b17d-dea5c74c41d1'], data['errors'])
def test_query_error_encoding(self):
@api_error_handler
def send_exception(message):
raise QueryError(message=message)
message = """SELECT
a.key,
a.*
FROM customers c, c.addresses a"""
response = send_exception(message)
data = json.loads(response.content)
assert_equal(1, data['status'])
message = """SELECT
\u2002\u2002a.key,
\u2002\u2002a.*
FROM customers c, c.addresses a"""
response = send_exception(message)
data = json.loads(response.content)
assert_equal(1, data['status'])
message = u"""SELECT
a.key,
a.*
FROM déclenché c, c.addresses a"""
response = send_exception(message)
data = json.loads(response.content)
assert_equal(1, data['status'])
def test_notebook_autocomplete(self):
with patch('notebook.api.get_api') as get_api:
get_api.return_value = Mock(
autocomplete=Mock(
side_effect=QueryExpired("HTTPSConnectionPool(host='gethue.com', port=10001): Read timed out. (read timeout=120)")
)
)
response = self.client.post(
reverse('notebook:api_autocomplete_tables', kwargs={'database': 'database'}),
{
'snippet': json.dumps({'type': 'hive'})
}
)
data = json.loads(response.content)
assert_equal(data, {'status': 0}) # We get back empty instead of failure with QueryExpired to silence end user messages
def test_autocomplete_functions(self):
# Note: better test would be to mock autocomplete() and not get_api() with hive and mysql dialects
with patch('notebook.api.get_api') as get_api:
get_api.return_value = Mock(
autocomplete=Mock(
return_value={
'functions': [
{'name': 'f1'}, {'name': 'f2'}, {'name': 'f3'}
]
}
)
)
response = self.client.post(reverse('notebook:api_autocomplete_databases'), {
'snippet': json.dumps({'type': 'hive', 'properties': {}}),
'operation': 'functions'
})
assert_equal(response.status_code, 200)
data = json.loads(response.content)
assert_equal(data['status'], 0)
assert_equal(
data['functions'],
[{'name': 'f1'}, {'name': 'f2'}, {'name': 'f3'}]
)
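# MockedApi stands in for the HiveServer2 connector in TestNotebookApiMocked
# below, letting the download/export tests run without a live Hive: execute()
# returns a canned one-row result set and export_data_as_hdfs_file() simply
# echoes the requested destination path.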
class MockedApi(Api):
def execute(self, notebook, snippet):
return {
'sync': True,
'has_result_set': True,
'result': {
'has_more': False,
'data': [['test']],
'meta': [{
'name': 'test',
'type': '',
'comment': ''
}],
'type': 'table'
}
}
def close_statement(self, notebook, snippet):
pass
def export_data_as_hdfs_file(self, snippet, target_file, overwrite):
return {'destination': target_file}
class MockFs(object):
def __init__(self, logical_name=None):
self.fs_defaultfs = 'hdfs://curacao:8020'
self.logical_name = logical_name if logical_name else ''
self.DEFAULT_USER = 'test'
self.user = 'test'
self._filebrowser_action = ''
def setuser(self, user):
self._user = user
@property
def user(self):
return self._user
def do_as_user(self, username, fn, *args, **kwargs):
return ''
def exists(self, path):
if path == '/user/hue/non_exists_directory':
return False
return True
def listdir_stats(self, path):
if path == '/user/hue/non_empty_directory':
return ['mock_dir', 'mock_file']
return []
def isdir(self, path):
return path == '/user/hue'
def filebrowser_action(self):
return self._filebrowser_action
@user.setter
def user(self, value):
self._user = value
class TestNotebookApiMocked(object):
def setUp(self):
self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
self.client_not_me = make_logged_in_client(username="not_perm_user", groupname="default", recreate=True, is_superuser=False)
self.user = User.objects.get(username="test")
self.user_not_me = User.objects.get(username="not_perm_user")
# Beware: Monkey patch HS2API Mock API
if not hasattr(notebook.connectors.hiveserver2, 'original_HS2Api'): # Could not monkey patch base.get_api
notebook.connectors.hiveserver2.original_HS2Api = notebook.connectors.hiveserver2.HS2Api
notebook.connectors.hiveserver2.HS2Api = MockedApi
originalCluster.get_hdfs()
self.original_fs = originalCluster.FS_CACHE["default"]
originalCluster.FS_CACHE["default"] = MockFs()
grant_access("test", "default", "notebook")
grant_access("test", "default", "beeswax")
grant_access("test", "default", "hive")
grant_access("not_perm_user", "default", "notebook")
grant_access("not_perm_user", "default", "beeswax")
grant_access("not_perm_user", "default", "hive")
add_permission('test', 'has_adls', permname='adls_access', appname='filebrowser')
def tearDown(self):
notebook.connectors.hiveserver2.HS2Api = notebook.connectors.hiveserver2.original_HS2Api
if originalCluster.FS_CACHE is None:
originalCluster.FS_CACHE = {}
originalCluster.FS_CACHE["default"] = self.original_fs
@attr('integration')
def test_export_result(self):
notebook_json = """
{
"selectedSnippet": "hive",
"showHistory": false,
"description": "Test Hive Query",
"name": "Test Hive Query",
"sessions": [
{
"type": "hive",
"properties": [],
"id": null
}
],
"type": "query-hive",
"id": null,
"snippets": [{"id":"2b7d1f46-17a0-30af-efeb-33d4c29b1055","type":"hive","status":"running","statement":""" \
""""select * from web_logs","properties":{"settings":[],"variables":[],"files":[],"functions":[]},""" \
""""result":{"id":"b424befa-f4f5-8799-a0b4-79753f2552b1","type":"table","handle":""" \
"""{"log_context":null,"statements_count":1,"end":{"column":21,"row":0},"statement_id":0,""" \
""""has_more_statements":false,"start":{"column":0,"row":0},"secret":"rVRWw7YPRGqPT7LZ/TeFaA==an",""" \
""""has_result_set":true,"statement":"select * from web_logs","operation_type":0,"modified_row_count":""" \
"""null,"guid":"7xm6+epkRx6dyvYvGNYePA==an"}},"lastExecuted": 1462554843817,"database":"default"}],
"uuid": "d9efdee1-ef25-4d43-b8f9-1a170f69a05a"
}
"""
response = self.client.post(reverse('notebook:export_result'), {
'notebook': notebook_json,
'snippet': json.dumps(json.loads(notebook_json)['snippets'][0]),
'format': json.dumps('hdfs-file'),
'destination': json.dumps('/user/hue'),
'overwrite': json.dumps(False)
})
data = json.loads(response.content)
assert_equal(0, data['status'], data)
assert_equal('/user/hue/Test Hive Query.csv', data['watch_url']['destination'], data)
response = self.client.post(reverse('notebook:export_result'), {
'notebook': notebook_json,
'snippet': json.dumps(json.loads(notebook_json)['snippets'][0]),
'format': json.dumps('hdfs-file'),
'destination': json.dumps('/user/hue/path.csv'),
'overwrite': json.dumps(False)
})
data = json.loads(response.content)
assert_equal(0, data['status'], data)
assert_equal('/user/hue/path.csv', data['watch_url']['destination'], data)
if is_adls_enabled():
response = self.client.post(reverse('notebook:export_result'), {
'notebook': notebook_json,
'snippet': json.dumps(json.loads(notebook_json)['snippets'][0]),
'format': json.dumps('hdfs-file'),
'destination': json.dumps('adl:/user/hue/path.csv'),
'overwrite': json.dumps(False)
})
data = json.loads(response.content)
assert_equal(0, data['status'], data)
assert_equal('adl:/user/hue/path.csv', data['watch_url']['destination'], data)
response = self.client.post(reverse('notebook:export_result'), {
'notebook': notebook_json,
'snippet': json.dumps(json.loads(notebook_json)['snippets'][0]),
'format': json.dumps('hdfs-directory'),
'destination': json.dumps('/user/hue/non_empty_directory'),
'overwrite': json.dumps(False)
})
data = json.loads(response.content)
assert_equal(-1, data['status'], data)
assert_equal('The destination is not an empty directory!', data['message'], data)
def test_download_result(self):
notebook_json = """
{
"selectedSnippet": "hive",
"showHistory": false,
"description": "Test Hive Query",
"name": "Test Hive Query",
"sessions": [
{
"type": "hive",
"properties": [],
"id": null
}
],
"type": "query-hive",
"id": null,
"snippets": [{"id":"2b7d1f46-17a0-30af-efeb-33d4c29b1055","type":"hive","status":"running","statement":""" \
""""select * from web_logs","properties":{"settings":[],"variables":[],"files":[],"functions":[]},""" \
""""result":{"id":"b424befa-f4f5-8799-a0b4-79753f2552b1","type":"table","handle":{"log_context":null,""" \
""""statements_count":1,"end":{"column":21,"row":0},"statement_id":0,"has_more_statements":false,""" \
""""start":{"column":0,"row":0},"secret":"rVRWw7YPRGqPT7LZ/TeFaA==an","has_result_set":true,"statement":"""\
""""select * from web_logs","operation_type":0,"modified_row_count":null,"guid":"7xm6+epkRx6dyvYvGNYePA==an"}},""" \
""""lastExecuted": 1462554843817,"database":"default"}],
"uuid": "d9efdee1-ef25-4d43-b8f9-1a170f69a05a"
}
"""
response = self.client.post(reverse('notebook:download'), {
'notebook': notebook_json,
'snippet': json.dumps(json.loads(notebook_json)['snippets'][0]),
'format': 'csv'
})
content = b"".join(response)
assert_true(len(content) > 0)
def test_get_interpreters_to_show():
default_interpreters = OrderedDict((
('hive', {
'name': 'Hive', 'interface': 'hiveserver2', 'type': 'hive', 'is_sql': True, 'options': {}, 'dialect_properties': {},
'is_catalog': False, 'category': 'editor', 'dialect': 'hive'
}),
('spark', {
'name': 'Scala', 'interface': 'livy', 'type': 'spark', 'is_sql': False, 'options': {}, 'dialect_properties': {},
'is_catalog': False, 'category': 'editor', 'dialect': 'scala'
}),
('pig', {
'name': 'Pig', 'interface': 'pig', 'type': 'pig', 'is_sql': False, 'options': {}, 'dialect_properties': {},
'is_catalog': False, 'category': 'editor', 'dialect': 'pig'
}),
('java', {
'name': 'Java', 'interface': 'oozie', 'type': 'java', 'is_sql': False, 'options': {}, 'dialect_properties': {},
'is_catalog': False, 'category': 'editor', 'dialect': 'java'
})
))
expected_interpreters = OrderedDict((
('java', {
'name': 'Java', 'interface': 'oozie', 'type': 'java', 'is_sql': False, 'options': {}, 'dialect_properties': {},
'is_catalog': False, 'category': 'editor', 'dialect': 'java'
}),
('pig', {
'name': 'Pig', 'interface': 'pig', 'is_sql': False, 'type': 'pig', 'options': {}, 'dialect_properties': {},
'is_catalog': False, 'category': 'editor', 'dialect': 'pig'
}),
('hive', {
'name': 'Hive', 'interface': 'hiveserver2', 'is_sql': True, 'type': 'hive', 'options': {}, 'dialect_properties': {},
'is_catalog': False, 'category': 'editor', 'dialect': 'hive'
}),
('spark', {
'name': 'Scala', 'interface': 'livy', 'type': 'spark', 'is_sql': False, 'options': {}, 'dialect_properties': {},
'is_catalog': False, 'category': 'editor', 'dialect': 'scala'
})
))
try:
resets = [
INTERPRETERS.set_for_testing(default_interpreters),
APP_BLACKLIST.set_for_testing(''),
ENABLE_CONNECTORS.set_for_testing(False)
]
appmanager.DESKTOP_MODULES = []
appmanager.DESKTOP_APPS = None
appmanager.load_apps(APP_BLACKLIST.get())
notebook.conf.INTERPRETERS_CACHE = None
# 'get_interpreters_to_show should return the same as get_interpreters when interpreters_shown_on_wheel is unset'
assert_equal(
list(default_interpreters.values()), get_ordered_interpreters()
)
resets.append(INTERPRETERS_SHOWN_ON_WHEEL.set_for_testing('java,pig'))
assert_equal(
list(expected_interpreters.values()), get_ordered_interpreters(),
      'get_interpreters_to_show did not return interpreters in the expected order'
)
finally:
for reset in resets:
reset()
appmanager.DESKTOP_MODULES = []
appmanager.DESKTOP_APPS = None
appmanager.load_apps(APP_BLACKLIST.get())
notebook.conf.INTERPRETERS_CACHE = None
def test_get_ordered_interpreters():
default_interpreters = OrderedDict((
('hive', {
'name': 'Hive', 'interface': 'hiveserver2', 'type': 'hive', 'is_sql': True, 'options': {}, 'dialect_properties': None,
'is_catalog': False, 'category': 'editor', 'dialect': 'hive'
}),
('impala', {
'name': 'Impala', 'interface': 'hiveserver2', 'type': 'impala', 'is_sql': True, 'options': {}, 'dialect_properties': None,
'is_catalog': False, 'category': 'editor', 'dialect': 'impala'
}),
('spark', {
'name': 'Scala', 'interface': 'livy', 'type': 'spark', 'is_sql': False, 'options': {}, 'dialect_properties': None,
'is_catalog': False, 'category': 'editor', 'dialect': 'scala'
}),
('pig', {
'name': 'Pig', 'interface': 'pig', 'type': 'pig', 'is_sql': False, 'options': {}, 'dialect_properties': None,
'is_catalog': False, 'category': 'editor', 'dialect': 'pig'
}),
('java', {
'name': 'Java', 'interface': 'oozie', 'type': 'java', 'is_sql': False, 'options': {}, 'dialect_properties': None,
'is_catalog': False, 'category': 'editor', 'dialect': 'java'
})
))
try:
resets = [APP_BLACKLIST.set_for_testing('')]
appmanager.DESKTOP_MODULES = []
appmanager.DESKTOP_APPS = None
appmanager.load_apps(APP_BLACKLIST.get())
with patch('notebook.conf.is_cm_managed') as is_cm_managed:
with patch('notebook.conf.appmanager.get_apps_dict') as get_apps_dict:
with patch('notebook.conf.has_connectors') as has_connectors:
get_apps_dict.return_value = {'hive': {}}
has_connectors.return_value = False
notebook.conf.INTERPRETERS_CACHE = None
is_cm_managed.return_value = False
# No CM --> Verbatim
INTERPRETERS.set_for_testing(
OrderedDict((
('phoenix', {
'name': 'Phoenix', 'interface': 'sqlalchemy', 'dialect': 'phoenix'
}),)
)
)
assert_equal(
[interpreter['dialect'] for interpreter in get_ordered_interpreters()],
['phoenix']
)
assert_equal( # Check twice because of cache
[interpreter['dialect'] for interpreter in get_ordered_interpreters()],
['phoenix']
)
is_cm_managed.return_value = True
notebook.conf.INTERPRETERS_CACHE = None
# CM --> Append []
INTERPRETERS.set_for_testing(
OrderedDict(()
)
)
assert_equal(
[interpreter['dialect'] for interpreter in get_ordered_interpreters()],
['hive']
)
assert_equal( # Check twice
[interpreter['dialect'] for interpreter in get_ordered_interpreters()],
['hive']
)
notebook.conf.INTERPRETERS_CACHE = None
# CM --> Append [Phoenix]
INTERPRETERS.set_for_testing(
OrderedDict((
('phoenix', {
'name': 'Phoenix', 'interface': 'sqlalchemy', 'dialect': 'phoenix'
}),)
)
)
assert_equal(
[interpreter['dialect'] for interpreter in get_ordered_interpreters()],
['hive', 'phoenix']
)
assert_equal( # Check twice
[interpreter['dialect'] for interpreter in get_ordered_interpreters()],
['hive', 'phoenix']
)
finally:
for reset in resets:
reset()
appmanager.DESKTOP_MODULES = []
appmanager.DESKTOP_APPS = None
appmanager.load_apps(APP_BLACKLIST.get())
notebook.conf.INTERPRETERS_CACHE = None
class TestQueriesMetrics(object):
def test_queries_num(self):
with patch('desktop.models.Document2.objects') as doc2_value_mock:
doc2_value_mock.filter.return_value.count.return_value = 12500
count = num_of_queries()
assert_equal(12500, count)
if not ENABLE_PROMETHEUS.get():
raise SkipTest
c = Client()
response = c.get('/metrics')
assert_true(b'hue_queries_numbers 12500.0' in response.content, response.content)
class TestEditor(object):
def setUp(self):
self.client = make_logged_in_client(username="test", groupname="empty", recreate=True, is_superuser=False)
self.user = User.objects.get(username="test")
grant_access("test", "empty", "impala")
  def test_open_saved_impala_query_when_no_hive_interpreter(self):
try:
doc, created = Document2.objects.get_or_create(
name='open_saved_query_with_hive_not_present',
type='query-impala',
owner=self.user,
data={}
)
with patch('desktop.middleware.fsmanager') as fsmanager:
response = self.client.get(reverse('notebook:editor'), {'editor': doc.id, 'is_embeddable': True})
assert_equal(200, response.status_code)
finally:
doc.delete()
| 37.984706 | 131 | 0.632917 |
dd11d17a3ab45dcaf4fa32b09510892593569e97 | 647 | py | Python | models/Checkpoint.py | pdfop/Geschichte-Vernetzt-Backend | 48a8ae0c323c8bc607fc8ad035d5773cb8dbb279 | [
"MIT"
] | null | null | null | models/Checkpoint.py | pdfop/Geschichte-Vernetzt-Backend | 48a8ae0c323c8bc607fc8ad035d5773cb8dbb279 | [
"MIT"
] | null | null | null | models/Checkpoint.py | pdfop/Geschichte-Vernetzt-Backend | 48a8ae0c323c8bc607fc8ad035d5773cb8dbb279 | [
"MIT"
] | null | null | null | from mongoengine import *
from models.Tour import Tour
class Checkpoint(Document):
"""
    Parent class for all checkpoints. All other checkpoint types inherit these fields and may override them.
Serves as the most basic checkpoint for texts.
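
    Example usage (a minimal sketch; assumes a registered 'tour' database
    connection and an existing Tour document bound to ``my_tour``):
        checkpoint = Checkpoint(tour=my_tour, text="Welcome", show_text=True)
        checkpoint.save()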
"""
meta = {'db_alias': 'tour',
'collection': 'checkpoint',
'allow_inheritance': True}
tour = ReferenceField(document_type=Tour, reverse_delete_rule=CASCADE)
text = StringField()
index = IntField(default=0)
show_text = BooleanField(default=False)
show_picture = BooleanField(default=False)
show_details = BooleanField(default=False)
| 34.052632 | 113 | 0.703246 |
a43e09adc1c684fd3fa5414abc476738bffe03f1 | 110,055 | py | Python | ciscoaxl/axl.py | levensailor/ciscoaxl | f3a78ef39fba011c8bb0fdc6184a53d879e2d049 | [
"MIT"
] | 16 | 2019-04-18T17:16:48.000Z | 2020-05-27T13:39:41.000Z | ciscoaxl/axl.py | levensailor/ciscoaxl | f3a78ef39fba011c8bb0fdc6184a53d879e2d049 | [
"MIT"
] | 2 | 2020-03-16T14:26:46.000Z | 2020-05-28T09:23:11.000Z | ciscoaxl/axl.py | levensailor/ciscoaxl | f3a78ef39fba011c8bb0fdc6184a53d879e2d049 | [
"MIT"
] | 10 | 2019-09-29T16:21:11.000Z | 2020-05-22T20:07:33.000Z | """
Class to interface with cisco ucm axl api.
Author: Jeff Levensailor
Version: 0.1
Dependencies:
- zeep: https://python-zeep.readthedocs.io/en/master/
Links:
- https://developer.cisco.com/site/axl/
"""
import sys
from pathlib import Path
import os
import json
from requests import Session
from requests.exceptions import SSLError, ConnectionError
import re
import urllib3
from zeep import Client, Settings, Plugin
from zeep.transports import Transport
from zeep.cache import SqliteCache
from zeep.plugins import HistoryPlugin
from zeep.exceptions import Fault
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
class axl(object):
"""
The AXL class sets up the connection to the call manager with methods for configuring UCM.
    Tested with Python 3.6.
"""
def __init__(self, username, password, cucm, cucm_version, strict_ssl=False):
"""
:param username: axl username
:param password: axl password
:param cucm: UCM IP address
:param cucm_version: UCM version
:param strict_ssl: do not work around an SSL failure, default False
example usage:
        >>> from ciscoaxl import axl
        >>> ucm = axl('axl_user', 'axl_pass', '192.168.200.10', '12.5')
"""
cwd = os.path.dirname(os.path.abspath(__file__))
if os.name == "posix":
wsdl = Path(f"{cwd}/schema/{cucm_version}/AXLAPI.wsdl").as_uri()
else:
wsdl = str(Path(f"{cwd}/schema/{cucm_version}/AXLAPI.wsdl").absolute())
session = Session()
session.auth = (username, password)
# validate session before assigning to Transport
url = f"https://{cucm}:8443/axl/"
try:
ret_code = session.get(url, stream=True, timeout=10).status_code
except SSLError:
if strict_ssl:
raise
# retry with verify set False
session.close()
session = Session()
session.auth = (username, password)
session.verify = False
ret_code = session.get(url, stream=True, timeout=10).status_code
except ConnectionError:
raise Exception(f"{url} cannot be found, please try again") from None
if ret_code == 401:
raise Exception(
"[401 Unauthorized]: Please check your username and password"
)
elif ret_code == 403:
raise Exception(
f"[403 Forbidden]: Please ensure the user '{username}' has AXL access set up"
)
elif ret_code == 404:
raise Exception(
f"[404 Not Found]: AXL not found, please check your URL ({url})"
)
settings = Settings(
strict=False, xml_huge_tree=True, xsd_ignore_sequence_order=True
)
transport = Transport(session=session, timeout=10, cache=SqliteCache())
axl_client = Client(wsdl, settings=settings, transport=transport)
self._zeep = axl_client
self.username = username
self.password = password
self.wsdl = wsdl
self.cucm = cucm
self.cucm_version = cucm_version
self.UUID_PATTERN = re.compile(
r"^[\da-f]{8}-([\da-f]{4}-){3}[\da-f]{12}$", re.IGNORECASE
)
self.client = axl_client.create_service(
"{http://www.cisco.com/AXLAPIService/}AXLAPIBinding",
f"https://{cucm}:8443/axl/",
)
def get_locations(
self,
tagfilter={
"name": "",
"withinAudioBandwidth": "",
"withinVideoBandwidth": "",
"withinImmersiveKbits": "",
},
):
"""
Get location details
        :param tagfilter: tags to return for each location
        :return: A list of location dictionaries
"""
try:
return self.client.listLocation({"name": "%"}, returnedTags=tagfilter,)[
"return"
]["location"]
except Fault as e:
return e
def run_sql_query(self, query):
result = {"num_rows": 0, "query": query}
try:
sql_result = self.client.executeSQLQuery(sql=query)
except Exception as fault:
sql_result = None
self.last_exception = fault
num_rows = 0
result_rows = []
if sql_result is not None:
if sql_result["return"] is not None:
for row in sql_result["return"]["row"]:
result_rows.append({})
for column in row:
result_rows[num_rows][column.tag] = column.text
num_rows += 1
result["num_rows"] = num_rows
if num_rows > 0:
result["rows"] = result_rows
return result
def sql_query(self, query):
"""
Execute SQL query
:param query: SQL Query to execute
:return: result dictionary
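
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client; the ``device`` table is part of the
        standard CUCM schema):
        >>> ucm.sql_query('select name, description from device')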
"""
try:
return self.client.executeSQLQuery(query)["return"]
except Fault as e:
return e
def sql_update(self, query):
"""
Execute SQL update
:param query: SQL Update to execute
:return: result dictionary
"""
try:
return self.client.executeSQLUpdate(query)["return"]
except Fault as e:
return e
def get_ldap_dir(
self,
tagfilter={
"name": "",
"ldapDn": "",
"userSearchBase": "",
},
):
"""
        Get LDAP directory sync configurations
        :param tagfilter: tags to return for each LDAP directory
        :return: A list of LDAP directory dictionaries
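
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client with at least one LDAP directory configured;
        AXL list calls typically return each item's uuid as an element attribute):
        >>> for ldap_dir in ucm.get_ldap_dir(tagfilter={'name': ''}):
        ...     ucm.do_ldap_sync(uuid=ldap_dir['uuid'])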
"""
try:
return self.client.listLdapDirectory(
{"name": "%"},
returnedTags=tagfilter,
)["return"]["ldapDirectory"]
except Fault as e:
return e
def do_ldap_sync(self, uuid):
"""
Do LDAP Sync
:param uuid: uuid
:return: result dictionary
"""
try:
return self.client.doLdapSync(uuid=uuid, sync=True)
except Fault as e:
return e
def do_change_dnd_status(self, **args):
"""
Do Change DND Status
:param userID:
:param status:
:return: result dictionary
"""
try:
return self.client.doChangeDNDStatus(**args)
except Fault as e:
return e
def do_device_login(self, **args):
"""
Do Device Login
:param deviceName:
:param userId:
:param profileName:
:return: result dictionary
"""
try:
return self.client.doDeviceLogin(**args)
except Fault as e:
return e
def do_device_logout(self, **args):
"""
Do Device Logout
:param device:
:param userId:
:return: result dictionary
"""
try:
return self.client.doDeviceLogout(**args)
except Fault as e:
return e
def do_device_reset(self, name="", uuid=""):
"""
Do Device Reset
:param name: device name
:param uuid: device uuid
:return: result dictionary
"""
if name != "" and uuid == "":
try:
return self.client.doDeviceReset(deviceName=name, isHardReset=True)
except Fault as e:
return e
elif name == "" and uuid != "":
try:
return self.client.doDeviceReset(uuid=uuid, isHardReset=True)
except Fault as e:
return e
def reset_sip_trunk(self, name="", uuid=""):
"""
Reset SIP Trunk
:param name: device name
:param uuid: device uuid
:return: result dictionary
"""
if name != "" and uuid == "":
try:
return self.client.resetSipTrunk(name=name)
except Fault as e:
return e
elif name == "" and uuid != "":
try:
return self.client.resetSipTrunk(uuid=uuid)
except Fault as e:
return e
def get_location(self, **args):
"""
        Get location parameters
:param name: location name
:param uuid: location uuid
:return: result dictionary
"""
try:
return self.client.getLocation(**args)
except Fault as e:
return e
def add_location(
self,
name,
kbits=512,
video_kbits=-1,
within_audio_bw=512,
within_video_bw=-1,
within_immersive_kbits=-1,
):
"""
Add a location
:param name: Name of the location to add
        :param kbits: audio bandwidth, used on ucm 8.6 - 10.0
        :param video_kbits: video bandwidth, used on ucm 8.6 - 10.0
        :param within_audio_bw: within-location audio bandwidth, ucm 10.5+
        :param within_video_bw: within-location video bandwidth, ucm 10.5+
        :param within_immersive_kbits: within-location immersive bandwidth, ucm 10.5+
:return: result dictionary
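
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client; 'SYD_LOC' is an illustrative name):
        >>> ucm.add_location('SYD_LOC', within_audio_bw=256)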
"""
if (
self.cucm_version == "8.6"
or self.cucm_version == "9.0"
or self.cucm_version == "9.5"
or self.cucm_version == "10.0"
):
try:
return self.client.addLocation(
{
"name": name,
# CUCM 8.6
"kbits": kbits,
"videoKbits": video_kbits,
}
)
except Fault as e:
return e
else:
try:
betweenLocations = []
betweenLocation = {}
RLocationBetween = {}
RLocationBetween["locationName"] = "Hub_None"
RLocationBetween["weight"] = 0
RLocationBetween["audioBandwidth"] = within_audio_bw
RLocationBetween["videoBandwidth"] = within_video_bw
RLocationBetween["immersiveBandwidth"] = within_immersive_kbits
betweenLocation["betweenLocation"] = RLocationBetween
betweenLocations.append(betweenLocation)
return self.client.addLocation(
{
"name": name,
# CUCM 10.6
"withinAudioBandwidth": within_audio_bw,
"withinVideoBandwidth": within_video_bw,
"withinImmersiveKbits": within_immersive_kbits,
"betweenLocations": betweenLocations,
}
)
except Fault as e:
return e
def delete_location(self, **args):
"""
Delete a location
:param name: The name of the location to delete
:param uuid: The uuid of the location to delete
:return: result dictionary
"""
try:
return self.client.removeLocation(**args)
except Fault as e:
return e
def update_location(self, **args):
"""
Update a Location
:param name:
:param uuid:
:param newName:
:param withinAudioBandwidth:
:param withinVideoBandwidth:
:param withImmersiveKbits:
:param betweenLocations:
:return:
"""
try:
return self.client.updateLocation(**args)
except Fault as e:
return e
def get_regions(self, tagfilter={"uuid": "", "name": ""}):
"""
Get region details
        :param tagfilter: tags to return for each region
        :return: A list of region dictionaries
"""
try:
return self.client.listRegion({"name": "%"}, returnedTags=tagfilter)[
"return"
]["region"]
except Fault as e:
return e
def get_region(self, **args):
"""
Get region information
:param name: Region name
:return: result dictionary
"""
try:
return self.client.getRegion(**args)["return"]["region"]
except Fault as e:
return e
def add_region(self, name):
"""
Add a region
:param name: Name of the region to add
:return: result dictionary
"""
try:
return self.client.addRegion({"name": name})
except Fault as e:
return e
def update_region(self, name="", newName="", moh_region=""):
"""
Update region and assign region to all other regions
        :param name: name of the region to update
        :param newName: new name for the region
        :param moh_region: name of the music-on-hold region
:return:
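
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client; region names are illustrative):
        >>> ucm.update_region('SYD_REG', newName='SYD_REG', moh_region='SYD_MOH_REG')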
"""
# Get all Regions
all_regions = self.client.listRegion({"name": "%"}, returnedTags={"name": ""})
# Make list of region names
region_names = [str(i["name"]) for i in all_regions["return"]["region"]]
# Build list of dictionaries to add to region api call
region_list = []
for i in region_names:
# Highest codec within a region
if i == name:
region_list.append(
{
"regionName": i,
"bandwidth": "256 kbps",
"videoBandwidth": "-1",
"immersiveVideoBandwidth": "-1",
"lossyNetwork": "Use System Default",
}
)
# Music on hold region name
elif i == moh_region:
region_list.append(
{
"regionName": i,
"bandwidth": "64 kbps",
"videoBandwidth": "-1",
"immersiveVideoBandwidth": "-1",
"lossyNetwork": "Use System Default",
}
)
# All else G.711
else:
region_list.append(
{
"regionName": i,
"bandwidth": "64 kbps",
"videoBandwidth": "-1",
"immersiveVideoBandwidth": "-1",
"lossyNetwork": "Use System Default",
}
)
try:
return self.client.updateRegion(
name=name,
newName=newName,
relatedRegions={"relatedRegion": region_list},
)
except Fault as e:
return e
def delete_region(self, **args):
"""
Delete a location
:param name: The name of the region to delete
:param uuid: The uuid of the region to delete
:return: result dictionary
"""
try:
return self.client.removeRegion(**args)
except Fault as e:
return e
def get_srsts(self, tagfilter={"uuid": ""}):
"""
Get all SRST details
        :param tagfilter: tags to return for each SRST reference
        :return: A list of SRST dictionaries
"""
try:
return self.client.listSrst({"name": "%"}, returnedTags=tagfilter)[
"return"
]["srst"]
except Fault as e:
return e
def get_srst(self, **args):
"""
Get SRST information
:param name: SRST name
:return: result dictionary
"""
try:
return self.client.getSrst(**args)["return"]["srst"]
except Fault as e:
return e
def add_srst(self, name, ip_address, port=2000, sip_port=5060):
"""
Add SRST
:param name: SRST name
:param ip_address: SRST ip address
:param port: SRST port
:param sip_port: SIP port
:return: result dictionary
"""
try:
return self.client.addSrst(
{
"name": name,
"port": port,
"ipAddress": ip_address,
"SipPort": sip_port,
}
)
except Fault as e:
return e
def delete_srst(self, name):
"""
Delete a SRST
:param name: The name of the SRST to delete
:return: result dictionary
"""
try:
return self.client.removeSrst(name=name)
except Fault as e:
return e
def update_srst(self, name, newName=""):
"""
Update a SRST
:param srst: The name of the SRST to update
:param newName: The new name of the SRST
:return: result dictionary
"""
try:
return self.client.updateSrst(name=name, newName=newName)
except Fault as e:
return e
def get_device_pools(
self,
tagfilter={
"name": "",
"dateTimeSettingName": "",
"callManagerGroupName": "",
"mediaResourceListName": "",
"regionName": "",
"srstName": "",
# 'localRouteGroup': [0],
},
):
"""
        Get device pools
        :param tagfilter: tags to return for each device pool
        :return: A list of device pool dictionaries
"""
try:
return self.client.listDevicePool({"name": "%"}, returnedTags=tagfilter,)[
"return"
]["devicePool"]
except Fault as e:
return e
def get_device_pool(self, **args):
"""
Get device pool parameters
:param name: device pool name
:return: result dictionary
"""
try:
return self.client.getDevicePool(**args)["return"]["devicePool"]
except Fault as e:
return e
def add_device_pool(
self,
name,
date_time_group="CMLocal",
region="Default",
location="Hub_None",
route_group="",
media_resource_group_list="",
srst="Disable",
cm_group="Default",
network_locale="",
):
"""
Add a device pool
        :param name: Device pool name
:param date_time_group: Date time group name
:param region: Region name
:param location: Location name
:param route_group: Route group name
:param media_resource_group_list: Media resource group list name
:param srst: SRST name
:param cm_group: CM Group name
:param network_locale: Network locale name
:return: result dictionary
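
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client and that the referenced groups exist):
        >>> ucm.add_device_pool('SYD_DP', date_time_group='CMLocal',
        ...                     region='Default', location='Hub_None')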
"""
try:
return self.client.addDevicePool(
{
"name": name,
"dateTimeSettingName": date_time_group, # update to state timezone
"regionName": region,
"locationName": location,
"localRouteGroup": {
"name": "Standard Local Route Group",
"value": route_group,
},
"mediaResourceListName": media_resource_group_list,
"srstName": srst,
"callManagerGroupName": cm_group,
"networkLocale": network_locale,
}
)
except Fault as e:
return e
def update_device_pool(self, **args):
"""
Update a device pools route group and media resource group list
:param name:
:param uuid:
:param newName:
:param mediaResourceGroupListName:
:param dateTimeSettingName:
:param callManagerGroupName:
:param regionName:
:param locationName:
:param networkLocale:
:param srstName:
:param localRouteGroup:
:param elinGroup:
:param media_resource_group_list:
:return:
"""
try:
return self.client.updateDevicePool(**args)
except Fault as e:
return e
def delete_device_pool(self, **args):
"""
Delete a Device pool
:param device_pool: The name of the Device pool to delete
:return: result dictionary
"""
try:
return self.client.removeDevicePool(**args)
except Fault as e:
return e
def get_conference_bridges(
self,
tagfilter={
"name": "",
"description": "",
"devicePoolName": "",
"locationName": "",
},
):
"""
Get conference bridges
        :param tagfilter: tags to return for each conference bridge
        :return: A list of conference bridge dictionaries
"""
try:
return self.client.listConferenceBridge(
{"name": "%"},
returnedTags=tagfilter,
)["return"]["conferenceBridge"]
except Fault as e:
return e
def get_conference_bridge(self, **args):
"""
Get conference bridge parameters
:param name: conference bridge name
:return: result dictionary
"""
try:
return self.client.getConferenceBridge(**args)["return"]["conferenceBridge"]
except Fault as e:
return e
def add_conference_bridge(
self,
name,
description="",
device_pool="Default",
location="Hub_None",
product="Cisco IOS Enhanced Conference Bridge",
security_profile="Non Secure Conference Bridge",
):
"""
Add a conference bridge
        :param name: Conference bridge name
:param description: Conference bridge description
:param device_pool: Device pool name
:param location: Location name
:param product: Conference bridge type
:param security_profile: Conference bridge security type
:return: result dictionary
"""
try:
return self.client.addConferenceBridge(
{
"name": name,
"description": description,
"devicePoolName": device_pool,
"locationName": location,
"product": product,
"securityProfileName": security_profile,
}
)
except Fault as e:
return e
def update_conference_bridge(self, **args):
"""
Update a conference bridge
:param name: Conference bridge name
:param newName: New Conference bridge name
:param description: Conference bridge description
:param device_pool: Device pool name
:param location: Location name
:param product: Conference bridge type
:param security_profile: Conference bridge security type
:return: result dictionary
"""
try:
return self.client.updateConferenceBridge(**args)
except Fault as e:
return e
def delete_conference_bridge(self, name):
"""
Delete a Conference bridge
:param name: The name of the Conference bridge to delete
:return: result dictionary
"""
try:
return self.client.removeConferenceBridge(name=name)
except Fault as e:
return e
def get_transcoders(
self, tagfilter={"name": "", "description": "", "devicePoolName": ""}
):
"""
Get transcoders
        :param tagfilter: tags to return for each transcoder
        :return: A list of transcoder dictionaries
"""
try:
return self.client.listTranscoder({"name": "%"}, returnedTags=tagfilter,)[
"return"
]["transcoder"]
except Fault as e:
return e
def get_transcoder(self, **args):
"""
        Get transcoder parameters
:param name: transcoder name
:return: result dictionary
"""
try:
return self.client.getTranscoder(**args)["return"]["transcoder"]
except Fault as e:
return e
def add_transcoder(
self,
name,
description="",
device_pool="Default",
product="Cisco IOS Enhanced Media Termination Point",
):
"""
Add a transcoder
        :param name: Transcoder name
        :param description: Transcoder description
        :param device_pool: Transcoder device pool
        :param product: Transcoder product
:return: result dictionary
"""
try:
return self.client.addTranscoder(
{
"name": name,
"description": description,
"devicePoolName": device_pool,
"product": product,
}
)
except Fault as e:
return e
def update_transcoder(self, **args):
"""
        Update a transcoder
        :param name: Transcoder name
        :param newName: New Transcoder name
        :param description: Transcoder description
        :param device_pool: Transcoder device pool
        :param product: Transcoder product
:return: result dictionary
"""
try:
return self.client.updateTranscoder(**args)
except Fault as e:
return e
def delete_transcoder(self, name):
"""
Delete a Transcoder
:param name: The name of the Transcoder to delete
:return: result dictionary
"""
try:
return self.client.removeTranscoder(name=name)
except Fault as e:
return e
def get_mtps(self, tagfilter={"name": "", "description": "", "devicePoolName": ""}):
"""
        Get MTPs
        :param tagfilter: tags to return for each MTP
        :return: A list of MTP dictionaries
"""
try:
return self.client.listMtp({"name": "%"}, returnedTags=tagfilter,)[
"return"
]["mtp"]
except Fault as e:
return e
def get_mtp(self, **args):
"""
        Get MTP parameters
        :param name: MTP name
:return: result dictionary
"""
try:
return self.client.getMtp(**args)["return"]["mtp"]
except Fault as e:
return e
def add_mtp(
self,
name,
description="",
device_pool="Default",
mtpType="Cisco IOS Enhanced Media Termination Point",
):
"""
Add an mtp
:param name: MTP name
:param description: MTP description
:param device_pool: MTP device pool
:param mtpType: MTP Type
:return: result dictionary
"""
try:
return self.client.addMtp(
{
"name": name,
"description": description,
"devicePoolName": device_pool,
"mtpType": mtpType,
}
)
except Fault as e:
return e
def update_mtp(self, **args):
"""
Update an MTP
:param name: MTP name
:param newName: New MTP name
:param description: MTP description
:param device_pool: MTP device pool
:param mtpType: MTP Type
:return: result dictionary
"""
try:
return self.client.updateMtp(**args)
except Fault as e:
return e
def delete_mtp(self, name):
"""
Delete an MTP
:param name: The name of the Transcoder to delete
:return: result dictionary
"""
try:
return self.client.removeMtp(name=name)
except Fault as e:
return e
def get_h323_gateways(
self,
tagfilter={
"name": "",
"description": "",
"devicePoolName": "",
"locationName": "",
"sigDigits": "",
},
):
"""
Get H323 Gateways
        :param tagfilter: tags to return for each H323 gateway
        :return: A list of H323 gateway dictionaries
"""
try:
return self.client.listH323Gateway({"name": "%"}, returnedTags=tagfilter,)[
"return"
]["h323Gateway"]
except Fault as e:
return e
def get_h323_gateway(self, **args):
"""
Get H323 Gateway parameters
:param name: H323 Gateway name
:return: result dictionary
"""
try:
return self.client.getH323Gateway(**args)["return"]["h323Gateway"]
except Fault as e:
return e
def add_h323_gateway(self, **args):
"""
Add H323 gateway
:param h323_gateway:
:param description:
:param device_pool:
:param location:
:param media_resource_group_list: Media resource group list name
:param prefix_dn:
:param sig_digits: Significant digits, 99 = ALL
:param css:
:param aar_css:
:param aar_neighborhood:
:param product:
:param protocol:
:param protocol_side:
:param pstn_access:
:param redirect_in_num_ie:
:param redirect_out_num_ie:
:param cld_party_ie_num_type:
:param clng_party_ie_num_type:
:param clng_party_nat_pre:
:param clng_party_inat_prefix:
:param clng_party_unknown_prefix:
:param clng_party_sub_prefix:
:param clng_party_nat_strip_digits:
:param clng_party_inat_strip_digits:
:param clng_party_unknown_strip_digits:
:param clng_party_sub_strip_digits:
:param clng_party_nat_trans_css:
:param clng_party_inat_trans_css:
:param clng_party_unknown_trans_css:
:param clng_party_sub_trans_css:
:return:
"""
try:
return self.client.addH323Gateway(**args)
except Fault as e:
return e
def update_h323_gateway(self, **args):
"""
:param name:
:return:
"""
try:
return self.client.updateH323Gateway(**args)
except Fault as e:
return e
def delete_h323_gateway(self, name):
"""
Delete a H323 gateway
:param name: The name of the H323 gateway to delete
:return: result dictionary
"""
try:
return self.client.removeH323Gateway(name=name)
except Fault as e:
return e
def get_route_groups(self, tagfilter={"name": "", "distributionAlgorithm": ""}):
"""
Get route groups
        :param tagfilter: tags to return for each route group
        :return: A list of route group dictionaries
"""
try:
return self.client.listRouteGroup({"name": "%"}, returnedTags=tagfilter)[
"return"
]["routeGroup"]
except Fault as e:
return e
def get_route_group(self, **args):
"""
Get route group
:param name: route group name
:param uuid: route group uuid
:return: result dictionary
"""
try:
return self.client.getRouteGroup(**args)["return"]["routeGroup"]
except Fault as e:
return e
def add_route_group(self, name, distribution_algorithm="Top Down", members=[]):
"""
Add a route group
:param name: Route group name
:param distribution_algorithm: Top Down/Circular
        :param members: A list of gateway/trunk device names to add (the devices must already exist)
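
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client; gateway names are illustrative):
        >>> ucm.add_route_group('PSTN_RG', distribution_algorithm='Circular',
        ...                     members=['SYD_GW01', 'SYD_GW02'])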
"""
req = {
"name": name,
"distributionAlgorithm": distribution_algorithm,
"members": {"member": []},
}
if members:
[
req["members"]["member"].append(
{
"deviceName": i,
"deviceSelectionOrder": members.index(i) + 1,
"port": 0,
}
)
for i in members
]
try:
return self.client.addRouteGroup(req)
except Fault as e:
return e
def delete_route_group(self, **args):
"""
Delete a Route group
:param name: The name of the Route group to delete
:return: result dictionary
"""
try:
return self.client.removeRouteGroup(**args)
except Fault as e:
return e
def update_route_group(self, **args):
"""
Update a Route group
:param name: The name of the Route group to update
:param distribution_algorithm: Top Down/Circular
        :param members: A list of gateway/trunk device names to add (the devices must already exist)
:return: result dictionary
"""
try:
return self.client.updateRouteGroup(**args)
except Fault as e:
return e
def get_route_lists(self, tagfilter={"name": "", "description": ""}):
"""
Get route lists
        :param tagfilter: tags to return for each route list
        :return: A list of route list dictionaries
"""
try:
return self.client.listRouteList({"name": "%"}, returnedTags=tagfilter)[
"return"
]["routeList"]
except Fault as e:
return e
def get_route_list(self, **args):
"""
Get route list
:param name: route list name
:param uuid: route list uuid
:return: result dictionary
"""
try:
return self.client.getRouteList(**args)["return"]["routeList"]
except Fault as e:
return e
def add_route_list(
self,
name,
description="",
cm_group_name="Default",
route_list_enabled="true",
run_on_all_nodes="false",
members=[],
):
"""
Add a route list
:param name: Route list name
:param description: Route list description
        :param cm_group_name: Route list call manager group name
:param route_list_enabled: Enable route list
:param run_on_all_nodes: Run route list on all nodes
:param members: A list of route groups
:return: Result dictionary
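
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client and an existing route group 'PSTN_RG'):
        >>> ucm.add_route_list('PSTN_RL', description='PSTN route list',
        ...                    members=['PSTN_RG'])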
"""
req = {
"name": name,
"description": description,
"callManagerGroupName": cm_group_name,
"routeListEnabled": route_list_enabled,
"runOnEveryNode": run_on_all_nodes,
"members": {"member": []},
}
if members:
[
req["members"]["member"].append(
{
"routeGroupName": i,
"selectionOrder": members.index(i) + 1,
"calledPartyTransformationMask": "",
"callingPartyTransformationMask": "",
"digitDiscardInstructionName": "",
"callingPartyPrefixDigits": "",
"prefixDigitsOut": "",
"useFullyQualifiedCallingPartyNumber": "Default",
"callingPartyNumberingPlan": "Cisco CallManager",
"callingPartyNumberType": "Cisco CallManager",
"calledPartyNumberingPlan": "Cisco CallManager",
"calledPartyNumberType": "Cisco CallManager",
}
)
for i in members
]
try:
return self.client.addRouteList(req)
except Fault as e:
return e
def delete_route_list(self, **args):
"""
Delete a Route list
:param name: The name of the Route list to delete
:param uuid: The uuid of the Route list to delete
:return: result dictionary
"""
try:
return self.client.removeRouteList(**args)
except Fault as e:
return e
def update_route_list(self, **args):
"""
Update a Route list
:param name: The name of the Route list to update
:param uuid: The uuid of the Route list to update
:param description: Route list description
        :param cm_group_name: Route list call manager group name
:param route_list_enabled: Enable route list
:param run_on_all_nodes: Run route list on all nodes
:param members: A list of route groups
:return: result dictionary
"""
try:
return self.client.updateRouteList(**args)
except Fault as e:
return e
def get_partitions(self, tagfilter={"name": "", "description": ""}):
"""
Get partitions
        :param tagfilter: tags to return for each partition
        :return: A list of partition dictionaries
"""
try:
return self.client.listRoutePartition(
{"name": "%"}, returnedTags=tagfilter
)["return"]["routePartition"]
except Fault as e:
return e
def get_partition(self, **args):
"""
Get partition details
        :param name: Partition name
        :param uuid: Partition uuid
:return: result dictionary
"""
try:
return self.client.getRoutePartition(**args)["return"]["routePartition"]
except Fault as e:
return e
def add_partition(self, name, description="", time_schedule_name="All the time"):
"""
Add a partition
:param name: Name of the partition to add
:param description: Partition description
:param time_schedule_name: Name of the time schedule to use
:return: result dictionary
"""
try:
return self.client.addRoutePartition(
{
"name": name,
"description": description,
"timeScheduleIdName": time_schedule_name,
}
)
except Fault as e:
return e
def delete_partition(self, **args):
"""
Delete a partition
        :param name: The name of the partition to delete
        :param uuid: The uuid of the partition to delete
:return: result dictionary
"""
try:
return self.client.removeRoutePartition(**args)
except Fault as e:
return e
def update_partition(self, **args):
"""
        Update a route partition
        :param uuid: Partition uuid
        :param name: Partition name
:param description:
:param newName:
:param timeScheduleIdName:
:param useOriginatingDeviceTimeZone:
:param timeZone:
:return: result dictionary
"""
try:
return self.client.updateRoutePartition(**args)
except Fault as e:
return e
def get_calling_search_spaces(self, tagfilter={"name": "", "description": ""}):
"""
Get calling search spaces
        :param tagfilter: tags to return for each calling search space
        :return: A list of calling search space dictionaries
"""
try:
return self.client.listCss({"name": "%"}, returnedTags=tagfilter)["return"][
"css"
]
except Fault as e:
return e
def get_calling_search_space(self, **args):
"""
Get Calling search space details
:param name: Calling search space name
:param uuid: Calling search space uuid
:return: result dictionary
"""
try:
return self.client.getCss(**args)["return"]["css"]
except Fault as e:
return e
def add_calling_search_space(self, name, description="", members=[]):
"""
Add a Calling search space
:param name: Name of the CSS to add
:param description: Calling search space description
:param members: A list of partitions to add to the CSS
:return: result dictionary
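
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client and that the partitions already exist):
        >>> ucm.add_calling_search_space('AU_NATIONAL_CSS',
        ...                              description='National dialling',
        ...                              members=['AU_PHONE_PT', 'AU_NATIONAL_PT'])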
"""
req = {
"name": name,
"description": description,
"members": {"member": []},
}
if members:
[
req["members"]["member"].append(
{
"routePartitionName": i,
"index": members.index(i) + 1,
}
)
for i in members
]
try:
return self.client.addCss(req)
except Fault as e:
return e
def delete_calling_search_space(self, **args):
"""
Delete a Calling search space
        :param name: The name of the calling search space to delete
        :param uuid: The uuid of the calling search space to delete
:return: result dictionary
"""
try:
return self.client.removeCss(**args)
except Fault as e:
return e
def update_calling_search_space(self, **args):
"""
Update calling search space
:param uuid: CSS UUID
:param name: CSS Name
:param description:
:param newName:
:param members:
:param removeMembers:
:param addMembers:
:return: result dictionary
"""
try:
return self.client.updateCss(**args)
except Fault as e:
return e
def get_route_patterns(
self, tagfilter={"pattern": "", "description": "", "uuid": ""}
):
"""
Get route patterns
        :param tagfilter: tags to return for each route pattern
        :return: A list of route pattern dictionaries
"""
try:
return self.client.listRoutePattern(
{"pattern": "%"},
returnedTags=tagfilter,
)["return"]["routePattern"]
except Fault as e:
return e
def get_route_pattern(self, pattern="", uuid=""):
"""
Get route pattern
:param pattern: route pattern
:param uuid: route pattern uuid
:return: result dictionary
"""
if uuid == "" and pattern != "":
# Cant get pattern directly so get UUID first
try:
uuid = self.client.listRoutePattern(
{"pattern": pattern}, returnedTags={"uuid": ""}
)
except Fault as e:
return e
if "return" in uuid and uuid["return"] is not None:
uuid = uuid["return"]["routePattern"][0]["uuid"]
try:
return self.client.getRoutePattern(uuid=uuid)["return"][
"routePattern"
]
except Fault as e:
return e
elif uuid != "" and pattern == "":
try:
return self.client.getRoutePattern(uuid=uuid)
except Fault as e:
return e
def add_route_pattern(
self,
pattern,
gateway="",
route_list="",
description="",
partition="",
blockEnable=False,
patternUrgency=False,
releaseClause="Call Rejected",
):
"""
Add a route pattern
:param pattern: Route pattern - required
        :param gateway: Destination gateway
        :param route_list: Destination route list
        Exactly one of gateway or route_list must be supplied, not both
:param description: Route pattern description
:param partition: Route pattern partition
:return: result dictionary
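
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client and an existing route list and partition):
        >>> ucm.add_route_pattern('0.0Z!', route_list='PSTN_RL',
        ...                       partition='AU_NATIONAL_PT',
        ...                       description='National calls')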
"""
req = {
"pattern": pattern,
"description": description,
"destination": {},
"routePartitionName": partition,
"blockEnable": blockEnable,
"releaseClause": releaseClause,
"useCallingPartyPhoneMask": "Default",
"networkLocation": "OnNet",
}
if gateway == "" and route_list == "":
return "Either a gateway OR route list, is a required parameter"
elif gateway != "" and route_list != "":
return "Enter a gateway OR route list, not both"
elif gateway != "":
req["destination"].update({"gatewayName": gateway})
elif route_list != "":
req["destination"].update({"routeListName": route_list})
try:
return self.client.addRoutePattern(req)
except Fault as e:
return e
def delete_route_pattern(self, **args):
"""
Delete a route pattern
:param uuid: The pattern uuid
:param pattern: The pattern of the route to delete
:param partition: The name of the partition
:return: result dictionary
"""
try:
return self.client.removeRoutePattern(**args)
except Fault as e:
return e
def update_route_pattern(self, **args):
"""
Update a route pattern
:param uuid: The pattern uuid
:param pattern: The pattern of the route to update
:param partition: The name of the partition
        :param gateway: Destination gateway
        :param route_list: Destination route list
        Exactly one of gateway or route_list may be supplied, not both
        :param description: Route pattern description
:return: result dictionary
"""
try:
return self.client.updateRoutePattern(**args)
except Fault as e:
return e
def get_media_resource_groups(self, tagfilter={"name": "", "description": ""}):
"""
Get media resource groups
        :param tagfilter: tags to return for each media resource group
        :return: A list of media resource group dictionaries
"""
try:
return self.client.listMediaResourceGroup(
{"name": "%"}, returnedTags=tagfilter
)["return"]["mediaResourceGroup"]
except Fault as e:
return e
def get_media_resource_group(self, **args):
"""
Get a media resource group details
        :param name: Media resource group name
:return: result dictionary
"""
try:
return self.client.getMediaResourceGroup(**args)["return"][
"mediaResourceGroup"
]
except Fault as e:
return e
def add_media_resource_group(
self, name, description="", multicast="false", members=[]
):
"""
Add a media resource group
:param name: Media resource group name
:param description: Media resource description
        :param multicast: Multicast enabled
:param members: Media resource group members
:return: result dictionary
"""
req = {
"name": name,
"description": description,
"multicast": multicast,
"members": {"member": []},
}
if members:
[req["members"]["member"].append({"deviceName": i}) for i in members]
try:
return self.client.addMediaResourceGroup(req)
except Fault as e:
return e
def update_media_resource_group(self, **args):
"""
Update a media resource group
:param name: Media resource group name
:param description: Media resource description
        :param multicast: Multicast enabled
:param members: Media resource group members
:return: result dictionary
"""
try:
return self.client.updateMediaResourceGroup(**args)
except Fault as e:
return e
def delete_media_resource_group(self, name):
"""
Delete a Media resource group
        :param name: The name of the media resource group to delete
:return: result dictionary
"""
try:
return self.client.removeMediaResourceGroup(name=name)
except Fault as e:
return e
def get_media_resource_group_lists(self, tagfilter={"name": ""}):
"""
        Get media resource group lists
        :param tagfilter: tags to return for each media resource group list
        :return: A list of media resource group list dictionaries
"""
try:
return self.client.listMediaResourceList(
{"name": "%"}, returnedTags=tagfilter
)["return"]["mediaResourceList"]
except Fault as e:
return e
def get_media_resource_group_list(self, name):
"""
Get a media resource group list details
:param name: Media resource group list name
:return: result dictionary
"""
try:
return self.client.getMediaResourceList(name=name)
except Fault as e:
return e
def add_media_resource_group_list(self, name, members=[]):
"""
Add a media resource group list
        :param name: Media resource group list name
:param members: A list of members
:return:
"""
req = {"name": name, "members": {"member": []}}
if members:
[
req["members"]["member"].append(
{"order": members.index(i), "mediaResourceGroupName": i}
)
for i in members
]
try:
return self.client.addMediaResourceList(req)
except Fault as e:
return e
def update_media_resource_group_list(self, **args):
"""
Update a media resource group list
        :param name: Media resource group list name
        :param members: Media resource group list members
:return: result dictionary
"""
try:
return self.client.updateMediaResourceList(**args)
except Fault as e:
return e
def delete_media_resource_group_list(self, name):
"""
Delete a Media resource group list
:param name: The name of the media resource group list to delete
:return: result dictionary
"""
try:
return self.client.removeMediaResourceList(name=name)
except Fault as e:
return e
def get_directory_numbers(
self,
tagfilter={
"pattern": "",
"description": "",
"routePartitionName": "",
},
):
"""
Get directory numbers
        :param tagfilter: tags to return for each directory number
        :return: A list of directory number dictionaries
"""
try:
return self.client.listLine({"pattern": "%"}, returnedTags=tagfilter,)[
"return"
]["line"]
except Fault as e:
return e
def get_directory_number(self, **args):
"""
Get directory number details
        :param pattern: directory number
        :param routePartitionName: route partition name
        :param uuid: directory number uuid
:return: result dictionary
"""
try:
return self.client.getLine(**args)["return"]["line"]
except Fault as e:
return e
def add_directory_number(
self,
pattern,
partition="",
description="",
alerting_name="",
ascii_alerting_name="",
shared_line_css="",
aar_neighbourhood="",
call_forward_css="",
vm_profile_name="NoVoiceMail",
aar_destination_mask="",
call_forward_destination="",
forward_all_to_vm="false",
forward_all_destination="",
forward_to_vm="false",
):
"""
Add a directory number
:param pattern: Directory number
:param partition: Route partition name
:param description: Directory number description
:param alerting_name: Alerting name
:param ascii_alerting_name: ASCII alerting name
:param shared_line_css: Calling search space
:param aar_neighbourhood: AAR group
:param call_forward_css: Call forward calling search space
:param vm_profile_name: Voice mail profile
:param aar_destination_mask: AAR destination mask
:param call_forward_destination: Call forward destination
:param forward_all_to_vm: Forward all to voice mail checkbox
:param forward_all_destination: Forward all destination
:param forward_to_vm: Forward to voice mail checkbox
:return: result dictionary
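
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client and an existing partition):
        >>> ucm.add_directory_number('77777', partition='AU_PHONE_PT',
        ...                          description='Jim Smith',
        ...                          alerting_name='Jim Smith')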
"""
try:
return self.client.addLine(
{
"pattern": pattern,
"routePartitionName": partition,
"description": description,
"alertingName": alerting_name,
"asciiAlertingName": ascii_alerting_name,
"voiceMailProfileName": vm_profile_name,
"shareLineAppearanceCssName": shared_line_css,
"aarNeighborhoodName": aar_neighbourhood,
"aarDestinationMask": aar_destination_mask,
"usage": "Device",
"callForwardAll": {
"forwardToVoiceMail": forward_all_to_vm,
"callingSearchSpaceName": call_forward_css,
"destination": forward_all_destination,
},
"callForwardBusy": {
"forwardToVoiceMail": forward_to_vm,
"callingSearchSpaceName": call_forward_css,
"destination": call_forward_destination,
},
"callForwardBusyInt": {
"forwardToVoiceMail": forward_to_vm,
"callingSearchSpaceName": call_forward_css,
"destination": call_forward_destination,
},
"callForwardNoAnswer": {
"forwardToVoiceMail": forward_to_vm,
"callingSearchSpaceName": call_forward_css,
"destination": call_forward_destination,
},
"callForwardNoAnswerInt": {
"forwardToVoiceMail": forward_to_vm,
"callingSearchSpaceName": call_forward_css,
"destination": call_forward_destination,
},
"callForwardNoCoverage": {
"forwardToVoiceMail": forward_to_vm,
"callingSearchSpaceName": call_forward_css,
"destination": call_forward_destination,
},
"callForwardNoCoverageInt": {
"forwardToVoiceMail": forward_to_vm,
"callingSearchSpaceName": call_forward_css,
"destination": call_forward_destination,
},
"callForwardOnFailure": {
"forwardToVoiceMail": forward_to_vm,
"callingSearchSpaceName": call_forward_css,
"destination": call_forward_destination,
},
"callForwardNotRegistered": {
"forwardToVoiceMail": forward_to_vm,
"callingSearchSpaceName": call_forward_css,
"destination": call_forward_destination,
},
"callForwardNotRegisteredInt": {
"forwardToVoiceMail": forward_to_vm,
"callingSearchSpaceName": call_forward_css,
"destination": call_forward_destination,
},
}
)
except Fault as e:
return e
def delete_directory_number(self, pattern="", routePartitionName="", uuid=""):
"""
Delete a directory number
        :param pattern: The directory number to delete
        :param routePartitionName: The partition of the directory number
        :param uuid: The uuid of the directory number to delete
:return: result dictionary
"""
if uuid != "":
try:
return self.client.removeLine(uuid=uuid)
except Fault as e:
return e
else:
try:
return self.client.removeLine(
pattern=pattern, routePartitionName=routePartitionName
)
except Fault as e:
return e
def update_directory_number(self, **args):
"""
Update a directory number
:param pattern: Directory number
:param partition: Route partition name
:param description: Directory number description
:param alerting_name: Alerting name
:param ascii_alerting_name: ASCII alerting name
:param shared_line_css: Calling search space
:param aar_neighbourhood: AAR group
:param call_forward_css: Call forward calling search space
:param vm_profile_name: Voice mail profile
:param aar_destination_mask: AAR destination mask
:param call_forward_destination: Call forward destination
:param forward_all_to_vm: Forward all to voice mail checkbox
:param forward_all_destination: Forward all destination
:param forward_to_vm: Forward to voice mail checkbox
:return: result dictionary
"""
try:
return self.client.updateLine(**args)
except Fault as e:
return e
def get_cti_route_points(self, tagfilter={"name": "", "description": ""}):
"""
Get CTI route points
        :param tagfilter: tags to return for each CTI route point
        :return: A list of CTI route point dictionaries
"""
try:
return self.client.listCtiRoutePoint({"name": "%"}, returnedTags=tagfilter)[
"return"
]["ctiRoutePoint"]
except Fault as e:
return e
def get_cti_route_point(self, **args):
"""
Get CTI route point details
:param name: CTI route point name
:param uuid: CTI route point uuid
:return: result dictionary
"""
try:
return self.client.getCtiRoutePoint(**args)["return"]["ctiRoutePoint"]
except Fault as e:
return e
def add_cti_route_point(
self,
name,
description="",
device_pool="Default",
location="Hub_None",
common_device_config="",
css="",
product="CTI Route Point",
dev_class="CTI Route Point",
protocol="SCCP",
protocol_slide="User",
use_trusted_relay_point="Default",
lines=[],
):
"""
Add CTI route point
lines should be a list of tuples containing the pattern and partition
EG: [('77777', 'AU_PHONE_PT')]
:param name: CTI route point name
:param description: CTI route point description
:param device_pool: Device pool name
:param location: Location name
:param common_device_config: Common device config name
:param css: Calling search space name
:param product: CTI device type
:param dev_class: CTI device type
:param protocol: CTI protocol
:param protocol_slide: CTI protocol slide
:param use_trusted_relay_point: Use trusted relay point: (Default, On, Off)
:param lines: A list of tuples of [(directory_number, partition)]
:return:
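
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client; names and patterns are illustrative):
        >>> ucm.add_cti_route_point('CTI_RP_01', description='IVR pilot',
        ...                         css='AU_PHONE_CSS',
        ...                         lines=[('78000', 'AU_PHONE_PT')])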
"""
req = {
"name": name,
"description": description,
"product": product,
"class": dev_class,
"protocol": protocol,
"protocolSide": protocol_slide,
"commonDeviceConfigName": common_device_config,
"callingSearchSpaceName": css,
"devicePoolName": device_pool,
"locationName": location,
"useTrustedRelayPoint": use_trusted_relay_point,
"lines": {"line": []},
}
if lines:
[
req["lines"]["line"].append(
{
"index": lines.index(i) + 1,
"dirn": {"pattern": i[0], "routePartitionName": i[1]},
}
)
for i in lines
]
try:
return self.client.addCtiRoutePoint(req)
except Fault as e:
return e
def delete_cti_route_point(self, **args):
"""
Delete a CTI route point
        :param name: The name of the CTI route point to delete
        :param uuid: The uuid of the CTI route point to delete
:return: result dictionary
"""
try:
return self.client.removeCtiRoutePoint(**args)
except Fault as e:
return e
def update_cti_route_point(self, **args):
"""
        Update a CTI route point
lines should be a list of tuples containing the pattern and partition
EG: [('77777', 'AU_PHONE_PT')]
:param name: CTI route point name
:param description: CTI route point description
:param device_pool: Device pool name
:param location: Location name
:param common_device_config: Common device config name
:param css: Calling search space name
:param product: CTI device type
:param dev_class: CTI device type
:param protocol: CTI protocol
:param protocol_slide: CTI protocol slide
:param use_trusted_relay_point: Use trusted relay point: (Default, On, Off)
:param lines: A list of tuples of [(directory_number, partition)]
:return:
"""
try:
return self.client.updateCtiRoutePoint(**args)
except Fault as e:
return e
def get_phones(
self,
query={"name": "%"},
tagfilter={
"name": "",
"product": "",
"description": "",
"protocol": "",
"locationName": "",
"callingSearchSpaceName": "",
},
):
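        """
        Get phones, paging through the AXL results 1000 at a time
        :param query: searchCriteria for listPhone, defaults to all phones
        :param tagfilter: tags to return for each phone
        :return: A list of phone dictionaries
        """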
skip = 0
a = []
def inner(skip):
while True:
res = self.client.listPhone(
searchCriteria=query, returnedTags=tagfilter, first=1000, skip=skip
)["return"]
skip = skip + 1000
if res is not None and "phone" in res:
yield res["phone"]
else:
break
for each in inner(skip):
a.extend(each)
return a
def get_phone(self, **args):
"""
        Get phone parameters
        :param name: phone name
        :param uuid: phone uuid
:return: result dictionary
"""
try:
return self.client.getPhone(**args)["return"]["phone"]
except Fault as e:
return e
def add_phone(
self,
name,
description="",
product="Cisco 7941",
device_pool="Default",
location="Hub_None",
phone_template="Standard 8861 SIP",
common_device_config="",
css="",
aar_css="",
subscribe_css="",
securityProfileName="",
lines=[],
dev_class="Phone",
protocol="SCCP",
softkey_template="Standard User",
enable_em="true",
em_service_name="Extension Mobility",
em_service_url=False,
em_url_button_enable=False,
em_url_button_index="1",
em_url_label="Press here to logon",
ehook_enable=1,
):
"""
        Add a phone
        lines takes a list of tuples with properties for each line, in order:
        (DN, partition, display, display ascii, label, external mask)
        EG: [('77777', 'LINE_PT', 'Jim Smith', 'Jim Smith', 'Jim Smith - 77777', '0294127777')]
:param name:
:param description:
:param product:
:param device_pool:
:param location:
:param phone_template:
:param common_device_config:
:param css:
:param aar_css:
:param subscribe_css:
:param lines:
:param dev_class:
:param protocol:
:param softkey_template:
:param enable_em:
:param em_service_name:
:param em_service_url:
:param em_url_button_enable:
:param em_url_button_index:
:param em_url_label:
:param ehook_enable:
:return:
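
        example usage (illustrative sketch; assumes an authenticated
        ``ucm = axl(...)`` client; device name, product, template and
        partition values must match what exists in your cluster):
        >>> ucm.add_phone('SEP001122334455', description='Jim Smith - desk',
        ...               product='Cisco 8861', protocol='SIP',
        ...               phone_template='Standard 8861 SIP',
        ...               lines=[('77777', 'AU_PHONE_PT', 'Jim Smith',
        ...                       'Jim Smith', 'Jim Smith - 77777', '0294127777')])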
"""
req = {
"name": name,
"description": description,
"product": product,
"class": dev_class,
"protocol": protocol,
"protocolSide": "User",
"commonDeviceConfigName": common_device_config,
"commonPhoneConfigName": "Standard Common Phone Profile",
"softkeyTemplateName": softkey_template,
"phoneTemplateName": phone_template,
"devicePoolName": device_pool,
"locationName": location,
"useTrustedRelayPoint": "Off",
"builtInBridgeStatus": "Default",
"certificateOperation": "No Pending Operation",
"packetCaptureMode": "None",
"deviceMobilityMode": "Default",
"enableExtensionMobility": enable_em,
"callingSearchSpaceName": css,
"automatedAlternateRoutingCssName": aar_css,
"subscribeCallingSearchSpaceName": subscribe_css,
"lines": {"line": []},
"services": {"service": []},
"vendorConfig": [{"ehookEnable": ehook_enable}],
}
if lines:
[
req["lines"]["line"].append(
{
"index": lines.index(i) + 1,
"dirn": {"pattern": i[0], "routePartitionName": i[1]},
"display": i[2],
"displayAscii": i[3],
"label": i[4],
"e164Mask": i[5],
}
)
for i in lines
]
if em_service_url:
req["services"]["service"].append(
[
{
"telecasterServiceName": em_service_name,
"name": em_service_name,
"url": "http://{0}:8080/emapp/EMAppServlet?device=#DEVICENAME#&EMCC=#EMCC#".format(
self.cucm
),
}
]
)
if em_url_button_enable:
req["services"]["service"][0].update(
{"urlButtonIndex": em_url_button_index, "urlLabel": em_url_label}
)
try:
return self.client.addPhone(req)
except Fault as e:
return e
def delete_phone(self, **args):
"""
Delete a phone
        :param name: The name of the phone to delete
        :param uuid: The uuid of the phone to delete
:return: result dictionary
"""
try:
return self.client.removePhone(**args)
except Fault as e:
return e
def update_phone(self, **args):
"""
        Update a phone
        lines takes a list of tuples with properties for each line, in order:
        (DN, partition, display, display ascii, label, external mask)
        EG: [('77777', 'LINE_PT', 'Jim Smith', 'Jim Smith', 'Jim Smith - 77777', '0294127777')]
:param name:
:param description:
:param product:
:param device_pool:
:param location:
:param phone_template:
:param common_device_config:
:param css:
:param aar_css:
:param subscribe_css:
:param lines:
:param dev_class:
:param protocol:
:param softkey_template:
:param enable_em:
:param em_service_name:
:param em_service_url:
:param em_url_button_enable:
:param em_url_button_index:
:param em_url_label:
:param ehook_enable:
:return:
"""
try:
return self.client.updatePhone(**args)
except Fault as e:
return e
def get_device_profiles(
self,
tagfilter={
"name": "",
"product": "",
"protocol": "",
"phoneTemplateName": "",
},
):
"""
Get device profile details
        :param tagfilter: tags to return for each device profile
        :return: A list of device profile dictionaries
"""
try:
return self.client.listDeviceProfile(
{"name": "%"},
returnedTags=tagfilter,
)["return"]["deviceProfile"]
except Fault as e:
return e
def get_device_profile(self, **args):
"""
Get device profile parameters
:param name: profile name
:param uuid: profile uuid
:return: result dictionary
"""
try:
return self.client.getDeviceProfile(**args)["return"]["deviceProfile"]
except Fault as e:
return e
def add_device_profile(
self,
name,
description="",
product="Cisco 7962",
phone_template="Standard 7962G SCCP",
dev_class="Device Profile",
protocol="SCCP",
protocolSide="User",
softkey_template="Standard User",
em_service_name="Extension Mobility",
lines=[],
):
"""
        Add a device profile for use with extension mobility
        lines takes a list of tuples with properties for each line, in order:
        (DN, partition, display, display ascii, label, external mask)
        EG: [('77777', 'LINE_PT', 'Jim Smith', 'Jim Smith', 'Jim Smith - 77777', '0294127777')]
:param name:
:param description:
:param product:
:param phone_template:
:param lines:
:param dev_class:
:param protocol:
:param softkey_template:
:param em_service_name:
:return:
"""
req = {
"name": name,
"description": description,
"product": product,
"class": dev_class,
"protocol": protocol,
"protocolSide": protocolSide,
"softkeyTemplateName": softkey_template,
"phoneTemplateName": phone_template,
"lines": {"line": []},
}
if lines:
# enumerate gives a stable 1-based index even if duplicate line tuples are passed
for index, line in enumerate(lines, start=1):
req["lines"]["line"].append(
{
"index": index,
"dirn": {"pattern": line[0], "routePartitionName": line[1]},
"display": line[2],
"displayAscii": line[3],
"label": line[4],
"e164Mask": line[5],
}
)
try:
return self.client.addDeviceProfile(req)
except Fault as e:
return e
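# Usage sketch (illustrative only): `ucm` is an assumed authenticated instance
# of this class; 'LINE_PT' and the DN are placeholders that must already exist
# on the cluster.
#
#   resp = ucm.add_device_profile(
#       name="UDP-JSMITH",
#       description="Jim Smith EM profile",
#       lines=[("77777", "LINE_PT", "Jim Smith", "Jim Smith",
#               "Jim Smith - 77777", "0294127777")],
#   )
#   if isinstance(resp, Fault):
#       print(resp)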
def delete_device_profile(self, **args):
"""
Delete a device profile
:param profile: The name of the device profile to delete
:return: result dictionary
"""
try:
return self.client.removeDeviceProfile(**args)
except Fault as e:
return e
def update_device_profile(self, **args):
"""
Update a device profile for use with extension mobility
`lines` takes a list of tuples with properties for each line, in order:
(DN, partition, display, display ascii, label, external mask), e.g.:
[('77777', 'LINE_PT', 'Jim Smith', 'Jim Smith', 'Jim Smith - 77777', '0294127777')]
:param name:
:param description:
:param product:
:param phone_template:
:param lines:
:param dev_class:
:param protocol:
:param softkey_template:
:param em_service_name:
:return:
"""
try:
return self.client.updateDeviceProfile(**args)
except Fault as e:
return e
def get_users(self, tagfilter={"userid": "", "firstName": "", "lastName": ""}):
"""
Get users details
Parameters
-------
tagfilter : dictionary, optional
userid: None or uuid of user
firstName: None or first name of user
lastName: None or last name of user
Returns
-------
users
A list of Users
"""
users = []
skip = 0
# page through listUser 1000 records at a time until no more users are returned
while True:
res = self.client.listUser(
{"userid": "%"}, returnedTags=tagfilter, first=1000, skip=skip
)["return"]
skip = skip + 1000
if res is not None and "user" in res:
users.extend(res["user"])
else:
break
return users
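# Usage sketch (illustrative only): paging is handled internally, so a single
# call returns every user, fetched 1000 at a time under the hood.
#
#   for user in ucm.get_users():
#       print(user["userid"], user["firstName"], user["lastName"])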
def get_user(self, userid):
"""
Get user parameters
:param userid: user ID of the user to look up
:return: result dictionary
"""
try:
return self.client.getUser(userid=userid)["return"]["user"]
except Fault as e:
return e
def add_user(
self,
userid,
lastName,
firstName,
presenceGroupName="Standard Presence group",
phoneProfiles=[],
):
"""
Add a user
:param userid: user ID of the user to add
:param firstName: first name of the user to add
:param lastName: last name of the user to add
:param presenceGroupName: presence group to assign - optional
:param phoneProfiles: phone profiles to associate - optional
:return: result dictionary
"""
try:
return self.client.addUser(
{
"userid": userid,
"lastName": lastName,
"firstName": firstName,
"presenceGroupName": presenceGroupName,
"phoneProfiles": phoneProfiles,
}
)
except Fault as e:
return e
def update_user(self, **args):
"""
Update an end user
:param userid: User ID of the user to update
:param **args: any other updateUser fields to change
:return: result dictionary
"""
try:
return self.client.updateUser(**args)
except Fault as e:
return e
def update_user_em(
self, user_id, device_profile, default_profile, subscribe_css, primary_extension
):
"""
Update end user for extension mobility
:param user_id: User ID
:param device_profile: Device profile name
:param default_profile: Default profile name
:param subscribe_css: Subscribe CSS
:param primary_extension: Primary extension, must be a number from the device profile
:return: result dictionary
"""
try:
resp = self.client.getDeviceProfile(name=device_profile)
except Fault as e:
return e
if "return" in resp and resp["return"] is not None:
uuid = resp["return"]["deviceProfile"]["uuid"]
try:
return self.client.updateUser(
userid=user_id,
phoneProfiles={"profileName": {"uuid": uuid}},
defaultProfile=default_profile,
subscribeCallingSearchSpaceName=subscribe_css,
primaryExtension={"pattern": primary_extension},
associatedGroups={"userGroup": {"name": "Standard CCM End Users"}},
)
except Fault as e:
return e
else:
return "Device Profile not found for user"
def update_user_credentials(self, userid, password="", pin=""): # nosec
"""
Update end user for credentials
:param userid: User ID
:param password: Web interface password
:param pin: Extension mobility PIN
:return: result dictionary
"""
if password == "" and pin == "": # nosec
return "Password and/or Pin are required"
elif password != "" and pin != "": # nosec
try:
return self.client.updateUser(
userid=userid, password=password, pin=pin
) # nosec
except Fault as e:
return e
elif password != "": # nosec
try:
return self.client.updateUser(userid=userid, password=password)
except Fault as e:
return e
elif pin != "":
try:
return self.client.updateUser(userid=userid, pin=pin)
except Fault as e:
return e
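# Usage sketch (illustrative only): either field may be omitted, but not both.
#
#   ucm.update_user_credentials(userid="jsmith", pin="13579")  # PIN only
#   ucm.update_user_credentials(userid="jsmith", password="S3cret!", pin="13579")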
def delete_user(self, **args):
"""
Delete a user
:param userid: The name of the user to delete
:return: result dictionary
"""
try:
return self.client.removeUser(**args)
except Fault as e:
return e
def get_translations(self):
"""
Get translation patterns
:return: a list of dictionaries
"""
try:
return self.client.listTransPattern(
{"pattern": "%"},
returnedTags={
"pattern": "",
"description": "",
"uuid": "",
"routePartitionName": "",
"callingSearchSpaceName": "",
"useCallingPartyPhoneMask": "",
"patternUrgency": "",
"provideOutsideDialtone": "",
"prefixDigitsOut": "",
"calledPartyTransformationMask": "",
"callingPartyTransformationMask": "",
"digitDiscardInstructionName": "",
"callingPartyPrefixDigits": "",
},
)["return"]["transPattern"]
except Fault as e:
return e
def get_translation(self, pattern="", routePartitionName="", uuid=""):
"""
Get translation pattern
:param pattern: translation pattern to match
:param routePartitionName: routePartitionName required if searching pattern
:param uuid: translation pattern uuid
:return: result dictionary
"""
if pattern != "" and routePartitionName != "" and uuid == "":
try:
return self.client.getTransPattern(
pattern=pattern,
routePartitionName=routePartitionName,
returnedTags={
"pattern": "",
"description": "",
"routePartitionName": "",
"callingSearchSpaceName": "",
"useCallingPartyPhoneMask": "",
"patternUrgency": "",
"provideOutsideDialtone": "",
"prefixDigitsOut": "",
"calledPartyTransformationMask": "",
"callingPartyTransformationMask": "",
"digitDiscardInstructionName": "",
"callingPartyPrefixDigits": "",
},
)
except Fault as e:
return e
elif uuid != "" and pattern == "" and routePartitionName == "":
try:
return self.client.getTransPattern(
uuid=uuid,
returnedTags={
"pattern": "",
"description": "",
"routePartitionName": "",
"callingSearchSpaceName": "",
"useCallingPartyPhoneMask": "",
"patternUrgency": "",
"provideOutsideDialtone": "",
"prefixDigitsOut": "",
"calledPartyTransformationMask": "",
"callingPartyTransformationMask": "",
"digitDiscardInstructionName": "",
"callingPartyPrefixDigits": "",
},
)
except Fault as e:
return e
else:
return "must specify either uuid OR pattern and partition"
def add_translation(
self,
pattern,
partition,
description="",
usage="Translation",
callingSearchSpaceName="",
useCallingPartyPhoneMask="Off",
patternUrgency="f",
provideOutsideDialtone="f",
prefixDigitsOut="",
calledPartyTransformationMask="",
callingPartyTransformationMask="",
digitDiscardInstructionName="",
callingPartyPrefixDigits="",
blockEnable="f",
routeNextHopByCgpn="f",
):
"""
Add a translation pattern
:param pattern: Translation pattern
:param partition: Route Partition
:param description: Description - optional
:param usage: Usage
:param callingSearchSpaceName: Calling Search Space - optional
:param patternUrgency: Pattern Urgency - optional
:param provideOutsideDialtone: Provide Outside Dial Tone - optional
:param prefixDigitsOut: Prefix Digits Out - optional
:param calledPartyTransformationMask: - optional
:param callingPartyTransformationMask: - optional
:param digitDiscardInstructionName: - optional
:param callingPartyPrefixDigits: - optional
:param blockEnable: - optional
:return: result dictionary
"""
try:
return self.client.addTransPattern(
{
"pattern": pattern,
"description": description,
"routePartitionName": partition,
"usage": usage,
"callingSearchSpaceName": callingSearchSpaceName,
"useCallingPartyPhoneMask": useCallingPartyPhoneMask,
"patternUrgency": patternUrgency,
"provideOutsideDialtone": provideOutsideDialtone,
"prefixDigitsOut": prefixDigitsOut,
"calledPartyTransformationMask": calledPartyTransformationMask,
"callingPartyTransformationMask": callingPartyTransformationMask,
"digitDiscardInstructionName": digitDiscardInstructionName,
"callingPartyPrefixDigits": callingPartyPrefixDigits,
"blockEnable": blockEnable,
}
)
except Fault as e:
return e
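# Usage sketch (illustrative only): pattern and partition are placeholders and
# the partition must already exist on the cluster.
#
#   resp = ucm.add_translation(
#       pattern="9.XXXXXXXXXX",
#       partition="XLATE-PT",
#       description="strip access code",
#       digitDiscardInstructionName="PreDot",
#       provideOutsideDialtone="t",
#   )
#   if isinstance(resp, Fault):
#       print(resp)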
def delete_translation(self, pattern="", partition="", uuid=""):
"""
Delete a translation pattern
:param pattern: The pattern of the route to delete
:param partition: The name of the partition
:param uuid: Required if pattern and partition are not specified
:return: result dictionary
"""
if pattern != "" and partition != "" and uuid == "":
try:
return self.client.removeTransPattern(
pattern=pattern, routePartitionName=partition
)
except Fault as e:
return e
elif uuid != "" and pattern == "" and partition == "":
try:
return self.client.removeTransPattern(uuid=uuid)
except Fault as e:
return e
else:
return "must specify either uuid OR pattern and partition"
def update_translation(
self,
pattern="",
partition="",
uuid="",
newPattern="",
description="",
newRoutePartitionName="",
callingSearchSpaceName="",
useCallingPartyPhoneMask="",
patternUrgency="",
provideOutsideDialtone="",
prefixDigitsOut="",
calledPartyTransformationMask="",
callingPartyTransformationMask="",
digitDiscardInstructionName="",
callingPartyPrefixDigits="",
blockEnable="",
):
"""
Update a translation pattern
:param uuid: UUID, or pattern + partition, required
:param pattern: Translation pattern
:param partition: Route Partition
:param description: Description - optional
:param callingSearchSpaceName: Calling Search Space - optional
:param patternUrgency: Pattern Urgency - optional
:param provideOutsideDialtone: Provide Outside Dial Tone - optional
:param prefixDigitsOut: Prefix Digits Out - optional
:param calledPartyTransformationMask: - optional
:param callingPartyTransformationMask: - optional
:param digitDiscardInstructionName: - optional
:param callingPartyPrefixDigits: - optional
:param blockEnable: - optional
:return: result dictionary
"""
args = {}
if description != "":
args["description"] = description
if pattern != "" and partition != "" and uuid == "":
args["pattern"] = pattern
args["routePartitionName"] = partition
if pattern == "" and partition == "" and uuid != "":
args["uuid"] = uuid
if newPattern != "":
args["newPattern"] = newPattern
if newRoutePartitionName != "":
args["newRoutePartitionName"] = newRoutePartitionName
if callingSearchSpaceName != "":
args["callingSearchSpaceName"] = callingSearchSpaceName
if useCallingPartyPhoneMask != "":
args["useCallingPartyPhoneMask"] = useCallingPartyPhoneMask
if digitDiscardInstructionName != "":
args["digitDiscardInstructionName"] = digitDiscardInstructionName
if callingPartyTransformationMask != "":
args["callingPartyTransformationMask"] = callingPartyTransformationMask
if calledPartyTransformationMask != "":
args["calledPartyTransformationMask"] = calledPartyTransformationMask
if patternUrgency != "":
args["patternUrgency"] = patternUrgency
if provideOutsideDialtone != "":
args["provideOutsideDialtone"] = provideOutsideDialtone
if prefixDigitsOut != "":
args["prefixDigitsOut"] = prefixDigitsOut
if callingPartyPrefixDigits != "":
args["callingPartyPrefixDigits"] = callingPartyPrefixDigits
if blockEnable != "":
args["blockEnable"] = blockEnable
try:
return self.client.updateTransPattern(**args)
except Fault as e:
return e
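# Usage sketch (illustrative only): identify the pattern either by uuid or by
# pattern + partition, then pass only the fields you want changed.
#
#   ucm.update_translation(
#       pattern="9.XXXXXXXXXX",
#       partition="XLATE-PT",
#       description="strip access code (updated)",
#       patternUrgency="t",
#   )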
def list_route_plan(self, pattern=""):
"""
List Route Plan
:param pattern: Route Plan Contains Pattern
:return: results dictionary
"""
try:
return self.client.listRoutePlan(
{"dnOrPattern": "%" + pattern + "%"},
returnedTags={
"dnOrPattern": "",
"partition": "",
"type": "",
"routeDetail": "",
},
)["return"]["routePlan"]
except Fault as e:
return e
def list_route_plan_specific(self, pattern=""):
"""
List Route Plan
:param pattern: Route Plan Contains Pattern
:return: results dictionary
"""
try:
return self.client.listRoutePlan(
{"dnOrPattern": pattern},
returnedTags={
"dnOrPattern": "",
"partition": "",
"type": "",
"routeDetail": "",
},
)
except Fault as e:
return e
def get_called_party_xforms(self):
"""
Get called party xforms
:return: a list of dictionaries
"""
try:
return self.client.listCalledPartyTransformationPattern(
{"pattern": "%"},
returnedTags={"pattern": "", "description": "", "uuid": ""},
)["return"]["calledPartyTransformationPattern"]
except Fault as e:
return e
def get_called_party_xform(self, **args):
"""
Get called party xform details
:param pattern:
:param partition:
:param uuid:
:return: result dictionary
"""
try:
return self.client.getCalledPartyTransformationPattern(**args)["return"][
"calledPartyTransformationPattern"
]
except Fault as e:
return e
def add_called_party_xform(
self,
pattern="",
description="",
partition="",
calledPartyPrefixDigits="",
calledPartyTransformationMask="",
digitDiscardInstructionName="",
):
"""
Add a called party transformation pattern
:param pattern: pattern - required
:param routePartitionName: partition required
:param description: Route pattern description
:param calledPartyTransformationMask:
:param dialPlanName:
:param digitDiscardInstructionName:
:param routeFilterName:
:param calledPartyPrefixDigits:
:param calledPartyNumberingPlan:
:param calledPartyNumberType:
:param mlppPreemptionDisabled: rarely used - optional
:return: result dictionary
"""
try:
return self.client.addCalledPartyTransformationPattern(
{
"pattern": pattern,
"description": description,
"routePartitionName": partition,
"calledPartyPrefixDigits": calledPartyPrefixDigits,
"calledPartyTransformationMask": calledPartyTransformationMask,
"digitDiscardInstructionName": digitDiscardInstructionName,
}
)
except Fault as e:
return e
def delete_called_party_xform(self, **args):
"""
Delete a called party transformation pattern
:param uuid: The pattern uuid
:param pattern: The pattern of the transformation to delete
:param partition: The name of the partition
:return: result dictionary
"""
try:
return self.client.removeCalledPartyTransformationPattern(**args)
except Fault as e:
return e
def update_called_party_xform(self, **args):
"""
Update a called party transformation
:param uuid: required unless pattern and routePartitionName is given
:param pattern: pattern - required
:param routePartitionName: partition required
:param description: Route pattern description
:param calledPartyTransformationMask:
:param dialPlanName:
:param digitDiscardInstructionName:
:param routeFilterName:
:param calledPartyPrefixDigits:
:param calledPartyNumberingPlan:
:param calledPartyNumberType:
:param mlppPreemptionDisabled: rarely used - optional
:return: result dictionary
"""
try:
return self.client.updateCalledPartyTransformationPattern(**args)
except Fault as e:
return e
def get_calling_party_xforms(self):
"""
Get calling party xforms
:return: a list of dictionaries
"""
try:
return self.client.listCallingPartyTransformationPattern(
{"pattern": "%"},
returnedTags={"pattern": "", "description": "", "uuid": ""},
)["return"]["callingPartyTransformationPattern"]
except Fault as e:
return e
def get_calling_party_xform(self, **args):
"""
Get calling party xform details
:param pattern:
:param partition:
:param uuid:
:return: result dictionary
"""
try:
return self.client.getCallingPartyTransformationPattern(**args)["return"][
"callingPartyTransformationPattern"
]
except Fault as e:
return e
def add_calling_party_xform(
self,
pattern="",
description="",
partition="",
callingPartyPrefixDigits="",
callingPartyTransformationMask="",
digitDiscardInstructionName="",
):
"""
Add a calling party transformation pattern
:param pattern: pattern - required
:param routePartitionName: partition required
:param description: Route pattern description
:param callingPartyTransformationMask:
:param dialPlanName:
:param digitDiscardInstructionName:
:param routeFilterName:
:param callingPartyPrefixDigits:
:param callingPartyNumberingPlan:
:param callingPartyNumberType:
:param mlppPreemptionDisabled: rarely used - optional
:return: result dictionary
"""
try:
return self.client.addCallingPartyTransformationPattern(
{
"pattern": pattern,
"description": description,
"routePartitionName": partition,
"callingPartyPrefixDigits": callingPartyPrefixDigits,
"callingPartyTransformationMask": callingPartyTransformationMask,
"digitDiscardInstructionName": digitDiscardInstructionName,
}
)
except Fault as e:
return e
def delete_calling_party_xform(self, **args):
"""
Delete a calling party transformation pattern
:param uuid: The pattern uuid
:param pattern: The pattern of the transformation to delete
:param partition: The name of the partition
:return: result dictionary
"""
try:
return self.client.removeCallingPartyTransformationPattern(**args)
except Fault as e:
return e
def update_calling_party_xform(self, **args):
"""
Update a calling party transformation
:param uuid: required unless pattern and routePartitionName is given
:param pattern: pattern - required
:param routePartitionName: partition required
:param description: Route pattern description
:param callingPartyTransformationMask:
:param dialPlanName:
:param digitDiscardInstructionName:
:param routeFilterName:
:param callingPartyPrefixDigits:
:param callingPartyNumberingPlan:
:param callingPartyNumberType:
:param mlppPreemptionDisabled: rarely used - optional
:return: result dictionary
"""
try:
return self.client.updateCallingPartyTransformationPattern(**args)
except Fault as e:
return e
def get_sip_trunks(
self, tagfilter={"name": "", "sipProfileName": "", "callingSearchSpaceName": ""}
):
"""
Get SIP trunks
:param tagfilter: returned tags to include for each trunk
:return: a list of dictionaries
"""
try:
return self.client.listSipTrunk({"name": "%"}, returnedTags=tagfilter)[
"return"
]["sipTrunk"]
except Fault as e:
return e
def get_sip_trunk(self, **args):
"""
Get sip trunk
:param name:
:param uuid:
:return: result dictionary
"""
try:
return self.client.getSipTrunk(**args)["return"]["sipTrunk"]
except Fault as e:
return e
def update_sip_trunk(self, **args):
"""
Update a SIP Trunk
:param name:
:param uuid:
:param newName:
:param description:
:param callingSearchSpaceName:
:param devicePoolName:
:param locationName:
:param sipProfileName:
:param mtpRequired:
:return:
"""
try:
return self.client.updateSipTrunk(**args)
except Fault as e:
return e
def delete_sip_trunk(self, **args):
"""
Delete a SIP trunk
:param name: name of the SIP trunk to delete
:param uuid: uuid of the SIP trunk to delete
:return: result dictionary
"""
try:
return self.client.removeSipTrunk(**args)
except Fault as e:
return e
def get_sip_security_profile(self, name):
"""
Get SIP trunk security profile
:param name: profile name
:return: result dictionary
"""
try:
return self.client.getSipTrunkSecurityProfile(name=name)["return"]
except Fault as e:
return e
def get_sip_profile(self, name):
"""
Get SIP profile
:param name: profile name
:return: result dictionary
"""
try:
return self.client.getSipProfile(name=name)["return"]
except Fault as e:
return e
def add_sip_trunk(self, **args):
"""
Add a SIP Trunk
:param name:
:param description:
:param product:
:param protocol:
:param protocolSide:
:param callingSearchSpaceName:
:param devicePoolName:
:param securityProfileName:
:param sipProfileName:
:param destinations: param destination:
:param runOnEveryNode:
:return:
"""
try:
return self.client.addSipTrunk(**args)
except Fault as e:
return e
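# Usage sketch (illustrative only): the nested `destinations` shape below is an
# assumption based on the AXL addSipTrunk schema and may need adjusting for
# your AXL version; all names and the IP are placeholders.
#
#   resp = ucm.add_sip_trunk(
#       name="CUBE-TRUNK",
#       description="Trunk to CUBE",
#       product="SIP Trunk",
#       protocol="SIP",
#       protocolSide="Network",
#       devicePoolName="HQ-DP",
#       securityProfileName="Non Secure SIP Trunk Profile",
#       sipProfileName="Standard SIP Profile",
#       destinations={"destination": [{"addressIpv4": "10.10.10.10",
#                                      "port": 5060, "sortOrder": 1}]},
#   )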
def list_process_nodes(self):
"""
List CUCM Voice/Video process nodes
:return: a list of process node names
"""
try:
return self.client.listProcessNode(
{"name": "%", "processNodeRole": "CUCM Voice/Video"},
returnedTags={"name": ""},
)["return"]["processNode"]
except Fault as e:
return e
def add_call_manager_group(self, name, members):
"""
Add call manager group
:param name: name of cmg
:param members[]: array of members
:return: result dictionary
"""
try:
return self.client.addCallManagerGroup({"name": name, "members": members})
except Fault as e:
return e
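# Usage sketch (illustrative only): the `members` shape is an assumption based
# on the AXL addCallManagerGroup schema; node names are placeholders.
#
#   members = {"member": [
#       {"callManagerName": "CUCM-PUB", "priority": 0},
#       {"callManagerName": "CUCM-SUB1", "priority": 1},
#   ]}
#   resp = ucm.add_call_manager_group("HQ-CMG", members)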
def get_call_manager_group(self, name):
"""
Get call manager group
:param name: name of cmg
:return: result dictionary
"""
try:
return self.client.getCallManagerGroup(name=name)
except Fault as e:
return e
def get_call_manager_groups(self):
"""
Get call manager groups
:return: a list of dictionaries
"""
try:
return self.client.listCallManagerGroup(
{"name": "%"}, returnedTags={"name": ""}
)["return"]["callManagerGroup"]
except Fault as e:
return e
def update_call_manager_group(self, **args):
"""
Update call manager group
:param name: name of cmg
:return: result dictionary
"""
try:
return self.client.listCallManagerGroup({**args}, returnedTags={"name": ""})
except Fault as e:
return e
def delete_call_manager_group(self, name):
"""
Delete call manager group
:param name: name of cmg
:return: result dictionary
"""
try:
return self.client.removeCallManagerGroup(name=name)
except Fault as e:
return e
# Hunt Pilot Methods
def get_hunt_pilots(
self,
tagfilter={
"pattern": "",
"description": "",
"routePartitionName": "",
},
):
"""
Get hunt pilots
:return: a list of dictionaries
"""
try:
response = self.client.listHuntPilot(
{"pattern": "%"},
returnedTags=tagfilter,
)["return"]
if response:
return response["huntPilot"]
else:
return response
except Fault as e:
return e
def get_hunt_pilot(self, **args):
"""
Get hunt pilot details
:param name:
:param partition:
:return: result dictionary
"""
try:
return self.client.getHuntPilot(**args)["return"]["huntPilot"]
except Fault as e:
return e
def add_hunt_pilot(self, **args):
"""
Add a Hunt Pilot minimal params needed
:param pattern: pattern - required
:param routePartitionName: partition required
:param description: Hunt Pilot pattern description
:param useCallingPartyPhoneMask: "Off" or "On"
:param blockEnable: boolean (true or false)
:param huntListName:
:return: result dictionary
"""
try:
return self.client.addHuntPilot({**args})
except Fault as e:
return e
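# Usage sketch (illustrative only): the hunt list and partition must already
# exist; the pattern and names are placeholders.
#
#   resp = ucm.add_hunt_pilot(
#       pattern="5000",
#       routePartitionName="HUNT-PT",
#       description="helpdesk hunt pilot",
#       huntListName="HELPDESK-HL",
#   )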
def update_hunt_pilot(self, **args):
"""
Update a Hunt Pilot
:param pattern: pattern - required
:param routePartitionName: partition required
:param description: Hunt Pilot pattern description
:param useCallingPartyPhoneMask: "Off" or "On"
:param blockEnable: boolean (true or false)
:param huntListName:
:return:
"""
try:
return self.client.updateHuntPilot(**args)
except Fault as e:
return e
def delete_hunt_pilot(self, **args):
"""
Delete a Hunt Pilot
:param uuid: The pattern uuid
:param pattern: The pattern of the transformation to delete
:param partition: The name of the partition
:return: result dictionary
"""
try:
return self.client.removeHuntPilot(**args)
except Fault as e:
return e
# Hunt List Methods
def get_hunt_lists(
self,
tagfilter={
"name": "",
"callManagerGroupName": "",
"routeListEnabled": "",
"voiceMailUsage": "",
"description": "",
},
):
"""
Get hunt lists
:return: a list of dictionaries
"""
try:
response = self.client.listHuntList(
{"name": "%"},
returnedTags=tagfilter,
)["return"]
if response:
return response["huntList"]
else:
return response
except Fault as e:
return e
def get_hunt_list(self, **args):
"""
Get hunt list details
:param name:
:param partition:
:return: result dictionary
"""
try:
return self.client.getHuntList(**args)["return"]["huntList"]
except Fault as e:
return e
def add_hunt_list(self, **args):
"""
Add a Hunt list minimal params needed
:param name: - required
:param callManagerGroup: - required
:param description: str
:param routeListEnabled: bool
:param voiceMailUsage: bool
:return: result dictionary
"""
try:
return self.client.addHuntList({**args})
except Fault as e:
return e
def update_hunt_list(self, **args):
"""
Update a Hunt List
:param name: - required
:param callManagerGroup: - required
:param description: str
:param routeListEnabled: bool
:param voiceMailUsage: bool
:return:
"""
try:
return self.client.updateHuntList(**args)
except Fault as e:
return e
def delete_hunt_list(self, **args):
"""
Delete a Hunt List
:param name: - required
:return: result dictionary
"""
try:
return self.client.removeHuntList(**args)
except Fault as e:
return e
# Line Group Methods
def get_line_groups(
self,
tagfilter={
"name": "",
"distributionAlgorithm": "",
"rnaReversionTimeOut": "",
"huntAlgorithmNoAnswer": "",
"huntAlgorithmBusy": "",
"huntAlgorithmNotAvailable": "",
"autoLogOffHunt": "",
},
):
"""
Get Line Groups
:return: a list of dictionaries
"""
try:
response = self.client.listLineGroup(
{"name": "%"},
returnedTags=tagfilter,
)["return"]
if response:
return response["lineGroup"]
else:
return response
except Fault as e:
return e
def get_line_group(self, **args):
"""
Get line group details
:param name:
:return: result dictionary
"""
try:
return self.client.getLineGroup(**args)["return"]["lineGroup"]
except Fault as e:
return e
def add_line_group(self, **args):
"""
Add a Line Group minimal params needed
:param name: - required
:param distributionAlgorithm: "Longest Idle Time", "Broadcast" etc...
:param rnaReversionTimeOut:
:param huntAlgorithmNoAnswer: "Try next member; then, try next group in Hunt List",
:param huntAlgorithmBusy: "Try next member; then, try next group in Hunt List",
:param huntAlgorithmNotAvailable: "Try next member; then, try next group in Hunt List",
:param members: dict for each member directory number
:return: result dictionary
"""
try:
return self.client.addLineGroup({**args})
except Fault as e:
return e
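# Usage sketch (illustrative only): the `members` shape is an assumption based
# on the AXL addLineGroup schema; the DN and partition are placeholders.
#
#   members = {"member": [{
#       "directoryNumber": {"pattern": "77777",
#                           "routePartitionName": "LINE_PT"},
#       "lineSelectionOrder": 1,
#   }]}
#   resp = ucm.add_line_group(
#       name="HELPDESK-LG",
#       distributionAlgorithm="Longest Idle Time",
#       members=members,
#   )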
def update_line_group(self, **args):
"""
Update a Line Group
:param name: - required
:param distributionAlgorithm: "Longest Idle Time", "Broadcast" etc...
:param rnaReversionTimeOut:
:param huntAlgorithmNoAnswer: "Try next member; then, try next group in Hunt List",
:param huntAlgorithmBusy: "Try next member; then, try next group in Hunt List",
:param huntAlgorithmNotAvailable: "Try next member; then, try next group in Hunt List",
:param members: dict for each member directory number
:return: result dictionary
"""
try:
return self.client.updateLineGroup(**args)
except Fault as e:
return e
def delete_line_group(self, **args):
"""
Delete a Line Group
:param name: - required
:return: result dictionary
"""
try:
return self.client.removeLineGroup(**args)
except Fault as e:
return e
# Call Pickup Group Methods
def get_call_pickup_groups(
self,
tagfilter={
"name": "",
"pattern": "",
"description": "",
"usage": "",
"routePartitionName": "",
"pickupNotification": "",
"pickupNotificationTimer": "",
"callInfoForPickupNotification": "",
},
):
"""
Get call pickup groups
:return: a list of dictionaries
"""
try:
response = self.client.listCallPickupGroup(
{"pattern": "%"},
returnedTags=tagfilter,
)["return"]
if response:
return response["callPickupGroup"]
else:
return response
except Fault as e:
return e
def get_call_pickup_group(self, **args):
"""
Get call pickup group details
:param pattern:
:param name:
:param uuid:
:return: result dictionary
"""
try:
return self.client.getCallPickupGroup(**args)["return"]["callPickupGroup"]
except Fault as e:
return e
def add_call_pickup_group(self, **args):
"""
Add a Call Pickup Group minimal params needed
:param name: - required
:param pattern: - required
:param description:
:param usage:
:param routePartitionName:
:param pickupNotification:
:param pickupNotificationTimer:
:param callInfoForPickupNotification:
:param members:
:return: result dictionary
"""
try:
return self.client.addCallPickupGroup({**args})
except Fault as e:
return e
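# Usage sketch (illustrative only): pattern and partition are placeholders and
# the partition must already exist.
#
#   resp = ucm.add_call_pickup_group(
#       name="FLOOR1-PICKUP",
#       pattern="8001",
#       routePartitionName="PICKUP-PT",
#       description="floor 1 pickup group",
#   )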
def update_call_pickup_group(self, **args):
"""
Update a Call Pickup Group
:param name:
:param pattern:
:param description:
:param usage:
:param routePartitionName:
:param pickupNotification:
:param pickupNotificationTimer:
:param callInfoForPickupNotification:
:param members:
:return: result dictionary
"""
try:
return self.client.updateCallPickupGroup(**args)
except Fault as e:
return e
def delete_call_pickup_group(self, **args):
"""
Delete a Call Pickup Group
:param name: - required
:return: result dictionary
"""
try:
return self.client.removeCallPickupGroup(**args)
except Fault as e:
return e