code (stringlengths 10-805k) | def_use_chains (sequencelengths 0-667)
---|---
"""
Open Nodes web server
Copyright (c) 2018 Opennodes / Blake Bjorn Anderson
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import gzip
import json
import os
import sys
from io import BytesIO
from flask import Flask, render_template, request, redirect, flash, Response
from flask_sqlalchemy import SQLAlchemy
from geoip2.errors import AddressNotFoundError
from sqlalchemy import and_
from config import load_config, DefaultFlaskConfig
from crawler import init_geoip, connect
from models import *
import pandas as pd
from autodoc import Autodoc
app = Flask(__name__)
auto = Autodoc(app)
app.config.from_object(DefaultFlaskConfig())
app.config.from_object('flask_config')
db = SQLAlchemy(app)
CONF = load_config()
COUNTRY, CITY, ASN = init_geoip()
@app.route('/')
@app.route('/networks/<network_name>', methods=['GET'])
def network_dashboard(network_name=None):
if network_name not in ("okcash", "testnet", None):
flash("Invalid network")
return redirect("/")
with open("static/network_summaries.json", 'r') as f:
summaries = json.load(f)
if network_name:
age_min = summaries[network_name]['age_min']
age_max = summaries[network_name]['age_max']
else:
age_min = min((summaries[network]['age_min'] for network in CONF['networks']))
age_max = max((summaries[network]['age_max'] for network in CONF['networks']))
return render_template("network_dashboard.html",
network=network_name,
include_client=False,
include_user_agent=network_name is not None,
include_network=network_name is None,
include_version=network_name is not None,
include_active=bool(CONF['export_inactive_nodes']),
age_min=age_min * 1000.0,
age_max=age_max * 1000.0)
def gzip_response(input_str, pre_compressed):
response = Response()
if not pre_compressed:
buffer = BytesIO()
gzip_file = gzip.GzipFile(mode='wb', fileobj=buffer)
gzip_file.write(input_str if isinstance(input_str, bytes) else input_str.encode())
gzip_file.close()
response.data = buffer.getvalue()
else:
response.data = input_str
response.headers['Content-Encoding'] = 'gzip'
response.headers['Vary'] = 'Accept-Encoding'
response.headers['Content-Length'] = len(response.data)
return response
@app.route('/api/get_networks', methods=['POST'])
@auto.doc()
def get_networks():
"""
Returns a list of all available network names
:return: JSON string, ex. "['okcash','testnet']"
"""
return json.dumps([x[0] for x in db.session.query(Node.network).distinct().all()])
@app.route('/api/gzip_file/<filename>', methods=['GET'])
@auto.doc()
def gzip_static_file(filename):
"""
Returns a crawl result as a gzipped response
:param filename: file_network.ext - file is 'data' or 'history', ext is either .json, .csv, .txt (data.ext returns data for all crawled networks)
:return: gzip encoded html response
"""
valid_files = ["custom.geo.json"]
for coin in ("", "_groestlcoin", "_testnet"):
for suff in ("", "_unique"):
for ext in (".csv", ".json", ".txt"):
valid_files.append("data" + coin + suff + ext)
valid_files.append("history" + coin + '.json')
if filename not in valid_files:
return redirect("/", code=404)
with open(os.path.join("static", filename), "r") as f:
return gzip_response(f.read(), False)
def deconstruct_address_string(inp):
assert isinstance(inp, str)
resp = {}
aliases = {'ok': 'okcash',
'tok': 'testnet'}
inp = inp.lower()
network = inp.split(":")[0]
if network:
inp = ":".join(inp.split(":")[1:])
network = aliases[network] if network in aliases else network
network = network if network in CONF['networks'] else None
if not network:
network = "okcash"
resp['warning'] = "Network not recognized, using OK"
if ":" in inp:
port = inp.split(":")[-1]
try:
port = int(port)
inp = ":".join(inp.split(":")[:-1])
except ValueError:
resp['warning'] = "port not recognized, using default"
port = int(CONF['networks'][network]['port'])
else:
port = int(CONF['networks'][network]['port'])
return network, inp, port, resp
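# Illustrative only (not part of the original module): expected behaviour of
# deconstruct_address_string for two hedged example inputs, assuming the
# "ok"/"tok" aliases resolve to networks present in CONF['networks']:
#   deconstruct_address_string("ok:127.0.0.1:6970") -> ("okcash", "127.0.0.1", 6970, {})
#   deconstruct_address_string("tok:10.0.0.5")      -> ("testnet", "10.0.0.5", <testnet default port from CONF>, {})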
@app.route('/api/check_node', methods=['POST'])
@auto.doc()
def check_node():
"""
Checks the current status of a node. This is a live result, so response times will be longer - to view a saved
result see /api/check_historic_node.
:param node: connection string, e.g. ok:127.0.0.1:6970 - port is optional if it is the network default
:param to_services (integer, optional): outgoing services to broadcast, default=0
:param from_services (integer, optional): outgoing services to broadcast, default=0
:param version (integer, optional): version code to broadcast, default varies by network
:param user_agent (string, optional): user agent to broadcast, default="/oknodes:0.1/"
:param height (integer, optional): block height to broadcast during handshake. default=network median
:param p2p_nodes (bool, optional): issues a getaddr call and list of connected nodes, default=False
:return: json dict {"result":{"user_agent":"/oktoshi:5.0.0.2/", "version":" .... }, "nodes":[["127.0.0.1:6970, 157532132191], ...]}
"""
dat = request.form
node = dat.get("node")
network, address, port, resp = deconstruct_address_string(node)
network_data = CONF['networks'][network]
if dat.get("height"):
network_data['height'] = dat.get("height")
else:
with open("static/network_summaries.json", 'r') as f:
network_data['height'] = int(json.load(f)[network]['med'])
network_data['protocol_version'] = dat.get("version") or network_data['protocol_version']
result = connect(network, address, port,
to_services=dat.get("to_services") or network_data['services'],
network_data=network_data,
user_agent=dat.get("user_agent") or None,
p2p_nodes=False,
explicit_p2p=dat.get("p2p_nodes") or False,
from_services=dat.get('from_services') or None,
keepalive=False)
resp['result'] = result[0]
resp['nodes'] = result[1]
resp['result'] = geocode(resp['result'])
return to_json(resp)
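# A hedged usage sketch (not part of the original module): calling the
# /api/check_node endpoint with the requests library, assuming the server is
# running locally on the development port used in main() below.
#
#   import requests
#   r = requests.post("http://127.0.0.1:5000/api/check_node",
#                     data={"node": "ok:127.0.0.1:6970", "p2p_nodes": "1"})
#   print(r.json()["result"]["user_agent"])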
@app.route('/api/check_historic_node', methods=['POST', 'GET'])
@auto.doc()
def check_historic_node():
"""
Checks the status of a node based on the last crawl result. For a live
check, see /api/check_node.
:param node: connection string, e.g. ok:127.0.0.1:6970 - port is optional if it is the network default
:return: json dict {"result":{"user_agent":"/oktoshi:5.0.0.2/", "version":" .... }}
"""
if request.method == "POST":
dat = request.form
else:
dat = request.args
node = dat.get("node")
network, address, port, resp = deconstruct_address_string(node)
if network not in CONF['networks']:
return json.dumps({'error': "network not recognized"})
result = db.session.query(Node).get((network, address, port))
resp['result'] = "None" if result is None else result.to_dict()
return to_json(resp)
@app.route("/about")
def about():
return render_template("about.html")
@app.route("/api_docs")
def api_docs():
return auto.html()
@app.route('/api/get_nodes', methods=['POST'])
@auto.doc()
def get_node_list():
"""
Gets a list of all nodes visible during the past 30 days
:param network (optional): Filters the result set based on the given network
:return: json array [{"address":"127.0.0.1" ... }, {"address":"0.0.0.0", "port":6970}]
"""
q = db.session.query(Node.network, Node.address, Node.port, Node.user_agent, Node.version, Node.first_seen,
Node.last_seen, Node.last_checked, Node.country, Node.city, Node.asn, Node.aso).filter(
Node.seen)
if request.args.get("network") is not None:
network = request.args.get("network")
if network not in CONF['networks']:
return {"error": "network must be one of " + ", ".join(CONF['networks'])}
q = q.filter(Node.network == network)
return pd.read_sql(q.statement, q.session.bind).to_json(orient='records')
@app.route('/api/node_history', methods=['POST'])
@auto.doc()
def get_node_history():
"""
Returns the data associated with a node, and all crawler visitations on record
:param node: connection string, e.g. ok:127.0.0.1:6970 - port is optional if it is the network default.
:return: json dict {"node":{"user_agent":"/oktoshi/", "last_seen": ... }, "history":{"timestamp":157032190321,"height":56000, "success":1 ...}}
"""
node = request.form.get("node")
network, address, port, resp = deconstruct_address_string(node)
if network not in CONF['networks']:
return json.dumps({'error': "network not recognized"})
default_port = int(CONF['networks'][network]['port'])
resp = {}
try:
port = int(port) if port is not None else default_port
except ValueError:
resp['warning'] = "port not recognized, using default"
port = default_port
n = db.session.query(Node.network, Node.address, Node.port, Node.user_agent, Node.version, Node.first_seen,
Node.last_seen, Node.last_checked, Node.country, Node.city, Node.asn, Node.aso) \
.filter(and_(Node.network == network, Node.address == address, Node.port == port)).one()
q = db.session.query(NodeVisitation.timestamp, NodeVisitation.height, NodeVisitation.success) \
.join(Node, and_(Node.network == NodeVisitation.network, Node.address == NodeVisitation.address,
Node.port == NodeVisitation.port)) \
.filter(and_(Node.network == network, Node.address == address, Node.port == port)) \
.order_by(NodeVisitation.timestamp.desc())
df = pd.read_sql(q.statement, q.session.bind)
# pd.np was removed in newer pandas releases; cast via the dtype name instead
df['timestamp'] = df['timestamp'].astype('int64') // 10 ** 9
resp.update({"node": {"network": n.network, 'address': n.address, "port": n.port, "user_agent": n.user_agent,
"version": n.version,
"first_seen": n.first_seen,
"last_seen": n.last_seen,
"last_checked": n.last_checked,
"country": n.country, "city": n.city, "asn": n.asn, "aso": n.aso},
"history": df.to_dict(orient='records')})
return to_json(resp)
def geocode(result):
if result and result['address'].endswith('.onion'):
aso, asn, country, city = "Anonymous", "Anonymous", "Anonymous", "Anonymous"
elif result:
try:
aso = ASN.asn(result['address']).autonomous_system_organization
asn = ASN.asn(result['address']).autonomous_system_number
except AddressNotFoundError:
aso = None
asn = None
try:
country = COUNTRY.country(result['address']).country.name
except AddressNotFoundError:
country = None
try:
city = CITY.city(result['address']).city.name
except AddressNotFoundError:
city = None
else:
return result
result['aso'] = aso
result['asn'] = asn
result['country'] = country
result['city'] = city
return result
def clean_dates(d):
for i in d:
if isinstance(d[i], datetime.datetime):
d[i] = d[i].timestamp()
if isinstance(d[i], dict):
d[i] = clean_dates(d[i])
return d
def to_json(d):
"""
Sanitizes a dictionary - converts datetime.datetime instances to timestamps
:param d: dictionary
:return: json string
"""
d = clean_dates(d)
return json.dumps(d)
def main():
app.run("0.0.0.0", debug=False if "--prod" in sys.argv else True, port=8888 if "--prod" in sys.argv else 5000)
# app.run("0.0.0.0", debug=False if "--prod" in sys.argv else True, port=443 if "--prod" in sys.argv else 5000, ssl_context=('/etc/letsencrypt/live/nodes.okcash.org/fullchain.pem', '/etc/letsencrypt/live/nodes.okcash.org/privkey.pem'))
if __name__ == '__main__':
main()
| [def_use_chains: character-offset pairs omitted] |
#!/usr/bin/python
import hashlib, re, sys, os, base64, time, random, hmac
# stripped down from https://github.com/vbuterin/pybitcointools/blob/master/bitcoin/main.py
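# NOTE (editorial comment, not from the original source): this module keeps
# the Python 2 semantics of upstream pybitcointools - integer "/" division,
# str-based byte strings with chr()/ord(), .encode('hex'), and the "long"
# type - so it would need porting to run unmodified under Python 3.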
### Elliptic curve parameters
P = 2**256-2**32-2**9-2**8-2**7-2**6-2**4-1
N = 115792089237316195423570985008687907852837564279074904382605163141518161494337
A = 0
B = 7
H = 1
Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240
Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424
G = (Gx,Gy)
### Extended Euclidean Algorithm
def inv(a,n):
lm, hm = 1,0
low, high = a%n,n
while low > 1:
r = high/low
nm, new = hm-lm*r, high-low*r
lm, low, hm, high = nm, new, lm, low
return lm % n
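# Worked example (comment only): inv(3, 11) walks the extended Euclidean
# algorithm down to 4, since 3 * 4 = 12 == 1 (mod 11).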
### Base switching
def get_code_string(base):
if base == 2: return '01'
elif base == 10: return '0123456789'
elif base == 16: return "0123456789abcdef"
elif base == 58: return "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
elif base == 256: return ''.join([chr(x) for x in range(256)])
else: raise ValueError("Invalid base!")
def encode(val,base,minlen=0):
base, minlen = int(base), int(minlen)
code_string = get_code_string(base)
result = ""
while val > 0:
result = code_string[val % base] + result
val /= base
if len(result) < minlen:
result = code_string[0]*(minlen-len(result))+result
return result
def decode(string,base):
base = int(base)
code_string = get_code_string(base)
result = 0
if base == 16: string = string.lower()
while len(string) > 0:
result *= base
result += code_string.find(string[0])
string = string[1:]
return result
def changebase(string,frm,to,minlen=0):
return encode(decode(string,frm),to,minlen)
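# Worked example (comment only): changebase("ff", 16, 2) first decodes the
# hex string to 255, then re-encodes it in base 2, giving "11111111".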
### Elliptic Curve functions
def isinf(p): return p[0] == 0 and p[1] == 0
def base10_add(a,b):
if isinf(a): return b[0],b[1]
if isinf(b): return a[0],a[1]
if a[0] == b[0]:
if a[1] == b[1]: return base10_double((a[0],a[1]))
else: return (0,0)
m = ((b[1]-a[1]) * inv(b[0]-a[0],P)) % P
x = (m*m-a[0]-b[0]) % P
y = (m*(a[0]-x)-a[1]) % P
return (x,y)
def base10_double(a):
if isinf(a): return (0,0)
m = ((3*a[0]*a[0]+A)*inv(2*a[1],P)) % P
x = (m*m-2*a[0]) % P
y = (m*(a[0]-x)-a[1]) % P
return (x,y)
def base10_multiply(a,n):
if isinf(a) or n == 0: return (0,0)
if n == 1: return a
if n < 0 or n >= N: return base10_multiply(a,n%N)
if (n%2) == 0: return base10_double(base10_multiply(a,n/2))
if (n%2) == 1: return base10_add(base10_double(base10_multiply(a,n/2)),a)
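# Editorial note: base10_multiply above is recursive double-and-add scalar
# multiplication on affine points; like the rest of this file it relies on
# Python 2's integer "/" (n/2 is a floor division here).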
# Functions for handling pubkey and privkey formats
def get_pubkey_format(pub):
if isinstance(pub,(tuple,list)): return 'decimal'
elif len(pub) == 65 and pub[0] == '\x04': return 'bin'
elif len(pub) == 130 and pub[0:2] == '04': return 'hex'
elif len(pub) == 33 and pub[0] in ['\x02','\x03']: return 'bin_compressed'
elif len(pub) == 66 and pub[0:2] in ['02','03']: return 'hex_compressed'
elif len(pub) == 64: return 'bin_electrum'
elif len(pub) == 128: return 'hex_electrum'
else: raise Exception("Pubkey not in recognized format")
def encode_pubkey(pub,formt):
if not isinstance(pub,(tuple,list)):
pub = decode_pubkey(pub)
if formt == 'decimal': return pub
elif formt == 'bin': return '\x04' + encode(pub[0],256,32) + encode(pub[1],256,32)
elif formt == 'bin_compressed': return chr(2+(pub[1]%2)) + encode(pub[0],256,32)
elif formt == 'hex': return '04' + encode(pub[0],16,64) + encode(pub[1],16,64)
elif formt == 'hex_compressed': return '0'+str(2+(pub[1]%2)) + encode(pub[0],16,64)
elif formt == 'bin_electrum': return encode(pub[0],256,32) + encode(pub[1],256,32)
elif formt == 'hex_electrum': return encode(pub[0],16,64) + encode(pub[1],16,64)
else: raise Exception("Invalid format!")
def decode_pubkey(pub,formt=None):
if not formt: formt = get_pubkey_format(pub)
if formt == 'decimal': return pub
elif formt == 'bin': return (decode(pub[1:33],256),decode(pub[33:65],256))
elif formt == 'bin_compressed':
x = decode(pub[1:33],256)
beta = pow(x*x*x+7,(P+1)/4,P)
y = (P-beta) if ((beta + ord(pub[0])) % 2) else beta
return (x,y)
elif formt == 'hex': return (decode(pub[2:66],16),decode(pub[66:130],16))
elif formt == 'hex_compressed':
return decode_pubkey(pub.decode('hex'),'bin_compressed')
elif formt == 'bin_electrum':
return (decode(pub[:32],256),decode(pub[32:64],256))
elif formt == 'hex_electrum':
return (decode(pub[:64],16),decode(pub[64:128],16))
else: raise Exception("Invalid format!")
def get_privkey_format(priv):
if isinstance(priv,(int,long)): return 'decimal'
elif len(priv) == 32: return 'bin'
elif len(priv) == 33: return 'bin_compressed'
elif len(priv) == 64: return 'hex'
elif len(priv) == 66: return 'hex_compressed'
else:
bin_p = b58check_to_bin(priv)
if len(bin_p) == 32: return 'wif'
elif len(bin_p) == 33: return 'wif_compressed'
else: raise Exception("WIF does not represent privkey")
def encode_privkey(priv,formt):
if not isinstance(priv,(int,long)):
return encode_privkey(decode_privkey(priv),formt)
if formt == 'decimal': return priv
elif formt == 'bin': return encode(priv,256,32)
elif formt == 'bin_compressed': return encode(priv,256,32)+'\x01'
elif formt == 'hex': return encode(priv,16,64)
elif formt == 'hex_compressed': return encode(priv,16,64)+'01'
else: raise Exception("Invalid format!")
def decode_privkey(priv,formt=None):
if not formt: formt = get_privkey_format(priv)
if formt == 'decimal': return priv
elif formt == 'bin': return decode(priv,256)
elif formt == 'bin_compressed': return decode(priv[:32],256)
elif formt == 'hex': return decode(priv,16)
elif formt == 'hex_compressed': return decode(priv[:64],16)
else: raise Exception("Invalid format!")
def add_pubkeys(p1,p2):
f1,f2 = get_pubkey_format(p1), get_pubkey_format(p2)
return encode_pubkey(base10_add(decode_pubkey(p1,f1),decode_pubkey(p2,f2)),f1)
def add_privkeys(p1,p2):
f1,f2 = get_privkey_format(p1), get_privkey_format(p2)
return encode_privkey((decode_privkey(p1,f1) + decode_privkey(p2,f2)) % N,f1)
def multiply(pubkey,privkey):
f1,f2 = get_pubkey_format(pubkey), get_privkey_format(privkey)
pubkey, privkey = decode_pubkey(pubkey,f1), decode_privkey(privkey,f2)
# http://safecurves.cr.yp.to/twist.html
if not isinf(pubkey) and (pubkey[0]**3+7-pubkey[1]*pubkey[1]) % P != 0:
raise Exception("Point not on curve")
return encode_pubkey(base10_multiply(pubkey,privkey),f1)
def divide(pubkey,privkey):
factor = inv(decode_privkey(privkey),N)
return multiply(pubkey,factor)
def compress(pubkey):
f = get_pubkey_format(pubkey)
if 'compressed' in f: return pubkey
elif f == 'bin': return encode_pubkey(decode_pubkey(pubkey,f),'bin_compressed')
elif f == 'hex' or f == 'decimal':
return encode_pubkey(decode_pubkey(pubkey,f),'hex_compressed')
def decompress(pubkey):
f = get_pubkey_format(pubkey)
if 'compressed' not in f: return pubkey
elif f == 'bin_compressed': return encode_pubkey(decode_pubkey(pubkey,f),'bin')
elif f == 'hex_compressed' or f == 'decimal':
return encode_pubkey(decode_pubkey(pubkey,f),'hex')
def privkey_to_pubkey(privkey):
f = get_privkey_format(privkey)
privkey = decode_privkey(privkey,f)
if privkey == 0 or privkey >= N:
raise Exception("Invalid privkey")
if f in ['bin','bin_compressed','hex','hex_compressed','decimal']:
return encode_pubkey(base10_multiply(G,privkey),f)
else:
return encode_pubkey(base10_multiply(G,privkey),f.replace('wif','hex'))
privtopub = privkey_to_pubkey
def privkey_to_address(priv,magicbyte=0):
return pubkey_to_address(privkey_to_pubkey(priv),magicbyte)
privtoaddr = privkey_to_address
def neg_pubkey(pubkey):
f = get_pubkey_format(pubkey)
pubkey = decode_pubkey(pubkey,f)
return encode_pubkey((pubkey[0],(P-pubkey[1]) % P),f)
def neg_privkey(privkey):
f = get_privkey_format(privkey)
privkey = decode_privkey(privkey,f)
return encode_privkey((N - privkey) % N,f)
def subtract_pubkeys(p1, p2):
f1,f2 = get_pubkey_format(p1), get_pubkey_format(p2)
k2 = decode_pubkey(p2,f2)
return encode_pubkey(base10_add(decode_pubkey(p1,f1),(k2[0],(P - k2[1]) % P)),f1)
def subtract_privkeys(p1, p2):
f1,f2 = get_privkey_format(p1), get_privkey_format(p2)
k2 = decode_privkey(p2,f2)
return encode_privkey((decode_privkey(p1,f1) - k2) % N,f1)
### Hashes
def bin_sha256(string):
return hashlib.sha256(string).digest()
def sha256(string):
return bin_sha256(string).encode('hex')
def hash_to_int(x):
if len(x) in [40,64]: return decode(x,16)
else: return decode(x,256)
def num_to_var_int(x):
x = int(x)
if x < 253: return chr(x)
elif x < 65536: return chr(253) + encode(x,256,2)[::-1]
elif x < 4294967296: return chr(254) + encode(x,256,4)[::-1]
else: return chr(255) + encode(x,256,8)[::-1]
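# Worked example (comment only): num_to_var_int(300) returns '\xfd' followed
# by 300 as two little-endian bytes ('\x2c\x01'), the Bitcoin "varint"
# layout for values in [253, 65535].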
### Encodings
### ECDSA
def encode_sig(v,r,s):
vb, rb, sb = chr(v), encode(r,256), encode(s,256)
return base64.b64encode(vb+'\x00'*(32-len(rb))+rb+'\x00'*(32-len(sb))+sb)
def decode_sig(sig):
bytez = base64.b64decode(sig)
return ord(bytez[0]), decode(bytez[1:33],256), decode(bytez[33:],256)
# https://tools.ietf.org/html/rfc6979#section-3.2
def deterministic_generate_k(msghash,priv):
v = '\x01' * 32
k = '\x00' * 32
priv = encode_privkey(priv,'bin')
msghash = encode(hash_to_int(msghash),256,32)
k = hmac.new(k, v+'\x00'+priv+msghash, hashlib.sha256).digest()
v = hmac.new(k, v, hashlib.sha256).digest()
k = hmac.new(k, v+'\x01'+priv+msghash, hashlib.sha256).digest()
v = hmac.new(k, v, hashlib.sha256).digest()
return decode(hmac.new(k, v, hashlib.sha256).digest(),256)
def ecdsa_raw_sign(msghash,priv):
z = hash_to_int(msghash)
k = deterministic_generate_k(msghash,priv)
r,y = base10_multiply(G,k)
s = inv(k,N) * (z + r*decode_privkey(priv)) % N
return 27+(y%2),r,s
def ecdsa_sign(msg,priv):
return encode_sig(*ecdsa_raw_sign(electrum_sig_hash(msg),priv))
def ecdsa_raw_verify(msghash,vrs,pub):
v,r,s = vrs
w = inv(s,N)
z = hash_to_int(msghash)
u1, u2 = z*w % N, r*w % N
x,y = base10_add(base10_multiply(G,u1), base10_multiply(decode_pubkey(pub),u2))
return r == x
def ecdsa_verify(msg,sig,pub):
return ecdsa_raw_verify(electrum_sig_hash(msg),decode_sig(sig),pub)
def ecdsa_raw_recover(msghash,vrs):
v,r,s = vrs
x = r
beta = pow(x*x*x+7,(P+1)/4,P)
y = beta if v%2 ^ beta%2 else (P - beta)
z = hash_to_int(msghash)
Qr = base10_add(neg_pubkey(base10_multiply(G,z)),base10_multiply((x,y),s))
Q = base10_multiply(Qr,inv(r,N))
if ecdsa_raw_verify(msghash,vrs,Q): return encode_pubkey(Q,'hex')
return False
def ecdsa_recover(msg,sig):
return ecdsa_raw_recover(electrum_sig_hash(msg),decode_sig(sig))
| [def_use_chains: character-offset pairs omitted] |
#
# ovirt-engine-setup -- ovirt engine setup
#
# Copyright oVirt Authors
# SPDX-License-Identifier: Apache-2.0
#
#
"""ovirt-host-setup vmconsole_proxy plugin."""
from otopi import util
from . import config
from . import pki
from . import system
@util.export
def createPlugins(context):
config.Plugin(context=context)
pki.Plugin(context=context)
system.Plugin(context=context)
# vim: expandtab tabstop=4 shiftwidth=4
| [def_use_chains: character-offset pairs omitted] |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class PolicyType(core.Resource):
_alias = 'Policy-Type'
_display_name = 'Policy type'
_plural = 'PolicyTypes'
_plural_display_name = 'Policy types'
_repr_attributes = ['ref', 'enabled', 'description']
class Policy(core.Resource):
_plural = 'Policies'
_repr_attributes = ['name', 'pack', 'enabled', 'policy_type', 'resource_ref']
| [def_use_chains: character-offset pairs omitted] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
###########################################################
# KivyCalendar (X11/MIT License)
# Calendar & Date picker widgets for Kivy (http://kivy.org)
# https://bitbucket.org/xxblx/kivycalendar
#
# Oleg Kozlov (xxblx), 2015
# https://xxblx.bitbucket.org/
###########################################################
from kivy.lang import Builder
from kivy.uix.screenmanager import ScreenManager, Screen
from kivy.uix.popup import Popup
from kivy.uix.relativelayout import RelativeLayout
from kivy.uix.gridlayout import GridLayout
from kivy.uix.button import Button
from kivy.uix.togglebutton import ToggleButton
from kivy.uix.textinput import TextInput
from kivy.uix.label import Label
from kivy.core.window import Window
from kivy.factory import Factory
from kivy.properties import NumericProperty, ReferenceListProperty
from kivyblocks.i18n import I18n
from . import calendar_data as cal_data
###########################################################
Builder.load_string("""
<ArrowButton>:
background_normal: ""
background_down: ""
background_color: 1, 1, 1, 0
size_hint: .1, .1
<MonthYearLabel>:
pos_hint: {"top": 1, "center_x": .5}
size_hint: None, 0.1
halign: "center"
<MonthsManager>:
pos_hint: {"top": .9}
size_hint: 1, .9
<ButtonsGrid>:
cols: 7
rows: 7
size_hint: 1, 1
pos_hint: {"top": 1}
<DayAbbrLabel>:
text_size: self.size[0], None
halign: "center"
<DayAbbrWeekendLabel>:
color: 1, 0, 0, 1
<DayButton>:
group: "day_num"
<DayNumWeekendButton>:
background_color: 1, 0, 0, 1
""")
###########################################################
class DatePicker(TextInput):
"""
Date picker is a TextInput; when it gains focus it shows a popup with a
calendar. The popup dimensions can be set with the pHint_x, pHint_y and
pHint properties, for example in kv:
DatePicker:
pHint: 0.7,0.4
would result in a size_hint of 0.7,0.4 being used to create the popup
"""
def __init__(self, touch_switch=False, value=None, *args, **kwargs):
super(DatePicker, self).__init__(*args, **kwargs)
self.touch_switch = touch_switch
self.init_ui(value)
def getValue(self):
return self.text
def setValue(self, sdate):
self.text = sdate
d = [int(i) for i in sdate.split('-')]
d.reverse()
self.cal.active_date = d
def init_ui(self, value):
if not value:
value = cal_data.today_date()
d = [int(i) for i in value.split('.')]
value = '%04d-%02d-%02d' % (d[2],d[1],d[0])
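# e.g. a cal_data.today_date() value in "dd.mm.yyyy" form such as
# "17.03.2015" becomes the ISO-style "2015-03-17" used for self.text
# (format inferred from the '.'-split above, not stated in the original).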
# Calendar
self.cal = CalendarWidget(as_popup=True,
touch_switch=self.touch_switch)
self.setValue(value)
# Popup
self.popup = Popup(content=self.cal, on_dismiss=self.update_value,
size_hint=(0.7,0.7),
title="")
self.cal.parent_popup = self.popup
self.bind(focus=self.show_popup)
def show_popup(self, isnt, val):
"""
Open the popup when the textinput gains focus.
"""
if val:
# Automatically dismiss the keyboard
# that results from the textInput
Window.release_all_keyboards()
self.popup.open()
def update_value(self, inst):
""" Update textinput value on popup close """
d = self.cal.active_date
self.text = "%04d-%02d-%02d" % (d[2],d[1],d[0])
self.focus = False
class CalendarWidget(RelativeLayout):
""" Basic calendar widget """
def __init__(self, as_popup=False, touch_switch=False, *args, **kwargs):
super(CalendarWidget, self).__init__(*args, **kwargs)
self.i18n = I18n()
self.as_popup = as_popup
self.touch_switch = touch_switch
self.prepare_data()
self.init_ui()
def init_ui(self):
self.left_arrow = ArrowButton(text="<", on_press=self.go_prev,
pos_hint={"top": 1, "left": 0})
self.right_arrow = ArrowButton(text=">", on_press=self.go_next,
pos_hint={"top": 1, "right": 1})
self.add_widget(self.left_arrow)
self.add_widget(self.right_arrow)
# Title
self.title_label = MonthYearLabel(text=self.title)
self.add_widget(self.title_label)
# ScreenManager
self.sm = MonthsManager()
self.add_widget(self.sm)
self.create_month_scr(self.quarter[1], toogle_today=True)
def create_month_scr(self, month, toogle_today=False):
""" Screen with calendar for one month """
scr = Screen()
m = self.month_names_eng[self.active_date[1] - 1]
scr.name = "%s-%s" % (m, self.active_date[2]) # like march-2015
# Grid for days
grid_layout = ButtonsGrid()
scr.add_widget(grid_layout)
# Days abbrs
for i in range(7):
if i >= 5: # weekends
#l = DayAbbrWeekendLabel(text=self.days_abrs[i])
l = Factory.Text(text=self.days_abrs[i], i18n=True)
else: # work days
#l = DayAbbrLabel(text=self.days_abrs[i])
l = Factory.Text(text=self.days_abrs[i], i18n=True)
grid_layout.add_widget(l)
# Buttons with days numbers
for week in month:
for day in week:
if day[1] >= 5: # weekends
tbtn = DayNumWeekendButton(text=str(day[0]))
else: # work days
tbtn = DayNumButton(text=str(day[0]))
tbtn.bind(on_press=self.get_btn_value)
if toogle_today:
# Down today button
if day[0] == self.active_date[0] and day[2] == 1:
tbtn.state = "down"
# Disable buttons with days from other months
if day[2] == 0:
tbtn.disabled = True
grid_layout.add_widget(tbtn)
self.sm.add_widget(scr)
def prepare_data(self):
""" Prepare data for showing on widget loading """
# Get days abbrs and month names lists
self.month_names = cal_data.get_month_names()
self.month_names_eng = cal_data.get_month_names_eng()
self.days_abrs = cal_data.get_days_abbrs()
# Today date
self.active_date = cal_data.today_date_list()
# Set title
self.title = "%s - %s" % (self.i18n(self.month_names[self.active_date[1] - 1]),
self.active_date[2])
# Quarter with the current month stored in self.quarter[1]
self.get_quarter()
def get_quarter(self):
""" Get caledar and months/years nums for quarter """
self.quarter_nums = cal_data.calc_quarter(self.active_date[2],
self.active_date[1])
self.quarter = cal_data.get_quarter(self.active_date[2],
self.active_date[1])
def get_btn_value(self, inst):
""" Get day value from pressed button """
self.active_date[0] = int(inst.text)
if self.as_popup:
self.parent_popup.dismiss()
def go_prev(self, inst):
""" Go to screen with previous month """
# Change active date
self.active_date = [self.active_date[0], self.quarter_nums[0][1],
self.quarter_nums[0][0]]
# Name of prev screen
n = self.quarter_nums[0][1] - 1
prev_scr_name = "%s-%s" % (self.month_names_eng[n],
self.quarter_nums[0][0])
# If it doesn't exist, create it
if not self.sm.has_screen(prev_scr_name):
self.create_month_scr(self.quarter[0])
self.sm.current = prev_scr_name
self.sm.transition.direction = "right"
self.get_quarter()
self.title = "%s - %s" % (self.i18n(self.month_names[self.active_date[1] - 1]),
self.active_date[2])
self.title_label.text = self.title
def go_next(self, inst):
""" Go to screen with next month """
# Change active date
self.active_date = [self.active_date[0], self.quarter_nums[2][1],
self.quarter_nums[2][0]]
# Name of next screen
n = self.quarter_nums[2][1] - 1
next_scr_name = "%s-%s" % (self.month_names_eng[n],
self.quarter_nums[2][0])
# If it doesn't exist, create it
if not self.sm.has_screen(next_scr_name):
self.create_month_scr(self.quarter[2])
self.sm.current = next_scr_name
self.sm.transition.direction = "left"
self.get_quarter()
self.title = "%s - %s" % (self.i18n(self.month_names[self.active_date[1] - 1]),
self.active_date[2])
self.title_label.text = self.title
def on_touch_move(self, touch):
""" Switch months pages by touch move """
if self.touch_switch:
# Left - prev
if touch.dpos[0] < -30:
self.go_prev(None)
# Right - next
elif touch.dpos[0] > 30:
self.go_next(None)
class ArrowButton(Button):
pass
class MonthYearLabel(Label):
pass
class MonthsManager(ScreenManager):
pass
class ButtonsGrid(GridLayout):
pass
class DayAbbrLabel(Label):
pass
class DayAbbrWeekendLabel(DayAbbrLabel):
pass
class DayButton(ToggleButton):
pass
class DayNumButton(DayButton):
pass
class DayNumWeekendButton(DayButton):
pass
| [def_use_chains: character-offset pairs omitted] |
"""
Utilities for API Gateway response formatting
"""
import json
def format_response(data, status=200):
return {
'body': json.dumps(data),
'headers': {
'Content-Type': 'application/json'
},
'statusCode': int(status)
}
def format_error(msg, code='BadRequest', status=400):
data = {
'success': False,
'error': {
'code': code,
'message': msg
}
}
return format_response(data, status)
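# A small usage sketch (not part of the original module), showing the shape
# of the dict these helpers return:
#
#   format_error("id is required")
#   # -> {'body': '{"success": false, "error": {"code": "BadRequest", "message": "id is required"}}',
#   #     'headers': {'Content-Type': 'application/json'},
#   #     'statusCode': 400}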
| [def_use_chains: character-offset pairs omitted] |
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2012 VMware, Inc.
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Class for VM tasks like spawn, snapshot, suspend, resume etc.
"""
import collections
import copy
import os
import time
import decorator
from oslo.config import cfg
from oslo.vmware import exceptions as vexc
from nova.api.metadata import base as instance_metadata
from nova import compute
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import vm_states
from nova.console import type as ctype
from nova import context as nova_context
from nova import exception
from nova.i18n import _, _LE, _LW
from nova import objects
from nova.openstack.common import excutils
from nova.openstack.common import lockutils
from nova.openstack.common import log as logging
from nova.openstack.common import units
from nova.openstack.common import uuidutils
from nova import utils
from nova.virt import configdrive
from nova.virt import diagnostics
from nova.virt import driver
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import ds_util
from nova.virt.vmwareapi import error_util
from nova.virt.vmwareapi import imagecache
from nova.virt.vmwareapi import vif as vmwarevif
from nova.virt.vmwareapi import vim_util
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmware_images
CONF = cfg.CONF
CONF.import_opt('image_cache_subdirectory_name', 'nova.virt.imagecache')
CONF.import_opt('remove_unused_base_images', 'nova.virt.imagecache')
CONF.import_opt('vnc_enabled', 'nova.vnc')
CONF.import_opt('my_ip', 'nova.netconf')
LOG = logging.getLogger(__name__)
VMWARE_POWER_STATES = {
'poweredOff': power_state.SHUTDOWN,
'poweredOn': power_state.RUNNING,
'suspended': power_state.SUSPENDED}
RESIZE_TOTAL_STEPS = 4
DcInfo = collections.namedtuple('DcInfo',
['ref', 'name', 'vmFolder'])
class VirtualMachineInstanceConfigInfo(object):
"""Parameters needed to create and configure a new instance."""
def __init__(self, instance, instance_name, image_info,
datastore, dc_info, image_cache):
# Some methods called during spawn take the instance parameter purely
# for logging purposes.
# TODO(vui) Clean them up, so we no longer need to keep this variable
self.instance = instance
# Get the instance name. In some cases this may differ from the 'uuid',
# for example when the spawn of a rescue instance takes place.
self.instance_name = instance_name or instance.uuid
self.ii = image_info
self.root_gb = instance.root_gb
self.datastore = datastore
self.dc_info = dc_info
self._image_cache = image_cache
@property
def cache_image_folder(self):
if self.ii.image_id is None:
return
return self._image_cache.get_image_cache_folder(
self.datastore, self.ii.image_id)
@property
def cache_image_path(self):
if self.ii.image_id is None:
return
cached_image_file_name = "%s.%s" % (self.ii.image_id,
self.ii.file_type)
return self.cache_image_folder.join(cached_image_file_name)
# Note(vui): See https://bugs.launchpad.net/nova/+bug/1363349
# for cases where mocking time.sleep() can have unintended effects on code
# not under test. For now, unblock the affected test cases by providing
# a wrapper function to work around needing to mock time.sleep()
def _time_sleep_wrapper(delay):
time.sleep(delay)
@decorator.decorator
def retry_if_task_in_progress(f, *args, **kwargs):
retries = max(CONF.vmware.api_retry_count, 1)
delay = 1
for attempt in range(1, retries + 1):
if attempt != 1:
_time_sleep_wrapper(delay)
delay = min(2 * delay, 60)
try:
f(*args, **kwargs)
return
except error_util.TaskInProgress:
pass
class VMwareVMOps(object):
"""Management class for VM-related tasks."""
def __init__(self, session, virtapi, volumeops, cluster=None,
datastore_regex=None):
"""Initializer."""
self.compute_api = compute.API()
self._session = session
self._virtapi = virtapi
self._volumeops = volumeops
self._cluster = cluster
self._datastore_regex = datastore_regex
# Ensure that the base folder is unique per compute node
if CONF.remove_unused_base_images:
self._base_folder = '%s%s' % (CONF.my_ip,
CONF.image_cache_subdirectory_name)
else:
# Aging disable ensures backward compatibility
self._base_folder = CONF.image_cache_subdirectory_name
self._tmp_folder = 'vmware_temp'
self._default_root_device = 'vda'
self._rescue_suffix = '-rescue'
self._migrate_suffix = '-orig'
self._datastore_dc_mapping = {}
self._datastore_browser_mapping = {}
self._imagecache = imagecache.ImageCacheManager(self._session,
self._base_folder)
def _extend_virtual_disk(self, instance, requested_size, name, dc_ref):
service_content = self._session._get_vim().service_content
LOG.debug("Extending root virtual disk to %s", requested_size)
vmdk_extend_task = self._session._call_method(
self._session._get_vim(),
"ExtendVirtualDisk_Task",
service_content.virtualDiskManager,
name=name,
datacenter=dc_ref,
newCapacityKb=requested_size,
eagerZero=False)
try:
self._session._wait_for_task(vmdk_extend_task)
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_('Extending virtual disk failed with error: %s'),
e, instance=instance)
# Clean up files created during the extend operation
files = [name.replace(".vmdk", "-flat.vmdk"), name]
for file in files:
ds_path = ds_util.DatastorePath.parse(file)
self._delete_datastore_file(instance, ds_path, dc_ref)
LOG.debug("Extended root virtual disk")
def _delete_datastore_file(self, instance, datastore_path, dc_ref):
try:
ds_util.file_delete(self._session, datastore_path, dc_ref)
except (vexc.CannotDeleteFileException,
vexc.FileFaultException,
vexc.FileLockedException,
vexc.FileNotFoundException):
LOG.debug("Unable to delete %(ds)s. There may be more than "
"one process or thread trying to delete the file",
{'ds': datastore_path},
exc_info=True)
def _extend_if_required(self, dc_info, image_info, instance,
root_vmdk_path):
"""Increase the size of the root vmdk if necessary."""
if instance.root_gb > image_info.file_size_in_gb:
size_in_kb = instance.root_gb * units.Mi
self._extend_virtual_disk(instance, size_in_kb,
root_vmdk_path, dc_info.ref)
def _configure_config_drive(self, instance, vm_ref, dc_info, datastore,
injected_files, admin_password):
session_vim = self._session._get_vim()
cookies = session_vim.client.options.transport.cookiejar
uploaded_iso_path = self._create_config_drive(instance,
injected_files,
admin_password,
datastore.name,
dc_info.name,
instance['uuid'],
cookies)
uploaded_iso_path = datastore.build_path(uploaded_iso_path)
self._attach_cdrom_to_vm(
vm_ref, instance,
datastore.ref,
str(uploaded_iso_path))
def build_virtual_machine(self, instance, instance_name, image_info,
dc_info, datastore, network_info):
node_mo_id = vm_util.get_mo_id_from_instance(instance)
res_pool_ref = vm_util.get_res_pool_ref(self._session,
self._cluster, node_mo_id)
vif_infos = vmwarevif.get_vif_info(self._session,
self._cluster,
utils.is_neutron(),
image_info.vif_model,
network_info)
allocations = self._get_cpu_allocations(instance.instance_type_id)
# Get the create vm config spec
client_factory = self._session._get_vim().client.factory
config_spec = vm_util.get_vm_create_spec(client_factory,
instance,
instance_name,
datastore.name,
vif_infos,
image_info.os_type,
allocations=allocations)
# Create the VM
vm_ref = vm_util.create_vm(self._session, instance, dc_info.vmFolder,
config_spec, res_pool_ref)
return vm_ref
def _get_cpu_allocations(self, instance_type_id):
# Read flavors for allocations
flavor = objects.Flavor.get_by_id(
nova_context.get_admin_context(read_deleted='yes'),
instance_type_id)
allocations = {}
for (key, type) in (('cpu_limit', int),
('cpu_reservation', int),
('cpu_shares_level', str),
('cpu_shares_share', int)):
value = flavor.extra_specs.get('quota:' + key)
if value:
allocations[key] = type(value)
return allocations
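# Illustrative note (not from the original source): with flavor extra_specs
# such as {'quota:cpu_limit': '2000', 'quota:cpu_shares_level': 'custom'},
# the loop above yields allocations == {'cpu_limit': 2000,
# 'cpu_shares_level': 'custom'}; keys without a 'quota:' entry are skipped.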
def _fetch_image_as_file(self, context, vi, image_ds_loc):
"""Download image as an individual file to host via HTTP PUT."""
session = self._session
session_vim = session._get_vim()
cookies = session_vim.client.options.transport.cookiejar
LOG.debug("Downloading image file data %(image_id)s to "
"%(file_path)s on the data store "
"%(datastore_name)s",
{'image_id': vi.ii.image_id,
'file_path': image_ds_loc,
'datastore_name': vi.datastore.name},
instance=vi.instance)
vmware_images.fetch_image(
context,
vi.instance,
session._host,
vi.dc_info.name,
vi.datastore.name,
image_ds_loc.rel_path,
cookies=cookies)
def _prepare_sparse_image(self, vi):
tmp_dir_loc = vi.datastore.build_path(
self._tmp_folder, uuidutils.generate_uuid())
tmp_image_ds_loc = tmp_dir_loc.join(
vi.ii.image_id, "tmp-sparse.vmdk")
return tmp_dir_loc, tmp_image_ds_loc
def _prepare_flat_image(self, vi):
tmp_dir_loc = vi.datastore.build_path(
self._tmp_folder, uuidutils.generate_uuid())
tmp_image_ds_loc = tmp_dir_loc.join(
vi.ii.image_id, vi.cache_image_path.basename)
ds_util.mkdir(self._session, tmp_image_ds_loc.parent, vi.dc_info.ref)
vm_util.create_virtual_disk(
self._session, vi.dc_info.ref,
vi.ii.adapter_type,
vi.ii.disk_type,
str(tmp_image_ds_loc),
vi.ii.file_size_in_kb)
flat_vmdk_name = vi.cache_image_path.basename.replace('.vmdk',
'-flat.vmdk')
flat_vmdk_ds_loc = tmp_dir_loc.join(vi.ii.image_id, flat_vmdk_name)
self._delete_datastore_file(vi.instance, str(flat_vmdk_ds_loc),
vi.dc_info.ref)
return tmp_dir_loc, flat_vmdk_ds_loc
def _prepare_iso_image(self, vi):
tmp_dir_loc = vi.datastore.build_path(
self._tmp_folder, uuidutils.generate_uuid())
tmp_image_ds_loc = tmp_dir_loc.join(
vi.ii.image_id, vi.cache_image_path.basename)
return tmp_dir_loc, tmp_image_ds_loc
def _move_to_cache(self, dc_ref, src_folder_ds_path, dst_folder_ds_path):
try:
ds_util.file_move(self._session, dc_ref,
src_folder_ds_path, dst_folder_ds_path)
except vexc.FileAlreadyExistsException:
# Folder move has failed. This may be due to the fact that a
# process or thread has already completed the operation.
# Since image caching is synchronized, this can only happen
# due to action external to the process.
# In the event of a FileAlreadyExists we continue,
# all other exceptions will be raised.
LOG.warning(_LW("Destination %s already exists! Concurrent moves "
"can lead to unexpected results."),
dst_folder_ds_path)
def _cache_sparse_image(self, vi, tmp_image_ds_loc):
tmp_dir_loc = tmp_image_ds_loc.parent.parent
converted_image_ds_loc = tmp_dir_loc.join(
vi.ii.image_id, vi.cache_image_path.basename)
# converts fetched image to preallocated disk
vm_util.copy_virtual_disk(
self._session,
vi.dc_info.ref,
str(tmp_image_ds_loc),
str(converted_image_ds_loc))
self._delete_datastore_file(vi.instance, str(tmp_image_ds_loc),
vi.dc_info.ref)
self._move_to_cache(vi.dc_info.ref,
tmp_image_ds_loc.parent,
vi.cache_image_folder)
def _cache_flat_image(self, vi, tmp_image_ds_loc):
self._move_to_cache(vi.dc_info.ref,
tmp_image_ds_loc.parent,
vi.cache_image_folder)
def _cache_iso_image(self, vi, tmp_image_ds_loc):
self._move_to_cache(vi.dc_info.ref,
tmp_image_ds_loc.parent,
vi.cache_image_folder)
def _get_vm_config_info(self, instance, image_info, instance_name=None):
"""Captures all relevant information from the spawn parameters."""
if (instance.root_gb != 0 and
image_info.file_size_in_gb > instance.root_gb):
reason = _("Image disk size greater than requested disk size")
raise exception.InstanceUnacceptable(instance_id=instance.uuid,
reason=reason)
datastore = ds_util.get_datastore(
self._session, self._cluster, self._datastore_regex)
dc_info = self.get_datacenter_ref_and_name(datastore.ref)
return VirtualMachineInstanceConfigInfo(instance,
instance_name,
image_info,
datastore,
dc_info,
self._imagecache)
def _get_image_callbacks(self, vi):
disk_type = vi.ii.disk_type
image_fetch = self._fetch_image_as_file
if vi.ii.is_iso:
image_prepare = self._prepare_iso_image
image_cache = self._cache_iso_image
elif disk_type == constants.DISK_TYPE_SPARSE:
image_prepare = self._prepare_sparse_image
image_cache = self._cache_sparse_image
elif disk_type in constants.SUPPORTED_FLAT_VARIANTS:
image_prepare = self._prepare_flat_image
image_cache = self._cache_flat_image
else:
reason = _("disk type '%s' not supported") % disk_type
raise exception.InvalidDiskInfo(reason=reason)
return image_prepare, image_fetch, image_cache
def _fetch_image_if_missing(self, context, vi):
image_prepare, image_fetch, image_cache = self._get_image_callbacks(vi)
LOG.debug("Processing image %s", vi.ii.image_id)
with lockutils.lock(str(vi.cache_image_path),
lock_file_prefix='nova-vmware-fetch_image'):
self.check_cache_folder(vi.datastore.name, vi.datastore.ref)
ds_browser = self._get_ds_browser(vi.datastore.ref)
if not ds_util.file_exists(self._session, ds_browser,
vi.cache_image_folder,
vi.cache_image_path.basename):
LOG.debug("Preparing fetch location")
tmp_dir_loc, tmp_image_ds_loc = image_prepare(vi)
LOG.debug("Fetch image to %s", tmp_image_ds_loc)
image_fetch(context, vi, tmp_image_ds_loc)
LOG.debug("Caching image")
image_cache(vi, tmp_image_ds_loc)
LOG.debug("Cleaning up location %s", str(tmp_dir_loc))
self._delete_datastore_file(vi.instance, str(tmp_dir_loc),
vi.dc_info.ref)
def spawn(self, context, instance, image_meta, injected_files,
admin_password, network_info, block_device_info=None,
instance_name=None, power_on=True):
client_factory = self._session._get_vim().client.factory
image_info = vmware_images.VMwareImage.from_image(instance.image_ref,
image_meta)
vi = self._get_vm_config_info(instance, image_info, instance_name)
# Creates the virtual machine. The virtual machine reference returned
# is unique within Virtual Center.
vm_ref = self.build_virtual_machine(instance,
vi.instance_name,
image_info,
vi.dc_info,
vi.datastore,
network_info)
# Cache the vm_ref. This saves a remote call to the VC. This uses the
# instance_name. This covers all use cases including rescue and resize.
vm_util.vm_ref_cache_update(vi.instance_name, vm_ref)
# Set the machine.id parameter of the instance to inject
# the NIC configuration inside the VM
if CONF.flat_injected:
self._set_machine_id(client_factory, instance, network_info)
# Set the vnc configuration of the instance, vnc port starts from 5900
if CONF.vnc_enabled:
self._get_and_set_vnc_config(client_factory, instance)
block_device_mapping = []
if block_device_info is not None:
block_device_mapping = driver.block_device_info_get_mapping(
block_device_info)
# NOTE(mdbooth): the logic here is that we ignore the image if there
# are block device mappings. This behaviour is incorrect, and a bug in
# the driver. We should be able to accept an image and block device
# mappings.
if len(block_device_mapping) > 0:
msg = "Block device information present: %s" % block_device_info
# NOTE(mriedem): block_device_info can contain an auth_password
# so we have to scrub the message before logging it.
LOG.debug(logging.mask_password(msg), instance=instance)
for root_disk in block_device_mapping:
connection_info = root_disk['connection_info']
# TODO(hartsocks): instance is unnecessary, remove it
# we still use instance in many locations for no other purpose
# than logging, can we simplify this?
self._volumeops.attach_root_volume(connection_info, instance,
self._default_root_device,
vi.datastore.ref)
else:
self._imagecache.enlist_image(
image_info.image_id, vi.datastore, vi.dc_info.ref)
self._fetch_image_if_missing(context, vi)
if image_info.is_iso:
self._use_iso_image(vm_ref, vi)
elif image_info.linked_clone:
self._use_disk_image_as_linked_clone(vm_ref, vi)
else:
self._use_disk_image_as_full_clone(vm_ref, vi)
if configdrive.required_by(instance):
self._configure_config_drive(
instance, vm_ref, vi.dc_info, vi.datastore,
injected_files, admin_password)
if power_on:
vm_util.power_on_instance(self._session, instance, vm_ref=vm_ref)
def _create_config_drive(self, instance, injected_files, admin_password,
data_store_name, dc_name, upload_folder, cookies):
if CONF.config_drive_format != 'iso9660':
reason = (_('Invalid config_drive_format "%s"') %
CONF.config_drive_format)
raise exception.InstancePowerOnFailure(reason=reason)
LOG.info(_('Using config drive for instance'), instance=instance)
extra_md = {}
if admin_password:
extra_md['admin_pass'] = admin_password
inst_md = instance_metadata.InstanceMetadata(instance,
content=injected_files,
extra_md=extra_md)
try:
with configdrive.ConfigDriveBuilder(instance_md=inst_md) as cdb:
with utils.tempdir() as tmp_path:
tmp_file = os.path.join(tmp_path, 'configdrive.iso')
cdb.make_drive(tmp_file)
upload_iso_path = "%s/configdrive.iso" % (
upload_folder)
vmware_images.upload_iso_to_datastore(
tmp_file, instance,
host=self._session._host,
data_center_name=dc_name,
datastore_name=data_store_name,
cookies=cookies,
file_path=upload_iso_path)
return upload_iso_path
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_('Creating config drive failed with error: %s'),
e, instance=instance)
def _attach_cdrom_to_vm(self, vm_ref, instance,
datastore, file_path):
"""Attach cdrom to VM by reconfiguration."""
client_factory = self._session._get_vim().client.factory
devices = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "config.hardware.device")
(controller_key, unit_number,
controller_spec) = vm_util.allocate_controller_key_and_unit_number(
client_factory,
devices,
'ide')
cdrom_attach_config_spec = vm_util.get_cdrom_attach_config_spec(
client_factory, datastore, file_path,
controller_key, unit_number)
if controller_spec:
cdrom_attach_config_spec.deviceChange.append(controller_spec)
LOG.debug("Reconfiguring VM instance to attach cdrom %s",
file_path, instance=instance)
vm_util.reconfigure_vm(self._session, vm_ref, cdrom_attach_config_spec)
LOG.debug("Reconfigured VM instance to attach cdrom %s",
file_path, instance=instance)
def _create_vm_snapshot(self, instance, vm_ref):
LOG.debug("Creating Snapshot of the VM instance", instance=instance)
snapshot_task = self._session._call_method(
self._session._get_vim(),
"CreateSnapshot_Task", vm_ref,
name="%s-snapshot" % instance.uuid,
description="Taking Snapshot of the VM",
memory=False,
quiesce=True)
self._session._wait_for_task(snapshot_task)
LOG.debug("Created Snapshot of the VM instance", instance=instance)
task_info = self._session._call_method(vim_util,
"get_dynamic_property",
snapshot_task, "Task", "info")
snapshot = task_info.result
return snapshot
@retry_if_task_in_progress
def _delete_vm_snapshot(self, instance, vm_ref, snapshot):
LOG.debug("Deleting Snapshot of the VM instance", instance=instance)
delete_snapshot_task = self._session._call_method(
self._session._get_vim(),
"RemoveSnapshot_Task", snapshot,
removeChildren=False, consolidate=True)
self._session._wait_for_task(delete_snapshot_task)
LOG.debug("Deleted Snapshot of the VM instance", instance=instance)
def snapshot(self, context, instance, image_id, update_task_state):
"""Create snapshot from a running VM instance.
Steps followed are:
1. Get the name of the vmdk file which the VM points to right now.
Can be a chain of snapshots, so we need to know the last in the
chain.
2. Create the snapshot. A new vmdk is created which the VM points to
now. The earlier vmdk becomes read-only.
3. Call CopyVirtualDisk which coalesces the disk chain to form a single
           vmdk, or rather a .vmdk metadata file and a -flat.vmdk disk data file.
4. Now upload the -flat.vmdk file to the image store.
5. Delete the coalesced .vmdk and -flat.vmdk created.
"""
vm_ref = vm_util.get_vm_ref(self._session, instance)
service_content = self._session._get_vim().service_content
def _get_vm_and_vmdk_attribs():
# Get the vmdk file name that the VM is pointing to
hw_devices = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "config.hardware.device")
(vmdk_file_path_before_snapshot, adapter_type,
disk_type) = vm_util.get_vmdk_path_and_adapter_type(
hw_devices, uuid=instance.uuid)
if not vmdk_file_path_before_snapshot:
LOG.debug("No root disk defined. Unable to snapshot.")
raise error_util.NoRootDiskDefined()
datastore_name = ds_util.DatastorePath.parse(
vmdk_file_path_before_snapshot).datastore
os_type = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "summary.config.guestId")
return (vmdk_file_path_before_snapshot, adapter_type, disk_type,
datastore_name, os_type)
(vmdk_file_path_before_snapshot, adapter_type, disk_type,
datastore_name, os_type) = _get_vm_and_vmdk_attribs()
snapshot = self._create_vm_snapshot(instance, vm_ref)
update_task_state(task_state=task_states.IMAGE_PENDING_UPLOAD)
def _check_if_tmp_folder_exists():
# Copy the contents of the VM that were there just before the
# snapshot was taken
ds_ref_ret = self._session._call_method(
vim_util, "get_dynamic_property", vm_ref, "VirtualMachine",
"datastore")
if ds_ref_ret is None:
raise exception.DatastoreNotFound()
ds_ref = ds_ref_ret.ManagedObjectReference[0]
self.check_temp_folder(datastore_name, ds_ref)
return ds_ref
ds_ref = _check_if_tmp_folder_exists()
# Generate a random vmdk file name to which the coalesced vmdk content
        # will be copied. A random name is chosen so that we don't have
# name clashes.
random_name = uuidutils.generate_uuid()
dest_vmdk_file_path = ds_util.DatastorePath(
datastore_name, self._tmp_folder, "%s.vmdk" % random_name)
dest_vmdk_data_file_path = ds_util.DatastorePath(
datastore_name, self._tmp_folder, "%s-flat.vmdk" % random_name)
dc_info = self.get_datacenter_ref_and_name(ds_ref)
def _copy_vmdk_content():
# Consolidate the snapshotted disk to a temporary vmdk.
LOG.debug('Copying snapshotted disk %s.',
vmdk_file_path_before_snapshot,
instance=instance)
copy_disk_task = self._session._call_method(
self._session._get_vim(),
"CopyVirtualDisk_Task",
service_content.virtualDiskManager,
sourceName=vmdk_file_path_before_snapshot,
sourceDatacenter=dc_info.ref,
destName=str(dest_vmdk_file_path),
destDatacenter=dc_info.ref,
force=False)
self._session._wait_for_task(copy_disk_task)
LOG.debug('Copied snapshotted disk %s.',
vmdk_file_path_before_snapshot,
instance=instance)
_copy_vmdk_content()
self._delete_vm_snapshot(instance, vm_ref, snapshot)
cookies = self._session._get_vim().client.options.transport.cookiejar
def _upload_vmdk_to_image_repository():
# Upload the contents of -flat.vmdk file which has the disk data.
LOG.debug("Uploading image %s", image_id,
instance=instance)
vmware_images.upload_image(
context,
image_id,
instance,
os_type=os_type,
disk_type=constants.DEFAULT_DISK_TYPE,
adapter_type=adapter_type,
image_version=1,
host=self._session._host,
data_center_name=dc_info.name,
datastore_name=datastore_name,
cookies=cookies,
file_path="%s/%s-flat.vmdk" % (self._tmp_folder, random_name))
LOG.debug("Uploaded image %s", image_id,
instance=instance)
update_task_state(task_state=task_states.IMAGE_UPLOADING,
expected_state=task_states.IMAGE_PENDING_UPLOAD)
_upload_vmdk_to_image_repository()
def _clean_temp_data():
"""Delete temporary vmdk files generated in image handling
operations.
"""
# The data file is the one occupying space, and likelier to see
# deletion problems, so prioritize its deletion first. In the
# unlikely event that its deletion fails, the small descriptor file
# is retained too by design since it makes little sense to remove
# it when the data disk it refers to still lingers.
for f in dest_vmdk_data_file_path, dest_vmdk_file_path:
self._delete_datastore_file(instance, f, dc_info.ref)
_clean_temp_data()
def reboot(self, instance, network_info):
"""Reboot a VM instance."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
lst_properties = ["summary.guest.toolsStatus", "runtime.powerState",
"summary.guest.toolsRunningStatus"]
props = self._session._call_method(vim_util, "get_object_properties",
None, vm_ref, "VirtualMachine",
lst_properties)
query = vm_util.get_values_from_object_properties(self._session, props)
pwr_state = query['runtime.powerState']
tools_status = query['summary.guest.toolsStatus']
tools_running_status = query['summary.guest.toolsRunningStatus']
# Raise an exception if the VM is not powered On.
if pwr_state not in ["poweredOn"]:
reason = _("instance is not powered on")
raise exception.InstanceRebootFailure(reason=reason)
        # If the latest vmware tools are installed in the VM and the tools
        # are running, then only do a guest reboot. Otherwise do a hard reset.
if (tools_status == "toolsOk" and
tools_running_status == "guestToolsRunning"):
LOG.debug("Rebooting guest OS of VM", instance=instance)
self._session._call_method(self._session._get_vim(), "RebootGuest",
vm_ref)
LOG.debug("Rebooted guest OS of VM", instance=instance)
else:
LOG.debug("Doing hard reboot of VM", instance=instance)
reset_task = self._session._call_method(self._session._get_vim(),
"ResetVM_Task", vm_ref)
self._session._wait_for_task(reset_task)
LOG.debug("Did hard reboot of VM", instance=instance)
def _destroy_instance(self, instance, destroy_disks=True,
instance_name=None):
# Destroy a VM instance
# Get the instance name. In some cases this may differ from the 'uuid',
# for example when the spawn of a rescue instance takes place.
if instance_name is None:
instance_name = instance['uuid']
try:
vm_ref = vm_util.get_vm_ref_from_name(self._session, instance_name)
if vm_ref is None:
LOG.warning(_('Instance does not exist on backend'),
instance=instance)
return
lst_properties = ["config.files.vmPathName", "runtime.powerState",
"datastore"]
props = self._session._call_method(vim_util,
"get_object_properties",
None, vm_ref, "VirtualMachine", lst_properties)
query = vm_util.get_values_from_object_properties(
self._session, props)
pwr_state = query['runtime.powerState']
vm_config_pathname = query['config.files.vmPathName']
vm_ds_path = None
if vm_config_pathname:
vm_ds_path = ds_util.DatastorePath.parse(vm_config_pathname)
# Power off the VM if it is in PoweredOn state.
if pwr_state == "poweredOn":
vm_util.power_off_instance(self._session, instance, vm_ref)
# Un-register the VM
try:
LOG.debug("Unregistering the VM", instance=instance)
self._session._call_method(self._session._get_vim(),
"UnregisterVM", vm_ref)
LOG.debug("Unregistered the VM", instance=instance)
except Exception as excep:
LOG.warn(_("In vmwareapi:vmops:_destroy_instance, got this "
"exception while un-registering the VM: %s"),
excep)
# Delete the folder holding the VM related content on
# the datastore.
if destroy_disks and vm_ds_path:
try:
dir_ds_compliant_path = vm_ds_path.parent
LOG.debug("Deleting contents of the VM from "
"datastore %(datastore_name)s",
{'datastore_name': vm_ds_path.datastore},
instance=instance)
ds_ref_ret = query['datastore']
ds_ref = ds_ref_ret.ManagedObjectReference[0]
dc_info = self.get_datacenter_ref_and_name(ds_ref)
ds_util.file_delete(self._session,
dir_ds_compliant_path,
dc_info.ref)
LOG.debug("Deleted contents of the VM from "
"datastore %(datastore_name)s",
{'datastore_name': vm_ds_path.datastore},
instance=instance)
except Exception:
LOG.warn(_("In vmwareapi:vmops:_destroy_instance, "
"exception while deleting the VM contents from "
"the disk"), exc_info=True)
except Exception as exc:
LOG.exception(exc, instance=instance)
finally:
vm_util.vm_ref_cache_delete(instance_name)
def destroy(self, instance, destroy_disks=True):
"""Destroy a VM instance.
Steps followed for each VM are:
1. Power off, if it is in poweredOn state.
2. Un-register.
3. Delete the contents of the folder holding the VM related data.
"""
# If there is a rescue VM then we need to destroy that one too.
LOG.debug("Destroying instance", instance=instance)
if instance['vm_state'] == vm_states.RESCUED:
LOG.debug("Rescue VM configured", instance=instance)
try:
self.unrescue(instance, power_on=False)
LOG.debug("Rescue VM destroyed", instance=instance)
except Exception:
rescue_name = instance['uuid'] + self._rescue_suffix
self._destroy_instance(instance,
destroy_disks=destroy_disks,
instance_name=rescue_name)
        # NOTE(arnaud): Destroy the uuid-orig and uuid VMs only if this is not
        # triggered by the revert resize api call. This prevents the uuid-orig
        # VM from being deleted, so that it can be re-associated later.
if instance.task_state != task_states.RESIZE_REVERTING:
            # When VM deletion is triggered in the middle of a VM resize,
            # before the VM reaches the RESIZED state, the uuid-orig VM needs
            # to be deleted to avoid a VM leak. The _destroy_instance method
            # checks whether the vmref exists before attempting deletion.
resize_orig_vmname = instance['uuid'] + self._migrate_suffix
vm_orig_ref = vm_util.get_vm_ref_from_name(self._session,
resize_orig_vmname)
if vm_orig_ref:
self._destroy_instance(instance,
destroy_disks=destroy_disks,
instance_name=resize_orig_vmname)
self._destroy_instance(instance, destroy_disks=destroy_disks)
LOG.debug("Instance destroyed", instance=instance)
def pause(self, instance):
msg = _("pause not supported for vmwareapi")
raise NotImplementedError(msg)
def unpause(self, instance):
msg = _("unpause not supported for vmwareapi")
raise NotImplementedError(msg)
def suspend(self, instance):
"""Suspend the specified instance."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
pwr_state = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "runtime.powerState")
# Only PoweredOn VMs can be suspended.
if pwr_state == "poweredOn":
LOG.debug("Suspending the VM", instance=instance)
suspend_task = self._session._call_method(self._session._get_vim(),
"SuspendVM_Task", vm_ref)
self._session._wait_for_task(suspend_task)
LOG.debug("Suspended the VM", instance=instance)
# Raise Exception if VM is poweredOff
elif pwr_state == "poweredOff":
reason = _("instance is powered off and cannot be suspended.")
raise exception.InstanceSuspendFailure(reason=reason)
else:
LOG.debug("VM was already in suspended state. So returning "
"without doing anything", instance=instance)
def resume(self, instance):
"""Resume the specified instance."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
pwr_state = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "runtime.powerState")
if pwr_state.lower() == "suspended":
LOG.debug("Resuming the VM", instance=instance)
suspend_task = self._session._call_method(
self._session._get_vim(),
"PowerOnVM_Task", vm_ref)
self._session._wait_for_task(suspend_task)
LOG.debug("Resumed the VM", instance=instance)
else:
reason = _("instance is not in a suspended state")
raise exception.InstanceResumeFailure(reason=reason)
def rescue(self, context, instance, network_info, image_meta):
"""Rescue the specified instance.
- shutdown the instance VM.
- spawn a rescue VM (the vm name-label will be instance-N-rescue).
"""
vm_ref = vm_util.get_vm_ref(self._session, instance)
self.power_off(instance)
r_instance = copy.deepcopy(instance)
instance_name = r_instance.uuid + self._rescue_suffix
self.spawn(context, r_instance, image_meta,
None, None, network_info,
instance_name=instance_name,
power_on=False)
# Attach vmdk to the rescue VM
hardware_devices = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "config.hardware.device")
(vmdk_path, adapter_type,
disk_type) = vm_util.get_vmdk_path_and_adapter_type(
hardware_devices, uuid=instance.uuid)
rescue_vm_ref = vm_util.get_vm_ref_from_name(self._session,
instance_name)
self._volumeops.attach_disk_to_vm(
rescue_vm_ref, r_instance,
adapter_type, disk_type, vmdk_path)
vm_util.power_on_instance(self._session, r_instance,
vm_ref=rescue_vm_ref)
def unrescue(self, instance, power_on=True):
"""Unrescue the specified instance."""
# Get the original vmdk_path
vm_ref = vm_util.get_vm_ref(self._session, instance)
hardware_devices = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "config.hardware.device")
(vmdk_path, adapter_type,
disk_type) = vm_util.get_vmdk_path_and_adapter_type(
hardware_devices, uuid=instance.uuid)
r_instance = copy.deepcopy(instance)
instance_name = r_instance.uuid + self._rescue_suffix
        # detach the original instance's disk from the rescue VM
vm_rescue_ref = vm_util.get_vm_ref_from_name(self._session,
instance_name)
hardware_devices = self._session._call_method(vim_util,
"get_dynamic_property", vm_rescue_ref,
"VirtualMachine", "config.hardware.device")
device = vm_util.get_vmdk_volume_disk(hardware_devices, path=vmdk_path)
vm_util.power_off_instance(self._session, r_instance, vm_rescue_ref)
self._volumeops.detach_disk_from_vm(vm_rescue_ref, r_instance, device)
self._destroy_instance(r_instance, instance_name=instance_name)
if power_on:
vm_util.power_on_instance(self._session, instance, vm_ref=vm_ref)
def power_off(self, instance):
"""Power off the specified instance.
:param instance: nova.objects.instance.Instance
"""
vm_util.power_off_instance(self._session, instance)
def power_on(self, instance):
vm_util.power_on_instance(self._session, instance)
def _get_orig_vm_name_label(self, instance):
return instance.uuid + '-orig'
def _update_instance_progress(self, context, instance, step, total_steps):
"""Update instance progress percent to reflect current step number
"""
# Divide the action's workflow into discrete steps and "bump" the
# instance's progress field as each step is completed.
#
# For a first cut this should be fine, however, for large VM images,
# the clone disk step begins to dominate the equation. A
# better approximation would use the percentage of the VM image that
# has been streamed to the destination host.
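        # Illustrative arithmetic only (the step count is an assumption, not
        # taken from this code): if the workflow had 4 total steps, finishing
        # step 2 would report round(2.0 / 4 * 100) == 50 percent progress.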
progress = round(float(step) / total_steps * 100)
instance_uuid = instance.uuid
LOG.debug("Updating instance '%(instance_uuid)s' progress to"
" %(progress)d",
{'instance_uuid': instance_uuid, 'progress': progress},
instance=instance)
instance.progress = progress
instance.save()
def migrate_disk_and_power_off(self, context, instance, dest,
flavor):
"""Transfers the disk of a running instance in multiple phases, turning
off the instance before the end.
"""
# Checks if the migration needs a disk resize down.
if flavor['root_gb'] < instance['root_gb']:
reason = _("Unable to shrink disk.")
raise exception.InstanceFaultRollback(
exception.ResizeError(reason=reason))
# 0. Zero out the progress to begin
self._update_instance_progress(context, instance,
step=0,
total_steps=RESIZE_TOTAL_STEPS)
vm_ref = vm_util.get_vm_ref(self._session, instance)
# Read the host_ref for the destination. If this is None then the
# VC will decide on placement
host_ref = self._get_host_ref_from_name(dest)
# 1. Power off the instance
self.power_off(instance)
self._update_instance_progress(context, instance,
step=1,
total_steps=RESIZE_TOTAL_STEPS)
# 2. Disassociate the linked vsphere VM from the instance
vm_util.disassociate_vmref_from_instance(self._session, instance,
vm_ref,
suffix=self._migrate_suffix)
self._update_instance_progress(context, instance,
step=2,
total_steps=RESIZE_TOTAL_STEPS)
ds_ref = ds_util.get_datastore(
self._session, self._cluster,
datastore_regex=self._datastore_regex).ref
dc_info = self.get_datacenter_ref_and_name(ds_ref)
# 3. Clone the VM for instance
vm_util.clone_vmref_for_instance(self._session, instance, vm_ref,
host_ref, ds_ref, dc_info.vmFolder)
self._update_instance_progress(context, instance,
step=3,
total_steps=RESIZE_TOTAL_STEPS)
def confirm_migration(self, migration, instance, network_info):
"""Confirms a resize, destroying the source VM."""
# Destroy the original VM. The vm_ref needs to be searched using the
# instance.uuid + self._migrate_suffix as the identifier. We will
        # not find the vm when searching by instanceUuid; instead it is
        # found using the uuid buried in the extraConfig
vm_ref = vm_util.search_vm_ref_by_identifier(self._session,
instance.uuid + self._migrate_suffix)
if vm_ref is None:
LOG.debug("instance not present", instance=instance)
return
try:
LOG.debug("Destroying the VM", instance=instance)
destroy_task = self._session._call_method(
self._session._get_vim(),
"Destroy_Task", vm_ref)
self._session._wait_for_task(destroy_task)
LOG.debug("Destroyed the VM", instance=instance)
except Exception as excep:
LOG.warn(_("In vmwareapi:vmops:confirm_migration, got this "
"exception while destroying the VM: %s"), excep)
def finish_revert_migration(self, context, instance, network_info,
block_device_info, power_on=True):
"""Finish reverting a resize."""
vm_util.associate_vmref_for_instance(self._session, instance,
suffix=self._migrate_suffix)
if power_on:
vm_util.power_on_instance(self._session, instance)
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance=False,
block_device_info=None, power_on=True):
"""Completes a resize, turning on the migrated instance."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
if resize_instance:
client_factory = self._session._get_vim().client.factory
vm_resize_spec = vm_util.get_vm_resize_spec(client_factory,
instance)
vm_util.reconfigure_vm(self._session, vm_ref, vm_resize_spec)
# Resize the disk (if larger)
old_root_gb = instance.system_metadata['old_instance_type_root_gb']
if instance['root_gb'] > int(old_root_gb):
root_disk_in_kb = instance['root_gb'] * units.Mi
vmdk_path = vm_util.get_vmdk_path(self._session, vm_ref,
instance)
data_store_ref = ds_util.get_datastore(self._session,
self._cluster, datastore_regex=self._datastore_regex).ref
dc_info = self.get_datacenter_ref_and_name(data_store_ref)
self._extend_virtual_disk(instance, root_disk_in_kb, vmdk_path,
dc_info.ref)
# TODO(ericwb): add extend for ephemeral disk
# 4. Start VM
if power_on:
vm_util.power_on_instance(self._session, instance, vm_ref=vm_ref)
self._update_instance_progress(context, instance,
step=4,
total_steps=RESIZE_TOTAL_STEPS)
def live_migration(self, context, instance_ref, dest,
post_method, recover_method, block_migration=False):
"""Spawning live_migration operation for distributing high-load."""
vm_ref = vm_util.get_vm_ref(self._session, instance_ref)
host_ref = self._get_host_ref_from_name(dest)
if host_ref is None:
raise exception.HostNotFound(host=dest)
LOG.debug("Migrating VM to host %s", dest, instance=instance_ref)
try:
vm_migrate_task = self._session._call_method(
self._session._get_vim(),
"MigrateVM_Task", vm_ref,
host=host_ref,
priority="defaultPriority")
self._session._wait_for_task(vm_migrate_task)
except Exception:
with excutils.save_and_reraise_exception():
recover_method(context, instance_ref, dest, block_migration)
post_method(context, instance_ref, dest, block_migration)
LOG.debug("Migrated VM to host %s", dest, instance=instance_ref)
def poll_rebooting_instances(self, timeout, instances):
"""Poll for rebooting instances."""
ctxt = nova_context.get_admin_context()
instances_info = dict(instance_count=len(instances),
timeout=timeout)
if instances_info["instance_count"] > 0:
LOG.info(_("Found %(instance_count)d hung reboots "
"older than %(timeout)d seconds") % instances_info)
for instance in instances:
LOG.info(_("Automatically hard rebooting"), instance=instance)
self.compute_api.reboot(ctxt, instance, "HARD")
def get_info(self, instance):
"""Return data about the VM instance."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
lst_properties = ["summary.config.numCpu",
"summary.config.memorySizeMB",
"runtime.powerState"]
vm_props = self._session._call_method(vim_util,
"get_object_properties", None, vm_ref, "VirtualMachine",
lst_properties)
query = vm_util.get_values_from_object_properties(
self._session, vm_props)
max_mem = int(query['summary.config.memorySizeMB']) * 1024
return {'state': VMWARE_POWER_STATES[query['runtime.powerState']],
'max_mem': max_mem,
'mem': max_mem,
'num_cpu': int(query['summary.config.numCpu']),
'cpu_time': 0}
def _get_diagnostics(self, instance):
"""Return data about VM diagnostics."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
lst_properties = ["summary.config",
"summary.quickStats",
"summary.runtime"]
vm_props = self._session._call_method(vim_util,
"get_object_properties", None, vm_ref, "VirtualMachine",
lst_properties)
query = vm_util.get_values_from_object_properties(self._session,
vm_props)
data = {}
        # All of the values received are objects. Convert them to dictionaries.
for value in query.values():
prop_dict = vim_util.object_to_dict(value, list_depth=1)
data.update(prop_dict)
return data
def get_diagnostics(self, instance):
"""Return data about VM diagnostics."""
data = self._get_diagnostics(instance)
        # Add a namespace to all of the diagnostics
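        # For example (illustrative key only), a 'powerState' entry from
        # _get_diagnostics would be returned as 'vmware:powerState'.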
return dict([('vmware:' + k, v) for k, v in data.items()])
def get_instance_diagnostics(self, instance):
"""Return data about VM diagnostics."""
data = self._get_diagnostics(instance)
state = data.get('powerState')
if state:
state = power_state.STATE_MAP[VMWARE_POWER_STATES[state]]
uptime = data.get('uptimeSeconds', 0)
config_drive = configdrive.required_by(instance)
diags = diagnostics.Diagnostics(state=state,
driver='vmwareapi',
config_drive=config_drive,
hypervisor_os='esxi',
uptime=uptime)
diags.memory_details.maximum = data.get('memorySizeMB', 0)
diags.memory_details.used = data.get('guestMemoryUsage', 0)
# TODO(garyk): add in cpu, nic and disk stats
return diags
def _get_vnc_console_connection(self, instance):
"""Return connection info for a vnc console."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
opt_value = self._session._call_method(vim_util,
'get_dynamic_property',
vm_ref, 'VirtualMachine',
vm_util.VNC_CONFIG_KEY)
if opt_value:
port = int(opt_value.value)
else:
raise exception.ConsoleTypeUnavailable(console_type='vnc')
return {'port': port,
'internal_access_path': None}
@staticmethod
def _get_machine_id_str(network_info):
machine_id_str = ''
for vif in network_info:
# TODO(vish): add support for dns2
# TODO(sateesh): add support for injection of ipv6 configuration
network = vif['network']
ip_v4 = netmask_v4 = gateway_v4 = broadcast_v4 = dns = None
subnets_v4 = [s for s in network['subnets'] if s['version'] == 4]
if len(subnets_v4) > 0:
if len(subnets_v4[0]['ips']) > 0:
ip_v4 = subnets_v4[0]['ips'][0]
if len(subnets_v4[0]['dns']) > 0:
dns = subnets_v4[0]['dns'][0]['address']
netmask_v4 = str(subnets_v4[0].as_netaddr().netmask)
gateway_v4 = subnets_v4[0]['gateway']['address']
broadcast_v4 = str(subnets_v4[0].as_netaddr().broadcast)
interface_str = ";".join([vif['address'],
ip_v4 and ip_v4['address'] or '',
netmask_v4 or '',
gateway_v4 or '',
broadcast_v4 or '',
dns or ''])
machine_id_str = machine_id_str + interface_str + '#'
return machine_id_str
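    # Illustrative machine id string produced by _get_machine_id_str (the
    # addresses below are made up, not taken from this code):
    #   "aa:bb:cc:dd:ee:ff;10.0.0.2;255.255.255.0;10.0.0.1;10.0.0.255;8.8.8.8#"
    # i.e. one 'mac;ip;netmask;gateway;broadcast;dns' segment per VIF, each
    # terminated by '#'.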
def _set_machine_id(self, client_factory, instance, network_info):
"""Set the machine id of the VM for guest tools to pick up
and reconfigure the network interfaces.
"""
vm_ref = vm_util.get_vm_ref(self._session, instance)
machine_id_change_spec = vm_util.get_machine_id_change_spec(
client_factory,
self._get_machine_id_str(network_info))
LOG.debug("Reconfiguring VM instance to set the machine id",
instance=instance)
vm_util.reconfigure_vm(self._session, vm_ref, machine_id_change_spec)
LOG.debug("Reconfigured VM instance to set the machine id",
instance=instance)
@utils.synchronized('vmware.get_and_set_vnc_port')
def _get_and_set_vnc_config(self, client_factory, instance):
"""Set the vnc configuration of the VM."""
port = vm_util.get_vnc_port(self._session)
vm_ref = vm_util.get_vm_ref(self._session, instance)
vnc_config_spec = vm_util.get_vnc_config_spec(
client_factory, port)
LOG.debug("Reconfiguring VM instance to enable vnc on "
"port - %(port)s", {'port': port},
instance=instance)
vm_util.reconfigure_vm(self._session, vm_ref, vnc_config_spec)
LOG.debug("Reconfigured VM instance to enable vnc on "
"port - %(port)s", {'port': port},
instance=instance)
def _get_ds_browser(self, ds_ref):
ds_browser = self._datastore_browser_mapping.get(ds_ref.value)
if not ds_browser:
ds_browser = self._session._call_method(
vim_util, "get_dynamic_property", ds_ref, "Datastore",
"browser")
self._datastore_browser_mapping[ds_ref.value] = ds_browser
return ds_browser
def _get_host_ref_from_name(self, host_name):
"""Get reference to the host with the name specified."""
host_objs = self._session._call_method(vim_util, "get_objects",
"HostSystem", ["name"])
vm_util._cancel_retrieve_if_necessary(self._session, host_objs)
for host in host_objs:
if hasattr(host, 'propSet'):
if host.propSet[0].val == host_name:
return host.obj
return None
def _get_vmfolder_ref(self):
"""Get the Vm folder ref from the datacenter."""
dc_objs = self._session._call_method(vim_util, "get_objects",
"Datacenter", ["vmFolder"])
vm_util._cancel_retrieve_if_necessary(self._session, dc_objs)
# There is only one default datacenter in a standalone ESX host
vm_folder_ref = dc_objs.objects[0].propSet[0].val
return vm_folder_ref
def _create_folder_if_missing(self, ds_name, ds_ref, folder):
"""Create a folder if it does not exist.
        Currently there are two folders that are required on the datastore:
- base folder - the folder to store cached images
- temp folder - the folder used for snapshot management and
image uploading
This method is aimed to be used for the management of those
folders to ensure that they are created if they are missing.
The ds_util method mkdir will be used to check if the folder
        exists. If this throws the exception 'FileAlreadyExistsException'
then the folder already exists on the datastore.
"""
path = ds_util.DatastorePath(ds_name, folder)
dc_info = self.get_datacenter_ref_and_name(ds_ref)
try:
ds_util.mkdir(self._session, path, dc_info.ref)
LOG.debug("Folder %s created.", path)
except vexc.FileAlreadyExistsException:
# NOTE(hartsocks): if the folder already exists, that
# just means the folder was prepped by another process.
pass
def check_cache_folder(self, ds_name, ds_ref):
"""Check that the cache folder exists."""
self._create_folder_if_missing(ds_name, ds_ref, self._base_folder)
def check_temp_folder(self, ds_name, ds_ref):
"""Check that the temp folder exists."""
self._create_folder_if_missing(ds_name, ds_ref, self._tmp_folder)
def _check_if_folder_file_exists(self, ds_browser, ds_ref, ds_name,
folder_name, file_name):
# Ensure that the cache folder exists
self.check_cache_folder(ds_name, ds_ref)
# Check if the file exists or not.
folder_ds_path = ds_util.DatastorePath(ds_name, folder_name)
return ds_util.file_exists(
self._session, ds_browser, folder_ds_path, file_name)
def inject_network_info(self, instance, network_info):
"""inject network info for specified instance."""
# Set the machine.id parameter of the instance to inject
# the NIC configuration inside the VM
client_factory = self._session._get_vim().client.factory
self._set_machine_id(client_factory, instance, network_info)
def manage_image_cache(self, context, instances):
if not CONF.remove_unused_base_images:
LOG.debug("Image aging disabled. Aging will not be done.")
return
datastores = ds_util.get_available_datastores(self._session,
self._cluster,
self._datastore_regex)
datastores_info = []
for ds in datastores:
dc_info = self.get_datacenter_ref_and_name(ds.ref)
datastores_info.append((ds, dc_info))
self._imagecache.update(context, instances, datastores_info)
def _get_valid_vms_from_retrieve_result(self, retrieve_result):
"""Returns list of valid vms from RetrieveResult object."""
lst_vm_names = []
while retrieve_result:
token = vm_util._get_token(retrieve_result)
for vm in retrieve_result.objects:
vm_name = None
conn_state = None
for prop in vm.propSet:
if prop.name == "name":
vm_name = prop.val
elif prop.name == "runtime.connectionState":
conn_state = prop.val
# Ignoring the orphaned or inaccessible VMs
if conn_state not in ["orphaned", "inaccessible"]:
lst_vm_names.append(vm_name)
if token:
retrieve_result = self._session._call_method(vim_util,
"continue_to_get_objects",
token)
else:
break
return lst_vm_names
def instance_exists(self, instance):
try:
vm_util.get_vm_ref(self._session, instance)
return True
except exception.InstanceNotFound:
return False
def attach_interface(self, instance, image_meta, vif):
"""Attach an interface to the instance."""
vif_model = image_meta.get("hw_vif_model",
constants.DEFAULT_VIF_MODEL)
vif_model = vm_util.convert_vif_model(vif_model)
vif_info = vmwarevif.get_vif_dict(self._session, self._cluster,
vif_model, utils.is_neutron(), vif)
vm_ref = vm_util.get_vm_ref(self._session, instance)
# Ensure that there is not a race with the port index management
with lockutils.lock(instance.uuid,
lock_file_prefix='nova-vmware-hot-plug'):
port_index = vm_util.get_attach_port_index(self._session, vm_ref)
client_factory = self._session._get_vim().client.factory
attach_config_spec = vm_util.get_network_attach_config_spec(
client_factory, vif_info, port_index)
LOG.debug("Reconfiguring VM to attach interface",
instance=instance)
try:
vm_util.reconfigure_vm(self._session, vm_ref,
attach_config_spec)
except Exception as e:
LOG.error(_LE('Attaching network adapter failed. Exception: '
' %s'),
e, instance=instance)
raise exception.InterfaceAttachFailed(
instance_uuid=instance['uuid'])
LOG.debug("Reconfigured VM to attach interface", instance=instance)
def detach_interface(self, instance, vif):
"""Detach an interface from the instance."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
# Ensure that there is not a race with the port index management
with lockutils.lock(instance.uuid,
lock_file_prefix='nova-vmware-hot-plug'):
port_index = vm_util.get_vm_detach_port_index(self._session,
vm_ref,
vif['id'])
if port_index is None:
msg = _("No device with interface-id %s exists on "
"VM") % vif['id']
raise exception.NotFound(msg)
hardware_devices = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "config.hardware.device")
device = vmwarevif.get_network_device(hardware_devices,
vif['address'])
if device is None:
msg = _("No device with MAC address %s exists on the "
"VM") % vif['address']
raise exception.NotFound(msg)
client_factory = self._session._get_vim().client.factory
detach_config_spec = vm_util.get_network_detach_config_spec(
client_factory, device, port_index)
LOG.debug("Reconfiguring VM to detach interface",
instance=instance)
try:
vm_util.reconfigure_vm(self._session, vm_ref,
detach_config_spec)
except Exception as e:
LOG.error(_LE('Detaching network adapter failed. Exception: '
'%s'),
e, instance=instance)
raise exception.InterfaceDetachFailed(
instance_uuid=instance['uuid'])
LOG.debug("Reconfigured VM to detach interface", instance=instance)
def _use_disk_image_as_full_clone(self, vm_ref, vi):
"""Uses cached image disk by copying it into the VM directory."""
instance_folder = vi.instance_name
root_disk_name = "%s.vmdk" % vi.instance_name
root_disk_ds_loc = vi.datastore.build_path(instance_folder,
root_disk_name)
vm_util.copy_virtual_disk(
self._session,
vi.dc_info.ref,
str(vi.cache_image_path),
str(root_disk_ds_loc))
self._extend_if_required(
vi.dc_info, vi.ii, vi.instance, str(root_disk_ds_loc))
self._volumeops.attach_disk_to_vm(
vm_ref, vi.instance,
vi.ii.adapter_type, vi.ii.disk_type,
str(root_disk_ds_loc),
vi.root_gb * units.Mi, False)
def _sized_image_exists(self, sized_disk_ds_loc, ds_ref):
ds_browser = self._get_ds_browser(ds_ref)
return ds_util.file_exists(
self._session, ds_browser, sized_disk_ds_loc.parent,
sized_disk_ds_loc.basename)
def _use_disk_image_as_linked_clone(self, vm_ref, vi):
"""Uses cached image as parent of a COW child in the VM directory."""
sized_image_disk_name = "%s.vmdk" % vi.ii.image_id
if vi.root_gb > 0:
sized_image_disk_name = "%s.%s.vmdk" % (vi.ii.image_id, vi.root_gb)
sized_disk_ds_loc = vi.cache_image_folder.join(sized_image_disk_name)
# Ensure only a single thread extends the image at once.
# We do this by taking a lock on the name of the extended
# image. This allows multiple threads to create resized
# copies simultaneously, as long as they are different
# sizes. Threads attempting to create the same resized copy
# will be serialized, with only the first actually creating
# the copy.
#
# Note that the object is in a per-nova cache directory,
# so inter-nova locking is not a concern. Consequently we
# can safely use simple thread locks.
with lockutils.lock(str(sized_disk_ds_loc),
lock_file_prefix='nova-vmware-image'):
if not self._sized_image_exists(sized_disk_ds_loc,
vi.datastore.ref):
LOG.debug("Copying root disk of size %sGb", vi.root_gb)
try:
vm_util.copy_virtual_disk(
self._session,
vi.dc_info.ref,
str(vi.cache_image_path),
str(sized_disk_ds_loc))
except Exception as e:
LOG.warning(_("Root disk file creation "
"failed - %s"), e)
with excutils.save_and_reraise_exception():
LOG.error(_LE('Failed to copy cached '
'image %(source)s to '
'%(dest)s for resize: '
'%(error)s'),
{'source': vi.cache_image_path,
'dest': sized_disk_ds_loc,
'error': e.message})
try:
ds_util.file_delete(self._session,
sized_disk_ds_loc,
vi.dc_info.ref)
except vexc.FileNotFoundException:
# File was never created: cleanup not
# required
pass
# Resize the copy to the appropriate size. No need
                # for cleanup here, as _extend_virtual_disk
# already does it
self._extend_if_required(
vi.dc_info, vi.ii, vi.instance, str(sized_disk_ds_loc))
# Associate the sized image disk to the VM by attaching to the VM a
# COW child of said disk.
self._volumeops.attach_disk_to_vm(
vm_ref, vi.instance,
vi.ii.adapter_type, vi.ii.disk_type,
str(sized_disk_ds_loc),
vi.root_gb * units.Mi, vi.ii.linked_clone)
def _use_iso_image(self, vm_ref, vi):
"""Uses cached image as a bootable virtual cdrom."""
self._attach_cdrom_to_vm(
vm_ref, vi.instance, vi.datastore.ref,
str(vi.cache_image_path))
# Optionally create and attach blank disk
if vi.root_gb > 0:
instance_folder = vi.instance_name
root_disk_name = "%s.vmdk" % vi.instance_name
root_disk_ds_loc = vi.datastore.build_path(instance_folder,
root_disk_name)
# It is pointless to COW a blank disk
linked_clone = False
vm_util.create_virtual_disk(
self._session, vi.dc_info.ref,
vi.ii.adapter_type,
vi.ii.disk_type,
str(root_disk_ds_loc),
vi.root_gb * units.Mi)
self._volumeops.attach_disk_to_vm(
vm_ref, vi.instance,
vi.ii.adapter_type, vi.ii.disk_type,
str(root_disk_ds_loc),
vi.root_gb * units.Mi, linked_clone)
def _update_datacenter_cache_from_objects(self, dcs):
"""Updates the datastore/datacenter cache."""
while dcs:
token = vm_util._get_token(dcs)
for dco in dcs.objects:
dc_ref = dco.obj
ds_refs = []
prop_dict = vm_util.propset_dict(dco.propSet)
name = prop_dict.get('name')
vmFolder = prop_dict.get('vmFolder')
datastore_refs = prop_dict.get('datastore')
if datastore_refs:
datastore_refs = datastore_refs.ManagedObjectReference
for ds in datastore_refs:
ds_refs.append(ds.value)
else:
LOG.debug("Datacenter %s doesn't have any datastore "
"associated with it, ignoring it", name)
for ds_ref in ds_refs:
self._datastore_dc_mapping[ds_ref] = DcInfo(ref=dc_ref,
name=name, vmFolder=vmFolder)
if token:
dcs = self._session._call_method(vim_util,
"continue_to_get_objects",
token)
else:
break
def get_datacenter_ref_and_name(self, ds_ref):
"""Get the datacenter name and the reference."""
dc_info = self._datastore_dc_mapping.get(ds_ref.value)
if not dc_info:
dcs = self._session._call_method(vim_util, "get_objects",
"Datacenter", ["name", "datastore", "vmFolder"])
self._update_datacenter_cache_from_objects(dcs)
dc_info = self._datastore_dc_mapping.get(ds_ref.value)
return dc_info
def list_instances(self):
"""Lists the VM instances that are registered with vCenter cluster."""
properties = ['name', 'runtime.connectionState']
LOG.debug("Getting list of instances from cluster %s",
self._cluster)
vms = []
root_res_pool = self._session._call_method(
vim_util, "get_dynamic_property", self._cluster,
'ClusterComputeResource', 'resourcePool')
if root_res_pool:
vms = self._session._call_method(
vim_util, 'get_inner_objects', root_res_pool, 'vm',
'VirtualMachine', properties)
lst_vm_names = self._get_valid_vms_from_retrieve_result(vms)
LOG.debug("Got total of %s instances", str(len(lst_vm_names)))
return lst_vm_names
def get_vnc_console(self, instance):
"""Return connection info for a vnc console using vCenter logic."""
# vCenter does not run virtual machines and does not run
# a VNC proxy. Instead, you need to tell OpenStack to talk
# directly to the ESX host running the VM you are attempting
# to connect to via VNC.
vnc_console = self._get_vnc_console_connection(instance)
host_name = vm_util.get_host_name_for_vm(
self._session,
instance)
vnc_console['host'] = host_name
# NOTE: VM can move hosts in some situations. Debug for admins.
LOG.debug("VM %(uuid)s is currently on host %(host_name)s",
{'uuid': instance.name, 'host_name': host_name},
instance=instance)
return ctype.ConsoleVNC(**vnc_console)
| [
[
[
830,
841
],
[
2509,
2520
]
],
[
[
849,
853
],
[
42682,
42686
],
[
44304,
44308
]
],
[
[
861,
863
],
[
23011,
23013
]
],
[
[
871,
875
],
[
4266,
4270
]
],
[
[
884,
893
],
[
4287,
4296
]
],
[
[
918,
921
],
[
2015,
2018
]
],
[
[
946,
964
],
[
7259,
7263
],
[
7307,
7311
],
[
7348,
7352
],
[
7390,
7394
],
[
13709,
13713
],
[
62556,
62560
],
[
73141,
73145
]
],
[
[
996,
1021
],
[
22646,
22663
]
],
[
[
1039,
1046
],
[
4928,
4935
]
],
[
[
1072,
1083
],
[
2343,
2354
],
[
2398,
2409
],
[
2452,
2463
],
[
56128,
56139
]
],
[
[
1109,
1120
],
[
28766,
28777
],
[
31869,
31880
],
[
31939,
31950
],
[
39230,
39241
]
],
[
[
1146,
1155
],
[
38480,
38489
]
],
[
[
1181,
1194
],
[
78508,
78513
]
],
[
[
1212,
1235
],
[
10601,
10613
],
[
53446,
53458
]
],
[
[
1253,
1262
],
[
15798,
15807
],
[
17136,
17145
],
[
22403,
22412
],
[
33593,
33602
],
[
41232,
41241
],
[
42276,
42285
],
[
46968,
46977
],
[
47017,
47026
],
[
52551,
52560
],
[
57277,
57286
],
[
65771,
65780
],
[
67264,
67273
],
[
68152,
68161
],
[
68682,
68691
],
[
69386,
69395
],
[
29166,
29175
]
],
[
[
1285,
1286
],
[
6627,
6628
],
[
15726,
15727
],
[
17072,
17073
],
[
22297,
22298
],
[
22469,
22470
],
[
23707,
23708
],
[
33543,
33544
],
[
35044,
35045
],
[
36386,
36387
],
[
37686,
37687
],
[
40149,
40150
],
[
40275,
40276
],
[
41160,
41161
],
[
42216,
42217
],
[
46922,
46923
],
[
49900,
49901
],
[
53645,
53646
],
[
53817,
53818
],
[
68042,
68043
],
[
68564,
68565
],
[
72319,
72320
]
],
[
[
1288,
1291
],
[
67104,
67107
],
[
69227,
69230
],
[
72499,
72502
]
],
[
[
1293,
1296
],
[
14147,
14150
]
],
[
[
1314,
1321
],
[
10563,
10570
]
],
[
[
1356,
1364
],
[
6562,
6570
],
[
23642,
23650
],
[
53071,
53079
],
[
72426,
72434
]
],
[
[
1399,
1408
],
[
17436,
17445
],
[
66404,
66413
],
[
67677,
67686
],
[
71682,
71691
]
],
[
[
1443,
1457
],
[
2257,
2264
],
[
20703,
20710
]
],
[
[
1492,
1497
],
[
7924,
7929
],
[
51315,
51320
],
[
70404,
70409
],
[
73907,
73912
],
[
74829,
74834
],
[
75061,
75066
]
],
[
[
1532,
1541
],
[
12056,
12065
],
[
12345,
12354
],
[
13300,
13309
],
[
29588,
29597
]
],
[
[
1559,
1564
],
[
59489,
59494
],
[
9486,
9491
],
[
22951,
22956
],
[
66232,
66237
]
],
[
[
1587,
1598
],
[
21762,
21773
],
[
22870,
22881
],
[
56247,
56258
]
],
[
[
1621,
1632
],
[
56297,
56308
]
],
[
[
1655,
1661
],
[
20094,
20100
]
],
[
[
1694,
1703
],
[
16740,
16749
],
[
16900,
16909
],
[
66021,
66030
],
[
31384,
31393
]
],
[
[
1736,
1743
],
[
6928,
6935
],
[
7184,
7191
],
[
12487,
12494
],
[
13583,
13590
],
[
15940,
15947
],
[
17706,
17713
],
[
29644,
29651
],
[
29777,
29784
],
[
35772,
35779
],
[
37224,
37231
],
[
48220,
48227
],
[
51490,
51497
],
[
62320,
62327
],
[
62443,
62450
],
[
63389,
63396
],
[
63448,
63455
],
[
64116,
64123
],
[
70549,
70556
],
[
72944,
72951
],
[
28140,
28147
]
],
[
[
1776,
1786
],
[
4645,
4655
],
[
28079,
28089
]
],
[
[
1819,
1829
],
[
5776,
5786
]
],
[
[
1862,
1878
],
[
9347,
9356
],
[
66126,
66135
],
[
68398,
68407
]
],
[
[
1911,
1919
],
[
24072,
24080
],
[
25825,
25833
],
[
33024,
33032
],
[
35324,
35332
],
[
40543,
40551
],
[
41620,
41628
],
[
43042,
43050
],
[
43998,
44006
],
[
44645,
44653
],
[
54267,
54275
],
[
55130,
55138
],
[
55548,
55556
],
[
57006,
57014
],
[
60469,
60477
],
[
60811,
60819
],
[
61269,
61277
],
[
65411,
65419
],
[
68235,
68243
],
[
76197,
76205
],
[
76620,
76628
],
[
77208,
77216
],
[
77399,
77407
],
[
27604,
27612
],
[
28280,
28288
],
[
29020,
29028
]
],
[
[
1952,
1959
],
[
9147,
9154
],
[
9212,
9219
],
[
9832,
9839
],
[
10307,
10314
],
[
12565,
12572
],
[
14594,
14601
],
[
19536,
19543
],
[
22001,
22008
],
[
24296,
24303
],
[
24598,
24605
],
[
25000,
25007
],
[
27336,
27343
],
[
32798,
32805
],
[
33177,
33184
],
[
34926,
34933
],
[
35475,
35482
],
[
35938,
35945
],
[
37980,
37987
],
[
39637,
39644
],
[
40452,
40459
],
[
41529,
41536
],
[
42583,
42590
],
[
43232,
43239
],
[
43350,
43357
],
[
43640,
43647
],
[
43900,
43907
],
[
44188,
44195
],
[
44479,
44486
],
[
44803,
44810
],
[
44874,
44881
],
[
45127,
45134
],
[
45351,
45358
],
[
45446,
45453
],
[
47294,
47301
],
[
47825,
47832
],
[
48478,
48485
],
[
49231,
49238
],
[
50210,
50217
],
[
50379,
50386
],
[
50727,
50734
],
[
50898,
50905
],
[
51019,
51026
],
[
51352,
51359
],
[
51930,
51937
],
[
52401,
52408
],
[
54032,
54039
],
[
54406,
54413
],
[
54903,
54910
],
[
55269,
55276
],
[
56915,
56922
],
[
57159,
57166
],
[
58957,
58964
],
[
59035,
59042
],
[
59308,
59315
],
[
59670,
59677
],
[
59723,
59730
],
[
59794,
59801
],
[
60046,
60053
],
[
60888,
60895
],
[
61375,
61382
],
[
64766,
64773
],
[
65688,
65695
],
[
66070,
66077
],
[
66274,
66281
],
[
66529,
66536
],
[
66684,
66691
],
[
66938,
66945
],
[
67547,
67554
],
[
67802,
67809
],
[
68809,
68816
],
[
69061,
69068
],
[
69925,
69932
],
[
72028,
72035
],
[
74596,
74603
],
[
75238,
75245
],
[
75388,
75395
],
[
78105,
78112
],
[
27823,
27830
]
],
[
[
1992,
2005
],
[
11709,
11722
],
[
18701,
18714
],
[
23220,
23233
],
[
31220,
31233
]
],
[
[
2008,
2012
],
[
2024,
2028
],
[
2097,
2101
],
[
2166,
2170
],
[
2209,
2213
],
[
4376,
4380
],
[
5198,
5202
],
[
5272,
5276
],
[
5326,
5330
],
[
5467,
5471
],
[
19713,
19717
],
[
19897,
19901
],
[
22236,
22240
],
[
22359,
22363
],
[
63972,
63976
]
],
[
[
2251,
2254
],
[
6047,
6050
],
[
6617,
6620
],
[
7046,
7049
],
[
7431,
7434
],
[
11360,
11363
],
[
14135,
14138
],
[
17373,
17376
],
[
17901,
17904
],
[
18021,
18024
],
[
18145,
18148
],
[
18238,
18241
],
[
20693,
20696
],
[
22460,
22463
],
[
23697,
23700
],
[
24886,
24889
],
[
25080,
25083
],
[
25247,
25250
],
[
25710,
25713
],
[
26147,
26150
],
[
26501,
26504
],
[
33913,
33916
],
[
34109,
34112
],
[
34191,
34194
],
[
34466,
34469
],
[
35032,
35035
],
[
36065,
36068
],
[
36270,
36273
],
[
36377,
36380
],
[
36786,
36789
],
[
37395,
37398
],
[
37677,
37680
],
[
37913,
37916
],
[
38393,
38396
],
[
38511,
38514
],
[
38653,
38656
],
[
40052,
40055
],
[
40761,
40764
],
[
41004,
41007
],
[
41306,
41309
],
[
41833,
41836
],
[
42134,
42137
],
[
46276,
46279
],
[
49395,
49398
],
[
49493,
49496
],
[
49795,
49798
],
[
49891,
49894
],
[
52594,
52597
],
[
53261,
53264
],
[
53636,
53639
],
[
53808,
53811
],
[
59202,
59205
],
[
59386,
59389
],
[
59892,
59895
],
[
60117,
60120
],
[
62503,
62506
],
[
64016,
64019
],
[
66814,
66817
],
[
67094,
67097
],
[
67361,
67364
],
[
68937,
68940
],
[
69217,
69220
],
[
69483,
69486
],
[
71931,
71934
],
[
72307,
72310
],
[
72489,
72492
],
[
75827,
75830
],
[
77039,
77042
],
[
77575,
77578
],
[
78329,
78332
],
[
28002,
28005
],
[
30054,
30057
],
[
30680,
30683
],
[
31125,
31128
],
[
31749,
31752
]
],
[
[
2286,
2305
],
[
54582,
54601
],
[
56150,
56169
]
],
[
[
2476,
2494
],
[
47256,
47274
],
[
47730,
47748
],
[
48182,
48200
],
[
48777,
48795
],
[
52153,
52171
]
],
[
[
2500,
2506
],
[
76048,
76054
]
],
[
[
2611,
2643
],
[
16114,
16146
]
],
[
[
4234,
4253
],
[
4501,
4520
]
],
[
[
4311,
4336
],
[
26050,
26075
]
],
[
[
4697,
4708
]
]
] |
from pathlib import Path
import json
# Directory
dir = Path().resolve()
# Configuration
with open(dir/'config.json') as config_file:
CONFIG = json.load(config_file)
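# Expected shape of config.json, inferred from output_dir() below (the path
# value is only an example): {"output_dir": {"path": "/some/output/dir"}}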
def output_dir(config=CONFIG):
    """
    Return string for default output directory path.
    """
    return config['output_dir']['path'] | [
[
[
20,
24
],
[
58,
62
]
],
[
[
32,
36
],
[
151,
155
]
],
[
[
52,
55
],
[
103,
106
]
],
[
[
125,
136
],
[
161,
172
]
],
[
[
142,
148
],
[
200,
206
]
],
[
[
180,
190
]
]
] |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from scipy.ndimage import map_coordinates
from scipy.ndimage import shift
from scipy.optimize import curve_fit, minimize
def reproject_image_into_polar(data, origin=None, Jacobian=False,
dr=1, dt=None):
"""
Reprojects a 2D numpy array (``data``) into a polar coordinate system.
"origin" is a tuple of (x0, y0) relative to the bottom-left image corner,
and defaults to the center of the image.
Parameters
----------
data : 2D np.array
origin : tuple
The coordinate of the image center, relative to bottom-left
Jacobian : boolean
Include ``r`` intensity scaling in the coordinate transform.
This should be included to account for the changing pixel size that
occurs during the transform.
dr : float
Radial coordinate spacing for the grid interpolation
tests show that there is not much point in going below 0.5
dt : float
Angular coordinate spacing (in radians)
if ``dt=None``, dt will be set such that the number of theta values
        is equal to the maximum of the height and the width of
        the image.
Returns
-------
output : 2D np.array
The polar image (r, theta)
r_grid : 2D np.array
meshgrid of radial coordinates
theta_grid : 2D np.array
meshgrid of theta coordinates
Notes
-----
Adapted from:
http://stackoverflow.com/questions/3798333/image-information-along-a-polar-coordinate-system
"""
# bottom-left coordinate system requires numpy image to be np.flipud
data = np.flipud(data)
ny, nx = data.shape[:2]
if origin is None:
origin = (nx//2, ny//2)
    # Determine what the min and max r and theta coords will be...
x, y = index_coords(data, origin=origin) # (x,y) coordinates of each pixel
r, theta = cart2polar(x, y) # convert (x,y) -> (r,θ), note θ=0 is vertical
    nr = int(np.ceil((r.max()-r.min())/dr))
if dt is None:
nt = max(nx, ny)
else:
# dt in radians
        nt = int(np.ceil((theta.max()-theta.min())/dt))
# Make a regular (in polar space) grid based on the min and max r & theta
r_i = np.linspace(r.min(), r.max(), nr, endpoint=False)
theta_i = np.linspace(theta.min(), theta.max(), nt, endpoint=False)
theta_grid, r_grid = np.meshgrid(theta_i, r_i)
# Project the r and theta grid back into pixel coordinates
X, Y = polar2cart(r_grid, theta_grid)
X += origin[0] # We need to shift the origin
Y += origin[1] # back to the bottom-left corner...
xi, yi = X.flatten(), Y.flatten()
coords = np.vstack((yi, xi)) # (map_coordinates requires a 2xn array)
zi = map_coordinates(data, coords)
output = zi.reshape((nr, nt))
if Jacobian:
output = output*r_i[:, np.newaxis]
return output, r_grid, theta_grid
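# Minimal usage sketch (not part of the original module; `img` stands for any
# 2D numpy array you supply):
#   polar_img, r_grid, theta_grid = reproject_image_into_polar(img, Jacobian=True)
#   # polar_img has shape (nr, nt): radius varies along axis 0, angle along axis 1.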
def index_coords(data, origin=None):
"""
    Creates x & y coords for the indices in a numpy array
Parameters
----------
data : numpy array
2D data
origin : (x,y) tuple
defaults to the center of the image. Specify origin=(0,0)
to set the origin to the *bottom-left* corner of the image.
Returns
-------
x, y : arrays
"""
ny, nx = data.shape[:2]
if origin is None:
origin_x, origin_y = nx//2, ny//2
else:
origin_x, origin_y = origin
x, y = np.meshgrid(np.arange(float(nx)), np.arange(float(ny)))
x -= origin_x
y -= origin_y
return x, y
def cart2polar(x, y):
"""
Transform Cartesian coordinates to polar
Parameters
----------
x, y : floats or arrays
Cartesian coordinates
Returns
-------
r, theta : floats or arrays
Polar coordinates
"""
r = np.sqrt(x**2 + y**2)
theta = np.arctan2(x, y) # θ referenced to vertical
return r, theta
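# Worked example of the vertical-reference convention used above:
#   cart2polar(1, 0) -> (1.0, pi/2)   # a point on the +x axis sits at theta = 90 deg
#   cart2polar(0, 1) -> (1.0, 0.0)    # a point on the +y axis sits at theta = 0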
def polar2cart(r, theta):
"""
Transform polar coordinates to Cartesian
Parameters
-------
r, theta : floats or arrays
Polar coordinates
Returns
----------
x, y : floats or arrays
Cartesian coordinates
"""
y = r * np.cos(theta) # θ referenced to vertical
x = r * np.sin(theta)
return x, y
| [
[
[
48,
63
]
],
[
[
87,
95
]
],
[
[
119,
133
]
],
[
[
157,
173
]
],
[
[
182,
193
],
[
1831,
1833
],
[
2170,
2172
],
[
2177,
2179
],
[
2300,
2302
],
[
2307,
2309
],
[
2435,
2437
],
[
2499,
2501
],
[
2582,
2584
],
[
2872,
2874
],
[
3057,
3059
],
[
3657,
3659
],
[
3669,
3671
],
[
3691,
3693
],
[
4048,
4050
],
[
4081,
4083
],
[
4431,
4433
],
[
4486,
4488
]
],
[
[
220,
235
],
[
2944,
2959
]
],
[
[
276,
281
]
],
[
[
309,
318
]
],
[
[
320,
328
]
],
[
[
335,
361
]
],
[
[
3114,
3126
],
[
2011,
2023
]
],
[
[
3772,
3782
],
[
2095,
2105
]
],
[
[
4152,
4162
],
[
2683,
2693
]
]
] |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import DDoSProtection
class okcoinusd (Exchange):
def describe(self):
return self.deep_extend(super(okcoinusd, self).describe(), {
'id': 'okcoinusd',
'name': 'OKCoin USD',
'countries': ['CN', 'US'],
'version': 'v1',
'rateLimit': 1000, # up to 3000 requests per 5 minutes ≈ 600 requests per minute ≈ 10 requests per second ≈ 100 ms
'has': {
'CORS': False,
'fetchOHLCV': True,
'fetchOrder': True,
'fetchOrders': False,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'withdraw': True,
'futures': False,
},
'extension': '.do', # appended to endpoint URL
'timeframes': {
'1m': '1min',
'3m': '3min',
'5m': '5min',
'15m': '15min',
'30m': '30min',
'1h': '1hour',
'2h': '2hour',
'4h': '4hour',
'6h': '6hour',
'12h': '12hour',
'1d': '1day',
'3d': '3day',
'1w': '1week',
},
'api': {
'web': {
'get': [
'futures/pc/market/marketOverview', # todo: merge in fetchMarkets
'spot/markets/index-tickers', # todo: add fetchTickers
'spot/markets/currencies',
'spot/markets/products',
'spot/markets/tickers',
'spot/user-level',
],
},
'public': {
'get': [
'depth',
'exchange_rate',
'future_depth',
'future_estimated_price',
'future_hold_amount',
'future_index',
'future_kline',
'future_price_limit',
'future_ticker',
'future_trades',
'kline',
'otcs',
'ticker',
'tickers', # todo: add fetchTickers
'trades',
],
},
'private': {
'post': [
'account_records',
'batch_trade',
'borrow_money',
'borrow_order_info',
'borrows_info',
'cancel_borrow',
'cancel_order',
'cancel_otc_order',
'cancel_withdraw',
'funds_transfer',
'future_batch_trade',
'future_cancel',
'future_devolve',
'future_explosive',
'future_order_info',
'future_orders_info',
'future_position',
'future_position_4fix',
'future_trade',
'future_trades_history',
'future_userinfo',
'future_userinfo_4fix',
'lend_depth',
'order_fee',
'order_history',
'order_info',
'orders_info',
'otc_order_history',
'otc_order_info',
'repayment',
'submit_otc_order',
'trade',
'trade_history',
'trade_otc_order',
'wallet_info',
'withdraw',
'withdraw_info',
'unrepayments_info',
'userinfo',
],
},
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766791-89ffb502-5ee5-11e7-8a5b-c5950b68ac65.jpg',
'api': {
'web': 'https://www.okcoin.com/v2',
'public': 'https://www.okcoin.com/api',
'private': 'https://www.okcoin.com',
},
'www': 'https://www.okcoin.com',
'doc': [
'https://www.okcoin.com/docs/en/',
'https://www.npmjs.com/package/okcoin.com',
],
},
'fees': {
'trading': {
'taker': 0.002,
'maker': 0.002,
},
},
'exceptions': {
# see https://github.com/okcoin-okex/API-docs-OKEx.com/blob/master/API-For-Spot-EN/Error%20Code%20For%20Spot.md
'10000': ExchangeError, # "Required field, can not be null"
'10001': DDoSProtection, # "Request frequency too high to exceed the limit allowed"
'10005': AuthenticationError, # "'SecretKey' does not exist"
'10006': AuthenticationError, # "'Api_key' does not exist"
'10007': AuthenticationError, # "Signature does not match"
'1002': InsufficientFunds, # "The transaction amount exceed the balance"
'1003': InvalidOrder, # "The transaction amount is less than the minimum requirement"
'1004': InvalidOrder, # "The transaction amount is less than 0"
'1013': InvalidOrder, # no contract type(PR-1101)
'1027': InvalidOrder, # createLimitBuyOrder(symbol, 0, 0): Incorrect parameter may exceeded limits
'1050': InvalidOrder, # returned when trying to cancel an order that was filled or canceled previously
'1217': InvalidOrder, # "Order was sent at ±5% of the current market price. Please resend"
'10014': InvalidOrder, # "Order price must be between 0 and 1,000,000"
'1009': OrderNotFound, # for spot markets, cancelling closed order
'1019': OrderNotFound, # order closed?("Undo order failed")
'1051': OrderNotFound, # for spot markets, cancelling "just closed" order
'10009': OrderNotFound, # for spot markets, "Order does not exist"
'20015': OrderNotFound, # for future markets
'10008': ExchangeError, # Illegal URL parameter
# todo: sort out below
# 10000 Required parameter is empty
# 10001 Request frequency too high to exceed the limit allowed
# 10002 Authentication failure
# 10002 System error
# 10003 This connection has requested other user data
# 10004 Request failed
# 10005 api_key or sign is invalid, 'SecretKey' does not exist
# 10006 'Api_key' does not exist
# 10007 Signature does not match
                # 10008 Illegal parameter, Parameter error
# 10009 Order does not exist
# 10010 Insufficient funds
# 10011 Amount too low
# 10012 Only btc_usd ltc_usd supported
# 10013 Only support https request
# 10014 Order price must be between 0 and 1,000,000
# 10015 Order price differs from current market price too much / Channel subscription temporally not available
# 10016 Insufficient coins balance
# 10017 API authorization error / WebSocket authorization error
# 10018 borrow amount less than lower limit [usd:100,btc:0.1,ltc:1]
# 10019 loan agreement not checked
# 1002 The transaction amount exceed the balance
# 10020 rate cannot exceed 1%
# 10021 rate cannot less than 0.01%
# 10023 fail to get latest ticker
# 10024 balance not sufficient
# 10025 quota is full, cannot borrow temporarily
# 10026 Loan(including reserved loan) and margin cannot be withdrawn
# 10027 Cannot withdraw within 24 hrs of authentication information modification
# 10028 Withdrawal amount exceeds daily limit
# 10029 Account has unpaid loan, please cancel/pay off the loan before withdraw
# 1003 The transaction amount is less than the minimum requirement
# 10031 Deposits can only be withdrawn after 6 confirmations
# 10032 Please enabled phone/google authenticator
# 10033 Fee higher than maximum network transaction fee
# 10034 Fee lower than minimum network transaction fee
# 10035 Insufficient BTC/LTC
# 10036 Withdrawal amount too low
# 10037 Trade password not set
# 1004 The transaction amount is less than 0
# 10040 Withdrawal cancellation fails
                # 10041 Withdrawal address does not exist or is not approved
# 10042 Admin password error
# 10043 Account equity error, withdrawal failure
# 10044 fail to cancel borrowing order
                # 10047 this function is disabled for sub-account
# 10048 withdrawal information does not exist
# 10049 User can not have more than 50 unfilled small orders(amount<0.15BTC)
# 10050 can't cancel more than once
# 10051 order completed transaction
# 10052 not allowed to withdraw
# 10064 after a USD deposit, that portion of assets will not be withdrawable for the next 48 hours
# 1007 No trading market information
# 1008 No latest market information
# 1009 No order
# 1010 Different user of the cancelled order and the original order
# 10100 User account frozen
# 10101 order type is wrong
# 10102 incorrect ID
# 10103 the private otc order's key incorrect
# 10106 API key domain not matched
# 1011 No documented user
# 1013 No order type
# 1014 No login
# 1015 No market depth information
# 1017 Date error
# 1018 Order failed
# 1019 Undo order failed
# 10216 Non-available API / non-public API
# 1024 Currency does not exist
# 1025 No chart type
# 1026 No base currency quantity
# 1027 Incorrect parameter may exceeded limits
# 1028 Reserved decimal failed
# 1029 Preparing
# 1030 Account has margin and futures, transactions can not be processed
# 1031 Insufficient Transferring Balance
# 1032 Transferring Not Allowed
# 1035 Password incorrect
# 1036 Google Verification code Invalid
# 1037 Google Verification code incorrect
# 1038 Google Verification replicated
# 1039 Message Verification Input exceed the limit
# 1040 Message Verification invalid
# 1041 Message Verification incorrect
# 1042 Wrong Google Verification Input exceed the limit
# 1043 Login password cannot be same as the trading password
# 1044 Old password incorrect
# 1045 2nd Verification Needed
# 1046 Please input old password
# 1048 Account Blocked
# 1050 Orders have been withdrawn or withdrawn
# 1051 Order completed
# 1201 Account Deleted at 00: 00
# 1202 Account Not Exist
# 1203 Insufficient Balance
# 1204 Invalid currency
# 1205 Invalid Account
# 1206 Cash Withdrawal Blocked
# 1207 Transfer Not Support
# 1208 No designated account
# 1209 Invalid api
# 1216 Market order temporarily suspended. Please send limit order
# 1217 Order was sent at ±5% of the current market price. Please resend
# 1218 Place order failed. Please try again later
# 20001 User does not exist
# 20002 Account frozen
# 20003 Account frozen due to forced liquidation
# 20004 Contract account frozen
# 20005 User contract account does not exist
# 20006 Required field missing
# 20007 Illegal parameter
# 20008 Contract account balance is too low
# 20009 Contract status error
# 20010 Risk rate ratio does not exist
# 20011 Risk rate lower than 90%/80% before opening BTC position with 10x/20x leverage. or risk rate lower than 80%/60% before opening LTC position with 10x/20x leverage
# 20012 Risk rate lower than 90%/80% after opening BTC position with 10x/20x leverage. or risk rate lower than 80%/60% after opening LTC position with 10x/20x leverage
# 20013 Temporally no counter party price
# 20014 System error
# 20015 Order does not exist
# 20016 Close amount bigger than your open positions, liquidation quantity bigger than holding
# 20017 Not authorized/illegal operation/illegal order ID
# 20018 Order price cannot be more than 103-105% or less than 95-97% of the previous minute price
# 20019 IP restricted from accessing the resource
# 20020 Secret key does not exist
# 20021 Index information does not exist
# 20022 Wrong API interface(Cross margin mode shall call cross margin API, fixed margin mode shall call fixed margin API)
# 20023 Account in fixed-margin mode
# 20024 Signature does not match
# 20025 Leverage rate error
# 20026 API Permission Error
# 20027 no transaction record
# 20028 no such contract
# 20029 Amount is large than available funds
# 20030 Account still has debts
                # 20038 Due to regulation, this function is not available in the country/region you currently reside in.
# 20049 Request frequency too high
# 20100 request time out
# 20101 the format of data is error
# 20102 invalid login
# 20103 event type error
# 20104 subscription type error
# 20107 JSON format error
# 20115 The quote is not match
# 20116 Param not match
# 21020 Contracts are being delivered, orders cannot be placed
# 21021 Contracts are being settled, contracts cannot be placed
},
'options': {
'marketBuyPrice': False,
'defaultContractType': 'this_week', # next_week, quarter
'warnOnFetchOHLCVLimitArgument': True,
'fiats': ['USD', 'CNY'],
'futures': {
'BCH': True,
'BTC': True,
'BTG': True,
'EOS': True,
'ETC': True,
'ETH': True,
'LTC': True,
'NEO': True,
'QTUM': True,
'USDT': True,
'XRP': True,
},
},
})
async def fetch_markets(self, params={}):
response = await self.webGetSpotMarketsProducts()
markets = response['data']
result = []
for i in range(0, len(markets)):
id = markets[i]['symbol']
baseId, quoteId = id.split('_')
baseIdUppercase = baseId.upper()
quoteIdUppercase = quoteId.upper()
base = self.common_currency_code(baseIdUppercase)
quote = self.common_currency_code(quoteIdUppercase)
symbol = base + '/' + quote
precision = {
'amount': markets[i]['maxSizeDigit'],
'price': markets[i]['maxPriceDigit'],
}
minAmount = markets[i]['minTradeSize']
minPrice = math.pow(10, -precision['price'])
active = (markets[i]['online'] != 0)
baseNumericId = markets[i]['baseCurrency']
quoteNumericId = markets[i]['quoteCurrency']
market = self.extend(self.fees['trading'], {
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'baseNumericId': baseNumericId,
'quoteNumericId': quoteNumericId,
'info': markets[i],
'type': 'spot',
'spot': True,
'future': False,
'active': active,
'precision': precision,
'limits': {
'amount': {
'min': minAmount,
'max': None,
},
'price': {
'min': minPrice,
'max': None,
},
'cost': {
'min': minAmount * minPrice,
'max': None,
},
},
})
result.append(market)
if (self.has['futures']) and(market['base'] in list(self.options['futures'].keys())):
fiats = self.options['fiats']
for j in range(0, len(fiats)):
fiat = fiats[j]
lowercaseFiat = fiat.lower()
result.append(self.extend(market, {
'quote': fiat,
'symbol': market['base'] + '/' + fiat,
'id': market['base'].lower() + '_' + lowercaseFiat,
'quoteId': lowercaseFiat,
'type': 'future',
'spot': False,
'future': True,
}))
return result
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
method = 'publicGet'
request = {
'symbol': market['id'],
}
if limit is not None:
request['size'] = limit
if market['future']:
method += 'Future'
            request['contract_type'] = self.options['defaultContractType'] # this_week, next_week, quarter
method += 'Depth'
orderbook = await getattr(self, method)(self.extend(request, params))
return self.parse_order_book(orderbook)
def parse_ticker(self, ticker, market=None):
#
# { buy: "48.777300",
# change: "-1.244500",
# changePercentage: "-2.47%",
# close: "49.064000",
# createdDate: 1531704852254,
# currencyId: 527,
# dayHigh: "51.012500",
# dayLow: "48.124200",
# high: "51.012500",
# inflows: "0",
# last: "49.064000",
# low: "48.124200",
# marketFrom: 627,
# name: {},
# open: "50.308500",
# outflows: "0",
# productId: 527,
# sell: "49.064000",
# symbol: "zec_okb",
# volume: "1049.092535" }
#
timestamp = self.safe_integer_2(ticker, 'timestamp', 'createdDate')
symbol = None
if market is None:
if 'symbol' in ticker:
marketId = ticker['symbol']
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
else:
baseId, quoteId = ticker['symbol'].split('_')
base = baseId.upper()
quote = quoteId.upper()
base = self.common_currency_code(base)
quote = self.common_currency_code(quote)
symbol = base + '/' + quote
if market is not None:
symbol = market['symbol']
last = self.safe_float(ticker, 'last')
open = self.safe_float(ticker, 'open')
change = self.safe_float(ticker, 'change')
percentage = self.safe_float(ticker, 'changePercentage')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': self.safe_float(ticker, 'buy'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'sell'),
'askVolume': None,
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': percentage,
'average': None,
'baseVolume': self.safe_float_2(ticker, 'vol', 'volume'),
'quoteVolume': None,
'info': ticker,
}
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
method = 'publicGet'
request = {
'symbol': market['id'],
}
if market['future']:
method += 'Future'
            request['contract_type'] = self.options['defaultContractType'] # this_week, next_week, quarter
method += 'Ticker'
response = await getattr(self, method)(self.extend(request, params))
ticker = self.safe_value(response, 'ticker')
if ticker is None:
raise ExchangeError(self.id + ' fetchTicker returned an empty response: ' + self.json(response))
timestamp = self.safe_integer(response, 'date')
if timestamp is not None:
timestamp *= 1000
ticker = self.extend(ticker, {'timestamp': timestamp})
return self.parse_ticker(ticker, market)
def parse_trade(self, trade, market=None):
symbol = None
if market:
symbol = market['symbol']
return {
'info': trade,
'timestamp': trade['date_ms'],
'datetime': self.iso8601(trade['date_ms']),
'symbol': symbol,
'id': str(trade['tid']),
'order': None,
'type': None,
'side': trade['type'],
'price': self.safe_float(trade, 'price'),
'amount': self.safe_float(trade, 'amount'),
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
method = 'publicGet'
request = {
'symbol': market['id'],
}
if market['future']:
method += 'Future'
            request['contract_type'] = self.options['defaultContractType'] # this_week, next_week, quarter
method += 'Trades'
response = await getattr(self, method)(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
numElements = len(ohlcv)
volumeIndex = 6 if (numElements > 6) else 5
return [
ohlcv[0], # timestamp
float(ohlcv[1]), # Open
float(ohlcv[2]), # High
float(ohlcv[3]), # Low
float(ohlcv[4]), # Close
# float(ohlcv[5]), # quote volume
# float(ohlcv[6]), # base volume
float(ohlcv[volumeIndex]), # okex will return base volume in the 7th element for future markets
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
method = 'publicGet'
request = {
'symbol': market['id'],
'type': self.timeframes[timeframe],
}
if market['future']:
method += 'Future'
            request['contract_type'] = self.options['defaultContractType'] # this_week, next_week, quarter
method += 'Kline'
if limit is not None:
if self.options['warnOnFetchOHLCVLimitArgument']:
                raise ExchangeError(self.id + ' fetchOHLCV counts "limit" candles from current time backwards, therefore the "limit" argument for ' + self.id + ' is disabled. Set ' + self.id + '.options["warnOnFetchOHLCVLimitArgument"] = False to suppress this warning message.')
request['size'] = int(limit) # max is 1440 candles
if since is not None:
request['since'] = since
else:
request['since'] = self.milliseconds() - 86400000 # last 24 hours
response = await getattr(self, method)(self.extend(request, params))
return self.parse_ohlcvs(response, market, timeframe, since, limit)
async def fetch_balance(self, params={}):
await self.load_markets()
response = await self.privatePostUserinfo(params)
balances = response['info']['funds']
result = {'info': response}
ids = list(balances['free'].keys())
usedField = 'freezed'
# wtf, okex?
# https://github.com/okcoin-okex/API-docs-OKEx.com/commit/01cf9dd57b1f984a8737ef76a037d4d3795d2ac7
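        # the locked-funds field is named 'freezed' in some responses and 'holds' in
        # others, so fall back to whichever key is present in the balance payload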
if not(usedField in list(balances.keys())):
usedField = 'holds'
usedKeys = list(balances[usedField].keys())
ids = self.array_concat(ids, usedKeys)
for i in range(0, len(ids)):
id = ids[i]
code = id.upper()
if id in self.currencies_by_id:
code = self.currencies_by_id[id]['code']
else:
code = self.common_currency_code(code)
account = self.account()
account['free'] = self.safe_float(balances['free'], id, 0.0)
account['used'] = self.safe_float(balances[usedField], id, 0.0)
account['total'] = self.sum(account['free'], account['used'])
result[code] = account
return self.parse_balance(result)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
method = 'privatePost'
order = {
'symbol': market['id'],
'type': side,
}
if market['future']:
method += 'Future'
order = self.extend(order, {
                'contract_type': self.options['defaultContractType'], # this_week, next_week, quarter
'match_price': 0, # match best counter party price? 0 or 1, ignores price if 1
'lever_rate': 10, # leverage rate value: 10 or 20(10 by default)
'price': price,
'amount': amount,
})
else:
if type == 'limit':
order['price'] = price
order['amount'] = amount
else:
order['type'] += '_market'
if side == 'buy':
if self.options['marketBuyPrice']:
if price is None:
# eslint-disable-next-line quotes
raise ExchangeError(self.id + " market buy orders require a price argument(the amount you want to spend or the cost of the order) when self.options['marketBuyPrice'] is True.")
order['price'] = price
else:
order['price'] = self.safe_float(params, 'cost')
if not order['price']:
# eslint-disable-next-line quotes
raise ExchangeError(self.id + " market buy orders require an additional cost parameter, cost = price * amount. If you want to pass the cost of the market order(the amount you want to spend) in the price argument(the default " + self.id + " behaviour), set self.options['marketBuyPrice'] = True. It will effectively suppress self warning exception as well.")
else:
order['amount'] = amount
params = self.omit(params, 'cost')
method += 'Trade'
response = await getattr(self, method)(self.extend(order, params))
timestamp = self.milliseconds()
return {
'info': response,
'id': str(response['order_id']),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'status': None,
'symbol': symbol,
'type': type,
'side': side,
'price': price,
'amount': amount,
'filled': None,
'remaining': None,
'cost': None,
'trades': None,
'fee': None,
}
async def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'order_id': id,
}
method = 'privatePost'
if market['future']:
method += 'FutureCancel'
            request['contract_type'] = self.options['defaultContractType'] # this_week, next_week, quarter
else:
method += 'CancelOrder'
response = await getattr(self, method)(self.extend(request, params))
return response
def parse_order_status(self, status):
statuses = {
'-1': 'canceled',
'0': 'open',
'1': 'open',
'2': 'closed',
'3': 'open',
'4': 'canceled',
}
return self.safe_value(statuses, status, status)
def parse_order_side(self, side):
if side == 1:
return 'buy' # open long position
if side == 2:
return 'sell' # open short position
if side == 3:
return 'sell' # liquidate long position
if side == 4:
return 'buy' # liquidate short position
return side
def parse_order(self, order, market=None):
side = None
type = None
if 'type' in order:
if (order['type'] == 'buy') or (order['type'] == 'sell'):
side = order['type']
type = 'limit'
elif order['type'] == 'buy_market':
side = 'buy'
type = 'market'
elif order['type'] == 'sell_market':
side = 'sell'
type = 'market'
else:
side = self.parse_order_side(order['type'])
if ('contract_name' in list(order.keys())) or ('lever_rate' in list(order.keys())):
type = 'margin'
status = self.parse_order_status(self.safe_string(order, 'status'))
symbol = None
if market is None:
marketId = self.safe_string(order, 'symbol')
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
if market:
symbol = market['symbol']
timestamp = None
createDateField = self.get_create_date_field()
if createDateField in order:
timestamp = order[createDateField]
amount = self.safe_float(order, 'amount')
filled = self.safe_float(order, 'deal_amount')
amount = max(amount, filled)
remaining = max(0, amount - filled)
if type == 'market':
remaining = 0
average = self.safe_float(order, 'avg_price')
# https://github.com/ccxt/ccxt/issues/2452
average = self.safe_float(order, 'price_avg', average)
cost = average * filled
result = {
'info': order,
'id': str(order['order_id']),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'side': side,
'price': order['price'],
'average': average,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': None,
}
return result
def get_create_date_field(self):
# needed for derived exchanges
# allcoin typo create_data instead of create_date
return 'create_date'
def get_orders_field(self):
# needed for derived exchanges
# allcoin typo order instead of orders(expected based on their API docs)
return 'orders'
async def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ExchangeError(self.id + ' fetchOrder requires a symbol parameter')
await self.load_markets()
market = self.market(symbol)
method = 'privatePost'
request = {
'order_id': id,
'symbol': market['id'],
# 'status': 0, # 0 for unfilled orders, 1 for filled orders
# 'current_page': 1, # current page number
# 'page_length': 200, # number of orders returned per page, maximum 200
}
if market['future']:
method += 'Future'
            request['contract_type'] = self.options['defaultContractType'] # this_week, next_week, quarter
method += 'OrderInfo'
response = await getattr(self, method)(self.extend(request, params))
ordersField = self.get_orders_field()
numOrders = len(response[ordersField])
if numOrders > 0:
return self.parse_order(response[ordersField][0])
raise OrderNotFound(self.id + ' order ' + id + ' not found')
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ExchangeError(self.id + ' fetchOrders requires a symbol parameter')
await self.load_markets()
market = self.market(symbol)
method = 'privatePost'
request = {
'symbol': market['id'],
}
order_id_in_params = ('order_id' in list(params.keys()))
if market['future']:
method += 'FutureOrdersInfo'
            request['contract_type'] = self.options['defaultContractType'] # this_week, next_week, quarter
if not order_id_in_params:
raise ExchangeError(self.id + ' fetchOrders() requires order_id param for futures market ' + symbol + '(a string of one or more order ids, comma-separated)')
else:
status = None
if 'type' in params:
status = params['type']
elif 'status' in params:
status = params['status']
else:
name = 'type' if order_id_in_params else 'status'
raise ExchangeError(self.id + ' fetchOrders() requires ' + name + ' param for spot market ' + symbol + '(0 - for unfilled orders, 1 - for filled/canceled orders)')
if order_id_in_params:
method += 'OrdersInfo'
request = self.extend(request, {
'type': status,
'order_id': params['order_id'],
})
else:
method += 'OrderHistory'
request = self.extend(request, {
'status': status,
'current_page': 1, # current page number
'page_length': 200, # number of orders returned per page, maximum 200
})
params = self.omit(params, ['type', 'status'])
response = await getattr(self, method)(self.extend(request, params))
ordersField = self.get_orders_field()
return self.parse_orders(response[ordersField], market, since, limit)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
open = 0 # 0 for unfilled orders, 1 for filled orders
return await self.fetch_orders(symbol, since, limit, self.extend({
'status': open,
}, params))
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
closed = 1 # 0 for unfilled orders, 1 for filled orders
orders = await self.fetch_orders(symbol, since, limit, self.extend({
'status': closed,
}, params))
return orders
async def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
await self.load_markets()
currency = self.currency(code)
# if amount < 0.01:
# raise ExchangeError(self.id + ' withdraw() requires amount > 0.01')
        # for some reason they require supplying a pair of currencies to withdraw one currency
currencyId = currency['id'] + '_usd'
if tag:
address = address + ':' + tag
request = {
'symbol': currencyId,
'withdraw_address': address,
'withdraw_amount': amount,
'target': 'address', # or 'okcn', 'okcom', 'okex'
}
query = params
if 'chargefee' in query:
request['chargefee'] = query['chargefee']
query = self.omit(query, 'chargefee')
else:
raise ExchangeError(self.id + ' withdraw() requires a `chargefee` parameter')
if self.password:
request['trade_pwd'] = self.password
elif 'password' in query:
request['trade_pwd'] = query['password']
query = self.omit(query, 'password')
elif 'trade_pwd' in query:
request['trade_pwd'] = query['trade_pwd']
query = self.omit(query, 'trade_pwd')
passwordInRequest = ('trade_pwd' in list(request.keys()))
if not passwordInRequest:
raise ExchangeError(self.id + ' withdraw() requires self.password set on the exchange instance or a password / trade_pwd parameter')
response = await self.privatePostWithdraw(self.extend(request, query))
return {
'info': response,
'id': self.safe_string(response, 'withdraw_id'),
}
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = '/'
if api != 'web':
url += self.version + '/'
url += path
if api != 'web':
url += self.extension
if api == 'private':
self.check_required_credentials()
query = self.keysort(self.extend({
'api_key': self.apiKey,
}, params))
# secret key must be at the end of query
queryString = self.rawencode(query) + '&secret_key=' + self.secret
query['sign'] = self.hash(self.encode(queryString)).upper()
body = self.urlencode(query)
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
else:
if params:
url += '?' + self.urlencode(params)
url = self.urls['api'][api] + url
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response):
if len(body) < 2:
return # fallback to default error handler
if body[0] == '{':
if 'error_code' in response:
error = self.safe_string(response, 'error_code')
message = self.id + ' ' + self.json(response)
if error in self.exceptions:
ExceptionClass = self.exceptions[error]
raise ExceptionClass(message)
else:
raise ExchangeError(message)
if 'result' in response:
if not response['result']:
raise ExchangeError(self.id + ' ' + self.json(response))
| [
[
[
226,
234
],
[
581,
589
]
],
[
[
242,
246
],
[
17411,
17415
]
],
[
[
276,
289
],
[
5645,
5658
],
[
7224,
7237
],
[
23319,
23332
],
[
25972,
25985
],
[
28969,
28982
],
[
29429,
29442
],
[
34562,
34575
],
[
35692,
35705
],
[
36232,
36245
],
[
36682,
36695
],
[
39121,
39134
],
[
39661,
39674
],
[
41516,
41529
],
[
41645,
41658
]
],
[
[
319,
338
],
[
5823,
5842
],
[
5901,
5920
],
[
5977,
5996
]
],
[
[
368,
385
],
[
30673,
30690
]
],
[
[
415,
432
],
[
6052,
6069
]
],
[
[
462,
474
],
[
6142,
6154
],
[
6245,
6257
],
[
6326,
6338
],
[
6393,
6405
],
[
6509,
6521
],
[
6629,
6641
],
[
6738,
6750
]
],
[
[
504,
517
],
[
6825,
6838
],
[
6909,
6922
],
[
6986,
6999
],
[
7078,
7091
],
[
7162,
7175
],
[
35509,
35522
]
],
[
[
547,
561
],
[
5722,
5736
]
],
[
[
570,
579
],
[
655,
664
]
]
] |
'''
Description:
Shuffle a set of numbers without duplicates.
Example:
// Init an array with set 1, 2, and 3.
int[] nums = {1,2,3};
Solution solution = new Solution(nums);
// Shuffle the array [1,2,3] and return its result. Any permutation of [1,2,3] must be equally likely to be returned.
solution.shuffle();
// Resets the array back to its original configuration [1,2,3].
solution.reset();
// Returns the random shuffling of array [1,2,3].
solution.shuffle();
Hint #1
The solution expects that we always use the original array for shuffle(), otherwise some of the test cases fail. (Credits: @snehasingh31)
'''
from typing import List
from random import shuffle
class Solution:
def __init__(self, nums: List[int]):
# copy to class member: self.array
self.array = [ *nums ]
self.origin = nums
def reset(self) -> List[int]:
"""
Resets the array to its original configuration and return it.
"""
self.array = [ *(self.origin) ]
return self.array
def shuffle(self) -> List[int]:
"""
Returns a random shuffling of the array.
"""
shuffle( self.array )
return self.array
# n : the length of input array
## Time Complexity: O( n )
#
# The overhead in time is the cost of shuffle and reset, which are of O( n ).
## Space Complexity: O( n )
#
# The overhead in space is the storage for class member, self.array, which is of O( n ).
from collections import namedtuple
TestEntry = namedtuple('TestEntry', 'sequence')
def test_bench():
t = TestEntry( sequence = [1,2,3] )
# reference output
    # this is a challenge about randomness, so any valid permutation of the shuffled result is accepted.
'''
[1, 3, 2]
[1, 2, 3]
[3, 2, 1]
'''
obj = Solution( t.sequence )
print( obj.shuffle() )
print( obj.reset() )
print( obj.shuffle() )
if __name__ == '__main__':
test_bench() | [
[
[
636,
640
],
[
715,
719
],
[
861,
865
],
[
1067,
1071
]
],
[
[
660,
667
],
[
1159,
1166
]
],
[
[
675,
683
],
[
1811,
1819
]
],
[
[
1496,
1506
],
[
1519,
1529
]
],
[
[
1507,
1516
],
[
1583,
1592
]
],
[
[
1560,
1570
],
[
1948,
1958
]
]
] |
# Copyright (c) 2019-2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Please install UMAP before running the code
# use 'conda install -c conda-forge umap-learn' command to install it
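# (the package is also published on PyPI, so 'pip install umap-learn' should work as an alternative)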
import numpy as np
import pytest
import umap
import copy
import cupyx
import scipy.sparse
from cuml.manifold.umap import UMAP as cuUMAP
from cuml.test.utils import array_equal, unit_param, \
quality_param, stress_param
from sklearn.neighbors import NearestNeighbors
import joblib
from cuml.common import logger
from sklearn import datasets
from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs
from sklearn.manifold import trustworthiness
from sklearn.metrics import adjusted_rand_score
dataset_names = ['iris', 'digits', 'wine', 'blobs']
@pytest.mark.parametrize('nrows', [unit_param(500), quality_param(5000),
stress_param(500000)])
@pytest.mark.parametrize('n_feats', [unit_param(20), quality_param(100),
stress_param(1000)])
def test_blobs_cluster(nrows, n_feats):
data, labels = datasets.make_blobs(
n_samples=nrows, n_features=n_feats, centers=5, random_state=0)
embedding = cuUMAP().fit_transform(data, convert_dtype=True)
if nrows < 500000:
score = adjusted_rand_score(labels,
KMeans(5).fit_predict(embedding))
assert score == 1.0
@pytest.mark.parametrize('nrows', [unit_param(500), quality_param(5000),
stress_param(500000)])
@pytest.mark.parametrize('n_feats', [unit_param(10), quality_param(100),
stress_param(1000)])
def test_umap_fit_transform_score(nrows, n_feats):
n_samples = nrows
n_features = n_feats
data, labels = make_blobs(n_samples=n_samples, n_features=n_features,
centers=10, random_state=42)
model = umap.UMAP(n_neighbors=10, min_dist=0.1)
cuml_model = cuUMAP(n_neighbors=10, min_dist=0.01)
embedding = model.fit_transform(data)
cuml_embedding = cuml_model.fit_transform(data, convert_dtype=True)
assert not np.isnan(embedding).any()
assert not np.isnan(cuml_embedding).any()
if nrows < 500000:
cuml_score = adjusted_rand_score(labels,
KMeans(10).fit_predict(
cuml_embedding))
score = adjusted_rand_score(labels,
KMeans(10).fit_predict(embedding))
assert array_equal(score, cuml_score, 1e-2, with_sign=True)
def test_supervised_umap_trustworthiness_on_iris():
iris = datasets.load_iris()
data = iris.data
embedding = cuUMAP(n_neighbors=10, random_state=0,
min_dist=0.01).fit_transform(
data, iris.target, convert_dtype=True)
trust = trustworthiness(iris.data, embedding, n_neighbors=10)
assert trust >= 0.97
def test_semisupervised_umap_trustworthiness_on_iris():
iris = datasets.load_iris()
data = iris.data
target = iris.target.copy()
target[25:75] = -1
embedding = cuUMAP(n_neighbors=10, random_state=0,
min_dist=0.01).fit_transform(
data, target, convert_dtype=True)
trust = trustworthiness(iris.data, embedding, n_neighbors=10)
assert trust >= 0.97
def test_umap_trustworthiness_on_iris():
iris = datasets.load_iris()
data = iris.data
embedding = cuUMAP(n_neighbors=10, min_dist=0.01,
random_state=0).fit_transform(
data, convert_dtype=True)
trust = trustworthiness(iris.data, embedding, n_neighbors=10)
assert trust >= 0.97
@pytest.mark.parametrize('target_metric', ["categorical", "euclidean"])
def test_umap_transform_on_iris(target_metric):
iris = datasets.load_iris()
iris_selection = np.random.RandomState(42).choice(
[True, False], 150, replace=True, p=[0.75, 0.25])
data = iris.data[iris_selection]
fitter = cuUMAP(n_neighbors=10, init="random", n_epochs=800, min_dist=0.01,
random_state=42, target_metric=target_metric)
fitter.fit(data, convert_dtype=True)
new_data = iris.data[~iris_selection]
embedding = fitter.transform(new_data, convert_dtype=True)
assert not np.isnan(embedding).any()
trust = trustworthiness(new_data, embedding, n_neighbors=10)
assert trust >= 0.85
@pytest.mark.parametrize('input_type', ['cupy', 'scipy'])
@pytest.mark.parametrize('xform_method', ['fit', 'fit_transform'])
@pytest.mark.parametrize('target_metric', ["categorical", "euclidean"])
def test_umap_transform_on_digits_sparse(target_metric, input_type,
xform_method):
digits = datasets.load_digits()
digits_selection = np.random.RandomState(42).choice(
[True, False], 1797, replace=True, p=[0.75, 0.25])
if input_type == 'cupy':
sp_prefix = cupyx.scipy.sparse
else:
sp_prefix = scipy.sparse
data = sp_prefix.csr_matrix(
scipy.sparse.csr_matrix(digits.data[digits_selection]))
fitter = cuUMAP(n_neighbors=15,
verbose=logger.level_info,
init="random",
n_epochs=0,
min_dist=0.01,
random_state=42,
target_metric=target_metric)
new_data = sp_prefix.csr_matrix(
scipy.sparse.csr_matrix(digits.data[~digits_selection]))
if xform_method == 'fit':
fitter.fit(data, convert_dtype=True)
embedding = fitter.transform(new_data, convert_dtype=True)
else:
embedding = fitter.fit_transform(new_data, convert_dtype=True)
if input_type == 'cupy':
embedding = embedding.get()
trust = trustworthiness(digits.data[~digits_selection], embedding,
n_neighbors=15)
assert trust >= 0.96
@pytest.mark.parametrize('target_metric', ["categorical", "euclidean"])
def test_umap_transform_on_digits(target_metric):
digits = datasets.load_digits()
digits_selection = np.random.RandomState(42).choice(
[True, False], 1797, replace=True, p=[0.75, 0.25])
data = digits.data[digits_selection]
fitter = cuUMAP(n_neighbors=15,
verbose=logger.level_debug,
init="random",
n_epochs=0,
min_dist=0.01,
random_state=42,
target_metric=target_metric)
fitter.fit(data, convert_dtype=True)
new_data = digits.data[~digits_selection]
embedding = fitter.transform(new_data, convert_dtype=True)
trust = trustworthiness(digits.data[~digits_selection], embedding,
n_neighbors=15)
assert trust >= 0.96
@pytest.mark.parametrize('target_metric', ["categorical", "euclidean"])
@pytest.mark.parametrize('name', dataset_names)
def test_umap_fit_transform_trust(name, target_metric):
if name == 'iris':
iris = datasets.load_iris()
data = iris.data
labels = iris.target
elif name == 'digits':
digits = datasets.load_digits(n_class=5)
data = digits.data
labels = digits.target
elif name == 'wine':
wine = datasets.load_wine()
data = wine.data
labels = wine.target
else:
data, labels = make_blobs(n_samples=500, n_features=10,
centers=10, random_state=42)
model = umap.UMAP(n_neighbors=10, min_dist=0.01,
target_metric=target_metric)
cuml_model = cuUMAP(n_neighbors=10, min_dist=0.01,
target_metric=target_metric)
embedding = model.fit_transform(data)
cuml_embedding = cuml_model.fit_transform(data, convert_dtype=True)
trust = trustworthiness(data, embedding, n_neighbors=10)
cuml_trust = trustworthiness(data, cuml_embedding, n_neighbors=10)
assert array_equal(trust, cuml_trust, 1e-1, with_sign=True)
@pytest.mark.parametrize('target_metric', ["categorical", "euclidean"])
@pytest.mark.parametrize('name', [unit_param('digits')])
@pytest.mark.parametrize('nrows', [quality_param(5000),
stress_param(500000)])
@pytest.mark.parametrize('n_feats', [quality_param(100),
stress_param(1000)])
@pytest.mark.parametrize('should_downcast', [True])
@pytest.mark.parametrize('input_type', ['dataframe', 'ndarray'])
def test_umap_data_formats(input_type, should_downcast,
nrows, n_feats, name, target_metric):
dtype = np.float32 if not should_downcast else np.float64
n_samples = nrows
n_feats = n_feats
if name == 'digits':
# use the digits dataset for unit test
digits = datasets.load_digits(n_class=9)
X = digits["data"].astype(dtype)
else:
X, y = datasets.make_blobs(n_samples=n_samples,
n_features=n_feats, random_state=0)
umap = cuUMAP(n_neighbors=3, n_components=2, target_metric=target_metric)
embeds = umap.fit_transform(X)
assert type(embeds) == np.ndarray
@pytest.mark.parametrize('target_metric', ["categorical", "euclidean"])
@pytest.mark.filterwarnings("ignore:(.*)connected(.*):UserWarning:sklearn[.*]")
def test_umap_fit_transform_score_default(target_metric):
n_samples = 500
n_features = 20
data, labels = make_blobs(n_samples=n_samples, n_features=n_features,
centers=10, random_state=42)
model = umap.UMAP(target_metric=target_metric)
cuml_model = cuUMAP(target_metric=target_metric)
embedding = model.fit_transform(data)
cuml_embedding = cuml_model.fit_transform(data, convert_dtype=True)
cuml_score = adjusted_rand_score(labels,
KMeans(10).fit_predict(
cuml_embedding))
score = adjusted_rand_score(labels,
KMeans(10).fit_predict(embedding))
assert array_equal(score, cuml_score, 1e-2, with_sign=True)
def test_umap_fit_transform_against_fit_and_transform():
n_samples = 500
n_features = 20
data, labels = make_blobs(n_samples=n_samples, n_features=n_features,
centers=10, random_state=42)
"""
First test the default option does not hash the input
"""
cuml_model = cuUMAP()
ft_embedding = cuml_model.fit_transform(data, convert_dtype=True)
fit_embedding_same_input = cuml_model.transform(data, convert_dtype=True)
assert joblib.hash(ft_embedding) != joblib.hash(fit_embedding_same_input)
"""
Next, test explicitly enabling feature hashes the input
"""
cuml_model = cuUMAP(hash_input=True)
ft_embedding = cuml_model.fit_transform(data, convert_dtype=True)
fit_embedding_same_input = cuml_model.transform(data, convert_dtype=True)
assert joblib.hash(ft_embedding) == joblib.hash(fit_embedding_same_input)
fit_embedding_diff_input = cuml_model.transform(data[1:],
convert_dtype=True)
assert joblib.hash(ft_embedding) != joblib.hash(fit_embedding_diff_input)
@pytest.mark.parametrize('n_components,random_state',
[unit_param(2, None),
unit_param(2, 8),
unit_param(2, np.random.RandomState(42)),
unit_param(21, None),
unit_param(21, np.random.RandomState(42)),
unit_param(25, 8),
unit_param(50, None),
stress_param(50, 8)])
def test_umap_fit_transform_reproducibility(n_components, random_state):
n_samples = 8000
n_features = 200
if random_state is None:
n_components *= 2
data, labels = make_blobs(n_samples=n_samples, n_features=n_features,
centers=10, random_state=42)
def get_embedding(n_components, random_state):
reducer = cuUMAP(init="random",
n_components=n_components,
random_state=random_state)
return reducer.fit_transform(data, convert_dtype=True)
state = copy.copy(random_state)
cuml_embedding1 = get_embedding(n_components, state)
state = copy.copy(random_state)
cuml_embedding2 = get_embedding(n_components, state)
assert not np.isnan(cuml_embedding1).any()
assert not np.isnan(cuml_embedding2).any()
# Reproducibility threshold raised until intermittent failure is fixed
# Ref: https://github.com/rapidsai/cuml/issues/1903
mean_diff = np.mean(np.abs(cuml_embedding1 - cuml_embedding2))
if random_state is not None:
assert mean_diff == 0.0
else:
assert mean_diff > 0.5
@pytest.mark.parametrize('n_components,random_state',
[unit_param(2, None),
unit_param(2, 8),
unit_param(2, np.random.RandomState(42)),
unit_param(21, None),
unit_param(25, 8),
unit_param(25, np.random.RandomState(42)),
unit_param(50, None),
stress_param(50, 8)])
def test_umap_transform_reproducibility(n_components, random_state):
n_samples = 5000
n_features = 200
if random_state is None:
n_components *= 2
data, labels = make_blobs(n_samples=n_samples, n_features=n_features,
centers=10, random_state=42)
selection = np.random.RandomState(42).choice(
[True, False], n_samples, replace=True, p=[0.5, 0.5])
fit_data = data[selection]
transform_data = data[~selection]
def get_embedding(n_components, random_state):
reducer = cuUMAP(init="random",
n_components=n_components,
random_state=random_state)
reducer.fit(fit_data, convert_dtype=True)
return reducer.transform(transform_data, convert_dtype=True)
state = copy.copy(random_state)
cuml_embedding1 = get_embedding(n_components, state)
state = copy.copy(random_state)
cuml_embedding2 = get_embedding(n_components, state)
assert not np.isnan(cuml_embedding1).any()
assert not np.isnan(cuml_embedding2).any()
# Reproducibility threshold raised until intermittent failure is fixed
# Ref: https://github.com/rapidsai/cuml/issues/1903
mean_diff = np.mean(np.abs(cuml_embedding1 - cuml_embedding2))
if random_state is not None:
assert mean_diff == 0.0
else:
assert mean_diff > 0.5
def test_umap_fit_transform_trustworthiness_with_consistency_enabled():
iris = datasets.load_iris()
data = iris.data
algo = cuUMAP(n_neighbors=10, min_dist=0.01, init="random",
random_state=42)
embedding = algo.fit_transform(data, convert_dtype=True)
trust = trustworthiness(iris.data, embedding, n_neighbors=10)
assert trust >= 0.97
def test_umap_transform_trustworthiness_with_consistency_enabled():
iris = datasets.load_iris()
data = iris.data
selection = np.random.RandomState(42).choice(
[True, False], data.shape[0], replace=True, p=[0.5, 0.5])
fit_data = data[selection]
transform_data = data[~selection]
model = cuUMAP(n_neighbors=10, min_dist=0.01, init="random",
random_state=42)
model.fit(fit_data, convert_dtype=True)
embedding = model.transform(transform_data, convert_dtype=True)
trust = trustworthiness(transform_data, embedding, n_neighbors=10)
assert trust >= 0.92
@pytest.mark.filterwarnings("ignore:(.*)zero(.*)::scipy[.*]|umap[.*]")
def test_exp_decay_params():
def compare_exp_decay_params(a=None, b=None, min_dist=0.1, spread=1.0):
cuml_model = cuUMAP(a=a, b=b, min_dist=min_dist, spread=spread)
state = cuml_model.__getstate__()
cuml_a, cuml_b = state['a'], state['b']
skl_model = umap.UMAP(a=a, b=b, min_dist=min_dist, spread=spread)
skl_model.fit(np.zeros((1, 1)))
sklearn_a, sklearn_b = skl_model._a, skl_model._b
assert abs(cuml_a) - abs(sklearn_a) < 1e-6
assert abs(cuml_b) - abs(sklearn_b) < 1e-6
compare_exp_decay_params(min_dist=0.1, spread=1.0)
compare_exp_decay_params(a=0.5, b=2.0)
compare_exp_decay_params(a=0.5)
compare_exp_decay_params(b=0.5)
compare_exp_decay_params(min_dist=0.1, spread=10.0)
@pytest.mark.parametrize('n_neighbors', [5, 15])
def test_umap_knn_parameters(n_neighbors):
data, labels = datasets.make_blobs(
n_samples=2000, n_features=10, centers=5, random_state=0)
data = data.astype(np.float32)
def fit_transform_embed(knn_graph=None):
model = cuUMAP(random_state=42,
init='random',
n_neighbors=n_neighbors)
return model.fit_transform(data, knn_graph=knn_graph,
convert_dtype=True)
def transform_embed(knn_graph=None):
model = cuUMAP(random_state=42,
init='random',
n_neighbors=n_neighbors)
model.fit(data, knn_graph=knn_graph, convert_dtype=True)
return model.transform(data, knn_graph=knn_graph,
convert_dtype=True)
def test_trustworthiness(embedding):
trust = trustworthiness(data, embedding, n_neighbors=n_neighbors)
assert trust >= 0.92
def test_equality(e1, e2):
mean_diff = np.mean(np.abs(e1 - e2))
print("mean diff: %s" % mean_diff)
assert mean_diff < 1.0
neigh = NearestNeighbors(n_neighbors=n_neighbors)
neigh.fit(data)
knn_graph = neigh.kneighbors_graph(data, mode="distance")
embedding1 = fit_transform_embed(None)
embedding2 = fit_transform_embed(knn_graph.tocsr())
embedding3 = fit_transform_embed(knn_graph.tocoo())
embedding4 = fit_transform_embed(knn_graph.tocsc())
embedding5 = transform_embed(knn_graph.tocsr())
embedding6 = transform_embed(knn_graph.tocoo())
embedding7 = transform_embed(knn_graph.tocsc())
test_trustworthiness(embedding1)
test_trustworthiness(embedding2)
test_trustworthiness(embedding3)
test_trustworthiness(embedding4)
test_trustworthiness(embedding5)
test_trustworthiness(embedding6)
test_trustworthiness(embedding7)
test_equality(embedding2, embedding3)
test_equality(embedding3, embedding4)
test_equality(embedding5, embedding6)
test_equality(embedding6, embedding7)
| [
[
[
721,
732
],
[
11875,
11877
],
[
11992,
11994
],
[
13500,
13502
],
[
13662,
13664
],
[
2624,
2626
],
[
2665,
2667
],
[
4347,
4349
],
[
4785,
4787
],
[
5286,
5288
],
[
6580,
6582
],
[
9073,
9075
],
[
9112,
9114
],
[
9610,
9612
],
[
12929,
12931
],
[
12976,
12978
],
[
13156,
13158
],
[
13164,
13166
],
[
14105,
14107
],
[
14788,
14790
],
[
14835,
14837
],
[
15015,
15017
],
[
15023,
15025
],
[
15689,
15691
],
[
17232,
17234
],
[
16603,
16605
],
[
18072,
18074
],
[
18080,
18082
]
],
[
[
740,
746
],
[
1287,
1293
],
[
1408,
1414
],
[
1913,
1919
],
[
2034,
2040
],
[
4173,
4179
],
[
4905,
4911
],
[
4963,
4969
],
[
5030,
5036
],
[
6398,
6404
],
[
7282,
7288
],
[
7354,
7360
],
[
8487,
8493
],
[
8559,
8565
],
[
8616,
8622
],
[
8720,
8726
],
[
8823,
8829
],
[
8875,
8881
],
[
9624,
9630
],
[
9696,
9702
],
[
11691,
11697
],
[
13316,
13322
],
[
16170,
16176
],
[
17012,
17018
]
],
[
[
754,
758
],
[
2398,
2402
],
[
7972,
7976
],
[
10021,
10025
],
[
16527,
16531
]
],
[
[
766,
770
],
[
12739,
12743
],
[
12832,
12836
],
[
14598,
14602
],
[
14691,
14695
]
],
[
[
779,
784
],
[
5429,
5434
]
],
[
[
792,
804
],
[
5478,
5483
],
[
5533,
5538
],
[
5907,
5912
]
],
[
[
837,
851
],
[
1695,
1701
],
[
2455,
2461
],
[
3203,
3209
],
[
3615,
3621
],
[
3953,
3959
],
[
4490,
4496
],
[
5603,
5609
],
[
6728,
6734
],
[
8081,
8087
],
[
9480,
9486
],
[
10077,
10083
],
[
10891,
10897
],
[
11223,
11229
],
[
15310,
15316
],
[
15870,
15876
],
[
12537,
12543
],
[
14340,
14346
],
[
16366,
16372
],
[
17306,
17312
],
[
17591,
17597
]
],
[
[
880,
891
],
[
3027,
3038
],
[
8431,
8442
],
[
10511,
10522
]
],
[
[
893,
903
],
[
1321,
1331
],
[
1444,
1454
],
[
1947,
1957
],
[
2070,
2080
],
[
8592,
8602
],
[
11770,
11780
],
[
11817,
11827
],
[
11861,
11871
],
[
11929,
11939
],
[
11977,
11987
],
[
12046,
12056
],
[
12091,
12101
],
[
13395,
13405
],
[
13442,
13452
],
[
13486,
13496
],
[
13554,
13564
],
[
13602,
13612
],
[
13647,
13657
],
[
13716,
13726
]
],
[
[
911,
924
],
[
1338,
1351
],
[
1460,
1473
],
[
1964,
1977
],
[
2086,
2099
],
[
8650,
8663
],
[
8756,
8769
]
],
[
[
926,
938
],
[
1384,
1396
],
[
1505,
1517
],
[
2010,
2022
],
[
2131,
2143
],
[
8696,
8708
],
[
8801,
8813
],
[
12139,
12151
],
[
13764,
13776
]
],
[
[
969,
985
],
[
18184,
18200
]
],
[
[
994,
1000
],
[
11061,
11067
],
[
11090,
11096
],
[
11408,
11414
],
[
11437,
11443
],
[
11621,
11627
],
[
11650,
11656
]
],
[
[
1026,
1032
],
[
5654,
5660
],
[
6779,
6785
]
],
[
[
1054,
1062
],
[
1586,
1594
],
[
3145,
3153
],
[
3502,
3510
],
[
3895,
3903
],
[
4304,
4312
],
[
5239,
5247
],
[
6533,
6541
],
[
7496,
7504
],
[
7616,
7624
],
[
7747,
7755
],
[
9257,
9265
],
[
9356,
9364
],
[
15257,
15265
],
[
15631,
15639
],
[
17122,
17130
]
],
[
[
1091,
1097
],
[
1848,
1854
],
[
2810,
2816
],
[
2976,
2982
],
[
10311,
10317
],
[
10464,
10470
]
],
[
[
1127,
1137
],
[
2271,
2281
],
[
7855,
7865
],
[
9894,
9904
],
[
10684,
10694
],
[
12353,
12363
],
[
13974,
13984
]
],
[
[
1167,
1182
],
[
3354,
3369
],
[
3762,
3777
],
[
4091,
4106
],
[
4824,
4839
],
[
6267,
6282
],
[
7151,
7166
],
[
8299,
8314
],
[
8365,
8380
],
[
15471,
15486
],
[
16083,
16098
],
[
17933,
17948
]
],
[
[
1211,
1230
],
[
1784,
1803
],
[
2741,
2760
],
[
2912,
2931
],
[
10246,
10265
],
[
10404,
10423
]
],
[
[
1232,
1245
],
[
7386,
7399
]
],
[
[
1530,
1548
]
],
[
[
2156,
2185
]
],
[
[
3086,
3130
]
],
[
[
3439,
3487
]
],
[
[
3847,
3880
]
],
[
[
4248,
4275
]
],
[
[
5105,
5141
]
],
[
[
6473,
6502
]
],
[
[
7405,
7434
]
],
[
[
8943,
8965
]
],
[
[
9779,
9816
]
],
[
[
10570,
10619
]
],
[
[
12165,
12204
]
],
[
[
13790,
13825
]
],
[
[
15178,
15242
]
],
[
[
15556,
15616
]
],
[
[
16244,
16265
]
],
[
[
17064,
17088
]
]
] |
# Given a binary tree, find its maximum depth.
# The maximum depth is the number of nodes along the longest path from the root node down to the farthest leaf node.
# Definition for a binary tree node
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
# @param root, a tree node
# @return an integer
def maxDepth(self, root):
if root is None:
return 0
        L = 0
        R = 0
if not (root.left is None):
L = self.maxDepth(root.left)
if not (root.right is None):
R = self.maxDepth(root.right)
return (L + 1) if L > R else (R + 1)
# if __name__ == "__main__" :
# root = TreeNode(1)
# root.left = TreeNode(1)
# root.right = TreeNode(1)
# s = Solution()
#    print(s.maxDepth(root))
#    print("hello")
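# Added illustrative check (not part of the original solution): build a small
# asymmetric tree and verify the reported depth using the classes defined above.
if __name__ == "__main__":
    root = TreeNode(1)
    root.left = TreeNode(2)
    root.left.left = TreeNode(3)
    root.right = TreeNode(4)
    assert Solution().maxDepth(root) == 3  # longest path: 1 -> 2 -> 3 (three nodes)
    print("maxDepth check passed")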
| [
[
[
207,
215
]
],
[
[
323,
331
]
]
] |
import os
def shell():
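    # read one command from the user; "exit" ends the session, anything else is
    # passed to os.system, then shell() recurses to show the next prompt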
inputuser = input("> ")
    if inputuser == "exit":
exit()
os.system(inputuser)
return shell()
print("Welcome to duckshell (python alpha)")
print("System info: ")
os.system("echo distro: $(uname -o)");
os.system("echo hostname: $(uname -n)");
os.system("echo architecture: $(uname -m)");
os.system("echo linux kernel: $(uname -r)");
os.system("echo python version: $(python --version)")
shell() | [
[
[
7,
9
],
[
210,
212
],
[
249,
251
],
[
290,
292
],
[
335,
337
],
[
380,
382
],
[
101,
103
]
],
[
[
15,
20
],
[
434,
439
],
[
133,
138
]
]
] |
import logging
import subprocess
import click
try:
from instance import settings
APP_ROOT = settings.APP_ROOT
except ImportError:
logging.error('Ensure __init__.py and settings.py both exist in instance/')
exit(1)
except AttributeError:
from config import settings
APP_ROOT = settings.APP_ROOT
PACKAGE_PATH = '{0}/{1}'.format(APP_ROOT, '/catwatch')
@click.command()
@click.argument('path', default=PACKAGE_PATH)
def cli(path):
"""
Run test coverage report.
:return: Subprocess call result
"""
cmd = 'py.test --cov-report term-missing --cov {0} {0}'.format(path)
return subprocess.call(cmd, shell=True)
| [
[
[
7,
14
],
[
145,
152
]
],
[
[
22,
32
],
[
625,
635
]
],
[
[
41,
46
],
[
381,
386
],
[
398,
403
]
],
[
[
78,
86
],
[
103,
111
]
],
[
[
92,
100
],
[
355,
363
]
],
[
[
279,
287
],
[
304,
312
]
],
[
[
293,
301
],
[
355,
363
]
],
[
[
323,
335
],
[
429,
441
]
],
[
[
447,
450
]
]
] |
import os
import time
from multiprocessing import Process
from typing import Tuple
import flwr as fl
import numpy as np
import tensorflow as tf
from flwr.server.strategy import FedAvg
import dataset
# generate random integer values
from random import seed
from random import randint
# Make TensorFlow log less verbose
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
# K: Prevent TF from using GPU (not enough memory)
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
DATASET = Tuple[Tuple[np.ndarray, np.ndarray], Tuple[np.ndarray, np.ndarray]]
def start_server(num_rounds: int, num_clients: int, fraction_fit: float):
"""Start the server with a slightly adjusted FedAvg strategy."""
strategy = FedAvg(min_available_clients=num_clients, fraction_fit=fraction_fit)
# Exposes the server by default on port 8080
fl.server.start_server(strategy=strategy, config={"num_rounds": num_rounds})
def start_client(dataset: DATASET) -> None:
"""Start a single client with the provided dataset."""
# Load and compile a Keras model for CIFAR-10
#model = tf.keras.applications.MobileNetV2((32, 32, 3), classes=10, weights=None)
model = tf.keras.Sequential(
[
tf.keras.Input(shape=(32, 32, 3)),
tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation="relu"),
tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),
tf.keras.layers.Conv2D(64, kernel_size=(3, 3), activation="relu"),
tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),
tf.keras.layers.Flatten(),
tf.keras.layers.Dropout(0.5),
tf.keras.layers.Dense(10, activation="softmax"),
]
)
model.compile("adam", "sparse_categorical_crossentropy", metrics=[tf.keras.metrics.CategoricalAccuracy(), tf.keras.metrics.MeanSquaredError()])
### @TODO: check if "accuracy" and tf.keras.metrics.CategoricalAccuracy() return the same results
# Unpack the CIFAR-10 dataset partition
(x_train, y_train), (x_test, y_test) = dataset
# Define a Flower client
class CifarClient(fl.client.NumPyClient):
def get_parameters(self):
"""Return current weights."""
return model.get_weights()
def fit(self, parameters, config):
"""Fit model and return new weights as well as number of training
examples."""
model.set_weights(parameters)
# Remove steps_per_epoch if you want to train over the full dataset
# https://keras.io/api/models/model_training_apis/#fit-method
#nap_time = randint (0, 5)
#time.sleep (nap_time)
#print ("Slept for", nap_time, "seconds.")
model.fit(x_train, y_train, epochs=10, batch_size=256, steps_per_epoch=10)
return model.get_weights(), len(x_train), {}
def evaluate(self, parameters, config):
"""Evaluate using provided parameters."""
model.set_weights(parameters)
loss, accuracy, mse = model.evaluate(x_test, y_test)
print ('"Loss:', loss, ". Accuracy:", accuracy, ". MSE:", mse, ".")
return loss, len(x_test), {"accuracy": accuracy}
# Start Flower client
fl.client.start_numpy_client("0.0.0.0:8080", client=CifarClient())
def run_simulation(num_rounds: int, num_clients: int, fraction_fit: float):
"""Start a FL simulation."""
# This will hold all the processes which we are going to create
processes = []
# Start the server
server_process = Process(
target=start_server, args=(num_rounds, num_clients, fraction_fit)
)
server_process.start()
processes.append(server_process)
# Optionally block the script here for a second or two so the server has time to start
time.sleep(2)
# Load the dataset partitions
partitions = dataset.load(num_partitions=num_clients)
# Start all the clients
for partition in partitions:
client_process = Process(target=start_client, args=(partition,))
client_process.start()
processes.append(client_process)
# Block until all processes are finished
for p in processes:
p.join()
if __name__ == "__main__":
run_simulation(num_rounds=100, num_clients=5, fraction_fit=0.5)
| [
[
[
7,
9
],
[
322,
324
],
[
415,
417
]
],
[
[
17,
21
],
[
3720,
3724
]
],
[
[
50,
57
],
[
3471,
3478
],
[
3914,
3921
]
],
[
[
77,
82
],
[
469,
474
],
[
475,
480
],
[
506,
511
]
],
[
[
91,
101
],
[
819,
821
],
[
2022,
2024
],
[
3160,
3162
]
],
[
[
109,
120
],
[
481,
483
],
[
493,
495
],
[
512,
514
],
[
524,
526
]
],
[
[
128,
144
],
[
1150,
1152
],
[
1185,
1187
],
[
1228,
1230
],
[
1303,
1305
],
[
1359,
1361
],
[
1434,
1436
],
[
1490,
1492
],
[
1525,
1527
],
[
1563,
1565
],
[
1694,
1696
],
[
1734,
1736
]
],
[
[
178,
184
],
[
697,
703
]
],
[
[
193,
200
],
[
3786,
3793
]
],
[
[
254,
258
]
],
[
[
278,
285
]
],
[
[
459,
466
],
[
924,
931
]
],
[
[
543,
555
],
[
3495,
3507
]
],
[
[
902,
914
],
[
3929,
3941
]
],
[
[
3233,
3247
],
[
4154,
4168
]
]
] |
#!/usr/bin/env python3
elements = (1, 2, 4)
bitset = 0 # 1. Initialize bitset
# Add elements to bitset
for i in elements:
bitset = bitset | 1<<i # 2. Add element to bitset
# Print contents of bitset
print(bitset)
# Test for elements in bitset
for i in range(6):
if bitset & 1<<i: # 3. Test if element is in bitset
print(i)
# Remove elements from bitset
for i in elements:
bitset = bitset & ~(1<<i) # 4. Remove element from bitset
# Print contents of bitset
print(bitset)
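# Added sketch (not in the original script): the same four operations wrapped as
# small helper functions, so the bit manipulation can be reused elsewhere.
def bitset_add(bitset, i):
    return bitset | (1 << i)        # set bit i

def bitset_contains(bitset, i):
    return bool(bitset & (1 << i))  # test bit i

def bitset_remove(bitset, i):
    return bitset & ~(1 << i)       # clear bit i

bs = 0
for i in elements:
    bs = bitset_add(bs, i)
print(bs)                           # 22 == 0b10110, bits 1, 2 and 4 set
for i in range(6):
    if bitset_contains(bs, i):
        print(i)                    # prints 1, 2, 4
for i in elements:
    bs = bitset_remove(bs, i)
print(bs)                           # 0, all elements removed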
| [
[
[
24,
32
],
[
139,
147
],
[
431,
439
]
],
[
[
45,
51
],
[
162,
168
],
[
246,
252
],
[
311,
317
],
[
454,
460
],
[
543,
549
]
],
[
[
134,
135
],
[
174,
175
]
],
[
[
153,
159
],
[
162,
168
],
[
246,
252
],
[
311,
317
],
[
454,
460
],
[
543,
549
]
],
[
[
289,
290
],
[
323,
324
],
[
388,
389
]
],
[
[
426,
427
],
[
468,
469
]
],
[
[
445,
451
],
[
454,
460
],
[
543,
549
]
]
] |
'''
The management of the salt command line utilities is handled here
'''
# Import python libs
import os
import sys
# Import salt components
import salt.cli.caller
import salt.cli.cp
import salt.cli.key
import salt.cli.batch
import salt.client
import salt.output
import salt.runner
import optparse
from salt.utils import parsers
from salt.utils.verify import verify_env
from salt.version import __version__ as VERSION
from salt.exceptions import SaltInvocationError, SaltClientError, SaltException
class SaltCMD(parsers.SaltCMDOptionParser):
'''
The execution of a salt command happens here
'''
def run(self):
'''
Execute the salt command line
'''
self.parse_args()
try:
local = salt.client.LocalClient(self.get_config_file_path('master'))
except SaltClientError as exc:
self.exit(2, '{0}\n'.format(exc))
return
if self.options.query:
ret = local.find_cmd(self.config['cmd'])
for jid in ret:
if isinstance(ret, list) or isinstance(ret, dict):
print('Return data for job {0}:'.format(jid))
salt.output.display_output(ret[jid], None, self.config)
print('')
elif self.options.batch:
batch = salt.cli.batch.Batch(self.config)
batch.run()
else:
if self.options.timeout <= 0:
self.options.timeout = local.opts['timeout']
args = [
self.config['tgt'],
self.config['fun'],
self.config['arg'],
self.options.timeout,
]
if self.selected_target_option:
args.append(self.selected_target_option)
else:
args.append('glob')
if getattr(self.options, 'return'):
args.append(getattr(self.options, 'return'))
else:
args.append('')
try:
# local will be None when there was an error
if local:
if self.options.static:
if self.options.verbose:
args.append(True)
full_ret = local.cmd_full_return(*args)
ret, out = self._format_ret(full_ret)
self._output_ret(ret, out)
elif self.config['fun'] == 'sys.doc':
ret = {}
out = ''
for full_ret in local.cmd_cli(*args):
ret_, out = self._format_ret(full_ret)
ret.update(ret_)
self._output_ret(ret, out)
else:
if self.options.verbose:
args.append(True)
for full_ret in local.cmd_cli(*args):
ret, out = self._format_ret(full_ret)
self._output_ret(ret, out)
except SaltInvocationError as exc:
ret = exc
out = ''
def _output_ret(self, ret, out):
'''
Print the output from a single return to the terminal
'''
# Handle special case commands
if self.config['fun'] == 'sys.doc':
self._print_docs(ret)
else:
# Determine the proper output method and run it
salt.output.display_output(ret, out, self.config)
def _format_ret(self, full_ret):
'''
Take the full return data and format it to simple output
'''
ret = {}
out = ''
for key, data in full_ret.items():
ret[key] = data['ret']
if 'out' in data:
out = data['out']
return ret, out
def _print_docs(self, ret):
'''
Print out the docstrings for all of the functions on the minions
'''
docs = {}
if not ret:
self.exit(2, 'No minions found to gather docs from\n')
for host in ret:
for fun in ret[host]:
if fun not in docs:
if ret[host][fun]:
docs[fun] = ret[host][fun]
for fun in sorted(docs):
print(fun + ':')
print(docs[fun])
print('')
class SaltCP(parsers.SaltCPOptionParser):
'''
Run the salt-cp command line client
'''
def run(self):
'''
Execute salt-cp
'''
self.parse_args()
cp_ = salt.cli.cp.SaltCP(self.config)
cp_.run()
class SaltKey(parsers.SaltKeyOptionParser):
'''
Initialize the Salt key manager
'''
def run(self):
'''
Execute salt-key
'''
self.parse_args()
if self.config['verify_env']:
verify_env([
os.path.join(self.config['pki_dir'], 'minions'),
os.path.join(self.config['pki_dir'], 'minions_pre'),
os.path.join(self.config['pki_dir'], 'minions_rejected'),
os.path.dirname(self.config['key_logfile']),
],
self.config['user'],
permissive=self.config['permissive_pki_access'],
pki_dir=self.config['pki_dir'],
)
self.setup_logfile_logger()
key = salt.cli.key.Key(self.config)
key.run()
class SaltCall(parsers.SaltCallOptionParser):
'''
Used to locally execute a salt command
'''
def run(self):
'''
Execute the salt call!
'''
self.parse_args()
if self.config['verify_env']:
verify_env([
self.config['pki_dir'],
self.config['cachedir'],
os.path.dirname(self.config['log_file'])
],
self.config['user'],
permissive=self.config['permissive_pki_access'],
pki_dir=self.config['pki_dir'],
)
caller = salt.cli.caller.Caller(self.config)
if self.options.doc:
caller.print_docs()
self.exit(0)
if self.options.grains_run:
caller.print_grains()
self.exit(0)
caller.run()
class SaltRun(parsers.SaltRunOptionParser):
def run(self):
'''
Execute salt-run
'''
self.parse_args()
runner = salt.runner.Runner(self.config)
if self.options.doc:
runner._print_docs()
else:
# Run this here so SystemExit isn't raised anywhere else when
# someone tries to use the runners via the python api
try:
runner.run()
except SaltClientError as exc:
raise SystemExit(str(exc))
| [
[
[
102,
104
],
[
4940,
4942
],
[
5009,
5011
],
[
5082,
5084
],
[
5160,
5162
],
[
5869,
5871
]
],
[
[
112,
115
]
],
[
[
149,
164
]
],
[
[
172,
183
]
],
[
[
191,
203
]
],
[
[
211,
225
]
],
[
[
233,
244
]
],
[
[
252,
263
]
],
[
[
271,
282
],
[
753,
757
],
[
1184,
1188
],
[
1323,
1327
],
[
3494,
3498
],
[
4613,
4617
],
[
5440,
5444
],
[
6111,
6115
],
[
6511,
6515
]
],
[
[
291,
299
]
],
[
[
323,
330
],
[
516,
523
],
[
4420,
4427
],
[
4679,
4686
],
[
5505,
5512
],
[
6368,
6375
]
],
[
[
361,
371
],
[
4907,
4917
],
[
5747,
5757
]
],
[
[
397,
419
]
],
[
[
448,
467
],
[
3088,
3107
]
],
[
[
469,
484
],
[
829,
844
],
[
6824,
6839
]
],
[
[
486,
499
]
],
[
[
508,
515
]
],
[
[
4413,
4419
]
],
[
[
4671,
4678
]
],
[
[
5496,
5504
]
],
[
[
6360,
6367
]
]
] |
# # ⚠ Warning
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
# LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# [🥭 Mango Markets](https://mango.markets/) support is available at:
# [Docs](https://docs.mango.markets/)
# [Discord](https://discord.gg/67jySBhxrg)
# [Twitter](https://twitter.com/mangomarkets)
# [Github](https://github.com/blockworks-foundation)
# [Email](mailto:hello@blockworks.foundation)
import typing
from decimal import Decimal
from solana.publickey import PublicKey
from .accountinfo import AccountInfo
from .addressableaccount import AddressableAccount
from .cache import Cache, PerpMarketCache, MarketCache
from .constants import SYSTEM_PROGRAM_ADDRESS
from .context import Context
from .instrumentlookup import InstrumentLookup
from .instrumentvalue import InstrumentValue
from .layouts import layouts
from .lotsizeconverter import LotSizeConverter, RaisingLotSizeConverter
from .marketlookup import MarketLookup
from .metadata import Metadata
from .token import Instrument, Token
from .tokenbank import TokenBank
from .version import Version
# # 🥭 GroupSlotSpotMarket class
#
class GroupSlotSpotMarket:
def __init__(self, address: PublicKey, maint_asset_weight: Decimal, init_asset_weight: Decimal, maint_liab_weight: Decimal, init_liab_weight: Decimal) -> None:
self.address: PublicKey = address
self.maint_asset_weight: Decimal = maint_asset_weight
self.init_asset_weight: Decimal = init_asset_weight
self.maint_liab_weight: Decimal = maint_liab_weight
self.init_liab_weight: Decimal = init_liab_weight
@staticmethod
def from_layout(layout: typing.Any) -> "GroupSlotSpotMarket":
spot_market: PublicKey = layout.spot_market
maint_asset_weight: Decimal = round(layout.maint_asset_weight, 8)
init_asset_weight: Decimal = round(layout.init_asset_weight, 8)
maint_liab_weight: Decimal = round(layout.maint_liab_weight, 8)
init_liab_weight: Decimal = round(layout.init_liab_weight, 8)
return GroupSlotSpotMarket(spot_market, maint_asset_weight, init_asset_weight, maint_liab_weight, init_liab_weight)
@staticmethod
def from_layout_or_none(layout: typing.Any) -> typing.Optional["GroupSlotSpotMarket"]:
if (layout.spot_market is None) or (layout.spot_market == SYSTEM_PROGRAM_ADDRESS):
return None
return GroupSlotSpotMarket.from_layout(layout)
def __str__(self) -> str:
return f"""« 𝙶𝚛𝚘𝚞𝚙𝚂𝚕𝚘𝚝𝚂𝚙𝚘𝚝𝙼𝚊𝚛𝚔𝚎𝚝 [{self.address}]
Asset Weights:
Initial: {self.init_asset_weight}
Maintenance: {self.maint_asset_weight}
Liability Weights:
Initial: {self.init_liab_weight}
Maintenance: {self.maint_liab_weight}
»"""
def __repr__(self) -> str:
return f"{self}"
# # 🥭 GroupSlotPerpMarket class
#
class GroupSlotPerpMarket:
def __init__(self, address: PublicKey, maint_asset_weight: Decimal, init_asset_weight: Decimal, maint_liab_weight: Decimal, init_liab_weight: Decimal, liquidation_fee: Decimal, base_lot_size: Decimal, quote_lot_size: Decimal) -> None:
self.address: PublicKey = address
self.maint_asset_weight: Decimal = maint_asset_weight
self.init_asset_weight: Decimal = init_asset_weight
self.maint_liab_weight: Decimal = maint_liab_weight
self.init_liab_weight: Decimal = init_liab_weight
self.liquidation_fee: Decimal = liquidation_fee
self.base_lot_size: Decimal = base_lot_size
self.quote_lot_size: Decimal = quote_lot_size
@staticmethod
def from_layout(layout: typing.Any) -> "GroupSlotPerpMarket":
perp_market: PublicKey = layout.perp_market
maint_asset_weight: Decimal = round(layout.maint_asset_weight, 8)
init_asset_weight: Decimal = round(layout.init_asset_weight, 8)
maint_liab_weight: Decimal = round(layout.maint_liab_weight, 8)
init_liab_weight: Decimal = round(layout.init_liab_weight, 8)
liquidation_fee: Decimal = round(layout.liquidation_fee, 8)
base_lot_size: Decimal = layout.base_lot_size
quote_lot_size: Decimal = layout.quote_lot_size
return GroupSlotPerpMarket(perp_market, maint_asset_weight, init_asset_weight, maint_liab_weight, init_liab_weight, liquidation_fee, base_lot_size, quote_lot_size)
@staticmethod
def from_layout_or_none(layout: typing.Any) -> typing.Optional["GroupSlotPerpMarket"]:
if (layout.perp_market is None) or (layout.perp_market == SYSTEM_PROGRAM_ADDRESS):
return None
return GroupSlotPerpMarket.from_layout(layout)
def __str__(self) -> str:
return f"""« 𝙶𝚛𝚘𝚞𝚙𝚂𝚕𝚘𝚝𝙿𝚎𝚛𝚙𝙼𝚊𝚛𝚔𝚎𝚝 [{self.address}]
Asset Weights:
Initial: {self.init_asset_weight}
Maintenance: {self.maint_asset_weight}
Liability Weights:
Initial: {self.init_liab_weight}
Maintenance: {self.maint_liab_weight}
Liquidation Fee: {self.liquidation_fee}
Base Lot Size: {self.base_lot_size}
Quote Lot Size: {self.quote_lot_size}
»"""
def __repr__(self) -> str:
return f"{self}"
# # 🥭 GroupSlot class
#
# `GroupSlot` gathers indexed slot items together instead of separate arrays.
#
class GroupSlot:
def __init__(self, index: int, base_instrument: Instrument, base_token_bank: typing.Optional[TokenBank], quote_token_bank: TokenBank, spot_market_info: typing.Optional[GroupSlotSpotMarket], perp_market_info: typing.Optional[GroupSlotPerpMarket], perp_lot_size_converter: LotSizeConverter, oracle: PublicKey) -> None:
self.index: int = index
self.base_instrument: Instrument = base_instrument
self.base_token_bank: typing.Optional[TokenBank] = base_token_bank
self.quote_token_bank: TokenBank = quote_token_bank
self.spot_market: typing.Optional[GroupSlotSpotMarket] = spot_market_info
self.perp_market: typing.Optional[GroupSlotPerpMarket] = perp_market_info
self.perp_lot_size_converter: LotSizeConverter = perp_lot_size_converter
self.oracle: PublicKey = oracle
def __str__(self) -> str:
base_token_bank = f"{self.base_token_bank}".replace("\n", "\n ")
quote_token_bank = f"{self.quote_token_bank}".replace("\n", "\n ")
spot_market_info = f"{self.spot_market}".replace("\n", "\n ")
perp_market_info = f"{self.perp_market}".replace("\n", "\n ")
return f"""« 𝙶𝚛𝚘𝚞𝚙𝚂𝚕𝚘𝚝[{self.index}] {self.base_instrument}
Base Token Info:
{base_token_bank}
Quote Token Info:
{quote_token_bank}
Oracle: {self.oracle}
Spot Market:
{spot_market_info}
Perp Market:
{perp_market_info}
»"""
def __repr__(self) -> str:
return f"{self}"
# # 🥭 Group class
#
# `Group` defines root functionality for Mango Markets.
#
class Group(AddressableAccount):
def __init__(self, account_info: AccountInfo, version: Version, name: str,
meta_data: Metadata,
shared_quote: TokenBank,
slot_indices: typing.Sequence[bool],
slots: typing.Sequence[GroupSlot],
signer_nonce: Decimal, signer_key: PublicKey,
admin: PublicKey, serum_program_address: PublicKey, cache: PublicKey, valid_interval: Decimal,
insurance_vault: PublicKey, srm_vault: PublicKey, msrm_vault: PublicKey, fees_vault: PublicKey) -> None:
super().__init__(account_info)
self.version: Version = version
self.name: str = name
self.meta_data: Metadata = meta_data
self.shared_quote: TokenBank = shared_quote
self.slot_indices: typing.Sequence[bool] = slot_indices
self.slots: typing.Sequence[GroupSlot] = slots
self.signer_nonce: Decimal = signer_nonce
self.signer_key: PublicKey = signer_key
self.admin: PublicKey = admin
self.serum_program_address: PublicKey = serum_program_address
self.cache: PublicKey = cache
self.valid_interval: Decimal = valid_interval
self.insurance_vault: PublicKey = insurance_vault
self.srm_vault: PublicKey = srm_vault
self.msrm_vault: PublicKey = msrm_vault
self.fees_vault: PublicKey = fees_vault
@property
def shared_quote_token(self) -> Token:
return Token.ensure(self.shared_quote.token)
@property
def liquidity_incentive_token_bank(self) -> TokenBank:
for token_bank in self.tokens:
if token_bank.token.symbol_matches("MNGO"):
return token_bank
raise Exception(f"Could not find token info for symbol 'MNGO' in group {self.address}")
@property
def liquidity_incentive_token(self) -> Token:
return Token.ensure(self.liquidity_incentive_token_bank.token)
@property
def tokens(self) -> typing.Sequence[TokenBank]:
return [*self.base_tokens, self.shared_quote]
@property
def tokens_by_index(self) -> typing.Sequence[typing.Optional[TokenBank]]:
return [*self.base_tokens_by_index, self.shared_quote]
@property
def slots_by_index(self) -> typing.Sequence[typing.Optional[GroupSlot]]:
mapped_items: typing.List[typing.Optional[GroupSlot]] = []
slot_counter = 0
for available in self.slot_indices:
if available:
mapped_items += [self.slots[slot_counter]]
slot_counter += 1
else:
mapped_items += [None]
return mapped_items
@property
def base_tokens(self) -> typing.Sequence[TokenBank]:
return [slot.base_token_bank for slot in self.slots if slot.base_token_bank is not None]
@property
def base_tokens_by_index(self) -> typing.Sequence[typing.Optional[TokenBank]]:
return [slot.base_token_bank if slot is not None else None for slot in self.slots_by_index]
@property
def oracles(self) -> typing.Sequence[PublicKey]:
return [slot.oracle for slot in self.slots if slot.oracle is not None]
@property
def oracles_by_index(self) -> typing.Sequence[typing.Optional[PublicKey]]:
return [slot.oracle if slot is not None else None for slot in self.slots_by_index]
@property
def spot_markets(self) -> typing.Sequence[GroupSlotSpotMarket]:
return [slot.spot_market for slot in self.slots if slot.spot_market is not None]
@property
def spot_markets_by_index(self) -> typing.Sequence[typing.Optional[GroupSlotSpotMarket]]:
return [slot.spot_market if slot is not None else None for slot in self.slots_by_index]
@property
def perp_markets(self) -> typing.Sequence[GroupSlotPerpMarket]:
return [slot.perp_market for slot in self.slots if slot.perp_market is not None]
@property
def perp_markets_by_index(self) -> typing.Sequence[typing.Optional[GroupSlotPerpMarket]]:
return [slot.perp_market if slot is not None else None for slot in self.slots_by_index]
@staticmethod
def from_layout(layout: typing.Any, name: str, account_info: AccountInfo, version: Version, instrument_lookup: InstrumentLookup, market_lookup: MarketLookup) -> "Group":
meta_data: Metadata = Metadata.from_layout(layout.meta_data)
tokens: typing.List[typing.Optional[TokenBank]] = [
TokenBank.from_layout_or_none(t, instrument_lookup) for t in layout.tokens]
# By convention, the shared quote token is always at the end.
quote_token_bank: typing.Optional[TokenBank] = tokens[-1]
if quote_token_bank is None:
raise Exception("Could not find quote token info at end of group tokens.")
slots: typing.List[GroupSlot] = []
in_slots: typing.List[bool] = []
for index in range(len(tokens) - 1):
spot_market_info: typing.Optional[GroupSlotSpotMarket] = GroupSlotSpotMarket.from_layout_or_none(
layout.spot_markets[index])
perp_market_info: typing.Optional[GroupSlotPerpMarket] = GroupSlotPerpMarket.from_layout_or_none(
layout.perp_markets[index])
if (spot_market_info is None) and (perp_market_info is None):
in_slots += [False]
else:
perp_lot_size_converter: LotSizeConverter = RaisingLotSizeConverter()
base_token_bank: typing.Optional[TokenBank] = tokens[index]
base_instrument: Instrument
if base_token_bank is not None:
base_instrument = base_token_bank.token
else:
# It's possible there's no underlying SPL token and we have a pure PERP market.
if perp_market_info is None:
raise Exception(f"Cannot find base token or perp market info for index {index}")
perp_market = market_lookup.find_by_address(perp_market_info.address)
if perp_market is None:
raise Exception(f"Cannot find base token or perp market for index {index}")
base_instrument = perp_market.base
if perp_market_info is not None:
perp_lot_size_converter = LotSizeConverter(
base_instrument, perp_market_info.base_lot_size, quote_token_bank.token, perp_market_info.quote_lot_size)
oracle: PublicKey = layout.oracles[index]
slot: GroupSlot = GroupSlot(index, base_instrument, base_token_bank, quote_token_bank,
spot_market_info, perp_market_info, perp_lot_size_converter, oracle)
slots += [slot]
in_slots += [True]
signer_nonce: Decimal = layout.signer_nonce
signer_key: PublicKey = layout.signer_key
admin: PublicKey = layout.admin
serum_program_address: PublicKey = layout.serum_program_address
cache_address: PublicKey = layout.cache
valid_interval: Decimal = layout.valid_interval
insurance_vault: PublicKey = layout.insurance_vault
srm_vault: PublicKey = layout.srm_vault
msrm_vault: PublicKey = layout.msrm_vault
fees_vault: PublicKey = layout.fees_vault
return Group(account_info, version, name, meta_data, quote_token_bank, in_slots, slots, signer_nonce, signer_key, admin, serum_program_address, cache_address, valid_interval, insurance_vault, srm_vault, msrm_vault, fees_vault)
@staticmethod
def parse(account_info: AccountInfo, name: str, instrument_lookup: InstrumentLookup, market_lookup: MarketLookup) -> "Group":
data = account_info.data
if len(data) != layouts.GROUP.sizeof():
raise Exception(
f"Group data length ({len(data)}) does not match expected size ({layouts.GROUP.sizeof()})")
layout = layouts.GROUP.parse(data)
return Group.from_layout(layout, name, account_info, Version.V3, instrument_lookup, market_lookup)
@staticmethod
def parse_with_context(context: Context, account_info: AccountInfo) -> "Group":
name = context.lookup_group_name(account_info.address)
return Group.parse(account_info, name, context.instrument_lookup, context.market_lookup)
@staticmethod
def load(context: Context, address: typing.Optional[PublicKey] = None) -> "Group":
group_address: PublicKey = address or context.group_address
account_info = AccountInfo.load(context, group_address)
if account_info is None:
raise Exception(f"Group account not found at address '{group_address}'")
name = context.lookup_group_name(account_info.address)
return Group.parse(account_info, name, context.instrument_lookup, context.market_lookup)
def slot_by_spot_market_address(self, spot_market_address: PublicKey) -> GroupSlot:
for slot in self.slots:
if slot.spot_market is not None and slot.spot_market.address == spot_market_address:
return slot
raise Exception(f"Could not find spot market {spot_market_address} in group {self.address}")
def slot_by_perp_market_address(self, perp_market_address: PublicKey) -> GroupSlot:
for slot in self.slots:
if slot.perp_market is not None and slot.perp_market.address == perp_market_address:
return slot
raise Exception(f"Could not find perp market {perp_market_address} in group {self.address}")
def slot_by_instrument_or_none(self, instrument: Instrument) -> typing.Optional[GroupSlot]:
for slot in self.slots:
if slot.base_instrument == instrument:
return slot
return None
def slot_by_instrument(self, instrument: Instrument) -> GroupSlot:
slot: typing.Optional[GroupSlot] = self.slot_by_instrument_or_none(instrument)
if slot is not None:
return slot
raise Exception(f"Could not find slot for {instrument} in group {self.address}")
def token_bank_by_instrument(self, instrument: Instrument) -> TokenBank:
for token_bank in self.tokens:
if token_bank.token == instrument:
return token_bank
raise Exception(f"Could not find token {instrument} in group {self.address}")
def token_price_from_cache(self, cache: Cache, token: Instrument) -> InstrumentValue:
market_cache: MarketCache = self.market_cache_from_cache(cache, token)
return market_cache.adjusted_price(token, self.shared_quote_token)
def perp_market_cache_from_cache(self, cache: Cache, token: Instrument) -> typing.Optional[PerpMarketCache]:
market_cache: MarketCache = self.market_cache_from_cache(cache, token)
return market_cache.perp_market
def market_cache_from_cache(self, cache: Cache, instrument: Instrument) -> MarketCache:
slot: GroupSlot = self.slot_by_instrument(instrument)
instrument_index: int = slot.index
return cache.market_cache_for_index(instrument_index)
def fetch_cache(self, context: Context) -> Cache:
return Cache.load(context, self.cache)
def __str__(self) -> str:
slot_count = len(self.slots)
slots = "\n ".join([f"{item}".replace("\n", "\n ") for item in self.slots])
return f"""« 𝙶𝚛𝚘𝚞𝚙 {self.version} [{self.address}]
{self.meta_data}
Name: {self.name}
Signer [Nonce: {self.signer_nonce}]: {self.signer_key}
Admin: {self.admin}
DEX Program ID: {self.serum_program_address}
Cache: {self.cache}
Insurance Vault: {self.insurance_vault}
SRM Vault: {self.srm_vault}
MSRM Vault: {self.msrm_vault}
Fees Vault: {self.fees_vault}
Valid Interval: {self.valid_interval}
Basket [{slot_count} markets]:
{slots}
»"""
| [
[
[
802,
808
],
[
2014,
2020
],
[
2586,
2592
],
[
2571,
2577
],
[
3959,
3965
],
[
4758,
4764
],
[
4743,
4749
],
[
5668,
5674
],
[
5743,
5749
],
[
5799,
5805
],
[
6029,
6035
],
[
6160,
6166
],
[
6242,
6248
],
[
7409,
7415
],
[
7456,
7462
],
[
8015,
8021
],
[
8072,
8078
],
[
9191,
9197
],
[
9321,
9327
],
[
9337,
9343
],
[
9476,
9482
],
[
9492,
9498
],
[
9543,
9549
],
[
9555,
9561
],
[
9906,
9912
],
[
10084,
10090
],
[
10100,
10106
],
[
10269,
10275
],
[
10425,
10431
],
[
10441,
10447
],
[
10606,
10612
],
[
10787,
10793
],
[
10803,
10809
],
[
10983,
10989
],
[
11164,
11170
],
[
11180,
11186
],
[
11362,
11368
],
[
11593,
11599
],
[
11605,
11611
],
[
11822,
11828
],
[
12001,
12007
],
[
12047,
12053
],
[
12145,
12151
],
[
12299,
12305
],
[
12670,
12676
],
[
15618,
15624
],
[
16841,
16847
],
[
17087,
17093
],
[
17913,
17919
]
],
[
[
830,
837
],
[
1584,
1591
],
[
1612,
1619
],
[
1640,
1647
],
[
1667,
1674
],
[
1760,
1767
],
[
1821,
1828
],
[
1881,
1888
],
[
1940,
1947
],
[
2132,
2139
],
[
2205,
2212
],
[
2277,
2284
],
[
2348,
2355
],
[
3292,
3299
],
[
3320,
3327
],
[
3348,
3355
],
[
3375,
3382
],
[
3401,
3408
],
[
3425,
3432
],
[
3450,
3457
],
[
3543,
3550
],
[
3604,
3611
],
[
3664,
3671
],
[
3723,
3730
],
[
3780,
3787
],
[
3834,
3841
],
[
3887,
3894
],
[
4077,
4084
],
[
4150,
4157
],
[
4222,
4229
],
[
4293,
4300
],
[
4362,
4369
],
[
4428,
4435
],
[
4483,
4490
],
[
7515,
7522
],
[
7650,
7657
],
[
8134,
8141
],
[
8380,
8387
],
[
14038,
14045
],
[
14302,
14309
]
],
[
[
867,
876
],
[
1553,
1562
],
[
1707,
1716
],
[
2073,
2082
],
[
3261,
3270
],
[
3490,
3499
],
[
4018,
4027
],
[
5888,
5897
],
[
6400,
6409
],
[
7536,
7545
],
[
7571,
7580
],
[
7605,
7614
],
[
7623,
7632
],
[
7693,
7702
],
[
7715,
7724
],
[
7738,
7747
],
[
7761,
7770
],
[
8182,
8191
],
[
8225,
8234
],
[
8279,
8288
],
[
8333,
8342
],
[
8435,
8444
],
[
8487,
8496
],
[
8534,
8543
],
[
8582,
8591
],
[
10285,
10294
],
[
10457,
10466
],
[
13698,
13707
],
[
14088,
14097
],
[
14133,
14142
],
[
14189,
14198
],
[
14253,
14262
],
[
14359,
14368
],
[
14413,
14422
],
[
14462,
14471
],
[
14512,
14521
],
[
15634,
15643
],
[
15688,
15697
],
[
16140,
16149
],
[
16488,
16497
]
],
[
[
903,
914
],
[
7256,
7267
],
[
11399,
11410
],
[
14825,
14836
],
[
15374,
15385
],
[
15756,
15767
]
],
[
[
947,
965
],
[
7198,
7216
]
],
[
[
985,
990
],
[
17633,
17638
],
[
17884,
17889
],
[
18112,
18117
],
[
18374,
18379
],
[
18396,
18401
]
],
[
[
992,
1007
],
[
17929,
17944
]
],
[
[
1009,
1020
],
[
17701,
17712
],
[
17969,
17980
],
[
18146,
18157
]
],
[
[
1044,
1066
],
[
2692,
2714
],
[
4864,
4886
]
],
[
[
1088,
1095
],
[
15351,
15358
],
[
15600,
15607
],
[
18362,
18369
]
],
[
[
1126,
1142
],
[
11449,
11465
],
[
14868,
14884
]
],
[
[
1172,
1187
],
[
17662,
17677
]
],
[
[
1209,
1216
],
[
14984,
14991
],
[
15118,
15125
],
[
15163,
15170
]
],
[
[
1247,
1263
],
[
5862,
5878
],
[
6336,
6352
],
[
12592,
12608
],
[
13525,
13541
]
],
[
[
1265,
1288
],
[
12611,
12634
]
],
[
[
1315,
1327
],
[
11482,
11494
],
[
14901,
14913
]
],
[
[
1350,
1358
],
[
7326,
7334
],
[
7915,
7923
],
[
11538,
11546
],
[
11527,
11535
]
],
[
[
1378,
1388
],
[
5639,
5649
],
[
5970,
5980
],
[
12746,
12756
],
[
16826,
16836
],
[
17047,
17057
],
[
17355,
17365
],
[
17647,
17657
],
[
17898,
17908
],
[
18131,
18141
]
],
[
[
1390,
1395
],
[
8656,
8661
],
[
8678,
8683
],
[
9074,
9079
],
[
9096,
9101
]
],
[
[
1419,
1428
],
[
5684,
5693
],
[
5714,
5723
],
[
6045,
6054
],
[
6105,
6114
],
[
7367,
7376
],
[
7963,
7972
],
[
8779,
8788
],
[
9207,
9216
],
[
9353,
9362
],
[
9922,
9931
],
[
10116,
10125
],
[
11649,
11658
],
[
11621,
11630
],
[
11838,
11847
],
[
12686,
12695
],
[
17370,
17379
]
],
[
[
1450,
1457
],
[
7278,
7285
],
[
7842,
7849
],
[
11421,
11428
],
[
15250,
15257
]
],
[
[
1500,
1519
],
[
2407,
2426
],
[
2757,
2776
],
[
5759,
5778
],
[
6176,
6195
],
[
10622,
10641
],
[
10819,
10838
],
[
12184,
12203
],
[
12161,
12180
]
],
[
[
3208,
3227
],
[
4531,
4550
],
[
4929,
4948
],
[
5815,
5834
],
[
6258,
6277
],
[
10999,
11018
],
[
11196,
11215
],
[
12338,
12357
],
[
12315,
12334
]
],
[
[
5576,
5585
],
[
7472,
7481
],
[
8088,
8097
],
[
9508,
9517
],
[
9571,
9580
],
[
12013,
12022
],
[
13766,
13775
],
[
13754,
13763
],
[
16154,
16163
],
[
16502,
16511
],
[
16857,
16866
],
[
17062,
17071
],
[
17103,
17112
],
[
18173,
18182
]
],
[
[
7192,
7197
],
[
14558,
14563
],
[
15204,
15209
],
[
15477,
15482
],
[
15994,
15999
]
]
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Sorting algorithms:
O(n^2)
    Bubble sort
    Insertion sort
    Selection sort
O(n log n), divide and conquer
    Quick sort
    Merge sort
"""
| [] |
from logging import getLogger
from typing import (
List,
Optional,
Set,
)
from pydantic import (
BaseModel,
Extra,
Field,
)
from galaxy import exceptions
from galaxy.datatypes.registry import Registry
from galaxy.managers.collections import DatasetCollectionManager
from galaxy.managers.collections_util import (
api_payload_to_create_params,
dictify_dataset_collection_instance,
dictify_element_reference,
)
from galaxy.managers.context import ProvidesHistoryContext
from galaxy.managers.hdcas import HDCAManager
from galaxy.managers.histories import HistoryManager
from galaxy.schema.fields import (
EncodedDatabaseIdField,
ModelClassField,
)
from galaxy.schema.schema import (
AnyHDCA,
CreateNewCollectionPayload,
DatasetCollectionInstanceType,
DCESummary,
DCEType,
HDCADetailed,
TagCollection,
)
from galaxy.security.idencoding import IdEncodingHelper
from galaxy.webapps.base.controller import UsesLibraryMixinItems
from galaxy.webapps.galaxy.services.base import ServiceBase
log = getLogger(__name__)
class UpdateCollectionAttributePayload(BaseModel):
"""Contains attributes that can be updated for all elements in a dataset collection."""
dbkey: str = Field(..., description="TODO")
class Config:
extra = Extra.forbid # will cause validation to fail if extra attributes are included,
class DatasetCollectionAttributesResult(BaseModel):
dbkey: str = Field(..., description="TODO")
# Are the following fields really used/needed?
extension: str = Field(..., description="The dataset file extension.", example="txt")
model_class: str = ModelClassField("HistoryDatasetCollectionAssociation")
dbkeys: Optional[Set[str]]
extensions: Optional[Set[str]]
tags: TagCollection
class SuitableConverter(BaseModel):
tool_id: str = Field(..., description="The ID of the tool that can perform the type conversion.")
name: str = Field(..., description="The name of the converter.")
target_type: str = Field(..., description="The type to convert to.")
original_type: str = Field(..., description="The type to convert from.")
class SuitableConverters(BaseModel):
"""Collection of converters that can be used on a particular dataset collection."""
__root__: List[SuitableConverter]
class DatasetCollectionContentElements(BaseModel):
"""Represents a collection of elements contained in the dataset collection."""
__root__: List[DCESummary]
class DatasetCollectionsService(ServiceBase, UsesLibraryMixinItems):
def __init__(
self,
security: IdEncodingHelper,
history_manager: HistoryManager,
hdca_manager: HDCAManager,
collection_manager: DatasetCollectionManager,
datatypes_registry: Registry,
):
super().__init__(security)
self.history_manager = history_manager
self.hdca_manager = hdca_manager
self.collection_manager = collection_manager
self.datatypes_registry = datatypes_registry
def create(self, trans: ProvidesHistoryContext, payload: CreateNewCollectionPayload) -> HDCADetailed:
"""
Create a new dataset collection instance.
:type payload: dict
:param payload: (optional) dictionary structure containing:
* collection_type: dataset collection type to create.
* instance_type: Instance type - 'history' or 'library'.
        * name: the new dataset collection's name
* datasets: object describing datasets for collection
:rtype: dict
:returns: element view of new dataset collection
"""
# TODO: Error handling...
create_params = api_payload_to_create_params(payload.dict(exclude_unset=True))
if payload.instance_type == DatasetCollectionInstanceType.history:
if payload.history_id is None:
raise exceptions.RequestParameterInvalidException("Parameter history_id is required.")
history_id = self.decode_id(payload.history_id)
history = self.history_manager.get_owned(history_id, trans.user, current_history=trans.history)
create_params["parent"] = history
create_params["history"] = history
elif payload.instance_type == DatasetCollectionInstanceType.library:
library_folder = self.get_library_folder(trans, payload.folder_id, check_accessible=True)
self.check_user_can_add_to_library_item(trans, library_folder, check_accessible=False)
create_params["parent"] = library_folder
else:
raise exceptions.RequestParameterInvalidException()
dataset_collection_instance = self.collection_manager.create(trans=trans, **create_params)
rval = dictify_dataset_collection_instance(
dataset_collection_instance,
security=trans.security,
url_builder=trans.url_builder,
parent=create_params["parent"],
)
return rval
def copy(
self, trans: ProvidesHistoryContext, id: EncodedDatabaseIdField, payload: UpdateCollectionAttributePayload
):
"""
Iterate over all datasets of a collection and copy datasets with new attributes to a new collection.
        e.g. attributes = {'dbkey': 'dm3'}
"""
self.collection_manager.copy(
trans, trans.history, "hdca", id, copy_elements=True, dataset_instance_attributes=payload.dict()
)
def attributes(
self,
trans: ProvidesHistoryContext,
id: EncodedDatabaseIdField,
instance_type: DatasetCollectionInstanceType = DatasetCollectionInstanceType.history,
) -> DatasetCollectionAttributesResult:
"""
Returns dbkey/extension for collection elements
"""
dataset_collection_instance = self.collection_manager.get_dataset_collection_instance(
trans, id=id, instance_type=instance_type, check_ownership=True
)
rval = dataset_collection_instance.to_dict(view="dbkeysandextensions")
return rval
def suitable_converters(
self,
trans: ProvidesHistoryContext,
id: EncodedDatabaseIdField,
instance_type: DatasetCollectionInstanceType = DatasetCollectionInstanceType.history,
) -> SuitableConverters:
"""
Returns suitable converters for all datatypes in collection
"""
rval = self.collection_manager.get_converters_for_collection(trans, id, self.datatypes_registry, instance_type)
return rval
def show(
self,
trans: ProvidesHistoryContext,
id: EncodedDatabaseIdField,
instance_type: DatasetCollectionInstanceType = DatasetCollectionInstanceType.history,
) -> AnyHDCA:
"""
Returns information about a particular dataset collection.
"""
dataset_collection_instance = self.collection_manager.get_dataset_collection_instance(
trans,
id=id,
instance_type=instance_type,
)
if instance_type == DatasetCollectionInstanceType.history:
parent = dataset_collection_instance.history
elif instance_type == DatasetCollectionInstanceType.library:
parent = dataset_collection_instance.folder
else:
raise exceptions.RequestParameterInvalidException()
rval = dictify_dataset_collection_instance(
dataset_collection_instance,
security=trans.security,
url_builder=trans.url_builder,
parent=parent,
view="element",
)
return rval
def contents(
self,
trans: ProvidesHistoryContext,
hdca_id: EncodedDatabaseIdField,
parent_id: EncodedDatabaseIdField,
instance_type: DatasetCollectionInstanceType = DatasetCollectionInstanceType.history,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> DatasetCollectionContentElements:
"""
Shows direct child contents of indicated dataset collection parent id
:type string: encoded string id
:param id: HDCA.id
:type string: encoded string id
:param parent_id: parent dataset_collection.id for the dataset contents to be viewed
:type integer: int
:param limit: pagination limit for returned dataset collection elements
:type integer: int
:param offset: pagination offset for returned dataset collection elements
:rtype: list
:returns: list of dataset collection elements and contents
"""
# validate HDCA for current user, will throw error if not permitted
# TODO: refactor get_dataset_collection_instance
hdca = self.collection_manager.get_dataset_collection_instance(
trans, id=hdca_id, check_ownership=True, instance_type=instance_type
)
# check to make sure the dsc is part of the validated hdca
decoded_parent_id = self.decode_id(parent_id)
if parent_id != hdca_id and not hdca.contains_collection(decoded_parent_id):
raise exceptions.ObjectNotFound(
"Requested dataset collection is not contained within indicated history content"
)
# retrieve contents
contents = self.collection_manager.get_collection_contents(trans, decoded_parent_id, limit=limit, offset=offset)
# dictify and tack on a collection_url for drilling down into nested collections
def serialize_element(dsc_element) -> DCESummary:
result = dictify_element_reference(dsc_element, recursive=False, security=trans.security)
if result["element_type"] == DCEType.dataset_collection:
assert trans.url_builder
result["object"]["contents_url"] = trans.url_builder(
"contents_dataset_collection",
hdca_id=self.encode_id(hdca.id),
parent_id=self.encode_id(result["object"]["id"]),
)
trans.security.encode_all_ids(result, recursive=True)
return result
rval = [serialize_element(el) for el in contents]
return DatasetCollectionContentElements.parse_obj(rval)
| [
[
[
20,
29
],
[
1062,
1071
]
],
[
[
55,
59
],
[
2303,
2307
],
[
2478,
2482
]
],
[
[
65,
73
],
[
1724,
1732
],
[
1759,
1767
],
[
7931,
7939
],
[
7969,
7977
]
],
[
[
79,
82
],
[
1733,
1736
],
[
1768,
1771
]
],
[
[
114,
123
],
[
1123,
1132
],
[
1433,
1442
],
[
1828,
1837
],
[
2188,
2197
],
[
2368,
2377
]
],
[
[
129,
134
],
[
1311,
1316
]
],
[
[
140,
145
],
[
1245,
1250
],
[
1462,
1467
],
[
1565,
1570
],
[
1859,
1864
],
[
1958,
1963
],
[
2034,
2039
],
[
2109,
2114
]
],
[
[
169,
179
],
[
3940,
3950
],
[
4645,
4655
],
[
7361,
7371
],
[
9212,
9222
]
],
[
[
218,
226
],
[
2792,
2800
]
],
[
[
267,
291
],
[
2738,
2762
]
],
[
[
343,
371
],
[
3737,
3765
]
],
[
[
377,
412
],
[
4806,
4841
],
[
7423,
7458
]
],
[
[
418,
443
],
[
9669,
9694
]
],
[
[
483,
505
],
[
3067,
3089
],
[
5074,
5096
],
[
5557,
5579
],
[
6174,
6196
],
[
6633,
6655
],
[
7714,
7736
]
],
[
[
540,
551
],
[
2697,
2708
]
],
[
[
590,
604
],
[
2659,
2673
]
],
[
[
644,
666
],
[
5102,
5124
],
[
5593,
5615
],
[
6210,
6232
],
[
6669,
6691
],
[
7755,
7777
],
[
7798,
7820
]
],
[
[
672,
687
],
[
1657,
1672
]
],
[
[
730,
737
],
[
6796,
6803
]
],
[
[
743,
769
],
[
3100,
3126
]
],
[
[
775,
804
],
[
5672,
5701
],
[
6289,
6318
],
[
6748,
6777
],
[
7877,
7906
],
[
3836,
3865
],
[
4320,
4349
],
[
5640,
5669
],
[
6257,
6286
],
[
6716,
6745
],
[
7108,
7137
],
[
7234,
7263
],
[
7845,
7874
]
],
[
[
810,
820
],
[
2483,
2493
],
[
9636,
9646
]
],
[
[
826,
833
],
[
9791,
9798
]
],
[
[
839,
851
],
[
3131,
3143
]
],
[
[
857,
870
],
[
1788,
1801
]
],
[
[
913,
929
],
[
2616,
2632
]
],
[
[
973,
994
],
[
2542,
2563
]
],
[
[
1043,
1054
],
[
2529,
2540
]
],
[
[
1056,
1059
]
],
[
[
1090,
1122
],
[
5135,
5167
]
],
[
[
1399,
1432
],
[
5720,
5753
]
],
[
[
1810,
1827
],
[
2308,
2325
]
],
[
[
2169,
2187
],
[
6337,
6355
]
],
[
[
2335,
2367
],
[
8000,
8032
],
[
10288,
10320
]
],
[
[
2503,
2528
]
]
] |
# Generated by Django 2.2.10 on 2020-03-04 23:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dcim', '0101_auto_20200304_1403'),
]
operations = [
migrations.CreateModel(
name='ApplicationType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
('created', models.DateField(auto_now_add=True, null=True)),
('last_updated', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=100, unique=True)),
('slug', models.SlugField(max_length=100, unique=True)),
('language', models.CharField(blank=True, max_length=50, null=True)),
('actual_version', models.CharField(blank=True, max_length=50)),
('recommended_version', models.CharField(max_length=100)),
],
options={
'ordering': ['name', 'slug', 'language', 'actual_version', 'recommended_version'],
},
),
]
| [
[
[
72,
82
],
[
109,
119
],
[
233,
243
]
],
[
[
84,
90
],
[
337,
343
],
[
438,
444
],
[
520,
526
],
[
594,
600
],
[
667,
673
],
[
744,
750
],
[
836,
842
],
[
922,
928
]
],
[
[
99,
108
]
]
] |
# -*- coding:utf-8 -*-
"""Test function for vega.run"""
import unittest
def lazy(func):
"""lazy function wrapper
    :param func: the function to wrap
"""
attr_name = "_lazy_" + func.__name__
def lazy_func(*args, **kwargs):
"""Wrapper of lazy func
:param args: any object
:param kwargs: any kwargs
:return:
"""
if not hasattr(func, attr_name):
setattr(func, attr_name, func(*args, **kwargs))
return getattr(func, attr_name)
return lazy_func
@lazy
def env_args(args):
"""A lazy function will be execute when call
:param args: any object
:return:
"""
return args
class TestPipeline(unittest.TestCase):
"""Test lazy function worked in pipeline"""
def test_env_args(self):
"""Test function 'env_args' is a lazy function"""
args = {'env': 'test'}
env_args(args)
self.assertEqual(env_args(), {'env': 'test'})
| [
[
[
63,
71
],
[
692,
700
]
],
[
[
78,
82
],
[
531,
535
]
],
[
[
540,
548
],
[
887,
895
],
[
927,
935
]
],
[
[
679,
691
]
]
] |
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
"""
Almost every FBCodeBuilder string is ultimately passed to a shell. Escaping
too little or too much tends to be the most common error. The utilities in
this file give a systematic way of avoiding such bugs:
- When you write literal strings destined for the shell, use `ShellQuoted`.
- When these literal strings are parameterized, use `ShellQuoted.format`.
- Any parameters that are raw strings get `shell_quote`d automatically,
while any ShellQuoted parameters will be left intact.
- Use `path_join` to join path components.
- Use `shell_join` to join already-quoted command arguments or shell lines.
"""
import os
from collections import namedtuple
# pyre-fixme[13] This is too magical for Pyre.
class ShellQuoted(namedtuple("ShellQuoted", ("do_not_use_raw_str",))):
"""
Wrap a string with this to make it transparent to shell_quote(). It
will almost always suffice to use ShellQuoted.format(), path_join(),
or shell_join().
If you really must, use raw_shell() to access the raw string.
"""
def __new__(cls, s):
"No need to nest ShellQuoted."
return super(ShellQuoted, cls).__new__(
cls, s.do_not_use_raw_str if isinstance(s, ShellQuoted) else s
)
def __str__(self):
raise RuntimeError(
"One does not simply convert {0} to a string -- use path_join() "
"or ShellQuoted.format() instead".format(repr(self))
)
def __repr__(self) -> str:
return "{0}({1})".format(self.__class__.__name__, repr(self.do_not_use_raw_str))
def format(self, **kwargs) -> "ShellQuoted":
"""
Use instead of str.format() when the arguments are either
`ShellQuoted()` or raw strings needing to be `shell_quote()`d.
Positional args are deliberately not supported since they are more
error-prone.
"""
return ShellQuoted(
self.do_not_use_raw_str.format(
**dict(
(k, shell_quote(v).do_not_use_raw_str) for k, v in kwargs.items()
)
)
)
def shell_quote(s) -> ShellQuoted:
"Quotes a string if it is not already quoted"
return (
s
if isinstance(s, ShellQuoted)
else ShellQuoted("'" + str(s).replace("'", "'\\''") + "'")
)
def raw_shell(s: ShellQuoted):
"Not a member of ShellQuoted so we get a useful error for raw strings"
if isinstance(s, ShellQuoted):
return s.do_not_use_raw_str
raise RuntimeError("{0} should have been ShellQuoted".format(s))
def shell_join(delim, it) -> ShellQuoted:
"Joins an iterable of ShellQuoted with a delimiter between each two"
return ShellQuoted(delim.join(raw_shell(s) for s in it))
def path_join(*args) -> ShellQuoted:
"Joins ShellQuoted and raw pieces of paths to make a shell-quoted path"
return ShellQuoted(os.path.join(*[raw_shell(shell_quote(s)) for s in args]))
def shell_comment(c: ShellQuoted) -> ShellQuoted:
"Do not shell-escape raw strings in comments, but do handle line breaks."
return ShellQuoted("# {c}").format(
c=ShellQuoted(
(raw_shell(c) if isinstance(c, ShellQuoted) else c).replace("\n", "\n# ")
)
)
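# Illustrative usage sketch (not part of the original module); the command and
# paths below are made-up examples, shown only to exercise the helpers above.
if __name__ == "__main__":
    cmd = ShellQuoted("rm -rf {path}").format(path="dir with spaces")
    print(raw_shell(cmd))  # rm -rf 'dir with spaces'
    line = shell_join(" && ", [ShellQuoted("cd /tmp"), cmd])
    print(raw_shell(line))  # cd /tmp && rm -rf 'dir with spaces'
    print(raw_shell(path_join("/opt", "my app", "bin")))  # '/opt'/'my app'/'bin'
    print(raw_shell(shell_comment(ShellQuoted("generated\nby fbcode_builder"))))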
| [
[
[
702,
704
],
[
2955,
2957
]
],
[
[
729,
739
],
[
807,
817
]
],
[
[
795,
806
],
[
1198,
1209
],
[
1280,
1291
],
[
1961,
1972
],
[
2194,
2205
],
[
2305,
2316
],
[
2331,
2342
],
[
2410,
2421
],
[
2520,
2531
],
[
2670,
2681
],
[
2767,
2778
],
[
2843,
2854
],
[
2943,
2954
],
[
3052,
3063
],
[
3036,
3047
],
[
3154,
3165
],
[
3193,
3204
],
[
3249,
3260
]
],
[
[
2176,
2187
],
[
2066,
2077
],
[
2980,
2991
]
],
[
[
2397,
2406
],
[
2790,
2799
],
[
2970,
2979
],
[
3219,
3228
]
],
[
[
2645,
2655
]
],
[
[
2823,
2832
]
],
[
[
3019,
3032
]
]
] |
"""Assortment of utilities.
"""
from datetime import datetime
from hashlib import sha256, md5
import random
import re
import string
import unicodedata
epoch = datetime.utcfromtimestamp(0)
def unix_time(dt=None):
"""Returns the UNIX time representation for a given date or current UTC."""
dt = dt or datetime.utcnow()
return int((dt - epoch).total_seconds())
def unix_time_millis(dt=None):
"""Returns the UNIX time representation for a given date or current UTC."""
dt = dt or datetime.utcnow()
return int((dt - epoch).total_seconds() * 1000)
def from_iso_to_unix(iso_dt):
"""Converts from ISO format (e.g., "1984-06-02T19:05:00.000Z") to UNIX.
"""
if isinstance(iso_dt, int):
return iso_dt # Already UNIX.
try:
dt = datetime.strptime(iso_dt, '%Y-%m-%dT%H:%M:%S.%fZ')
except ValueError:
dt = datetime.strptime(iso_dt, '%Y-%m-%dT%H:%M:%SZ')
return unix_time(dt)
def from_unix_to_iso(dt=None):
"""Returns the ISO representation for a given date or current UTC."""
dt = int(dt) if dt is not None else unix_time()
return datetime.utcfromtimestamp(dt).strftime('%Y-%m-%dT%H:%M:%SZ')
def datetime_from_unix_time(unix_dt):
"""Returns a datetime object."""
return datetime.utcfromtimestamp(unix_dt)
def datetime_from_iso(iso_dt):
"""Returns a datetime object."""
return datetime.strptime(iso_dt, '%Y-%m-%dT%H:%M:%SZ')
def password_hash(password, salt):
if salt:
return sha256(password + salt).hexdigest()
else:
return md5(password).hexdigest()[::-1]
def random_string(size):
return ''.join(random.SystemRandom().choice(
string.ascii_letters + string.digits) for _ in range(size))
def remove_accents(input_str):
"""Replaces accented chars by their ascii equivalents."""
nfkd_form = unicodedata.normalize('NFKD', input_str)
return u"".join([c for c in nfkd_form if not unicodedata.combining(c)])
def make_url_friendly(input_str):
"""Returns a URL-friedly version of input_str.
Removes non-word chars, replaces accented ones and makes it lowercase.
"""
if input_str is None:
return None
return re.sub(r'[\W\\/_]+', '-', remove_accents(input_str)).lower()
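# Illustrative usage sketch (not part of the original module); the literals below
# are made-up examples for the helpers defined above.
if __name__ == "__main__":
    print(from_iso_to_unix("1984-06-02T19:05:00.000Z"))  # 455051100
    print(from_unix_to_iso(455051100))  # 1984-06-02T19:05:00Z
    print(make_url_friendly("Déjà Vu / Remix"))  # deja-vu-remix
    print(len(random_string(8)))  # 8 random letters/digits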
| [
[
[
54,
62
],
[
163,
171
],
[
313,
321
],
[
504,
512
],
[
788,
796
],
[
875,
883
],
[
1118,
1126
],
[
1267,
1275
],
[
1383,
1391
]
],
[
[
83,
89
],
[
1496,
1502
]
],
[
[
91,
94
],
[
1557,
1560
]
],
[
[
103,
109
],
[
1635,
1641
]
],
[
[
117,
119
],
[
2189,
2191
]
],
[
[
127,
133
],
[
1673,
1679
],
[
1696,
1702
]
],
[
[
141,
152
],
[
1844,
1855
],
[
1934,
1945
]
],
[
[
155,
160
],
[
352,
357
],
[
543,
548
]
],
[
[
198,
207
],
[
934,
943
],
[
1095,
1104
]
],
[
[
382,
398
]
],
[
[
580,
596
]
],
[
[
954,
970
]
],
[
[
1185,
1208
]
],
[
[
1308,
1325
]
],
[
[
1437,
1450
]
],
[
[
1595,
1608
]
],
[
[
1739,
1753
],
[
2215,
2229
]
],
[
[
1967,
1984
]
]
] |
"""Copyright (c) 2021, Deep Net. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
EPSILON = 1e-20
| [
[
[
582,
589
]
]
] |
# exported from PySB model 'model'
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU', 'C3pro'])
Monomer('SmacM', ['BaxA'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('SmacC', ['Xiap'])
Monomer('ParpC')
Monomer('Xiap', ['SmacC', 'Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd', 'C6A'])
Monomer('C6A', ['C8pro'])
Monomer('C3pro', ['Apop', 'C8A'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
Monomer('BaxA', ['BaxM', 'BaxA_1', 'BaxA_2', 'SmacM', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU', 'C6pro'])
Monomer('ApafA')
Monomer('BidM', ['BaxM'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Monomer('C6pro', ['C3A'])
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0)
Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('SmacM_0', 100000.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('SmacC_0', 0.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 153750.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('C6A_0', 0.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Parameter('C6pro_0', 100.0)
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('SmacM_obs', SmacM())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('SmacC_obs', SmacC())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('C6A_obs', C6A())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Observable('C6pro_obs', C6pro())
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, Apop=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, Apop=None, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None, C8A=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(SmacC=None, Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(SmacC=None, Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(Apop=None, C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr)
Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc)
Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr)
Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc)
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None, C3pro=None), C8A_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(SmacC=None, Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None, C6A=None), C8pro_0)
Initial(C6A(C8pro=None), C6A_0)
Initial(C3pro(Apop=None, C8A=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
Initial(C6pro(C3A=None), C6pro_0)
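# A hedged usage sketch (not part of the generated model): once a PySB model like the
# one above is defined, it is typically exercised with an ODE simulator. The names below
# assume the module-level `model` object that PySB's self-exporter creates, and that
# numpy and pysb are installed; the time span is chosen arbitrarily for illustration.
import numpy as np
from pysb.simulator import ScipyOdeSimulator

tspan = np.linspace(0, 20000, 101)              # seconds
result = ScipyOdeSimulator(model, tspan).run()  # integrate the model's ODEs
print(result.species[-1])                       # species amounts at the final time point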
| [
[
[
53,
58
],
[
166,
171
]
],
[
[
60,
67
],
[
175,
182
],
[
207,
214
],
[
233,
240
],
[
267,
274
],
[
294,
301
],
[
328,
335
],
[
363,
370
],
[
402,
409
],
[
429,
436
],
[
446,
453
],
[
488,
495
],
[
502,
509
],
[
518,
525
],
[
552,
559
],
[
578,
585
],
[
612,
619
],
[
640,
647
],
[
658,
665
],
[
723,
730
],
[
740,
747
],
[
765,
772
],
[
781,
788
],
[
824,
831
],
[
841,
848
],
[
867,
874
],
[
907,
914
]
],
[
[
69,
78
],
[
934,
943
],
[
1000,
1009
],
[
1066,
1075
],
[
1130,
1139
],
[
1194,
1203
],
[
1267,
1276
],
[
1340,
1349
],
[
1422,
1431
],
[
1498,
1507
],
[
1574,
1583
],
[
1650,
1659
],
[
1732,
1741
],
[
1814,
1823
],
[
1881,
1890
],
[
1948,
1957
],
[
2025,
2034
],
[
2102,
2111
],
[
2179,
2188
],
[
2256,
2265
],
[
2333,
2342
],
[
2399,
2408
],
[
2465,
2474
],
[
2541,
2550
],
[
2617,
2626
],
[
2693,
2702
],
[
2771,
2780
],
[
2849,
2858
],
[
2927,
2936
],
[
2991,
3000
],
[
3055,
3064
],
[
3132,
3141
],
[
3209,
3218
],
[
3286,
3295
],
[
3364,
3373
],
[
3442,
3451
],
[
3520,
3529
],
[
3569,
3578
],
[
3618,
3627
],
[
3667,
3676
],
[
3716,
3725
],
[
3765,
3774
],
[
3814,
3823
],
[
3886,
3895
],
[
3958,
3967
],
[
4030,
4039
],
[
4104,
4113
],
[
4178,
4187
],
[
4252,
4261
],
[
4328,
4337
],
[
4404,
4413
],
[
4480,
4489
],
[
4556,
4565
],
[
4632,
4641
],
[
4708,
4717
],
[
4784,
4793
],
[
4860,
4869
],
[
4936,
4945
],
[
4966,
4975
],
[
4998,
5007
],
[
5022,
5031
],
[
5053,
5062
],
[
5082,
5091
],
[
5107,
5116
],
[
5137,
5146
],
[
5163,
5172
],
[
5189,
5198
],
[
5219,
5228
],
[
5247,
5256
],
[
5272,
5281
],
[
5303,
5312
],
[
5327,
5336
],
[
5357,
5366
],
[
5389,
5398
],
[
5416,
5425
],
[
5441,
5450
],
[
5472,
5481
],
[
5502,
5511
],
[
5527,
5536
],
[
5551,
5560
],
[
5577,
5586
],
[
5602,
5611
],
[
5633,
5642
]
],
[
[
80,
90
]
],
[
[
92,
103
]
],
[
[
105,
109
],
[
6497,
6501
],
[
6758,
6762
],
[
7031,
7035
],
[
7319,
7323
],
[
7570,
7574
],
[
7835,
7839
],
[
8034,
8038
],
[
8262,
8266
],
[
8526,
8530
],
[
8755,
8759
],
[
9049,
9053
],
[
9296,
9300
],
[
9579,
9583
],
[
9914,
9918
],
[
10159,
10163
],
[
10456,
10460
],
[
10685,
10689
],
[
10855,
10859
],
[
11125,
11129
],
[
11376,
11380
],
[
11751,
11755
],
[
12106,
12110
],
[
12474,
12478
],
[
12962,
12966
],
[
13570,
13574
],
[
14273,
14277
],
[
14922,
14926
],
[
15633,
15637
],
[
16279,
16283
],
[
16568,
16572
],
[
16811,
16815
],
[
17102,
17106
],
[
17335,
17339
],
[
17602,
17606
]
],
[
[
111,
121
],
[
5662,
5672
],
[
5697,
5707
],
[
5730,
5740
],
[
5759,
5769
],
[
5792,
5802
],
[
5823,
5833
],
[
5854,
5864
],
[
5885,
5895
],
[
5918,
5928
],
[
5951,
5961
],
[
5982,
5992
],
[
6009,
6019
],
[
6040,
6050
],
[
6073,
6083
],
[
6102,
6112
],
[
6135,
6145
],
[
6170,
6180
],
[
6205,
6215
],
[
6236,
6246
],
[
6269,
6279
],
[
6300,
6310
],
[
6331,
6341
],
[
6360,
6370
],
[
6393,
6403
],
[
6424,
6434
],
[
6463,
6473
]
],
[
[
123,
130
],
[
17812,
17819
],
[
17853,
17860
],
[
17887,
17894
],
[
17930,
17937
],
[
17965,
17972
],
[
18009,
18016
],
[
18054,
18061
],
[
18103,
18110
],
[
18138,
18145
],
[
18164,
18171
],
[
18219,
18226
],
[
18239,
18246
],
[
18263,
18270
],
[
18308,
18315
],
[
18340,
18347
],
[
18385,
18392
],
[
18422,
18429
],
[
18450,
18457
],
[
18534,
18541
],
[
18560,
18567
],
[
18592,
18599
],
[
18616,
18623
],
[
18671,
18678
],
[
18697,
18704
],
[
18730,
18737
],
[
18784,
18791
]
],
[
[
132,
141
]
],
[
[
143,
153
]
],
[
[
155,
158
],
[
6824,
6827
],
[
6892,
6895
],
[
7104,
7107
],
[
7166,
7169
],
[
7401,
7404
],
[
7458,
7461
]
],
[
[
160,
164
]
]
] |
from __future__ import annotations
import typing as t
from functools import singledispatch
from inflection import underscore
from sqlalchemy import Date
from sqlalchemy import DateTime
from sqlalchemy import Text
from sqlalchemy import Time
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
from sqlalchemy.orm import RelationshipProperty
from sqlalchemy.types import Boolean
from sqlalchemy.types import Float
from sqlalchemy.types import Integer
from sqlalchemy.types import Numeric
from sqlalchemy.types import String
from sqlalchemy.types import VARCHAR
from sqlalchemy_utils import EmailType
from sqlalchemy_utils import get_mapper
from sqlalchemy_utils import JSONType
from sqlalchemy_utils import PhoneNumberType
from sqlalchemy_utils import URLType
from sqlalchemy_utils.types import ChoiceType
from .definitions import MagqlEnumType
from .definitions import MagqlInputField
from .definitions import MagqlInputObjectType
StringFilter = MagqlInputObjectType(
"StringFilter",
{
"operator": MagqlInputField(
MagqlEnumType(
"StringOperator",
{
"INCLUDES": "INCLUDES",
"EQUALS": "EQUALS",
"EXISTS": "EXISTS",
"DOESNOTEXIST": "DOESNOTEXIST",
},
)
),
"value": MagqlInputField("String"),
},
)
DateFilter = MagqlInputObjectType(
"DateFilter",
{
"operator": MagqlInputField(
MagqlEnumType(
"DateOperator", {"BEFORE": "BEFORE", "ON": "ON", "AFTER": "AFTER"}
)
),
"value": MagqlInputField("String"),
},
)
IntFilter = MagqlInputObjectType(
"IntFilter",
{
"operator": MagqlInputField(
MagqlEnumType(
"IntOperator",
{
"lt": "lt",
"lte": "lte",
"eq": "eq",
"neq": "neq",
"gt": "gt",
"gte": "gte",
},
)
),
"value": MagqlInputField("Int"),
},
)
FloatFilter = MagqlInputObjectType(
"FloatFilter",
{
"operator": MagqlInputField(
MagqlEnumType(
"FloatOperator",
{
"lt": "lt",
"lte": "lte",
"eq": "eq",
"neq": "neq",
"gt": "gt",
"gte": "gte",
},
)
),
"value": MagqlInputField("Float"),
},
)
RelFilter = MagqlInputObjectType(
"RelFilter",
{
"operator": MagqlInputField(
MagqlEnumType("RelOperator", {"INCLUDES": "INCLUDES"})
),
"value": MagqlInputField("Int"),
},
)
BooleanFilter = MagqlInputObjectType(
"BooleanFilter",
{
"operator": MagqlInputField(
MagqlEnumType(
"BooleanOperator", {"EQUALS": "EQUALS", "NOTEQUALS": "NOTEQUALS"}
)
),
"value": MagqlInputField("Boolean"),
},
)
EnumOperator = MagqlEnumType("EnumOperator", {"INCLUDES": "INCLUDES"})
def EnumFilter(base_type: t.Any) -> MagqlInputObjectType:
name = base_type.name + "Filter"
input_ = {
"operator": MagqlInputField(EnumOperator),
"value": MagqlInputField(base_type),
}
return MagqlInputObjectType(name, input_)
@singledispatch
def get_filter_comparator(type: t.Any) -> t.Any:
raise TypeError(f"No comparator registered for {type.__class__.__name__!r}.")
@get_filter_comparator.register(RelationshipProperty)
def _get_relationship_comparator(rel: RelationshipProperty) -> t.Optional[t.Callable]:
direction = rel.direction.name
if "TOONE" in direction:
def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
if filter_operator == "INCLUDES":
return field == filter_value
return None
return condition
elif "TOMANY" in direction:
def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
if filter_operator == "INCLUDES":
return field.any(field.contains(filter_value))
return None
return condition
return None
@get_filter_comparator.register(DateTime)
@get_filter_comparator.register(Date)
def _get_date_comparator(_: t.Union[DateTime, Date]) -> t.Callable:
def condition(
filter_value: t.Union[DateTime, Date],
filter_operator: str,
field: t.Union[DateTime, Date],
) -> t.Any:
if filter_operator == "BEFORE":
return field < filter_value
elif filter_operator == "ON":
return field == filter_value
        elif filter_operator == "AFTER":
return field > filter_value
return None
return condition
@get_filter_comparator.register(JSONType)
@get_filter_comparator.register(Text)
@get_filter_comparator.register(UnicodeText)
@get_filter_comparator.register(Unicode)
@get_filter_comparator.register(URLType)
@get_filter_comparator.register(PhoneNumberType)
@get_filter_comparator.register(EmailType)
@get_filter_comparator.register(Time)
@get_filter_comparator.register(String)
@get_filter_comparator.register(VARCHAR)
def _get_string_comparator(_: t.Any) -> t.Callable:
def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
if filter_operator == "INCLUDES":
return field.like(f"%{filter_value}%")
elif filter_operator == "EQUALS":
return field == filter_value
elif filter_operator == "EXISTS":
return field.like("%")
elif filter_operator == "DOESNOTEXIST":
return field.is_(None)
return condition
@get_filter_comparator.register(Float)
@get_filter_comparator.register(Numeric)
@get_filter_comparator.register(Integer)
def _get_number_comparator(_: t.Any) -> t.Callable:
def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
if filter_operator == "lt":
return field < filter_value
elif filter_operator == "lte":
return field <= filter_value
elif filter_operator == "eq":
return field == filter_value
elif filter_operator == "neq":
return field != filter_value
elif filter_operator == "gt":
return field > filter_value
elif filter_operator == "gte":
return field >= filter_value
return condition
@get_filter_comparator.register(Boolean)
def _get_boolean_comparator(_: t.Any) -> t.Callable:
def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
if filter_operator == "EQUALS":
return field == filter_value
elif filter_operator == "NOTEQUALS":
return field != filter_value
return condition
@get_filter_comparator.register(ChoiceType)
def _get_choice_comparator(_: t.Any) -> t.Callable:
def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
if filter_operator == "INCLUDES":
return field == filter_value
return condition
def generate_filters(table: t.Any, info: t.Any, *args: t.Any, **kwargs: t.Any) -> t.Any:
sqla_filters = []
if "filter" in kwargs and kwargs["filter"] is not None:
mapper = get_mapper(table)
gql_filters = kwargs["filter"]
for filter_name, gql_filter in gql_filters.items():
gql_filter_value = gql_filter["value"]
filter_name = underscore(filter_name)
if filter_name in table.c:
filter_type = table.c[filter_name].type
elif filter_name in mapper.relationships:
rel = mapper.relationships[filter_name]
rel_mapper = get_mapper(rel.target)
gql_filter_value = (
info.context.query(rel_mapper.class_)
.filter_by(id=gql_filter_value)
.one()
)
filter_type = rel
else:
raise KeyError(filter_name)
sql_filter = get_filter_comparator(filter_type)(
gql_filter_value,
gql_filter["operator"],
getattr(mapper.class_, filter_name),
)
sqla_filters.append(sql_filter)
return sqla_filters
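# A hedged usage sketch (the table and column names below are illustrative and not part
# of this module): the singledispatch comparator above turns a GraphQL-style filter into
# a SQLAlchemy expression based on the column's type.
if __name__ == "__main__":
    from sqlalchemy import Column, Integer, MetaData, String, Table

    metadata = MetaData()
    users = Table(
        "users",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String),
    )
    # String columns dispatch to _get_string_comparator; "INCLUDES" becomes a LIKE clause.
    comparator = get_filter_comparator(users.c.name.type)
    expression = comparator("ada", "INCLUDES", users.c.name)
    print(expression)  # users.name LIKE :name_1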
| [
[
[
23,
34
]
],
[
[
43,
54
],
[
3237,
3238
],
[
3530,
3531
],
[
3520,
3521
],
[
3738,
3739
],
[
3749,
3750
],
[
4492,
4493
],
[
4464,
4465
],
[
5398,
5399
],
[
5388,
5389
],
[
6016,
6017
],
[
6006,
6007
],
[
6692,
6693
],
[
6682,
6683
],
[
7064,
7065
],
[
7054,
7055
],
[
7350,
7351
],
[
7296,
7297
],
[
7309,
7310
],
[
7323,
7324
],
[
7340,
7341
],
[
3909,
3910
],
[
3863,
3864
],
[
3899,
3900
],
[
4172,
4173
],
[
4126,
4127
],
[
4162,
4163
],
[
4649,
4650
],
[
4545,
4546
],
[
4615,
4616
],
[
5488,
5489
],
[
5442,
5443
],
[
5478,
5479
],
[
6106,
6107
],
[
6060,
6061
],
[
6096,
6097
],
[
6782,
6783
],
[
6736,
6737
],
[
6772,
6773
],
[
7154,
7155
],
[
7108,
7109
],
[
7144,
7145
]
],
[
[
77,
91
],
[
3473,
3487
]
],
[
[
116,
126
],
[
7650,
7660
]
],
[
[
150,
154
],
[
4430,
4434
],
[
4482,
4486
],
[
4563,
4567
],
[
4633,
4637
]
],
[
[
178,
186
],
[
4388,
4396
],
[
4472,
4480
],
[
4553,
4561
],
[
4623,
4631
]
],
[
[
210,
214
],
[
5014,
5018
]
],
[
[
238,
242
],
[
5271,
5275
]
],
[
[
266,
273
],
[
5097,
5104
]
],
[
[
297,
308
],
[
5052,
5063
]
],
[
[
336,
356
],
[
3653,
3673
],
[
3713,
3733
]
],
[
[
386,
393
],
[
6642,
6649
]
],
[
[
423,
428
],
[
5887,
5892
]
],
[
[
458,
465
],
[
5967,
5974
]
],
[
[
495,
502
],
[
5926,
5933
]
],
[
[
532,
538
],
[
5309,
5315
]
],
[
[
568,
575
],
[
5349,
5356
]
],
[
[
605,
614
],
[
5228,
5237
]
],
[
[
644,
654
],
[
7456,
7466
],
[
7908,
7918
]
],
[
[
684,
692
],
[
4972,
4980
]
],
[
[
722,
737
],
[
5179,
5194
]
],
[
[
767,
774
],
[
5138,
5145
]
],
[
[
810,
820
],
[
7012,
7022
]
],
[
[
847,
860
],
[
1061,
1074
],
[
1511,
1524
],
[
1795,
1808
],
[
2262,
2275
],
[
2729,
2742
],
[
2961,
2974
],
[
3153,
3166
]
],
[
[
886,
901
],
[
1032,
1047
],
[
1365,
1380
],
[
1482,
1497
],
[
1651,
1666
],
[
1766,
1781
],
[
2118,
2133
],
[
2233,
2248
],
[
2587,
2602
],
[
2700,
2715
],
[
2812,
2827
],
[
2932,
2947
],
[
3100,
3115
],
[
3342,
3357
],
[
3390,
3405
]
],
[
[
927,
947
],
[
964,
984
],
[
1416,
1436
],
[
1701,
1721
],
[
2166,
2186
],
[
2635,
2655
],
[
2863,
2883
],
[
3247,
3267
],
[
3435,
3455
]
],
[
[
949,
961
]
],
[
[
1403,
1413
]
],
[
[
1689,
1698
]
],
[
[
2152,
2163
]
],
[
[
2623,
2632
]
],
[
[
2847,
2860
]
],
[
[
3138,
3150
],
[
3358,
3370
]
],
[
[
3215,
3225
]
],
[
[
3492,
3513
],
[
3622,
3643
],
[
4357,
4378
],
[
4399,
4420
],
[
4941,
4962
],
[
4983,
5004
],
[
5021,
5042
],
[
5066,
5087
],
[
5107,
5128
],
[
5148,
5169
],
[
5197,
5218
],
[
5240,
5261
],
[
5278,
5299
],
[
5318,
5339
],
[
5856,
5877
],
[
5895,
5916
],
[
5936,
5957
],
[
6611,
6632
],
[
6981,
7002
],
[
8245,
8266
]
],
[
[
3679,
3707
]
],
[
[
4440,
4460
]
],
[
[
5362,
5384
]
],
[
[
5980,
6002
]
],
[
[
6655,
6678
]
],
[
[
7028,
7050
]
],
[
[
7272,
7288
]
]
] |
from .__main__ import create_app
application = create_app()
| [
[
[
22,
32
],
[
48,
58
]
],
[
[
34,
45
]
]
] |
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""comm_helper"""
from mindspore.parallel._ps_context import _is_role_pserver, _is_role_sched
from ._hccl_management import load_lib as hccl_load_lib
_HCCL_AVAILABLE = False
_NCCL_AVAILABLE = False
try:
import mindspore._ms_mpi as mpi
_NCCL_AVAILABLE = True
except ImportError:
_NCCL_AVAILABLE = False
try:
hccl_load_lib()
_HCCL_AVAILABLE = True
except RuntimeError:
_HCCL_AVAILABLE = False
if _HCCL_AVAILABLE:
from . import _hccl_management as hccl
else:
try:
import hccl_test.manage.api as hccl
_HCCL_AVAILABLE = True
except ImportError:
_HCCL_AVAILABLE = False
HCCL_WORLD_COMM_GROUP = "hccl_world_group"
NCCL_WORLD_COMM_GROUP = "nccl_world_group"
class Backend:
"""
Class for available backends.
Note:
The backends' value should be string, e.g., "hccl".
        If backend is set to Backend.UNDEFINED, it will be seen as invalid.
Args:
name (str): The name of backend.
Raises:
TypeError: If name is not a string.
ValueError: If backend is invalid.
Examples:
>>> Backend("abc")
>>> hccl = Backend("hccl")
"""
UNDEFINED = "undefined"
HCCL = "hccl"
NCCL = "nccl"
def __new__(cls, name):
"""Create instance object of Backend."""
if not isinstance(name, str):
raise TypeError("Backend name must be a string, but got {}".format(type(name)))
value = getattr(Backend, name.upper(), Backend.UNDEFINED)
if value == Backend.UNDEFINED:
raise ValueError("Invalid backend: '{}'".format(name))
return value
def is_hccl_available():
"""
Check hccl api is available.
Returns:
Boolean. Return whether hccl is available or not.
"""
return _HCCL_AVAILABLE
def is_nccl_available():
"""
Check nccl api is available.
Returns:
Boolean. Return whether nccl is available or not.
"""
return _NCCL_AVAILABLE
def check_parameter_available(func):
"""
Check parameter is available. If not available, raise Error.
Args:
func (Function): The function to be run.
Raises:
RuntimeError.
Returns:
Wrapper. If not available, raise Error.
"""
def wrapper(*args, **kargs):
if _is_role_pserver() or _is_role_sched():
return func(*args, **kargs)
group = None
if "group" in kargs.keys():
group = kargs.get("group")
if group is not None and not isinstance(group, str):
raise TypeError("Group should be str or None, "
"but got group {}".format(type(group)))
if "backend" in kargs.keys():
backend = kargs.get("backend")
if backend is Backend.HCCL and not is_hccl_available():
raise RuntimeError("Distributed Communication doesn't have HCCL built in")
if backend is Backend.NCCL and not is_nccl_available():
raise RuntimeError("Distributed Communication doesn't have NCCL built in")
if group is None:
if backend is Backend.HCCL:
group = HCCL_WORLD_COMM_GROUP
elif backend is Backend.NCCL:
group = NCCL_WORLD_COMM_GROUP
return func(*args, **kargs)
return wrapper
@check_parameter_available
def _get_rank_helper(group, backend):
"""
The Helper to do get_rank_id.
Args:
group (str): The communication group.
backend (str): The backend, like "hccl".
Raises:
ValueError: If backend is invalid.
Returns:
Integer. The local rank id of the calling process.
"""
rank_id = None
if _is_role_pserver() or _is_role_sched():
rank_id = 0
return rank_id
if backend == Backend.HCCL:
if group == HCCL_WORLD_COMM_GROUP:
rank_id = hccl.get_rank_id()
else:
rank_id = hccl.get_rank_id(group)
elif backend == Backend.NCCL:
rank_id = mpi.get_rank_id(group)
else:
raise ValueError("Invalid backend: '{}'".format(backend))
return rank_id
@check_parameter_available
def _get_local_rank_helper(group, backend):
"""
The Helper to do get_local_rank_id.
Args:
group (str): The communication group.
backend (str): The backend, like "hccl".
Raises:
ValueError: If backend is invalid.
Returns:
Integer. The local rank id of the calling process.
"""
rank_id = None
if backend == Backend.HCCL:
if group == HCCL_WORLD_COMM_GROUP:
rank_id = hccl.get_local_rank_id()
else:
rank_id = hccl.get_local_rank_id(group)
elif backend == Backend.NCCL:
raise RuntimeError("Nccl doesn't support get_local_rank_id now.")
else:
raise ValueError("Invalid backend: '{}'".format(backend))
return rank_id
@check_parameter_available
def _get_size_helper(group, backend):
"""
The Helper to do get_rank_size.
Args:
group (str): The communication group.
backend (str): The backend, like "hccl".
Raises:
ValueError: If backend is invalid.
Returns:
Integer. The rank size of specified group.
"""
size = None
if _is_role_pserver() or _is_role_sched():
size = 1
return size
if backend == Backend.HCCL:
if group == HCCL_WORLD_COMM_GROUP:
size = hccl.get_rank_size()
else:
size = hccl.get_rank_size(group)
elif backend == Backend.NCCL:
size = mpi.get_rank_size(group)
else:
raise ValueError("Invalid backend: '{}'".format(backend))
return size
@check_parameter_available
def _get_local_size_helper(group, backend):
"""
The Helper to do get_local_rank_size.
Args:
group (str): The communication group.
backend (str): The backend, like "hccl".
Raises:
ValueError: If backend is invalid.
Returns:
Integer. The local rank size where the calling process is being within specified group.
"""
size = None
if backend == Backend.HCCL:
if group == HCCL_WORLD_COMM_GROUP:
size = hccl.get_local_rank_size()
else:
size = hccl.get_local_rank_size(group)
elif backend == Backend.NCCL:
raise RuntimeError("Nccl doesn't support get_local_rank_size now.")
else:
raise ValueError("Invalid backend: '{}'".format(backend))
return size
@check_parameter_available
def _get_world_rank_from_group_rank_helper(group, group_rank_id, backend):
"""
The Helper to do get_world_rank_from_group_rank.
Args:
group (str): The user communication group.
group_rank_id (int): A rank id in user communication group.
backend (str): The backend, like "hccl".
Raises:
TypeError: If group_rank_id is not int.
ValueError: If group is "hccl_world_group" or backend is invalid.
Returns:
Integer. A rank id in world communication group.
"""
world_rank_id = None
if not isinstance(group_rank_id, int):
raise TypeError("group_rank_id should be int, but got type {}".format(type(group_rank_id)))
if backend == Backend.HCCL:
if group == HCCL_WORLD_COMM_GROUP:
raise ValueError("Group cannot be 'hccl_world_group'. ")
world_rank_id = hccl.get_world_rank_from_group_rank(group, group_rank_id)
elif backend == Backend.NCCL:
raise RuntimeError("Nccl doesn't support get_world_rank_from_group_rank now.")
else:
raise ValueError("Invalid backend: '{}'".format(backend))
return world_rank_id
@check_parameter_available
def _get_group_rank_from_world_rank_helper(world_rank_id, group, backend):
"""
The Helper to do get_group_rank_from_world_rank.
Args:
world_rank_id (int): A rank id in world communication group.
group (str): The user communication group.
backend (str): The backend, like "hccl".
Raises:
TypeError: If world_rank_id is not int.
ValueError: If group is 'hccl_world_group' or backend is invalid.
Returns:
Integer. A rank id in user communication group.
"""
group_rank_id = None
if not isinstance(world_rank_id, int):
raise TypeError("world_rank_id should be int, but got type {}".format(type(world_rank_id)))
if backend == Backend.HCCL:
if group == HCCL_WORLD_COMM_GROUP:
raise ValueError("Group cannot be 'hccl_world_group'. ")
group_rank_id = hccl.get_group_rank_from_world_rank(world_rank_id, group)
elif backend == Backend.NCCL:
raise RuntimeError("Nccl doesn't support get_group_rank_from_world_rank now.")
else:
raise ValueError("Invalid backend: '{}'".format(backend))
return group_rank_id
@check_parameter_available
def _create_group_helper(group, rank_ids, backend):
"""
The Helper to do create_group.
Args:
group (str): The communication group.
rank_ids (list): Rank ids in the group.
backend (str): The backend, like "hccl".
Raises:
TypeError: If rank_ids is not a list.
        ValueError: If rank_ids size is smaller than 1, rank_ids has duplicate data, or backend is invalid.
"""
if backend == Backend.HCCL:
if not isinstance(rank_ids, list):
raise TypeError("Rank_ids {} should be list".format(rank_ids))
rank_size = len(rank_ids)
if rank_size < 1:
            raise ValueError("Rank_ids size {} should be larger than 0".format(rank_size))
if len(rank_ids) - len(list(set(rank_ids))) > 0:
raise ValueError("List rank_ids in Group {} has duplicate data!".format(group))
hccl.create_group(group, rank_size, rank_ids)
elif backend == Backend.NCCL:
raise RuntimeError("Nccl doesn't support create_group now.")
else:
raise ValueError("Invalid backend: '{}'".format(backend))
@check_parameter_available
def _destroy_group_helper(group, backend):
"""
The Helper to do destroy_group.
Args:
group (str): The user communication group.
backend (str): The backend, like "hccl".
Raises:
ValueError: If group is "hccl_world_group" or backend is invalid.
"""
if backend == Backend.HCCL:
if group == HCCL_WORLD_COMM_GROUP:
raise ValueError("The hccl_world_group does not support destruction.")
hccl.destroy_group(group)
elif backend == Backend.NCCL:
raise RuntimeError("Nccl doesn't support destroy_group now.")
else:
raise ValueError("Invalid backend: '{}'".format(backend))
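# A hedged usage sketch (illustrative only, assuming this module imports successfully):
# Backend() normalizes and validates a backend name, and the *_WORLD_COMM_GROUP constants
# above provide the default group used when no explicit group is passed to the helpers.
if __name__ == "__main__":
    backend = Backend("hccl")  # -> "hccl"; Backend("abc") would raise ValueError
    default_group = HCCL_WORLD_COMM_GROUP if backend == Backend.HCCL else NCCL_WORLD_COMM_GROUP
    print(backend, default_group, is_hccl_available(), is_nccl_available())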
| [
[
[
729,
745
],
[
4374,
4390
],
[
5946,
5962
],
[
2963,
2979
]
],
[
[
747,
761
],
[
4396,
4410
],
[
5968,
5982
],
[
2985,
2999
]
],
[
[
792,
817
],
[
994,
1007
]
],
[
[
819,
834
],
[
1090,
1105
],
[
2451,
2466
]
],
[
[
843,
858
],
[
2626,
2641
]
],
[
[
883,
907
],
[
4685,
4688
],
[
6246,
6249
]
],
[
[
912,
927
],
[
2626,
2641
]
],
[
[
959,
974
],
[
2626,
2641
]
],
[
[
1014,
1029
],
[
1090,
1105
],
[
2451,
2466
]
],
[
[
1062,
1077
],
[
1090,
1105
],
[
2451,
2466
]
],
[
[
1125,
1149
],
[
4554,
4558
],
[
4609,
4613
],
[
5283,
5287
],
[
5344,
5348
],
[
6117,
6121
],
[
6171,
6175
],
[
6876,
6880
],
[
6936,
6940
],
[
8064,
8068
],
[
9238,
9242
],
[
10432,
10436
],
[
11145,
11149
]
],
[
[
1180,
1208
],
[
4554,
4558
],
[
4609,
4613
],
[
5283,
5287
],
[
5344,
5348
],
[
6117,
6121
],
[
6171,
6175
],
[
6876,
6880
],
[
6936,
6940
],
[
8064,
8068
],
[
9238,
9242
],
[
10432,
10436
],
[
11145,
11149
]
],
[
[
1217,
1232
],
[
2451,
2466
]
],
[
[
1272,
1287
],
[
2451,
2466
]
],
[
[
1298,
1319
],
[
4509,
4530
],
[
5238,
5259
],
[
6075,
6096
],
[
6834,
6855
],
[
7948,
7969
],
[
9122,
9143
],
[
11031,
11052
],
[
3831,
3852
]
],
[
[
1341,
1362
],
[
3919,
3940
]
],
[
[
1391,
1398
],
[
2123,
2130
],
[
2146,
2153
],
[
2185,
2192
],
[
4475,
4482
],
[
4653,
4660
],
[
5204,
5211
],
[
5394,
5401
],
[
6041,
6048
],
[
6217,
6224
],
[
6800,
6807
],
[
6988,
6995
],
[
7914,
7921
],
[
8142,
8149
],
[
9088,
9095
],
[
9316,
9323
],
[
9993,
10000
],
[
10498,
10505
],
[
10997,
11004
],
[
11191,
11198
],
[
3448,
3455
],
[
3607,
3614
],
[
3793,
3800
],
[
3881,
3888
]
],
[
[
2298,
2315
],
[
3469,
3486
]
],
[
[
2473,
2490
],
[
3628,
3645
]
],
[
[
2648,
2673
],
[
3999,
4024
],
[
4806,
4831
],
[
5580,
5605
],
[
6366,
6391
],
[
7173,
7198
],
[
8347,
8372
],
[
9521,
9546
],
[
10660,
10685
]
],
[
[
4029,
4045
]
],
[
[
4836,
4858
]
],
[
[
5610,
5626
]
],
[
[
6396,
6418
]
],
[
[
7203,
7241
]
],
[
[
8377,
8415
]
],
[
[
9551,
9571
]
],
[
[
10690,
10711
]
]
] |
from features.arduino_features import BlackrockSerialDIORowByte, SerialDIORowByte
from riglib import experiment
class par(object):
def init(self):
pass
class F(BlackrockSerialDIORowByte, par):
pass
f = F()
f.init()
| [
[
[
38,
63
],
[
174,
199
]
],
[
[
65,
81
]
],
[
[
101,
111
]
],
[
[
119,
122
],
[
201,
204
]
],
[
[
172,
173
],
[
221,
222
]
],
[
[
217,
218
],
[
225,
226
]
]
] |
import logging
import chevron
import re
from itertools import dropwhile
import snex.util as util
logger = logging.getLogger(__name__)
DEFAULT = {
# :snippet global-default-config lang: python
"output_template": "```{{lang}}\n{{{snippet}}}\n```\n",
"valid_param_keys": ["name", "lang", "lnum", "fname", "path"],
"output_path": "snippets",
"line_prefix": "",
"comment_prefix": "# ",
"comment_suffix": "",
"snippet_start": ":snippet",
"snippet_end": ":endsnippet",
"cloak_start": ":cloak",
"cloak_end": ":endcloak",
"output_suffix": ".md",
# :endsnippet
}
class Snippet:
@classmethod
def from_raw_data(cls, params, data, origin=None, prefix=""):
(head,) = data[:1]
(tail,) = data[-1:]
body = data[1:-1]
idlvl = util.det_indent_lvl(body)
body = (line[idlvl:] for line in body)
body = dropwhile(util.is_empty, body)
body = util.dropwhile_right(util.is_empty, list(body))
body = (prefix + line for line in body)
return cls(params=params, head=head.lstrip(), body=list(body), tail=tail.lstrip(), origin=origin)
@property
def name(self):
return self.params["name"]
@property
def line_number(self):
return self.params.get("lnum", -1)
def __init__(self, params=None, head=None, body=None, tail=None, origin=None):
self.params = params
self.head = head
self.body = body
self.tail = tail
self.origin = origin
def __repr__(self):
return f"Snippet({self.params!r}, {self.head!r}, {self.body!r}, {self.tail!r}, {self.origin!r})"
def render_snippet(template, params, body):
return chevron.render(template, {**params, **{"snippet": "\n".join(body)}})
def get_configs(conf):
configs = conf["config"]
default = configs["default"] if "default" in configs else None
for name in [n for n in configs if not n == "default"]:
c = util.merge_with_default_conf(configs[name], default, global_default=DEFAULT)
# prevent defining a global name for all snippets in the config
c.pop("name", None)
yield (name, c)
def extract_from_path(f, conf, base_path):
comment_prefix = re.escape(conf["comment_prefix"])
comment_suffix = re.escape(conf["comment_suffix"])
line_prefix = conf["line_prefix"]
cloak_start = re.escape(conf["cloak_start"])
cloak_end = re.escape(conf["cloak_end"])
cloak_start_re = f"^\\s*{comment_prefix}{cloak_start}{comment_suffix}$"
cloak_end_re = f"^\\s*{comment_prefix}{cloak_end}{comment_suffix}$"
snippet_start = re.escape(conf["snippet_start"])
snippet_end = re.escape(conf["snippet_end"])
snippet_start_re = f"^\\s*{comment_prefix}{snippet_start}(.*){comment_suffix}$"
snippet_end_re = f"^\\s*{comment_prefix}{snippet_end}{comment_suffix}$"
snippets = []
in_snippet = False
cloaked = False
data = []
params = {}
for idx, line in util.read_path(f):
lnum = idx + 1
if re.search(cloak_end_re, line):
cloaked = False
continue
if re.search(cloak_start_re, line):
cloaked = True
if cloaked:
continue
if match := re.search(snippet_start_re, line):
try:
params = util.construct_params(match.group(1), f, base_path, lnum)
params = util.sanitize_params(params, conf["valid_param_keys"])
except Exception as ex:
logger.error(f"could not parse snippet params: {line} in file {f}:{lnum}")
raise ex
in_snippet = True
if not in_snippet:
continue
data.append(line.rstrip("\n"))
if re.search(snippet_end_re, line):
in_snippet = False
s = Snippet.from_raw_data(params, data, origin=f, prefix=line_prefix)
snippets.append(s)
data = []
params = {}
return snippets
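# A hedged example of the annotations this extractor looks for, using the default
# markers from DEFAULT above (the file content and snippet name below are hypothetical):
#
#     # :snippet greet lang: python
#     def greet(name):
#         return f"hello {name}"
#     # :endsnippet
#
# Given a config merged via get_configs(), extract_from_path(path, conf, base_path)
# would return one Snippet named "greet", which render_snippet() then fills into the
# "output_template" mustache template.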
| [
[
[
7,
14
],
[
107,
114
]
],
[
[
22,
29
],
[
1702,
1709
]
],
[
[
37,
39
],
[
2231,
2233
],
[
2286,
2288
],
[
2378,
2380
],
[
2425,
2427
],
[
2623,
2625
],
[
2674,
2676
],
[
3033,
3035
],
[
3124,
3126
],
[
3246,
3248
],
[
3742,
3744
]
],
[
[
62,
71
],
[
894,
903
]
],
[
[
79,
96
],
[
805,
809
],
[
904,
908
],
[
940,
944
],
[
961,
965
],
[
1964,
1968
],
[
2980,
2984
],
[
3323,
3327
],
[
3406,
3410
]
],
[
[
98,
104
],
[
3513,
3519
]
],
[
[
136,
143
],
[
2032,
2039
]
],
[
[
615,
622
],
[
3822,
3829
]
],
[
[
1651,
1665
]
],
[
[
1777,
1788
]
],
[
[
2171,
2188
]
]
] |
__all__ = ('modulize',)
import sys
from types import FunctionType, GetSetDescriptorType, MappingProxyType, ModuleType
from .docs import has_docs
NoneType = type(None)
try:
from _weakref import ref as WeakrefType
except ImportError:
from weakref import ref as WeakrefType
# This 2 type can be function
WrapperDescriptorType = type(object.__ne__)
MethodDescriptorType = type(object.__format__)
DO_NOT_MODULIZE_TYPES = [MappingProxyType, GetSetDescriptorType]
if WrapperDescriptorType is not FunctionType:
DO_NOT_MODULIZE_TYPES.append(WrapperDescriptorType)
if MethodDescriptorType is not FunctionType:
DO_NOT_MODULIZE_TYPES.append(MethodDescriptorType)
DO_NOT_MODULIZE_TYPES = tuple(DO_NOT_MODULIZE_TYPES)
@has_docs
def _modulize_function(old, globals_, source_module, module_name, module_path):
"""
Changes the given function's scopes and qualname if they were defined inside of a modulized class.
Parameters
----------
old : `function`
A function present inside of a modulized class.
globals_ : `dict` of (`str`, `Any`)
Global variables of the respective module.
source_module : `module`
The module, where the modulized class was defined.
module_name : `str`
The newly created module's name.
module_path : `str`
The newly created module's path.
Returns
-------
new : `function`
Newly recreated function if applicable.
"""
if old.__module__ != source_module:
return old
new = FunctionType(old.__code__, globals_, old.__name__, old.__defaults__, old.__closure__)
new.__module__ = module_path
qualname = old.__qualname__
    if (qualname is not None) and (len(qualname) > len(module_name)) and qualname[len(module_name)] == '.' and \
qualname.startswith(module_name):
new.__qualname__ = qualname[len(module_name) + 1:]
return new
@has_docs
def _modulize_type(klass, globals_, source_module, module_name, module_path):
"""
Changes the given class's scopes and qualname if they were defined inside of a modulized class.
Parameters
----------
klass : `type`
A class present inside of a modulized class.
globals_ : `dict` of (`str`, `Any`)
Global variables of the respective module.
source_module : `module`
The module, where the modulized class was defined.
module_name : `str`
The newly created module's name.
module_path : `str`
The newly created module's path.
"""
if klass.__module__ != source_module:
return
qualname = klass.__qualname__
if (qualname is None) or (len(qualname) <= len(module_name)) or qualname[len(module_name)] != '.' \
or not qualname.startswith(module_name):
return
klass.__qualname__ = qualname[len(module_name) + 1:]
klass.__module__ = module_path
for name in dir(klass):
value = getattr(klass, name)
value_type = value.__class__
if value_type is FunctionType:
value = _modulize_function(value, globals_, source_module, module_name, module_path)
setattr(klass, name, value)
if issubclass(value_type, type):
_modulize_type(value, globals_, source_module, module_name, module_path)
@has_docs
def modulize(klass):
"""
Transforms the given class to a module.
    Every function and class defined inside of the given class that is still present at transformation time will
    have its global scope modified.
Parameters
----------
klass : `type`
The class to transform to module.
Returns
-------
result_module : `module`
The created module object.
Raises
------
TypeError
If `klass` is not given as `type`.
"""
if not isinstance(klass, type):
raise TypeError(
f'Only types can be modulized, got {klass.__class__.__name__}; {klass!r}.'
)
source_module = klass.__module__
module_name = klass.__name__
module_path = f'{klass.__module__}.{module_name}'
try:
        result_module = sys.modules[module_path]
except KeyError:
result_module = ModuleType(module_path)
sys.modules[module_path] = result_module
globals_ = result_module.__dict__
globals_['__builtins__'] = __builtins__
else:
globals_ = result_module.__dict__
collected_names = []
for name in globals_.keys():
if name.startswith('__') and name.endswith('__'):
continue
collected_names.append(name)
for name in collected_names:
del globals_[name]
globals_['__doc__'] = None
for name in type.__dir__(klass):
if name.startswith('__') and name.endswith('__') and name != '__doc__':
continue
value = type.__getattribute__(klass, name)
value_type = type(value)
if value_type in DO_NOT_MODULIZE_TYPES:
continue
if value_type is FunctionType:
value = _modulize_function(value, globals_, source_module, module_name, module_path)
if issubclass(value_type, type):
_modulize_type(value, globals_, source_module, module_name, module_path)
ModuleType.__setattr__(result_module, name, value)
return result_module
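# A hedged usage sketch (illustrative only): decorating a class with @modulize turns it
# into a module object registered in sys.modules, with its functions rebound to the new
# module's globals.
if __name__ == "__main__":
    @modulize
    class tools:
        ANSWER = 42
        def double(x):
            return x * 2

    print(type(tools))      # <class 'module'>
    print(tools.double(3))  # 6
    print(tools.ANSWER)     # 42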
| [
[
[
0,
7
]
],
[
[
32,
35
],
[
4172,
4175
],
[
4276,
4279
]
],
[
[
54,
66
],
[
505,
517
],
[
607,
619
],
[
1529,
1541
],
[
3044,
3056
],
[
5126,
5138
]
],
[
[
68,
88
],
[
450,
470
]
],
[
[
90,
106
],
[
432,
448
]
],
[
[
108,
118
],
[
4244,
4254
],
[
5389,
5399
]
],
[
[
138,
146
],
[
732,
740
],
[
1919,
1927
],
[
3332,
3340
]
],
[
[
149,
157
]
],
[
[
202,
220
]
],
[
[
265,
283
]
],
[
[
315,
336
],
[
476,
497
],
[
552,
573
]
],
[
[
359,
379
],
[
579,
599
],
[
654,
674
]
],
[
[
407,
428
],
[
523,
544
],
[
625,
646
],
[
707,
728
]
],
[
[
677,
698
],
[
5048,
5069
]
],
[
[
745,
763
],
[
3078,
3096
],
[
5160,
5178
]
],
[
[
1932,
1946
],
[
3257,
3271
],
[
5299,
5313
]
],
[
[
3345,
3353
]
]
] |
import shutil
import tempfile
from django.conf import settings
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.cache import cache
from django.contrib.auth import get_user_model
from django.test import Client, TestCase, override_settings
from django.urls import reverse
from django import forms
from ..models import Post, Group, Follow
User = get_user_model()
TEMP_MEDIA_ROOT = tempfile.mkdtemp(dir=settings.BASE_DIR)
class PaginatorTest(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.user = User.objects.create_user(username='NoName')
cls.user_client = Client()
cls.user_client.force_login(cls.user)
cls.count_posts_on_page = 10
cls.count_posts = 15
cls.rest_posts = cls.count_posts % cls.count_posts_on_page
cls.group = Group.objects.create(
title='Тестовый заголовок',
slug='test-slug',
description='Описание...'
)
posts = (
Post(
text='Тестовый текст поста.',
author=cls.user,
group=cls.group
) for i in range(cls.count_posts)
)
Post.objects.bulk_create(posts, cls.count_posts)
def test_paginator_pages(self):
pages_paginator = [
reverse(
'posts:index'
),
reverse(
'posts:group_list',
kwargs={'slug': PaginatorTest.group.slug}
),
reverse(
'posts:profile',
kwargs={'username': PaginatorTest.user}
)
]
for page in pages_paginator:
with self.subTest(page=page):
response = PaginatorTest.user_client.get(page)
self.assertEqual(
len(response.context['page_obj']), (
PaginatorTest.count_posts_on_page
)
)
response = PaginatorTest.user_client.get(
page + '?page=2'
)
self.assertEqual(
len(response.context['page_obj']),
PaginatorTest.rest_posts
)
@override_settings(MEDIA_ROOT=TEMP_MEDIA_ROOT)
class PostPagesTests(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.user = User.objects.create_user(username='NoName')
cls.user_client = Client()
cls.user_client.force_login(cls.user)
small_gif = (
b'\x47\x49\x46\x38\x39\x61\x02\x00'
b'\x01\x00\x80\x00\x00\x00\x00\x00'
b'\xFF\xFF\xFF\x21\xF9\x04\x00\x00'
b'\x00\x00\x00\x2C\x00\x00\x00\x00'
b'\x02\x00\x01\x00\x00\x02\x02\x0C'
b'\x0A\x00\x3B'
)
image = SimpleUploadedFile(
name='small.gif',
content=small_gif,
content_type='image/gif'
)
cls.group = Group.objects.create(
title='Тестовый заголовок',
slug='test-slug',
description='Описание...'
)
cls.post = Post.objects.create(
text='Тестовый текст поста.',
image=image,
author=cls.user,
group=cls.group
)
@classmethod
def tearDownClass(cls):
super().tearDownClass()
shutil.rmtree(TEMP_MEDIA_ROOT, ignore_errors=True)
def test_pages_uses_correct_template(self):
templates_pages_names = {
reverse('posts:index'): 'posts/index.html',
reverse(
'posts:group_list',
kwargs={'slug': PostPagesTests.group.slug}
): 'posts/group_list.html',
reverse(
'posts:profile',
kwargs={'username': PostPagesTests.user}
): 'posts/profile.html',
reverse(
'posts:post_detail',
kwargs={'post_id': PostPagesTests.post.id}
): 'posts/post_detail.html',
reverse('posts:post_create'): 'posts/create_post.html',
reverse(
'posts:post_edit',
kwargs={'post_id': PostPagesTests.post.id}
): 'posts/create_post.html'
}
for reverse_name, template in templates_pages_names.items():
with self.subTest(reverse_name=reverse_name):
response = PostPagesTests.user_client.get(reverse_name)
self.assertTemplateUsed(response, template)
def check_post_context(self, post):
self.assertEqual(post.id, PostPagesTests.post.id)
self.assertEqual(post.author, PostPagesTests.post.author)
self.assertEqual(post.text, PostPagesTests.post.text)
self.assertEqual(post.image, PostPagesTests.post.image)
self.assertEqual(post.group, PostPagesTests.post.group)
def test_index_page_context(self):
response = PostPagesTests.user_client.get(reverse('posts:index'))
self.check_post_context(response.context['page_obj'][0])
def test_group_list_page_context(self):
response = PostPagesTests.user_client.get(
reverse(
'posts:group_list',
kwargs={'slug': PostPagesTests.group.slug}
)
)
self.check_post_context(response.context['page_obj'][0])
self.assertEqual(
PostPagesTests.group,
response.context['group']
)
def test_new_group_list_none(self):
group = Group.objects.create(
title='Тестовый заголовок',
slug='test-slug-new',
description='Описание...'
)
response = PostPagesTests.user_client.get(
reverse(
'posts:group_list',
kwargs={'slug': group.slug}
)
)
self.assertEqual(len(response.context['page_obj']), 0)
def test_profile_page_context(self):
response = PostPagesTests.user_client.get(
reverse(
'posts:profile',
kwargs={'username': PostPagesTests.user}
)
)
self.check_post_context(response.context['page_obj'][0])
self.assertEqual(
PostPagesTests.user,
response.context['author']
)
self.assertIsNotNone(response.context['following'])
def test_post_detail_page_context(self):
response = PostPagesTests.user_client.get(
reverse(
'posts:post_detail',
kwargs={'post_id': PostPagesTests.post.id}
)
)
self.check_post_context(response.context['post'])
form_fields = {'text': forms.fields.CharField}
for value, expected in form_fields.items():
with self.subTest(value=value):
form_fields = response.context['form'].fields.get(value)
self.assertIsInstance(form_fields, expected)
self.assertIsNotNone(response.context['comments'])
def test_edit_post_page_context(self):
response = PostPagesTests.user_client.get(
reverse(
'posts:post_edit',
kwargs={'post_id': PostPagesTests.post.id}
)
)
self.assertIsNotNone(response.context['form'])
self.assertIsNotNone(response.context['is_edit'])
self.assertTrue(response.context['is_edit'])
form_fields = {
'text': forms.fields.CharField,
'group': forms.fields.ChoiceField,
}
for value, expected in form_fields.items():
with self.subTest(value=value):
form_fields = response.context['form'].fields.get(value)
self.assertIsInstance(form_fields, expected)
def test_create_post_page_context(self):
response = PostPagesTests.user_client.get(
reverse('posts:post_create')
)
form_fields = {
'text': forms.fields.CharField,
'group': forms.fields.ChoiceField,
}
for value, expected in form_fields.items():
with self.subTest(value=value):
form_fields = response.context['form'].fields.get(value)
self.assertIsInstance(form_fields, expected)
def test_cache_index(self):
post = Post.objects.create(
text='Тестовый текст поста.',
author=PostPagesTests.user,
)
response = PostPagesTests.user_client.get(
reverse('posts:index')
)
page = response.content
post.delete()
response = PostPagesTests.user_client.get(
reverse('posts:index')
)
self.assertEqual(page, response.content)
cache.clear()
response = PostPagesTests.user_client.get(
reverse('posts:index')
)
self.assertNotEqual(page, response.content)
class FollowTests(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.user = User.objects.create_user(username='NoName')
cls.user_client = Client()
cls.user_client.force_login(cls.user)
cls.author = User.objects.create_user(username='Author')
cls.author_client = Client()
cls.author_client.force_login(cls.author)
def test_following_auth(self):
        #FIXME: create the follow relation through the ORM instead of the view
FollowTests.user_client.get(
reverse(
'posts:profile_follow',
kwargs={'username': FollowTests.author}
)
)
        #FIXME: should assert that the number of Follow objects in the DB increased by one
follow = Follow.objects.last()
self.assertEqual(follow.user, FollowTests.user)
self.assertEqual(follow.author, FollowTests.author)
def test_unfollow_auth(self):
        #FIXME: create the follow relation through the ORM instead of the view
FollowTests.user_client.get(
reverse(
'posts:profile_follow',
kwargs={'username': FollowTests.author}
)
)
follows_count = Follow.objects.count()
FollowTests.user_client.get(
reverse(
'posts:profile_unfollow',
kwargs={'username': FollowTests.author}
)
)
self.assertEqual(Follow.objects.count(), follows_count - 1)
def test_new_post_follow(self):
FollowTests.user_client.get(
reverse(
'posts:profile_follow',
kwargs={'username': FollowTests.author}
)
)
post = Post.objects.create(
text='Тестовый текст поста.',
author=FollowTests.author,
)
response = FollowTests.user_client.get(
reverse(
'posts:follow_index'
)
)
self.assertEqual(
post.id, response.context['page_obj'][0].id
)
def test_new_post_unfollow(self):
FollowTests.user_client.get(
reverse(
'posts:profile_follow',
kwargs={'username': FollowTests.author}
)
)
Post.objects.create(
text='Тестовый текст поста.',
author=FollowTests.author,
)
user = User.objects.create_user(username='NameNo')
user_client = Client()
user_client.force_login(user)
response = user_client.get(
reverse(
'posts:follow_index'
)
)
self.assertEqual(len(response.context['page_obj']), 0)
| [
[
[
7,
13
],
[
3404,
3410
]
],
[
[
21,
29
],
[
412,
420
]
],
[
[
55,
63
],
[
433,
441
]
],
[
[
107,
125
],
[
2856,
2874
]
],
[
[
156,
161
],
[
8754,
8759
]
],
[
[
194,
208
],
[
377,
391
]
],
[
[
233,
239
],
[
645,
651
],
[
2485,
2491
],
[
9107,
9113
],
[
9255,
9261
],
[
11326,
11332
]
],
[
[
241,
249
],
[
474,
482
],
[
2314,
2322
],
[
8936,
8944
]
],
[
[
251,
268
],
[
2247,
2264
]
],
[
[
294,
301
],
[
1330,
1337
],
[
1396,
1403
],
[
1526,
1533
],
[
3550,
3557
],
[
3606,
3613
],
[
3762,
3769
],
[
3910,
3917
],
[
4068,
4075
],
[
4136,
4143
],
[
4993,
5000
],
[
5190,
5197
],
[
5755,
5762
],
[
6036,
6043
],
[
6501,
6508
],
[
7139,
7146
],
[
7896,
7903
],
[
8514,
8521
],
[
8664,
8671
],
[
8831,
8838
],
[
9440,
9447
],
[
9920,
9927
],
[
10145,
10152
],
[
10430,
10437
],
[
10746,
10753
],
[
10996,
11003
],
[
11421,
11428
]
],
[
[
321,
326
],
[
6719,
6724
],
[
7476,
7481
],
[
7521,
7526
],
[
7979,
7984
],
[
8024,
8029
]
],
[
[
349,
353
],
[
1023,
1027
],
[
1204,
1208
],
[
3163,
3167
],
[
8338,
8342
],
[
10574,
10578
],
[
11133,
11137
]
],
[
[
355,
360
],
[
853,
858
],
[
3004,
3009
],
[
5548,
5553
]
],
[
[
362,
368
],
[
9659,
9665
],
[
10073,
10079
],
[
10301,
10307
]
],
[
[
370,
374
],
[
575,
579
],
[
2415,
2419
],
[
9037,
9041
],
[
9183,
9187
],
[
11260,
11264
]
],
[
[
394,
409
],
[
2276,
2291
],
[
3418,
3433
]
],
[
[
460,
473
],
[
1473,
1486
],
[
1604,
1617
],
[
1754,
1767
],
[
1905,
1918
],
[
2006,
2019
],
[
2201,
2214
]
],
[
[
2299,
2313
],
[
3683,
3697
],
[
3840,
3854
],
[
3991,
4005
],
[
4215,
4229
],
[
4443,
4457
],
[
4623,
4637
],
[
4685,
4699
],
[
4749,
4763
],
[
4812,
4826
],
[
4876,
4890
],
[
4962,
4976
],
[
5146,
5160
],
[
5267,
5281
],
[
5421,
5435
],
[
5711,
5725
],
[
5992,
6006
],
[
6114,
6128
],
[
6262,
6276
],
[
6457,
6471
],
[
6582,
6596
],
[
7095,
7109
],
[
7218,
7232
],
[
7852,
7866
],
[
8420,
8434
],
[
8470,
8484
],
[
8620,
8634
],
[
8787,
8801
]
],
[
[
8924,
8935
],
[
9399,
9410
],
[
9525,
9536
],
[
9719,
9730
],
[
9777,
9788
],
[
9879,
9890
],
[
10005,
10016
],
[
10104,
10115
],
[
10232,
10243
],
[
10389,
10400
],
[
10515,
10526
],
[
10656,
10667
],
[
10705,
10716
],
[
10955,
10966
],
[
11081,
11092
],
[
11215,
11226
]
]
] |
from sqlalchemy import Column
from sqlalchemy import Integer
from alembic import command
from alembic import op
from alembic.testing import config
from alembic.testing.env import _no_sql_testing_config
from alembic.testing.env import clear_staging_env
from alembic.testing.env import staging_env
from alembic.testing.env import three_rev_fixture
from alembic.testing.fixtures import capture_context_buffer
from alembic.testing.fixtures import op_fixture
from alembic.testing.fixtures import TestBase
from alembic.util import sqla_compat
class FullEnvironmentTests(TestBase):
@classmethod
def setup_class(cls):
staging_env()
cls.cfg = cfg = _no_sql_testing_config("oracle")
cls.a, cls.b, cls.c = three_rev_fixture(cfg)
@classmethod
def teardown_class(cls):
clear_staging_env()
    def test_begin_commit(self):
with capture_context_buffer(transactional_ddl=True) as buf:
command.upgrade(self.cfg, self.a, sql=True)
assert "SET TRANSACTION READ WRITE\n\n/" in buf.getvalue()
assert "COMMIT\n\n/" in buf.getvalue()
def test_batch_separator_default(self):
with capture_context_buffer() as buf:
command.upgrade(self.cfg, self.a, sql=True)
assert "/" in buf.getvalue()
assert ";" not in buf.getvalue()
def test_batch_separator_custom(self):
with capture_context_buffer(oracle_batch_separator="BYE") as buf:
command.upgrade(self.cfg, self.a, sql=True)
assert "BYE" in buf.getvalue()
class OpTest(TestBase):
def test_add_column(self):
context = op_fixture("oracle")
op.add_column("t1", Column("c1", Integer, nullable=False))
context.assert_("ALTER TABLE t1 ADD c1 INTEGER NOT NULL")
def test_add_column_with_default(self):
context = op_fixture("oracle")
op.add_column(
"t1", Column("c1", Integer, nullable=False, server_default="12")
)
context.assert_("ALTER TABLE t1 ADD c1 INTEGER DEFAULT '12' NOT NULL")
@config.requirements.comments
def test_add_column_with_comment(self):
context = op_fixture("oracle")
op.add_column(
"t1", Column("c1", Integer, nullable=False, comment="c1 comment")
)
context.assert_(
"ALTER TABLE t1 ADD c1 INTEGER NOT NULL",
"COMMENT ON COLUMN t1.c1 IS 'c1 comment'",
)
@config.requirements.computed_columns_api
def test_add_column_computed(self):
context = op_fixture("oracle")
op.add_column(
"t1",
Column("some_column", Integer, sqla_compat.Computed("foo * 5")),
)
context.assert_(
"ALTER TABLE t1 ADD some_column "
"INTEGER GENERATED ALWAYS AS (foo * 5)"
)
def test_alter_table_rename_oracle(self):
context = op_fixture("oracle")
op.rename_table("s", "t")
context.assert_("ALTER TABLE s RENAME TO t")
def test_alter_table_rename_schema_oracle(self):
context = op_fixture("oracle")
op.rename_table("s", "t", schema="myowner")
context.assert_("ALTER TABLE myowner.s RENAME TO t")
def test_alter_column_rename_oracle(self):
context = op_fixture("oracle")
op.alter_column("t", "c", new_column_name="x")
context.assert_("ALTER TABLE t RENAME COLUMN c TO x")
def test_alter_column_new_type(self):
context = op_fixture("oracle")
op.alter_column("t", "c", type_=Integer)
context.assert_("ALTER TABLE t MODIFY c INTEGER")
def test_alter_column_add_comment(self):
context = op_fixture("oracle")
op.alter_column("t", "c", type_=Integer, comment="c comment")
context.assert_(
"ALTER TABLE t MODIFY c INTEGER",
"COMMENT ON COLUMN t.c IS 'c comment'",
)
def test_alter_column_add_comment_quotes(self):
context = op_fixture("oracle")
op.alter_column("t", "c", type_=Integer, comment="c 'comment'")
context.assert_(
"ALTER TABLE t MODIFY c INTEGER",
"COMMENT ON COLUMN t.c IS 'c ''comment'''",
)
def test_alter_column_drop_comment(self):
context = op_fixture("oracle")
op.alter_column("t", "c", type_=Integer, comment=None)
context.assert_(
"ALTER TABLE t MODIFY c INTEGER", "COMMENT ON COLUMN t.c IS ''"
)
def test_create_table_comment(self):
# this is handled by SQLAlchemy's compilers
context = op_fixture("oracle")
op.create_table_comment("t2", comment="t2 table", schema="foo")
context.assert_("COMMENT ON TABLE foo.t2 IS 't2 table'")
def test_drop_table_comment(self):
# this is handled by SQLAlchemy's compilers
context = op_fixture("oracle")
op.drop_table_comment("t2", existing_comment="t2 table", schema="foo")
context.assert_("COMMENT ON TABLE foo.t2 IS ''")
def test_drop_index(self):
context = op_fixture("oracle")
op.drop_index("my_idx", "my_table")
context.assert_contains("DROP INDEX my_idx")
def test_drop_column_w_default(self):
context = op_fixture("oracle")
op.drop_column("t1", "c1")
context.assert_("ALTER TABLE t1 DROP COLUMN c1")
def test_drop_column_w_check(self):
context = op_fixture("oracle")
op.drop_column("t1", "c1")
context.assert_("ALTER TABLE t1 DROP COLUMN c1")
def test_alter_column_nullable_w_existing_type(self):
context = op_fixture("oracle")
op.alter_column("t", "c", nullable=True, existing_type=Integer)
context.assert_("ALTER TABLE t MODIFY c NULL")
def test_alter_column_not_nullable_w_existing_type(self):
context = op_fixture("oracle")
op.alter_column("t", "c", nullable=False, existing_type=Integer)
context.assert_("ALTER TABLE t MODIFY c NOT NULL")
def test_alter_column_nullable_w_new_type(self):
context = op_fixture("oracle")
op.alter_column("t", "c", nullable=True, type_=Integer)
context.assert_(
"ALTER TABLE t MODIFY c NULL", "ALTER TABLE t MODIFY c INTEGER"
)
def test_alter_column_not_nullable_w_new_type(self):
context = op_fixture("oracle")
op.alter_column("t", "c", nullable=False, type_=Integer)
context.assert_(
"ALTER TABLE t MODIFY c NOT NULL", "ALTER TABLE t MODIFY c INTEGER"
)
def test_alter_add_server_default(self):
context = op_fixture("oracle")
op.alter_column("t", "c", server_default="5")
context.assert_("ALTER TABLE t MODIFY c DEFAULT '5'")
def test_alter_replace_server_default(self):
context = op_fixture("oracle")
op.alter_column(
"t", "c", server_default="5", existing_server_default="6"
)
context.assert_("ALTER TABLE t MODIFY c DEFAULT '5'")
def test_alter_remove_server_default(self):
context = op_fixture("oracle")
op.alter_column("t", "c", server_default=None)
context.assert_("ALTER TABLE t MODIFY c DEFAULT NULL")
def test_alter_do_everything(self):
context = op_fixture("oracle")
op.alter_column(
"t",
"c",
new_column_name="c2",
nullable=True,
type_=Integer,
server_default="5",
)
context.assert_(
"ALTER TABLE t MODIFY c NULL",
"ALTER TABLE t MODIFY c DEFAULT '5'",
"ALTER TABLE t MODIFY c INTEGER",
"ALTER TABLE t RENAME COLUMN c TO c2",
)
@config.requirements.comments
def test_create_table_with_column_comments(self):
context = op_fixture("oracle")
op.create_table(
"t2", Column("c1", Integer, primary_key=True), comment="t2 comment"
)
context.assert_(
"CREATE TABLE t2 (c1 INTEGER NOT NULL, PRIMARY KEY (c1))",
"COMMENT ON TABLE t2 IS 't2 comment'",
)
# TODO: when we add schema support
# def test_alter_column_rename_oracle_schema(self):
# context = op_fixture('oracle')
# op.alter_column("t", "c", name="x", schema="y")
# context.assert_(
# 'ALTER TABLE y.t RENAME COLUMN c TO c2'
# )
| [
[
[
23,
29
],
[
1662,
1668
],
[
1892,
1898
],
[
2199,
2205
],
[
2592,
2598
],
[
7804,
7810
]
],
[
[
53,
60
],
[
1675,
1682
],
[
1905,
1912
],
[
2212,
2219
],
[
2614,
2621
],
[
3505,
3512
],
[
3697,
3704
],
[
3992,
3999
],
[
4287,
4294
],
[
5633,
5640
],
[
5863,
5870
],
[
6079,
6086
],
[
6352,
6359
],
[
7357,
7364
],
[
7817,
7824
]
],
[
[
82,
89
],
[
942,
949
],
[
1203,
1210
],
[
1455,
1462
]
],
[
[
110,
112
],
[
1642,
1644
],
[
1859,
1861
],
[
2166,
2168
],
[
2547,
2549
],
[
2894,
2896
],
[
3074,
3076
],
[
3274,
3276
],
[
3473,
3475
],
[
3665,
3667
],
[
3960,
3962
],
[
4255,
4257
],
[
4562,
4564
],
[
4830,
4832
],
[
5037,
5039
],
[
5216,
5218
],
[
5388,
5390
],
[
5578,
5580
],
[
5807,
5809
],
[
6032,
6034
],
[
6304,
6306
],
[
6569,
6571
],
[
6774,
6776
],
[
7029,
7031
],
[
7227,
7229
],
[
7769,
7771
]
],
[
[
141,
147
],
[
2046,
2052
],
[
2419,
2425
],
[
7639,
7645
]
],
[
[
180,
202
],
[
667,
689
]
],
[
[
235,
252
],
[
809,
826
]
],
[
[
285,
296
],
[
629,
640
]
],
[
[
329,
346
],
[
731,
748
]
],
[
[
384,
406
],
[
875,
897
],
[
1158,
1180
],
[
1382,
1404
]
],
[
[
444,
454
],
[
1613,
1623
],
[
1830,
1840
],
[
2137,
2147
],
[
2518,
2528
],
[
2865,
2875
],
[
3045,
3055
],
[
3245,
3255
],
[
3444,
3454
],
[
3636,
3646
],
[
3931,
3941
],
[
4226,
4236
],
[
4533,
4543
],
[
4801,
4811
],
[
5008,
5018
],
[
5187,
5197
],
[
5359,
5369
],
[
5549,
5559
],
[
5778,
5788
],
[
6003,
6013
],
[
6275,
6285
],
[
6540,
6550
],
[
6745,
6755
],
[
7000,
7010
],
[
7198,
7208
],
[
7740,
7750
]
],
[
[
492,
500
],
[
567,
575
],
[
1553,
1561
]
],
[
[
526,
537
],
[
2623,
2634
]
],
[
[
546,
566
]
],
[
[
1546,
1552
]
]
] |
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for shell.bzl."""
load("//lib:shell.bzl", "shell")
load("//lib:unittest.bzl", "asserts", "unittest")
def _shell_array_literal_test(ctx):
"""Unit tests for shell.array_literal."""
env = unittest.begin(ctx)
asserts.equals(env, "()", shell.array_literal([]))
asserts.equals(env, "('1')", shell.array_literal([1]))
asserts.equals(env, "('1' '2' '3')", shell.array_literal([1, 2, 3]))
asserts.equals(env, "('$foo')", shell.array_literal(["$foo"]))
asserts.equals(env, "('qu\"o\"te')", shell.array_literal(['qu"o"te']))
unittest.end(env)
shell_array_literal_test = unittest.make(_shell_array_literal_test)
def _shell_quote_test(ctx):
"""Unit tests for shell.quote."""
env = unittest.begin(ctx)
asserts.equals(env, "'foo'", shell.quote("foo"))
asserts.equals(env, "'foo bar'", shell.quote("foo bar"))
asserts.equals(env, "'three spaces'", shell.quote("three spaces"))
asserts.equals(env, "' leading'", shell.quote(" leading"))
asserts.equals(env, "'trailing '", shell.quote("trailing "))
asserts.equals(env, "'new\nline'", shell.quote("new\nline"))
asserts.equals(env, "'tab\tcharacter'", shell.quote("tab\tcharacter"))
asserts.equals(env, "'$foo'", shell.quote("$foo"))
asserts.equals(env, "'qu\"o\"te'", shell.quote('qu"o"te'))
asserts.equals(env, "'it'\\''s'", shell.quote("it's"))
asserts.equals(env, "'foo\\bar'", shell.quote("foo\\bar"))
asserts.equals(env, "'back`echo q`uote'", shell.quote("back`echo q`uote"))
unittest.end(env)
shell_quote_test = unittest.make(_shell_quote_test)
def _shell_spawn_e2e_test_impl(ctx):
"""Test spawning a real shell."""
args = [
"foo",
"foo bar",
"three spaces",
" leading",
"trailing ",
"new\nline",
"tab\tcharacter",
"$foo",
'qu"o"te',
"it's",
"foo\\bar",
"back`echo q`uote",
]
script_content = "\n".join([
"#!/bin/bash",
"myarray=" + shell.array_literal(args),
'output=$(echo "${myarray[@]}")',
# For logging:
'echo "DEBUG: output=[${output}]" >&2',
# The following is a shell representation of what the echo of the quoted
# array will look like. It looks a bit confusing considering it's shell
        # quoted into Python. The shell uses single quotes to minimize shell
        # escaping, so only the single quote needs to be escaped as '\''; all
# others are essentially kept literally.
"expected='foo foo bar three spaces leading trailing new",
"line tab\tcharacter $foo qu\"o\"te it'\\''s foo\\bar back`echo q`uote'",
'[[ "${output}" == "${expected}" ]]',
])
script_file = ctx.actions.declare_file("%s.sh" % (ctx.label.name))
ctx.actions.write(
output = script_file,
content = script_content,
is_executable = True,
)
return [
DefaultInfo(executable = script_file),
]
shell_spawn_e2e_test = rule(
test = True,
implementation = _shell_spawn_e2e_test_impl,
)
def shell_test_suite():
"""Creates the test targets and test suite for shell.bzl tests."""
unittest.suite(
"shell_tests",
shell_array_literal_test,
shell_quote_test,
shell_spawn_e2e_test,
)
| [
[
[
724,
749
],
[
1227,
1252
]
],
[
[
1186,
1210
],
[
3846,
3870
]
],
[
[
1259,
1276
],
[
2189,
2206
]
],
[
[
2156,
2172
],
[
3880,
3896
]
],
[
[
2213,
2239
],
[
3669,
3695
]
],
[
[
3602,
3622
],
[
3906,
3926
]
],
[
[
3704,
3720
]
]
] |
import strax
import straxen
import wfsim
def test_nt_context(register=None, context=None):
"""
    Test that a context is properly set up. To this end, we perform a
    simple scan of the field "time", which every plugin should provide.
    Because this scan activates all the plugins, any badly specified
    option will surface quickly.
:param register: Register a plugin (optional)
:param context: Test with some other context than the nT simulation
context
"""
if not straxen.utilix_is_configured():
return
if context is None:
context = straxen.contexts.xenonnt_simulation(cmt_run_id_sim='010000', cmt_version='global_ONLINE')
assert isinstance(context, strax.Context), f'{context} is not a context'
if register is not None:
assert issubclass(register, strax.Plugin), f'{register} is not a plugin'
# Search all plugins for the time field (each should have one)
context.search_field('time')
# def test_mc_chain():
# test_nt_context(wfsim.RawRecordsFromMcChain)
# def test_fax_nveto():
# test_nt_context(wfsim.RawRecordsFromFaxnVeto)
# def test_1t_context():
# test_nt_context(context=straxen.contexts.xenon1t_simulation())
| [
[
[
7,
12
],
[
757,
762
],
[
872,
877
]
],
[
[
21,
28
],
[
541,
548
],
[
635,
642
]
],
[
[
37,
42
]
],
[
[
52,
67
]
]
] |
import glob
import datetime
import inference
import numpy as np
flist = []
def run_classifier():
flist = []
list1 = glob.glob("./images/*.jpg")
list1.sort()
print("Printing the time of Interesting Events.....\n\n")
temp = str(inference.run_inference_on_image())
for i in range(len(list1) - 1):
inference.imagePath = list1[i]
temp2 = str(inference.run_inference_on_image2())
inference.imagePath = list1[i+1]
temp = str(inference.run_inference_on_image2())
if temp2 != temp:
print("Time : " + str(datetime.timedelta(seconds=(i))))
flist.extend([i])
else:
print("." ,)
d = np.array(flist)
d.sort()
diff = [y - x for x, y in zip(*[iter(d)] * 2)]
avg = sum(diff) / len(diff)
m = [[d[0]]]
for x in d[1:]:
if x - m[-1][0] < avg:
m[-1].append(x)
else:
m.append([x])
# print(m)
# print(type(m))
with open('list.txt' , 'w') as f:
print("Writing to file\n")
for i in range(0 , (len(m))):
with open('list.txt' , 'a') as f:
print("\n", file=f)
print("start time : " + str(datetime.timedelta(seconds = int((m[i][0])) )) , file=f)
print("end time : " + str(datetime.timedelta(seconds = int((m[i][len(m[i]) - 1])) )) , file=f )
print("\n\nFinished Analysis\n\n")
print("The timestanps of all interesting events is stored in a File named list.txt ")
if __name__ == '__main__':
run_classifier()
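# Hedged illustration (not part of the original script): the block above groups
# event indices by comparing each index with the first index of the current
# group against the average pairwise gap. The helper name below is
# illustrative only; it reuses the module-level numpy import.
def _group_event_indices(indices):
    d = np.array(sorted(indices))
    diff = [y - x for x, y in zip(*[iter(d)] * 2)]
    avg = sum(diff) / len(diff)
    groups = [[d[0]]]
    for x in d[1:]:
        if x - groups[-1][0] < avg:
            groups[-1].append(x)
        else:
            groups.append([x])
    return groups
# Example: _group_event_indices([3, 4, 5, 40, 41]) -> [[3, 4, 5], [40, 41]]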
| [
[
[
7,
11
],
[
132,
136
]
],
[
[
19,
27
],
[
647,
655
],
[
1370,
1378
],
[
1470,
1478
]
],
[
[
35,
44
],
[
266,
275
],
[
358,
367
],
[
417,
426
],
[
470,
479
],
[
530,
539
]
],
[
[
52,
63
],
[
794,
796
]
],
[
[
64,
69
]
],
[
[
79,
93
],
[
1712,
1726
]
]
] |
import re
import sys
import inspect
_py2 = sys.version_info[0] == 2
_py3 = sys.version_info[0] == 3
# noinspection PyPep8Naming
class route(object):
def __init__(self, rule, **options):
"""
Class Initializer - This will only execute if using BottleCBV's original route() style.
"""
# Not sure if this is needed, need to test what happens when you specify a rule but not options in BottleCBV.
if not options:
options = dict(method='ANY')
self.rule = rule
self.options = options
def __call__(self, func):
f = func
rule = self.rule
options = self.options
def decorator(*_, **__):
if not hasattr(f, '_rule_cache') or f._rule_cache is None:
f._rule_cache = {f.__name__: [(rule, options)]}
elif f.__name__ not in f._rule_cache:
f._rule_cache[f.__name__] = [(rule, options)]
else:
f._rule_cache[f.__name__].append((rule, options))
return f
return decorator()
@staticmethod
def decorate(f, rule, **options):
if not hasattr(f, '_rule_cache') or f._rule_cache is None:
f._rule_cache = {f.__name__: [(rule, options)]}
elif f.__name__ not in f._rule_cache:
f._rule_cache[f.__name__] = [(rule, options)]
else:
f._rule_cache[f.__name__].append((rule, options))
return f
@staticmethod
def get(rule):
"""
GET Method
CRUD Use Case: Read
Example:
Request a user profile
"""
options = dict(method='GET')
def decorator(f):
return route.decorate(f, rule, **options)
return decorator
@staticmethod
def post(rule):
"""
POST Method
CRUD Use Case: Create
Example:
Create a new user
"""
options = dict(method='POST')
def decorator(f):
return route.decorate(f, rule, **options)
return decorator
@staticmethod
def put(rule):
"""
PUT Method
CRUD Use Case: Update / Replace
Example:
Set item# 4022 to Red Seedless Grapes, instead of tomatoes
"""
options = dict(method='PUT')
def decorator(f):
return route.decorate(f, rule, **options)
return decorator
@staticmethod
def patch(rule):
"""
PATCH Method
CRUD Use Case: Update / Modify
Example:
            Rename the user's name from Jon to John
"""
options = dict(method='PATCH')
def decorator(f):
return route.decorate(f, rule, **options)
return decorator
@staticmethod
def delete(rule):
"""
DELETE Method
CRUD Use Case: Delete
Example:
Delete user# 12403 (John)
"""
options = dict(method='DELETE')
def decorator(f):
return route.decorate(f, rule, **options)
return decorator
@staticmethod
def head(rule):
"""
HEAD Method
CRUD Use Case: Read (in-part)
Note: This is the same as GET, but without the response body.
This is useful for items such as checking if a user exists, such as this example:
Request: GET /user/12403
Response: (status code) 404 - Not Found
If you are closely following the REST standard, you can also verify if the requested PATCH (update) was
successfully applied, in this example:
Request: PUT /user/12404 { "name": "John"}
Response: (status code) 304 - Not Modified
"""
options = dict(method='HEAD')
def decorator(f):
return route.decorate(f, rule, **options)
return decorator
@staticmethod
def any(rule):
"""
From the Bottle Documentation:
The non-standard ANY method works as a low priority fallback: Routes that listen to ANY will match requests
regardless of their HTTP method but only if no other more specific route is defined. This is helpful for
proxy-routes that redirect requests to more specific sub-applications.
"""
options = dict(method='ANY')
def decorator(f):
return route.decorate(f, rule, **options)
return decorator
class BottleView(object):
""" Class based view implementation for bottle (following flask-classy architech)
"""
decorators = []
DEFAULT_ROUTES = ["get", "put", "post", "delete", "index", "options"]
base_route = None
route_prefix = None
view_identifier = "view"
@classmethod
def register(cls, app, base_route=None, route_prefix=None):
""" Register all the possible routes of the subclass
:param app: bottle app instance
:param base_route: prepend to the route rule (/base_route/<class_name OR route_prefix>)
:param route_prefix: used when want to register custom rule, which is not class name
"""
if cls is BottleView:
raise TypeError("cls must be a subclass of BottleView, not BottleView itself")
cls._app = app
cls.route_prefix = route_prefix or cls.route_prefix
cls.base_route = base_route or cls.base_route
# import ipdb; ipdb.set_trace()
# get all the valid members of the class to register Endpoints
routes = cls._get_interesting_members(BottleView)
# initialize the class
klass = cls()
# Iterate through class members to register Endpoints
for func_name, func in routes:
# print "*"*50
method_args = inspect.getargspec(func)[0]
# Get
rule = cls._build_route_rule(func_name, *method_args)
method = "GET"
if func_name in cls.DEFAULT_ROUTES:
if func_name == "index":
method = "GET"
else:
method = func_name.upper()
# create name for endpoint
endpoint = "%s:%s" % (cls.__name__, func_name)
callable_method = getattr(klass, func_name)
for decorator in cls.decorators:
callable_method = decorator(callable_method)
try:
# noinspection PyProtectedMember
custom_rule = func._rule_cache
except AttributeError:
method_args = inspect.getargspec(func)[0]
rule = cls._build_route_rule(func_name, *method_args)
method = "GET"
if func_name in cls.DEFAULT_ROUTES:
if func_name == "index":
method = "GET"
else:
method = func_name.upper()
cls._app.route(callback=callable_method, method=method,
path=rule, name=endpoint)
else:
custom_rule_list = list(custom_rule.values())
if _py3:
custom_rule_list = list(custom_rule_list)
for cached_rule in custom_rule_list[0]:
rule, options = cached_rule
try:
method = options.pop("method")
except KeyError:
method = "GET"
try:
endpoint = options.pop("name")
except KeyError:
pass
cls._app.route(callback=callable_method, path=rule,
method=method, name=endpoint, **options)
print(("%s : %s, Endpoint: %s" % (method, rule, endpoint)))
@classmethod
def _build_route_rule(cls, func_name, *method_args):
klass_name = cls.__name__.lower()
klass_name = (klass_name[:-len(cls.view_identifier)]
if klass_name.endswith(cls.view_identifier)
else klass_name)
rule = klass_name
if not (cls.base_route or cls.route_prefix):
rule = klass_name
elif not cls.base_route and cls.route_prefix:
rule = cls.route_prefix
elif cls.base_route and not cls.route_prefix:
rule = "%s/%s" % (cls.base_route, klass_name)
elif cls.base_route and cls.route_prefix:
rule = "%s/%s" % (cls.base_route, cls.route_prefix)
rule_parts = [rule]
if func_name not in cls.DEFAULT_ROUTES:
rule_parts.append(func_name.replace("_", "-").lower())
ignored_rule_args = ['self']
if hasattr(cls, 'base_args'):
ignored_rule_args += cls.base_args
for arg in method_args:
if arg not in ignored_rule_args:
rule_parts.append("<%s>" % arg)
result = "/%s/" % join_paths(*rule_parts)
result = re.sub(r'(/)\1+', r'\1', result)
result = re.sub("/{2,}", "/", result)
return result
@classmethod
def _get_interesting_members(cls, base_class):
"""Returns a list of methods that can be routed to"""
base_members = dir(base_class)
predicate = inspect.ismethod if _py2 else inspect.isfunction
all_members = inspect.getmembers(cls, predicate=predicate)
return [member for member in all_members
if not member[0] in base_members
and ((hasattr(member[1], "__self__")
and not member[1].__self__ in cls.__class__.__mro__) if _py2 else True)
and not member[0].startswith("_")]
def join_paths(*path_pieces):
"""Join parts of a url path"""
# Remove blank strings, and make sure everything is a string
cleaned_parts = list(map(str, [_f for _f in path_pieces if _f]))
if _py3:
cleaned_parts = list(cleaned_parts)
return "/".join(cleaned_parts)
| [
[
[
7,
9
],
[
8944,
8946
],
[
8994,
8996
]
],
[
[
17,
20
],
[
44,
47
],
[
76,
79
]
],
[
[
28,
35
],
[
5728,
5735
],
[
6501,
6508
],
[
9236,
9243
],
[
9266,
9273
],
[
9307,
9314
]
],
[
[
37,
41
],
[
9256,
9260
],
[
9581,
9585
]
],
[
[
69,
73
],
[
7073,
7077
],
[
9856,
9860
]
],
[
[
137,
142
],
[
1694,
1699
],
[
1997,
2002
],
[
2348,
2353
],
[
2686,
2691
],
[
3003,
3008
],
[
3789,
3794
],
[
4353,
4358
]
],
[
[
4422,
4432
],
[
5107,
5117
],
[
5506,
5516
]
],
[
[
9654,
9664
],
[
8903,
8913
]
]
] |
#!/usr/bin/python
#
# Logic for computing missing and duplicate files using Postgresql DB.
#
import os
import fnmatch
# from shutil import copy2
# from shutil import copyfile
from dbutils import required as db
MISSING_FROM_COMP_FOLDER = "missing_from_comp"
MISSING_FROM_SRC_FOLDER = "missing_from_source"
FILE_NAME_COLUMN = "file"
FILE_EXTENSIONS_TO_SKIP = ['.ini', '.db', '.info', '.pdfcp ']
def handleDuplicates(conn):
# conn = db.getConnection(dbName)
    cur = conn.cursor()
try:
# duplicates on the source location
cur.execute("SELECT * from filehashes WHERE ")
# duplicates on the comparison location
cur.execute("SELECT * from filehashes ")
finally:
        cur.close()
# conn.close
db.returnConnection(conn)
#######################
# Logic for identifying missing files in the source and destination/comparison locations.
#######################
def handleMissing(dbName, sourceSchema, compSchema):
conn = db.getConnection(dbName)
cur = conn.cursor()
try:
# identify missing files from the comparison location
command = "SELECT %s from %s.filehashes where hash in (select hash from %s.filehashes except select hash from %s.filehashes);" %(FILE_NAME_COLUMN, sourceSchema, sourceSchema, compSchema)
print("Identifying all missing files in comparison location. Executing: " + command)
cur.execute( command )
missingInComp = cur.fetchall()
# print("Missing files in comparison location: %s" % missingInComp)
# __collectMissingFiles__(list(missingInComp), MISSING_FROM_COMP_FOLDER)
try:
for missingFile in missingInComp:
__collectMissingFiles__(missingFile[0], MISSING_FROM_COMP_FOLDER)
except Exception as ce:
print("There was a problem locating files on comparison location. The comparison location files may have changed/moved since the scan. ")
print(ce)
return
# identify missing files from the source location
command = "SELECT %s from %s.filehashes where hash in (select hash from %s.filehashes except select hash from %s.filehashes);" % (FILE_NAME_COLUMN, compSchema, compSchema, sourceSchema)
print("Identifying all missing files in source location. Executing: " + command)
cur.execute(command)
missingInSource = cur.fetchall()
# print("Missing files in source location: %s" % missingInSource)
# __collectMissingFiles__(list(missingInSource), MISSING_FROM_SRC_FOLDER)
try:
for missingFile in missingInSource:
__collectMissingFiles__(missingFile[0], MISSING_FROM_SRC_FOLDER)
except Exception as se:
print("There was a problem locating files on source location. The source location files may have changed/moved since the scan. ")
print(se)
except Exception as e:
print("Unable to identify missing files! Cause: " + e)
finally:
cur.close()
# conn.close
db.returnConnection(conn)
print("Done processing missing files. Please look at %s and %s folders for missing files." % (MISSING_FROM_SRC_FOLDER, MISSING_FROM_COMP_FOLDER))
#######################
# Utility for copying all of the missing files from the specified result-set into the specified folder.
#######################
def __collectMissingFiles__( missingFile, folderName ):
# for missingFile in missingFiles:
# missingFile.endswith(tuple(set(['.ini', '.db'])))
if not missingFile.endswith(tuple(FILE_EXTENSIONS_TO_SKIP)):
dst = "./" + folderName + missingFile
print("Attempting to copy missing file: " + missingFile + " to destination: " + dst)
if not os.path.exists(os.path.dirname(dst)):
os.makedirs(os.path.dirname(dst))
# TODO implement file type filtering to only get files we want and skip ones we don't care about like *.txt, *.ini, etc.
# if not fnmatch.fnmatch(missingFile, '.*.ini'):
# copyfile( missingFile, dst)
# copy2(missingFile, dst)
os.system('cp -v -p "' + missingFile + '" "' + dst + '"') | [
[
[
102,
104
],
[
3743,
3745
],
[
3758,
3760
],
[
3793,
3795
],
[
3805,
3807
],
[
4094,
4096
]
],
[
[
112,
119
]
],
[
[
198,
212
],
[
751,
753
],
[
980,
982
],
[
3042,
3044
]
],
[
[
214,
238
],
[
1731,
1755
],
[
3192,
3216
]
],
[
[
261,
284
],
[
2667,
2690
],
[
3167,
3190
]
],
[
[
309,
325
],
[
1237,
1253
],
[
2178,
2194
]
],
[
[
335,
358
],
[
3562,
3585
]
],
[
[
402,
418
]
],
[
[
920,
933
]
],
[
[
3377,
3400
],
[
1691,
1714
],
[
2627,
2650
]
]
] |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class PagedGraphUsers(Model):
"""PagedGraphUsers.
:param continuation_token: This will be non-null if there is another page of data. There will never be more than one continuation token returned by a request.
:type continuation_token: list of str
:param graph_users: The enumerable set of users found within a page.
:type graph_users: list of :class:`GraphUser <graph.v4_1.models.GraphUser>`
"""
_attribute_map = {
'continuation_token': {'key': 'continuationToken', 'type': '[str]'},
'graph_users': {'key': 'graphUsers', 'type': '[GraphUser]'}
}
def __init__(self, continuation_token=None, graph_users=None):
super(PagedGraphUsers, self).__init__()
self.continuation_token = continuation_token
self.graph_users = graph_users
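# Hedged usage sketch (not part of the generated file): the model just carries
# one page of users plus an optional continuation token; `users` below stands
# in for already-deserialized GraphUser instances.
# page = PagedGraphUsers(continuation_token=["<token>"], graph_users=users)
# has_more_pages = bool(page.continuation_token)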
| [
[
[
588,
593
],
[
618,
623
]
],
[
[
602,
617
],
[
1274,
1289
]
]
] |
"""Decoder portion of the model."""
import torch
import torch.nn.functional as F
from torch import nn
def _build_base_decoder():
"""Builds the base decoder shared by all three decoder types."""
return nn.Sequential(nn.Conv2d(in_channels=1280, out_channels=256, kernel_size=(3, 3), stride=1, padding=1),
nn.BatchNorm2d(num_features=256), nn.ReLU())
class Decoders(nn.Module):
"""Module which contains all three decoders."""
def __init__(self, num_classes: int, enabled_tasks: (bool, bool, bool), output_size=(128, 256)):
super().__init__()
self._output_size = output_size
self._num_classes = num_classes
self._enabled_tasks = enabled_tasks
self._base_semseg = _build_base_decoder()
self._base_insseg = _build_base_decoder()
self._base_depth = _build_base_decoder()
kernel_size = (1, 1)
self._semsegcls = nn.Conv2d(256, self._num_classes, kernel_size)
self._inssegcls = nn.Conv2d(256, 2, kernel_size)
self._depthcls = nn.Conv2d(256, 1, kernel_size)
def set_output_size(self, size):
self._output_size = size
def forward(self, x):
"""Returns (sem seg, instance seg, depth)."""
# x: [batch x 1280 x H/8 x W/8]
sem_seg_enabled, inst_seg_enabled, depth_enabled = self._enabled_tasks
if sem_seg_enabled:
x1 = self._base_semseg(x)
x1 = self._semsegcls(x1)
x1 = F.interpolate(x1, size=self._output_size, mode='bilinear', align_corners=True)
else:
x1 = None
if inst_seg_enabled:
x2 = self._base_insseg(x)
x2 = self._inssegcls(x2)
x2 = F.interpolate(x2, size=self._output_size, mode='bilinear', align_corners=True)
else:
x2 = None
if depth_enabled:
x3 = self._base_depth(x)
x3 = self._depthcls(x3)
x3 = F.interpolate(x3, size=self._output_size, mode='bilinear', align_corners=True)
else:
x3 = None
return x1, x2, x3
if __name__ == '__main__':
# ### Shape test
output_size = (123, 432)
model = Decoders(num_classes=20, output_size=output_size)
test = torch.zeros(size=(2, 1280, 256, 256))
result = model.forward(test)
assert result[0].shape == (2, 20, *output_size), "output shape is {}".format(result[0].shape)
| [
[
[
44,
49
],
[
2240,
2245
]
],
[
[
57,
81
],
[
1477,
1478
],
[
1714,
1715
],
[
1946,
1947
]
],
[
[
100,
102
],
[
401,
403
],
[
212,
214
],
[
226,
228
],
[
339,
341
],
[
373,
375
],
[
924,
926
],
[
997,
999
],
[
1053,
1055
]
],
[
[
109,
128
],
[
747,
766
],
[
797,
816
],
[
846,
865
]
],
[
[
392,
400
],
[
2179,
2187
]
],
[
[
2142,
2153
],
[
2216,
2227
],
[
2350,
2361
]
],
[
[
2171,
2176
],
[
2291,
2296
]
],
[
[
2233,
2237
],
[
2305,
2309
]
],
[
[
2282,
2288
],
[
2322,
2328
],
[
2392,
2398
]
]
] |
class Queue:
"""
    A class used to represent a Queue for storing processes
...
Attributes
----------
processList : list
The list which would store the process currently in line for execution
priority : str
The type of process (Foreground,Background etc)
SchedulingAlgo : object
The object for corresponding scheduling algorithm to be used in this queue (FCFS,SJF etc)
Methods
-------
queue_enqueue(pList,process)
Uses the scheduling algorithm object to insert process into process list
queue_dequeue(pList,quantum)
Uses the scheduling algorithm object to remove process from process list
size()
Returns the size of process list
isEmpty()
Checks whether the process list is empty or not
front()
Returns the first element in process list if its not empty.
"""
def __init__(self,type):
"""
Parameters
----------
type : str
The type of process (Foreground,Background etc)
"""
self.processList = []
self.priority = type
self.SchedulingAlgo = None
def size(self):
"""Returs the total number of processes left for execution.
Returns
-------
int :
The length of process list
"""
return len(self.processList)
def isEmpty(self):
"""Checks whether all processes have been executed or not.
Returns
-------
bool :
True if processList has 0 length , False otherwise.
"""
return self.size() == 0
def front(self):
"""Returns the first process in list
Returns
-------
object :
The first process if process list is not empty else None
"""
        return self.processList[0] if not self.isEmpty() else None
def queue_enqueue(self,process):
"""Uses the queue's scheduling algorithm enqueue method to insert
the new process into the process list.
Parameters
----------
process : object
The Process class object defining the new process to insert.
"""
self.SchedulingAlgo.enqueue(self.processList,process)
def queue_dequeue(self,quantum):
"""Takes the quantum time elapsed and removes processes from the
        process list using the scheduling algorithm's dequeue method.
Parameters
----------
quantum : int
The time elapsed
Returns
-------
removed : list
            A list of processes removed during the current cycle
quantum : int
Quantum time updated after being used by each process (Usually 0)
"""
removed,quantum = self.SchedulingAlgo.dequeue(self.processList,quantum)
return removed,quantum
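# Hedged usage sketch (not part of the original module): Queue delegates all
# ordering decisions to a scheduling-algorithm object that must expose
# enqueue(processList, process) and dequeue(processList, quantum). The FCFS
# stand-in below is illustrative only, not the project's real scheduler.
if __name__ == '__main__':
    class _FCFSStub:
        def enqueue(self, process_list, process):
            process_list.append(process)
        def dequeue(self, process_list, quantum):
            removed = [process_list.pop(0)] if process_list else []
            return removed, 0
    q = Queue("Foreground")
    q.SchedulingAlgo = _FCFSStub()
    q.queue_enqueue("P1")
    q.queue_enqueue("P2")
    print(q.size(), q.front())         # 2 P1
    print(q.queue_dequeue(quantum=4))  # (['P1'], 0)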
| [
[
[
6,
11
]
]
] |
import numpy as np
import editdistance
class Decoder():
def __init__(self, vocab):
self.vocab_list = [char for char in vocab]
def predict(self, batch_size, logits, y, lengths, y_lengths, n_show=5):
decoded = self.decode(logits, lengths)
cursor = 0
gt = []
n = min(n_show, logits.size(1))
samples = []
for b in range(batch_size):
y_str = ''.join([self.vocab_list[ch] for ch in y[cursor: cursor + y_lengths[b]]])
gt.append(y_str)
cursor += y_lengths[b]
if b < n:
samples.append([y_str, decoded[b]])
return decoded, gt, samples
def decode(self, logits, seq_lens):
raise NotImplementedError
def wer(self, s1, s2):
s1_words, s2_words = s1.split(), s2.split()
distance = editdistance.eval(s1_words, s2_words)
return distance / max(len(s1_words), len(s2_words))
def cer(self, s1, s2):
s1, s2 = s1.replace(' ', ''), s2.replace(' ', '')
distance = editdistance.eval(s1, s2)
return distance / max(len(s1), len(s2))
def cer_batch(self, decoded, gt):
return self.compare_batch(decoded, gt, self.cer)
def wer_batch(self, decoded, gt):
return self.compare_batch(decoded, gt, self.wer)
def compare_batch(self, decoded, gt, func):
assert len(decoded) == len(gt), f'batch size mismatch: {len(decoded)}!={len(gt)}'
results = []
for i, batch in enumerate(decoded):
for sentence in range(len(batch)):
error = func(decoded[i][sentence], gt[i])
results.append(error)
return np.mean(results)
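# Hedged worked example (not part of the original module): wer() and cer() only
# need a vocabulary string for construction, so they can be exercised directly.
if __name__ == '__main__':
    dec = Decoder(vocab="abcdefghijklmnopqrstuvwxyz '")
    # one substituted word out of three -> WER ~ 0.33
    print(dec.wer("the cat sat", "the cat sit"))
    # one substituted character out of nine (spaces removed) -> CER ~ 0.11
    print(dec.cer("the cat sat", "the cat sit"))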
| [
[
[
7,
18
],
[
1674,
1676
]
],
[
[
26,
38
],
[
840,
852
],
[
1044,
1056
]
],
[
[
47,
54
]
]
] |
# MIT License
#
# Copyright (c) 2018-2019 Red Hat, Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
A book with our finest spells
"""
import shutil
import subprocess
from pathlib import Path
from click.testing import CliRunner
from packit.cli.packit_base import packit_base
from packit.config import Config
from packit.utils import cwd, run_command
TESTS_DIR = Path(__file__).parent
DATA_DIR = TESTS_DIR / "data"
UPSTREAM = DATA_DIR / "upstream_git"
UPSTREAM_WITH_MUTLIPLE_SOURCES = DATA_DIR / "upstream_git_with_multiple_sources"
EMPTY_CHANGELOG = DATA_DIR / "empty_changelog"
DISTGIT = DATA_DIR / "dist_git"
UP_COCKPIT_OSTREE = DATA_DIR / "cockpit-ostree"
UP_OSBUILD = DATA_DIR / "osbuild"
UP_SNAPD = DATA_DIR / "snapd"
TARBALL_NAME = "beerware-0.1.0.tar.gz"
SOURCEGIT_UPSTREAM = DATA_DIR / "sourcegit" / "upstream"
SOURCEGIT_SOURCEGIT = DATA_DIR / "sourcegit" / "source_git"
DG_OGR = DATA_DIR / "dg-ogr"
SPECFILE = DATA_DIR / "upstream_git/beer.spec"
UPSTREAM_SPEC_NOT_IN_ROOT = DATA_DIR / "spec_not_in_root/upstream"
def git_set_user_email(directory):
subprocess.check_call(
["git", "config", "user.email", "test@example.com"], cwd=directory
)
subprocess.check_call(
["git", "config", "user.name", "Packit Test Suite"], cwd=directory
)
def get_test_config():
conf = Config()
conf._pagure_user_token = "test"
conf._github_token = "test"
return conf
def git_add_and_commit(directory, message):
subprocess.check_call(["git", "add", "."], cwd=directory)
subprocess.check_call(["git", "commit", "-m", message], cwd=directory)
def initiate_git_repo(
directory,
tag=None,
upstream_remote="https://lol.wat",
push=False,
copy_from: str = None,
remotes=None,
):
"""
Initiate a git repo for testing.
:param directory: path to the git repo
:param tag: if set, tag the latest commit with this tag
:param upstream_remote: name of the origin - upstream remote (not used when remotes are set)
:param remotes: provide list of tuples (name, remote_url)
:param push: push to the remote?
:param copy_from: source tree to copy to the newly created git repo
"""
if remotes is None:
remotes = [("origin", upstream_remote)]
if copy_from:
shutil.copytree(copy_from, directory)
subprocess.check_call(["git", "init", "."], cwd=directory)
Path(directory).joinpath("README").write_text("Best upstream project ever!")
git_set_user_email(directory)
subprocess.check_call(["git", "add", "."], cwd=directory)
subprocess.check_call(["git", "commit", "-m", "initial commit"], cwd=directory)
if tag:
subprocess.check_call(
["git", "tag", "-a", "-m", f"tag {tag}, tests", tag], cwd=directory
)
for name, url in remotes:
subprocess.check_call(["git", "remote", "add", name, url], cwd=directory)
if push:
subprocess.check_call(["git", "fetch", "origin"], cwd=directory)
# tox strips some env vars so your user gitconfig is not picked up
# hence we need to be very explicit with git commands here
subprocess.check_call(
["git", "push", "--tags", "-u", "origin", "master:master"], cwd=directory
)
def prepare_dist_git_repo(directory, push=True):
subprocess.check_call(["git", "branch", "f30"], cwd=directory)
if push:
subprocess.check_call(["git", "push", "-u", "origin", "f30:f30"], cwd=directory)
def call_packit(fnc=None, parameters=None, envs=None, working_dir=None):
working_dir = working_dir or "."
fnc = fnc or packit_base
runner = CliRunner()
envs = envs or {}
parameters = parameters or []
# catch exceptions enables debugger
with cwd(working_dir):
return runner.invoke(fnc, args=parameters, env=envs, catch_exceptions=False)
def build_srpm(path: Path):
run_command(["rpmbuild", "--rebuild", str(path)])
def can_a_module_be_imported(module_name):
""" can a module be imported? """
try:
__import__(module_name)
return True
except ImportError:
return False
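# Hedged usage sketch (not part of the original helpers): a typical way the
# fixtures above are combined in a test function. `tmp_path` stands in for
# pytest's tmp_path fixture; the tag and file names are illustrative.
# def test_upstream_fixture(tmp_path):
#     upstream_copy = tmp_path / "upstream_git"
#     initiate_git_repo(upstream_copy, tag="0.1.0", copy_from=str(UPSTREAM))
#     (upstream_copy / "new_file").write_text("content")
#     git_add_and_commit(upstream_copy, message="add new_file")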
| [
[
[
1159,
1165
],
[
3289,
3295
]
],
[
[
1173,
1183
],
[
2083,
2093
],
[
2191,
2201
],
[
2475,
2485
],
[
2537,
2547
],
[
3331,
3341
],
[
3509,
3519
],
[
3571,
3581
],
[
3671,
3681
],
[
3823,
3833
],
[
3919,
3929
],
[
4134,
4144
],
[
4308,
4318
],
[
4392,
4402
]
],
[
[
1204,
1208
],
[
1382,
1386
],
[
3394,
3398
],
[
4870,
4874
]
],
[
[
1236,
1245
],
[
4627,
4636
]
],
[
[
1282,
1293
],
[
4602,
4613
]
],
[
[
1320,
1326
],
[
2331,
2337
]
],
[
[
1352,
1355
],
[
4744,
4747
]
],
[
[
1357,
1368
],
[
4881,
4892
]
],
[
[
1370,
1379
],
[
1415,
1424
]
],
[
[
1404,
1412
],
[
1445,
1453
],
[
1504,
1512
],
[
1570,
1578
],
[
1609,
1617
],
[
1651,
1659
],
[
1692,
1700
],
[
1724,
1732
],
[
1803,
1811
],
[
1861,
1869
],
[
1908,
1916
],
[
1939,
1947
],
[
2003,
2011
]
],
[
[
1434,
1442
]
],
[
[
1471,
1501
]
],
[
[
1552,
1567
]
],
[
[
1599,
1606
]
],
[
[
1631,
1648
]
],
[
[
1679,
1689
]
],
[
[
1713,
1721
]
],
[
[
1743,
1755
]
],
[
[
1782,
1800
]
],
[
[
1839,
1858
]
],
[
[
1899,
1905
]
],
[
[
1928,
1936
]
],
[
[
1975,
2000
]
],
[
[
2048,
2066
],
[
3475,
3493
]
],
[
[
2301,
2316
]
],
[
[
2431,
2449
]
],
[
[
2614,
2631
]
],
[
[
4259,
4280
]
],
[
[
4479,
4490
]
],
[
[
4853,
4863
]
],
[
[
4937,
4961
]
]
] |
from django.db import models
from accounts.models import Account
from store.models import Product, Variation
# Create your models here.
class Payment(models.Model):
user = models.ForeignKey(Account, on_delete=models.CASCADE)
payment_id = models.CharField(max_length=100)
payment_method = models.CharField(max_length=100)
amount_paid = models.CharField(max_length=100)
status = models.CharField(max_length=100)
created_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.payment_id
class Order(models.Model):
STATUS = (
('New', 'New'),
('Accepted', 'Accepted'),
('Completed', 'Completed'),
('Cancelled', 'Cancelled'),
)
user = models.ForeignKey(Account, on_delete=models.SET_NULL, null=True)
payment = models.ForeignKey(Payment, on_delete=models.SET_NULL, blank=True, null=True)
order_number = models.CharField(max_length=20)
first_name = models.CharField(max_length=50)
last_name= models.CharField(max_length=50)
phone = models.CharField(max_length=15)
email = models.EmailField(max_length=50)
address_line_1 = models.CharField(max_length=50)
address_line_2 = models.CharField(max_length=50, blank=True)
country = models.CharField(max_length=50)
state = models.CharField(max_length=50)
city = models.CharField(max_length=50)
order_note = models.CharField(max_length=100, blank=True)
order_total = models.FloatField()
tax = models.FloatField()
status = models.CharField(max_length=10, choices=STATUS, default='New')
ip = models.CharField(blank=True, max_length=20)
is_ordered = models.BooleanField(default=False)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def full_name(self):
return f'{self.first_name} {self.last_name}'
def full_address(self):
return f'{self.address_line_1} {self.address_line_2}'
def __str__(self):
return self.first_name
class OrderProduct(models.Model):
order = models.ForeignKey(Order, on_delete=models.CASCADE)
payment = models.ForeignKey(Payment, on_delete=models.SET_NULL, blank=True, null=True)
user = models.ForeignKey(Account, on_delete=models.CASCADE)
product = models.ForeignKey(Product, on_delete=models.CASCADE)
variations = models.ManyToManyField(Variation, blank=True)
quantity = models.IntegerField()
product_price = models.FloatField()
ordered = models.BooleanField(default=False)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return self.product.product_name
| [
[
[
22,
28
],
[
152,
158
],
[
178,
184
],
[
215,
221
],
[
248,
254
],
[
302,
308
],
[
353,
359
],
[
399,
405
],
[
449,
455
],
[
557,
563
],
[
735,
741
],
[
772,
778
],
[
814,
820
],
[
851,
857
],
[
910,
916
],
[
959,
965
],
[
1006,
1012
],
[
1050,
1056
],
[
1094,
1100
],
[
1148,
1154
],
[
1201,
1207
],
[
1259,
1265
],
[
1303,
1309
],
[
1346,
1352
],
[
1395,
1401
],
[
1458,
1464
],
[
1488,
1494
],
[
1521,
1527
],
[
1593,
1599
],
[
1654,
1660
],
[
1706,
1712
],
[
1763,
1769
],
[
2045,
2051
],
[
2072,
2078
],
[
2107,
2113
],
[
2137,
2143
],
[
2174,
2180
],
[
2225,
2231
],
[
2262,
2268
],
[
2292,
2298
],
[
2329,
2335
],
[
2362,
2368
],
[
2423,
2429
],
[
2465,
2471
],
[
2499,
2505
],
[
2551,
2557
],
[
2608,
2614
]
],
[
[
57,
64
],
[
196,
203
],
[
753,
760
],
[
2243,
2250
]
],
[
[
90,
97
],
[
2310,
2317
]
],
[
[
99,
108
],
[
2385,
2394
]
],
[
[
144,
151
],
[
832,
839
],
[
2155,
2162
]
],
[
[
551,
556
],
[
2090,
2095
]
],
[
[
2032,
2044
]
]
] |
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Clout9.settings.production")
app = Celery('Clout9')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
# app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
| [
[
[
23,
38
]
],
[
[
46,
48
],
[
176,
178
]
],
[
[
68,
74
],
[
260,
266
]
],
[
[
99,
107
]
],
[
[
254,
257
],
[
374,
377
],
[
482,
485
]
],
[
[
506,
516
]
]
] |
# blog/asgi.py
import os
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "blog.settings")
from django.conf import settings
django.setup()
from django.core.asgi import get_asgi_application
from channels.security.websocket import OriginValidator
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
from django.urls import re_path
from app.website.consumers import ExampleConsumer
application = ProtocolTypeRouter(
{
# Django's ASGI application to handle traditional HTTP requests
"http": get_asgi_application(),
# WebSocket handler
"websocket": OriginValidator(AuthMiddlewareStack(
URLRouter(
[
re_path(r"^ws/example/$", ExampleConsumer.as_asgi()),
]
)
), settings.ALLOWED_HOSTS)
}
)
| [
[
[
22,
24
],
[
40,
42
]
],
[
[
32,
38
],
[
138,
144
]
],
[
[
129,
137
],
[
844,
852
]
],
[
[
182,
202
],
[
576,
596
]
],
[
[
243,
258
],
[
649,
664
]
],
[
[
285,
304
],
[
665,
684
]
],
[
[
334,
352
],
[
462,
480
]
],
[
[
354,
363
],
[
698,
707
]
],
[
[
388,
395
],
[
747,
754
]
],
[
[
430,
445
],
[
773,
788
]
],
[
[
448,
459
]
]
] |
valor = 0
num = [[],[]]
for c in range (0,7):
    valor = int(input(f'Enter value #{c+1}: '))
if valor % 2 == 0:
num[0].append(valor)
else:
num[1].append(valor)
print('-='*30)
print(f'The even numbers were {num[0]}\nThe odd numbers were {num[1]}.')
| [
[
[
0,
5
]
],
[
[
10,
13
],
[
128,
131
],
[
167,
170
],
[
235,
238
],
[
271,
274
]
],
[
[
29,
30
],
[
81,
82
]
],
[
[
51,
56
],
[
104,
109
],
[
142,
147
],
[
181,
186
]
]
] |
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'dnsprovejs'
copyright = '2018, Makoto Inoue'
author = 'Makoto Inoue'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = ''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx_js', 'sphinx.ext.autodoc']
root_for_relative_js_paths = '../lib'
js_source_path = ['../lib', '../lib/oracle', '../lib/dns']
primary_domain = 'js'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'dnsprovejsdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'dnsprovejs.tex', 'dnsprovejs Documentation',
'Makoto Inoue', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'dnsprovejs', 'dnsprovejs Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'dnsprovejs', 'dnsprovejs Documentation',
author, 'dnsprovejs', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
| [
[
[
714,
721
],
[
5031,
5038
]
],
[
[
737,
746
]
],
[
[
770,
776
],
[
4481,
4487
],
[
4819,
4825
]
],
[
[
819,
826
]
],
[
[
881,
888
]
],
[
[
1221,
1231
]
],
[
[
1270,
1296
]
],
[
[
1308,
1322
]
],
[
[
1367,
1381
]
],
[
[
1463,
1477
]
],
[
[
1591,
1604
]
],
[
[
1655,
1665
],
[
4117,
4127
],
[
4421,
4431
],
[
4760,
4770
]
],
[
[
1931,
1939
]
],
[
[
2140,
2156
]
],
[
[
2260,
2274
]
],
[
[
2497,
2507
]
],
[
[
2952,
2968
]
],
[
[
3468,
3485
]
],
[
[
3586,
3600
]
],
[
[
4092,
4107
]
],
[
[
4402,
4411
]
],
[
[
4733,
4750
]
],
[
[
5018,
5028
]
],
[
[
5279,
5297
]
]
] |
import os
import time
from membound_cases import memory_bound_cases_list
from utils import (arch_name, datatime_with_format, dtype2str, dump2json,
geometric_mean, md_table_header, scaled_repeat_times,
size2str)
import taichi as ti
class MemoryBound:
suite_name = 'memorybound'
supported_archs = [ti.x64, ti.cuda]
test_cases = memory_bound_cases_list
test_dtype_list = [ti.i32, ti.i64, ti.f32, ti.f64]
test_dsize_list = [
(4**i) * 1024 # kibibytes(KiB) = 1024
for i in range(1, 10) # [4KB,16KB...256MB]
]
basic_repeat_times = 10
evaluator = [geometric_mean]
def __init__(self, arch):
self._arch = arch
self._cases_impl = []
for case in self.test_cases:
for dtype in self.test_dtype_list:
impl = CaseImpl(case, arch, dtype, self.test_dsize_list,
self.evaluator)
self._cases_impl.append(impl)
def run(self):
for case in self._cases_impl:
case.run()
def save_as_json(self, arch_dir='./'):
#folder of suite
suite_path = os.path.join(arch_dir, self.suite_name)
os.makedirs(suite_path)
#json files
self._save_suite_info_as_json(suite_path)
self._save_cases_info_as_json(suite_path)
def save_as_markdown(self, arch_dir='./'):
current_time = datatime_with_format()
commit_hash = ti.core.get_commit_hash() #[:8]
file_name = f'{self.suite_name}.md'
file_path = os.path.join(arch_dir, file_name)
with open(file_path, 'w') as f:
lines = [
f'commit_hash: {commit_hash}\n', f'datatime: {current_time}\n'
]
lines += self._get_markdown_lines()
for line in lines:
print(line, file=f)
def _save_suite_info_as_json(self, suite_path='./'):
info_dict = {
'cases': [func.__name__ for func in self.test_cases],
'dtype': [dtype2str(dtype) for dtype in self.test_dtype_list],
'dsize': [size for size in self.test_dsize_list],
'repeat': [
scaled_repeat_times(self._arch, size, self.basic_repeat_times)
for size in self.test_dsize_list
],
'evaluator': [func.__name__ for func in self.evaluator]
}
info_path = os.path.join(suite_path, '_info.json')
with open(info_path, 'w') as f:
print(dump2json(info_dict), file=f)
def _save_cases_info_as_json(self, suite_path='./'):
for case in self.test_cases: #for case [fill,saxpy,reduction]
results_dict = {}
for impl in self._cases_impl: #find [ti.i32, ti.i64, ti.f32, ti.f64]
if impl._name != case.__name__:
continue
result_name = dtype2str(impl._test_dtype)
results_dict[result_name] = impl.get_results_dict()
case_path = os.path.join(suite_path, (case.__name__ + '.json'))
with open(case_path, 'w') as f:
case_str = dump2json(results_dict)
print(case_str, file=f)
def _get_markdown_lines(self):
lines = []
lines += md_table_header(self.suite_name, self._arch,
self.test_dsize_list, self.basic_repeat_times,
self.evaluator)
result_header = '|kernel elapsed time(ms)' + ''.join(
'|' for i in range(
len(self.test_dsize_list) + len(MemoryBound.evaluator)))
lines += [result_header]
for case in self._cases_impl:
lines += case.get_markdown_lines()
lines.append('')
return lines
class CaseImpl:
def __init__(self, func, arch, test_dtype, test_dsize_list, evaluator):
self._func = func
self._name = func.__name__
self._arch = arch
self._test_dtype = test_dtype
self._test_dsize_list = test_dsize_list
self._min_time_in_us = [] #test results
self._evaluator = evaluator
def run(self):
ti.init(kernel_profiler=True, arch=self._arch)
print("TestCase[%s.%s.%s]" % (self._func.__name__, arch_name(
self._arch), dtype2str(self._test_dtype)))
for test_dsize in self._test_dsize_list:
print("test_dsize = %s" % (size2str(test_dsize)))
self._min_time_in_us.append(
self._func(self._arch, self._test_dtype, test_dsize,
MemoryBound.basic_repeat_times))
time.sleep(0.2)
ti.reset()
def get_markdown_lines(self):
string = '|' + self._name + '.' + dtype2str(self._test_dtype) + '|'
string += ''.join(
str(round(time, 4)) + '|' for time in self._min_time_in_us)
string += ''.join(
str(round(item(self._min_time_in_us), 4)) + '|'
for item in self._evaluator)
return [string]
def get_results_dict(self):
results_dict = {}
for i in range(len(self._test_dsize_list)):
dsize = self._test_dsize_list[i]
repeat = scaled_repeat_times(self._arch, dsize,
MemoryBound.basic_repeat_times)
elapsed_time = self._min_time_in_us[i]
item_name = size2str(dsize).replace('.0', '')
item_dict = {
'dsize_byte': dsize,
'repeat': repeat,
'elapsed_time_ms': elapsed_time
}
results_dict[item_name] = item_dict
return results_dict
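# Hedged driver sketch (not part of the original module): one way the suite
# above could be exercised; the results directory name is illustrative.
# for arch in MemoryBound.supported_archs:
#     suite = MemoryBound(arch)
#     suite.run()
#     arch_dir = os.path.join('./results', arch_name(arch))
#     os.makedirs(arch_dir, exist_ok=True)
#     suite.save_as_json(arch_dir)
#     suite.save_as_markdown(arch_dir)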
| [
[
[
7,
9
],
[
1158,
1160
],
[
1206,
1208
],
[
1563,
1565
],
[
2415,
2417
],
[
3010,
3012
]
],
[
[
17,
21
],
[
4620,
4624
]
],
[
[
50,
73
],
[
380,
403
]
],
[
[
93,
102
],
[
4261,
4270
]
],
[
[
104,
124
],
[
1421,
1441
]
],
[
[
126,
135
],
[
2035,
2044
],
[
2890,
2899
],
[
4297,
4306
],
[
4732,
4741
]
],
[
[
137,
146
],
[
2512,
2521
],
[
3133,
3142
]
],
[
[
167,
181
],
[
633,
647
]
],
[
[
183,
198
],
[
3269,
3284
]
],
[
[
200,
219
],
[
2190,
2209
],
[
5194,
5213
]
],
[
[
240,
248
],
[
4415,
4423
],
[
5381,
5389
]
],
[
[
258,
270
],
[
346,
348
],
[
354,
356
],
[
427,
429
],
[
435,
437
],
[
443,
445
],
[
451,
453
],
[
1466,
1468
],
[
4155,
4157
],
[
4644,
4646
]
],
[
[
279,
290
],
[
3586,
3597
],
[
4575,
4586
],
[
5274,
5285
]
],
[
[
3783,
3791
],
[
843,
851
]
]
] |
from flask_security.utils import hash_password
from flask_smorest import Blueprint, abort
from ..models.user import User
from ..schemas.paging import PageInSchema, paginate
from ..schemas.user import UserPageOutSchema, UserSchema
from .methodviews import ProtectedMethodView
blueprint = Blueprint('users', 'user')
@blueprint.route('', endpoint='list')
class UserListAPI(ProtectedMethodView):
@blueprint.arguments(PageInSchema(), location='headers')
@blueprint.response(UserPageOutSchema)
def get(self, pagination):
"""List users"""
return paginate(User.select(), pagination)
@blueprint.arguments(UserSchema)
@blueprint.response(UserSchema)
def post(self, args):
"""Create user"""
user = User(**args)
user.password = hash_password(user.password)
user.save()
return user
@blueprint.route('/<user_id>', endpoint='detail')
class UserAPI(ProtectedMethodView):
@blueprint.response(UserSchema)
def get(self, user_id):
"""Get user details"""
try:
user = User.get(id=user_id)
except User.DoesNotExist:
abort(404, message='User not found')
return user
@blueprint.arguments(UserSchema(partial=True))
@blueprint.response(UserSchema)
def patch(self, args, user_id):
try:
user = User.get(id=user_id)
except User.DoesNotExist:
abort(404, message='User not found')
for field in args:
setattr(user, field, args[field])
if 'password' in args:
user.password = hash_password(user.password)
user.save()
return user
@blueprint.response(UserSchema)
def delete(self, user_id):
try:
user = User.get(id=user_id)
except User.DoesNotExist:
abort(404, message='User not found')
user.delete_instance()
return user
| [
[
[
33,
46
],
[
785,
798
],
[
1585,
1598
]
],
[
[
73,
82
],
[
289,
298
]
],
[
[
84,
89
],
[
1136,
1141
],
[
1416,
1421
],
[
1821,
1826
]
],
[
[
117,
121
],
[
580,
584
],
[
748,
752
],
[
1069,
1073
],
[
1105,
1109
],
[
1349,
1353
],
[
1385,
1389
],
[
1754,
1758
],
[
1790,
1794
]
],
[
[
151,
163
],
[
421,
433
]
],
[
[
165,
173
],
[
571,
579
]
],
[
[
201,
218
],
[
481,
498
]
],
[
[
220,
230
],
[
633,
643
],
[
669,
679
],
[
966,
976
],
[
1219,
1229
],
[
1269,
1279
],
[
1679,
1689
]
],
[
[
256,
275
],
[
374,
393
],
[
920,
939
]
],
[
[
277,
286
],
[
319,
328
],
[
401,
410
],
[
462,
471
],
[
613,
622
],
[
650,
659
],
[
857,
866
],
[
947,
956
],
[
1199,
1208
],
[
1250,
1259
],
[
1660,
1669
]
],
[
[
362,
373
]
],
[
[
912,
919
]
]
] |
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 22 18:44:02 2017
@author: Tirthajyoti Sarkar
Simple selection sort with counter for total number of operations (finding minimum and swapping)
Accepts user input on minimum and maximum bound of the array and the size of the array.
"""
import random
def find_min(array):
n=len(array)
r = array[0]
count=0
for i in range(1,n):
count+=1
if r>array[i]:
r=array[i]
return(r,count)
def selection_sort(array):
n=len(array)
num_op=0
# Iterate over the length of the array, pushing smaller values to the left
for i in range(n):
# Scan the array from i-th element (where the iterator is currently) to the end for minimum
m,c_min=find_min(array[i:n])
# IMPORTANT: Get the index of the minimum element w.r.t. to the main array
m_index=array[i:n].index(m)+i
# If the first element of the unsorted portion i.e. i-th element> minimum, then SWAP
if (array[i]>m):
# Print statement for examining minimum and its index, Troubleshooting
#print("Minimum found {} at position {}. Swapping positions {} and {}".format(m,m_index,i,m_index))
temp=array[i]
array[i]=m
array[m_index]=temp
num_op+=(c_min+1)
print(array)
else:
pass
return (array,num_op)
# User inputs for generating the random arrays
mini = int(input("Enter the minimum bound:"))
maxi = int(input("Enter the maximum bound:"))
num = int(input("Enter the size:"))
# Create random array based on user-specified minimum/maximum bounds and number of elements
a= []
for i in range(num):
a.append(random.randint(mini,maxi))
print("\nInitial array:",a)
# Get the sorted array back along with the count of # of operations it took to sort
sorted_array,n_op=selection_sort(a)
print("Sorted array: {}\nTook {} operations".format(sorted_array,n_op))
| [
[
[
288,
294
],
[
1733,
1739
]
],
[
[
300,
308
],
[
747,
755
]
],
[
[
476,
490
],
[
1892,
1906
]
],
[
[
1472,
1476
],
[
1748,
1752
]
],
[
[
1518,
1522
],
[
1753,
1757
]
],
[
[
1564,
1567
],
[
1714,
1717
]
],
[
[
1693,
1694
],
[
1724,
1725
],
[
1786,
1787
],
[
1907,
1908
]
],
[
[
1703,
1704
]
],
[
[
1874,
1886
],
[
1962,
1974
]
],
[
[
1887,
1891
],
[
1975,
1979
]
]
] |
###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('hyperlink23.xlsx')
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with hyperlinks."""
workbook = Workbook(self.got_filename)
# Turn off default URL format for testing.
workbook.default_url_format = None
worksheet = workbook.add_worksheet()
worksheet.write_url('A1', 'https://en.wikipedia.org/wiki/Microsoft_Excel#Data_storage_and_communication', None, 'Display text')
workbook.close()
self.assertExcelEqual()
| [
[
[
249,
268
],
[
331,
350
]
],
[
[
293,
301
],
[
638,
646
]
],
[
[
310,
330
]
]
] |
import boto3
import pickle
import logging
from datetime import date
from pyspark.sql.functions import udf
from pyspark.sql.types import ArrayType, StringType
from generate_common_words import dump_pickle
import config as conf
'''
Assign Top 500 Stackoverflow tags to job postings
'''
# ===== Logger Configs =====
TS = date.today().strftime('%y%m%d')
logger = logging.getLogger('jd_logger')
logger.setLevel(logging.INFO)
fh = logging.FileHandler(conf.LOG_DIR + TS + '_batch_process.log')
fh.setLevel(logging.INFO)
logger.addHandler(fh)
def read_pickle(bucket, key):
s3 = boto3.resource('s3')
pickled = pickle.loads(s3.Bucket(bucket)\
.Object(key)\
.get()['Body']\
.read())
return pickled
def filter_common_words(tokens, word_list):
"""
Filter out words that appear in many of the documents
Args:
tokens: a list of word lemmas
word_list: a list of common words
Returns:
a set of word lemmas with the common words removed
"""
return list(set([word for word in tokens if word not in word_list]))
def select_tag_words(tokens, tag_list):
"""
Match Stackoverflow tags to word lemmas
Args:
tokens: a list of word lemmas
tag_list: a list of tags
Returns:
a list of tags for each job posting
"""
return [tag for tag in tag_list if tag in tokens]
def assign_tags(jd):
"""
Assign Stackoverflow tags and construct a set of keywords
Args:
jd: cleaned job posting dataframe
Returns:
a dataframe with columns containing keywords and tags
"""
logger.info('[STARTING]: Assigning Tags')
common_words = read_pickle(conf.PARQUET_BUCKET, conf.COMMON_WORDS_PATH)
stack_tags = read_pickle(conf.PARQUET_BUCKET, conf.TAG_PATH)
logger.info('[PROCESSING]: Removing Common Words')
cw_remover = udf(lambda body: filter_common_words(body, common_words), ArrayType(StringType()))
jd_keywords = jd.withColumn('keywords', cw_remover('stemmed'))
logger.info('[PROCESSING]: Getting Tags')
tagger = udf(lambda body: select_tag_words(body, stack_tags), ArrayType(StringType()))
jd_tags = jd_keywords.withColumn('tags', tagger('keywords'))
logger.info('[Finished]: Assigning Tags')
return jd_tags
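# Hedged worked example (not part of the original job): filter_common_words and
# select_tag_words are plain Python helpers, so they can be sanity-checked
# without Spark. The token, common-word and tag lists below are illustrative.
if __name__ == '__main__':
    sample_tokens = ['experience', 'python', 'airflow', 'team', 'sql']
    sample_common = ['experience', 'team']
    sample_tags = ['python', 'sql', 'java']
    keywords = filter_common_words(sample_tokens, sample_common)
    print(sorted(keywords))                          # ['airflow', 'python', 'sql']
    print(select_tag_words(keywords, sample_tags))   # ['python', 'sql']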
| [
[
[
7,
12
],
[
575,
580
]
],
[
[
20,
26
],
[
610,
616
]
],
[
[
34,
41
],
[
359,
366
],
[
406,
413
],
[
425,
432
],
[
499,
506
]
],
[
[
63,
67
],
[
318,
322
]
],
[
[
102,
105
],
[
1921,
1924
],
[
2131,
2134
]
],
[
[
136,
145
],
[
1979,
1988
],
[
2184,
2193
]
],
[
[
147,
157
],
[
1989,
1999
],
[
2194,
2204
]
],
[
[
192,
203
]
],
[
[
211,
225
],
[
445,
449
],
[
1738,
1742
],
[
1759,
1763
],
[
1812,
1816
],
[
1833,
1837
]
],
[
[
313,
315
],
[
460,
462
]
],
[
[
350,
356
],
[
390,
396
],
[
513,
519
],
[
1665,
1671
],
[
1853,
1859
],
[
2076,
2082
],
[
2279,
2285
]
],
[
[
420,
422
],
[
487,
489
],
[
531,
533
]
],
[
[
540,
551
],
[
1726,
1737
],
[
1800,
1811
]
],
[
[
792,
811
],
[
1938,
1957
]
],
[
[
1146,
1162
],
[
2148,
2164
]
],
[
[
1439,
1450
]
]
] |
"""
WSGI config for simplerest project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "simplerest.settings")
application = get_wsgi_application()
| [
[
[
234,
236
],
[
289,
291
]
],
[
[
267,
287
],
[
375,
395
]
],
[
[
361,
372
]
]
] |
import json
import os
from mako.template import Template
from models.pomodoro_model import PomodoroModel
from datetime import datetime
class ExportPomsResource:
def on_get(self, req, resp):
"""Handles GET requests"""
resp.content_type = 'text/html'
dir_path = os.path.dirname(os.path.realpath(__file__))
user_login_template = Template(
filename=dir_path + '/export_poms_view.mako')
resp.body = user_login_template.render()
def on_post(self, req, resp):
"""Handles POST requests"""
start_date = req.get_param('start_date')
end_date = req.get_param('end_date')
# Query poms within start and end dates
poms = req.context['session'].query(
PomodoroModel).filter(PomodoroModel.created <= end_date). \
filter(PomodoroModel.created >= start_date).filter_by(
user_id=req.context['user'].id).order_by(
PomodoroModel.created, PomodoroModel.start_time).all()
data = {'poms': []}
for row in poms:
pom = {
'created': datetime.strftime(row.created, '%Y-%m-%d'),
'title': row.task,
'start_time': row.start_time.strftime('%I:%M%p'),
'end_time': row.end_time.strftime('%I:%M%p'),
'distractions': row.distractions,
'pom_success': row.pom_success,
'review': row.review,
'flags': []
}
for flag in row.flags:
pom['flags'].append(flag.flag_type)
data['poms'].append(pom)
filename = str(start_date) + '-' + str(
end_date) + '_Arin_Pom_Sheets.json'
resp.body = json.dumps(data, indent=2)
resp.downloadable_as = filename
resp.content_type = 'application/octet-stream'
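
# Hedged wiring sketch (assumption, not taken from the original project): a
# resource like ExportPomsResource is typically mounted on a Falcon app roughly
# as below. The route path is illustrative, and the app would also need
# middleware that populates req.context['session'] and req.context['user'],
# which this resource relies on.
#
#   import falcon
#   app = falcon.App()  # falcon.API() on Falcon 1.x/2.x
#   app.add_route('/export_poms', ExportPomsResource())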
| [
[
[
7,
11
],
[
1730,
1734
]
],
[
[
19,
21
],
[
292,
294
],
[
308,
310
]
],
[
[
48,
56
],
[
366,
374
]
],
[
[
91,
104
],
[
755,
768
],
[
777,
790
],
[
834,
847
],
[
948,
961
],
[
971,
984
]
],
[
[
126,
134
],
[
1104,
1112
]
],
[
[
143,
161
]
]
] |
import csv
import json
def read_data_file(filename):
all_records = []
with open(filename, mode='r') as csv_file:
csv_reader = csv.reader(csv_file)
row_count = 0
for row in csv_reader:
all_records.append(row)
return all_records
def parse_play_fields(play_fields):
plays = []
for p in play_fields:
temp = {}
temp["event_count"] = int(p[0])
temp["inning_number"] = int(p[1])
temp["home_field_indicator"] = int(p[2])
temp["player_code"] = p[3]
temp["count_at_action"] = p[4]
temp["all_pitches"] = p[5]
temp["play_events"] = p[6]
plays.append(temp)
return plays
def parse_sub_fields(sub_fields):
subs = []
for s in sub_fields:
temp = {}
temp["event_count"] = int(s[0])
temp["player_code"] = s[1]
temp["name"] = s[2]
temp["home_field_indicator"] = int(s[3])
temp["batting_order"] = int(s[4])
temp["field_position"] = int(s[5])
subs.append(temp)
return subs
def parse_info_fields(info_fields):
infos = []
for i in info_fields:
temp = {}
temp[i[0]] = i[1]
infos.append(temp)
return infos
def parse_start_fields(start_fields):
starts = []
for s in start_fields:
temp = {}
temp["player_code"] = s[0]
temp["name"] = s[1]
temp["home_field_indicator"] = int(s[2])
temp["batting_order"] = int(s[3])
temp["field_position"] = int(s[4])
starts.append(temp)
return starts
def parse_data_fields(data_fields):
datas = []
for d in data_fields:
temp = {}
temp["type"] = d[0]
temp["player_code"] = d[1]
temp["earned_runs"] = int(d[2])
datas.append(temp)
return datas
def get_fields(records):
id = ""
version = ""
play = []
info = []
start = []
data = []
sub = []
event_count = 0
for r in records:
if r[0] == "play":
r[0] = event_count
event_count += 1
play.append(r)
elif r[0] == "info":
info.append(r[1:])
elif r[0] == "start":
start.append(r[1:])
elif r[0] == "data":
data.append(r[1:])
elif r[0] == "sub":
r[0] = event_count
event_count += 1
sub.append(r)
elif r[0] == "com":
continue # This one we should ignore
elif r[0].endswith("adj"):
continue # This one we should ignore
elif r[0] == "id":
id = r[1]
elif r[0] == "version":
version = r[1]
else:
print("ERROR")
print(r)
return id, version, play, info, start, data, sub
def get_game(game_records):
id, version, plays, infos, starts, datas, subs = get_fields(game_records)
play_list = parse_play_fields(plays)
info_list = parse_info_fields(infos)
start_list = parse_start_fields(starts)
data_list = parse_data_fields(datas)
sub_list = parse_sub_fields(subs)
game = {}
game["id"] = id
game["version"] = version
game["plays"] = play_list
game["info"] = info_list
game["start"] = start_list
game["data"] = data_list
game["subs"] = sub_list
return game
def get_all_games(all_game_records):
all_games = []
for g in all_game_records:
all_games.append(get_game(g))
return json.dumps(all_games)
# path = "./../storage/ANA201808100.in"
# game_records = read_data_file(path)
# game = get_game(game_records)
#
# out = json.loads(json.dumps(game))  # get_game() returns a dict, not a JSON string
# print(out["id"])
| [
[
[
7,
10
],
[
143,
146
]
],
[
[
18,
22
],
[
3454,
3458
]
],
[
[
28,
42
]
],
[
[
281,
298
],
[
2900,
2917
]
],
[
[
696,
712
],
[
3066,
3082
]
],
[
[
1067,
1084
],
[
2941,
2958
]
],
[
[
1233,
1251
],
[
2983,
3001
]
],
[
[
1576,
1593
],
[
3026,
3043
]
],
[
[
1819,
1829
],
[
2859,
2869
]
],
[
[
2782,
2790
],
[
3430,
3438
]
],
[
[
3322,
3335
]
]
] |
from rubicon.java.android_events import Handler, PythonRunnable
from rubicon.java.jni import java
from travertino.size import at_least
from ..libs.android import R__color
from ..libs.android.graphics import BitmapFactory, Rect
from ..libs.android.view import Gravity, OnClickListener, View__MeasureSpec
from ..libs.android.widget import (
ImageView,
ImageView__ScaleType,
LinearLayout,
LinearLayout__LayoutParams,
RelativeLayout,
RelativeLayout__LayoutParams,
ScrollView,
TextView
)
from ..libs.androidx.swiperefreshlayout import (
SwipeRefreshLayout,
SwipeRefreshLayout__OnRefreshListener
)
from .base import Widget
class DetailedListOnClickListener(OnClickListener):
def __init__(self, impl, row_number):
super().__init__()
self._impl = impl
self._row_number = row_number
def onClick(self, _view):
row = self._impl.interface.data[self._row_number]
self._impl._selection = row
if self._impl.interface.on_select:
self._impl.interface.on_select(self._impl.interface, row=self._impl.interface.data[self._row_number])
class OnRefreshListener(SwipeRefreshLayout__OnRefreshListener):
def __init__(self, interface):
super().__init__()
self._interface = interface
def onRefresh(self):
if self._interface.on_refresh:
self._interface.on_refresh(self._interface)
class DetailedList(Widget):
ROW_HEIGHT = 250
_swipe_refresh_layout = None
_scroll_view = None
_dismissable_container = None
_selection = None
def create(self):
# DetailedList is not a specific widget on Android, so we build it out
# of a few pieces.
if self.native is None:
self.native = LinearLayout(self._native_activity)
self.native.setOrientation(LinearLayout.VERTICAL)
else:
# If create() is called a second time, clear the widget and regenerate it.
self.native.removeAllViews()
scroll_view = ScrollView(self._native_activity)
self._scroll_view = ScrollView(
__jni__=java.NewGlobalRef(scroll_view))
scroll_view_layout_params = LinearLayout__LayoutParams(
LinearLayout__LayoutParams.MATCH_PARENT,
LinearLayout__LayoutParams.MATCH_PARENT
)
scroll_view_layout_params.gravity = Gravity.TOP
swipe_refresh_wrapper = SwipeRefreshLayout(self._native_activity)
swipe_refresh_wrapper.setOnRefreshListener(OnRefreshListener(self.interface))
self._swipe_refresh_layout = SwipeRefreshLayout(
__jni__=java.NewGlobalRef(swipe_refresh_wrapper))
swipe_refresh_wrapper.addView(scroll_view)
self.native.addView(swipe_refresh_wrapper, scroll_view_layout_params)
dismissable_container = LinearLayout(self._native_activity)
self._dismissable_container = LinearLayout(
__jni__=java.NewGlobalRef(dismissable_container)
)
dismissable_container.setOrientation(LinearLayout.VERTICAL)
dismissable_container_params = LinearLayout__LayoutParams(
LinearLayout__LayoutParams.MATCH_PARENT,
LinearLayout__LayoutParams.MATCH_PARENT
)
scroll_view.addView(
dismissable_container, dismissable_container_params
)
for i in range(len((self.interface.data or []))):
self._make_row(dismissable_container, i)
def _make_row(self, container, i):
# Create the foreground.
row_foreground = RelativeLayout(self._native_activity)
container.addView(row_foreground)
# Add user-provided icon to layout.
icon_image_view = ImageView(self._native_activity)
icon = self.interface.data[i].icon
if icon is not None:
icon.bind(self.interface.factory)
bitmap = BitmapFactory.decodeFile(str(icon._impl.path))
icon_image_view.setImageBitmap(bitmap)
icon_layout_params = RelativeLayout__LayoutParams(
RelativeLayout__LayoutParams.WRAP_CONTENT,
RelativeLayout__LayoutParams.WRAP_CONTENT)
icon_layout_params.width = 150
icon_layout_params.setMargins(25, 0, 25, 0)
icon_layout_params.height = self.ROW_HEIGHT
icon_image_view.setScaleType(ImageView__ScaleType.FIT_CENTER)
row_foreground.addView(icon_image_view, icon_layout_params)
# Create layout to show top_text and bottom_text.
text_container = LinearLayout(self._native_activity)
text_container_params = RelativeLayout__LayoutParams(
RelativeLayout__LayoutParams.WRAP_CONTENT,
RelativeLayout__LayoutParams.WRAP_CONTENT)
text_container_params.height = self.ROW_HEIGHT
text_container_params.setMargins(25 + 25 + 150, 0, 0, 0)
row_foreground.addView(text_container, text_container_params)
text_container.setOrientation(LinearLayout.VERTICAL)
text_container.setWeightSum(2.0)
# Create top & bottom text; add them to layout.
top_text = TextView(self._native_activity)
top_text.setText(str(getattr(self.interface.data[i], 'title', '')))
top_text.setTextSize(20.0)
top_text.setTextColor(self._native_activity.getResources().getColor(R__color.black))
bottom_text = TextView(self._native_activity)
bottom_text.setTextColor(self._native_activity.getResources().getColor(R__color.black))
bottom_text.setText(str(getattr(self.interface.data[i], 'subtitle', '')))
bottom_text.setTextSize(16.0)
top_text_params = LinearLayout__LayoutParams(
RelativeLayout__LayoutParams.WRAP_CONTENT,
RelativeLayout__LayoutParams.MATCH_PARENT)
top_text_params.weight = 1.0
top_text.setGravity(Gravity.BOTTOM)
text_container.addView(top_text, top_text_params)
bottom_text_params = LinearLayout__LayoutParams(
RelativeLayout__LayoutParams.WRAP_CONTENT,
RelativeLayout__LayoutParams.MATCH_PARENT)
bottom_text_params.weight = 1.0
bottom_text.setGravity(Gravity.TOP)
bottom_text_params.gravity = Gravity.TOP
text_container.addView(bottom_text, bottom_text_params)
# Apply an onclick listener so that clicking anywhere on the row triggers Toga's on_select(row).
row_foreground.setOnClickListener(DetailedListOnClickListener(self, i))
def change_source(self, source):
# If the source changes, re-build the widget.
self.create()
def set_on_refresh(self, handler):
# No special handling needed.
pass
def after_on_refresh(self):
if self._swipe_refresh_layout:
self._swipe_refresh_layout.setRefreshing(False)
def insert(self, index, item):
# If the data changes, re-build the widget. Brutally effective.
self.create()
def change(self, item):
# If the data changes, re-build the widget. Brutally effective.
self.create()
def remove(self, index, item):
# If the data changes, re-build the widget. Brutally effective.
self.create()
def clear(self):
# If the data changes, re-build the widget. Brutally effective.
self.create()
def get_selection(self):
return self._selection
def set_on_select(self, handler):
# No special handling required.
pass
def set_on_delete(self, handler):
# This widget currently does not implement event handlers for data change.
self.interface.factory.not_implemented("DetailedList.set_on_delete()")
def scroll_to_row(self, row):
def scroll():
row_obj = self._dismissable_container.getChildAt(row)
hit_rect = Rect()
row_obj.getHitRect(hit_rect)
self._scroll_view.requestChildRectangleOnScreen(
self._dismissable_container,
hit_rect,
False,
)
Handler().post(PythonRunnable(scroll))
def rehint(self):
# Android can crash when rendering some widgets until they have their layout params set. Guard for that case.
if self.native.getLayoutParams() is None:
return
self.native.measure(
View__MeasureSpec.UNSPECIFIED,
View__MeasureSpec.UNSPECIFIED,
)
self.interface.intrinsic.width = at_least(self.native.getMeasuredWidth())
self.interface.intrinsic.height = self.native.getMeasuredHeight()
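
# Hedged usage sketch (assumption, not part of this backend module): from
# application code the widget above is normally reached through the public
# ``toga.DetailedList`` API, roughly along these lines; the data values and the
# handler signature are illustrative.
#
#   import toga
#   detailed_list = toga.DetailedList(
#       data=[
#           {'icon': None, 'title': 'First row', 'subtitle': 'details'},
#           {'icon': None, 'title': 'Second row', 'subtitle': 'more details'},
#       ],
#       on_select=lambda widget, row=None: print(row),
#   )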
| [
[
[
40,
47
],
[
8059,
8066
]
],
[
[
49,
63
],
[
8074,
8088
]
],
[
[
93,
97
],
[
2120,
2124
],
[
2632,
2636
],
[
2943,
2947
]
],
[
[
126,
134
],
[
8474,
8482
]
],
[
[
163,
171
],
[
5326,
5334
],
[
5476,
5484
]
],
[
[
208,
221
],
[
3891,
3904
]
],
[
[
223,
227
],
[
7818,
7822
]
],
[
[
260,
267
],
[
2383,
2390
],
[
5850,
5857
],
[
6170,
6177
],
[
6220,
6227
]
],
[
[
269,
284
],
[
694,
709
]
],
[
[
286,
303
],
[
8349,
8366
],
[
8392,
8409
]
],
[
[
344,
353
],
[
3719,
3728
]
],
[
[
359,
379
],
[
4338,
4358
]
],
[
[
385,
397
],
[
1763,
1775
],
[
1838,
1850
],
[
2835,
2847
],
[
2909,
2921
],
[
3039,
3051
],
[
4523,
4535
],
[
4967,
4979
]
],
[
[
403,
429
],
[
2188,
2214
],
[
2232,
2258
],
[
2289,
2315
],
[
3101,
3127
],
[
3145,
3171
],
[
3202,
3228
],
[
5639,
5665
],
[
5953,
5979
]
],
[
[
435,
449
],
[
3568,
3582
]
],
[
[
455,
483
],
[
4018,
4046
],
[
4060,
4088
],
[
4115,
4143
],
[
4591,
4619
],
[
4637,
4665
],
[
4696,
4724
],
[
5683,
5711
],
[
5742,
5770
],
[
5997,
6025
],
[
6056,
6084
]
],
[
[
489,
499
],
[
2026,
2036
],
[
2088,
2098
]
],
[
[
505,
513
],
[
5107,
5115
],
[
5365,
5373
]
],
[
[
569,
587
],
[
2427,
2445
],
[
2592,
2610
]
],
[
[
593,
630
],
[
1153,
1190
]
],
[
[
651,
657
],
[
1433,
1439
]
],
[
[
666,
693
],
[
6444,
6471
]
],
[
[
1135,
1152
],
[
2520,
2537
]
],
[
[
1420,
1432
]
]
] |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
#
# Copyright (C) 2021 Graz University of Technology.
#
# Invenio-Records-Marc21 is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""Permissions for Invenio Marc21 Records."""
from invenio_records_permissions.generators import AnyUser, SystemProcess
from invenio_records_permissions.policies.records import RecordPermissionPolicy
class Marc21RecordPermissionPolicy(RecordPermissionPolicy):
"""Access control configuration for records.
Note that even if the array is empty, the invenio_access Permission class
always adds the ``superuser-access``, so admins will always be allowed.
- Create action given to everyone for now.
- Read access given to everyone if public record and given to owners
always. (inherited)
- Update access given to record owners. (inherited)
- Delete access given to admins only. (inherited)
"""
# TODO: Change all below when permissions settled
can_create = [AnyUser()]
can_update_files = [AnyUser()]
can_publish = [AnyUser()]
can_read = [AnyUser()]
can_update = [AnyUser()]
can_new_version = [AnyUser()]
can_edit = [AnyUser()]
can_lift_embargo = [AnyUser()]
# Draft permissions
can_read_draft = [AnyUser()]
can_delete_draft = [AnyUser()]
can_update_draft = [AnyUser()]
can_search_drafts = [AnyUser()]
can_draft_read_files = [AnyUser()]
can_draft_create_files = [AnyUser(), SystemProcess()]
can_draft_update_files = [AnyUser()]
can_draft_delete_files = [AnyUser()]
# Files permissions
can_read_files = [AnyUser()]
can_create_files = [AnyUser(), SystemProcess()]
can_update_files = [AnyUser()]
can_delete_files = [AnyUser()]
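
# Hedged wiring sketch (assumption): a policy like the one above is normally
# plugged into a record service configuration through ``permission_policy_cls``;
# the service config class shown here is illustrative, not taken from this file.
#
#   class Marc21RecordServiceConfig(RecordServiceConfig):
#       permission_policy_cls = Marc21RecordPermissionPolicy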
| [
[
[
372,
379
],
[
1079,
1086
],
[
1114,
1121
],
[
1144,
1151
],
[
1171,
1178
],
[
1200,
1207
],
[
1234,
1241
],
[
1261,
1268
],
[
1296,
1303
],
[
1354,
1361
],
[
1389,
1396
],
[
1424,
1431
],
[
1460,
1467
],
[
1499,
1506
],
[
1540,
1547
],
[
1598,
1605
],
[
1639,
1646
],
[
1697,
1704
],
[
1732,
1739
],
[
1784,
1791
],
[
1819,
1826
]
],
[
[
381,
394
],
[
1551,
1564
],
[
1743,
1756
]
],
[
[
452,
474
],
[
512,
534
]
],
[
[
483,
511
]
]
] |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "animalRescue.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| [
[
[
29,
31
],
[
75,
77
]
],
[
[
39,
42
],
[
800,
803
]
],
[
[
200,
225
],
[
774,
799
]
],
[
[
463,
469
]
]
] |
import http.server
import threading
import testPackage
class MyHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
def _set_headers(self):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
def do_GET(self):
self._set_headers()
self.wfile.write(b"<html><head><title>My Title</title></head>")
self.wfile.write(bytes("<body><p>You accessed path: %s</p>" % self.path,"utf-8"))
self.wfile.write(bytes("Light Value: %d" % testPackage.LIGHT_VALUE,"utf-8"))
self.wfile.write(b"</body></html>")
print("Sent light value")
def do_POST(self):
testPackage.increase_light()
self._set_headers()
self.wfile.write(b"Success")
print("Increasing Light value")
class serverThread(threading.Thread):
def __init__(self, threadID):
threading.Thread.__init__(self)
self.threadID = threadID
def run(self):
print ("Starting serverThread: %d" % self.threadID)
httpd = http.server.HTTPServer(server_address, request_handler)
httpd.serve_forever()
print ("Exiting thread")
port = 30000
server_address = ('',port)
request_handler = MyHTTPRequestHandler
thread_server = serverThread(1)
thread_server.start()
testPackage.polling_forever()
print('After serve_forever()')
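
# Hedged stand-in (assumption): the ``testPackage`` module imported above is not
# shown in this file. A minimal stub exposing the three names used here would
# make the server runnable for local testing.
#
#   # testPackage.py
#   import time
#   LIGHT_VALUE = 0
#   def increase_light():
#       global LIGHT_VALUE
#       LIGHT_VALUE += 1
#   def polling_forever():
#       while True:
#           time.sleep(1)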
| [
[
[
7,
18
],
[
83,
87
],
[
1047,
1051
]
],
[
[
26,
35
],
[
824,
833
],
[
886,
895
]
],
[
[
43,
54
],
[
1300,
1311
],
[
525,
536
],
[
669,
680
]
],
[
[
62,
82
],
[
1225,
1245
]
],
[
[
810,
822
],
[
1262,
1274
]
],
[
[
1167,
1171
],
[
1201,
1205
]
],
[
[
1180,
1194
],
[
1070,
1084
]
],
[
[
1207,
1222
],
[
1086,
1101
]
],
[
[
1246,
1259
],
[
1278,
1291
]
]
] |
import sys
from datetime import datetime
from app import filters
from app.misc.execute import run
def main(args: list[str]):
from app.handlers import dp
    # Save the current time so the bot's uptime can be calculated later
dp.bot['start_time'] = datetime.now()
dp['args'] = args
run(dp)
if __name__ == '__main__':
filters.setup()
main(sys.argv[1:])
| [
[
[
7,
10
],
[
366,
369
]
],
[
[
32,
40
],
[
259,
267
]
],
[
[
58,
65
],
[
341,
348
]
],
[
[
95,
98
],
[
300,
303
]
],
[
[
105,
109
],
[
361,
365
]
]
] |
import cv2
import threading
class RecordingThread (threading.Thread):
def __init__(self, name, camera):
threading.Thread.__init__(self)
self.name = name
self.isRunning = True
self.cap = camera
fourcc = cv2.VideoWriter_fourcc(*'MJPG')
self.out = cv2.VideoWriter('./static/video.avi',fourcc, 20.0, (640,480))
def run(self):
while self.isRunning:
ret, frame = self.cap.read()
if ret:
self.out.write(frame)
self.out.release()
def stop(self):
self.isRunning = False
def __del__(self):
self.out.release()
class VideoCamera(object):
def __init__(self):
# Open a camera
self.cap = cv2.VideoCapture(2)
# Initialize video recording environment
self.is_record = False
self.out = None
# Thread for recording
self.recordingThread = None
def __del__(self):
self.cap.release()
def get_frame(self):
ret, frame = self.cap.read()
if ret:
ret, jpeg = cv2.imencode('.jpg', frame)
# Record video
# if self.is_record:
# if self.out == None:
# fourcc = cv2.VideoWriter_fourcc(*'MJPG')
# self.out = cv2.VideoWriter('./static/video.avi',fourcc, 20.0, (640,480))
# ret, frame = self.cap.read()
# if ret:
# self.out.write(frame)
# else:
# if self.out != None:
# self.out.release()
# self.out = None
return jpeg.tobytes()
else:
return None
def start_record(self):
self.is_record = True
self.recordingThread = RecordingThread("Video Recording Thread", self.cap)
self.recordingThread.start()
def stop_record(self):
self.is_record = False
        if self.recordingThread is not None:
self.recordingThread.stop()
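
# Hedged usage sketch (assumption): VideoCamera.get_frame() returns JPEG bytes,
# which is the shape expected by a typical Flask MJPEG streaming endpoint; the
# route name below is illustrative.
#
#   from flask import Flask, Response
#   app = Flask(__name__)
#   camera = VideoCamera()
#   def gen(cam):
#       while True:
#           frame = cam.get_frame()
#           if frame is not None:
#               yield (b'--frame\r\n'
#                      b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
#   @app.route('/video_feed')
#   def video_feed():
#       return Response(gen(camera),
#                       mimetype='multipart/x-mixed-replace; boundary=frame')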
| [
[
[
7,
10
],
[
248,
251
],
[
299,
302
],
[
736,
739
],
[
1098,
1101
]
],
[
[
18,
27
],
[
52,
61
],
[
117,
126
]
],
[
[
35,
50
],
[
1828,
1843
]
],
[
[
648,
659
]
]
] |
from __future__ import absolute_import
import datetime
import os
from socket import error as SocketError, timeout as SocketTimeout
import socket
import sys
import warnings
from .exceptions import (
NewConnectionError,
ConnectTimeoutError,
SubjectAltNameWarning,
SystemTimeWarning,
)
from .packages import six
from .packages.ssl_match_hostname import match_hostname
from .util import connection
from .util.ssl_ import (
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
assert_fingerprint,
)
try: # Python 3
from http.client import HTTPConnection as _HTTPConnection
from http.client import HTTPException # noqa: unused in this module
except ImportError:
from httplib import HTTPConnection as _HTTPConnection
from httplib import HTTPException # noqa: unused in this module
try: # Compiled with SSL?
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
ssl = None
class BaseSSLError(BaseException):
pass
try: # Python 3:
# Not a no-op, we're adding this to the namespace so it can be imported.
ConnectionError = ConnectionError
except NameError: # Python 2:
class ConnectionError(Exception):
pass
port_by_scheme = {
'http': 80,
'https': 443,
}
RECENT_DATE = datetime.date(2014, 1, 1)
class DummyConnection(object):
"""Used to detect a failed ConnectionCls import."""
pass
class HTTPConnection(_HTTPConnection, object):
"""
Based on httplib.HTTPConnection but provides an extra constructor
backwards-compatibility layer between older and newer Pythons.
Additional keyword parameters are used to configure attributes of the connection.
Accepted parameters include:
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
.. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass::
HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
"""
default_port = port_by_scheme['http']
#: Disable Nagle's algorithm by default.
#: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
#: Whether this connection verifies the host's certificate.
is_verified = False
def __init__(self, *args, **kw):
if six.PY3: # Python 3
kw.pop('strict', None)
# Pre-set source_address in case we have an older Python like 2.6.
self.source_address = kw.get('source_address')
if sys.version_info < (2, 7): # Python 2.6
# _HTTPConnection on Python 2.6 will balk at this keyword arg, but
# not newer versions. We can still use it when creating a
# connection though, so we pop it *after* we have saved it as
# self.source_address.
kw.pop('source_address', None)
#: The socket options provided by the user. If no options are
#: provided, we use the default options.
self.socket_options = kw.pop('socket_options', self.default_socket_options)
# Superclass also sets self.source_address in Python 2.7+.
_HTTPConnection.__init__(self, *args, **kw)
def _new_conn(self):
""" Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
extra_kw['source_address'] = self.source_address
if self.socket_options:
extra_kw['socket_options'] = self.socket_options
try:
conn = connection.create_connection(
(self.host, self.port), self.timeout, **extra_kw)
except SocketTimeout as e:
raise ConnectTimeoutError(
self, "Connection to %s timed out. (connect timeout=%s)" %
(self.host, self.timeout))
except SocketError as e:
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % e)
return conn
def _prepare_conn(self, conn):
self.sock = conn
# the _tunnel_host attribute was added in python 2.6.3 (via
# http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
# not have them.
if getattr(self, '_tunnel_host', None):
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
class HTTPSConnection(HTTPConnection):
default_port = port_by_scheme['https']
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):
HTTPConnection.__init__(self, host, port, strict=strict,
timeout=timeout, **kw)
self.key_file = key_file
self.cert_file = cert_file
# Required property for Google AppEngine 1.9.0 which otherwise causes
# HTTPS requests to go out as HTTP. (See Issue #356)
self._protocol = 'https'
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
class VerifiedHTTPSConnection(HTTPSConnection):
"""
Based on httplib.HTTPSConnection but wraps the socket with
SSL certification.
"""
cert_reqs = None
ca_certs = None
ca_cert_dir = None
ssl_version = None
assert_fingerprint = None
def set_cert(self, key_file=None, cert_file=None,
cert_reqs=None, ca_certs=None,
assert_hostname=None, assert_fingerprint=None,
ca_cert_dir=None):
if (ca_certs or ca_cert_dir) and cert_reqs is None:
cert_reqs = 'CERT_REQUIRED'
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
def connect(self):
# Add certificate verification
conn = self._new_conn()
resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
resolved_ssl_version = resolve_ssl_version(self.ssl_version)
hostname = self.host
if getattr(self, '_tunnel_host', None):
# _tunnel_host was added in Python 2.6.3
# (See: http://hg.python.org/cpython/rev/0f57b30a152f)
self.sock = conn
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
# Override the host with the one we're requesting data from.
hostname = self._tunnel_host
is_time_off = datetime.date.today() < RECENT_DATE
if is_time_off:
warnings.warn((
'System time is way off (before {0}). This will probably '
'lead to SSL verification errors').format(RECENT_DATE),
SystemTimeWarning
)
# Wrap socket using verification with the root certs in
# trusted_root_certs
self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
cert_reqs=resolved_cert_reqs,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
server_hostname=hostname,
ssl_version=resolved_ssl_version)
if self.assert_fingerprint:
assert_fingerprint(self.sock.getpeercert(binary_form=True),
self.assert_fingerprint)
elif resolved_cert_reqs != ssl.CERT_NONE \
and self.assert_hostname is not False:
cert = self.sock.getpeercert()
if not cert.get('subjectAltName', ()):
warnings.warn((
'Certificate for {0} has no `subjectAltName`, falling back to check for a '
'`commonName` for now. This feature is being removed by major browsers and '
'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
'for details.)'.format(hostname)),
SubjectAltNameWarning
)
# In case the hostname is an IPv6 address, strip the square
# brackets from it before using it to validate. This is because
# a certificate with an IPv6 address in it won't have square
# brackets around that address. Sadly, match_hostname won't do this
# for us: it expects the plain host part without any extra work
# that might have been done to make it palatable to httplib.
asserted_hostname = self.assert_hostname or hostname
asserted_hostname = asserted_hostname.strip('[]')
match_hostname(cert, asserted_hostname)
self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
self.assert_fingerprint is not None)
if ssl:
# Make a copy for testing.
UnverifiedHTTPSConnection = HTTPSConnection
HTTPSConnection = VerifiedHTTPSConnection
else:
HTTPSConnection = DummyConnection
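
# Hedged usage sketch (assumption): exercising the connection classes defined
# above directly, outside of a connection pool; host and path are illustrative.
#
#   conn = HTTPSConnection('example.org', 443, timeout=10)
#   conn.request('GET', '/')
#   response = conn.getresponse()
#   print(response.status)
#   conn.close()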
| [
[
[
23,
38
]
],
[
[
47,
55
],
[
1337,
1345
],
[
7767,
7775
]
],
[
[
63,
65
],
[
6863,
6865
],
[
6935,
6937
]
],
[
[
85,
105
],
[
4592,
4603
]
],
[
[
107,
131
],
[
4399,
4412
]
],
[
[
139,
145
],
[
2845,
2851
],
[
2865,
2871
],
[
5485,
5491
]
],
[
[
153,
156
],
[
3226,
3229
]
],
[
[
164,
172
],
[
7839,
7847
],
[
8922,
8930
]
],
[
[
204,
222
],
[
4628,
4646
]
],
[
[
228,
247
],
[
4437,
4456
]
],
[
[
253,
274
],
[
9303,
9324
]
],
[
[
280,
297
],
[
8018,
8035
]
],
[
[
323,
326
],
[
3027,
3030
]
],
[
[
368,
382
],
[
9933,
9947
]
],
[
[
401,
411
],
[
4287,
4297
]
],
[
[
441,
458
],
[
7092,
7109
]
],
[
[
464,
483
],
[
7157,
7176
]
],
[
[
489,
504
],
[
8164,
8179
]
],
[
[
510,
528
],
[
8590,
8608
]
],
[
[
579,
612
],
[
1484,
1499
],
[
3848,
3863
]
],
[
[
641,
654
]
],
[
[
730,
763
],
[
1484,
1499
],
[
3848,
3863
]
],
[
[
788,
801
]
],
[
[
872,
875
],
[
895,
898
],
[
10115,
10118
],
[
5996,
5999
],
[
8741,
8744
],
[
10024,
10027
]
],
[
[
880,
892
]
],
[
[
980,
983
],
[
10115,
10118
],
[
5996,
5999
],
[
8741,
8744
],
[
10024,
10027
]
],
[
[
1002,
1014
]
],
[
[
1145,
1160
]
],
[
[
1220,
1235
]
],
[
[
1267,
1281
],
[
2688,
2702
],
[
5351,
5365
]
],
[
[
1323,
1334
],
[
7791,
7802
],
[
7988,
7999
]
],
[
[
1371,
1386
],
[
10273,
10288
]
],
[
[
1469,
1483
],
[
5315,
5329
],
[
5533,
5547
]
],
[
[
5299,
5314
],
[
6081,
6096
],
[
10183,
10198
]
],
[
[
6057,
6080
],
[
10221,
10244
]
],
[
[
10155,
10180
]
],
[
[
10203,
10218
]
],
[
[
10255,
10270
]
]
] |
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
import shutil
import subprocess
from pre_wigs_validation.enums import ValidationEnforcement, ValidationResult
from pre_wigs_validation.instance import ValidationInstance
from pre_wigs_validation.dataclasses import ValidationOutput
from pre_wigs_validation.utils import check_validation_config, get_ld_library_orig
class RepoAccess:
"""Validate that an instance has access to Yum Repositories."""
validation = "Repo Access"
enforcement = ValidationEnforcement.REQUIRED
@classmethod
def validate(
cls, *, enabled: bool = True, instance: ValidationInstance
) -> ValidationOutput:
"""
Parameters:
enabled (bool): whether or not to run this validation function
instance (ValidationInstance): the instance object being validated
Returns:
ValidationOutput: output of validation
"""
if not enabled:
return ValidationOutput(
validation=cls.validation,
result=ValidationResult.NOT_RUN,
enforcement=cls.enforcement,
)
if instance.distribution == "sles":
repo_check_command = ["zypper", "refresh"]
verbose_error_message = "Command 'zypper' not in $PATH"
package_manager_found = not shutil.which("zypper") == None
else:
repo_check_command = ["yum", "check-update"]
verbose_error_message = "Command 'yum' not in $PATH"
package_manager_found = not shutil.which("yum") == None
error_message = "Unable to validate due to unsupported environment"
fail_message = "Unable to access repositories"
config = check_validation_config(
default_params=cls.validate.__kwdefaults__, local_params=locals()
)
if not package_manager_found:
return ValidationOutput(
validation=cls.validation,
result=ValidationResult.FAIL,
enforcement=cls.enforcement,
config=config,
message=error_message,
verbose_message=verbose_error_message,
)
proc = subprocess.run(
repo_check_command,
env=get_ld_library_orig(),
stdout=subprocess.DEVNULL,
stderr=subprocess.PIPE,
)
if proc.returncode == 1:
verbose_message = proc.stderr.decode("utf-8")
return ValidationOutput(
validation=cls.validation,
result=ValidationResult.FAIL,
enforcement=cls.enforcement,
config=config,
message=fail_message,
verbose_message=verbose_message,
)
return ValidationOutput(
validation=cls.validation,
result=ValidationResult.PASS,
enforcement=cls.enforcement,
config=config,
)
| [
[
[
115,
121
],
[
1397,
1403
],
[
1604,
1610
]
],
[
[
129,
139
],
[
2260,
2270
],
[
2366,
2376
],
[
2405,
2415
]
],
[
[
179,
200
],
[
560,
581
]
],
[
[
202,
218
],
[
1104,
1120
],
[
2037,
2053
],
[
2627,
2643
],
[
2919,
2935
]
],
[
[
260,
278
],
[
675,
693
]
],
[
[
323,
339
],
[
703,
719
],
[
1020,
1036
],
[
1953,
1969
],
[
2543,
2559
],
[
2843,
2859
]
],
[
[
378,
401
],
[
1782,
1805
]
],
[
[
403,
422
],
[
2324,
2343
]
],
[
[
430,
440
]
]
] |
from numpy.random import RandomState
from typing import Any, Optional, List
from numpy import arange
from copy import deepcopy
from pydeeprecsys.rl.neural_networks.dueling import DuelingDDQN
from pydeeprecsys.rl.experience_replay.priority_replay_buffer import (
PrioritizedExperienceReplayBuffer,
)
from pydeeprecsys.rl.experience_replay.buffer_parameters import (
PERBufferParameters,
ExperienceReplayBufferParameters,
)
from pydeeprecsys.rl.agents.agent import ReinforcementLearning
from pydeeprecsys.rl.learning_statistics import LearningStatistics
class RainbowDQNAgent(ReinforcementLearning):
"""Instead of sampling randomly from the buffer we prioritize experiences with PER
Instead of epsilon-greedy we use gaussian noisy layers for exploration
Instead of the Q value we calculate Value and Advantage (Dueling DQN).
This implementation does not include the Categorical DQN part (yet)."""
def __init__(
self,
input_size: int,
output_size: int,
network_update_frequency: int = 5,
network_sync_frequency: int = 200,
priority_importance: float = 0.6,
priority_weigth_growth: float = 0.001,
buffer_size: int = 10000,
buffer_burn_in: int = 1000,
batch_size: int = 32,
noise_sigma: float = 0.017,
discount_factor: float = 0.99,
learning_rate: float = 0.0001,
hidden_layers: List[int] = None,
random_state: RandomState = RandomState(),
statistics: Optional[LearningStatistics] = None,
):
self.network = DuelingDDQN(
n_input=input_size,
n_output=output_size,
learning_rate=learning_rate,
noise_sigma=noise_sigma,
discount_factor=discount_factor,
statistics=statistics,
hidden_layers=hidden_layers,
)
self.target_network = deepcopy(self.network)
self.buffer = PrioritizedExperienceReplayBuffer(
ExperienceReplayBufferParameters(
max_experiences=buffer_size,
minimum_experiences_to_start_predicting=buffer_burn_in,
batch_size=batch_size,
random_state=random_state,
),
PERBufferParameters(
alpha=priority_importance,
beta_growth=priority_weigth_growth,
),
)
self.step_count = 0
self.network_update_frequency = network_update_frequency
self.network_sync_frequency = network_sync_frequency
self.actions = arange(output_size)
self.random_state = random_state
def _check_update_network(self):
# we only start training the network once the buffer is ready
# (the burn in is filled)
if self.buffer.ready_to_predict():
self.step_count += 1
if self.step_count % self.network_update_frequency == 0:
# we train at every K steps
self.network.learn_with(self.buffer, self.target_network)
if self.step_count % self.network_sync_frequency == 0:
# at every N steps replaces the target network with the main network
self.target_network.load_state_dict(self.network.state_dict())
def top_k_actions_for_state(self, state: Any, k: int = 1) -> Any:
state_flat = state.flatten()
if self.buffer.ready_to_predict():
actions = self.target_network.top_k_actions_for_state(state_flat, k=k)
else:
actions = self.random_state.choice(self.actions, size=k)
self._check_update_network()
return actions
def action_for_state(self, state: Any) -> Any:
return self.top_k_actions_for_state(state, k=1)[0]
def store_experience(
self, state: Any, action: Any, reward: float, done: bool, new_state: Any
):
state_flat = state.flatten()
new_state_flat = new_state.flatten()
self.buffer.store_experience(state_flat, action, reward, done, new_state_flat)
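
# Hedged training-loop sketch (assumption): driving the agent with a gym-style
# environment. The environment name, sizes and episode budget are illustrative,
# and env.step() is assumed to follow the classic 4-tuple gym API.
#
#   import gym
#   env = gym.make('CartPole-v1')
#   agent = RainbowDQNAgent(input_size=4, output_size=2)
#   for episode in range(10):
#       state, done = env.reset(), False
#       while not done:
#           action = agent.action_for_state(state)
#           new_state, reward, done, _ = env.step(action)
#           agent.store_experience(state, action, reward, done, new_state)
#           state = new_state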
| [
[
[
25,
36
],
[
1476,
1487
],
[
1462,
1473
]
],
[
[
56,
59
],
[
3331,
3334
],
[
3311,
3314
],
[
3689,
3692
],
[
3681,
3684
],
[
3801,
3804
],
[
3814,
3817
],
[
3857,
3860
]
],
[
[
61,
69
],
[
1511,
1519
]
],
[
[
71,
75
],
[
1422,
1426
]
],
[
[
94,
100
],
[
2568,
2574
]
],
[
[
118,
126
],
[
1897,
1905
]
],
[
[
179,
190
],
[
1579,
1590
]
],
[
[
266,
299
],
[
1943,
1976
]
],
[
[
373,
392
],
[
2250,
2269
]
],
[
[
398,
430
],
[
1990,
2022
]
],
[
[
475,
496
],
[
588,
609
]
],
[
[
545,
563
],
[
1520,
1538
]
],
[
[
572,
587
]
]
] |
#grad_cam
#[keras-grad-cam/grad-cam.py](https://github.com/jacobgil/keras-grad-cam/blob/master/grad-cam.py)
from keras.applications.vgg16 import (VGG16, preprocess_input, decode_predictions)
from keras.models import Model
from keras.preprocessing import image
from keras.layers.core import Lambda
from keras.models import Sequential
from tensorflow.python.framework import ops
import keras.backend as K
import tensorflow as tf
import numpy as np
import keras
import sys
import cv2
#from keras.applications.resnet50 import ResNet50, preprocess_input, decode_predictions
#from keras.applications.vgg19 import VGG19, preprocess_input, decode_predictions
#from keras.applications.inception_v3 import InceptionV3, preprocess_input, decode_predictions
def target_category_loss(x, category_index, nb_classes):
return tf.multiply(x, K.one_hot([category_index], nb_classes))
def target_category_loss_output_shape(input_shape):
return input_shape
def normalize(x):
# utility function to normalize a tensor by its L2 norm
return x / (K.sqrt(K.mean(K.square(x))) + 1e-5)
def load_image(path):
img_path = sys.argv[1]
img = image.load_img(img_path, target_size=(224,224)) #299,299)) #224, 224))
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = preprocess_input(x)
return x
def register_gradient():
if "GuidedBackProp" not in ops._gradient_registry._registry:
@ops.RegisterGradient("GuidedBackProp")
def _GuidedBackProp(op, grad):
dtype = op.inputs[0].dtype
return grad * tf.cast(grad > 0., dtype) * \
tf.cast(op.inputs[0] > 0., dtype)
def compile_saliency_function(model, activation_layer='block5_conv3'): #mixed10 'activation_49' add_16 add_32 activation_98
input_img = model.input
layer_dict = dict([(layer.name, layer) for layer in model.layers[1:]])
#print(layer_dict)
layer_output = layer_dict[activation_layer].output
max_output = K.max(layer_output, axis=3)
saliency = K.gradients(K.sum(max_output), input_img)[0]
return K.function([input_img, K.learning_phase()], [saliency])
def modify_backprop(model, name):
g = tf.get_default_graph()
with g.gradient_override_map({'Relu': name}):
# get layers that have an activation
layer_dict = [layer for layer in model.layers[1:]
if hasattr(layer, 'activation')]
# replace relu activation
for layer in layer_dict:
if layer.activation == keras.activations.relu:
layer.activation = tf.nn.relu
        # re-instantiate a new model
new_model = VGG16(weights='imagenet')
#new_model = ResNet50(weights='imagenet')
new_model.summary()
return new_model
def deprocess_image(x):
'''
Same normalization as in:
https://github.com/fchollet/keras/blob/master/examples/conv_filter_visualization.py
'''
if np.ndim(x) > 3:
x = np.squeeze(x)
# normalize tensor: center on 0., ensure std is 0.1
x -= x.mean()
x /= (x.std() + 1e-5)
x *= 0.1
# clip to [0, 1]
x += 0.5
x = np.clip(x, 0, 1)
# convert to RGB array
x *= 255
if K.image_dim_ordering() == 'th':
x = x.transpose((1, 2, 0))
x = np.clip(x, 0, 255).astype('uint8')
return x
def _compute_gradients(tensor, var_list):
grads = tf.gradients(tensor, var_list)
return [grad if grad is not None else tf.zeros_like(var) for var, grad in zip(var_list, grads)]
def grad_cam(input_model, image, category_index, layer_name):
nb_classes = 1000
target_layer = lambda x: target_category_loss(x, category_index, nb_classes)
x = Lambda(target_layer, output_shape = target_category_loss_output_shape)(input_model.output)
model = Model(inputs=input_model.input, outputs=x)
#model.summary()
loss = K.sum(model.output)
    conv_output = [l for l in model.layers if l.name == layer_name][0].output
grads = normalize(_compute_gradients(loss, [conv_output])[0])
gradient_function = K.function([model.input], [conv_output, grads])
output, grads_val = gradient_function([image])
output, grads_val = output[0, :], grads_val[0, :, :, :]
weights = np.mean(grads_val, axis = (0, 1))
cam = np.ones(output.shape[0 : 2], dtype = np.float32)
for i, w in enumerate(weights):
cam += w * output[:, :, i]
cam = cv2.resize(cam, (224,224)) #299,299)) #224, 224))
cam = np.maximum(cam, 0)
heatmap = cam / np.max(cam)
#Return to BGR [0..255] from the preprocessed image
image = image[0, :]
image -= np.min(image)
image = np.minimum(image, 255)
cam = cv2.applyColorMap(np.uint8(255*heatmap), cv2.COLORMAP_JET)
cam = np.float32(cam) + np.float32(image)
cam = 255 * cam / np.max(cam)
return np.uint8(cam), heatmap
preprocessed_input = load_image(sys.argv[1])
model = VGG16(weights='imagenet')
#model = VGG19(weights='imagenet')
#model = InceptionV3(weights='imagenet')
#model = ResNet50(weights = 'imagenet')
#model.summary()
target_layer = 'block5_conv3' #'activation_49' add_16 "block5_conv3"
predictions = model.predict(preprocessed_input)
register_gradient()
guided_model = modify_backprop(model, 'GuidedBackProp')
guided_model.summary()
for i in range(5):
top_1 = decode_predictions(predictions)[0][i]
print(predictions.argsort()[0][::-1][i])
print('Predicted class:')
print('%s (%s) with probability %.2f' % (top_1[1], top_1[0], top_1[2]))
predicted_class = predictions.argsort()[0][::-1][i] #np.argmax(predictions)
cam, heatmap = grad_cam(model, preprocessed_input, predicted_class, target_layer)
cv2.imwrite("gradcam"+str(top_1[1])+".jpg", cam)
saliency_fn = compile_saliency_function(guided_model)
saliency = saliency_fn([preprocessed_input, 0])
gradcam = saliency[0] * heatmap[..., np.newaxis]
cv2.imwrite("guided_gradcam"+str(top_1[1])+".jpg", deprocess_image(gradcam))
| [
[
[
147,
152
],
[
4881,
4886
],
[
2626,
2631
]
],
[
[
154,
170
],
[
1286,
1302
]
],
[
[
172,
190
],
[
5288,
5306
]
],
[
[
217,
222
],
[
3766,
3771
]
],
[
[
255,
260
],
[
1139,
1144
],
[
1220,
1225
]
],
[
[
291,
297
],
[
3663,
3669
]
],
[
[
323,
333
]
],
[
[
374,
377
],
[
1376,
1379
],
[
1419,
1422
]
],
[
[
385,
403
],
[
831,
832
],
[
1043,
1044
],
[
1050,
1051
],
[
1057,
1058
],
[
1965,
1966
],
[
2008,
2009
],
[
2020,
2021
],
[
2064,
2065
],
[
2087,
2088
],
[
3180,
3181
],
[
3841,
3842
],
[
4035,
4036
]
],
[
[
411,
427
],
[
816,
818
],
[
2163,
2165
],
[
2557,
2559
],
[
3358,
3360
],
[
3431,
3433
],
[
1562,
1564
],
[
1608,
1610
]
],
[
[
435,
446
],
[
5847,
5849
],
[
1252,
1254
],
[
2917,
2919
],
[
2945,
2947
],
[
3115,
3117
],
[
3255,
3257
],
[
4210,
4212
],
[
4254,
4256
],
[
4291,
4293
],
[
4449,
4451
],
[
4488,
4490
],
[
4594,
4596
],
[
4620,
4622
],
[
4672,
4674
],
[
4723,
4725
],
[
4741,
4743
],
[
4781,
4783
],
[
4804,
4806
]
],
[
[
454,
459
],
[
2498,
2503
]
],
[
[
467,
470
],
[
4860,
4863
],
[
1117,
1120
]
],
[
[
478,
481
],
[
5647,
5650
],
[
5863,
5866
],
[
4386,
4389
],
[
4654,
4657
],
[
4695,
4698
]
],
[
[
752,
772
],
[
3603,
3623
]
],
[
[
877,
910
],
[
3699,
3732
]
],
[
[
953,
962
],
[
3957,
3966
]
],
[
[
1084,
1094
],
[
4849,
4859
]
],
[
[
1324,
1341
],
[
5158,
5175
]
],
[
[
1647,
1672
],
[
5714,
5739
]
],
[
[
2125,
2140
],
[
5193,
5208
]
],
[
[
2756,
2771
],
[
5914,
5929
]
],
[
[
3308,
3326
],
[
3967,
3985
]
],
[
[
3494,
3502
],
[
5576,
5584
]
],
[
[
4828,
4846
],
[
5138,
5156
],
[
5592,
5610
],
[
5782,
5800
]
],
[
[
4873,
4878
],
[
5124,
5129
],
[
5209,
5214
],
[
5585,
5590
]
],
[
[
5040,
5052
],
[
5629,
5641
]
],
[
[
5110,
5121
],
[
5307,
5318
],
[
5336,
5347
],
[
5499,
5510
]
],
[
[
5178,
5190
],
[
5234,
5246
],
[
5740,
5752
]
],
[
[
5261,
5262
],
[
5323,
5324
],
[
5367,
5368
],
[
5530,
5531
]
],
[
[
5280,
5285
],
[
5446,
5451
],
[
5456,
5461
],
[
5466,
5471
],
[
5673,
5678
],
[
5896,
5901
]
],
[
[
5481,
5496
],
[
5612,
5627
]
],
[
[
5561,
5564
],
[
5691,
5694
]
],
[
[
5566,
5573
],
[
5834,
5841
]
],
[
[
5700,
5711
],
[
5769,
5780
]
],
[
[
5758,
5766
],
[
5820,
5828
]
],
[
[
5810,
5817
],
[
5930,
5937
]
]
] |
import os
import hashlib
from constants import TEX_DIR
from constants import TEX_TEXT_TO_REPLACE
from constants import TEX_USE_CTEX
from constants import TEX_FIX_SVG
def tex_hash(expression, template_tex_file_body):
id_str = str(expression + template_tex_file_body)
hasher = hashlib.sha256()
hasher.update(id_str.encode())
# Truncating at 16 bytes for cleanliness
return hasher.hexdigest()[:16]
def tex_to_svg_file(expression, template_tex_file_body):
tex_file = generate_tex_file(expression, template_tex_file_body)
dvi_file = tex_to_dvi(tex_file)
return dvi_to_svg(dvi_file)
def generate_tex_file(expression, template_tex_file_body):
result = os.path.join(
TEX_DIR,
tex_hash(expression, template_tex_file_body)
) + ".tex"
if not os.path.exists(result):
print("Writing \"%s\" to %s" % (
"".join(expression), result
))
new_body = template_tex_file_body.replace(
TEX_TEXT_TO_REPLACE, expression
)
with open(result, "w") as outfile:
outfile.write(new_body)
return result
def get_null():
if os.name == "nt":
return "NUL"
return "/dev/null"
def tex_to_dvi(tex_file):
result = tex_file.replace(".tex", ".dvi" if not TEX_USE_CTEX else ".xdv")
if not os.path.exists(result):
commands = [
"latex",
"-interaction=batchmode",
"-halt-on-error",
"-output-directory=" + TEX_DIR,
tex_file,
">",
get_null()
] if not TEX_USE_CTEX else [
"xelatex",
"-no-pdf",
"-interaction=batchmode",
"-halt-on-error",
"-output-directory=" + TEX_DIR,
tex_file,
">",
get_null()
]
exit_code = os.system(" ".join(commands))
if exit_code != 0:
log_file = tex_file.replace(".tex", ".log")
raise Exception(
("Latex error converting to dvi. " if not TEX_USE_CTEX
else "Xelatex error converting to xdv. ") +
"See log output above or the log file: %s" % log_file)
return result
def dvi_to_svg(dvi_file, regen_if_exists=False):
"""
Converts a dvi, which potentially has multiple slides, into a
directory full of enumerated pngs corresponding with these slides.
Returns a list of PIL Image objects for these images sorted as they
where in the dvi
"""
result = dvi_file.replace(".dvi" if not TEX_USE_CTEX else ".xdv", ".svg")
if not os.path.exists(result):
commands = [
"dvisvgm",
dvi_file,
"-n",
"-v",
"0",
"-o",
result,
">",
get_null()
]
os.system(" ".join(commands))
if TEX_FIX_SVG:
commands = [
"cairosvg",
result,
"-f",
"svg",
"-o",
result
]
os.system(" ".join(commands))
return result
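
# Hedged usage sketch (assumption): the template body must contain the
# TEX_TEXT_TO_REPLACE placeholder that generate_tex_file() substitutes; the
# surrounding preamble here is illustrative.
#
#   template = (r"\documentclass{standalone}" "\n"
#               r"\begin{document}" "\n"
#               + TEX_TEXT_TO_REPLACE + "\n"
#               + r"\end{document}")
#   svg_path = tex_to_svg_file(r"$\int_0^1 x^2\,dx$", template)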
| [
[
[
8,
10
],
[
689,
691
],
[
799,
801
],
[
1142,
1144
],
[
1320,
1322
],
[
1847,
1849
],
[
2595,
2597
],
[
2834,
2836
],
[
3082,
3084
]
],
[
[
18,
25
],
[
287,
294
]
],
[
[
49,
56
],
[
711,
718
],
[
1489,
1496
],
[
1746,
1753
]
],
[
[
79,
98
],
[
978,
997
]
],
[
[
121,
133
],
[
1283,
1295
],
[
1577,
1589
],
[
2047,
2059
],
[
2550,
2562
]
],
[
[
156,
167
],
[
2876,
2887
]
],
[
[
174,
182
],
[
728,
736
]
],
[
[
425,
440
]
],
[
[
621,
638
],
[
493,
510
]
],
[
[
1123,
1131
],
[
1549,
1557
],
[
1806,
1814
],
[
2805,
2813
]
],
[
[
1209,
1219
],
[
562,
572
]
],
[
[
2215,
2225
],
[
594,
604
]
]
] |
from django.forms.models import model_to_dict
from django.test import TestCase
from ..forms import EmailChangePasswordForm, ProfileForm
from .factories import StaffFactory, UserFactory
class BaseTestProfileForm(TestCase):
def form_data(self, user, **values):
fields = ProfileForm.Meta.fields
data = model_to_dict(user, fields)
data.update(**values)
return data
def submit_form(self, instance, **extra_data):
form = ProfileForm(instance=instance, data=self.form_data(instance, **extra_data))
if form.is_valid():
form.save()
return form
class TestProfileForm(BaseTestProfileForm):
def setUp(self):
self.user = UserFactory()
def test_email_unique(self):
other_user = UserFactory()
form = self.submit_form(self.user, email=other_user.email)
self.assertFalse(form.is_valid())
self.user.refresh_from_db()
self.assertNotEqual(self.user.email, other_user.email)
def test_can_change_email(self):
new_email = 'me@another.com'
self.submit_form(self.user, email=new_email)
self.user.refresh_from_db()
self.assertEqual(self.user.email, new_email)
def test_cant_set_slack_name(self):
slack_name = '@foobar'
self.submit_form(self.user, slack=slack_name)
self.user.refresh_from_db()
self.assertNotEqual(self.user.slack, slack_name)
class TestStaffProfileForm(BaseTestProfileForm):
def setUp(self):
self.staff = StaffFactory()
def test_cant_change_email(self):
new_email = 'me@this.com'
self.submit_form(self.staff, email=new_email)
self.staff.refresh_from_db()
self.assertNotEqual(new_email, self.staff.email)
def test_can_set_slack_name(self):
slack_name = '@foobar'
self.submit_form(self.staff, slack=slack_name)
self.staff.refresh_from_db()
self.assertEqual(self.staff.slack, slack_name)
def test_can_set_slack_name_with_trailing_space(self):
        slack_name = '@foobar '
        self.submit_form(self.staff, slack=slack_name)
        self.staff.refresh_from_db()
        self.assertEqual(self.staff.slack, slack_name.strip())
def test_cant_set_slack_name_with_space(self):
slack_name = '@ foobar'
form = self.submit_form(self.staff, slack=slack_name)
self.assertFalse(form.is_valid())
self.staff.refresh_from_db()
self.assertNotEqual(self.staff.slack, slack_name)
def test_auto_prepend_at(self):
slack_name = 'foobar'
self.submit_form(self.staff, slack=slack_name)
self.staff.refresh_from_db()
self.assertEqual(self.staff.slack, '@' + slack_name)
def test_can_clear_slack_name(self):
slack_name = ''
self.submit_form(self.staff, slack=slack_name)
self.staff.refresh_from_db()
self.assertEqual(self.staff.slack, slack_name)
class TestEmailChangePasswordForm(TestCase):
def setUp(self):
self.user = UserFactory()
def test_doesnt_error_on_null_slack_field(self):
form = EmailChangePasswordForm(self.user)
form.save('', '', None)
def test_can_update_slack(self):
slack_name = 'foobar'
form = EmailChangePasswordForm(self.user)
form.save('', '', slack_name)
self.assertEqual(self.user.slack, slack_name)
| [
[
[
32,
45
],
[
322,
335
]
],
[
[
70,
78
],
[
214,
222
],
[
2972,
2980
]
],
[
[
100,
123
],
[
3107,
3130
],
[
3257,
3280
]
],
[
[
125,
136
],
[
283,
294
],
[
467,
478
]
],
[
[
160,
172
],
[
1523,
1535
]
],
[
[
174,
185
],
[
703,
714
],
[
772,
783
],
[
3024,
3035
]
],
[
[
194,
213
],
[
640,
659
],
[
1459,
1478
]
],
[
[
624,
639
]
],
[
[
1438,
1458
]
],
[
[
2944,
2971
]
]
] |
from daoRefactor2 import DAO
from rssTickerInfo import rssTickerInfo
import json
import boto3
table = 'CompanyRSSFeed'
dao = DAO(table)
def main():
tickerValues = dao.getRssTickerValues('UNP')
print(tickerValues)
if __name__ == "__main__":
# calling main function
main()
| [
[
[
25,
28
],
[
132,
135
]
],
[
[
56,
69
]
],
[
[
78,
82
]
],
[
[
91,
96
]
],
[
[
100,
105
],
[
136,
141
]
],
[
[
126,
129
],
[
175,
178
]
],
[
[
150,
154
],
[
298,
302
]
]
] |
##############################################################################
# Copyright (c) 2015 Orange
# guyrodrigue.koffi@orange.com / koffirodrigue@gmail.com
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
from opnfv_testapi.tornado_swagger import swagger
@swagger.model()
class ProjectCreateRequest(object):
def __init__(self, name, description=''):
self.name = name
self.description = description
def format(self):
return {
"name": self.name,
"description": self.description,
}
@swagger.model()
class ProjectUpdateRequest(object):
def __init__(self, name='', description=''):
self.name = name
self.description = description
def format(self):
return {
"name": self.name,
"description": self.description,
}
@swagger.model()
class Project(object):
def __init__(self,
name=None, _id=None, description=None, create_date=None):
self._id = _id
self.name = name
self.description = description
self.creation_date = create_date
@staticmethod
def from_dict(res_dict):
if res_dict is None:
return None
t = Project()
t._id = res_dict.get('_id')
t.creation_date = res_dict.get('creation_date')
t.name = res_dict.get('name')
t.description = res_dict.get('description')
return t
def format(self):
return {
"name": self.name,
"description": self.description,
"creation_date": str(self.creation_date)
}
def format_http(self):
return {
"_id": str(self._id),
"name": self.name,
"description": self.description,
"creation_date": str(self.creation_date),
}
@swagger.model()
class Projects(object):
"""
@property projects:
@ptype projects: C{list} of L{Project}
"""
def __init__(self):
self.projects = list()
@staticmethod
def from_dict(res_dict):
if res_dict is None:
return None
res = Projects()
for project in res_dict.get('projects'):
res.projects.append(Project.from_dict(project))
return res
| [
[
[
528,
535
],
[
539,
546
],
[
830,
837
],
[
1124,
1131
],
[
2115,
2122
]
],
[
[
561,
581
]
],
[
[
852,
872
]
],
[
[
1146,
1153
],
[
1504,
1511
],
[
2509,
2516
]
],
[
[
2137,
2145
],
[
2417,
2425
]
]
] |
"""Random walk routines
"""
from .._ffi.function import _init_api
from .. import backend as F
from ..base import DGLError
from .. import ndarray as nd
from .. import utils
__all__ = [
'random_walk',
'pack_traces']
def random_walk(g, nodes, *, metapath=None, length=None, prob=None, restart_prob=None):
"""Generate random walk traces from an array of starting nodes based on the given metapath.
For a single starting node, ``num_traces`` traces would be generated. A trace would
1. Start from the given node and set ``t`` to 0.
2. Pick and traverse along edge type ``metapath[t]`` from the current node.
3. If no edge can be found, halt. Otherwise, increment ``t`` and go to step 2.
The returned traces all have length ``len(metapath) + 1``, where the first node
is the starting node itself.
If a random walk stops in advance, DGL pads the trace with -1 to have the same
length.
Parameters
----------
g : DGLGraph
The graph. Must be on CPU.
nodes : Tensor
Node ID tensor from which the random walk traces starts.
The tensor must be on CPU, and must have the same dtype as the ID type
of the graph.
metapath : list[str or tuple of str], optional
Metapath, specified as a list of edge types.
Mutually exclusive with :attr:`length`.
If omitted, DGL assumes that ``g`` only has one node & edge type. In this
case, the argument ``length`` specifies the length of random walk traces.
length : int, optional
Length of random walks.
Mutually exclusive with :attr:`metapath`.
Only used when :attr:`metapath` is None.
prob : str, optional
The name of the edge feature tensor on the graph storing the (unnormalized)
probabilities associated with each edge for choosing the next node.
The feature tensor must be non-negative and the sum of the probabilities
must be positive for the outbound edges of all nodes (although they don't have
to sum up to one). The result will be undefined otherwise.
If omitted, DGL assumes that the neighbors are picked uniformly.
restart_prob : float or Tensor, optional
Probability to terminate the current trace before each transition.
If a tensor is given, :attr:`restart_prob` should have the same length as
:attr:`metapath` or :attr:`length`.
Returns
-------
traces : Tensor
A 2-dimensional node ID tensor with shape ``(num_seeds, len(metapath) + 1)`` or
``(num_seeds, length + 1)`` if :attr:`metapath` is None.
types : Tensor
A 1-dimensional node type ID tensor with shape ``(len(metapath) + 1)`` or
``(length + 1)``.
The type IDs match the ones in the original graph ``g``.
Notes
-----
The returned tensors are on CPU.
Examples
--------
The following creates a homogeneous graph:
>>> g1 = dgl.graph([(0, 1), (1, 2), (1, 3), (2, 0), (3, 0)], 'user', 'follow')
Normal random walk:
>>> dgl.sampling.random_walk(g1, [0, 1, 2, 0], length=4)
(tensor([[0, 1, 2, 0, 1],
[1, 3, 0, 1, 3],
[2, 0, 1, 3, 0],
[0, 1, 2, 0, 1]]), tensor([0, 0, 0, 0, 0]))
The first tensor indicates the random walk path for each seed node.
The j-th element in the second tensor indicates the node type ID of the j-th node
    in every path. In this case, they are all 0 (``user``).
Random walk with restart:
>>> dgl.sampling.random_walk_with_restart(g1, [0, 1, 2, 0], length=4, restart_prob=0.5)
(tensor([[ 0, -1, -1, -1, -1],
[ 1, 3, 0, -1, -1],
[ 2, -1, -1, -1, -1],
[ 0, -1, -1, -1, -1]]), tensor([0, 0, 0, 0, 0]))
Non-uniform random walk:
>>> g1.edata['p'] = torch.FloatTensor([1, 0, 1, 1, 1]) # disallow going from 1 to 2
>>> dgl.sampling.random_walk(g1, [0, 1, 2, 0], length=4, prob='p')
(tensor([[0, 1, 3, 0, 1],
[1, 3, 0, 1, 3],
[2, 0, 1, 3, 0],
[0, 1, 3, 0, 1]]), tensor([0, 0, 0, 0, 0]))
Metapath-based random walk:
>>> g2 = dgl.heterograph({
... ('user', 'follow', 'user'): [(0, 1), (1, 2), (1, 3), (2, 0), (3, 0)],
... ('user', 'view', 'item'): [(0, 0), (0, 1), (1, 1), (2, 2), (3, 2), (3, 1)],
... ('item', 'viewed-by', 'user'): [(0, 0), (1, 0), (1, 1), (2, 2), (2, 3), (1, 3)]})
>>> dgl.sampling.random_walk(
... g2, [0, 1, 2, 0], metapath=['follow', 'view', 'viewed-by'] * 2)
(tensor([[0, 1, 1, 1, 2, 2, 3],
[1, 3, 1, 1, 2, 2, 2],
[2, 0, 1, 1, 3, 1, 1],
[0, 1, 1, 0, 1, 1, 3]]), tensor([0, 0, 1, 0, 0, 1, 0]))
Metapath-based random walk, with restarts only on items (i.e. after traversing a "view"
relationship):
>>> dgl.sampling.random_walk(
... g2, [0, 1, 2, 0], metapath=['follow', 'view', 'viewed-by'] * 2,
... restart_prob=torch.FloatTensor([0, 0.5, 0, 0, 0.5, 0]))
(tensor([[ 0, 1, -1, -1, -1, -1, -1],
[ 1, 3, 1, 0, 1, 1, 0],
[ 2, 0, 1, 1, 3, 2, 2],
[ 0, 1, 1, 3, 0, 0, 0]]), tensor([0, 0, 1, 0, 0, 1, 0]))
"""
assert g.device == F.cpu(), "Graph must be on CPU."
n_etypes = len(g.canonical_etypes)
n_ntypes = len(g.ntypes)
if metapath is None:
if n_etypes > 1 or n_ntypes > 1:
raise DGLError("metapath not specified and the graph is not homogeneous.")
if length is None:
raise ValueError("Please specify either the metapath or the random walk length.")
metapath = [0] * length
else:
metapath = [g.get_etype_id(etype) for etype in metapath]
gidx = g._graph
nodes = F.to_dgl_nd(utils.prepare_tensor(g, nodes, 'nodes'))
metapath = F.to_dgl_nd(utils.prepare_tensor(g, metapath, 'metapath'))
# Load the probability tensor from the edge frames
if prob is None:
p_nd = [nd.array([], ctx=nodes.ctx) for _ in g.canonical_etypes]
else:
p_nd = []
for etype in g.canonical_etypes:
if prob in g.edges[etype].data:
prob_nd = F.to_dgl_nd(g.edges[etype].data[prob])
if prob_nd.ctx != nodes.ctx:
raise ValueError(
'context of seed node array and edges[%s].data[%s] are different' %
(etype, prob))
else:
prob_nd = nd.array([], ctx=nodes.ctx)
p_nd.append(prob_nd)
# Actual random walk
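    # Dispatch: no restart_prob -> plain walk; a tensor restart_prob -> stepwise restart
    # (one termination probability per transition); a scalar float -> the same restart
    # probability applied uniformly at every transition.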
if restart_prob is None:
traces, types = _CAPI_DGLSamplingRandomWalk(gidx, nodes, metapath, p_nd)
elif F.is_tensor(restart_prob):
restart_prob = F.to_dgl_nd(restart_prob)
traces, types = _CAPI_DGLSamplingRandomWalkWithStepwiseRestart(
gidx, nodes, metapath, p_nd, restart_prob)
else:
traces, types = _CAPI_DGLSamplingRandomWalkWithRestart(
gidx, nodes, metapath, p_nd, restart_prob)
traces = F.from_dgl_nd(traces)
types = F.from_dgl_nd(types)
return traces, types
def pack_traces(traces, types):
"""Pack the padded traces returned by ``random_walk()`` into a concatenated array.
    The padding values (-1) are removed, and the length and offset of each trace are
returned along with the concatenated node ID and node type arrays.
Parameters
----------
traces : Tensor
A 2-dimensional node ID tensor. Must be on CPU and either ``int32`` or ``int64``.
types : Tensor
A 1-dimensional node type ID tensor. Must be on CPU and either ``int32`` or ``int64``.
Returns
-------
concat_vids : Tensor
An array of all node IDs concatenated and padding values removed.
concat_types : Tensor
        An array of node types corresponding to each node in ``concat_vids``.
Has the same length as ``concat_vids``.
lengths : Tensor
Length of each trace in the original traces tensor.
offsets : Tensor
        Offset of each trace (from the original traces tensor) in the new concatenated tensor.
Notes
-----
The returned tensors are on CPU.
Examples
--------
>>> g2 = dgl.heterograph({
... ('user', 'follow', 'user'): [(0, 1), (1, 2), (1, 3), (2, 0), (3, 0)],
... ('user', 'view', 'item'): [(0, 0), (0, 1), (1, 1), (2, 2), (3, 2), (3, 1)],
... ('item', 'viewed-by', 'user'): [(0, 0), (1, 0), (1, 1), (2, 2), (2, 3), (1, 3)]})
>>> traces, types = dgl.sampling.random_walk(
... g2, [0, 0], metapath=['follow', 'view', 'viewed-by'] * 2,
... restart_prob=torch.FloatTensor([0, 0.5, 0, 0, 0.5, 0]))
>>> traces, types
(tensor([[ 0, 1, -1, -1, -1, -1, -1],
[ 0, 1, 1, 3, 0, 0, 0]]), tensor([0, 0, 1, 0, 0, 1, 0]))
>>> concat_vids, concat_types, lengths, offsets = dgl.sampling.pack_traces(traces, types)
>>> concat_vids
tensor([0, 1, 0, 1, 1, 3, 0, 0, 0])
>>> concat_types
tensor([0, 0, 0, 0, 1, 0, 0, 1, 0])
>>> lengths
tensor([2, 7])
>>> offsets
    tensor([0, 2])
The first tensor ``concat_vids`` is the concatenation of all paths, i.e. flattened array
of ``traces``, excluding all padding values (-1).
The second tensor ``concat_types`` stands for the node type IDs of all corresponding nodes
in the first tensor.
    The third and fourth tensors indicate the length and the offset of each path. With these
    tensors it is easy to obtain the i-th random walk path:
>>> vids = concat_vids.split(lengths.tolist())
    >>> vtypes = concat_types.split(lengths.tolist())
>>> vids[1], vtypes[1]
(tensor([0, 1, 1, 3, 0, 0, 0]), tensor([0, 0, 1, 0, 0, 1, 0]))
"""
assert F.is_tensor(traces) and F.context(traces) == F.cpu(), "traces must be a CPU tensor"
assert F.is_tensor(types) and F.context(types) == F.cpu(), "types must be a CPU tensor"
traces = F.to_dgl_nd(traces)
types = F.to_dgl_nd(types)
concat_vids, concat_types, lengths, offsets = _CAPI_DGLSamplingPackTraces(traces, types)
concat_vids = F.from_dgl_nd(concat_vids)
concat_types = F.from_dgl_nd(concat_types)
lengths = F.from_dgl_nd(lengths)
offsets = F.from_dgl_nd(offsets)
return concat_vids, concat_types, lengths, offsets
_init_api('dgl.sampling.randomwalks', __name__)
| [
[
[
57,
66
],
[
10331,
10340
]
],
[
[
82,
94
],
[
5265,
5266
],
[
5781,
5782
],
[
5849,
5850
],
[
6197,
6198
],
[
6700,
6701
],
[
6750,
6751
],
[
7046,
7047
],
[
7080,
7081
],
[
9773,
9774
],
[
9797,
9798
],
[
9818,
9819
],
[
9868,
9869
],
[
9891,
9892
],
[
9911,
9912
],
[
9962,
9963
],
[
9994,
9995
],
[
10126,
10127
],
[
10172,
10173
],
[
10214,
10215
],
[
10251,
10252
]
],
[
[
114,
122
],
[
5451,
5459
]
],
[
[
138,
151
],
[
6001,
6003
],
[
6494,
6496
]
],
[
[
167,
172
],
[
5793,
5798
],
[
5861,
5866
]
],
[
[
174,
181
]
],
[
[
229,
240
]
],
[
[
7131,
7142
]
]
] |
import vdomr as vd
import spikeforest as sf
from cairio import client as ca
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
class AccuracyPlot(vd.components.Pyplot):
def __init__(self, snrs, accuracies):
vd.components.Pyplot.__init__(self)
self._snrs = snrs
self._accuracies = accuracies
def plot(self):
plt.scatter(self._snrs, self._accuracies)
class StudySorterFigure(vd.Component):
def __init__(self, sfdata):
vd.Component.__init__(self)
self._plot = None
self._SF_data = sfdata
self._study = None
self._sorter = None
def setStudySorter(self, *, study, sorter):
self._study = study
self._sorter = sorter
self._update_plot()
def _update_plot(self):
SF = self._SF_data
study = SF.study(self._study)
b = _get_study_sorting_results(study)
a = b[self._sorter]
snrs = a['true_unit_snrs']
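        # Per-unit accuracy = matches / (matches + false positives + false negatives),
        # i.e. an agreement score between the sorted and the true units in [0, 1].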
accuracies = a['num_matches'] / \
(a['num_matches']+a['num_false_positives']+a['num_false_negatives'])
self._plot = AccuracyPlot(snrs, accuracies)
self.refresh()
def render(self):
if self._plot is None:
return vd.div('Nothing')
return vd.div(
vd.div('test '+self._study+' '+self._sorter),
self._plot
)
class SFBrowser(vd.Component):
def __init__(self, output_id):
vd.Component.__init__(self)
self._output_id = output_id
a = ca.loadObject(
key=dict(name='spikeforest_results'),
subkey=output_id
)
if not a:
print('ERROR: unable to open results: '+output_id)
return
if ('recordings' not in a) or ('studies' not in a) or ('sorting_results' not in a):
print('ERROR: problem with output: '+output_id)
return
studies = a['studies']
recordings = a['recordings']
sorting_results = a['sorting_results']
SF = sf.SFData()
SF.loadStudies(studies)
SF.loadRecordings2(recordings)
SF.loadSortingResults(sorting_results)
# sorter_names=[]
# for SR in sorting_results:
# sorter_names.append(SR['sorter']['name'])
# sorter_names=list(set(sorter_names))
# sorter_names.sort()
self._SF_data = SF
self._accuracy_threshold_input = vd.components.LineEdit(
value=0.8, dtype=float, style=dict(width='70px'))
self._update_button = vd.components.Button(
onclick=self._on_update, class_='button', label='Update')
self._study_sorter_fig = StudySorterFigure(SF)
self._study_sorter_table = vd.div() # dummy
vd.devel.loadBootstrap()
self._update_accuracy_table()
def _on_update(self):
self._update_accuracy_table()
def _update_accuracy_table(self):
accuracy_threshold = self._accuracy_threshold_input.value()
self._accuracy_table_data, self._sorters = self._get_accuracy_table_data(
accuracy_threshold=accuracy_threshold)
self._accuracy_table = self._to_table(
self._accuracy_table_data, ['study']+self._sorters)
print(self._accuracy_table_data)
self.refresh()
def _open_study_sorter_fig(self, *, sorter, study):
self._study_sorter_fig.setStudySorter(study=study, sorter=sorter)
def _get_accuracy_table_data(self, *, accuracy_threshold):
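        # Builds one row per study; for every sorter, the cell text is the number of true
        # units whose accuracy meets the threshold, and clicking it opens the corresponding
        # study/sorter figure.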
SF = self._SF_data
accuracy_table = []
sorters = set()
for sname in SF.studyNames():
print('STUDY: '+sname)
study = SF.study(sname)
b = _get_study_sorting_results(study)
tmp = dict(
study=dict( # first column
text=sname
)
)
for sorter in b:
sorters.add(sorter)
a = b[sorter]
accuracies = a['num_matches'] / \
(a['num_matches']+a['num_false_positives'] +
a['num_false_negatives'])
tmp[sorter] = dict(
text=str(np.count_nonzero(
accuracies >= accuracy_threshold)),
callback=lambda sorter=sorter, study=sname: self._open_study_sorter_fig(
sorter=sorter, study=study)
)
accuracy_table.append(tmp)
sorters = list(sorters)
sorters.sort()
return accuracy_table, sorters
def _to_table(self, X, column_names):
rows = []
rows.append(vd.tr([vd.th(cname) for cname in column_names]))
for x in X:
elmts = []
for cname in column_names:
tmp = x.get(cname)
if tmp:
if 'callback' in tmp:
elmt = vd.a(tmp['text'], onclick=tmp['callback'])
else:
elmt = vd.span(tmp['text'])
else:
elmt = vd.span('N/A')
elmts.append(elmt)
rows.append(vd.tr([vd.td(elmt) for elmt in elmts]))
return vd.table(rows, class_='table')
def render(self):
return vd.div(
vd.table(
vd.tr(
vd.td('Accuracy threshold:'),
vd.td(self._accuracy_threshold_input),
vd.td(self._update_button)
),
class_='table',
style={'max-width': '200px'}
),
vd.components.ScrollArea(
self._accuracy_table,
height=500
),
self._study_sorter_fig,
style=dict(padding='15px')
)
def _get_study_sorting_results(study):
results = []
for rname in study.recordingNames():
rec = study.recording(rname)
true_units_info = rec.trueUnitsInfo(format='json')
true_units_info_by_id = dict()
for true_unit in true_units_info:
true_units_info_by_id[true_unit['unit_id']] = true_unit
for srname in rec.sortingResultNames():
a = rec.sortingResult(srname)
res0 = dict(sorter=srname, recording=rname, study=study.name())
tmp = a.comparisonWithTruth(format='json')
for i in tmp:
tmp[i]['true_unit_info'] = true_units_info_by_id[tmp[i]['unit_id']]
res0['comparison_with_truth'] = tmp
results.append(res0)
sorters = list(set([a['sorter'] for a in results]))
sorters.sort()
units_by_sorter = dict()
for sorter in sorters:
units_by_sorter[sorter] = []
for obj in results:
sorter0 = obj['sorter']
units = [obj['comparison_with_truth'][i]
for i in obj['comparison_with_truth']]
units_by_sorter[sorter0] = units_by_sorter[sorter0]+units
ret = dict()
for sorter in sorters:
units = units_by_sorter[sorter]
try:
ret[sorter] = dict(
true_unit_ids=[unit['unit_id'] for unit in units],
true_unit_snrs=np.array(
[unit['true_unit_info']['snr'] for unit in units]),
true_unit_firing_rates=np.array(
[unit['true_unit_info']['firing_rate'] for unit in units]),
num_matches=np.array([unit['num_matches'] for unit in units]),
num_false_positives=np.array(
[unit['num_false_positives'] for unit in units]),
num_false_negatives=np.array(
[unit['num_false_negatives'] for unit in units])
)
        except Exception:
print('WARNING: Problem loading results for sorter: '+sorter)
ret[sorter] = dict(
true_unit_ids=[],
true_unit_snrs=np.array([]),
true_unit_firing_rates=np.array([]),
num_matches=np.array([]),
num_false_positives=np.array([]),
num_false_negatives=np.array([])
)
return ret
| [
[
[
7,
18
],
[
173,
175
],
[
443,
445
],
[
1398,
1400
],
[
246,
248
],
[
498,
500
],
[
1248,
1250
],
[
1281,
1283
],
[
1301,
1303
],
[
1456,
1458
],
[
2437,
2439
],
[
2553,
2555
],
[
2735,
2737
],
[
2762,
2764
],
[
4648,
4650
],
[
4655,
4657
],
[
4911,
4913
],
[
5011,
5013
],
[
5081,
5083
],
[
5155,
5157
],
[
5162,
5164
],
[
5210,
5212
],
[
5279,
5281
],
[
5299,
5301
],
[
5325,
5327
],
[
5352,
5354
],
[
5402,
5404
],
[
5461,
5463
],
[
5611,
5613
]
],
[
[
26,
43
],
[
2040,
2042
]
],
[
[
63,
75
],
[
1534,
1536
]
],
[
[
83,
95
]
],
[
[
103,
114
],
[
4192,
4194
],
[
7184,
7186
],
[
7305,
7307
],
[
7423,
7425
],
[
7510,
7512
],
[
7626,
7628
],
[
7906,
7908
],
[
7959,
7961
],
[
8001,
8003
],
[
8051,
8053
],
[
8101,
8103
]
],
[
[
138,
151
],
[
375,
378
]
],
[
[
160,
172
],
[
1121,
1133
]
],
[
[
425,
442
],
[
2678,
2695
]
],
[
[
1388,
1397
]
],
[
[
5808,
5834
],
[
879,
905
],
[
3705,
3731
]
]
] |
import sys
import unittest
from unittest import mock
from warnings import catch_warnings
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.item import ABCMeta, DictItem, Field, Item, ItemMeta
PY36_PLUS = (sys.version_info.major >= 3) and (sys.version_info.minor >= 6)
class ItemTest(unittest.TestCase):
def assertSortedEqual(self, first, second, msg=None):
return self.assertEqual(sorted(first), sorted(second), msg)
def test_simple(self):
class TestItem(Item):
name = Field()
i = TestItem()
i['name'] = u'name'
self.assertEqual(i['name'], u'name')
def test_init(self):
class TestItem(Item):
name = Field()
i = TestItem()
self.assertRaises(KeyError, i.__getitem__, 'name')
i2 = TestItem(name=u'john doe')
self.assertEqual(i2['name'], u'john doe')
i3 = TestItem({'name': u'john doe'})
self.assertEqual(i3['name'], u'john doe')
i4 = TestItem(i3)
self.assertEqual(i4['name'], u'john doe')
self.assertRaises(KeyError, TestItem, {'name': u'john doe',
'other': u'foo'})
def test_invalid_field(self):
class TestItem(Item):
pass
i = TestItem()
self.assertRaises(KeyError, i.__setitem__, 'field', 'text')
self.assertRaises(KeyError, i.__getitem__, 'field')
def test_repr(self):
class TestItem(Item):
name = Field()
number = Field()
i = TestItem()
i['name'] = u'John Doe'
i['number'] = 123
itemrepr = repr(i)
self.assertEqual(itemrepr,
"{'name': 'John Doe', 'number': 123}")
i2 = eval(itemrepr)
self.assertEqual(i2['name'], 'John Doe')
self.assertEqual(i2['number'], 123)
def test_private_attr(self):
class TestItem(Item):
name = Field()
i = TestItem()
i._private = 'test'
self.assertEqual(i._private, 'test')
def test_raise_getattr(self):
class TestItem(Item):
name = Field()
i = TestItem()
self.assertRaises(AttributeError, getattr, i, 'name')
def test_raise_setattr(self):
class TestItem(Item):
name = Field()
i = TestItem()
self.assertRaises(AttributeError, setattr, i, 'name', 'john')
def test_custom_methods(self):
class TestItem(Item):
name = Field()
def get_name(self):
return self['name']
def change_name(self, name):
self['name'] = name
i = TestItem()
self.assertRaises(KeyError, i.get_name)
i['name'] = u'lala'
self.assertEqual(i.get_name(), u'lala')
i.change_name(u'other')
self.assertEqual(i.get_name(), 'other')
def test_metaclass(self):
class TestItem(Item):
name = Field()
keys = Field()
values = Field()
i = TestItem()
i['name'] = u'John'
self.assertEqual(list(i.keys()), ['name'])
self.assertEqual(list(i.values()), ['John'])
i['keys'] = u'Keys'
i['values'] = u'Values'
self.assertSortedEqual(list(i.keys()), ['keys', 'values', 'name'])
self.assertSortedEqual(list(i.values()), [u'Keys', u'Values', u'John'])
def test_metaclass_with_fields_attribute(self):
class TestItem(Item):
fields = {'new': Field(default='X')}
item = TestItem(new=u'New')
self.assertSortedEqual(list(item.keys()), ['new'])
self.assertSortedEqual(list(item.values()), [u'New'])
def test_metaclass_inheritance(self):
class BaseItem(Item):
name = Field()
keys = Field()
values = Field()
class TestItem(BaseItem):
keys = Field()
i = TestItem()
i['keys'] = 3
self.assertEqual(list(i.keys()), ['keys'])
self.assertEqual(list(i.values()), [3])
def test_metaclass_multiple_inheritance_simple(self):
class A(Item):
fields = {'load': Field(default='A')}
save = Field(default='A')
class B(A):
pass
class C(Item):
fields = {'load': Field(default='C')}
save = Field(default='C')
class D(B, C):
pass
item = D(save='X', load='Y')
self.assertEqual(item['save'], 'X')
self.assertEqual(item['load'], 'Y')
self.assertEqual(D.fields, {'load': {'default': 'A'},
'save': {'default': 'A'}})
# D class inverted
class E(C, B):
pass
self.assertEqual(E(save='X')['save'], 'X')
self.assertEqual(E(load='X')['load'], 'X')
self.assertEqual(E.fields, {'load': {'default': 'C'},
'save': {'default': 'C'}})
def test_metaclass_multiple_inheritance_diamond(self):
class A(Item):
fields = {'update': Field(default='A')}
save = Field(default='A')
load = Field(default='A')
class B(A):
pass
class C(A):
fields = {'update': Field(default='C')}
save = Field(default='C')
class D(B, C):
fields = {'update': Field(default='D')}
load = Field(default='D')
self.assertEqual(D(save='X')['save'], 'X')
self.assertEqual(D(load='X')['load'], 'X')
self.assertEqual(D.fields, {'save': {'default': 'C'},
'load': {'default': 'D'}, 'update': {'default': 'D'}})
# D class inverted
class E(C, B):
load = Field(default='E')
self.assertEqual(E(save='X')['save'], 'X')
self.assertEqual(E(load='X')['load'], 'X')
self.assertEqual(E.fields, {'save': {'default': 'C'},
'load': {'default': 'E'}, 'update': {'default': 'C'}})
def test_metaclass_multiple_inheritance_without_metaclass(self):
class A(Item):
fields = {'load': Field(default='A')}
save = Field(default='A')
class B(A):
pass
class C(object):
fields = {'load': Field(default='C')}
not_allowed = Field(default='not_allowed')
save = Field(default='C')
class D(B, C):
pass
self.assertRaises(KeyError, D, not_allowed='value')
self.assertEqual(D(save='X')['save'], 'X')
self.assertEqual(D.fields, {'save': {'default': 'A'},
'load': {'default': 'A'}})
# D class inverted
class E(C, B):
pass
self.assertRaises(KeyError, E, not_allowed='value')
self.assertEqual(E(save='X')['save'], 'X')
self.assertEqual(E.fields, {'save': {'default': 'A'},
'load': {'default': 'A'}})
def test_to_dict(self):
class TestItem(Item):
name = Field()
i = TestItem()
i['name'] = u'John'
self.assertEqual(dict(i), {'name': u'John'})
def test_copy(self):
class TestItem(Item):
name = Field()
item = TestItem({'name': 'lower'})
copied_item = item.copy()
self.assertNotEqual(id(item), id(copied_item))
copied_item['name'] = copied_item['name'].upper()
self.assertNotEqual(item['name'], copied_item['name'])
def test_deepcopy(self):
class TestItem(Item):
tags = Field()
item = TestItem({'tags': ['tag1']})
copied_item = item.deepcopy()
item['tags'].append('tag2')
assert item['tags'] != copied_item['tags']
def test_dictitem_deprecation_warning(self):
"""Make sure the DictItem deprecation warning is not issued for
Item"""
with catch_warnings(record=True) as warnings:
item = Item()
self.assertEqual(len(warnings), 0)
class SubclassedItem(Item):
pass
subclassed_item = SubclassedItem()
self.assertEqual(len(warnings), 0)
class ItemMetaTest(unittest.TestCase):
def test_new_method_propagates_classcell(self):
new_mock = mock.Mock(side_effect=ABCMeta.__new__)
base = ItemMeta.__bases__[0]
with mock.patch.object(base, '__new__', new_mock):
class MyItem(Item):
if not PY36_PLUS:
# This attribute is an internal attribute in Python 3.6+
# and must be propagated properly. See
# https://docs.python.org/3.6/reference/datamodel.html#creating-the-class-object
# In <3.6, we add a dummy attribute just to ensure the
# __new__ method propagates it correctly.
__classcell__ = object()
def f(self):
# For rationale of this see:
# https://github.com/python/cpython/blob/ee1a81b77444c6715cbe610e951c655b6adab88b/Lib/test/test_super.py#L222
return __class__ # noqa https://github.com/scrapy/scrapy/issues/2836
MyItem()
(first_call, second_call) = new_mock.call_args_list[-2:]
mcs, class_name, bases, attrs = first_call[0]
assert '__classcell__' not in attrs
mcs, class_name, bases, attrs = second_call[0]
assert '__classcell__' in attrs
class ItemMetaClassCellRegression(unittest.TestCase):
def test_item_meta_classcell_regression(self):
class MyItem(Item, metaclass=ItemMeta):
def __init__(self, *args, **kwargs):
            # This call to super() triggers the __classcell__ propagation
            # requirement. When not done properly it raises an error:
# TypeError: __class__ set to <class '__main__.MyItem'>
# defining 'MyItem' as <class '__main__.MyItem'>
super(MyItem, self).__init__(*args, **kwargs)
class DictItemTest(unittest.TestCase):
def test_deprecation_warning(self):
with catch_warnings(record=True) as warnings:
dict_item = DictItem()
self.assertEqual(len(warnings), 1)
self.assertEqual(warnings[0].category, ScrapyDeprecationWarning)
with catch_warnings(record=True) as warnings:
class SubclassedDictItem(DictItem):
pass
subclassed_dict_item = SubclassedDictItem()
self.assertEqual(len(warnings), 1)
self.assertEqual(warnings[0].category, ScrapyDeprecationWarning)
if __name__ == "__main__":
unittest.main()
| [
[
[
7,
10
],
[
225,
228
],
[
259,
262
]
],
[
[
18,
26
],
[
305,
313
],
[
8099,
8107
],
[
9431,
9439
],
[
9968,
9976
],
[
10578,
10586
]
],
[
[
48,
52
],
[
8191,
8195
],
[
8281,
8285
]
],
[
[
74,
88
],
[
7808,
7822
],
[
10042,
10056
],
[
10255,
10269
]
],
[
[
120,
144
],
[
10216,
10240
],
[
10519,
10543
]
],
[
[
169,
176
],
[
8213,
8220
]
],
[
[
178,
186
],
[
10107,
10115
],
[
10333,
10341
]
],
[
[
188,
193
],
[
529,
534
],
[
709,
714
],
[
1507,
1512
],
[
1536,
1541
],
[
1958,
1963
],
[
2147,
2152
],
[
2325,
2330
],
[
2512,
2517
],
[
2975,
2980
],
[
3002,
3007
],
[
3031,
3036
],
[
3523,
3528
],
[
3793,
3798
],
[
3820,
3825
],
[
3849,
3854
],
[
3911,
3916
],
[
4176,
4181
],
[
4215,
4220
],
[
4326,
4331
],
[
4365,
4370
],
[
5039,
5044
],
[
5078,
5083
],
[
5116,
5121
],
[
5226,
5231
],
[
5265,
5270
],
[
5340,
5345
],
[
5379,
5384
],
[
5700,
5705
],
[
6074,
6079
],
[
6113,
6118
],
[
6226,
6231
],
[
6272,
6277
],
[
6320,
6325
],
[
6952,
6957
],
[
7140,
7145
],
[
7480,
7485
]
],
[
[
195,
199
],
[
503,
507
],
[
683,
687
],
[
1256,
1260
],
[
1481,
1485
],
[
1932,
1936
],
[
2121,
2125
],
[
2299,
2303
],
[
2486,
2490
],
[
2949,
2953
],
[
3487,
3491
],
[
3767,
3771
],
[
4139,
4143
],
[
4289,
4293
],
[
5000,
5004
],
[
6037,
6041
],
[
6926,
6930
],
[
7114,
7118
],
[
7454,
7458
],
[
7868,
7872
],
[
7956,
7960
],
[
8353,
8357
],
[
9524,
9528
]
],
[
[
201,
209
],
[
8245,
8253
],
[
9540,
9548
]
],
[
[
212,
221
],
[
8383,
8392
]
],
[
[
296,
304
]
],
[
[
8086,
8098
]
],
[
[
9403,
9430
]
],
[
[
9955,
9967
]
]
] |
"""
A CapitalT class and methods that use the Cross class.
Authors: David Mutchler, Vibha Alangar, Dave Fisher, Amanda Stouder,
their colleagues and Xiaolong Chen (Harry).
""" # DONE: 1. PUT YOUR NAME IN THE ABOVE LINE.
import rosegraphics as rg
import math
def main():
"""
Calls the test functions.
    As you implement each CapitalT method, uncomment the appropriate tests.
"""
# --------------------------------------------------------------
# Uncomment only 1 test at a time as you develop your code.
# --------------------------------------------------------------
print('Un-comment the calls in MAIN one by one')
print(' to run the testing code as you complete the TODOs.')
run_test_simple_t()
run_test_set_colors()
run_test_move_by()
run_test_clone()
def run_test_simple_t():
"""
Tests for the __init__ method and attach_to method.
See the simple_t PDF for expected output.
"""
print()
print('--------------------------------------------------')
print('Testing __init__ and attach_to ')
print('--------------------------------------------------')
window = rg.RoseWindow(600, 400, 'Test 1 - Simple Ts')
t1 = CapitalT(rg.Point(300, 50), 100, 200, 20)
print("Expected: Point(250.0, 40.0) Point(350.0, 60.0)")
print("Actual: ", t1.h_rect.get_upper_left_corner(), t1.h_rect.get_lower_right_corner())
print("Expected: Point(290.0, 40.0) Point(310.0, 240.0)")
print("Actual: ", t1.v_rect.get_upper_left_corner(), t1.v_rect.get_lower_right_corner())
t1.attach_to(window)
t2 = CapitalT(rg.Point(150, 150), 100, 150, 40)
t2.attach_to(window)
t3 = CapitalT(rg.Point(450, 150), 10, 15, 4)
t3.attach_to(window)
window.render()
print("See graphics window and compare to the simple_t PDF")
window.close_on_mouse_click()
def run_test_set_colors():
""" Tests for the set_colors method. See the set_colors PDF for expected output. """
window = rg.RoseWindow(600, 400, 'Test 2 - Colorful Ts')
t1 = CapitalT(rg.Point(300, 50), 100, 200, 20)
t1.set_colors('red', 'magenta')
t1.attach_to(window)
t2 = CapitalT(rg.Point(150, 150), 100, 150, 40)
t2.set_colors('green', 'purple')
t2.attach_to(window)
t3 = CapitalT(rg.Point(450, 150), 10, 15, 4)
t3.set_colors('blue', 'gray')
t3.attach_to(window)
window.render()
window.close_on_mouse_click()
def run_test_move_by():
""" Tests for the move_by method. See the move_by PDF for expected output. """
window = rg.RoseWindow(600, 400, 'Test 3 - Moving T')
little_red_t = CapitalT(rg.Point(300, 50), 60, 80, 5)
little_red_t.set_colors('red', 'gray')
little_red_t.attach_to(window)
window.render(0.5)
little_red_t.move_by(0, 100)
window.render(0.5)
little_red_t.move_by(0, 100)
window.render(0.5)
for k in range(40):
little_red_t.move_by(5, -2)
window.render(0.05)
window.close_on_mouse_click()
def run_test_clone():
""" Tests for the clone method. See the clone PDF for expected output. """
window = rg.RoseWindow(650, 400, 'Test 4 - Cloning Ts')
first_t = CapitalT(rg.Point(75, 50), 80, 80, 40)
first_t.set_colors('blue', 'cyan')
for k in range(6):
t = first_t.clone()
if k < 2:
t.set_colors('white', 'black')
t.move_by(100 * k, 20 * k)
t.attach_to(window)
first_t.move_by(0, 200)
first_t.attach_to(window)
window.render()
window.close_on_mouse_click()
########################################################################
# The CapitalT class (and its methods) begins here.
########################################################################
class CapitalT(object):
"""
Manages a CapitalT graphics object which is made up of two rectangles.
    See the PDFs, especially dimensions.pdf, to help you understand this.
"""
def __init__(self, intersection_center, width, height, letter_thickness):
"""
What comes in:
-- self
-- an rg.Point for the intersection center of the CapitalT
-- This point is also center of the horizontal rectangle.
          -- an int for the width of the CapitalT (the width of the horizontal rectangle)
          -- an int for the height of the CapitalT (the height of the vertical rectangle)
          -- an int for the thickness of each rectangle (the letter's thickness)
What goes out: Nothing (i.e., None).
Side effects: Sets two instance variables named:
-- h_rect (to represent the horizontal rectangle in the T, the top bar)
-- v_rect (to represent the vertical rectangle in the T, the | part of the T)
*** See the dimensions PDF for the exact placement of the rectangles in the T. ***
Each rectangle is an rg.Rectangle. Unlike prior modules you are NOT
allowed to make any other instance variables. You may only use
exactly these two and must figure out how to do the problem with ONLY
those two instance variables.
Example:
t1 = CapitalT(rg.Point(300, 50), 100, 200, 20)
-- t1.h_rect would have an upper left corner of (250, 40)
          -- t1.h_rect would have a lower right corner of (350, 60)
-- t1.v_rect would have an upper left corner of (290, 40)
          -- t1.v_rect would have a lower right corner of (310, 240)
Type hints:
:type intersection_center: rg.Point
:type width: int
:type height: int
:type letter_thickness: int
"""
        # The horizontal bar is centered on the intersection point: `width` wide and `letter_thickness` tall.
        hupperright = rg.Point(intersection_center.x + (1 / 2) * width, intersection_center.y - (1 / 2) * letter_thickness)
        hlowerleft = rg.Point(intersection_center.x - (1 / 2) * width, intersection_center.y + (1 / 2) * letter_thickness)
        self.h_rect = rg.Rectangle(hupperright, hlowerleft)
        # The vertical bar is `letter_thickness` wide; it starts at the top of the horizontal bar
        # and extends down so the whole T is `height` tall.
        vupperright = rg.Point(intersection_center.x + (1 / 2) * letter_thickness, hupperright.y)
        vlowerleft = rg.Point(intersection_center.x - (1 / 2) * letter_thickness, hlowerleft.y + (height - letter_thickness))
        self.v_rect = rg.Rectangle(vupperright, vlowerleft)
# --------------------------------------------------------------
# DONE: 3.
# READ the above specification, including the Example.
# Implement this method
# Note: you will need to also implement attach_to before testing
# --------------------------------------------------------------
def attach_to(self, window):
"""
What comes in:
-- self
-- an rg.RoseWindow
What goes out: Nothing (i.e., None).
Side effects:
-- Attaches both instance rectangles to the given window.
-- Hint: Attach h_rect second to make it draw in front of v_rect
Example:
window = rg.RoseWindow()
t1 = CapitalT(rg.Point(300, 50), 100, 200, 20)
t1.attach_to(window)
Type hints:
:type window: rg.RoseWindow
"""
self.v_rect.attach_to(window)
self.h_rect.attach_to(window)
# --------------------------------------------------------------
# DONE: 4.
# READ the above specification, including the Example.
# Implement and test this method by looking at the console and
# the graphics window (compare it to simple_t.pdf)
# --------------------------------------------------------------
def set_colors(self, fill_color, outline_color):
"""
What comes in:
-- self
-- a string that represents a valid rosegraphics color
-- a string that represents a valid rosegraphics color
What goes out: Nothing (i.e., None).
Side effects:
-- sets the fill_color of both rectangles to the given fill color
-- sets the outline_color of both rectangles to the given outline color
Example:
window = rg.RoseWindow()
t1 = CapitalT(rg.Point(300, 50), 100, 200, 20)
              t1.set_colors('red', 'blue')
Type hints:
:type fill_color: str
:type outline_color: str
"""
self.h_rect.fill_color = fill_color
self.v_rect.fill_color = fill_color
self.h_rect.outline_color = outline_color
        self.v_rect.outline_color = outline_color
# --------------------------------------------------------------
# DONE: 5.
# READ the above specification, including the Example.
# Implement and test this method by uncommenting the appropriate
# run_test method in main. Compare the graphics window to
# set_colors.pdf.
# --------------------------------------------------------------
def move_by(self, dx, dy):
"""
What comes in:
-- self
-- an int amount to move in the x direction
-- an int amount to move in the y direction
What goes out: Nothing (i.e., None).
Side effects:
-- Moves both h_rect and v_rect the specified dx and dy amounts.
Example:
window = rg.RoseWindow()
t1 = CapitalT(rg.Point(300, 50), 100, 200, 20)
t1.attach_to(window)
window.render(0.5)
t1.move_by(100, 200) # Moves the T 100 pixels right and 200 down.
window.render() # necessary to see the change
Type hints:
:type dx: int
:type dy: int
"""
        self.h_rect.corner_1.x += dx
        self.h_rect.corner_2.x += dx
        self.h_rect.corner_1.y += dy
        self.h_rect.corner_2.y += dy
        self.v_rect.corner_1.x += dx
        self.v_rect.corner_2.x += dx
        self.v_rect.corner_1.y += dy
        self.v_rect.corner_2.y += dy
# --------------------------------------------------------------
# DONE: 6.
# READ the above specification, including the Example.
# Implement and test this method by uncommenting the appropriate
# run_test method in main. Compare the graphics window to
# move_by.pdf. Note: the pdf shows the different locations
# that the T moves through, but there is only 1 T at any moment.
# --------------------------------------------------------------
def clone(self):
"""
What comes in:
-- self
What goes out:
-- Returns a new CapitalT that is located in the same position as
this CapitalT with the same colors for the rectangles.
Side effects:
-- None
Example:
window = rg.RoseWindow()
t1 = CapitalT(rg.Point(300, 50), 100, 200, 20)
              t1.set_colors('red', 'blue')
t2 = t1.clone() # t2 is at the same location WITH THE SAME COLORS
Type hints:
:rtype: CapitalT
"""
        h = self.h_rect
        v = self.v_rect
        # The intersection center of this T is the center of its horizontal bar.
        intersect = rg.Point((h.get_upper_left_corner().x + h.get_lower_right_corner().x) / 2,
                             (h.get_upper_left_corner().y + h.get_lower_right_corner().y) / 2)
        thickness = math.fabs(h.get_lower_right_corner().y - h.get_upper_left_corner().y)
        new_t = CapitalT(intersect, h.get_width(), v.get_height(), thickness)
        new_t.set_colors(self.h_rect.fill_color, self.h_rect.outline_color)
        return new_t
# --------------------------------------------------------------
# DONE: 7.
# READ the above specification, including the Example.
# Implement and test this method by uncommenting the appropriate
# run_test method in main. Compare the graphics window to
# clone.pdf.
# --------------------------------------------------------------
# ----------------------------------------------------------------------
# If this module is running at the top level (as opposed to being
# imported by another module), then call the 'main' function.
# ----------------------------------------------------------------------
if __name__ == '__main__':
main()
| [
[
[
243,
261
],
[
1173,
1175
],
[
1237,
1239
],
[
1624,
1626
],
[
1701,
1703
],
[
2008,
2010
],
[
2074,
2076
],
[
2186,
2188
],
[
2300,
2302
],
[
2567,
2569
],
[
2640,
2642
],
[
3122,
3124
],
[
3192,
3194
],
[
5702,
5704
],
[
5813,
5815
],
[
5924,
5926
],
[
5984,
5986
],
[
6076,
6078
],
[
6192,
6194
],
[
11072,
11074
]
],
[
[
269,
273
],
[
11200,
11204
]
],
[
[
280,
284
],
[
12149,
12153
]
],
[
[
827,
844
],
[
731,
748
]
],
[
[
1882,
1901
],
[
755,
774
]
],
[
[
2450,
2466
],
[
781,
797
]
],
[
[
3011,
3025
],
[
804,
818
]
],
[
[
3760,
3768
],
[
1228,
1236
],
[
1615,
1623
],
[
1692,
1700
],
[
2065,
2073
],
[
2177,
2185
],
[
2291,
2299
],
[
2631,
2639
],
[
3183,
3191
],
[
11285,
11293
]
]
] |
import time
from collections import deque
from datetime import datetime
import backtrader as bt
from backtrader.feed import DataBase
from backtrader.utils.py3 import with_metaclass
from .cryptostore import CryptoStore
class MetaCryptoFeed(DataBase.__class__):
def __init__(cls, name, bases, dct):
'''Class has already been created ... register'''
# Initialize the class
super(MetaCryptoFeed, cls).__init__(name, bases, dct)
# Register with the store
CryptoStore.DataCls = cls
class CryptoFeed(with_metaclass(MetaCryptoFeed, DataBase)):
"""
CryptoCurrency eXchange Trading Library Data Feed.
Params:
- ``historical`` (default: ``False``)
If set to ``True`` the data feed will stop after doing the first
download of data.
The standard data feed parameters ``fromdate`` and ``todate`` will be
used as reference.
      - ``backfill_start`` (default: ``False``)
Perform backfilling at the start. The maximum possible historical data
will be fetched in a single request.
    Changes from Ed's package:
      - Added an option to send additional fetch_ohlcv_params. Some exchanges (e.g. Bitmex)
        support sending extra fetch parameters.
      - Added a drop_newest option to avoid loading incomplete candles on exchanges that
        do not support ohlcv params for preventing partial data from being returned.
"""
params = (
('historical', False), # only historical download
('backfill_start', False), # do backfilling at the start
('fetch_ohlcv_params', {}),
('ohlcv_limit', 20),
('drop_newest', False),
('debug', False)
)
_store = CryptoStore
# States for the Finite State Machine in _load
_ST_LIVE, _ST_HISTORBACK, _ST_OVER = range(3)
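    # _load() starts in _ST_HISTORBACK when `fromdate` is given and switches to _ST_LIVE once
    # the backfilled history is exhausted; with `historical=True` it moves to _ST_OVER instead.
    # Without `fromdate`, the feed starts directly in _ST_LIVE.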
# def __init__(self, exchange, symbol, ohlcv_limit=None, config={}, retries=5):
def __init__(self, **kwargs):
# self.store = CryptoStore(exchange, config, retries)
self.store = self._store(**kwargs)
self._data = deque() # data queue for price data
self._last_id = '' # last processed trade id for ohlcv
self._last_ts = 0 # last processed timestamp for ohlcv
self._ts_delta = None # timestamp delta for ohlcv
def start(self, ):
DataBase.start(self)
if self.p.fromdate:
self._state = self._ST_HISTORBACK
self.put_notification(self.DELAYED)
self._fetch_ohlcv(self.p.fromdate)
else:
self._state = self._ST_LIVE
self.put_notification(self.LIVE)
def _load(self):
if self._state == self._ST_OVER:
return False
while True:
if self._state == self._ST_LIVE:
if self._timeframe == bt.TimeFrame.Ticks:
return self._load_ticks()
else:
                    # INFO: Fix to address slow loading time after entering the LIVE state.
if len(self._data) == 0:
                        # INFO: Only call _fetch_ohlcv when self._data is fully consumed, since each call
                        #       incurs network latency. Furthermore it is extremely inefficient to fetch
                        #       a batch of bars but load only one bar at a time.
self._fetch_ohlcv()
ret = self._load_ohlcv()
if self.p.debug:
print('---- LOAD ----')
print('{} Load OHLCV Returning: {}'.format(datetime.utcnow(), ret))
return ret
elif self._state == self._ST_HISTORBACK:
ret = self._load_ohlcv()
if ret:
return ret
else:
# End of historical data
if self.p.historical: # only historical
self.put_notification(self.DISCONNECTED)
self._state = self._ST_OVER
return False # end of historical
else:
self._state = self._ST_LIVE
self.put_notification(self.LIVE)
def _fetch_ohlcv(self, fromdate=None):
"""Fetch OHLCV data into self._data queue"""
granularity = self.store.get_granularity(self._timeframe, self._compression)
if self.store.cache is not None and self.p.todate:
print("Loading from cache", self.p.dataname, granularity, fromdate, self.p.todate)
data = sorted(self.store.cache.query(self.p.dataname, granularity, fromdate, self.p.todate))
if self.p.drop_newest:
del data[-1]
if len(data) > 0:
self._data.extend(data)
self._last_ts = data[-1][0]
else:
till = int((self.p.todate - datetime(1970, 1, 1)).total_seconds() * 1000) if self.p.todate else None
if fromdate:
since = int((fromdate - datetime(1970, 1, 1)).total_seconds() * 1000)
else:
if self._last_ts > 0:
if self._ts_delta is None:
since = self._last_ts
else:
since = self._last_ts + self._ts_delta
else:
since = None
limit = self.p.ohlcv_limit
while True:
dlen = len(self._data)
if self.p.debug:
# TESTING
since_dt = datetime.utcfromtimestamp(since // 1000) if since is not None else 'NA'
print('---- NEW REQUEST ----')
print('{} - Requesting: Since TS:{} Since date:{} granularity:{}, limit:{}, params:{}'.format(
datetime.utcnow(), since, since_dt, granularity, limit, self.p.fetch_ohlcv_params))
data = sorted(self.store.fetch_ohlcv(self.p.dataname, timeframe=granularity,
since=since, limit=limit, params=self.p.fetch_ohlcv_params))
try:
for i, ohlcv in enumerate(data):
tstamp, open_, high, low, close, volume = ohlcv
print('{} - Data {}: {} - TS {} Time {}'.format(datetime.utcnow(), i,
datetime.utcfromtimestamp(tstamp // 1000),
tstamp, (time.time() * 1000)))
# ------------------------------------------------------------------
except IndexError:
print('Index Error: Data = {}'.format(data))
print('---- REQUEST END ----')
else:
data = sorted(self.store.fetch_ohlcv(self.p.dataname, timeframe=granularity,
since=since, limit=limit, params=self.p.fetch_ohlcv_params))
# Check to see if dropping the latest candle will help with
# exchanges which return partial data
if self.p.drop_newest:
del data[-1]
prev_tstamp = None
tstamp = None
for ohlcv in data:
if None in ohlcv:
continue
tstamp = ohlcv[0]
if prev_tstamp is not None and self._ts_delta is None:
# INFO: Record down the TS delta so that it can be used to increment since TS
self._ts_delta = tstamp - prev_tstamp
# Prevent from loading incomplete data
# if tstamp > (time.time() * 1000):
# continue
if tstamp > self._last_ts:
if self.p.debug:
print('Adding: {}'.format(ohlcv))
self._data.append(ohlcv)
self._last_ts = tstamp
if till and tstamp >= till:
break
if prev_tstamp is None:
prev_tstamp = tstamp
# print("?", tstamp, till, dlen, len(self._data))
if till and tstamp is not None:
if tstamp >= till:
break
since = tstamp
if dlen == len(self._data):
break
def _load_ticks(self):
if self._last_id is None:
# first time get the latest trade only
trades = [self.store.fetch_trades(self.p.dataname)[-1]]
else:
trades = self.store.fetch_trades(self.p.dataname)
if len(trades) <= 1:
if len(trades) == 1:
trade = trades[0]
trade_time = datetime.strptime(trade['datetime'], '%Y-%m-%dT%H:%M:%S.%fZ')
self._data.append((trade_time, float(trade['price']), float(trade['amount'])))
else:
trade_dict_list = []
index = 0
# Since we only need the last 2 trades, just loop through the last 2 trades to speed up the for loop
for trade in trades[-2:]:
trade_id = trade['id']
if trade_id > self._last_id:
trade_time = datetime.strptime(trade['datetime'], '%Y-%m-%dT%H:%M:%S.%fZ')
trade_dict = dict(index=index, trade_time=trade_time, price=float(trade['price']),
amount=float(trade['amount']))
trade_dict_list.append(trade_dict)
self._last_id = trade_id
index += 1
if len(trade_dict_list) > 0:
# The order of self._data should be in reversed order by trade datetime
reverse = True
selection_key = 'index'
trade_dict_list.sort(key = lambda k: k[selection_key], reverse = reverse) # sorts in place
for trade_dict in trade_dict_list:
self._data.append((trade_dict['trade_time'], trade_dict['price'], trade_dict['amount']))
                    # Only the first entry (the most recent trade) is needed, so stop after appending it
break
try:
trade = self._data.popleft()
except IndexError:
return None # no data in the queue
trade_time, price, size = trade
self.lines.datetime[0] = bt.date2num(trade_time)
self.lines.open[0] = price
self.lines.high[0] = price
self.lines.low[0] = price
self.lines.close[0] = price
self.lines.volume[0] = size
return True
def _load_ohlcv(self):
try:
ohlcv = self._data.popleft()
except IndexError:
return None # no data in the queue
tstamp, open_, high, low, close, volume = ohlcv
dtime = datetime.utcfromtimestamp(tstamp // 1000)
self.lines.datetime[0] = bt.date2num(dtime)
self.lines.open[0] = open_
self.lines.high[0] = high
self.lines.low[0] = low
self.lines.close[0] = close
self.lines.volume[0] = volume
return True
def haslivedata(self):
return self._state == self._ST_LIVE and self._data
def islive(self):
return not self.p.historical
| [
[
[
7,
11
],
[
6635,
6639
]
],
[
[
36,
41
],
[
2086,
2091
]
],
[
[
63,
71
],
[
3620,
3628
],
[
4937,
4945
],
[
5076,
5084
],
[
5614,
5622
],
[
5876,
5884
],
[
6409,
6417
],
[
6507,
6515
],
[
9046,
9054
],
[
9541,
9549
],
[
11212,
11220
]
],
[
[
80,
96
],
[
2825,
2827
],
[
10740,
10742
],
[
11288,
11290
]
],
[
[
125,
133
],
[
243,
251
],
[
575,
583
],
[
2342,
2350
]
],
[
[
167,
181
],
[
544,
558
]
],
[
[
208,
219
],
[
1727,
1738
],
[
499,
510
]
],
[
[
228,
242
],
[
559,
573
],
[
408,
422
]
],
[
[
533,
543
]
]
] |
"""This script creates some informative graphs on subgroups of income quartile, gender, and race."""
# %%
import os
import matplotlib.pyplot as plt
import seaborn as sns
from pathlib import Path
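# Note: seaborn.distplot (used throughout this script) is deprecated in recent seaborn
# releases; depending on the installed version, histplot/displot may be needed instead.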
# %%
# Set up folder path
code_folder = Path(os.path.abspath(''))
print(code_folder)
project_dir = os.path.dirname(code_folder)
os.chdir(project_dir)
print(project_dir)
# %%
from setup_fin_dataset import get_dataset
# %%
os.chdir(code_folder)
print(code_folder)
# %%
'''Plot scores by income quartile
'''
df = get_dataset()
#%%
df.dropna(axis=0, how='any', subset=['AFQT_1','ROSENBERG_SCORE', 'ROTTER_SCORE'], inplace=True)
# %%
ax = plt.figure().add_subplot(111)
for group in ['first quartile', 'second quartile', 'third quartile', 'fourth quartile']:
cond = df['FAMILY_INCOME_QUARTILE'] == group
dat = df.loc[df['SURVEY_YEAR'] == 1978, ['AFQT_1']].loc[cond].dropna()
sns.distplot(dat, label=group.capitalize())
csfont = {'fontname':'Times New Roman'}
ax.yaxis.get_major_ticks()[0].set_visible(False)
ax.set_xlabel('AFQT Scores', **csfont)
ax.set_xlim([0, 120])
ax.legend()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
plt.savefig('fig-inc-quartile-afqt.png')
for score in ['ROTTER', 'ROSENBERG']:
ax = plt.figure().add_subplot(111)
for group in ['first quartile', 'second quartile', 'third quartile', 'fourth quartile']:
label = score + '_SCORE'
cond = df['FAMILY_INCOME_QUARTILE'] == group
dat = df[cond].loc[df['SURVEY_YEAR'] == 1978, [label]].dropna()
sns.distplot(dat, label=group)
ax.set_xlabel(score.lower().capitalize() + ' Scores', **csfont)
if score == 'ROTTER':
plt.gca().invert_xaxis()
ax.yaxis.get_major_ticks()[0].set_visible(False)
ax.legend()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
plt.savefig('fig-inc-quartile-' + score.lower() + '.png')
# %%
'''Plot scores by gender
'''
df = get_dataset()
#%%
df.dropna(axis=0, how='any', subset=['AFQT_1','ROSENBERG_SCORE', 'ROTTER_SCORE'], inplace=True)
ax = plt.figure().add_subplot(111)
for group in [1, 2]:
cond = df['GENDER'] == group
dat = df.loc[df['SURVEY_YEAR'] == 1978, ['AFQT_1']].loc[cond].dropna()
sns.distplot(dat, label=group)
csfont = {'fontname':'Times New Roman'}
ax.yaxis.get_major_ticks()[0].set_visible(False)
ax.set_xlabel('AFQT Scores', **csfont)
ax.set_xlim([0, 120])
ax.legend()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
plt.savefig('fig-aptitude-gender.png')
for score in ['ROTTER', 'ROSENBERG']:
ax = plt.figure().add_subplot(111)
for group in [1, 2]:
label = score + '_SCORE'
cond = df['GENDER'] == group
dat = df[cond].loc[df['SURVEY_YEAR'] == 1978, [label]].dropna()
sns.distplot(dat, label=group)
ax.set_xlabel(score.lower().capitalize() + ' Scores', **csfont)
if score == 'ROTTER':
plt.gca().invert_xaxis()
ax.yaxis.get_major_ticks()[0].set_visible(False)
ax.legend()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
plt.savefig('fig-attitude-gender-' + score.lower() + '.png')
# %%
'''Plot scores by race
'''
df = get_dataset()
#%%
df.dropna(axis=0, how='any', subset=['AFQT_1','ROSENBERG_SCORE', 'ROTTER_SCORE'], inplace=True)
ax = plt.figure().add_subplot(111)
for group in [1, 2, 3]:
cond = df['RACE'] == group
dat = df.loc[df['SURVEY_YEAR'] == 1978, ['AFQT_1']].loc[cond].dropna()
sns.distplot(dat, label=group)
csfont = {'fontname':'Times New Roman'}
ax.yaxis.get_major_ticks()[0].set_visible(False)
ax.set_xlabel('AFQT Scores', **csfont)
ax.set_xlim([0, 120])
ax.legend()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
plt.savefig('fig-aptitude-race.png')
for score in ['ROTTER', 'ROSENBERG']:
ax = plt.figure().add_subplot(111)
for group in [1, 2, 3]:
label = score + '_SCORE'
cond = df['RACE'] == group
dat = df[cond].loc[df['SURVEY_YEAR'] == 1978, [label]].dropna()
sns.distplot(dat, label=group)
ax.set_xlabel(score.lower().capitalize() + ' Scores', **csfont)
if score == 'ROTTER':
plt.gca().invert_xaxis()
ax.yaxis.get_major_ticks()[0].set_visible(False)
ax.legend()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
plt.savefig('fig-attitude-race-' + score.lower() + '.png')
# %%
'''Plot by parental educational attainment, mother
'''
df = get_dataset()
#%%
df.dropna(axis=0, how='any', subset=['AFQT_1','ROSENBERG_SCORE', 'ROTTER_SCORE'], inplace=True)
# %%
df['MOTHER_EDU'].nunique()
# %%
df['FATHER_EDU'].nunique()
# %%
df_mother = df.groupby('MOTHER_EDU')['IDENTIFIER'].nunique().sort_values(ascending=False)
df_mother
# %%
df_father = df.groupby('FATHER_EDU')['IDENTIFIER'].nunique().sort_values(ascending=False)
df_father
# %%
ax = plt.figure().add_subplot(111)
for group in ['Less than HS', 'HS or more']:
cond = df['MOTHER_EDU'] == group
dat = df['AFQT_1'].loc[cond].dropna()
sns.distplot(dat, label=group)
csfont = {'fontname':'Times New Roman'}
ax.yaxis.get_major_ticks()[0].set_visible(False)
ax.set_xlabel('AFQT Scores', **csfont)
ax.set_xlim([0, 120])
ax.legend()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
plt.savefig('fig-aptitude-mother-edu.png')
for score in ['ROTTER', 'ROSENBERG']:
ax = plt.figure().add_subplot(111)
for group in ['Less than HS', 'HS or more']:
label = score + '_SCORE'
cond = df['MOTHER_EDU'] == group
dat = df[cond].loc[df['SURVEY_YEAR'] == 1978, [label]].dropna()
sns.distplot(dat, label=group)
ax.set_xlabel(score.lower().capitalize() + ' Scores', **csfont)
if score == 'ROTTER':
plt.gca().invert_xaxis()
ax.yaxis.get_major_ticks()[0].set_visible(False)
ax.legend()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
plt.savefig('fig-attitude-mother-edu-' + score.lower() + '.png')
# %%
'''Plot by parental educational attainment, father
'''
# %%
df = get_dataset()
#%%
df.dropna(axis=0, how='any', subset=['AFQT_1','ROSENBERG_SCORE', 'ROTTER_SCORE'], inplace=True)
ax = plt.figure().add_subplot(111)
for group in ['Less than HS', 'HS or more']:
cond = df['FATHER_EDU'] == group
dat = df['AFQT_1'].loc[cond].dropna()
sns.distplot(dat, label=group)
csfont = {'fontname':'Times New Roman'}
ax.yaxis.get_major_ticks()[0].set_visible(False)
ax.set_xlabel('AFQT Scores', **csfont)
ax.set_xlim([0, 120])
ax.legend()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
plt.savefig('fig-aptitude-father-edu.png')
for score in ['ROTTER', 'ROSENBERG']:
ax = plt.figure().add_subplot(111)
for group in ['Less than HS', 'HS or more']:
label = score + '_SCORE'
cond = df['FATHER_EDU'] == group
dat = df[cond].loc[df['SURVEY_YEAR'] == 1978, [label]].dropna()
sns.distplot(dat, label=group)
ax.set_xlabel(score.lower().capitalize() + ' Scores', **csfont)
if score == 'ROTTER':
plt.gca().invert_xaxis()
ax.yaxis.get_major_ticks()[0].set_visible(False)
ax.legend()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
plt.savefig('fig-attitude-father-edu-' + score.lower() + '.png')
# %%
| [
[
[
113,
115
],
[
242,
244
],
[
296,
298
],
[
325,
327
],
[
422,
424
]
],
[
[
123,
147
],
[
641,
644
],
[
1170,
1173
],
[
1261,
1264
],
[
1684,
1687
],
[
1865,
1868
],
[
2087,
2090
],
[
2519,
2522
],
[
2607,
2610
],
[
2946,
2949
],
[
3127,
3130
],
[
3351,
3354
],
[
3785,
3788
],
[
3871,
3874
],
[
4211,
4214
],
[
4392,
4395
],
[
4928,
4931
],
[
5355,
5358
],
[
5447,
5450
],
[
5814,
5817
],
[
5995,
5998
],
[
6254,
6257
],
[
6681,
6684
],
[
6773,
6776
],
[
7140,
7143
],
[
7321,
7324
]
],
[
[
155,
169
],
[
888,
891
],
[
1550,
1553
],
[
2250,
2253
],
[
2812,
2815
],
[
3515,
3518
],
[
4077,
4080
],
[
5086,
5089
],
[
5680,
5683
],
[
6412,
6415
],
[
7006,
7009
]
],
[
[
190,
194
],
[
237,
241
]
],
[
[
223,
234
],
[
269,
280
],
[
312,
323
],
[
431,
442
],
[
450,
461
]
],
[
[
282,
293
],
[
334,
345
],
[
353,
364
]
],
[
[
403,
414
],
[
514,
525
],
[
1965,
1976
],
[
3229,
3240
],
[
4520,
4531
],
[
6132,
6143
]
],
[
[
509,
511
],
[
533,
535
],
[
771,
773
],
[
819,
821
],
[
826,
828
],
[
1432,
1434
],
[
1484,
1486
],
[
1497,
1499
]
],
[
[
636,
638
],
[
973,
975
],
[
1022,
1024
],
[
1061,
1063
],
[
1083,
1085
],
[
1095,
1097
],
[
1131,
1133
]
],
[
[
675,
680
],
[
803,
808
],
[
912,
917
]
],
[
[
764,
768
],
[
869,
873
]
],
[
[
813,
816
],
[
901,
904
]
],
[
[
933,
939
],
[
1053,
1059
],
[
1642,
1648
]
],
[
[
1217,
1222
],
[
1400,
1405
],
[
1600,
1605
],
[
1657,
1662
],
[
1899,
1904
]
],
[
[
1256,
1258
],
[
1586,
1588
],
[
1713,
1715
],
[
1766,
1768
],
[
1782,
1784
],
[
1822,
1824
]
],
[
[
1299,
1304
],
[
1464,
1469
],
[
1574,
1579
]
],
[
[
1392,
1397
],
[
1525,
1530
]
],
[
[
1425,
1429
],
[
1487,
1491
]
],
[
[
1478,
1481
],
[
1563,
1566
]
],
[
[
1960,
1962
],
[
1984,
1986
],
[
2149,
2151
],
[
2181,
2183
],
[
2188,
2190
],
[
2710,
2712
],
[
2746,
2748
],
[
2759,
2761
]
],
[
[
2082,
2084
],
[
2322,
2324
],
[
2371,
2373
],
[
2410,
2412
],
[
2432,
2434
],
[
2444,
2446
],
[
2480,
2482
]
],
[
[
2121,
2126
],
[
2165,
2170
],
[
2274,
2279
]
],
[
[
2142,
2146
],
[
2231,
2235
]
],
[
[
2175,
2178
],
[
2263,
2266
]
],
[
[
2282,
2288
],
[
2402,
2408
],
[
2904,
2910
]
],
[
[
2563,
2568
],
[
2678,
2683
],
[
2862,
2867
],
[
2919,
2924
],
[
3164,
3169
]
],
[
[
2602,
2604
],
[
2848,
2850
],
[
2975,
2977
],
[
3028,
3030
],
[
3044,
3046
],
[
3084,
3086
]
],
[
[
2645,
2650
],
[
2726,
2731
],
[
2836,
2841
]
],
[
[
2670,
2675
],
[
2787,
2792
]
],
[
[
2703,
2707
],
[
2749,
2753
]
],
[
[
2740,
2743
],
[
2825,
2828
]
],
[
[
3224,
3226
],
[
3248,
3250
],
[
3416,
3418
],
[
3446,
3448
],
[
3453,
3455
],
[
3977,
3979
],
[
4011,
4013
],
[
4024,
4026
]
],
[
[
3346,
3348
],
[
3588,
3590
],
[
3637,
3639
],
[
3676,
3678
],
[
3698,
3700
],
[
3710,
3712
],
[
3746,
3748
]
],
[
[
3385,
3390
],
[
3430,
3435
],
[
3539,
3544
]
],
[
[
3409,
3413
],
[
3496,
3500
]
],
[
[
3440,
3443
],
[
3528,
3531
]
],
[
[
3547,
3553
],
[
3668,
3674
],
[
4169,
4175
]
],
[
[
3827,
3832
],
[
3945,
3950
],
[
4127,
4132
],
[
4184,
4189
],
[
4427,
4432
]
],
[
[
3866,
3868
],
[
4113,
4115
],
[
4240,
4242
],
[
4293,
4295
],
[
4309,
4311
],
[
4349,
4351
]
],
[
[
3909,
3914
],
[
3991,
3996
],
[
4101,
4106
]
],
[
[
3937,
3942
],
[
4052,
4057
]
],
[
[
3970,
3974
],
[
4014,
4018
]
],
[
[
4005,
4008
],
[
4090,
4093
]
],
[
[
4515,
4517
],
[
4539,
4541
],
[
4641,
4643
],
[
4674,
4676
],
[
4720,
4722
],
[
4826,
4828
],
[
5014,
5016
],
[
5050,
5052
],
[
5574,
5576
],
[
5614,
5616
],
[
5627,
5629
]
],
[
[
4708,
4717
],
[
4798,
4807
]
],
[
[
4814,
4823
],
[
4904,
4913
]
],
[
[
4923,
4925
],
[
5158,
5160
],
[
5207,
5209
],
[
5246,
5248
],
[
5268,
5270
],
[
5280,
5282
],
[
5316,
5318
]
],
[
[
4962,
4967
],
[
5034,
5039
],
[
5110,
5115
]
],
[
[
5007,
5011
],
[
5067,
5071
]
],
[
[
5044,
5047
],
[
5099,
5102
]
],
[
[
5118,
5124
],
[
5238,
5244
],
[
5772,
5778
]
],
[
[
5403,
5408
],
[
5542,
5547
],
[
5730,
5735
],
[
5787,
5792
],
[
6036,
6041
]
],
[
[
5442,
5444
],
[
5716,
5718
],
[
5843,
5845
],
[
5896,
5898
],
[
5912,
5914
],
[
5952,
5954
]
],
[
[
5485,
5490
],
[
5594,
5599
],
[
5704,
5709
]
],
[
[
5534,
5539
],
[
5655,
5660
]
],
[
[
5567,
5571
],
[
5617,
5621
]
],
[
[
5608,
5611
],
[
5693,
5696
]
],
[
[
6127,
6129
],
[
6151,
6153
],
[
6340,
6342
],
[
6376,
6378
],
[
6900,
6902
],
[
6940,
6942
],
[
6953,
6955
]
],
[
[
6249,
6251
],
[
6484,
6486
],
[
6533,
6535
],
[
6572,
6574
],
[
6594,
6596
],
[
6606,
6608
],
[
6642,
6644
]
],
[
[
6288,
6293
],
[
6360,
6365
],
[
6436,
6441
]
],
[
[
6333,
6337
],
[
6393,
6397
]
],
[
[
6370,
6373
],
[
6425,
6428
]
],
[
[
6444,
6450
],
[
6564,
6570
],
[
7098,
7104
]
],
[
[
6729,
6734
],
[
6868,
6873
],
[
7056,
7061
],
[
7113,
7118
],
[
7362,
7367
]
],
[
[
6768,
6770
],
[
7042,
7044
],
[
7169,
7171
],
[
7222,
7224
],
[
7238,
7240
],
[
7278,
7280
]
],
[
[
6811,
6816
],
[
6920,
6925
],
[
7030,
7035
]
],
[
[
6860,
6865
],
[
6981,
6986
]
],
[
[
6893,
6897
],
[
6943,
6947
]
],
[
[
6934,
6937
],
[
7019,
7022
]
]
] |
import pytest
import logging
from traitlets.config.loader import PyFileConfigLoader
from traitlets import TraitError
from jupyter_telemetry.eventlog import EventLog
GOOD_CONFIG = """
import logging
c.EventLog.handlers = [
logging.StreamHandler()
]
"""
BAD_CONFIG = """
import logging
c.EventLog.handlers = [
0
]
"""
def get_config_from_file(path, content):
# Write config file
filename = 'config.py'
config_file = path / filename
config_file.write_text(content)
# Load written file.
loader = PyFileConfigLoader(filename, path=str(path))
cfg = loader.load_config()
return cfg
def test_good_config_file(tmp_path):
cfg = get_config_from_file(tmp_path, GOOD_CONFIG)
# Pass config to EventLog
e = EventLog(config=cfg)
    # Assert that the handler from the config file was registered.
assert len(e.handlers) > 0
assert isinstance(e.handlers[0], logging.Handler)
def test_bad_config_file(tmp_path):
cfg = get_config_from_file(tmp_path, BAD_CONFIG)
with pytest.raises(TraitError):
e = EventLog(config=cfg)
| [
[
[
7,
13
],
[
981,
987
]
],
[
[
21,
28
],
[
863,
870
]
],
[
[
65,
83
],
[
532,
550
]
],
[
[
106,
116
],
[
995,
1005
]
],
[
[
157,
165
],
[
755,
763
],
[
1020,
1028
]
],
[
[
167,
178
],
[
703,
714
]
],
[
[
260,
270
],
[
959,
969
]
],
[
[
335,
355
],
[
672,
692
],
[
928,
948
]
],
[
[
629,
650
]
],
[
[
886,
906
]
]
] |
"""
Definition of the :class:`NativeRegistration` class.
"""
from pathlib import Path
from typing import Tuple
from typing import Union
import nibabel as nib
from brain_parts.parcellation.parcellations import (
Parcellation as parcellation_manager,
)
from nilearn.image.resampling import resample_to_img
from nipype.interfaces.base import TraitError
from tqdm import tqdm
from dmriprep_analyses.manager import DmriprepManager
from dmriprep_analyses.registrations.messages import REFERENCE_FILE_MISSING
from dmriprep_analyses.registrations.utils import DEFAULT_PARCELLATION_NAMING
from dmriprep_analyses.registrations.utils import PROBSEG_THRESHOLD
from dmriprep_analyses.registrations.utils import QUERIES
from dmriprep_analyses.registrations.utils import TRANSFORMS
class NativeRegistration(DmriprepManager):
QUERIES = QUERIES
#: Naming
DEFAULT_PARCELLATION_NAMING = DEFAULT_PARCELLATION_NAMING
#: Types of transformations
TRANSFORMS = TRANSFORMS
#: Default probability segmentations' threshold
PROBSEG_THRESHOLD = PROBSEG_THRESHOLD
def __init__(
self,
base_dir: Path,
participant_labels: Union[str, list] = None,
) -> None:
super().__init__(base_dir, participant_labels)
self.parcellation_manager = parcellation_manager()
def initiate_subject(
self, participant_label: str
) -> Tuple[dict, Path, Path]:
"""
        Query the participant's initially required files.
Parameters
----------
participant_label : str
Specific participant's label to be queried
Returns
-------
Tuple[dict,Path,Path]
A tuple of required files for parcellation registration.
"""
return [
grabber(participant_label, queries=self.QUERIES)
for grabber in [
self.get_transforms,
self.get_reference,
self.get_probseg,
]
]
def build_output_dictionary(
self,
parcellation_scheme: str,
reference: Path,
reference_type: str,
) -> dict:
"""
Based on a *reference* image,
reconstruct output names for native parcellation naming.
Parameters
----------
reference : Path
The reference image.
reference_type : str
The reference image type (either "anat" or "dwi")
Returns
-------
dict
            A dictionary with keys "whole_brain" and "gm_cropped" and their
            corresponding paths.
"""
basic_query = dict(
atlas=parcellation_scheme,
resolution=reference_type,
**self.DEFAULT_PARCELLATION_NAMING.copy(),
)
outputs = dict()
for key, label in zip(["whole_brain", "gm_cropped"], ["", "GM"]):
query = basic_query.copy()
query["label"] = label
outputs[key] = self.data_grabber.build_path(reference, query)
return outputs
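    # Illustrative note (hypothetical values, not taken from this project): for
    # parcellation_scheme="Brainnetome" and reference_type="anat", the method above
    # is expected to yield something like
    #   {"whole_brain": <path built with label="">, "gm_cropped": <path built with label="GM">}
    # where both paths come from self.data_grabber.build_path(reference, query).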
def register_to_anatomical(
self,
parcellation_scheme: str,
participant_label: str,
probseg_threshold: float = None,
force: bool = False,
    ) -> tuple:
"""
Register a *parcellation scheme* from standard to native anatomical space. # noqa
Parameters
----------
parcellation_scheme : str
A string representing existing key within *self.parcellation_manager.parcellations*.
participant_label : str
Specific participant's label
probseg_threshold : float, optional
Threshold for probability segmentation masking, by default None
force : bool, optional
Whether to re-write existing files, by default False
Returns
-------
        tuple
            The "whole_brain" and "gm_cropped" native-space parcellation schemes.
"""
transforms, reference, gm_probseg = self.initiate_subject(
participant_label
)
whole_brain, gm_cropped = [
self.build_output_dictionary(
parcellation_scheme, reference, "anat"
).get(key)
for key in ["whole_brain", "gm_cropped"]
]
self.parcellation_manager.register_parcellation_scheme(
parcellation_scheme,
participant_label,
reference,
transforms.get("mni2native"),
whole_brain,
force=force,
)
self.parcellation_manager.crop_to_probseg(
parcellation_scheme,
participant_label,
whole_brain,
gm_probseg,
gm_cropped,
masking_threshold=probseg_threshold or self.PROBSEG_THRESHOLD,
force=force,
)
return whole_brain, gm_cropped
def register_dwi(
self,
parcellation_scheme: str,
participant_label: str,
session: str,
anatomical_whole_brain: Path,
anatomical_gm_cropped: Path,
force: bool = False,
):
"""
Resample parcellation scheme from anatomical to DWI space.
Parameters
----------
parcellation_scheme : str
A string representing existing key within *self.parcellation_manager.parcellations*. # noqa
participant_label : str
Specific participant's label
anatomical_whole_brain : Path
Participant's whole-brain parcellation scheme in anatomical space
anatomical_gm_cropped : Path
Participant's GM-cropped parcellation scheme in anatomical space
force : bool, optional
Whether to re-write existing files, by default False
"""
reference = self.get_reference(
participant_label,
"dwi",
{"session": session},
queries=self.QUERIES,
)
if not reference:
raise FileNotFoundError(
REFERENCE_FILE_MISSING.format(
participant_label=participant_label
)
)
whole_brain, gm_cropped = [
self.build_output_dictionary(
parcellation_scheme, reference, "dwi"
).get(key)
for key in ["whole_brain", "gm_cropped"]
]
for source, target in zip(
[anatomical_whole_brain, anatomical_gm_cropped],
[whole_brain, gm_cropped],
):
if not target.exists() or force:
img = resample_to_img(
str(source), str(reference), interpolation="nearest"
)
nib.save(img, target)
return whole_brain, gm_cropped
def run_single_subject(
self,
parcellation_scheme: str,
participant_label: str,
session: Union[str, list] = None,
probseg_threshold: float = None,
force: bool = False,
) -> dict:
"""
Parameters
----------
parcellation_scheme : str
A string representing existing key within *self.parcellation_manager.parcellations*. # noqa
participant_label : str
Specific participant's label
session : Union[str, list], optional
Specific sessions available for *participant_label*, by default None # noqa
probseg_threshold : float, optional
Threshold for probability segmentation masking, by default None
force : bool, optional
Whether to re-write existing files, by default False
Returns
-------
dict
            A dictionary with keys "anat" and the available (or requested) sessions,
            and the corresponding native parcellations as values.
"""
outputs = {}
anat_whole_brain, anat_gm_cropped = self.register_to_anatomical(
parcellation_scheme, participant_label, probseg_threshold, force
)
outputs["anat"] = {
"whole_brain": anat_whole_brain,
"gm_cropped": anat_gm_cropped,
}
sessions = self.subjects.get(participant_label) or session
if isinstance(sessions, str):
sessions = [sessions]
for session in sessions:
whole_brain, gm_cropped = self.register_dwi(
parcellation_scheme,
participant_label,
session,
anat_whole_brain,
anat_gm_cropped,
force,
)
outputs[session] = {
"whole_brain": whole_brain,
"gm_cropped": gm_cropped,
}
return outputs
def run_dataset(
self,
parcellation_scheme: str,
participant_label: Union[str, list] = None,
probseg_threshold: float = None,
force: bool = False,
):
"""
Register *parcellation_scheme* to all available (or requested) subjects' native space.
Parameters
----------
parcellation_scheme : str
A string representing existing key within *self.parcellation_manager.parcellations*. # noqa
participant_label : Union[str, list], optional
Specific subject/s within the dataset to run, by default None
probseg_threshold : float, optional
Threshold for probability segmentation masking, by default None
force : bool, optional
Whether to remove existing products and generate new ones, by default False # noqa
"""
native_parcellations = {}
if participant_label:
if isinstance(participant_label, str):
participant_labels = [participant_label]
elif isinstance(participant_label, list):
participant_labels = participant_label
else:
participant_labels = list(sorted(self.subjects.keys()))
for participant_label in tqdm(participant_labels):
try:
native_parcellations[
participant_label
] = self.run_single_subject(
parcellation_scheme,
participant_label,
probseg_threshold=probseg_threshold,
force=force,
)
except (FileNotFoundError, TraitError):
continue
return native_parcellations
| [
[
[
81,
85
],
[
1124,
1128
],
[
1397,
1401
],
[
1403,
1407
],
[
2084,
2088
],
[
5042,
5046
],
[
5079,
5083
]
],
[
[
105,
110
],
[
1385,
1390
]
],
[
[
130,
135
],
[
1158,
1163
],
[
6897,
6902
],
[
8809,
8814
]
],
[
[
144,
158
],
[
6709,
6712
]
],
[
[
216,
252
],
[
1289,
1309
]
],
[
[
293,
308
],
[
6585,
6600
]
],
[
[
344,
354
],
[
10369,
10379
]
],
[
[
372,
376
],
[
9978,
9982
]
],
[
[
416,
431
],
[
800,
815
]
],
[
[
485,
507
],
[
6035,
6057
]
],
[
[
558,
585
],
[
889,
916
]
],
[
[
636,
653
],
[
1055,
1072
]
],
[
[
704,
711
],
[
832,
839
]
],
[
[
762,
772
],
[
967,
977
]
],
[
[
781,
799
]
]
] |
"""
Copyright (C) Microsoft Corporation. All rights reserved.
Microsoft Corporation (“Microsoft”) grants you a nonexclusive, perpetual,
royalty-free right to use, copy, and modify the software code provided by us
("Software Code"). You may not sublicense the Software Code or any use of it
(except to your affiliates and to vendors to perform work on your behalf)
through distribution, network access, service agreement, lease, rental, or
otherwise. This license does not purport to express any claim of ownership over
data you may have shared with Microsoft in the creation of the Software Code.
Unless applicable law gives you more rights, Microsoft reserves all other
rights not expressly granted herein, whether by implication, estoppel or
otherwise.
THE SOFTWARE CODE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
MICROSOFT OR ITS LICENSORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THE SOFTWARE CODE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import repackage
repackage.add("../../azure-enterprise-scale-ml/esml/common/")
import azureml.core
from azureml.core.authentication import AzureCliAuthentication
from esml import ESMLProject
from baselayer_azure_ml import AutoMLFactory, azure_metric_regression,azure_metric_classification
print("SDK Version:", azureml.core.VERSION)
p = ESMLProject.get_project_from_env_command_line()
p.describe()
cli_auth = AzureCliAuthentication()
ws = p.get_workspace_from_config(cli_auth)  # Reads the current environment's (dev, test, prod) config.json | Use CLI auth if MLOps
p.init(ws) # Automapping from datalake to Azure ML datasets, prints status
# FEATURE ENGINEERING
# Feature engineering: Bronze 2 Gold - working with Azure ML Datasets with the Bronze, Silver, Gold concept
print("DEMO MLOPS FOLDER settings - remove this after you copy this folder")  # remove this after you copy this folder
esml_dataset = p.DatasetByName("ds01_diabetes") # Get dataset
df_bronze = esml_dataset.Bronze.to_pandas_dataframe()
p.save_silver(esml_dataset,df_bronze) #Bronze -> Silver
df = esml_dataset.Silver.to_pandas_dataframe()
df_filtered = df[df.AGE > 0.015]
gold_train = p.save_gold(df_filtered) #Silver -> Gold
# SAVE GOLD - Last step that must happen
gold_train = p.save_gold(df_filtered) | [
[
[
1491,
1500
],
[
1501,
1510
]
],
[
[
1570,
1582
],
[
1796,
1803
]
],
[
[
1623,
1645
],
[
1896,
1918
]
],
[
[
1663,
1674
],
[
1823,
1834
]
],
[
[
1706,
1719
]
],
[
[
1721,
1744
]
],
[
[
1745,
1772
]
],
[
[
1819,
1820
],
[
1871,
1872
],
[
1927,
1928
],
[
2049,
2050
],
[
2389,
2390
],
[
2490,
2491
],
[
2642,
2643
],
[
2739,
2740
]
],
[
[
1885,
1893
],
[
1955,
1963
]
],
[
[
1922,
1924
],
[
2056,
2058
]
],
[
[
2374,
2386
],
[
2448,
2460
],
[
2504,
2516
],
[
2552,
2564
]
],
[
[
2436,
2445
],
[
2517,
2526
]
],
[
[
2547,
2549
],
[
2609,
2611
],
[
2612,
2614
]
],
[
[
2595,
2606
],
[
2654,
2665
],
[
2751,
2762
]
],
[
[
2629,
2639
]
],
[
[
2726,
2736
]
]
] |
'''
This file defines the testing module. This needs the following:
1. The system under test
2. The specification or the function which we are trying to minimize
3. Domains of the uncertainities
'''
from .optimizers import *
from .func_tree import *
from .utils import *
from sklearn.decomposition import KernelPCA
import copy
import GPy
class test_module:
def __init__(self, sut, bounds, spec=None,f_tree=None, optimizer=None,
normalizer=False,seed=None, **kwargs):
self.system_under_test = sut
# Choosing the optimizer function
if spec is None:
self.f_acqu = f_tree
else:
self.spec = spec
# To implement parser to convert from specification to the function f
self.bounds = bounds
self.normalizer=normalizer
self.seed=seed
if 'cost_model' in kwargs:
self.cost_model = kwargs['cost_model']
else:
self.cost_model = lambda x: 1
# Choosing the optimizers
if 'opt_name' in kwargs:
            self.optimizer = select_opt(kwargs['opt_name'])(bounds, **kwargs)
elif optimizer is None:
self.optimizer = sample_opt(bounds=bounds, cost=self.cost_model)
else:
self.optimizer = optimizer
# Number of samples for initializing GPs
if 'init_sample' in kwargs:
self.init_sample = kwargs['init_sample']
else:
self.init_sample = 2*len(bounds)
# Model GPs for the smooth functions
if 'with_smooth' in kwargs:
self.with_smooth = kwargs['with_smooth']
else:
self.with_smooth = True
# Model GPs for the top level requirement, potentially modeling
# non-smooth function
if 'with_ns' in kwargs:
self.with_ns = kwargs['with_ns']
else:
self.with_ns = False
# Random sampling
if 'with_random' in kwargs:
self.with_random = kwargs['with_random']
else:
self.with_random = False
# Exploration weight for GP-LCB
if 'exp_weight' in kwargs:
self.k = kwargs['exp_weight']
else:
self.k = 10
        # Optimizer restarts for hyperparameter optimization of the GPs
if 'optimize_restarts' in kwargs:
self.optimize_restarts = kwargs['optimize_restarts']
else:
self.optimize_restarts = 1
# Search in lower dimension
if 'low_dim' in kwargs:
self.using_kpca=True
self.low_dim = kwargs['low_dim']
if 'kernel_type' in kwargs:
self.kernel = kwargs['kernel_type'](self.low_dim)
elif 'kernel' in kwargs:
self.kernel = kwargs['kernel']
self.using_kpca = True
self.low_dim = self.kernel.input_dim
else:
self.using_kpca=False
if 'kernel_type' in kwargs:
self.kernel = kwargs['kernel_type'](len(bounds))
else:
self.kernel = GPy.kern.Matern32(len(bounds), ARD=True)
if self.using_kpca:
if isinstance(self.optimizer, lbfgs_opt) or \
isinstance(self.optimizer, direct_opt):
print('Can use only sample_opt or delta_opt!')
print('Changing optimizer to sample_opt!')
self.optimizer = sample_opt(bounds, **kwargs)
# Sending in pre sampled data
if 'X' in kwargs:
self.X = kwargs['X']
else:
self.X = []
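    # Hedged usage sketch; `my_sut`, `my_bounds` and `my_spec_tree` are hypothetical
    # placeholders, not names defined in this file:
    #   tm = test_module(my_sut, my_bounds, f_tree=my_spec_tree,
    #                    init_sample=10, with_smooth=True, with_random=True)
    #   tm.initialize()
    #   tm.run_BO(iters_BO=20)
    #   print(tm.smooth_min_val, tm.rand_min_val)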
def initialize(self):
if len(self.X) == 0:
X = sample_from(self.init_sample, self.bounds)
self.X = X
trajs = []
for x in self.X:
trajs.append(self.system_under_test(x))
Y = self.f_acqu.eval_robustness(trajs)
if self.with_smooth:
self.smooth_X = copy.deepcopy(self.X)
if self.using_kpca:
self.kpca_s = KernelPCA(kernel='rbf', fit_inverse_transform=True,
copy_X=True, n_components=self.low_dim)
X_s = self.kpca_s.fit_transform(self.smooth_X)
else:
X_s = self.smooth_X
self.f_acqu.init_GPs(X_s, trajs,
kernel=copy.deepcopy(self.kernel),
optimize_restarts=self.optimize_restarts,
normalizer=self.normalizer)
if self.with_ns:
self.ns_X = copy.deepcopy(self.X)
if self.using_kpca:
self.kpca_ns = KernelPCA(kernel='rbf', fit_inverse_transform=True,
copy_X=True, n_components=self.low_dim)
X_ns = self.kpca_ns.fit_transform(self.ns_X)
else:
X_ns = copy.deepcopy(self.ns_X)
self.ns_GP = GPy.models.GPRegression(X_ns, Y,
kernel=copy.deepcopy(self.kernel),
normalizer=self.normalizer)
self.ns_GP.optimize_restarts(self.optimize_restarts)
if self.with_random:
self.random_X = copy.deepcopy(self.X)
self.random_Y = Y
def run_BO(self, iters_BO):
for ib in range(iters_BO):
print('BO iteration:', ib)
if self.with_smooth:
def f(x):
if self.using_kpca:
x_s = self.kpca_s.transform(x)
else:
x_s = x
if isinstance(self.optimizer, lbfgs_opt):
df = self.f_acqu.eval_df(x_s, k = self.k)
else:
df=None
return self.f_acqu.evaluate(x_s, k=self.k), df
x,f= self.optimizer.optimize(f=lambda x:f(x)[0],
df = lambda x:f(x)[1])
self.smooth_X = np.vstack((self.smooth_X, np.atleast_2d(x)))
trajs = [self.system_under_test(x_i) for x_i in x]
if self.using_kpca:
X_s = self.kpca_s.fit_transform(self.smooth_X)
else:
X_s = self.smooth_X
self.f_acqu.update_GPs(X_s, trajs,
optimize_restarts=self.optimize_restarts)
if self.with_ns:
def f(X):
if self.using_kpca:
X_ns = self.kpca_ns.transform(X)
else:
X_ns = X
m,v = self.ns_GP.predict(X_ns)
if isinstance(self.optimizer, lbfgs_opt):
dm,dv = self.ns_GP.predictive_gradients(X_ns)
dm = dm[:,:,0]
df = dm - (self.k/2)*(dv/np.sqrt(v))
else:
df =None
return m - self.k*np.sqrt(v), df
x,f = self.optimizer.optimize(f=lambda x: f(x)[0],
df = lambda x:f(x)[1])
trajs = [self.system_under_test(x_i) for x_i in x]
f_x = self.f_acqu.eval_robustness(trajs)
self.ns_X = np.vstack((self.ns_X, np.atleast_2d(x)))
if self.using_kpca:
X_ns = self.kpca_ns.fit_transform(self.ns_X)
else:
X_ns = self.ns_X
self.ns_GP.set_XY(X_ns,
np.vstack((self.ns_GP.Y, np.atleast_2d(f_x))))
self.ns_GP.optimize_restarts(self.optimize_restarts)
if self.with_random:
if self.seed is not None:
np.random.seed(self.seed)
sample_from(self.init_sample, self.bounds)
rand_x = sample_from(iters_BO, self.bounds)
trajs = []
for x in rand_x:
trajs.append(self.system_under_test(x))
self.random_X = np.vstack((self.random_X, rand_x))
rand_y = self.f_acqu.eval_robustness(trajs)
self.random_Y = np.vstack((self.random_Y, rand_y))
if self.with_smooth:
vals = self.f_acqu.find_GP_func()
self.smooth_min_val = np.array(vals).min()
self.smooth_min_loc = np.array(vals).argmin()
self.smooth_min_x = self.smooth_X[self.smooth_min_loc]
self.smooth_count = np.sum(np.array(vals) < 0)
self.smooth_ce = np.flatnonzero(np.array(vals) < 0)
if self.with_ns:
self.ns_min_val = self.ns_GP.Y.min()
self.ns_min_loc = self.ns_GP.Y.argmin()
self.ns_min_x = self.ns_GP.X[self.ns_min_loc]
self.ns_count = np.sum(self.ns_GP.Y < 0)
self.ns_ce = np.flatnonzero(self.ns_GP.Y < 0)
if self.with_random:
self.rand_min_val = self.random_Y.min()
self.rand_min_loc = self.random_Y.argmin()
self.rand_min_x = self.random_X[self.rand_min_loc]
self.rand_count = np.sum(self.random_Y < 0)
self.rand_ce = np.flatnonzero(self.random_Y < 0)
| [
[
[
224,
225
]
],
[
[
249,
250
]
],
[
[
270,
271
],
[
1082,
1092
],
[
1100,
1108
],
[
1190,
1200
],
[
3181,
3190
],
[
3244,
3254
],
[
3412,
3422
],
[
3650,
3661
],
[
5980,
5982
],
[
6006,
6008
],
[
7283,
7285
],
[
7305,
7307
],
[
7558,
7560
],
[
7583,
7585
],
[
7757,
7759
],
[
7799,
7810
],
[
7863,
7874
],
[
8034,
8036
],
[
8153,
8155
],
[
8299,
8301
],
[
8354,
8356
],
[
8478,
8480
],
[
8485,
8487
],
[
8534,
8536
],
[
8549,
8551
],
[
8783,
8785
],
[
8833,
8835
],
[
9097,
9099
],
[
9150,
9152
],
[
5612,
5621
],
[
6701,
6710
],
[
6871,
6873
],
[
6980,
6982
]
],
[
[
306,
315
],
[
4001,
4010
],
[
4620,
4629
]
],
[
[
324,
328
],
[
3917,
3921
],
[
4321,
4325
],
[
4535,
4539
],
[
4840,
4844
],
[
4970,
4974
],
[
5190,
5194
]
],
[
[
336,
339
],
[
3069,
3072
],
[
4890,
4893
]
],
[
[
347,
358
]
]
] |
import argparse
from suzieq.cli.sqcmds import *
from suzieq.cli.sqcmds import context_commands
from suzieq.cli.sqcmds import sqcmds_all
from suzieq.cli.sq_nubia_context import NubiaSuzieqContext
from suzieq.cli.sq_nubia_statusbar import NubiaSuzieqStatusBar
from nubia import PluginInterface, CompletionDataSource
from nubia.internal.blackcmd import CommandBlacklist
from nubia.internal.cmdbase import AutoCommand
class NubiaSuzieqPlugin(PluginInterface):
"""
The PluginInterface class is a way to customize nubia for every customer
    use case. It allows custom argument validation, control over command
loading, custom context objects, and much more.
"""
def create_context(self):
"""
        Must create an object that inherits from the `Context` parent class.
The plugin can return a custom context but it has to inherit from the
correct parent class.
"""
return NubiaSuzieqContext()
def validate_args(self, args):
"""
This will be executed when starting nubia, the args passed is a
dict-like object that contains the argparse result after parsing the
command line arguments. The plugin can choose to update the context
with the values, and/or decide to raise `ArgsValidationError` with
the error message.
"""
pass
def get_commands(self):
cmds = [AutoCommand(getattr(globals()[x], x))
for x in sqcmds_all if not x.startswith('_')]
cmds.append(AutoCommand(context_commands.set_ctxt))
cmds.append(AutoCommand(context_commands.clear_ctxt))
return cmds
def get_opts_parser(self, add_help=True):
"""
        Builds the ArgumentParser that will be passed to the shell; use this to
build your list of arguments that you want for your shell.
"""
opts_parser = argparse.ArgumentParser(
description="Suzieq CLI",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
add_help=add_help,
)
opts_parser.add_argument(
"--config", "-c", default="", type=str, help="Configuration File"
)
opts_parser.add_argument(
"--verbose",
"-v",
action="count",
default=0,
help="Increase verbosity, can be specified " "multiple times",
)
opts_parser.add_argument(
"--stderr",
"-s",
action="store_true",
default=True,
help="By default the logging output goes to stderr "
"Enable this feature to send it to a temporary logfile"
)
# we only support pandas now, so we don't want this option
# opts_parser.add_argument(
# "--use-engine", "-e", help="Which analysis engine to use", default="pandas"
# )
return opts_parser
def get_completion_datasource_for_global_argument(self, argument):
if argument == "--config":
return ConfigFileCompletionDataSource()
if argument == "--use-engine":
return ConfigEngineCompletionDataSource()
return None
def create_usage_logger(self, context):
"""
        Override this and return your own usage logger.
Must be a subtype of UsageLoggerInterface.
"""
return None
def get_status_bar(self, context):
"""
This returns the StatusBar object that handles the bottom status bar
and the right-side per-line status
"""
return NubiaSuzieqStatusBar(context)
def getBlacklistPlugin(self):
blacklister = CommandBlacklist()
blacklister.add_blocked_command("topcpu")
blacklister.add_blocked_command("topmem")
return blacklister
class ConfigFileCompletionDataSource(CompletionDataSource):
def get_all(self):
return ["/tmp/c1", "/tmp/c2"]
class ConfigEngineCompletionDataSource(CompletionDataSource):
def get_all(self):
return ["pandas"]
| [
[
[
7,
15
],
[
1867,
1875
],
[
1958,
1966
]
],
[
[
46,
47
]
],
[
[
78,
94
],
[
1526,
1542
],
[
1586,
1602
]
],
[
[
125,
135
],
[
1457,
1467
]
],
[
[
176,
194
],
[
928,
946
]
],
[
[
237,
257
],
[
3549,
3569
]
],
[
[
276,
291
],
[
440,
455
]
],
[
[
293,
313
],
[
3821,
3841
],
[
3946,
3966
]
],
[
[
350,
366
],
[
3636,
3652
]
],
[
[
402,
413
],
[
1394,
1405
],
[
1514,
1525
],
[
1574,
1585
]
],
[
[
422,
439
]
],
[
[
3790,
3820
],
[
3009,
3039
]
],
[
[
3913,
3945
],
[
3100,
3132
]
]
] |
from django.db import models
from django.contrib.auth.models import User
from django.template.defaultfilters import slugify
import os
from django.urls import reverse
class standard(models.Model):
name = models.CharField(max_length=100, unique=True)
slug = models.SlugField(null=True,blank=True)
description = models.TextField(max_length=550,blank=True)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super().save(*args, **kwargs)
def save_subject_image(instance,filename):
upload_to = 'images'
ext = filename.split('.')[-1]
#get filename
if instance.subject_id:
        filename = 'Subject_Pictures/{}.{}'.format(instance.subject_id, ext)
return os.path.join(upload_to, filename)
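# Illustrative only (hypothetical values): for a subject with subject_id "MATH101"
# uploading "cover.jpg", save_subject_image returns
# 'images/Subject_Pictures/MATH101.jpg'.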
class subject(models.Model):
subject_id =models.CharField(max_length=100,unique=True)
name = models.CharField(max_length=100)
slug = models.SlugField(null=True,blank=True)
standard = models.ForeignKey(standard,on_delete = models.CASCADE, related_name='subjects')
image = models.ImageField(upload_to = save_subject_image,blank=True,verbose_name ='subject image')
description = models.TextField(max_length=550, blank=True)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.subject_id)
super().save(*args, **kwargs)
def save_lesson_files(instance,filename):
upload_to = 'images'
ext = filename.split('.')[-1]
#get filename
if instance.lesson_id:
filename = 'lesson_files/{}/{}.{}'.format(instance.lesson_id,instance.lesson_id,ext)
if os.path.exists(filename):
new_name = str(instance.lesson_id) + str('1')
filename = 'lesson_images/{}/{}.{}'.format(instance.lesson_id, new_name,ext)
return os.path.join(upload_to, filename)
class lesson(models.Model):
lesson_id = models.CharField(max_length=100, unique=True)
standard = models.ForeignKey(standard,on_delete=models.CASCADE)
created_by = models.ForeignKey(User,on_delete=models.CASCADE)
created_at = models.DateTimeField(auto_now_add=True)
subject= models.ForeignKey(subject, on_delete=models.CASCADE, related_name='lessons')
name = models.CharField(max_length=150)
position = models.PositiveSmallIntegerField(verbose_name= 'Chapter No')
slug = models.SlugField(null=True,blank=True)
video = models.FileField(upload_to=save_lesson_files,verbose_name='video',blank=True,null=True)
ppt = models.FileField(upload_to=save_lesson_files,verbose_name='ppt',blank=True)
notes = models.FileField(upload_to=save_lesson_files,verbose_name='notes',blank=True)
class Meta:
ordering = ['position']
def __str__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super().save(*args, **kwargs)
def get_absolute_url(self):
return reverse("curriculum:lesson_list", kwargs={'slug':self.subject.slug, 'standard':self.standard.slug})
class comment(models.Model):
lesson_name = models.ForeignKey(lesson,null=True,on_delete=models.CASCADE,related_name='comments')
comment_name = models.CharField(max_length=150, blank=True)
#reply = models.ForeignKey("Comment",null=True, blank = True, on_delete = models.CASCADE, related_name='replies')
author = models.ForeignKey(User,on_delete=models.CASCADE)
body = models.TextField(max_length=500)
date_added = models.DateTimeField(auto_now_add=True)
def save(self, *args, **kwargs):
self.comment_name = slugify("Comment by"+"-"+str(self.author)+str(self.date_added))
super().save(*args, **kwargs)
def __str__(self):
return self.comment_name
class Meta:
ordering = ['-date_added']
class reply(models.Model):
comment_name = models.ForeignKey(comment,on_delete=models.CASCADE,related_name='replies')
reply_body = models.TextField(max_length=500)
author = models.ForeignKey(User, on_delete=models.CASCADE)
date_added = models.DateTimeField(auto_now_add=True)
def __str__(self):
return "reply to "+str(self.comment_name.comment_name) | [
[
[
22,
28
],
[
184,
190
],
[
210,
216
],
[
267,
273
],
[
324,
330
],
[
819,
825
],
[
850,
856
],
[
906,
912
],
[
950,
956
],
[
1004,
1010
],
[
1043,
1049
],
[
1096,
1102
],
[
1205,
1211
],
[
1910,
1916
],
[
1941,
1947
],
[
2002,
2008
],
[
2039,
2045
],
[
2072,
2078
],
[
2105,
2111
],
[
2138,
2144
],
[
2191,
2197
],
[
2228,
2234
],
[
2279,
2285
],
[
2327,
2333
],
[
2399,
2405
],
[
2450,
2456
],
[
2548,
2554
],
[
2636,
2642
],
[
3107,
3113
],
[
3140,
3146
],
[
3185,
3191
],
[
3244,
3250
],
[
3420,
3426
],
[
3453,
3459
],
[
3481,
3487
],
[
3531,
3537
],
[
3866,
3872
],
[
3900,
3906
],
[
3936,
3942
],
[
3992,
3998
],
[
4038,
4044
],
[
4072,
4078
],
[
4105,
4111
]
],
[
[
69,
73
],
[
2090,
2094
],
[
3438,
3442
],
[
4056,
4060
]
],
[
[
117,
124
],
[
479,
486
],
[
1361,
1368
],
[
2874,
2881
],
[
3637,
3644
]
],
[
[
132,
134
],
[
770,
772
],
[
1677,
1679
],
[
1861,
1863
]
],
[
[
159,
166
],
[
2979,
2986
]
],
[
[
175,
183
],
[
1022,
1030
],
[
2020,
2028
]
],
[
[
542,
560
],
[
1126,
1144
]
],
[
[
811,
818
],
[
2209,
2216
]
],
[
[
1431,
1448
],
[
2477,
2494
],
[
2575,
2592
],
[
2663,
2680
]
],
[
[
1903,
1909
],
[
3158,
3164
]
],
[
[
3099,
3106
],
[
3918,
3925
]
],
[
[
3860,
3865
]
]
] |
from django.shortcuts import render
from rest_framework import generics, authentication, permissions
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.settings import api_settings
from user.serializers import UserSerializer, AuthTokenSerializer
class CreateUserView(generics.CreateAPIView):
"""Create a new user in the system"""
serializer_class = UserSerializer
class CreateTokenview(ObtainAuthToken):
"""Create a new auth token for user"""
serializer_class = AuthTokenSerializer
renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES
class ManageUserView(generics.RetrieveUpdateAPIView):
"""Manage the authenticated user"""
serializer_class = UserSerializer
authentication_classes = (authentication.TokenAuthentication,)
permission_classes = (permissions.IsAuthenticated,)
def get_object(self):
"""Retrieve and return authentication user"""
return self.request.user
| [
[
[
29,
35
]
],
[
[
63,
71
],
[
299,
307
],
[
616,
624
]
],
[
[
73,
87
],
[
757,
771
]
],
[
[
89,
100
],
[
820,
831
]
],
[
[
144,
159
],
[
428,
443
]
],
[
[
196,
208
],
[
555,
567
]
],
[
[
240,
254
],
[
389,
403
],
[
712,
726
]
],
[
[
256,
275
],
[
512,
531
]
],
[
[
284,
298
]
],
[
[
412,
427
]
],
[
[
601,
615
]
]
] |
import requests
import numpy as np
import collections
import matplotlib.pyplot as plt
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from PIL import Image
from io import BytesIO
class Image_Data:
image = None
@property
def Array(self) -> np.ndarray:
"""
Return image array (RGB)
"""
return self.image
@property
def Color_Hex(self) -> list:
hex = []
def convert_RGB2HEX(color):
return "#{:02x}{:02x}{:02x}".format(int(color[0]), int(color[1]), int(color[2]))
image = self.image
image_height = len(image)
for y in range(image_height):
for x in image[y]:
hex.append(convert_RGB2HEX(x))
return hex
def __init__(self, image_path: str):
if 'http' in image_path:
# Online image
image_req = requests.get(image_path, stream=True)
if image_req.status_code == 200:
self.image = np.array(Image.open(BytesIO(image_req.content)))
else:
# Local image
self.image = np.array(Image.open(image_path))
def show(self):
Image.fromarray(self.image, 'RGB').show()
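# Hedged usage sketch; the URL below is a hypothetical placeholder:
#   img = Image_Data('https://example.com/sample.png')
#   palette = Color(img.Color_Hex)
#   print(palette.Total, palette.Count)
#   palette.plot(min_value=5)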
class Color:
color = []
@property
def Total(self) -> int:
return len(self.color)
@property
def Count(self) -> dict:
"""
Return total unique color
"""
color_count = dict(collections.Counter(self.color))
# Sort dict by highest value
color_count = {
key: value for key, value in sorted(color_count.items(), key=lambda x: x[1], reverse=True)
}
return color_count
@property
def Listed_Count(self) -> list[dict]:
"""
Return total unique color in list of dictionary
"""
list_colors = []
colors = self.Count.items()
# List each dict item
        for key, val in colors:
            # Build the single-entry dict directly instead of eval()-ing a string
            list_colors.append({key: val})
return list_colors
def __init__(self, color: list):
self.color = color
def plot(self, min_value = 1):
"""
Plot color data with value more than min_value
"""
color_count = self.Count
color_count = {key : value for key, value in color_count.items() if value >= min_value}
color = list(color_count.keys())
count = list(color_count.values())
bar_colors = color
# Draw plot
#fig_width = len(color)
#fig_height
figure = plt.figure('Color Distribution', tight_layout=True)
plt.barh(color, count, color=bar_colors, edgecolor='#aaaaaa')
plt.title('Color Distribution')
plt.ylabel('Color')
plt.xlabel('Count')
plt.show()
# Render figure
canvas = FigureCanvas(figure)
canvas.draw()
width, height = figure.get_size_inches() * figure.get_dpi()
image = np.frombuffer(canvas.tostring_rgb(), dtype='uint8').reshape(int(height), int(width), 3)
return image | [
[
[
7,
15
],
[
903,
911
]
],
[
[
23,
34
],
[
281,
283
],
[
1015,
1017
],
[
1130,
1132
],
[
3043,
3045
]
],
[
[
42,
53
],
[
1466,
1477
]
],
[
[
61,
85
],
[
2635,
2638
],
[
2696,
2699
],
[
2766,
2769
],
[
2806,
2809
],
[
2834,
2837
],
[
2862,
2865
]
],
[
[
130,
161
],
[
2915,
2927
]
],
[
[
178,
183
],
[
1024,
1029
],
[
1139,
1144
],
[
1192,
1197
]
],
[
[
199,
206
],
[
1035,
1042
]
],
[
[
214,
224
]
],
[
[
1241,
1246
]
]
] |
from pymongo import MongoClient
client = MongoClient()
# carPricingDB = client["carPricing"]
# firstOffersCollection = carPricingDB.create_collection("firstOffers")
# firstOffersCollection.insert_one({"item":"initialone"})
carPricingDB = client.carPricing
firstOffersCollection = carPricingDB.firstOffers
firstOffersCollection.insert_one(
{"item": "canvas",
"qty": 100,
"tags": ["cotton"],
"size": {"h": 28, "w": 35.5, "uom": "cm"}})
coss = firstOffersCollection.find({"item":"canvas"})[0]
print(coss) | [
[
[
20,
31
],
[
43,
54
]
],
[
[
34,
40
],
[
243,
249
]
],
[
[
228,
240
],
[
285,
297
]
],
[
[
261,
282
],
[
310,
331
],
[
466,
487
]
],
[
[
459,
463
],
[
522,
526
]
]
] |
# coding: utf-8
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from __future__ import print_function
import xml.dom.minidom as minidom
import os
# import PIL
import numpy as np
import scipy.sparse
import subprocess
try:
import cPickle
except ImportError:
import pickle as cPickle
import math
import glob
import uuid
import scipy.io as sio
import xml.etree.ElementTree as ET
from .imdb import imdb
from .imdb import ROOT_DIR
from . import ds_utils
from .adas_eval import adas_eval
# TODO: make fast_rcnn irrelevant
# >>>> obsolete, because it depends on sth outside of this project
from model.utils.config import cfg
# <<<< obsolete
class adas(imdb):
def __init__(self, image_set, year, devkit_path=None, sub_type='car'):
imdb.__init__(self, 'adas_' + year + '_' + image_set)
self._year = year
self._image_set = image_set
self._devkit_path = self._get_default_path() if devkit_path is None \
else devkit_path
self._data_path = os.path.join(self._devkit_path, 'ADAS' + self._year)
if sub_type == 'car':
self._classes = ('__background__', #always index 0
'car',)
elif sub_type == 'tired':
self._classes = ('__background__', #always index 0
'o','s','w')
self._class_to_ind = dict(zip(self.classes, range(self.num_classes)))
self._image_ext = '.jpg'
self._image_index = self._load_image_set_index()
# Default to roidb handler
# self._roidb_handler = self.selective_search_roidb
self._roidb_handler = self.gt_roidb
self._salt = str(uuid.uuid4())
self._comp_id = 'comp4'
# PASCAL specific config options
self.config = {'cleanup': True,
'use_salt': True,
'use_diff': False,
'matlab_eval': False,
'rpn_file': None,
'min_size': 2}
assert os.path.exists(self._devkit_path), \
'ADASdevkit path does not exist: {}'.format(self._devkit_path)
assert os.path.exists(self._data_path), \
'Path does not exist: {}'.format(self._data_path)
def image_path_at(self, i):
"""
Return the absolute path to image i in the image sequence.
"""
return self.image_path_from_index(self._image_index[i])
def image_id_at(self, i):
"""
        Return the image identifier for image i in the image sequence.
"""
return i
def image_path_from_index(self, index):
"""
Construct an image path from the image's "index" identifier.
"""
image_path = os.path.join(self._data_path, 'JPEGImages',
index + self._image_ext)
assert os.path.exists(image_path), \
'Path does not exist: {}'.format(image_path)
return image_path
def _load_image_set_index(self):
"""
Load the indexes listed in this dataset's image set file.
"""
# Example path to image set file:
# self._devkit_path + /ADASdevkit2007/ADAS2007/ImageSets/Main/val.txt
image_set_file = os.path.join(self._data_path, 'ImageSets', 'Main',
self._image_set + '.txt')
assert os.path.exists(image_set_file), \
'Path does not exist: {}'.format(image_set_file)
with open(image_set_file) as f:
image_index = [x.strip() for x in f.readlines()]
return image_index
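    # Illustrative note: the image-set file read above is assumed to hold one image
    # index per line (e.g. "000001", "000002"), so image_index becomes a list of
    # those identifiers; the example values are hypothetical.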
def _get_default_path(self):
"""
Return the default path where PASCAL ADAS is expected to be installed.
"""
return os.path.join(cfg.DATA_DIR, 'ADASdevkit' + self._year)
def gt_roidb(self):
"""
Return the database of ground-truth regions of interest.
This function loads/saves from/to a cache file to speed up future calls.
"""
cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
if os.path.exists(cache_file):
print(cache_file)
with open(cache_file, 'rb') as fid:
roidb = cPickle.load(fid)
print('{} gt roidb loaded from {}'.format(self.name, cache_file))
return roidb
gt_roidb = [self._load_pascal_annotation(index)
for index in self.image_index]
with open(cache_file, 'wb') as fid:
cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL)
print('wrote gt roidb to {}'.format(cache_file))
return gt_roidb
def selective_search_roidb(self):
"""
Return the database of selective search regions of interest.
Ground-truth ROIs are also included.
This function loads/saves from/to a cache file to speed up future calls.
"""
cache_file = os.path.join(self.cache_path,
self.name + '_selective_search_roidb.pkl')
if os.path.exists(cache_file):
with open(cache_file, 'rb') as fid:
roidb = cPickle.load(fid)
print('{} ss roidb loaded from {}'.format(self.name, cache_file))
return roidb
if int(self._year) == 2007 or self._image_set != 'test':
gt_roidb = self.gt_roidb()
ss_roidb = self._load_selective_search_roidb(gt_roidb)
roidb = imdb.merge_roidbs(gt_roidb, ss_roidb)
else:
roidb = self._load_selective_search_roidb(None)
with open(cache_file, 'wb') as fid:
cPickle.dump(roidb, fid, cPickle.HIGHEST_PROTOCOL)
print('wrote ss roidb to {}'.format(cache_file))
return roidb
def rpn_roidb(self):
if int(self._year) == 2007 or self._image_set != 'test':
gt_roidb = self.gt_roidb()
rpn_roidb = self._load_rpn_roidb(gt_roidb)
roidb = imdb.merge_roidbs(gt_roidb, rpn_roidb)
else:
roidb = self._load_rpn_roidb(None)
return roidb
def _load_rpn_roidb(self, gt_roidb):
filename = self.config['rpn_file']
print('loading {}'.format(filename))
assert os.path.exists(filename), \
'rpn data not found at: {}'.format(filename)
with open(filename, 'rb') as f:
box_list = cPickle.load(f)
return self.create_roidb_from_box_list(box_list, gt_roidb)
def _load_selective_search_roidb(self, gt_roidb):
filename = os.path.abspath(os.path.join(cfg.DATA_DIR,
'selective_search_data',
self.name + '.mat'))
assert os.path.exists(filename), \
'Selective search data not found at: {}'.format(filename)
raw_data = sio.loadmat(filename)['boxes'].ravel()
box_list = []
for i in range(raw_data.shape[0]):
boxes = raw_data[i][:, (1, 0, 3, 2)] - 1
keep = ds_utils.unique_boxes(boxes)
boxes = boxes[keep, :]
keep = ds_utils.filter_small_boxes(boxes, self.config['min_size'])
boxes = boxes[keep, :]
box_list.append(boxes)
return self.create_roidb_from_box_list(box_list, gt_roidb)
def _load_pascal_annotation(self, index):
"""
Load image and bounding boxes info from XML file in the PASCAL ADAS
format.
"""
filename = os.path.join(self._data_path, 'Annotations', index + '.xml')
tree = ET.parse(filename)
objs = tree.findall('object')
# if not self.config['use_diff']:
# # Exclude the samples labeled as difficult
# non_diff_objs = [
# obj for obj in objs if int(obj.find('difficult').text) == 0]
# # if len(non_diff_objs) != len(objs):
# # print 'Removed {} difficult objects'.format(
# # len(objs) - len(non_diff_objs))
# objs = non_diff_objs
num_objs = len(objs)
boxes = np.zeros((num_objs, 4), dtype=np.uint16)
gt_classes = np.zeros((num_objs), dtype=np.int32)
overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32)
# "Seg" area for pascal is just the box area
seg_areas = np.zeros((num_objs), dtype=np.float32)
ishards = np.zeros((num_objs), dtype=np.int32)
# Load object bounding boxes into a data frame.
for ix, obj in enumerate(objs):
bbox = obj.find('bndbox')
# Make pixel indexes 0-based
x1 = float(bbox.find('xmin').text) - 1
y1 = float(bbox.find('ymin').text) - 1
x2 = float(bbox.find('xmax').text) - 1
y2 = float(bbox.find('ymax').text) - 1
diffc = obj.find('difficult')
difficult = 0 if diffc == None else int(diffc.text)
ishards[ix] = difficult
cls = self._class_to_ind[obj.find('name').text.lower().strip()]
boxes[ix, :] = [x1, y1, x2, y2]
gt_classes[ix] = cls
overlaps[ix, cls] = 1.0
seg_areas[ix] = (x2 - x1 + 1) * (y2 - y1 + 1)
overlaps = scipy.sparse.csr_matrix(overlaps)
return {'boxes': boxes,
'gt_classes': gt_classes,
'gt_ishard': ishards,
'gt_overlaps': overlaps,
'flipped': False,
'seg_areas': seg_areas}
def _get_comp_id(self):
comp_id = (self._comp_id + '_' + self._salt if self.config['use_salt']
else self._comp_id)
return comp_id
def _get_adas_results_file_template(self):
# ADASdevkit/results/ADAS2007/Main/<comp_id>_det_test_aeroplane.txt
filename = self._get_comp_id() + '_det_' + self._image_set + '_{:s}.txt'
filedir = os.path.join(self._devkit_path, 'results', 'ADAS' + self._year, 'Main')
if not os.path.exists(filedir):
os.makedirs(filedir)
path = os.path.join(filedir, filename)
return path
def _write_adas_results_file(self, all_boxes):
for cls_ind, cls in enumerate(self.classes):
if cls == '__background__':
continue
print('Writing {} ADAS results file'.format(cls))
filename = self._get_adas_results_file_template().format(cls)
with open(filename, 'wt') as f:
for im_ind, index in enumerate(self.image_index):
dets = all_boxes[cls_ind][im_ind]
                    if len(dets) == 0:
continue
# the ADASdevkit expects 1-based indices
for k in range(dets.shape[0]):
f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'.
format(index, dets[k, -1],
dets[k, 0] + 1, dets[k, 1] + 1,
dets[k, 2] + 1, dets[k, 3] + 1))
def _do_python_eval(self, output_dir='output'):
annopath = os.path.join(
self._devkit_path,
'ADAS' + self._year,
'Annotations',
'{:s}.xml')
imagesetfile = os.path.join(
self._devkit_path,
'ADAS' + self._year,
'ImageSets',
'Main',
self._image_set + '.txt')
cachedir = os.path.join(self._devkit_path, 'annotations_cache')
aps = []
if not os.path.isdir(output_dir):
os.mkdir(output_dir)
for i, cls in enumerate(self._classes):
if cls == '__background__':
continue
filename = self._get_adas_results_file_template().format(cls)
rec, prec, ap = adas_eval(
filename, annopath, imagesetfile, cls, cachedir, ovthresh=0.5)
aps += [ap]
print('AP for {} = {:.4f}'.format(cls, ap))
with open(os.path.join(output_dir, cls + '_pr.pkl'), 'w') as f:
cPickle.dump({'rec': rec, 'prec': prec, 'ap': ap}, f)
print('Mean AP = {:.4f}'.format(np.mean(aps)))
print('~~~~~~~~')
print('Results:')
for ap in aps:
print('{:.3f}'.format(ap))
print('{:.3f}'.format(np.mean(aps)))
print('~~~~~~~~')
print('')
print('--------------------------------------------------------------')
print('Results computed with the **unofficial** Python eval code.')
print('Results should be very close to the official MATLAB eval code.')
print('Recompute with `./tools/reval.py --matlab ...` for your paper.')
print('-- Thanks, The Management')
print('--------------------------------------------------------------')
def _do_matlab_eval(self, output_dir='output'):
print('-----------------------------------------------------')
print('Computing results with the official MATLAB eval code.')
print('-----------------------------------------------------')
path = os.path.join(cfg.ROOT_DIR, 'lib', 'datasets',
'ADASdevkit-matlab-wrapper')
cmd = 'cd {} && '.format(path)
cmd += '{:s} -nodisplay -nodesktop '.format(cfg.MATLAB)
cmd += '-r "dbstop if error; '
cmd += 'adas_eval(\'{:s}\',\'{:s}\',\'{:s}\',\'{:s}\'); quit;"' \
.format(self._devkit_path, self._get_comp_id(),
self._image_set, output_dir)
print('Running:\n{}'.format(cmd))
status = subprocess.call(cmd, shell=True)
def evaluate_detections(self, all_boxes, output_dir):
self._write_adas_results_file(all_boxes)
self._do_python_eval(output_dir)
if self.config['matlab_eval']:
self._do_matlab_eval(output_dir)
if self.config['cleanup']:
for cls in self._classes:
if cls == '__background__':
continue
filename = self._get_adas_results_file_template().format(cls)
os.remove(filename)
def competition_mode(self, on):
if on:
self.config['use_salt'] = False
self.config['cleanup'] = False
else:
self.config['use_salt'] = True
self.config['cleanup'] = True
if __name__ == '__main__':
d = adas('trainval', '2017')
res = d.roidb
    from IPython import embed
    embed()
| [
[
[
287,
301
]
],
[
[
310,
336
]
],
[
[
345,
347
],
[
1201,
1203
],
[
2207,
2209
],
[
2334,
2336
],
[
2917,
2919
],
[
3035,
3037
],
[
3421,
3423
],
[
3551,
3553
],
[
3926,
3928
],
[
4197,
4199
],
[
4267,
4269
],
[
5098,
5100
],
[
5217,
5219
],
[
6400,
6402
],
[
6705,
6707
],
[
6721,
6723
],
[
6905,
6907
],
[
7662,
7664
],
[
10064,
10066
],
[
10151,
10153
],
[
10188,
10190
],
[
10224,
10226
],
[
11277,
11279
],
[
11429,
11431
],
[
11609,
11611
],
[
11695,
11697
],
[
11734,
11736
],
[
12162,
12164
],
[
13264,
13266
],
[
14257,
14259
]
],
[
[
368,
379
],
[
8259,
8261
],
[
8289,
8291
],
[
8321,
8323
],
[
8348,
8350
],
[
8377,
8379
],
[
8422,
8424
],
[
8507,
8509
],
[
8534,
8536
],
[
8564,
8566
],
[
8591,
8593
],
[
12326,
12328
],
[
12485,
12487
]
],
[
[
387,
399
],
[
9402,
9407
]
],
[
[
407,
417
],
[
13751,
13761
]
],
[
[
435,
442
],
[
4397,
4404
],
[
4682,
4689
],
[
4710,
4717
],
[
5317,
5324
],
[
5798,
5805
],
[
5823,
5830
],
[
6548,
6555
],
[
12232,
12239
]
],
[
[
474,
491
],
[
4397,
4404
],
[
4682,
4689
],
[
4710,
4717
],
[
5317,
5324
],
[
5798,
5805
],
[
5823,
5830
],
[
6548,
6555
],
[
12232,
12239
]
],
[
[
500,
504
]
],
[
[
512,
516
]
],
[
[
524,
528
],
[
1856,
1860
]
],
[
[
536,
551
],
[
7022,
7025
]
],
[
[
559,
586
],
[
7738,
7740
]
],
[
[
606,
610
],
[
862,
866
],
[
952,
956
],
[
5630,
5634
],
[
6133,
6137
]
],
[
[
629,
637
]
],
[
[
652,
660
],
[
7199,
7207
],
[
7282,
7290
]
],
[
[
684,
693
],
[
11970,
11979
]
],
[
[
827,
830
],
[
3939,
3942
],
[
6734,
6737
],
[
13277,
13280
],
[
13458,
13461
]
],
[
[
857,
861
],
[
14552,
14556
]
],
[
[
14548,
14549
],
[
14587,
14588
]
],
[
[
14581,
14584
]
],
[
[
14619,
14624
],
[
14631,
14636
]
]
] |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for lots of functionality provided by L{twisted.internet}.
"""
from __future__ import division, absolute_import
import os
import sys
import time
from twisted.python.compat import _PY3
from twisted.trial import unittest
from twisted.internet import reactor, protocol, error, abstract, defer
from twisted.internet import interfaces, base
try:
from twisted.internet import ssl
except ImportError:
ssl = None
if ssl and not ssl.supported:
ssl = None
from twisted.internet.defer import Deferred
if not _PY3:
from twisted.python import util
class ThreePhaseEventTests(unittest.TestCase):
"""
Tests for the private implementation helpers for system event triggers.
"""
def setUp(self):
"""
Create a trigger, an argument, and an event to be used by tests.
"""
self.trigger = lambda x: None
self.arg = object()
self.event = base._ThreePhaseEvent()
def test_addInvalidPhase(self):
"""
L{_ThreePhaseEvent.addTrigger} should raise L{KeyError} when called
with an invalid phase.
"""
self.assertRaises(
KeyError,
self.event.addTrigger, 'xxx', self.trigger, self.arg)
def test_addBeforeTrigger(self):
"""
L{_ThreePhaseEvent.addTrigger} should accept C{'before'} as a phase, a
callable, and some arguments and add the callable with the arguments to
the before list.
"""
self.event.addTrigger('before', self.trigger, self.arg)
self.assertEqual(
self.event.before,
[(self.trigger, (self.arg,), {})])
def test_addDuringTrigger(self):
"""
L{_ThreePhaseEvent.addTrigger} should accept C{'during'} as a phase, a
callable, and some arguments and add the callable with the arguments to
the during list.
"""
self.event.addTrigger('during', self.trigger, self.arg)
self.assertEqual(
self.event.during,
[(self.trigger, (self.arg,), {})])
def test_addAfterTrigger(self):
"""
L{_ThreePhaseEvent.addTrigger} should accept C{'after'} as a phase, a
callable, and some arguments and add the callable with the arguments to
the after list.
"""
self.event.addTrigger('after', self.trigger, self.arg)
self.assertEqual(
self.event.after,
[(self.trigger, (self.arg,), {})])
def test_removeTrigger(self):
"""
L{_ThreePhaseEvent.removeTrigger} should accept an opaque object
previously returned by L{_ThreePhaseEvent.addTrigger} and remove the
associated trigger.
"""
handle = self.event.addTrigger('before', self.trigger, self.arg)
self.event.removeTrigger(handle)
self.assertEqual(self.event.before, [])
def test_removeNonexistentTrigger(self):
"""
L{_ThreePhaseEvent.removeTrigger} should raise L{ValueError} when given
an object not previously returned by L{_ThreePhaseEvent.addTrigger}.
"""
self.assertRaises(ValueError, self.event.removeTrigger, object())
def test_removeRemovedTrigger(self):
"""
L{_ThreePhaseEvent.removeTrigger} should raise L{ValueError} the second
time it is called with an object returned by
L{_ThreePhaseEvent.addTrigger}.
"""
handle = self.event.addTrigger('before', self.trigger, self.arg)
self.event.removeTrigger(handle)
self.assertRaises(ValueError, self.event.removeTrigger, handle)
def test_removeAlmostValidTrigger(self):
"""
L{_ThreePhaseEvent.removeTrigger} should raise L{ValueError} if it is
given a trigger handle which resembles a valid trigger handle aside
from its phase being incorrect.
"""
self.assertRaises(
KeyError,
self.event.removeTrigger, ('xxx', self.trigger, (self.arg,), {}))
def test_fireEvent(self):
"""
L{_ThreePhaseEvent.fireEvent} should call I{before}, I{during}, and
I{after} phase triggers in that order.
"""
events = []
self.event.addTrigger('after', events.append, ('first', 'after'))
self.event.addTrigger('during', events.append, ('first', 'during'))
self.event.addTrigger('before', events.append, ('first', 'before'))
self.event.addTrigger('before', events.append, ('second', 'before'))
self.event.addTrigger('during', events.append, ('second', 'during'))
self.event.addTrigger('after', events.append, ('second', 'after'))
self.assertEqual(events, [])
self.event.fireEvent()
self.assertEqual(events,
[('first', 'before'), ('second', 'before'),
('first', 'during'), ('second', 'during'),
('first', 'after'), ('second', 'after')])
def test_asynchronousBefore(self):
"""
L{_ThreePhaseEvent.fireEvent} should wait for any L{Deferred} returned
by a I{before} phase trigger before proceeding to I{during} events.
"""
events = []
beforeResult = Deferred()
self.event.addTrigger('before', lambda: beforeResult)
self.event.addTrigger('during', events.append, 'during')
self.event.addTrigger('after', events.append, 'after')
self.assertEqual(events, [])
self.event.fireEvent()
self.assertEqual(events, [])
beforeResult.callback(None)
self.assertEqual(events, ['during', 'after'])
def test_beforeTriggerException(self):
"""
If a before-phase trigger raises a synchronous exception, it should be
logged and the remaining triggers should be run.
"""
events = []
class DummyException(Exception):
pass
def raisingTrigger():
raise DummyException()
self.event.addTrigger('before', raisingTrigger)
self.event.addTrigger('before', events.append, 'before')
self.event.addTrigger('during', events.append, 'during')
self.event.fireEvent()
self.assertEqual(events, ['before', 'during'])
errors = self.flushLoggedErrors(DummyException)
self.assertEqual(len(errors), 1)
def test_duringTriggerException(self):
"""
If a during-phase trigger raises a synchronous exception, it should be
logged and the remaining triggers should be run.
"""
events = []
class DummyException(Exception):
pass
def raisingTrigger():
raise DummyException()
self.event.addTrigger('during', raisingTrigger)
self.event.addTrigger('during', events.append, 'during')
self.event.addTrigger('after', events.append, 'after')
self.event.fireEvent()
self.assertEqual(events, ['during', 'after'])
errors = self.flushLoggedErrors(DummyException)
self.assertEqual(len(errors), 1)
def test_synchronousRemoveAlreadyExecutedBefore(self):
"""
If a before-phase trigger tries to remove another before-phase trigger
which has already run, a warning should be emitted.
"""
events = []
def removeTrigger():
self.event.removeTrigger(beforeHandle)
beforeHandle = self.event.addTrigger('before', events.append, ('first', 'before'))
self.event.addTrigger('before', removeTrigger)
self.event.addTrigger('before', events.append, ('second', 'before'))
self.assertWarns(
DeprecationWarning,
"Removing already-fired system event triggers will raise an "
"exception in a future version of Twisted.",
__file__,
self.event.fireEvent)
self.assertEqual(events, [('first', 'before'), ('second', 'before')])
def test_synchronousRemovePendingBefore(self):
"""
If a before-phase trigger removes another before-phase trigger which
has not yet run, the removed trigger should not be run.
"""
events = []
self.event.addTrigger(
'before', lambda: self.event.removeTrigger(beforeHandle))
beforeHandle = self.event.addTrigger(
'before', events.append, ('first', 'before'))
self.event.addTrigger('before', events.append, ('second', 'before'))
self.event.fireEvent()
self.assertEqual(events, [('second', 'before')])
def test_synchronousBeforeRemovesDuring(self):
"""
If a before-phase trigger removes a during-phase trigger, the
during-phase trigger should not be run.
"""
events = []
self.event.addTrigger(
'before', lambda: self.event.removeTrigger(duringHandle))
duringHandle = self.event.addTrigger('during', events.append, 'during')
self.event.addTrigger('after', events.append, 'after')
self.event.fireEvent()
self.assertEqual(events, ['after'])
def test_asynchronousBeforeRemovesDuring(self):
"""
If a before-phase trigger returns a L{Deferred} and later removes a
during-phase trigger before the L{Deferred} fires, the during-phase
trigger should not be run.
"""
events = []
beforeResult = Deferred()
self.event.addTrigger('before', lambda: beforeResult)
duringHandle = self.event.addTrigger('during', events.append, 'during')
self.event.addTrigger('after', events.append, 'after')
self.event.fireEvent()
self.event.removeTrigger(duringHandle)
beforeResult.callback(None)
self.assertEqual(events, ['after'])
def test_synchronousBeforeRemovesConspicuouslySimilarDuring(self):
"""
If a before-phase trigger removes a during-phase trigger which is
identical to an already-executed before-phase trigger aside from their
phases, no warning should be emitted and the during-phase trigger
should not be run.
"""
events = []
def trigger():
events.append('trigger')
self.event.addTrigger('before', trigger)
self.event.addTrigger(
'before', lambda: self.event.removeTrigger(duringTrigger))
duringTrigger = self.event.addTrigger('during', trigger)
self.event.fireEvent()
self.assertEqual(events, ['trigger'])
def test_synchronousRemovePendingDuring(self):
"""
If a during-phase trigger removes another during-phase trigger which
has not yet run, the removed trigger should not be run.
"""
events = []
self.event.addTrigger(
'during', lambda: self.event.removeTrigger(duringHandle))
duringHandle = self.event.addTrigger(
'during', events.append, ('first', 'during'))
self.event.addTrigger(
'during', events.append, ('second', 'during'))
self.event.fireEvent()
self.assertEqual(events, [('second', 'during')])
def test_triggersRunOnce(self):
"""
A trigger should only be called on the first call to
L{_ThreePhaseEvent.fireEvent}.
"""
events = []
self.event.addTrigger('before', events.append, 'before')
self.event.addTrigger('during', events.append, 'during')
self.event.addTrigger('after', events.append, 'after')
self.event.fireEvent()
self.event.fireEvent()
self.assertEqual(events, ['before', 'during', 'after'])
def test_finishedBeforeTriggersCleared(self):
"""
The temporary list L{_ThreePhaseEvent.finishedBefore} should be emptied
and the state reset to C{'BASE'} before the first during-phase trigger
executes.
"""
events = []
def duringTrigger():
events.append('during')
self.assertEqual(self.event.finishedBefore, [])
self.assertEqual(self.event.state, 'BASE')
self.event.addTrigger('before', events.append, 'before')
self.event.addTrigger('during', duringTrigger)
self.event.fireEvent()
self.assertEqual(events, ['before', 'during'])
class SystemEventTests(unittest.TestCase):
"""
Tests for the reactor's implementation of the C{fireSystemEvent},
C{addSystemEventTrigger}, and C{removeSystemEventTrigger} methods of the
L{IReactorCore} interface.
@ivar triggers: A list of the handles to triggers which have been added to
the reactor.
"""
def setUp(self):
"""
Create an empty list in which to store trigger handles.
"""
self.triggers = []
def tearDown(self):
"""
Remove all remaining triggers from the reactor.
"""
while self.triggers:
trigger = self.triggers.pop()
try:
reactor.removeSystemEventTrigger(trigger)
except (ValueError, KeyError):
pass
def addTrigger(self, event, phase, func):
"""
Add a trigger to the reactor and remember it in C{self.triggers}.
"""
t = reactor.addSystemEventTrigger(event, phase, func)
self.triggers.append(t)
return t
def removeTrigger(self, trigger):
"""
Remove a trigger by its handle from the reactor and from
C{self.triggers}.
"""
reactor.removeSystemEventTrigger(trigger)
self.triggers.remove(trigger)
def _addSystemEventTriggerTest(self, phase):
eventType = 'test'
events = []
def trigger():
events.append(None)
self.addTrigger(phase, eventType, trigger)
self.assertEqual(events, [])
reactor.fireSystemEvent(eventType)
self.assertEqual(events, [None])
def test_beforePhase(self):
"""
L{IReactorCore.addSystemEventTrigger} should accept the C{'before'}
phase and not call the given object until the right event is fired.
"""
self._addSystemEventTriggerTest('before')
def test_duringPhase(self):
"""
L{IReactorCore.addSystemEventTrigger} should accept the C{'during'}
phase and not call the given object until the right event is fired.
"""
self._addSystemEventTriggerTest('during')
def test_afterPhase(self):
"""
L{IReactorCore.addSystemEventTrigger} should accept the C{'after'}
phase and not call the given object until the right event is fired.
"""
self._addSystemEventTriggerTest('after')
def test_unknownPhase(self):
"""
L{IReactorCore.addSystemEventTrigger} should reject phases other than
C{'before'}, C{'during'}, or C{'after'}.
"""
eventType = 'test'
self.assertRaises(
KeyError, self.addTrigger, 'xxx', eventType, lambda: None)
def test_beforePreceedsDuring(self):
"""
L{IReactorCore.addSystemEventTrigger} should call triggers added to the
C{'before'} phase before it calls triggers added to the C{'during'}
phase.
"""
eventType = 'test'
events = []
def beforeTrigger():
events.append('before')
def duringTrigger():
events.append('during')
self.addTrigger('before', eventType, beforeTrigger)
self.addTrigger('during', eventType, duringTrigger)
self.assertEqual(events, [])
reactor.fireSystemEvent(eventType)
self.assertEqual(events, ['before', 'during'])
def test_duringPreceedsAfter(self):
"""
L{IReactorCore.addSystemEventTrigger} should call triggers added to the
C{'during'} phase before it calls triggers added to the C{'after'}
phase.
"""
eventType = 'test'
events = []
def duringTrigger():
events.append('during')
def afterTrigger():
events.append('after')
self.addTrigger('during', eventType, duringTrigger)
self.addTrigger('after', eventType, afterTrigger)
self.assertEqual(events, [])
reactor.fireSystemEvent(eventType)
self.assertEqual(events, ['during', 'after'])
def test_beforeReturnsDeferred(self):
"""
If a trigger added to the C{'before'} phase of an event returns a
L{Deferred}, the C{'during'} phase should be delayed until it is called
back.
"""
triggerDeferred = Deferred()
eventType = 'test'
events = []
def beforeTrigger():
return triggerDeferred
def duringTrigger():
events.append('during')
self.addTrigger('before', eventType, beforeTrigger)
self.addTrigger('during', eventType, duringTrigger)
self.assertEqual(events, [])
reactor.fireSystemEvent(eventType)
self.assertEqual(events, [])
triggerDeferred.callback(None)
self.assertEqual(events, ['during'])
def test_multipleBeforeReturnDeferred(self):
"""
If more than one trigger added to the C{'before'} phase of an event
return L{Deferred}s, the C{'during'} phase should be delayed until they
are all called back.
"""
firstDeferred = Deferred()
secondDeferred = Deferred()
eventType = 'test'
events = []
def firstBeforeTrigger():
return firstDeferred
def secondBeforeTrigger():
return secondDeferred
def duringTrigger():
events.append('during')
self.addTrigger('before', eventType, firstBeforeTrigger)
self.addTrigger('before', eventType, secondBeforeTrigger)
self.addTrigger('during', eventType, duringTrigger)
self.assertEqual(events, [])
reactor.fireSystemEvent(eventType)
self.assertEqual(events, [])
firstDeferred.callback(None)
self.assertEqual(events, [])
secondDeferred.callback(None)
self.assertEqual(events, ['during'])
def test_subsequentBeforeTriggerFiresPriorBeforeDeferred(self):
"""
If a trigger added to the C{'before'} phase of an event calls back a
L{Deferred} returned by an earlier trigger in the C{'before'} phase of
the same event, the remaining C{'before'} triggers for that event
should be run and any further L{Deferred}s waited on before proceeding
to the C{'during'} events.
"""
eventType = 'test'
events = []
firstDeferred = Deferred()
secondDeferred = Deferred()
def firstBeforeTrigger():
return firstDeferred
def secondBeforeTrigger():
firstDeferred.callback(None)
def thirdBeforeTrigger():
events.append('before')
return secondDeferred
def duringTrigger():
events.append('during')
self.addTrigger('before', eventType, firstBeforeTrigger)
self.addTrigger('before', eventType, secondBeforeTrigger)
self.addTrigger('before', eventType, thirdBeforeTrigger)
self.addTrigger('during', eventType, duringTrigger)
self.assertEqual(events, [])
reactor.fireSystemEvent(eventType)
self.assertEqual(events, ['before'])
secondDeferred.callback(None)
self.assertEqual(events, ['before', 'during'])
def test_removeSystemEventTrigger(self):
"""
A trigger removed with L{IReactorCore.removeSystemEventTrigger} should
not be called when the event fires.
"""
eventType = 'test'
events = []
def firstBeforeTrigger():
events.append('first')
def secondBeforeTrigger():
events.append('second')
self.addTrigger('before', eventType, firstBeforeTrigger)
self.removeTrigger(
self.addTrigger('before', eventType, secondBeforeTrigger))
self.assertEqual(events, [])
reactor.fireSystemEvent(eventType)
self.assertEqual(events, ['first'])
def test_removeNonExistentSystemEventTrigger(self):
"""
Passing an object to L{IReactorCore.removeSystemEventTrigger} which was
not returned by a previous call to
L{IReactorCore.addSystemEventTrigger} or which has already been passed
to C{removeSystemEventTrigger} should result in L{TypeError},
L{KeyError}, or L{ValueError} being raised.
"""
b = self.addTrigger('during', 'test', lambda: None)
self.removeTrigger(b)
self.assertRaises(
TypeError, reactor.removeSystemEventTrigger, None)
self.assertRaises(
ValueError, reactor.removeSystemEventTrigger, b)
self.assertRaises(
KeyError,
reactor.removeSystemEventTrigger,
(b[0], ('xxx',) + b[1][1:]))
def test_interactionBetweenDifferentEvents(self):
"""
L{IReactorCore.fireSystemEvent} should behave the same way for a
particular system event regardless of whether Deferreds are being
waited on for a different system event.
"""
events = []
firstEvent = 'first-event'
firstDeferred = Deferred()
def beforeFirstEvent():
events.append(('before', 'first'))
return firstDeferred
def afterFirstEvent():
events.append(('after', 'first'))
secondEvent = 'second-event'
secondDeferred = Deferred()
def beforeSecondEvent():
events.append(('before', 'second'))
return secondDeferred
def afterSecondEvent():
events.append(('after', 'second'))
self.addTrigger('before', firstEvent, beforeFirstEvent)
self.addTrigger('after', firstEvent, afterFirstEvent)
self.addTrigger('before', secondEvent, beforeSecondEvent)
self.addTrigger('after', secondEvent, afterSecondEvent)
self.assertEqual(events, [])
# After this, firstEvent should be stuck before 'during' waiting for
# firstDeferred.
reactor.fireSystemEvent(firstEvent)
self.assertEqual(events, [('before', 'first')])
# After this, secondEvent should be stuck before 'during' waiting for
# secondDeferred.
reactor.fireSystemEvent(secondEvent)
self.assertEqual(events, [('before', 'first'), ('before', 'second')])
# After this, firstEvent should have finished completely, but
# secondEvent should be at the same place.
firstDeferred.callback(None)
self.assertEqual(events, [('before', 'first'), ('before', 'second'),
('after', 'first')])
# After this, secondEvent should have finished completely.
secondDeferred.callback(None)
self.assertEqual(events, [('before', 'first'), ('before', 'second'),
('after', 'first'), ('after', 'second')])
class TimeTests(unittest.TestCase):
"""
Tests for the IReactorTime part of the reactor.
"""
def test_seconds(self):
"""
L{twisted.internet.reactor.seconds} should return something
like a number.
1. This test specifically does not assert any relation to the
"system time" as returned by L{time.time} or
L{twisted.python.runtime.seconds}, because at some point we
may find a better option for scheduling calls than
wallclock-time.
2. This test *also* does not assert anything about the type of
the result, because operations may not return ints or
floats: For example, datetime-datetime == timedelta(0).
"""
now = reactor.seconds()
self.assertEqual(now-now+now, now)
def test_callLaterUsesReactorSecondsInDelayedCall(self):
"""
L{reactor.callLater<twisted.internet.interfaces.IReactorTime.callLater>}
should use the reactor's seconds factory
to produce the time at which the DelayedCall will be called.
"""
oseconds = reactor.seconds
reactor.seconds = lambda: 100
try:
call = reactor.callLater(5, lambda: None)
self.assertEqual(call.getTime(), 105)
finally:
reactor.seconds = oseconds
def test_callLaterUsesReactorSecondsAsDelayedCallSecondsFactory(self):
"""
L{reactor.callLater<twisted.internet.interfaces.IReactorTime.callLater>}
should propagate its own seconds factory
to the DelayedCall to use as its own seconds factory.
"""
oseconds = reactor.seconds
reactor.seconds = lambda: 100
try:
call = reactor.callLater(5, lambda: None)
self.assertEqual(call.seconds(), 100)
finally:
reactor.seconds = oseconds
def test_callLater(self):
"""
Test that a DelayedCall really calls the function it is
supposed to call.
"""
d = Deferred()
reactor.callLater(0, d.callback, None)
d.addCallback(self.assertEqual, None)
return d
def test_cancelDelayedCall(self):
"""
Test that when a DelayedCall is cancelled it does not run.
"""
called = []
def function():
called.append(None)
call = reactor.callLater(0, function)
call.cancel()
# Schedule a call in two "iterations" to check to make sure that the
# above call never ran.
d = Deferred()
def check():
try:
self.assertEqual(called, [])
except:
d.errback()
else:
d.callback(None)
reactor.callLater(0, reactor.callLater, 0, check)
return d
def test_cancelCancelledDelayedCall(self):
"""
Test that cancelling a DelayedCall which has already been cancelled
raises the appropriate exception.
"""
call = reactor.callLater(0, lambda: None)
call.cancel()
self.assertRaises(error.AlreadyCancelled, call.cancel)
def test_cancelCalledDelayedCallSynchronous(self):
"""
Test that cancelling a DelayedCall in the DelayedCall's function as
that function is being invoked by the DelayedCall raises the
appropriate exception.
"""
d = Deferred()
def later():
try:
self.assertRaises(error.AlreadyCalled, call.cancel)
except:
d.errback()
else:
d.callback(None)
call = reactor.callLater(0, later)
return d
def test_cancelCalledDelayedCallAsynchronous(self):
"""
Test that cancelling a DelayedCall after it has run its function
raises the appropriate exception.
"""
d = Deferred()
def check():
try:
self.assertRaises(error.AlreadyCalled, call.cancel)
except:
d.errback()
else:
d.callback(None)
def later():
reactor.callLater(0, check)
call = reactor.callLater(0, later)
return d
def testCallLaterTime(self):
d = reactor.callLater(10, lambda: None)
try:
self.assertTrue(d.getTime() - (time.time() + 10) < 1)
finally:
d.cancel()
def testDelayedCallStringification(self):
# Mostly just make sure str() isn't going to raise anything for
# DelayedCalls within reason.
dc = reactor.callLater(0, lambda x, y: None, 'x', y=10)
str(dc)
dc.reset(5)
str(dc)
dc.cancel()
str(dc)
dc = reactor.callLater(0, lambda: None, x=[({'hello': u'world'}, 10j), reactor], *range(10))
str(dc)
dc.cancel()
str(dc)
def calledBack(ignored):
str(dc)
d = Deferred().addCallback(calledBack)
dc = reactor.callLater(0, d.callback, None)
str(dc)
return d
def testDelayedCallSecondsOverride(self):
"""
Test that the C{seconds} argument to DelayedCall gets used instead of
the default timing function, if it is not None.
"""
def seconds():
return 10
dc = base.DelayedCall(5, lambda: None, (), {}, lambda dc: None,
lambda dc: None, seconds)
self.assertEqual(dc.getTime(), 5)
dc.reset(3)
self.assertEqual(dc.getTime(), 13)
class CallFromThreadStopsAndWakeUpTests(unittest.TestCase):
def testWakeUp(self):
# Make sure other threads can wake up the reactor
d = Deferred()
def wake():
time.sleep(0.1)
# callFromThread will call wakeUp for us
reactor.callFromThread(d.callback, None)
reactor.callInThread(wake)
return d
if interfaces.IReactorThreads(reactor, None) is None:
testWakeUp.skip = "Nothing to wake up for without thread support"
def _stopCallFromThreadCallback(self):
self.stopped = True
def _callFromThreadCallback(self, d):
reactor.callFromThread(self._callFromThreadCallback2, d)
reactor.callLater(0, self._stopCallFromThreadCallback)
def _callFromThreadCallback2(self, d):
try:
self.assertTrue(self.stopped)
except:
# Send the error to the deferred
d.errback()
else:
d.callback(None)
def testCallFromThreadStops(self):
"""
Ensure that callFromThread from inside a callFromThread
callback doesn't sit in an infinite loop and lets other
things happen too.
"""
self.stopped = False
d = defer.Deferred()
reactor.callFromThread(self._callFromThreadCallback, d)
return d
class DelayedTests(unittest.TestCase):
def setUp(self):
self.finished = 0
self.counter = 0
self.timers = {}
self.deferred = defer.Deferred()
def tearDown(self):
for t in self.timers.values():
t.cancel()
def checkTimers(self):
l1 = self.timers.values()
l2 = list(reactor.getDelayedCalls())
# There should be at least the calls we put in. There may be other
# calls that are none of our business and that we should ignore,
# though.
missing = []
for dc in l1:
if dc not in l2:
missing.append(dc)
if missing:
self.finished = 1
self.assertFalse(missing, "Should have been missing no calls, instead "
+ "was missing " + repr(missing))
def callback(self, tag):
del self.timers[tag]
self.checkTimers()
def addCallback(self, tag):
self.callback(tag)
self.addTimer(15, self.callback)
def done(self, tag):
self.finished = 1
self.callback(tag)
self.deferred.callback(None)
def addTimer(self, when, callback):
self.timers[self.counter] = reactor.callLater(when * 0.01, callback,
self.counter)
self.counter += 1
self.checkTimers()
def testGetDelayedCalls(self):
if not hasattr(reactor, "getDelayedCalls"):
return
# This is not a race because we don't do anything which might call
# the reactor until we have all the timers set up. If we did, this
# test might fail on slow systems.
self.checkTimers()
self.addTimer(35, self.done)
self.addTimer(20, self.callback)
self.addTimer(30, self.callback)
which = self.counter
self.addTimer(29, self.callback)
self.addTimer(25, self.addCallback)
self.addTimer(26, self.callback)
self.timers[which].cancel()
del self.timers[which]
self.checkTimers()
self.deferred.addCallback(lambda x : self.checkTimers())
return self.deferred
def test_active(self):
"""
L{IDelayedCall.active} returns False once the call has run.
"""
dcall = reactor.callLater(0.01, self.deferred.callback, True)
self.assertTrue(dcall.active())
def checkDeferredCall(success):
self.assertFalse(dcall.active())
return success
self.deferred.addCallback(checkDeferredCall)
return self.deferred
resolve_helper = """
from __future__ import print_function
import %(reactor)s
%(reactor)s.install()
from twisted.internet import reactor
class Foo:
def __init__(self):
reactor.callWhenRunning(self.start)
self.timer = reactor.callLater(3, self.failed)
def start(self):
reactor.resolve('localhost').addBoth(self.done)
def done(self, res):
print('done', res)
reactor.stop()
def failed(self):
print('failed')
self.timer = None
reactor.stop()
f = Foo()
reactor.run()
"""
class ChildResolveProtocol(protocol.ProcessProtocol):
def __init__(self, onCompletion):
self.onCompletion = onCompletion
def connectionMade(self):
self.output = []
self.error = []
def outReceived(self, out):
self.output.append(out)
def errReceived(self, err):
self.error.append(err)
def processEnded(self, reason):
self.onCompletion.callback((reason, self.output, self.error))
self.onCompletion = None
class ResolveTests(unittest.TestCase):
def testChildResolve(self):
# I've seen problems with reactor.run under gtk2reactor. Spawn a
# child which just does reactor.resolve after the reactor has
# started, fail if it does not complete in a timely fashion.
helperPath = os.path.abspath(self.mktemp())
with open(helperPath, 'w') as helperFile:
# Eeueuuggg
reactorName = reactor.__module__
helperFile.write(resolve_helper % {'reactor': reactorName})
env = os.environ.copy()
env['PYTHONPATH'] = os.pathsep.join(sys.path)
helperDeferred = Deferred()
helperProto = ChildResolveProtocol(helperDeferred)
reactor.spawnProcess(helperProto, sys.executable, ("python", "-u", helperPath), env)
def cbFinished(result):
(reason, output, error) = result
# If the output is "done 127.0.0.1\n" we don't really care what
# else happened.
output = b''.join(output)
if output != b'done 127.0.0.1\n':
self.fail((
"The child process failed to produce the desired results:\n"
" Reason for termination was: %r\n"
" Output stream was: %r\n"
" Error stream was: %r\n") % (reason.getErrorMessage(), output, b''.join(error)))
helperDeferred.addCallback(cbFinished)
return helperDeferred
if not interfaces.IReactorProcess(reactor, None):
ResolveTests.skip = (
"cannot run test: reactor doesn't support IReactorProcess")
class CallFromThreadTests(unittest.TestCase):
"""
Task scheduling from threads tests.
"""
if interfaces.IReactorThreads(reactor, None) is None:
skip = "Nothing to test without thread support"
def setUp(self):
self.counter = 0
self.deferred = Deferred()
def schedule(self, *args, **kwargs):
"""
Override in subclasses.
"""
reactor.callFromThread(*args, **kwargs)
def test_lotsOfThreadsAreScheduledCorrectly(self):
"""
L{IReactorThreads.callFromThread} can be used to schedule a large
number of calls in the reactor thread.
"""
def addAndMaybeFinish():
self.counter += 1
if self.counter == 100:
self.deferred.callback(True)
for i in range(100):
self.schedule(addAndMaybeFinish)
return self.deferred
def test_threadsAreRunInScheduledOrder(self):
"""
Callbacks should be invoked in the order they were scheduled.
"""
order = []
def check(_):
self.assertEqual(order, [1, 2, 3])
self.deferred.addCallback(check)
self.schedule(order.append, 1)
self.schedule(order.append, 2)
self.schedule(order.append, 3)
self.schedule(reactor.callFromThread, self.deferred.callback, None)
return self.deferred
def test_scheduledThreadsNotRunUntilReactorRuns(self):
"""
Scheduled tasks should not be run until the reactor starts running.
"""
def incAndFinish():
self.counter = 1
self.deferred.callback(True)
self.schedule(incAndFinish)
# Callback shouldn't have fired yet.
self.assertEqual(self.counter, 0)
return self.deferred
class MyProtocol(protocol.Protocol):
"""
Sample protocol.
"""
class MyFactory(protocol.Factory):
"""
Sample factory.
"""
protocol = MyProtocol
class ProtocolTests(unittest.TestCase):
def testFactory(self):
factory = MyFactory()
protocol = factory.buildProtocol(None)
self.assertEqual(protocol.factory, factory)
self.assertIsInstance(protocol, factory.protocol)
class DummyProducer(object):
"""
Very uninteresting producer implementation used by tests to ensure the
right methods are called by the consumer with which it is registered.
@type events: C{list} of C{str}
@ivar events: The producer/consumer related events which have happened to
this producer. Strings in this list may be C{'resume'}, C{'stop'}, or
C{'pause'}. Elements are added as they occur.
"""
def __init__(self):
self.events = []
def resumeProducing(self):
self.events.append('resume')
def stopProducing(self):
self.events.append('stop')
def pauseProducing(self):
self.events.append('pause')
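# A tiny illustration (sketch only) of how the tests below inspect DummyProducer's
# event log after the consumer calls back into it:
#
#     producer = DummyProducer()
#     producer.resumeProducing()
#     producer.pauseProducing()
#     assert producer.events == ['resume', 'pause']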
class SillyDescriptor(abstract.FileDescriptor):
"""
A descriptor whose data buffer gets filled very fast.
Useful for testing FileDescriptor's IConsumer interface, since
the data buffer fills as soon as at least four characters are
written to it, and gets emptied in a single doWrite() cycle.
"""
bufferSize = 3
connected = True
def writeSomeData(self, data):
"""
Always write all data.
"""
return len(data)
def startWriting(self):
"""
Do nothing: bypass the reactor.
"""
stopWriting = startWriting
class ReentrantProducer(DummyProducer):
"""
Similar to L{DummyProducer}, but with a resumeProducing method which calls
back into an L{IConsumer} method of the consumer against which it is
registered.
@ivar consumer: The consumer with which this producer has been or will
be registered.
@ivar methodName: The name of the method to call on the consumer inside
C{resumeProducing}.
@ivar methodArgs: The arguments to pass to the consumer method invoked in
C{resumeProducing}.
"""
def __init__(self, consumer, methodName, *methodArgs):
super(ReentrantProducer, self).__init__()
self.consumer = consumer
self.methodName = methodName
self.methodArgs = methodArgs
def resumeProducing(self):
super(ReentrantProducer, self).resumeProducing()
getattr(self.consumer, self.methodName)(*self.methodArgs)
class ProducerTests(unittest.TestCase):
"""
Test abstract.FileDescriptor's consumer interface.
"""
def test_doubleProducer(self):
"""
Verify that registering a non-streaming producer invokes its
resumeProducing() method and that you can only register one producer
at a time.
"""
fd = abstract.FileDescriptor()
fd.connected = 1
dp = DummyProducer()
fd.registerProducer(dp, 0)
self.assertEqual(dp.events, ['resume'])
self.assertRaises(RuntimeError, fd.registerProducer, DummyProducer(), 0)
def test_unconnectedFileDescriptor(self):
"""
Verify that registering a producer when the connection has already
been closed invokes its stopProducing() method.
"""
fd = abstract.FileDescriptor()
fd.disconnected = 1
dp = DummyProducer()
fd.registerProducer(dp, 0)
self.assertEqual(dp.events, ['stop'])
def _dontPausePullConsumerTest(self, methodName):
"""
Pull consumers don't get their C{pauseProducing} method called if the
descriptor buffer fills up.
@param methodName: Either 'write' or 'writeSequence', indicating
which transport method to write data to.
"""
descriptor = SillyDescriptor()
producer = DummyProducer()
descriptor.registerProducer(producer, streaming=False)
self.assertEqual(producer.events, ['resume'])
del producer.events[:]
# Fill up the descriptor's write buffer so we can observe whether or
# not it pauses its producer in that case.
if methodName == "writeSequence":
descriptor.writeSequence([b'1', b'2', b'3', b'4'])
else:
descriptor.write(b'1234')
self.assertEqual(producer.events, [])
def test_dontPausePullConsumerOnWrite(self):
"""
Verify that FileDescriptor does not call producer.pauseProducing() on a
non-streaming pull producer in response to a L{IConsumer.write} call
which results in a full write buffer. Issue #2286.
"""
return self._dontPausePullConsumerTest('write')
def test_dontPausePullConsumerOnWriteSequence(self):
"""
Like L{test_dontPausePullConsumerOnWrite}, but for a call to
C{writeSequence} rather than L{IConsumer.write}.
C{writeSequence} is not part of L{IConsumer}, but
L{abstract.FileDescriptor} has supported consumery behavior in response
to calls to L{writeSequence} forever.
"""
return self._dontPausePullConsumerTest('writeSequence')
def _reentrantStreamingProducerTest(self, methodName):
descriptor = SillyDescriptor()
if methodName == "writeSequence":
data = [b's', b'p', b'am']
else:
data = b"spam"
producer = ReentrantProducer(descriptor, methodName, data)
descriptor.registerProducer(producer, streaming=True)
# Start things off by filling up the descriptor's buffer so it will
# pause its producer.
getattr(descriptor, methodName)(data)
# Sanity check - make sure that worked.
self.assertEqual(producer.events, ['pause'])
del producer.events[:]
# After one call to doWrite, the buffer has been emptied so the
# FileDescriptor should resume its producer. That will result in an
# immediate call to FileDescriptor.write which will again fill the
# buffer and result in the producer being paused.
descriptor.doWrite()
self.assertEqual(producer.events, ['resume', 'pause'])
del producer.events[:]
# After a second call to doWrite, the exact same thing should have
# happened. Prior to the bugfix for which this test was written,
# FileDescriptor would have incorrectly believed its producer was
# already resumed (it was paused) and so not resume it again.
descriptor.doWrite()
self.assertEqual(producer.events, ['resume', 'pause'])
def test_reentrantStreamingProducerUsingWrite(self):
"""
Verify that FileDescriptor tracks producer's paused state correctly.
Issue #811, fixed in revision r12857.
"""
return self._reentrantStreamingProducerTest('write')
def test_reentrantStreamingProducerUsingWriteSequence(self):
"""
Like L{test_reentrantStreamingProducerUsingWrite}, but for calls to
C{writeSequence}.
C{writeSequence} is B{not} part of L{IConsumer}, however
C{abstract.FileDescriptor} has supported consumery behavior in response
to calls to C{writeSequence} forever.
"""
return self._reentrantStreamingProducerTest('writeSequence')
class PortStringificationTests(unittest.TestCase):
if interfaces.IReactorTCP(reactor, None) is not None:
def testTCP(self):
p = reactor.listenTCP(0, protocol.ServerFactory())
portNo = p.getHost().port
self.assertNotEqual(str(p).find(str(portNo)), -1,
"%d not found in %s" % (portNo, p))
return p.stopListening()
if interfaces.IReactorUDP(reactor, None) is not None:
def testUDP(self):
p = reactor.listenUDP(0, protocol.DatagramProtocol())
portNo = p.getHost().port
self.assertNotEqual(str(p).find(str(portNo)), -1,
"%d not found in %s" % (portNo, p))
return p.stopListening()
if interfaces.IReactorSSL(reactor, None) is not None and ssl:
def testSSL(self, ssl=ssl):
pem = util.sibpath(__file__, 'server.pem')
p = reactor.listenSSL(0, protocol.ServerFactory(), ssl.DefaultOpenSSLContextFactory(pem, pem))
portNo = p.getHost().port
self.assertNotEqual(str(p).find(str(portNo)), -1,
"%d not found in %s" % (portNo, p))
return p.stopListening()
if _PY3:
testSSL.skip = ("Re-enable once the Python 3 SSL port is done.")
| [
[
[
170,
178
]
],
[
[
180,
195
]
],
[
[
204,
206
],
[
33945,
33947
],
[
34184,
34186
],
[
34230,
34232
]
],
[
[
214,
217
],
[
34246,
34249
],
[
34395,
34398
]
],
[
[
225,
229
],
[
27474,
27478
],
[
28872,
28876
]
],
[
[
265,
269
],
[
597,
601
],
[
45749,
45753
]
],
[
[
296,
304
],
[
668,
676
],
[
12339,
12347
],
[
23105,
23113
],
[
28713,
28721
],
[
30028,
30036
],
[
33660,
33668
],
[
35285,
35293
],
[
37266,
37274
],
[
39718,
39726
],
[
44537,
44545
]
],
[
[
334,
341
],
[
29081,
29088
],
[
35146,
35153
],
[
35395,
35402
],
[
44587,
44594
],
[
44941,
44948
],
[
45298,
45305
],
[
13000,
13007
],
[
13264,
13271
],
[
13526,
13533
],
[
13855,
13862
],
[
15599,
15606
],
[
16263,
16270
],
[
16966,
16973
],
[
17937,
17944
],
[
19335,
19342
],
[
20098,
20105
],
[
20723,
20730
],
[
20814,
20821
],
[
20912,
20919
],
[
22216,
22223
],
[
22421,
22428
],
[
23842,
23849
],
[
24208,
24215
],
[
24232,
24239
],
[
24294,
24301
],
[
24408,
24415
],
[
24747,
24754
],
[
24771,
24778
],
[
24833,
24840
],
[
24947,
24954
],
[
25151,
25158
],
[
25475,
25482
],
[
25851,
25858
],
[
25872,
25879
],
[
26124,
26131
],
[
26744,
26751
],
[
27290,
27297
],
[
27382,
27389
],
[
27708,
27715
],
[
27861,
27868
],
[
27927,
27934
],
[
28115,
28122
],
[
29002,
29009
],
[
29302,
29309
],
[
29367,
29374
],
[
29934,
29941
],
[
30353,
30360
],
[
31225,
31232
],
[
31446,
31453
],
[
32315,
32322
],
[
34077,
34084
],
[
34361,
34368
],
[
35664,
35671
],
[
36571,
36578
],
[
44658,
44665
],
[
45012,
45019
],
[
45441,
45448
],
[
27247,
27254
],
[
28953,
28960
]
],
[
[
343,
351
],
[
33184,
33192
],
[
37088,
37096
],
[
37162,
37170
],
[
44679,
44687
],
[
45033,
45041
],
[
45462,
45470
]
],
[
[
353,
358
],
[
26207,
26212
],
[
26596,
26601
],
[
27081,
27086
]
],
[
[
360,
368
],
[
38217,
38225
],
[
40046,
40054
],
[
40506,
40514
]
],
[
[
370,
375
],
[
29909,
29914
],
[
30169,
30174
]
],
[
[
405,
415
],
[
29054,
29064
],
[
35119,
35129
],
[
35368,
35378
],
[
44564,
44574
],
[
44918,
44928
],
[
45275,
45285
]
],
[
[
417,
421
],
[
985,
989
],
[
28451,
28455
]
],
[
[
461,
464
],
[
503,
506
],
[
515,
518
],
[
45329,
45332
],
[
45364,
45367
]
],
[
[
489,
492
],
[
503,
506
],
[
515,
518
],
[
45329,
45332
],
[
45364,
45367
]
],
[
[
534,
537
],
[
45329,
45332
],
[
45364,
45367
]
],
[
[
581,
589
],
[
5278,
5286
],
[
9434,
9442
],
[
16614,
16622
],
[
17406,
17414
],
[
17442,
17450
],
[
18675,
18683
],
[
18711,
18719
],
[
21342,
21350
],
[
21605,
21613
],
[
25132,
25140
],
[
25650,
25658
],
[
26513,
26521
],
[
26998,
27006
],
[
28067,
28075
],
[
28829,
28837
],
[
34282,
34290
],
[
35546,
35554
]
],
[
[
634,
638
],
[
45388,
45392
]
],
[
[
647,
667
]
],
[
[
12322,
12338
]
],
[
[
23095,
23104
]
],
[
[
28679,
28712
]
],
[
[
30015,
30027
]
],
[
[
32609,
32623
],
[
34126,
34140
]
],
[
[
33163,
33183
],
[
34315,
34335
]
],
[
[
33647,
33659
],
[
35166,
35178
]
],
[
[
35265,
35284
]
],
[
[
37077,
37087
],
[
37233,
37243
]
],
[
[
37152,
37161
],
[
37332,
37341
]
],
[
[
37252,
37265
]
],
[
[
37509,
37522
],
[
38824,
38837
],
[
40110,
40123
],
[
40270,
40283
],
[
40573,
40586
],
[
41051,
41064
]
],
[
[
38201,
38216
],
[
41014,
41029
],
[
42435,
42450
]
],
[
[
38806,
38823
],
[
39396,
39413
],
[
39586,
39603
],
[
42594,
42611
]
],
[
[
39704,
39717
]
],
[
[
44512,
44536
]
]
] |
import setuptools
with open('README.md') as file:
readme = file.read()
name = 'aio4chan'
module = __import__(name)
version = module.__version__
author = 'Exahilosys'
url = f'https://github.com/{author}/{name}'
download_url = f'{url}/archive/v{version}.tar.gz'
setuptools.setup(
name = name,
version = version,
author = author,
url = url,
download_url = download_url,
packages = setuptools.find_packages(),
license = 'MIT',
description = 'API wrapper for 4chan.',
long_description = readme,
long_description_content_type = 'text/markdown',
include_package_data = True,
install_requires = ['aiohttp'],
py_modules = [name],
classifiers = [
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.6',
'Topic :: Internet',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
]
)
| [
[
[
7,
17
],
[
273,
283
],
[
415,
425
]
],
[
[
45,
49
],
[
65,
69
]
],
[
[
56,
62
],
[
531,
537
]
],
[
[
78,
82
],
[
117,
121
],
[
214,
218
],
[
302,
306
],
[
679,
683
]
],
[
[
97,
103
],
[
134,
140
]
],
[
[
124,
131
],
[
255,
262
],
[
322,
329
]
],
[
[
154,
160
],
[
205,
211
],
[
344,
350
]
],
[
[
177,
180
],
[
240,
243
],
[
362,
365
]
],
[
[
222,
234
],
[
386,
398
]
]
] |
# coding: utf-8
"""
Signing Today Web
*Signing Today* is the perfect Digital Signature Gateway. Whenever in Your workflow You need to add one or more Digital Signatures to Your document, *Signing Today* is the right choice. You prepare Your documents, *Signing Today* takes care of all the rest: send invitations (`signature tickets`) to signers, collects their signatures, send You back the signed document. Integrating *Signing Today* in Your existing applications is very easy. Just follow these API specifications and get inspired by the many examples presented hereafter. # noqa: E501
The version of the OpenAPI document: 2.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import signing_today_client
from signing_today_client.api.bit4id_pathgroup_digital_signature_transactions_api import Bit4idPathgroupDigitalSignatureTransactionsApi # noqa: E501
from signing_today_client.rest import ApiException
class TestBit4idPathgroupDigitalSignatureTransactionsApi(unittest.TestCase):
"""Bit4idPathgroupDigitalSignatureTransactionsApi unit test stubs"""
def setUp(self):
self.api = signing_today_client.api.bit4id_pathgroup_digital_signature_transactions_api.Bit4idPathgroupDigitalSignatureTransactionsApi() # noqa: E501
def tearDown(self):
pass
def test_d_s_ts_get(self):
"""Test case for d_s_ts_get
Retrieve DSTs # noqa: E501
"""
pass
def test_d_s_ts_post(self):
"""Test case for d_s_ts_post
Create a new DST # noqa: E501
"""
pass
def test_d_st_id_audit_get(self):
"""Test case for d_st_id_audit_get
Retrieve the audit records associated to the DST # noqa: E501
"""
pass
def test_d_st_id_delete(self):
"""Test case for d_st_id_delete
Delete a DST # noqa: E501
"""
pass
def test_d_st_id_fill_patch(self):
"""Test case for d_st_id_fill_patch
Fill a form of a DST # noqa: E501
"""
pass
def test_d_st_id_get(self):
"""Test case for d_st_id_get
Retrieve a DST # noqa: E501
"""
pass
def test_d_st_id_instantiate_post(self):
"""Test case for d_st_id_instantiate_post
Instantiate a DST from a template # noqa: E501
"""
pass
def test_d_st_id_modify_post(self):
"""Test case for d_st_id_modify_post
Modify a published DST template # noqa: E501
"""
pass
def test_d_st_id_notify_post(self):
"""Test case for d_st_id_notify_post
Send notifications for a DST # noqa: E501
"""
pass
def test_d_st_id_publish_post(self):
"""Test case for d_st_id_publish_post
Publish a DST # noqa: E501
"""
pass
def test_d_st_id_put(self):
"""Test case for d_st_id_put
Update a DST # noqa: E501
"""
pass
def test_d_st_id_replace_post(self):
"""Test case for d_st_id_replace_post
Replace a rejected DST # noqa: E501
"""
pass
def test_d_st_id_sign_doc_id_sign_id_get(self):
"""Test case for d_st_id_sign_doc_id_sign_id_get
Return the address for signing # noqa: E501
"""
pass
def test_d_st_id_templatize_post(self):
"""Test case for d_st_id_templatize_post
Create a template from a DST # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
| [
[
[
728,
743
]
],
[
[
752,
760
],
[
1050,
1058
],
[
3569,
3577
]
],
[
[
769,
789
],
[
1184,
1204
]
],
[
[
879,
925
]
],
[
[
978,
990
]
],
[
[
999,
1049
]
]
] |
import os
import sys
import traceback
from _pydev_bundle.pydev_imports import xmlrpclib, _queue, Exec
from _pydev_bundle._pydev_calltip_util import get_description
from _pydev_imps._pydev_saved_modules import thread
from _pydevd_bundle import pydevd_vars
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle.pydevd_constants import (IS_JYTHON, dict_iter_items, NEXT_VALUE_SEPARATOR, Null,
get_global_debugger)
import signal
from contextlib import contextmanager
from _pydev_bundle import pydev_log
try:
import cStringIO as StringIO # may not always be available @UnusedImport
except:
try:
import StringIO # @Reimport
except:
import io as StringIO
# =======================================================================================================================
# BaseStdIn
# =======================================================================================================================
class BaseStdIn:
def __init__(self, original_stdin=sys.stdin, *args, **kwargs):
try:
self.encoding = sys.stdin.encoding
except:
# Not sure if it's available in all Python versions...
pass
self.original_stdin = original_stdin
try:
self.errors = sys.stdin.errors # Who knew? sys streams have an errors attribute!
except:
# Not sure if it's available in all Python versions...
pass
def readline(self, *args, **kwargs):
# sys.stderr.write('Cannot readline out of the console evaluation\n') -- don't show anything.
# This could happen if the user had done input('enter number') -- upon entering that, the message
# would appear, which is not something we want.
return '\n'
def write(self, *args, **kwargs):
pass # not available StdIn (but it can be expected to be in the stream interface)
def flush(self, *args, **kwargs):
pass # not available StdIn (but it can be expected to be in the stream interface)
def read(self, *args, **kwargs):
# in the interactive interpreter, a read and a readline are the same.
return self.readline()
def close(self, *args, **kwargs):
pass # expected in StdIn
def __iter__(self):
# BaseStdIn would not be considered as Iterable in Python 3 without explicit `__iter__` implementation
return self.original_stdin.__iter__()
def __getattr__(self, item):
# it's called if the attribute wasn't found
if hasattr(self.original_stdin, item):
return getattr(self.original_stdin, item)
raise AttributeError("%s has no attribute %s" % (self.original_stdin, item))
# =======================================================================================================================
# StdIn
# =======================================================================================================================
class StdIn(BaseStdIn):
'''
Object to be used as stdin (emulating it as non-blocking while the next line arrives)
'''
def __init__(self, interpreter, host, client_port, original_stdin=sys.stdin):
BaseStdIn.__init__(self, original_stdin)
self.interpreter = interpreter
self.client_port = client_port
self.host = host
def readline(self, *args, **kwargs):
# Ok, callback into the client to get the new input
try:
server = xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port))
requested_input = server.RequestInput()
if not requested_input:
return '\n' # Yes, a readline must return something (otherwise we can get an EOFError on the input() call).
else:
# readline should end with '\n' (not doing so makes IPython 5 remove the last *valid* character).
requested_input += '\n'
return requested_input
except KeyboardInterrupt:
raise # Let KeyboardInterrupt go through -- #PyDev-816: Interrupting infinite loop in the Interactive Console
except:
return '\n'
def close(self, *args, **kwargs):
pass # expected in StdIn
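# Rough shape of the remote readline handshake above (host and port are
# illustrative; RequestInput is the XML-RPC method the client exposes):
#
#     server = xmlrpclib.Server('http://localhost:50007')
#     line = server.RequestInput() or '\n'   # block until the IDE supplies a line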
#=======================================================================================================================
# DebugConsoleStdIn
#=======================================================================================================================
class DebugConsoleStdIn(BaseStdIn):
'''
Object to be used as stdin in the debug console: it notifies the IDE when input is requested and delegates reads to the original stdin
'''
def __init__(self, py_db, original_stdin):
'''
:param py_db:
If None, get_global_debugger() is used.
'''
BaseStdIn.__init__(self, original_stdin)
self._py_db = py_db
self._in_notification = 0
def __send_input_requested_message(self, is_started):
try:
py_db = self._py_db
if py_db is None:
py_db = get_global_debugger()
cmd = py_db.cmd_factory.make_input_requested_message(is_started)
py_db.writer.add_command(cmd)
except Exception:
pydev_log.exception()
@contextmanager
def notify_input_requested(self):
self._in_notification += 1
if self._in_notification == 1:
self.__send_input_requested_message(True)
try:
yield
finally:
self._in_notification -= 1
if self._in_notification == 0:
self.__send_input_requested_message(False)
def readline(self, *args, **kwargs):
with self.notify_input_requested():
return self.original_stdin.readline(*args, **kwargs)
def read(self, *args, **kwargs):
with self.notify_input_requested():
return self.original_stdin.read(*args, **kwargs)
class CodeFragment:
def __init__(self, text, is_single_line=True):
self.text = text
self.is_single_line = is_single_line
def append(self, code_fragment):
self.text = self.text + "\n" + code_fragment.text
if not code_fragment.is_single_line:
self.is_single_line = False
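# Sketch of how multi-line input accumulates in a CodeFragment (illustrative only):
#
#     frag = CodeFragment('for i in range(3):')
#     frag.append(CodeFragment('    print(i)'))
#     frag.text            # 'for i in range(3):\n    print(i)'
#     frag.is_single_line  # still True: both appended pieces were single lines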
# =======================================================================================================================
# BaseInterpreterInterface
# =======================================================================================================================
class BaseInterpreterInterface:
def __init__(self, mainThread, connect_status_queue=None):
self.mainThread = mainThread
self.interruptable = False
self.exec_queue = _queue.Queue(0)
self.buffer = None
self.banner_shown = False
self.connect_status_queue = connect_status_queue
self.mpl_modules_for_patching = {}
self.init_mpl_modules_for_patching()
def build_banner(self):
return 'print({0})\n'.format(repr(self.get_greeting_msg()))
def get_greeting_msg(self):
return 'PyDev console: starting.\n'
def init_mpl_modules_for_patching(self):
from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot
self.mpl_modules_for_patching = {
"matplotlib": lambda: activate_matplotlib(self.enableGui),
"matplotlib.pyplot": activate_pyplot,
"pylab": activate_pylab
}
def need_more_for_code(self, source):
# PyDev-502: PyDev 3.9 F2 doesn't support backslash continuations
# Strangely even the IPython console is_complete said it was complete
# even with a continuation char at the end.
if source.endswith('\\'):
return True
if hasattr(self.interpreter, 'is_complete'):
return not self.interpreter.is_complete(source)
try:
# At this point, it should always be single.
# If we don't do this, things as:
#
# for i in range(10): print(i)
#
# (in a single line) don't work.
# Note that it won't give an error and code will be None (so, it'll
# use execMultipleLines in the next call in this case).
symbol = 'single'
code = self.interpreter.compile(source, '<input>', symbol)
except (OverflowError, SyntaxError, ValueError):
# Case 1
return False
if code is None:
# Case 2
return True
# Case 3
return False
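# The three cases above, sketched with codeop.compile_command (assuming the
# interpreter's compile behaves like the standard codeop compiler; shown for
# illustration only):
#
#     from codeop import compile_command
#     compile_command('x = 1')      # case 3: code object returned -> statement complete
#     compile_command('if True:')   # case 2: returns None -> more input needed
#     compile_command('x = !')      # case 1: raises SyntaxError -> treated as complete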
def need_more(self, code_fragment):
if self.buffer is None:
self.buffer = code_fragment
else:
self.buffer.append(code_fragment)
return self.need_more_for_code(self.buffer.text)
def create_std_in(self, debugger=None, original_std_in=None):
if debugger is None:
return StdIn(self, self.host, self.client_port, original_stdin=original_std_in)
else:
return DebugConsoleStdIn(py_db=debugger, original_stdin=original_std_in)
def add_exec(self, code_fragment, debugger=None):
# In case sys.excepthook was called, use the original excepthook. #PyDev-877: Debug console freezes with Python 3.5+
# (showtraceback does it on python 3.5 onwards)
sys.excepthook = sys.__excepthook__
try:
original_in = sys.stdin
try:
help = None
if 'pydoc' in sys.modules:
pydoc = sys.modules['pydoc'] # Don't import it if it still is not there.
if hasattr(pydoc, 'help'):
# You never know how will the API be changed, so, let's code defensively here
help = pydoc.help
if not hasattr(help, 'input'):
help = None
except:
# Just ignore any error here
pass
more = False
try:
sys.stdin = self.create_std_in(debugger, original_in)
try:
if help is not None:
# This will enable the help() function to work.
try:
try:
help.input = sys.stdin
except AttributeError:
help._input = sys.stdin
except:
help = None
if not self._input_error_printed:
self._input_error_printed = True
sys.stderr.write('\nError when trying to update pydoc.help.input\n')
sys.stderr.write('(help() may not work -- please report this as a bug in the pydev bugtracker).\n\n')
traceback.print_exc()
try:
self.start_exec()
if hasattr(self, 'debugger'):
self.debugger.enable_tracing()
more = self.do_add_exec(code_fragment)
if hasattr(self, 'debugger'):
self.debugger.disable_tracing()
self.finish_exec(more)
finally:
if help is not None:
try:
try:
help.input = original_in
except AttributeError:
help._input = original_in
except:
pass
finally:
sys.stdin = original_in
except SystemExit:
raise
except:
traceback.print_exc()
finally:
sys.__excepthook__ = sys.excepthook
return more
def do_add_exec(self, codeFragment):
'''
Subclasses should override.
@return: more (True if more input is needed to complete the statement and False if the statement is complete).
'''
raise NotImplementedError()
def get_namespace(self):
'''
Subclasses should override.
@return: dict with namespace.
'''
raise NotImplementedError()
def __resolve_reference__(self, text):
"""
:type text: str
"""
obj = None
if '.' not in text:
try:
obj = self.get_namespace()[text]
except KeyError:
pass
if obj is None:
try:
obj = self.get_namespace()['__builtins__'][text]
except:
pass
if obj is None:
try:
obj = getattr(self.get_namespace()['__builtins__'], text, None)
except:
pass
else:
try:
last_dot = text.rindex('.')
parent_context = text[0:last_dot]
res = pydevd_vars.eval_in_context(parent_context, self.get_namespace(), self.get_namespace())
obj = getattr(res, text[last_dot + 1:])
except:
pass
return obj
def getDescription(self, text):
try:
obj = self.__resolve_reference__(text)
if obj is None:
return ''
return get_description(obj)
except:
return ''
def do_exec_code(self, code, is_single_line):
try:
code_fragment = CodeFragment(code, is_single_line)
more = self.need_more(code_fragment)
if not more:
code_fragment = self.buffer
self.buffer = None
self.exec_queue.put(code_fragment)
return more
except:
traceback.print_exc()
return False
def execLine(self, line):
return self.do_exec_code(line, True)
def execMultipleLines(self, lines):
if IS_JYTHON:
more = False
for line in lines.split('\n'):
more = self.do_exec_code(line, True)
return more
else:
return self.do_exec_code(lines, False)
def interrupt(self):
self.buffer = None # Also clear the buffer when it's interrupted.
try:
if self.interruptable:
called = False
try:
# Fix for #PyDev-500: Console interrupt can't interrupt on sleep
if os.name == 'posix':
# On Linux we can't interrupt 0 as in Windows because it's
# actually owned by a process -- on the good side, signals
# work much better on Linux!
os.kill(os.getpid(), signal.SIGINT)
called = True
elif os.name == 'nt':
# Stupid windows: sending a Ctrl+C to a process given its pid
# is absurdly difficult.
# There are utilities to make it work such as
# http://www.latenighthacking.com/projects/2003/sendSignal/
# but fortunately for us, it seems Python does allow a CTRL_C_EVENT
# for the current process in Windows if pid 0 is passed... if we needed
# to send a signal to another process the approach would be
# much more difficult.
# Still, note that CTRL_C_EVENT is only Python 2.7 onwards...
# Also, this doesn't seem to be documented anywhere!? (stumbled
# upon it by chance after digging quite a lot).
os.kill(0, signal.CTRL_C_EVENT)
called = True
except:
# Many things to go wrong (from CTRL_C_EVENT not being there
# to failing import signal)... if that's the case, ask for
# forgiveness and go on to the approach which will interrupt
# the main thread (but it'll only work when it's executing some Python
# code -- not on sleep() for instance).
pass
if not called:
if hasattr(thread, 'interrupt_main'): # Jython doesn't have it
thread.interrupt_main()
else:
self.mainThread._thread.interrupt() # Jython
self.finish_exec(False)
return True
except:
traceback.print_exc()
return False
def close(self):
sys.exit(0)
def start_exec(self):
self.interruptable = True
def get_server(self):
if getattr(self, 'host', None) is not None:
return xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port))
else:
return None
server = property(get_server)
def ShowConsole(self):
server = self.get_server()
if server is not None:
server.ShowConsole()
def finish_exec(self, more):
self.interruptable = False
server = self.get_server()
if server is not None:
return server.NotifyFinished(more)
else:
return True
def getFrame(self):
xml = StringIO.StringIO()
hidden_ns = self.get_ipython_hidden_vars_dict()
xml.write("<xml>")
xml.write(pydevd_xml.frame_vars_to_xml(self.get_namespace(), hidden_ns))
xml.write("</xml>")
return xml.getvalue()
def getVariable(self, attributes):
xml = StringIO.StringIO()
xml.write("<xml>")
val_dict = pydevd_vars.resolve_compound_var_object_fields(self.get_namespace(), attributes)
if val_dict is None:
val_dict = {}
keys = val_dict.keys()
for k in keys:
val = val_dict[k]
evaluate_full_value = pydevd_xml.should_evaluate_full_value(val)
xml.write(pydevd_vars.var_to_xml(val, k, evaluate_full_value=evaluate_full_value))
xml.write("</xml>")
return xml.getvalue()
def getArray(self, attr, roffset, coffset, rows, cols, format):
name = attr.split("\t")[-1]
array = pydevd_vars.eval_in_context(name, self.get_namespace(), self.get_namespace())
return pydevd_vars.table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format)
def evaluate(self, expression):
xml = StringIO.StringIO()
xml.write("<xml>")
result = pydevd_vars.eval_in_context(expression, self.get_namespace(), self.get_namespace())
xml.write(pydevd_vars.var_to_xml(result, expression))
xml.write("</xml>")
return xml.getvalue()
def loadFullValue(self, seq, scope_attrs):
"""
Evaluate full value for async Console variables in a separate thread and send results to IDE side
:param seq: id of command
:param scope_attrs: a sequence of variables with their attributes separated by NEXT_VALUE_SEPARATOR
(e.g.: obj\tattr1\tattr2NEXT_VALUE_SEPARATORobj2\tattr1\tattr2)
:return:
"""
frame_variables = self.get_namespace()
var_objects = []
vars = scope_attrs.split(NEXT_VALUE_SEPARATOR)
for var_attrs in vars:
if '\t' in var_attrs:
name, attrs = var_attrs.split('\t', 1)
else:
name = var_attrs
attrs = None
if name in frame_variables:
var_object = pydevd_vars.resolve_var_object(frame_variables[name], attrs)
var_objects.append((var_object, name))
else:
var_object = pydevd_vars.eval_in_context(name, frame_variables, frame_variables)
var_objects.append((var_object, name))
from _pydevd_bundle.pydevd_comm import GetValueAsyncThreadConsole
t = GetValueAsyncThreadConsole(self.get_server(), seq, var_objects)
t.start()
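# Example of the scope_attrs wire format parsed above (variable names are hypothetical):
#
#     'df\tshape\tdtypes' + NEXT_VALUE_SEPARATOR + 'counter'
#
# i.e. one entry per variable, optionally followed by tab-separated attribute names.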
def changeVariable(self, attr, value):
def do_change_variable():
Exec('%s=%s' % (attr, value), self.get_namespace(), self.get_namespace())
# Important: it has to be really enabled in the main thread, so, schedule
# it to run in the main thread.
self.exec_queue.put(do_change_variable)
def connectToDebugger(self, debuggerPort, debugger_options=None):
'''
Used to show console with variables connection.
Mainly, monkey-patches things in the debugger structure so that the debugger protocol works.
'''
if debugger_options is None:
debugger_options = {}
env_key = "PYDEVD_EXTRA_ENVS"
if env_key in debugger_options:
for (env_name, value) in dict_iter_items(debugger_options[env_key]):
existing_value = os.environ.get(env_name, None)
if existing_value:
os.environ[env_name] = "%s%c%s" % (existing_value, os.path.pathsep, value)
else:
os.environ[env_name] = value
if env_name == "PYTHONPATH":
sys.path.append(value)
del debugger_options[env_key]
def do_connect_to_debugger():
try:
# Try to import the packages needed to attach the debugger
import pydevd
from _pydev_imps._pydev_saved_modules import threading
except:
# This happens on Jython embedded in host eclipse
traceback.print_exc()
sys.stderr.write('pydevd is not available, cannot connect\n')
from _pydevd_bundle.pydevd_constants import set_thread_id
from _pydev_bundle import pydev_localhost
set_thread_id(threading.currentThread(), "console_main")
VIRTUAL_FRAME_ID = "1" # matches PyStackFrameConsole.java
VIRTUAL_CONSOLE_ID = "console_main" # matches PyThreadConsole.java
f = FakeFrame()
f.f_back = None
f.f_globals = {} # As globals=locals here, let's simply let it empty (and save a bit of network traffic).
f.f_locals = self.get_namespace()
self.debugger = pydevd.PyDB()
self.debugger.add_fake_frame(thread_id=VIRTUAL_CONSOLE_ID, frame_id=VIRTUAL_FRAME_ID, frame=f)
try:
pydevd.apply_debugger_options(debugger_options)
self.debugger.connect(pydev_localhost.get_localhost(), debuggerPort)
self.debugger.prepare_to_run()
self.debugger.disable_tracing()
except:
traceback.print_exc()
sys.stderr.write('Failed to connect to target debugger.\n')
# Register to process commands when idle
self.debugrunning = False
try:
import pydevconsole
pydevconsole.set_debug_hook(self.debugger.process_internal_commands)
except:
traceback.print_exc()
sys.stderr.write('Version of Python does not support debuggable Interactive Console.\n')
# Important: it has to be really enabled in the main thread, so, schedule
# it to run in the main thread.
self.exec_queue.put(do_connect_to_debugger)
return ('connect complete',)
def handshake(self):
if self.connect_status_queue is not None:
self.connect_status_queue.put(True)
return "PyCharm"
def get_connect_status_queue(self):
return self.connect_status_queue
def hello(self, input_str):
# Don't care what the input string is
return ("Hello eclipse",)
def enableGui(self, guiname):
''' Enable the GUI specified in guiname (see inputhook for list).
As with IPython, enabling multiple GUIs isn't an error, but
only the last one's main loop runs, and it may not work.
'''
def do_enable_gui():
from _pydev_bundle.pydev_versioncheck import versionok_for_gui
if versionok_for_gui():
try:
from pydev_ipython.inputhook import enable_gui
enable_gui(guiname)
except:
sys.stderr.write("Failed to enable GUI event loop integration for '%s'\n" % guiname)
traceback.print_exc()
elif guiname not in ['none', '', None]:
# Only print a warning if the guiname was going to do something
sys.stderr.write("PyDev console: Python version does not support GUI event loop integration for '%s'\n" % guiname)
# Return value does not matter, so return back what was sent
return guiname
# Important: it has to be really enabled in the main thread, so, schedule
# it to run in the main thread.
self.exec_queue.put(do_enable_gui)
def get_ipython_hidden_vars_dict(self):
return None
# =======================================================================================================================
# FakeFrame
# =======================================================================================================================
class FakeFrame:
'''
Used to show console with variables connection.
A class to be used as a mock of a frame.
'''
| [
[
[
7,
9
],
[
14707,
14709
],
[
14970,
14972
],
[
14978,
14980
],
[
15070,
15072
],
[
15965,
15967
],
[
21147,
21149
],
[
21284,
21286
],
[
21233,
21235
],
[
21350,
21352
]
],
[
[
17,
20
],
[
1002,
1005
],
[
3165,
3168
],
[
1072,
1075
],
[
1276,
1279
],
[
9351,
9354
],
[
9334,
9337
],
[
9409,
9412
],
[
9494,
9497
],
[
9535,
9538
],
[
10033,
10036
],
[
10328,
10331
],
[
10435,
10438
],
[
10676,
10679
],
[
10777,
10780
],
[
11782,
11785
],
[
11967,
11970
],
[
11946,
11949
],
[
16901,
16904
],
[
21444,
21447
],
[
21882,
21885
],
[
22996,
22999
],
[
23360,
23363
],
[
24579,
24582
],
[
24854,
24857
]
],
[
[
28,
37
],
[
10911,
10920
],
[
11895,
11904
],
[
14002,
14011
],
[
16824,
16833
],
[
21844,
21853
],
[
22958,
22967
],
[
23322,
23331
],
[
24684,
24693
]
],
[
[
78,
87
],
[
3465,
3474
],
[
17072,
17081
]
],
[
[
89,
95
],
[
6710,
6716
]
],
[
[
97,
101
],
[
20386,
20390
]
],
[
[
149,
164
],
[
13559,
13574
]
],
[
[
210,
216
],
[
16539,
16545
],
[
16616,
16622
]
],
[
[
244,
255
],
[
13181,
13192
],
[
17961,
17972
],
[
18281,
18292
],
[
18535,
18546
],
[
18628,
18639
],
[
18831,
18842
],
[
18933,
18944
],
[
19840,
19851
],
[
20003,
20014
]
],
[
[
283,
293
],
[
17719,
17729
],
[
18216,
18226
]
],
[
[
339,
348
],
[
14177,
14186
]
],
[
[
350,
365
],
[
21070,
21085
]
],
[
[
367,
387
],
[
19548,
19568
]
],
[
[
389,
393
]
],
[
[
399,
418
],
[
5047,
5066
]
],
[
[
427,
433
],
[
14991,
14997
],
[
15976,
15982
]
],
[
[
457,
471
],
[
5254,
5268
]
],
[
[
498,
507
],
[
5226,
5235
]
],
[
[
525,
546
],
[
17598,
17606
],
[
17895,
17903
],
[
18767,
18775
]
],
[
[
624,
632
],
[
17598,
17606
],
[
17895,
17903
],
[
18767,
18775
]
],
[
[
673,
687
],
[
17598,
17606
],
[
17895,
17903
],
[
18767,
18775
]
],
[
[
952,
961
],
[
2970,
2979
],
[
4508,
4517
],
[
3185,
3194
],
[
4786,
4795
]
],
[
[
2964,
2969
],
[
8927,
8932
]
],
[
[
4490,
4507
],
[
9033,
9050
]
],
[
[
5926,
5938
],
[
13710,
13722
]
],
[
[
6522,
6546
]
],
[
[
25564,
25573
],
[
22306,
22315
]
]
] |
from .writer import saveMeshTracks
from .reader import loadMeshTracks
from .meshdata import Track, Mesh
| [
[
[
20,
34
]
],
[
[
55,
69
]
],
[
[
93,
98
]
],
[
[
100,
104
]
]
] |
"""Utilities for setting up a project's settings.
The default way to use this is to import and call :func:`init_settings`
in a project's settings module:
# project/top_level_package/settings.py
from arcutils.settings import init_settings
init_settings()
This adds a few default settings for bootstrapping purposes and then
loads the project's local settings--the django-local-settings variety.
Pass ``local_settings=False`` to :func:`init_settings` if the project
doesn't use django-local-settings.
"""
import base64
import inspect
import ipaddress
import os
import pkg_resources
from datetime import datetime
from django import VERSION as DJANGO_VERSION
from django.conf import settings as django_settings
from django.utils import timezone
from local_settings import NO_DEFAULT, load_and_check_settings, LocalSetting, SecretSetting
from local_settings.settings import DottedAccessDict, Settings as LocalSettings
ARCUTILS_PACKAGE_DIR = pkg_resources.resource_filename('arcutils', '')
class _InternalIPsType:
"""Used to construct a convenient INTERNAL_IPS setting for dev.
An *instance* of this type considers any standard loopback or
private IP address a valid internal IP address.
"""
def __contains__(self, addr):
addr = ipaddress.ip_address(addr)
return addr.is_loopback or addr.is_private
INTERNAL_IPS = _InternalIPsType()
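# Membership sketch for the permissive INTERNAL_IPS object defined above:
#
#     '127.0.0.1' in INTERNAL_IPS   # True  (loopback)
#     '10.1.2.3' in INTERNAL_IPS    # True  (private)
#     '8.8.8.8' in INTERNAL_IPS     # False (public)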
def init_settings(settings=None, local_settings=True, prompt=None, quiet=None, package_level=0,
stack_level=2, drop=(), settings_processors=()):
"""Initialize project settings.
Basic Usage
===========
By default, it's assumed that the project is structured like so,
with the settings module in the top level package::
project/
package/
__init__.py
settings.py
README
setup.py
It's also assumed that :func:`init_settings` will be called from the
global scope of the project's settings module::
# package/settings.py
from arcutils.settings import init_settings
init_settings()
A few default settings that are commonly used in local settings
files will be added (if not explicitly set before calling this
function):
- ARCUTILS_PACKAGE_DIR
- PACKAGE (top level project package)
- PACKAGE_DIR (top level project package directory)
- ROOT_DIR (project directory; should only be used in dev)
- START_TIME (current date/time; will be an "aware" UTC datetime
object if the project has time zone support enabled)
If the project has additional local settings, they must be defined
*before* this function is called.
Advanced Usage
==============
Generally, you won't need to pass ``settings``, but if you do, it
should be a dict of settings as you'd get from calling ``globals()``
in the project's settings module.
If the settings module is in a sub-package, ``package_level`` will
need to be adjusted accordingly. If :func:`init_settings` is being
called from another function, ``stack_level`` will have to be
adjusted accordingly. See :func:`derive_top_level_package_name` for
more info about these args.
The ``PACKAGE``, ``PACKAGE_DIR``, and ``ROOT_DIR`` settings will be
derived based on the location of the settings module this function
is called from. If this isn't working, ensure the ``package_level``
and ``stack_level`` options are correct; or, set the ``PACKAGE``
setting explicitly before calling this function::
PACKAGE = 'quickticket'
init_settings()
``PACKAGE_DIR`` and ``ROOT_DIR`` can also be set explicitly if
necessary.
.. note:: If the package name and related settings can't be derived
automatically, that indicates a bug in this function.
To drop unused default settings, specify a list of such settings via
the ``drop`` arg.
To process settings in any custom manner needed, pass a list of
functions via ``settings_processors``. Each processor will be passed
the settings to be manipulated as necessary.
"""
settings = settings if settings is not None else get_module_globals(stack_level)
if not settings.get('ARCUTILS_PACKAGE_DIR'):
settings['ARCUTILS_PACKAGE_DIR'] = ARCUTILS_PACKAGE_DIR
if not settings.get('PACKAGE'):
# The default value for PACKAGE is derived by figuring out where
# init_settings was called from in terms of package and scope.
settings['PACKAGE'] = derive_top_level_package_name(package_level, stack_level)
if not settings.get('PACKAGE_DIR'):
# The default value for PACKAGE_DIR is simply the directory
# corresponding to PACKAGE.
settings['PACKAGE_DIR'] = pkg_resources.resource_filename(settings['PACKAGE'], '')
if not settings.get('ROOT_DIR'):
# The default value for ROOT_DIR is the directory N levels up
# from PACKAGE_DIR, where N is equal to the package depth of the
# top level package. Note that in most cases N is 1; it will be
# greater than 1 when the top level package is contained in a
# namespace package.
package_depth = len(settings['PACKAGE'].split('.'))
parts = os.path.split(settings['PACKAGE_DIR'])
root_dir = os.path.join(*parts[:package_depth])
settings['ROOT_DIR'] = root_dir
if local_settings:
init_local_settings(settings, prompt=prompt, quiet=quiet)
# NOTE: We can't simply use Django's timezone.now() here because it
# accesses settings.USE_TZ, but at this point the settings
# may not be considered fully configured by Django, so we have
# to do this to avoid an ImproperlyConfigured exception.
use_tz = settings.get('USE_TZ', False)
now = datetime.utcnow().replace(tzinfo=timezone.utc) if use_tz else datetime.now()
settings.setdefault('START_TIME', now)
# Remove the MIDDLEWARE_CLASSES setting on Django >= 1.10, but only
# if the MIDDLEWARE setting is present *and* set.
if DJANGO_VERSION[:2] >= (1, 10):
if settings.get('MIDDLEWARE'):
settings.pop('MIDDLEWARE_CLASSES', None)
# Drop irrelevant settings.
for name in drop:
del settings[name]
for processor in settings_processors:
processor(settings)
return settings
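# A minimal, illustrative sketch of the "Advanced Usage" described above: dropping an
# unused default setting and passing a custom settings processor. The processor name
# and the CACHES value are hypothetical examples, not part of arcutils itself.
#
#     # package/settings.py
#     def add_caches(settings):
#         settings.setdefault('CACHES', {'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}})
#
#     init_settings(drop=['START_TIME'], settings_processors=[add_caches])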
def init_local_settings(settings, prompt=None, quiet=None):
"""Initialize the local settings defined in ``settings``.
Args:
settings (dict): A dict of settings as you'd get from calling
``globals()`` in a Django settings module.
quiet (bool): Squelch standard out when loading local settings.
.. note:: If your project has additional local settings, they must
be defined *before* this function is called.
"""
suggested_secret_key = base64.b64encode(os.urandom(64)).decode('utf-8')
defaults = {
'DEBUG': LocalSetting(False),
'ADMINS': LocalSetting([]),
'ALLOWED_HOSTS': LocalSetting([]),
'GOOGLE': {
'analytics': {
'tracking_id': LocalSetting(
None, doc='Enter Google Analytics tracking ID (UA-NNNNNNNN-N)'
),
},
},
'MANAGERS': LocalSetting([]),
'SECRET_KEY': SecretSetting(doc='Suggested: "{suggested_secret_key}"'.format(**locals())),
'DATABASES': {
'default': {
'ENGINE': LocalSetting('django.db.backends.postgresql'),
'NAME': LocalSetting(settings.get('PACKAGE', NO_DEFAULT)),
'USER': LocalSetting(''),
'PASSWORD': SecretSetting(),
'HOST': LocalSetting(''),
},
},
}
for k, v in defaults.items():
settings.setdefault(k, v)
settings.update(load_and_check_settings(settings, prompt=prompt, quiet=quiet))
def get_setting(name, default=NO_DEFAULT, settings=None):
"""Get setting for ``name``, falling back to ``default`` if passed.
``name`` should be a string like 'ARC.cdn.hosts' or 'X.Y.0'. The
name is split on dots into path segments, then the settings are
traversed like this:
- Set current value to django.conf.settings.{first segment}
- For each other segment
- Get current_value[segment] if current value is a dict
- Get current_value[int(segment)] if current value is a list
If the setting isn't found, the ``default`` value will be returned
if specified; otherwise, a ``KeyError`` will be raised.
``settings`` can be used to retrieve the setting from a settings
object other than the default ``django.conf.settings``.
:class:`local_settings.settings.DottedAccessDict` is used to
implement this functionality. See the django-local-settings project
for more details about settings traversal.
"""
if settings is None:
settings = django_settings
if not isinstance(settings, LocalSettings):
settings = DottedAccessDict(get_settings_dict(settings))
return settings.get_dotted(name, default)
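# Illustrative examples of the dotted traversal described above; the ARC setting shown
# here is hypothetical:
#
#     # Given settings.ARC = {'cdn': {'hosts': ['cdn.example.com']}}
#     get_setting('ARC.cdn.hosts.0')                # -> 'cdn.example.com'
#     get_setting('ARC.cdn.missing', default=None)  # -> None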
class PrefixedSettings:
"""Read-only settings for a given ``prefix``.
Args:
prefix: An upper case setting name such as "CAS" or "LDAP"
defaults: A dict of defaults for the prefix
The idea is to make it easy to fetch sub-settings within a given
package.
For example::
>>> DEFAULT_CAS_SETTINGS = {
... 'base_url': 'https://example.com/cas/',
... # plus a bunch more CAS settings...
... }
>>> cas_settings = PrefixedSettings('CAS', DEFAULT_CAS_SETTINGS)
>>> cas_settings.get('base_url')
'https://example.com/cas/'
>>> cas_settings.get('logout_path', default='/default/logout/path')
'/default/logout/path'
See the ``cas``, ``ldap``, and ``masquerade`` packages for concrete
examples of how this is used.
"""
def __init__(self, prefix, defaults=None, settings=None):
defaults = get_settings_dict(defaults)
settings = get_settings_dict(settings if settings is not None else django_settings)
self.__prefix = prefix
self.__defaults = DottedAccessDict(defaults)
self.__settings = DottedAccessDict(settings)
def get(self, name, default=NO_DEFAULT):
"""Get setting for configured ``prefix``.
Args:
name: setting name without ``prefix``
default: value to use if setting isn't present in the
project's settings or in the ``defaults``
Returns:
object: Value of setting
Attempt to get setting from:
1. Project settings for ``prefix``
2. Default settings from ``defaults``
3. ``default`` arg
Raises:
KeyError: When the setting isn't found in the project's
settings or in the ``defaults`` and no fallback is
passed via the ``default`` keyword arg
"""
qualified_name = '{prefix}.{name}'.format(prefix=self.__prefix, name=name)
try:
return self.__settings.get_dotted(qualified_name)
except KeyError:
return self.__defaults.get_dotted(name, default=default)
def __getitem__(self, key):
return PrefixedSettings.get(self, key, NO_DEFAULT)
# Internal helper functions
def get_settings_dict(settings):
"""For a given settings object, return a dict.
Args:
settings (object): Usually either a Django settings object or
a dict; can also be a sequence that can be converted to
a dict or some other non-dict mapping
Returns:
empty dict: ``settings`` is ``None``
vars(settings._wrapped): ``settings`` is (or appears to be)
a Django settings object
dict(settings): ``settings`` is any other type of object
"""
if settings is None:
return {}
if hasattr(settings, '_wrapped'):
# A Django settings object
# TODO: Find a better way to check for Django settings?
return vars(settings._wrapped)
return dict(settings)
def derive_top_level_package_name(package_level=0, stack_level=1):
"""Return top level package name.
Args:
package_level (int): How many package levels down the caller
is. 0 indicates this function is being called from the top
level package, 1 indicates that it's being called from a
sub-package, etc.
stack_level (int): How many levels down the stack the caller is
from here. 1 indicates this function is being called from
module scope, 2 indicates this function is being called from
another function, etc.
This will first get the package name of the module containing the
    caller. ``package_level`` segments will then be chopped off of
the package name.
If this is called from a sub-package, ``package_level`` will have to
be adjusted accordingly (add 1 for each sub-package).
If this is called indirectly (e.g., via :func:`init_settings`)
``stack_level`` will have to be adjusted accordingly (add 1 for each
nested function).
"""
assert package_level >= 0, 'Package level should be greater than or equal to 0'
assert stack_level > 0, 'Stack level should be greater than 0'
frame = inspect.stack()[stack_level][0]
package = frame.f_globals['__package__']
package = package.rsplit('.', package_level)[0]
return package
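# Illustrative note (module names are hypothetical): calling this at module scope from
# quickticket/settings.py with package_level=0, stack_level=1 returns 'quickticket';
# calling it from quickticket/sub/settings.py would need package_level=1 so that the
# trailing 'sub' segment is chopped off.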
def get_module_globals(stack_level=2):
frame = inspect.stack()[stack_level][0]
return frame.f_globals
| [
[
[
527,
533
],
[
6902,
6908
]
],
[
[
541,
548
],
[
13472,
13479
],
[
13673,
13680
]
],
[
[
556,
565
],
[
1278,
1287
]
],
[
[
573,
575
],
[
5298,
5300
],
[
5356,
5358
],
[
6919,
6921
]
],
[
[
583,
596
],
[
957,
970
],
[
4813,
4826
]
],
[
[
618,
626
],
[
5858,
5866
],
[
5920,
5928
]
],
[
[
647,
672
],
[
6112,
6126
]
],
[
[
697,
724
],
[
8989,
9004
],
[
10192,
10207
]
],
[
[
750,
758
],
[
5891,
5899
]
],
[
[
787,
797
],
[
7982,
7992
],
[
10379,
10389
],
[
7624,
7634
],
[
11425,
11435
]
],
[
[
799,
822
],
[
7887,
7910
]
],
[
[
824,
836
],
[
6985,
6997
],
[
7024,
7036
],
[
7067,
7079
],
[
7163,
7175
],
[
7325,
7337
],
[
7516,
7528
],
[
7587,
7599
],
[
7662,
7674
],
[
7749,
7761
]
],
[
[
838,
851
],
[
7365,
7378
],
[
7708,
7721
]
],
[
[
888,
904
],
[
9073,
9089
],
[
10266,
10282
],
[
10319,
10335
]
],
[
[
906,
931
],
[
9038,
9051
]
],
[
[
934,
954
],
[
4344,
4364
]
],
[
[
1013,
1029
],
[
1373,
1389
]
],
[
[
1358,
1370
]
],
[
[
1398,
1411
]
],
[
[
6415,
6434
],
[
5465,
5484
]
],
[
[
7956,
7967
]
],
[
[
9174,
9190
],
[
11393,
11409
]
],
[
[
11473,
11490
],
[
9090,
9107
],
[
10089,
10106
],
[
10136,
10153
]
],
[
[
12241,
12270
],
[
4576,
4605
]
],
[
[
13626,
13644
],
[
4219,
4237
]
]
] |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from swift import gettext_ as _
from urllib import unquote
from swift.account.utils import account_listing_response
from swift.common.request_helpers import get_listing_content_type
from swift.common.utils import public
from swift.common.constraints import check_metadata, MAX_ACCOUNT_NAME_LENGTH
from swift.common.http import HTTP_NOT_FOUND, HTTP_GONE
from swift.proxy.controllers.base import Controller, clear_info_cache
from swift.common.swob import HTTPBadRequest, HTTPMethodNotAllowed
class AccountController(Controller):
"""WSGI controller for account requests"""
server_type = 'Account'
def __init__(self, app, account_name, **kwargs):
Controller.__init__(self, app)
self.account_name = unquote(account_name)
if not self.app.allow_account_management:
self.allowed_methods.remove('PUT')
self.allowed_methods.remove('DELETE')
def GETorHEAD(self, req):
"""Handler for HTTP GET/HEAD requests."""
if len(self.account_name) > MAX_ACCOUNT_NAME_LENGTH:
resp = HTTPBadRequest(request=req)
resp.body = 'Account name length of %d longer than %d' % \
(len(self.account_name), MAX_ACCOUNT_NAME_LENGTH)
return resp
partition, nodes = self.app.account_ring.get_nodes(self.account_name)
resp = self.GETorHEAD_base(
req, _('Account'), self.app.account_ring, partition,
req.path_info.rstrip('/'))
if resp.status_int == HTTP_NOT_FOUND:
if resp.headers.get('X-Account-Status', '').lower() == 'deleted':
resp.status = HTTP_GONE
elif self.app.account_autocreate:
resp = account_listing_response(self.account_name, req,
get_listing_content_type(req))
if not req.environ.get('swift_owner', False):
for key in self.app.swift_owner_headers:
if key in resp.headers:
del resp.headers[key]
return resp
@public
def PUT(self, req):
"""HTTP PUT request handler."""
if not self.app.allow_account_management:
return HTTPMethodNotAllowed(
request=req,
headers={'Allow': ', '.join(self.allowed_methods)})
error_response = check_metadata(req, 'account')
if error_response:
return error_response
if len(self.account_name) > MAX_ACCOUNT_NAME_LENGTH:
resp = HTTPBadRequest(request=req)
resp.body = 'Account name length of %d longer than %d' % \
(len(self.account_name), MAX_ACCOUNT_NAME_LENGTH)
return resp
account_partition, accounts = \
self.app.account_ring.get_nodes(self.account_name)
headers = self.generate_request_headers(req, transfer=True)
clear_info_cache(self.app, req.environ, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'PUT',
req.path_info, [headers] * len(accounts))
return resp
@public
def POST(self, req):
"""HTTP POST request handler."""
if len(self.account_name) > MAX_ACCOUNT_NAME_LENGTH:
resp = HTTPBadRequest(request=req)
resp.body = 'Account name length of %d longer than %d' % \
(len(self.account_name), MAX_ACCOUNT_NAME_LENGTH)
return resp
error_response = check_metadata(req, 'account')
if error_response:
return error_response
account_partition, accounts = \
self.app.account_ring.get_nodes(self.account_name)
headers = self.generate_request_headers(req, transfer=True)
clear_info_cache(self.app, req.environ, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'POST',
req.path_info, [headers] * len(accounts))
if resp.status_int == HTTP_NOT_FOUND and self.app.account_autocreate:
self.autocreate_account(req.environ, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'POST',
req.path_info, [headers] * len(accounts))
return resp
@public
def DELETE(self, req):
"""HTTP DELETE request handler."""
# Extra safety in case someone typos a query string for an
# account-level DELETE request that was really meant to be caught by
# some middleware.
if req.query_string:
return HTTPBadRequest(request=req)
if not self.app.allow_account_management:
return HTTPMethodNotAllowed(
request=req,
headers={'Allow': ', '.join(self.allowed_methods)})
account_partition, accounts = \
self.app.account_ring.get_nodes(self.account_name)
headers = self.generate_request_headers(req)
clear_info_cache(self.app, req.environ, self.account_name)
resp = self.make_requests(
req, self.app.account_ring, account_partition, 'DELETE',
req.path_info, [headers] * len(accounts))
return resp
| [
[
[
613,
626
],
[
1980,
1981
]
],
[
[
646,
653
],
[
1321,
1328
]
],
[
[
687,
711
],
[
2300,
2324
]
],
[
[
753,
777
],
[
2397,
2421
]
],
[
[
809,
815
],
[
2643,
2649
],
[
3715,
3721
],
[
4916,
4922
]
],
[
[
853,
867
],
[
2927,
2941
],
[
4090,
4104
]
],
[
[
869,
892
],
[
1607,
1630
],
[
1799,
1822
],
[
3055,
3078
],
[
3247,
3270
],
[
3824,
3847
],
[
4016,
4039
]
],
[
[
923,
937
],
[
2097,
2111
],
[
4606,
4620
]
],
[
[
939,
948
],
[
2221,
2230
]
],
[
[
990,
1000
],
[
1112,
1122
],
[
1262,
1272
]
],
[
[
1002,
1018
],
[
3475,
3491
],
[
4361,
4377
],
[
5592,
5608
]
],
[
[
1049,
1063
],
[
1651,
1665
],
[
3099,
3113
],
[
3868,
3882
],
[
5212,
5226
]
],
[
[
1065,
1085
],
[
2783,
2803
],
[
5309,
5329
]
],
[
[
1094,
1111
]
]
] |
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 9 21:03:57 2020
@author: Mehul
"""
#importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import random
import warnings
from matplotlib import style
from collections import Counter
from math import sqrt
style.use('fivethirtyeight')
#defining knn function
def k_nearest_neighbors(data,predict,k=3):
distances=[]
if(len(data)>=k):
        #this is not an error, it is just a warning; the algorithm still works
warnings.warn('The value of k is less than the number of voting groups.')
for group in data:
#data is a dictionary of lists with different groups of classes
for features in data[group]:
#features represent the points in the dataset
#original way
#euclidean_distance=sqrt((features[0]-predict[0])**2+(features[1]-predict[1])**2)
#faster way
euclidean_distance=np.linalg.norm(np.array(features)-np.array(predict))
distances.append([euclidean_distance,group])
    #once we have the distances we don't care about the exact values anymore
    #we populate the list of votes, which holds the classes of the top k neighbors to the prediction point
votes=[i[1] for i in sorted(distances)[:k] ]
#using counter we calculate the most common out of the nearest neighbors
vote_result=Counter(votes).most_common(1)[0][0]
    #we can also report our confidence; confidence is the fraction of the k nearest neighbors that voted for the winning class, i.e. how likely the prediction is to be right
#confidence=Counter(votes).most_common(1)[0][1]/k
return vote_result
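#a sketch (not part of the original tutorial) of the confidence idea mentioned above:
#return both the vote and the share of the k nearest neighbors that agreed with it
def k_nearest_neighbors_with_confidence(data,predict,k=3):
    distances=[]
    for group in data:
        for features in data[group]:
            euclidean_distance=np.linalg.norm(np.array(features)-np.array(predict))
            distances.append([euclidean_distance,group])
    votes=[i[1] for i in sorted(distances)[:k]]
    #most_common(1)[0] is a (class,count) pair for the winning class
    vote_result,count=Counter(votes).most_common(1)[0]
    confidence=count/k
    return vote_result,confidence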
def accuracy_of_result(train_set,test_set):
    #initialising
correct=0
total=0
#testing and finding accuracy
for group in test_set:
for data in test_set[group]:
#iterating through all the data in a class
result=k_nearest_neighbors(train_set,data,k=5)
if (group==result):
correct=correct+1
total=total+1
accuracy=correct/total
return accuracy
'''
#trial data
#our data is in form of dictionary of lists
dataset={'k':[[1,2],[2,3,],[3,1]],'r':[[6,5],[7,7],[8,6]]}
new_features=[5,7]
#plotting the data
plt.scatter(new_features[0],new_features[1],s=50)
for i in dataset:
for j in dataset[i]:
print(j)
plt.scatter(j[0],j[1],s=100,color=i)
#applying knn model
result=k_nearest_neighbors(dataset,new_features,k=3)#result represents the class the prediction point belongs to
#plotting the prediction
plt.scatter(new_features[0],new_features[1],s=50,color=result)
for i in dataset:
for j in dataset[i]:
print(j)
plt.scatter(j[0],j[1],s=100,color=i)
'''
#Implementing the model on the test dataset
#importing the dataset
dataset=pd.read_csv('breast-cancer-wisconsin.data.txt')
#replacing missing instances with large numbers
dataset.replace('?',-99999,inplace=True)
dataset.drop(['id'],axis=1,inplace=True)
dataset=dataset.astype(float).values.tolist()
#shuffling the data to include some randomness
#this does not change the relationship between the data points
#this is what can be used for cross-validation
random.shuffle(dataset)
#splitting the dataset into test set and train set
test_size=0.2
#the train set and the test set are dictionaries of lists
train_set={2:[],4:[]}
test_set={2:[],4:[]}
#slicing the data into train_data and test_data
train_data=dataset[:-int(test_size*len(dataset))] #all the data up to the last 20%
test_data=dataset[-int(test_size*len(dataset)):] #the last 20%
#populating the dictionary
#here we take the data from the train_data and the test_data and use it to populate our dictionaries
for i in train_data:
train_set[i[-1]].append(i[:-1])# i[-1] represents the class of the particular row
for i in test_data:
test_set[i[-1]].append(i[:-1])# i[-1] represents the class of the particular row
#getting the accuracy of our knn model on the dataset
print('Accuracy of the result:',accuracy_of_result(train_set,test_set)) | [
[
[
125,
136
],
[
926,
928
],
[
941,
943
],
[
960,
962
]
],
[
[
145,
169
]
],
[
[
178,
190
],
[
2627,
2629
]
],
[
[
199,
205
],
[
3008,
3014
]
],
[
[
214,
222
],
[
522,
530
]
],
[
[
247,
252
],
[
310,
315
]
],
[
[
278,
285
],
[
1309,
1316
]
],
[
[
304,
308
]
],
[
[
370,
389
],
[
1756,
1775
]
],
[
[
1527,
1545
],
[
3839,
3857
]
],
[
[
2619,
2626
],
[
2728,
2735
],
[
2770,
2777
],
[
2815,
2822
]
],
[
[
2807,
2814
],
[
3023,
3030
],
[
3268,
3275
],
[
3296,
3303
],
[
3350,
3357
],
[
3377,
3384
]
],
[
[
3087,
3096
],
[
3282,
3291
],
[
3363,
3372
]
],
[
[
3161,
3170
],
[
3561,
3570
],
[
3858,
3867
]
],
[
[
3184,
3192
],
[
3668,
3676
],
[
3868,
3876
]
],
[
[
3257,
3267
],
[
3547,
3557
]
],
[
[
3340,
3349
],
[
3655,
3664
]
],
[
[
3542,
3543
],
[
3571,
3572
],
[
3585,
3586
]
],
[
[
3650,
3651
],
[
3677,
3678
],
[
3691,
3692
]
]
] |
# -*- coding:utf-8 -*-
from mongoengine import (IntField, DateTimeField, StringField, ReferenceField, DictField)
from model import BaseModel
# from ext import db
class Account(BaseModel):
name = StringField(max_length=5000, null=False)
tel = IntField(null=False)
password = StringField(max_length=5000, null=False)
head_img_key = StringField(max_length=5000, null=False)
meta = {'collection': 'account'}
| [
[
[
48,
56
],
[
253,
261
]
],
[
[
58,
71
]
],
[
[
73,
84
],
[
202,
213
],
[
289,
300
],
[
349,
360
]
],
[
[
86,
100
]
],
[
[
102,
111
]
],
[
[
131,
140
],
[
179,
188
]
],
[
[
171,
178
]
]
] |
import autofit as af
import autolens as al
from test_autolens.integration.tests.interferometer import runner
test_type = "lens_only"
test_name = "lens_x2_light__hyper"
data_type = "lens_x2_light"
data_resolution = "sma"
def make_pipeline(
name,
phase_folders,
real_space_shape_2d=(100, 100),
real_space_pixel_scales=(0.1, 0.1),
non_linear_class=af.MultiNest,
):
class LensPlaneGalaxyX2Phase(al.PhaseInterferometer):
def customize_priors(self, results):
self.galaxies.lens_0.light.centre_0 = -1.0
self.galaxies.lens_0.light.centre_1 = -1.0
self.galaxies.lens_1.light.centre_0 = 1.0
self.galaxies.lens_1.light.centre_1 = 1.0
phase1 = LensPlaneGalaxyX2Phase(
phase_name="phase_1",
phase_folders=phase_folders,
galaxies=dict(
lens_0=al.GalaxyModel(redshift=0.5, light=al.lp.EllipticalSersic),
lens_1=al.GalaxyModel(redshift=0.5, light=al.lp.EllipticalSersic),
),
real_space_shape_2d=real_space_shape_2d,
real_space_pixel_scales=real_space_pixel_scales,
non_linear_class=non_linear_class,
)
phase1.optimizer.const_efficiency_mode = True
phase1.optimizer.n_live_points = 40
phase1.optimizer.sampling_efficiency = 0.8
phase1 = phase1.extend_with_multiple_hyper_phases(hyper_galaxy=True)
phase2 = al.PhaseInterferometer(
phase_name="phase_2",
phase_folders=phase_folders,
galaxies=dict(
lens_0=al.GalaxyModel(
redshift=0.5,
light=phase1.result.model.galaxies.lens_0.light,
hyper_galaxy=phase1.result.hyper_combined.instance.galaxies.lens_0.hyper_galaxy,
),
lens_1=al.GalaxyModel(
redshift=0.5,
light=phase1.result.model.galaxies.lens_1.light,
hyper_galaxy=phase1.result.hyper_combined.instance.galaxies.lens_1.hyper_galaxy,
),
),
real_space_shape_2d=real_space_shape_2d,
real_space_pixel_scales=real_space_pixel_scales,
non_linear_class=non_linear_class,
)
phase2.optimizer.const_efficiency_mode = True
phase2.optimizer.n_live_points = 40
phase2.optimizer.sampling_efficiency = 0.8
return al.PipelineDataset(name, phase1, phase2)
if __name__ == "__main__":
import sys
runner.run(sys.modules[__name__])
| [
[
[
7,
20
],
[
368,
370
]
],
[
[
28,
42
],
[
418,
420
],
[
855,
857
],
[
890,
892
],
[
934,
936
],
[
969,
971
],
[
1386,
1388
],
[
1519,
1521
],
[
1761,
1763
],
[
2300,
2302
]
],
[
[
102,
108
],
[
2390,
2396
]
],
[
[
110,
119
]
],
[
[
134,
143
]
],
[
[
169,
178
]
],
[
[
197,
212
]
],
[
[
227,
240
]
],
[
[
2381,
2384
],
[
2401,
2404
]
]
] |
from django.test import TestCase, override_settings
from model_bakery import baker
from rest_framework.test import APIClient
from accounts.models import User
from core.models import CoreSettings
from rest_framework.authtoken.models import Token
class TacticalTestCase(TestCase):
def authenticate(self):
self.john = User(username="john")
self.john.set_password("hunter2")
self.john.save()
self.alice = User(username="alice")
self.alice.set_password("hunter2")
self.alice.save()
self.client_setup()
self.client.force_authenticate(user=self.john)
def setup_agent_auth(self, agent):
agent_user = User.objects.create_user(
username=agent.agent_id, password=User.objects.make_random_password(60)
)
Token.objects.create(user=agent_user)
def client_setup(self):
self.client = APIClient()
# fixes tests waiting 2 minutes for mesh token to appear
@override_settings(
MESH_TOKEN_KEY="41410834b8bb4481446027f87d88ec6f119eb9aa97860366440b778540c7399613f7cabfef4f1aa5c0bd9beae03757e17b2e990e5876b0d9924da59bdf24d3437b3ed1a8593b78d65a72a76c794160d9"
)
def setup_coresettings(self):
self.coresettings = CoreSettings.objects.create()
def check_not_authenticated(self, method, url):
self.client.logout()
switch = {
"get": self.client.get(url),
"post": self.client.post(url),
"put": self.client.put(url),
"patch": self.client.patch(url),
"delete": self.client.delete(url),
}
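        # note: building this dict issues all five requests eagerly; only the
        # response for the requested method is asserted below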
r = switch.get(method)
self.assertEqual(r.status_code, 401)
def create_checks(self, policy=None, agent=None, script=None):
if not policy and not agent:
return
# will create 1 of every check and associate it with the policy object passed
check_recipes = [
"checks.diskspace_check",
"checks.ping_check",
"checks.cpuload_check",
"checks.memory_check",
"checks.winsvc_check",
"checks.script_check",
"checks.eventlog_check",
]
checks = list()
for recipe in check_recipes:
if not script:
checks.append(baker.make_recipe(recipe, policy=policy, agent=agent))
else:
checks.append(
baker.make_recipe(recipe, policy=policy, agent=agent, script=script)
)
return checks
| [
[
[
24,
32
],
[
272,
280
]
],
[
[
34,
51
],
[
973,
990
]
],
[
[
77,
82
],
[
2296,
2301
],
[
2420,
2425
]
],
[
[
116,
125
],
[
894,
903
]
],
[
[
155,
159
],
[
331,
335
],
[
441,
445
],
[
677,
681
],
[
749,
753
]
],
[
[
184,
196
],
[
1246,
1258
]
],
[
[
241,
246
],
[
805,
810
]
],
[
[
255,
271
]
]
] |
#
# Copyright (c) 2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Union
import numpy as np
import scipy.signal
from rl_coach.agents.policy_optimization_agent import PolicyOptimizationAgent, PolicyGradientRescaler
from rl_coach.architectures.tensorflow_components.heads.policy_head import PolicyHeadParameters
from rl_coach.architectures.tensorflow_components.heads.v_head import VHeadParameters
from rl_coach.architectures.tensorflow_components.middlewares.fc_middleware import FCMiddlewareParameters
from rl_coach.base_parameters import AlgorithmParameters, NetworkParameters, \
AgentParameters
from rl_coach.logger import screen
from rl_coach.memories.episodic.single_episode_buffer import SingleEpisodeBufferParameters
from rl_coach.spaces import DiscreteActionSpace
from rl_coach.utils import last_sample
from rl_coach.architectures.tensorflow_components.embedders.embedder import InputEmbedderParameters
class ActorCriticAlgorithmParameters(AlgorithmParameters):
def __init__(self):
super().__init__()
self.policy_gradient_rescaler = PolicyGradientRescaler.A_VALUE
self.apply_gradients_every_x_episodes = 5
self.beta_entropy = 0
self.num_steps_between_gradient_updates = 5000 # this is called t_max in all the papers
self.gae_lambda = 0.96
self.estimate_state_value_using_gae = False
class ActorCriticNetworkParameters(NetworkParameters):
def __init__(self):
super().__init__()
self.input_embedders_parameters = {'observation': InputEmbedderParameters()}
self.middleware_parameters = FCMiddlewareParameters()
self.heads_parameters = [VHeadParameters(), PolicyHeadParameters()]
self.loss_weights = [0.5, 1.0]
self.rescale_gradient_from_head_by_factor = [1, 1]
self.optimizer_type = 'Adam'
self.clip_gradients = 40.0
self.async_training = True
class ActorCriticAgentParameters(AgentParameters):
def __init__(self):
super().__init__(algorithm=ActorCriticAlgorithmParameters(),
exploration=None, #TODO this should be different for continuous (ContinuousEntropyExploration)
# and discrete (CategoricalExploration) action spaces.
memory=SingleEpisodeBufferParameters(),
networks={"main": ActorCriticNetworkParameters()})
@property
def path(self):
return 'rl_coach.agents.actor_critic_agent:ActorCriticAgent'
# Actor Critic - https://arxiv.org/abs/1602.01783
class ActorCriticAgent(PolicyOptimizationAgent):
def __init__(self, agent_parameters, parent: Union['LevelManager', 'CompositeAgent']=None):
super().__init__(agent_parameters, parent)
self.last_gradient_update_step_idx = 0
self.action_advantages = self.register_signal('Advantages')
self.state_values = self.register_signal('Values')
self.value_loss = self.register_signal('Value Loss')
self.policy_loss = self.register_signal('Policy Loss')
# Discounting function used to calculate discounted returns.
def discount(self, x, gamma):
return scipy.signal.lfilter([1], [1, -gamma], x[::-1], axis=0)[::-1]
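    # (illustrative note) the lfilter call above computes the reverse cumulative
    # discounted sum discount(x, gamma)[t] = sum_k gamma**k * x[t + k];
    # e.g. discount([1, 1, 1], 0.5) -> [1.75, 1.5, 1.0]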
def get_general_advantage_estimation_values(self, rewards, values):
        # values contain n+1 elements (t ... t+n), rewards contain n elements (t ... t+n-1)
bootstrap_extended_rewards = np.array(rewards.tolist() + [values[-1]])
# Approximation based calculation of GAE (mathematically correct only when Tmax = inf,
# although in practice works even in much smaller Tmax values, e.g. 20)
deltas = rewards + self.ap.algorithm.discount * values[1:] - values[:-1]
gae = self.discount(deltas, self.ap.algorithm.discount * self.ap.algorithm.gae_lambda)
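        # (illustrative note) the deltas above are the one-step TD errors
        # delta_t = r_t + gamma * V(s_{t+1}) - V(s_t), and
        # gae[t] = sum_k (gamma * lambda)**k * delta_{t+k}, as in the GAE paper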
if self.ap.algorithm.estimate_state_value_using_gae:
discounted_returns = np.expand_dims(gae + values[:-1], -1)
else:
discounted_returns = np.expand_dims(np.array(self.discount(bootstrap_extended_rewards,
self.ap.algorithm.discount)), 1)[:-1]
return gae, discounted_returns
def learn_from_batch(self, batch):
# batch contains a list of episodes to learn from
network_keys = self.ap.network_wrappers['main'].input_embedders_parameters.keys()
# get the values for the current states
result = self.networks['main'].online_network.predict(batch.states(network_keys))
current_state_values = result[0]
self.state_values.add_sample(current_state_values)
# the targets for the state value estimator
num_transitions = batch.size
state_value_head_targets = np.zeros((num_transitions, 1))
# estimate the advantage function
action_advantages = np.zeros((num_transitions, 1))
if self.policy_gradient_rescaler == PolicyGradientRescaler.A_VALUE:
if batch.game_overs()[-1]:
R = 0
else:
R = self.networks['main'].online_network.predict(last_sample(batch.next_states(network_keys)))[0]
for i in reversed(range(num_transitions)):
R = batch.rewards()[i] + self.ap.algorithm.discount * R
state_value_head_targets[i] = R
action_advantages[i] = R - current_state_values[i]
elif self.policy_gradient_rescaler == PolicyGradientRescaler.GAE:
# get bootstraps
bootstrapped_value = self.networks['main'].online_network.predict(last_sample(batch.next_states(network_keys)))[0]
values = np.append(current_state_values, bootstrapped_value)
if batch.game_overs()[-1]:
values[-1] = 0
# get general discounted returns table
gae_values, state_value_head_targets = self.get_general_advantage_estimation_values(batch.rewards(), values)
action_advantages = np.vstack(gae_values)
else:
screen.warning("WARNING: The requested policy gradient rescaler is not available")
action_advantages = action_advantages.squeeze(axis=-1)
actions = batch.actions()
if not isinstance(self.spaces.action, DiscreteActionSpace) and len(actions.shape) < 2:
actions = np.expand_dims(actions, -1)
# train
result = self.networks['main'].online_network.accumulate_gradients({**batch.states(network_keys),
'output_1_0': actions},
[state_value_head_targets, action_advantages])
# logging
total_loss, losses, unclipped_grads = result[:3]
self.action_advantages.add_sample(action_advantages)
self.unclipped_grads.add_sample(unclipped_grads)
self.value_loss.add_sample(losses[0])
self.policy_loss.add_sample(losses[1])
return total_loss, losses, unclipped_grads
def get_prediction(self, states):
tf_input_state = self.prepare_batch_for_inference(states, "main")
return self.networks['main'].online_network.predict(tf_input_state)[1:] # index 0 is the state value
| [
[
[
610,
615
],
[
3198,
3203
]
],
[
[
624,
635
],
[
3975,
3977
],
[
4464,
4466
],
[
4549,
4551
],
[
4564,
4566
],
[
5317,
5319
],
[
5419,
5421
],
[
6215,
6217
],
[
6542,
6544
],
[
6888,
6890
]
],
[
[
643,
655
],
[
3709,
3714
]
],
[
[
711,
734
],
[
3123,
3146
]
],
[
[
736,
758
],
[
1611,
1633
],
[
5495,
5517
],
[
6010,
6032
]
],
[
[
834,
854
],
[
2209,
2229
]
],
[
[
925,
940
],
[
2190,
2205
]
],
[
[
1024,
1046
],
[
2132,
2154
]
],
[
[
1084,
1103
],
[
1498,
1517
]
],
[
[
1105,
1122
],
[
1939,
1956
]
],
[
[
1130,
1145
],
[
2473,
2488
]
],
[
[
1174,
1180
],
[
6590,
6596
]
],
[
[
1242,
1271
],
[
2835,
2864
]
],
[
[
1300,
1319
],
[
6817,
6836
]
],
[
[
1347,
1358
],
[
5671,
5682
],
[
6145,
6156
]
],
[
[
1435,
1458
],
[
2068,
2091
]
],
[
[
1467,
1497
],
[
2550,
2580
]
],
[
[
1910,
1938
],
[
2911,
2939
]
],
[
[
2446,
2472
]
],
[
[
3106,
3122
]
]
] |