Dataset schema (one row per source file; the file text itself is carried in the content column):

| column | dtype | values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 – 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3–972 |
| max_stars_repo_name | string | length 6–130 |
| max_stars_repo_head_hexsha | string | length 40–78 |
| max_stars_repo_licenses | sequence | 1–10 items |
| max_stars_count | int64 | 1 – 191k, nullable (⌀) |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable (⌀) |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable (⌀) |
| max_issues_repo_path | string | length 3–972 |
| max_issues_repo_name | string | length 6–130 |
| max_issues_repo_head_hexsha | string | length 40–78 |
| max_issues_repo_licenses | sequence | 1–10 items |
| max_issues_count | int64 | 1 – 116k, nullable (⌀) |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable (⌀) |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable (⌀) |
| max_forks_repo_path | string | length 3–972 |
| max_forks_repo_name | string | length 6–130 |
| max_forks_repo_head_hexsha | string | length 40–78 |
| max_forks_repo_licenses | sequence | 1–10 items |
| max_forks_count | int64 | 1 – 105k, nullable (⌀) |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable (⌀) |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable (⌀) |
| content | string | length 3 – 1.03M |
| avg_line_length | float64 | 1.13 – 941k |
| max_line_length | int64 | 2 – 941k |
| alphanum_fraction | float64 | 0 – 1 |
29ea67d659da7653aa136c153a4cb40ea40cdc95 | 3,826 | py | Python | configs/MJOD_Net/MJOD_Net_ratio_1111_coco_mahjong_lr_0.02_batch_8_epoch_24.py | jaheel/MJOD-2136 | 81d4b8b79316f010279ef2c13a30827ae6b25c87 | ["Apache-2.0"] | null | null | null | configs/MJOD_Net/MJOD_Net_ratio_1111_coco_mahjong_lr_0.02_batch_8_epoch_24.py | jaheel/MJOD-2136 | 81d4b8b79316f010279ef2c13a30827ae6b25c87 | ["Apache-2.0"] | null | null | null | configs/MJOD_Net/MJOD_Net_ratio_1111_coco_mahjong_lr_0.02_batch_8_epoch_24.py | jaheel/MJOD-2136 | 81d4b8b79316f010279ef2c13a30827ae6b25c87 | ["Apache-2.0"] | null | null | null |
_base_ = [
'../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
]
model = dict(
type='MJODNet',
pretrained='open-mmlab://regnetx_400mf',
backbone=dict(
type='RegNet',
arch='regnetx_400mf',
out_indices=(3, ),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
norm_eval=True,
style='pytorch'),
neck=dict(
type='DepthwiseSeparableDilatedEncoder',
in_channels=384,
out_channels=512,
block_mid_channels=128,
num_residual_blocks=4,
block_dilations=[1, 1, 1, 1]),
bbox_head=dict(
type='MJODNetHead',
num_classes=34,
in_channels=512,
reg_decoded_bbox=True,
anchor_generator=dict(
type='AnchorGenerator',
ratios=[1.0],
scales=[1, 2, 4, 8, 16],
strides=[32]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[.0, .0, .0, .0],
target_stds=[1., 1., 1., 1.],
add_ctr_clamp=True,
ctr_clamp=32),
loss_cls=dict(
type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
loss_bbox=dict(type='GIoULoss', loss_weight=1.0)),
# training and testing settings
train_cfg=dict(
assigner=dict(
type='UniformAssigner', pos_ignore_thr=0.15, neg_ignore_thr=0.7),
allowed_border=-1,
pos_weight=-1,
debug=False),
test_cfg=dict(
nms_pre=1000,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.6),
max_per_img=100))
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco_mahjong/'
# use caffe img_norm
img_norm_cfg = dict(
mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='RandomShift', shift_ratio=0.5, max_shift_px=32),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
samples_per_gpu=8,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
evaluation = dict(interval=1, metric='bbox')
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=None)
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[16, 22])
runner = dict(type='EpochBasedRunner', max_epochs=24)
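# Illustrative note (not part of the original config): with a standard MMDetection
# checkout, a config file like this is typically launched as, e.g.
#   python tools/train.py configs/MJOD_Net/MJOD_Net_ratio_1111_coco_mahjong_lr_0.02_batch_8_epoch_24.py
# (assumed workflow; the MJOD-2136 repository may provide its own wrapper scripts).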
| 31.105691 | 77 | 0.600105 |
229dfdf9d194e0199778bec6ee0bd53635b538ad | 32,650 | py | Python | counterblock/lib/modules/dex/__init__.py | droplister/counterblock | 92de24fe0881388b7ffa31ea68eab72f7f1a47d0 | ["MIT"] | null | null | null | counterblock/lib/modules/dex/__init__.py | droplister/counterblock | 92de24fe0881388b7ffa31ea68eab72f7f1a47d0 | ["MIT"] | null | null | null | counterblock/lib/modules/dex/__init__.py | droplister/counterblock | 92de24fe0881388b7ffa31ea68eab72f7f1a47d0 | ["MIT"] | null | null | null |
"""
Implements counterwallet asset-related support as a counterblock plugin
DEPENDENCIES: This module requires the assets module to be loaded before it.
Python 3.x (this module imports urllib.request/urllib.parse and configparser from the Python 3 standard library)
"""
import os
import sys
import time
import datetime
import logging
import decimal
import urllib.request
import urllib.parse
import urllib.error
import json
import operator
import base64
import configparser
import calendar
import pymongo
from bson.son import SON
import dateutil.parser
from counterblock.lib import config, util, blockfeed, blockchain
from counterblock.lib.modules import DEX_PRIORITY_PARSE_TRADEBOOK
from counterblock.lib.processor import MessageProcessor, MempoolMessageProcessor, BlockProcessor, StartUpProcessor, CaughtUpProcessor, RollbackProcessor, API, start_task
from . import assets_trading, dex
D = decimal.Decimal
EIGHT_PLACES = decimal.Decimal(10) ** -8
COMPILE_MARKET_PAIR_INFO_PERIOD = 10 * 60 # in seconds (this is every 10 minutes currently)
COMPILE_ASSET_MARKET_INFO_PERIOD = 30 * 60 # in seconds (this is every 30 minutes currently)
logger = logging.getLogger(__name__)
@API.add_method
def get_market_price_summary(asset1, asset2, with_last_trades=0):
# DEPRECATED 1.5
result = assets_trading.get_market_price_summary(asset1, asset2, with_last_trades)
return result if result is not None else False
#^ due to current bug in our jsonrpc stack, just return False if None is returned
@API.add_method
def get_market_cap_history(start_ts=None, end_ts=None):
now_ts = calendar.timegm(time.gmtime())
if not end_ts: # default to current datetime
end_ts = now_ts
if not start_ts: # default to 30 days before the end date
start_ts = end_ts - (30 * 24 * 60 * 60)
data = {}
results = {}
#^ format is result[market_cap_as][asset] = [[block_time, market_cap], [block_time2, market_cap2], ...]
for market_cap_as in (config.XCP, config.BTC):
caps = config.mongo_db.asset_marketcap_history.aggregate([
{"$match": {
"market_cap_as": market_cap_as,
"block_time": {
"$gte": datetime.datetime.utcfromtimestamp(start_ts)
} if end_ts == now_ts else {
"$gte": datetime.datetime.utcfromtimestamp(start_ts),
"$lte": datetime.datetime.utcfromtimestamp(end_ts)
}
}},
{"$project": {
"year": {"$year": "$block_time"},
"month": {"$month": "$block_time"},
"day": {"$dayOfMonth": "$block_time"},
"hour": {"$hour": "$block_time"},
"asset": 1,
"market_cap": 1,
}},
{"$sort": {"block_time": pymongo.ASCENDING}},
{"$group": {
"_id": {"asset": "$asset", "year": "$year", "month": "$month", "day": "$day", "hour": "$hour"},
"market_cap": {"$avg": "$market_cap"}, # use the average marketcap during the interval
}},
])
data[market_cap_as] = {}
for e in caps:
interval_time = int(calendar.timegm(datetime.datetime(e['_id']['year'], e['_id']['month'], e['_id']['day'], e['_id']['hour']).timetuple()) * 1000)
data[market_cap_as].setdefault(e['_id']['asset'], [])
data[market_cap_as][e['_id']['asset']].append([interval_time, e['market_cap']])
results[market_cap_as] = []
for asset in data[market_cap_as]:
#for z in data[market_cap_as][asset]: assert z[0] and z[0] > 0 and z[1] and z[1] >= 0
results[market_cap_as].append(
{'name': asset, 'data': sorted(data[market_cap_as][asset], key=operator.itemgetter(0))})
return results
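# Illustrative helper (not part of the original module; the name below is made up):
# shows how the hourly aggregation buckets produced above are converted to epoch
# milliseconds for charting, e.g. the bucket for 2021-03-14 15:00 UTC becomes 1615734000000.
def _example_hourly_bucket_to_ms(year=2021, month=3, day=14, hour=15):
    # calendar.timegm treats the naive datetime as UTC, matching the $year/$month/
    # $dayOfMonth/$hour projection used in the aggregation pipeline above.
    return int(calendar.timegm(datetime.datetime(year, month, day, hour).timetuple()) * 1000)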
@API.add_method
def get_market_info(assets):
assets_market_info = list(config.mongo_db.asset_market_info.find({'asset': {'$in': assets}}, {'_id': 0}))
extended_asset_info = config.mongo_db.asset_extended_info.find({'asset': {'$in': assets}})
extended_asset_info_dict = {}
for e in extended_asset_info:
if not e.get('disabled', False): # skip assets marked disabled
extended_asset_info_dict[e['asset']] = e
for a in assets_market_info:
if a['asset'] in extended_asset_info_dict and extended_asset_info_dict[a['asset']].get('processed', False):
extended_info = extended_asset_info_dict[a['asset']]
a['extended_image'] = bool(extended_info.get('image', ''))
a['extended_description'] = extended_info.get('description', '')
a['extended_website'] = extended_info.get('website', '')
a['extended_pgpsig'] = extended_info.get('pgpsig', '')
else:
a['extended_image'] = a['extended_description'] = a['extended_website'] = a['extended_pgpsig'] = ''
return assets_market_info
@API.add_method
def get_market_info_leaderboard(limit=100):
"""returns market leaderboard data for both the XCP and BTC markets"""
# do two queries because we limit by our sorted results, and we might miss an asset with a high BTC trading value
# but with little or no XCP trading activity, for instance if we just did one query
assets_market_info_xcp = list(config.mongo_db.asset_market_info.find({}, {'_id': 0}).sort('market_cap_in_{}'.format(config.XCP.lower()), pymongo.DESCENDING).limit(limit))
assets_market_info_btc = list(config.mongo_db.asset_market_info.find({}, {'_id': 0}).sort('market_cap_in_{}'.format(config.BTC.lower()), pymongo.DESCENDING).limit(limit))
assets_market_info = {
config.XCP.lower(): [a for a in assets_market_info_xcp if a['price_in_{}'.format(config.XCP.lower())]],
config.BTC.lower(): [a for a in assets_market_info_btc if a['price_in_{}'.format(config.BTC.lower())]]
}
# throw on extended info, if it exists for a given asset
assets = list(set([a['asset'] for a in assets_market_info[config.XCP.lower()]] + [a['asset'] for a in assets_market_info[config.BTC.lower()]]))
extended_asset_info = config.mongo_db.asset_extended_info.find({'asset': {'$in': assets}})
extended_asset_info_dict = {}
for e in extended_asset_info:
if not e.get('disabled', False): # skip assets marked disabled
extended_asset_info_dict[e['asset']] = e
for r in (assets_market_info[config.XCP.lower()], assets_market_info[config.BTC.lower()]):
for a in r:
if a['asset'] in extended_asset_info_dict:
extended_info = extended_asset_info_dict[a['asset']]
if 'extended_image' not in a or 'extended_description' not in a or 'extended_website' not in a:
continue # asset has been recognized as having a JSON file description, but has not been successfully processed yet
a['extended_image'] = bool(extended_info.get('image', ''))
a['extended_description'] = extended_info.get('description', '')
a['extended_website'] = extended_info.get('website', '')
else:
a['extended_image'] = a['extended_description'] = a['extended_website'] = ''
return assets_market_info
@API.add_method
def get_market_price_history(asset1, asset2, start_ts=None, end_ts=None, as_dict=False):
"""Return block-by-block aggregated market history data for the specified asset pair, within the specified date range.
@returns List of lists (or list of dicts, if as_dict is specified).
* If as_dict is False, each embedded list has 8 elements [block time (epoch in MS), open, high, low, close, volume, # trades in block, block index]
* If as_dict is True, each dict in the list has the keys: block_time (epoch in MS), block_index, open, high, low, close, vol, count
Aggregate on an an hourly basis
"""
now_ts = calendar.timegm(time.gmtime())
if not end_ts: # default to current datetime
end_ts = now_ts
if not start_ts: # default to 180 days before the end date
start_ts = end_ts - (180 * 24 * 60 * 60)
base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
# get ticks -- open, high, low, close, volume
result = config.mongo_db.trades.aggregate([
{"$match": {
"base_asset": base_asset,
"quote_asset": quote_asset,
"block_time": {
"$gte": datetime.datetime.utcfromtimestamp(start_ts)
} if end_ts == now_ts else {
"$gte": datetime.datetime.utcfromtimestamp(start_ts),
"$lte": datetime.datetime.utcfromtimestamp(end_ts)
}
}},
{"$project": {
"year": {"$year": "$block_time"},
"month": {"$month": "$block_time"},
"day": {"$dayOfMonth": "$block_time"},
"hour": {"$hour": "$block_time"},
"block_index": 1,
"unit_price": 1,
"base_quantity_normalized": 1 # to derive volume
}},
{"$group": {
"_id": {"year": "$year", "month": "$month", "day": "$day", "hour": "$hour"},
"open": {"$first": "$unit_price"},
"high": {"$max": "$unit_price"},
"low": {"$min": "$unit_price"},
"close": {"$last": "$unit_price"},
"vol": {"$sum": "$base_quantity_normalized"},
"count": {"$sum": 1},
}},
{"$sort": SON([("_id.year", pymongo.ASCENDING), ("_id.month", pymongo.ASCENDING), ("_id.day", pymongo.ASCENDING), ("_id.hour", pymongo.ASCENDING)])},
])
result = list(result)
if not len(result):
return False
midline = [((r['high'] + r['low']) / 2.0) for r in result]
if as_dict:
for i in range(len(result)):
result[i]['interval_time'] = int(calendar.timegm(datetime.datetime(
result[i]['_id']['year'], result[i]['_id']['month'], result[i]['_id']['day'], result[i]['_id']['hour']).timetuple()) * 1000)
result[i]['midline'] = midline[i]
del result[i]['_id']
return result
else:
list_result = []
for i in range(len(result)):
list_result.append([
int(calendar.timegm(datetime.datetime(
result[i]['_id']['year'], result[i]['_id']['month'], result[i]['_id']['day'], result[i]['_id']['hour']).timetuple()) * 1000),
result[i]['open'], result[i]['high'], result[i]['low'], result[i]['close'], result[i]['vol'],
result[i]['count'], midline[i]
])
return list_result
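# Illustrative note (not part of the original module): with as_dict=False one entry of
# the list returned above has the shape
#   [interval_time_ms, open, high, low, close, vol, count, midline]
# where midline is simply (high + low) / 2.0 for that hourly bucket; for example
# (assumed values) [1615734000000, 0.0012, 0.0015, 0.0011, 0.0013, 250.0, 7, 0.0013].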
@API.add_method
def get_trade_history(asset1=None, asset2=None, start_ts=None, end_ts=None, limit=50):
"""
Gets the last N trades within a specific date range (normally for a specified asset pair, but the assets can
be left blank to get any/all trades).
"""
assert (asset1 and asset2) or (not asset1 and not asset2) # cannot have one asset, but not the other
if limit > 500:
raise Exception("Requesting history of too many trades")
now_ts = calendar.timegm(time.gmtime())
if not end_ts: # default to current datetime
end_ts = now_ts
if not start_ts: # default to 30 days before the end date
start_ts = end_ts - (30 * 24 * 60 * 60)
filters = {
"block_time": {
"$gte": datetime.datetime.utcfromtimestamp(start_ts)
} if end_ts == now_ts else {
"$gte": datetime.datetime.utcfromtimestamp(start_ts),
"$lte": datetime.datetime.utcfromtimestamp(end_ts)
}
}
if asset1 and asset2:
base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
filters["base_asset"] = base_asset
filters["quote_asset"] = quote_asset
last_trades = config.mongo_db.trades.find(filters, {'_id': 0}).sort("block_time", pymongo.DESCENDING).limit(limit)
if not last_trades.count():
return False # no suitable trade data to form a market price
last_trades = list(last_trades)
return last_trades
def _get_order_book(base_asset, quote_asset,
bid_book_min_pct_fee_provided=None, bid_book_min_pct_fee_required=None, bid_book_max_pct_fee_required=None,
ask_book_min_pct_fee_provided=None, ask_book_min_pct_fee_required=None, ask_book_max_pct_fee_required=None):
"""Gets the current order book for a specified asset pair
@param bid_book_*/ask_book_* fee bounds: Only relevant when one side of the pair is BTC. If specified,
the corresponding book is pruned down to only show orders whose remaining fee_provided / fee_required
percentages satisfy the given minimum / maximum bounds.
"""
base_asset_info = config.mongo_db.tracked_assets.find_one({'asset': base_asset})
quote_asset_info = config.mongo_db.tracked_assets.find_one({'asset': quote_asset})
if not base_asset_info or not quote_asset_info:
raise Exception("Invalid asset(s)")
# TODO: limit # results to 8 or so for each book (we have to sort as well to limit)
base_bid_filters = [
{"field": "get_asset", "op": "==", "value": base_asset},
{"field": "give_asset", "op": "==", "value": quote_asset},
]
base_ask_filters = [
{"field": "get_asset", "op": "==", "value": quote_asset},
{"field": "give_asset", "op": "==", "value": base_asset},
]
if base_asset == config.BTC or quote_asset == config.BTC:
extra_filters = [
{'field': 'give_remaining', 'op': '>', 'value': 0}, # don't show empty BTC orders
{'field': 'get_remaining', 'op': '>', 'value': 0}, # don't show empty BTC orders
{'field': 'fee_required_remaining', 'op': '>=', 'value': 0},
{'field': 'fee_provided_remaining', 'op': '>=', 'value': 0},
]
base_bid_filters += extra_filters
base_ask_filters += extra_filters
base_bid_orders = util.call_jsonrpc_api(
"get_orders", {
'filters': base_bid_filters,
'show_expired': False,
'status': 'open',
'order_by': 'block_index',
'order_dir': 'asc',
}, abort_on_error=True)['result']
base_ask_orders = util.call_jsonrpc_api(
"get_orders", {
'filters': base_ask_filters,
'show_expired': False,
'status': 'open',
'order_by': 'block_index',
'order_dir': 'asc',
}, abort_on_error=True)['result']
def get_o_pct(o):
if o['give_asset'] == config.BTC: # NB: fee_provided could be zero here
pct_fee_provided = float((D(o['fee_provided_remaining']) / D(o['give_quantity'])))
else:
pct_fee_provided = None
if o['get_asset'] == config.BTC: # NB: fee_required could be zero here
pct_fee_required = float((D(o['fee_required_remaining']) / D(o['get_quantity'])))
else:
pct_fee_required = None
return pct_fee_provided, pct_fee_required
# filter results by pct_fee_provided and pct_fee_required for BTC pairs as appropriate
filtered_base_bid_orders = []
filtered_base_ask_orders = []
if base_asset == config.BTC or quote_asset == config.BTC:
for o in base_bid_orders:
pct_fee_provided, pct_fee_required = get_o_pct(o)
addToBook = True
if bid_book_min_pct_fee_provided is not None and pct_fee_provided is not None and pct_fee_provided < bid_book_min_pct_fee_provided:
addToBook = False
if bid_book_min_pct_fee_required is not None and pct_fee_required is not None and pct_fee_required < bid_book_min_pct_fee_required:
addToBook = False
if bid_book_max_pct_fee_required is not None and pct_fee_required is not None and pct_fee_required > bid_book_max_pct_fee_required:
addToBook = False
if addToBook:
filtered_base_bid_orders.append(o)
for o in base_ask_orders:
pct_fee_provided, pct_fee_required = get_o_pct(o)
addToBook = True
if ask_book_min_pct_fee_provided is not None and pct_fee_provided is not None and pct_fee_provided < ask_book_min_pct_fee_provided:
addToBook = False
if ask_book_min_pct_fee_required is not None and pct_fee_required is not None and pct_fee_required < ask_book_min_pct_fee_required:
addToBook = False
if ask_book_max_pct_fee_required is not None and pct_fee_required is not None and pct_fee_required > ask_book_max_pct_fee_required:
addToBook = False
if addToBook:
filtered_base_ask_orders.append(o)
else:
filtered_base_bid_orders += base_bid_orders
filtered_base_ask_orders += base_ask_orders
def make_book(orders, isBidBook):
book = {}
for o in orders:
if o['give_asset'] == base_asset:
if base_asset == config.BTC and o['give_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF:
continue # filter dust orders, if necessary
give_quantity = blockchain.normalize_quantity(o['give_quantity'], base_asset_info['divisible'])
get_quantity = blockchain.normalize_quantity(o['get_quantity'], quote_asset_info['divisible'])
unit_price = float((D(get_quantity) / D(give_quantity)))
remaining = blockchain.normalize_quantity(o['give_remaining'], base_asset_info['divisible'])
else:
if quote_asset == config.BTC and o['give_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF:
continue # filter dust orders, if necessary
give_quantity = blockchain.normalize_quantity(o['give_quantity'], quote_asset_info['divisible'])
get_quantity = blockchain.normalize_quantity(o['get_quantity'], base_asset_info['divisible'])
unit_price = float((D(give_quantity) / D(get_quantity)))
remaining = blockchain.normalize_quantity(o['get_remaining'], base_asset_info['divisible'])
id = "%s_%s_%s" % (base_asset, quote_asset, unit_price)
#^ key = {base}_{quote}_{unit_price}, values ref entries in book
book.setdefault(id, {'unit_price': unit_price, 'quantity': 0, 'count': 0})
book[id]['quantity'] += remaining # base quantity outstanding
book[id]['count'] += 1 # num orders at this price level
book = sorted(iter(book.values()), key=operator.itemgetter('unit_price'), reverse=isBidBook)
#^ convert to list and sort -- bid book = descending, ask book = ascending
return book
# compile into a single book, at volume tiers
base_bid_book = make_book(filtered_base_bid_orders, True)
base_ask_book = make_book(filtered_base_ask_orders, False)
# get stats like the spread and median
if base_bid_book and base_ask_book:
# don't do abs(), as this is "the amount by which the ask price exceeds the bid", so I guess it could be negative
# if there is overlap in the book (right?)
bid_ask_spread = float((D(base_ask_book[0]['unit_price']) - D(base_bid_book[0]['unit_price'])))
bid_ask_median = float((D(max(base_ask_book[0]['unit_price'], base_bid_book[0]['unit_price'])) - (D(abs(bid_ask_spread)) / 2)))
else:
bid_ask_spread = 0
bid_ask_median = 0
# compose depth and round out quantities
bid_depth = D(0)
for o in base_bid_book:
o['quantity'] = float(D(o['quantity']))
bid_depth += D(o['quantity'])
o['depth'] = float(D(bid_depth))
bid_depth = float(D(bid_depth))
ask_depth = D(0)
for o in base_ask_book:
o['quantity'] = float(D(o['quantity']))
ask_depth += D(o['quantity'])
o['depth'] = float(D(ask_depth))
ask_depth = float(D(ask_depth))
# compose raw orders
orders = filtered_base_bid_orders + filtered_base_ask_orders
for o in orders:
# add in the blocktime to help makes interfaces more user-friendly (i.e. avoid displaying block
# indexes and display datetimes instead)
o['block_time'] = calendar.timegm(util.get_block_time(o['block_index']).timetuple()) * 1000
result = {
'base_bid_book': base_bid_book,
'base_ask_book': base_ask_book,
'bid_depth': bid_depth,
'ask_depth': ask_depth,
'bid_ask_spread': bid_ask_spread,
'bid_ask_median': bid_ask_median,
'raw_orders': orders,
'base_asset': base_asset,
'quote_asset': quote_asset
}
return result
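# Illustrative note (not part of the original module): the BTC fee fractions used for
# the filtering above are plain ratios of the raw satoshi fields, e.g. an order with
# fee_provided_remaining=10000 and give_quantity=1000000 (assumed values) yields
# pct_fee_provided = 10000 / 1000000 = 0.01, i.e. a 1% fee provided on the BTC side.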
@API.add_method
def get_order_book_simple(asset1, asset2, min_pct_fee_provided=None, max_pct_fee_required=None):
# DEPRECATED 1.5
base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
result = _get_order_book(
base_asset, quote_asset,
bid_book_min_pct_fee_provided=min_pct_fee_provided,
bid_book_max_pct_fee_required=max_pct_fee_required,
ask_book_min_pct_fee_provided=min_pct_fee_provided,
ask_book_max_pct_fee_required=max_pct_fee_required)
return result
@API.add_method
def get_order_book_buysell(buy_asset, sell_asset, pct_fee_provided=None, pct_fee_required=None):
# DEPRECATED 1.5
base_asset, quote_asset = util.assets_to_asset_pair(buy_asset, sell_asset)
bid_book_min_pct_fee_provided = None
bid_book_min_pct_fee_required = None
bid_book_max_pct_fee_required = None
ask_book_min_pct_fee_provided = None
ask_book_min_pct_fee_required = None
ask_book_max_pct_fee_required = None
if base_asset == config.BTC:
if buy_asset == config.BTC:
# if BTC is base asset and we're buying it, we're buying the BASE. we require a BTC fee (we're on the bid (bottom) book and we want a lower price)
# - show BASE buyers (bid book) that require a BTC fee >= what we require (our side of the book)
# - show BASE sellers (ask book) that provide a BTC fee >= what we require
bid_book_min_pct_fee_required = pct_fee_required # my competition at the given fee required
ask_book_min_pct_fee_provided = pct_fee_required
elif sell_asset == config.BTC:
# if BTC is base asset and we're selling it, we're selling the BASE. we provide a BTC fee (we're on the ask (top) book and we want a higher price)
# - show BASE buyers (bid book) that provide a BTC fee >= what we provide
# - show BASE sellers (ask book) that require a BTC fee <= what we provide (our side of the book)
bid_book_max_pct_fee_required = pct_fee_provided
ask_book_min_pct_fee_provided = pct_fee_provided # my competition at the given fee provided
elif quote_asset == config.BTC:
assert base_asset == config.XCP # only time when this is the case
if buy_asset == config.BTC:
# if BTC is quote asset and we're buying it, we're selling the BASE. we require a BTC fee (we're on the ask (top) book and we want a higher price)
# - show BASE buyers (bid book) that provide a BTC fee >= what we require
# - show BASE sellers (ask book) that require a BTC fee >= what we require (our side of the book)
bid_book_min_pct_fee_provided = pct_fee_required
ask_book_min_pct_fee_required = pct_fee_required # my competition at the given fee required
elif sell_asset == config.BTC:
# if BTC is quote asset and we're selling it, we're buying the BASE. we provide a BTC fee (we're on the bid (bottom) book and we want a lower price)
# - show BASE buyers (bid book) that provide a BTC fee >= what we provide (our side of the book)
# - show BASE sellers (ask book) that require a BTC fee <= what we provide
bid_book_min_pct_fee_provided = pct_fee_provided # my competition at the given fee provided
ask_book_max_pct_fee_required = pct_fee_provided
result = _get_order_book(
base_asset, quote_asset,
bid_book_min_pct_fee_provided=bid_book_min_pct_fee_provided,
bid_book_min_pct_fee_required=bid_book_min_pct_fee_required,
bid_book_max_pct_fee_required=bid_book_max_pct_fee_required,
ask_book_min_pct_fee_provided=ask_book_min_pct_fee_provided,
ask_book_min_pct_fee_required=ask_book_min_pct_fee_required,
ask_book_max_pct_fee_required=ask_book_max_pct_fee_required)
# filter down raw_orders to be only open sell orders for what the caller is buying
open_sell_orders = []
for o in result['raw_orders']:
if o['give_asset'] == buy_asset:
open_sell_orders.append(o)
result['raw_orders'] = open_sell_orders
return result
@API.add_method
def get_users_pairs(addresses=[], max_pairs=12):
return dex.get_users_pairs(addresses, max_pairs, quote_assets=['XCP', 'XBTC'])
@API.add_method
def get_market_orders(asset1, asset2, addresses=[], min_fee_provided=0.95, max_fee_required=0.95):
return dex.get_market_orders(asset1, asset2, addresses, None, min_fee_provided, max_fee_required)
@API.add_method
def get_market_trades(asset1, asset2, addresses=[], limit=50):
return dex.get_market_trades(asset1, asset2, addresses, limit)
@API.add_method
def get_markets_list(quote_asset=None, order_by=None):
return dex.get_markets_list(quote_asset=quote_asset, order_by=order_by)
@API.add_method
def get_market_details(asset1, asset2, min_fee_provided=0.95, max_fee_required=0.95):
return dex.get_market_details(asset1, asset2, min_fee_provided, max_fee_required)
def task_compile_asset_pair_market_info():
assets_trading.compile_asset_pair_market_info()
# all done for this run...call again in a bit
start_task(task_compile_asset_pair_market_info, delay=COMPILE_MARKET_PAIR_INFO_PERIOD)
def task_compile_asset_market_info():
assets_trading.compile_asset_market_info()
# all done for this run...call again in a bit
start_task(task_compile_asset_market_info, delay=COMPILE_ASSET_MARKET_INFO_PERIOD)
@MessageProcessor.subscribe(priority=DEX_PRIORITY_PARSE_TRADEBOOK)
def parse_trade_book(msg, msg_data):
# book trades
if(msg['category'] == 'order_matches' and
((msg['command'] == 'update' and msg_data['status'] == 'completed') or # for a trade with BTC involved, but that is settled (completed)
('forward_asset' in msg_data and msg_data['forward_asset'] != config.BTC and msg_data['backward_asset'] != config.BTC)
)
): # or for a trade without BTC on either end
if msg['command'] == 'update' and msg_data['status'] == 'completed':
# an order is being updated to a completed status (i.e. a BTCpay has completed)
tx0_hash, tx1_hash = msg_data['order_match_id'][:64], msg_data['order_match_id'][65:]
# get the order_match this btcpay settles
order_match = util.jsonrpc_api(
"get_order_matches",
{'filters': [
{'field': 'tx0_hash', 'op': '==', 'value': tx0_hash},
{'field': 'tx1_hash', 'op': '==', 'value': tx1_hash}]
}, abort_on_error=False)['result'][0]
else:
assert msg_data['status'] == 'completed' # should not enter a pending state for non BTC matches
order_match = msg_data
forward_asset_info = config.mongo_db.tracked_assets.find_one({'asset': order_match['forward_asset']})
backward_asset_info = config.mongo_db.tracked_assets.find_one({'asset': order_match['backward_asset']})
assert forward_asset_info and backward_asset_info
base_asset, quote_asset = util.assets_to_asset_pair(order_match['forward_asset'], order_match['backward_asset'])
# don't create trade records from order matches with BTC that are under the dust limit
if((order_match['forward_asset'] == config.BTC and
order_match['forward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF)
or (order_match['backward_asset'] == config.BTC and
order_match['backward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF)):
logger.debug("Order match %s ignored due to %s under dust limit." % (order_match['tx0_hash'] + order_match['tx1_hash'], config.BTC))
return 'ABORT_THIS_MESSAGE_PROCESSING'
# take divisible trade quantities to floating point
forward_quantity = blockchain.normalize_quantity(order_match['forward_quantity'], forward_asset_info['divisible'])
backward_quantity = blockchain.normalize_quantity(order_match['backward_quantity'], backward_asset_info['divisible'])
# compose trade
trade = {
'block_index': config.state['cur_block']['block_index'],
'block_time': config.state['cur_block']['block_time_obj'],
'message_index': msg['message_index'], # secondary temporal ordering within the block
'order_match_id': order_match['tx0_hash'] + '_' + order_match['tx1_hash'],
'order_match_tx0_index': order_match['tx0_index'],
'order_match_tx1_index': order_match['tx1_index'],
'order_match_tx0_address': order_match['tx0_address'],
'order_match_tx1_address': order_match['tx1_address'],
'base_asset': base_asset,
'quote_asset': quote_asset,
'base_quantity': order_match['forward_quantity'] if order_match['forward_asset'] == base_asset else order_match['backward_quantity'],
'quote_quantity': order_match['backward_quantity'] if order_match['forward_asset'] == base_asset else order_match['forward_quantity'],
'base_quantity_normalized': forward_quantity if order_match['forward_asset'] == base_asset else backward_quantity,
'quote_quantity_normalized': backward_quantity if order_match['forward_asset'] == base_asset else forward_quantity,
}
d = D(trade['quote_quantity_normalized']) / D(trade['base_quantity_normalized'])
d = d.quantize(EIGHT_PLACES, rounding=decimal.ROUND_HALF_EVEN, context=decimal.Context(prec=30))
trade['unit_price'] = float(d)
d = D(trade['base_quantity_normalized']) / D(trade['quote_quantity_normalized'])
d = d.quantize(EIGHT_PLACES, rounding=decimal.ROUND_HALF_EVEN, context=decimal.Context(prec=30))
trade['unit_price_inverse'] = float(d)
config.mongo_db.trades.insert(trade)
logger.info("Processed Trade from tx %s :: %s" % (msg['message_index'], trade))
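# Illustrative note (not part of the original module): the unit prices composed above
# are quantized to EIGHT_PLACES with banker's rounding, e.g.
#   (D('1') / D('3')).quantize(EIGHT_PLACES, rounding=decimal.ROUND_HALF_EVEN,
#                              context=decimal.Context(prec=30))
# gives Decimal('0.33333333'), which is then stored as a float.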
@StartUpProcessor.subscribe()
def init():
# init db and indexes
# trades
config.mongo_db.trades.ensure_index(
[("base_asset", pymongo.ASCENDING),
("quote_asset", pymongo.ASCENDING),
("block_time", pymongo.DESCENDING)
])
config.mongo_db.trades.ensure_index( # tasks.py and elsewhere (for singlular block_index index access)
[("block_index", pymongo.ASCENDING),
("base_asset", pymongo.ASCENDING),
("quote_asset", pymongo.ASCENDING)
])
# asset_market_info
config.mongo_db.asset_market_info.ensure_index('asset', unique=True)
# asset_marketcap_history
config.mongo_db.asset_marketcap_history.ensure_index('block_index')
config.mongo_db.asset_marketcap_history.ensure_index( # tasks.py
[
("market_cap_as", pymongo.ASCENDING),
("asset", pymongo.ASCENDING),
("block_index", pymongo.DESCENDING)
])
config.mongo_db.asset_marketcap_history.ensure_index( # api.py
[
("market_cap_as", pymongo.ASCENDING),
("block_time", pymongo.DESCENDING)
])
# asset_pair_market_info
config.mongo_db.asset_pair_market_info.ensure_index( # event.py, api.py
[("base_asset", pymongo.ASCENDING),
("quote_asset", pymongo.ASCENDING)
], unique=True)
config.mongo_db.asset_pair_market_info.ensure_index('last_updated')
@CaughtUpProcessor.subscribe()
def start_tasks():
start_task(task_compile_asset_pair_market_info)
start_task(task_compile_asset_market_info)
@RollbackProcessor.subscribe()
def process_rollback(max_block_index):
if not max_block_index: # full reparse
config.mongo_db.trades.drop()
config.mongo_db.asset_market_info.drop()
config.mongo_db.asset_marketcap_history.drop()
config.mongo_db.pair_market_info.drop()
else: # rollback
config.mongo_db.trades.remove({"block_index": {"$gt": max_block_index}})
config.mongo_db.asset_marketcap_history.remove({"block_index": {"$gt": max_block_index}})
| 49.620061 | 174 | 0.656815 |
3ce69e30cf81b3eeb5702376274eb1c9eaada075 | 3,702 | py | Python | devilry/utils/graphviz/dot.py | aless80/devilry-django | 416c262e75170d5662542f15e2d7fecf5ab84730 | ["BSD-3-Clause"] | 29 | 2015-01-18T22:56:23.000Z | 2020-11-10T21:28:27.000Z | devilry/utils/graphviz/dot.py | aless80/devilry-django | 416c262e75170d5662542f15e2d7fecf5ab84730 | ["BSD-3-Clause"] | 786 | 2015-01-06T16:10:18.000Z | 2022-03-16T11:10:50.000Z | devilry/utils/graphviz/dot.py | aless80/devilry-django | 416c262e75170d5662542f15e2d7fecf5ab84730 | ["BSD-3-Clause"] | 15 | 2015-04-06T06:18:43.000Z | 2021-02-24T12:28:30.000Z |
class UmlField(list):
def __init__(self, name, fieldtype='', visibility='+'):
self.name = name
self.fieldtype = fieldtype
self.visibility = visibility
def __str__(self):
return '%(visibility)s %(name)s: %(fieldtype)s' % self.__dict__
class UmlClassLabel(object):
table_tpl = '<\n<TABLE BORDER="0" CELLBORDER="1" CELLPADDING="6" '\
'CELLSPACING="0">\n%s</TABLE>>'
headrow_tpl = ' <TR><TD bgcolor="#222222" align="CENTER">'\
'<FONT COLOR="#ffffff" point-size="12">%s</FONT></TD></TR>\n'
partrow_tpl = ' <TR><TD bgcolor="#ffffff" balign="LEFT" align="LEFT">%s</TD></TR>\n'
def __init__(self, title, values=[], methods=[]):
self.title = title
self.values = values
self.methods = methods
def __str__(self):
label = [self.headrow_tpl % self.title]
def add(part):
label.append(self.partrow_tpl % '<BR/>\n'.join(
[str(x) for x in part]))
if self.values:
add(self.values)
if self.methods:
add(self.methods)
return self.table_tpl % '\n'.join(label)
class Edge(object):
def __init__(self, taillabel="", headlabel="", label='',
arrowhead='none', color='#777777'):
self.headlabel = headlabel
self.taillabel = taillabel
self.label = label
self.arrowhead = arrowhead
self.color = color
def __str__(self):
return ('edge[arrowhead="%(arrowhead)s", '
'color="%(color)s", '
'label="%(label)s", '
'headlabel="%(headlabel)s", '
'taillabel="%(taillabel)s"]') % self.__dict__
class Association(object):
def __init__(self, a, b, edge):
self.a = a
self.b = b
self.edge = edge
def tostring(self, edgeop):
edge = self.edge
a = self.a
b = self.b
return '%(edge)s\n %(a)s %(edgeop)s %(b)s' % vars()
class Node(object):
def __init__(self, id, label):
if id.lower() in ('node', 'edge', 'graph', 'digraph', 'subgraph'):
self.id = '_' + id
else:
self.id = id
self.label = label
def __str__(self):
return '%(id)s [label=%(label)s]' % self.__dict__
def pixels_to_inches(px, dpi=75):
return px / float(dpi)
class Graph(object):
tpl = """
%(graphtype)s G {
fontname = "Lucida Grande"
fontsize = 10
%(size)s
node [
fontname = "Lucida Grande"
fontsize = 10
shape = "none"
]
edge [
fontname = "Lucida Grande"
fontsize = 10
]
%(items)s
}"""
def __init__(self, items, width=None, height=None):
self.set_directed()
self.items = list(items)
self.size = ''
if width:
w = pixels_to_inches(width)
h = pixels_to_inches(height)
self.size = 'size = "%.3f,%.3f"' % (w, h)
def __str__(self):
return self.tpl % dict(
size = self.size,
graphtype = self.graphtype,
items = '\n\n '.join(self.stritems()))
def stritems(self):
def formatitem(item):
if isinstance(item, Association):
return item.tostring(self.edgeop)
else:
return str(item)
return [formatitem(i) for i in self.items]
def set_directed(self):
self.graphtype = 'digraph'
self.edgeop = '->'
#def set_undirected(self):
#self.graphtype = 'graph'
#self.edgeop = '--'
def append(self, item):
self.items.append(item)
def extend(self, items):
self.items.extend(items)
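# Illustrative usage sketch (not part of the original module); the class and field
# names below are made up for demonstration only.
if __name__ == '__main__':
    student = Node('Student', UmlClassLabel('Student', values=[UmlField('name', 'str')]))
    group = Node('Group', UmlClassLabel('Group', values=[UmlField('members', 'int')]))
    link = Association(student.id, group.id, Edge(taillabel='1', headlabel='*'))
    print(Graph([student, group, link], width=800, height=600))  # emits DOT source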
| 28.259542 | 89 | 0.531334 |
4f0bd608791427979fa3c5d2bffac736abf2cc5c | 14,408 | py | Python | wrappers/python/virgil_crypto_lib/foundation/_c_bridge/_vscf_aes256_gcm.py | odidev/virgil-crypto-c | 3d5d5cb19fdcf81eab08cdc63647f040117ecbd8 | ["BSD-3-Clause"] | 26 | 2018-12-17T13:45:25.000Z | 2022-01-16T20:00:04.000Z | wrappers/python/virgil_crypto_lib/foundation/_c_bridge/_vscf_aes256_gcm.py | odidev/virgil-crypto-c | 3d5d5cb19fdcf81eab08cdc63647f040117ecbd8 | ["BSD-3-Clause"] | 4 | 2019-01-03T12:08:52.000Z | 2021-12-02T05:21:13.000Z | wrappers/python/virgil_crypto_lib/foundation/_c_bridge/_vscf_aes256_gcm.py | odidev/virgil-crypto-c | 3d5d5cb19fdcf81eab08cdc63647f040117ecbd8 | ["BSD-3-Clause"] | 8 | 2019-01-24T08:22:06.000Z | 2022-02-07T11:37:00.000Z |
# Copyright (C) 2015-2021 Virgil Security, Inc.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# (1) Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# (3) Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Lead Maintainer: Virgil Security Inc. <support@virgilsecurity.com>
from virgil_crypto_lib._libs import *
from ctypes import *
from ._vscf_impl import vscf_impl_t
from virgil_crypto_lib.common._c_bridge import vsc_data_t
from virgil_crypto_lib.common._c_bridge import vsc_buffer_t
class vscf_aes256_gcm_t(Structure):
pass
class VscfAes256Gcm(object):
"""Implementation of the symmetric cipher AES-256 bit in a GCM mode.
Note, this implementation contains dynamic memory allocations,
this should be improved in the future releases."""
# Cipher nonce length or IV length in bytes, or 0 if nonce is not required.
NONCE_LEN = 12
# Cipher key length in bytes.
KEY_LEN = 32
# Cipher key length in bits.
KEY_BITLEN = 256
# Cipher block length in bytes.
BLOCK_LEN = 16
# Defines authentication tag length in bytes.
AUTH_TAG_LEN = 16
def __init__(self):
"""Create underlying C context."""
self._ll = LowLevelLibs()
self._lib = self._ll.foundation
def vscf_aes256_gcm_new(self):
vscf_aes256_gcm_new = self._lib.vscf_aes256_gcm_new
vscf_aes256_gcm_new.argtypes = []
vscf_aes256_gcm_new.restype = POINTER(vscf_aes256_gcm_t)
return vscf_aes256_gcm_new()
def vscf_aes256_gcm_delete(self, ctx):
vscf_aes256_gcm_delete = self._lib.vscf_aes256_gcm_delete
vscf_aes256_gcm_delete.argtypes = [POINTER(vscf_aes256_gcm_t)]
vscf_aes256_gcm_delete.restype = None
return vscf_aes256_gcm_delete(ctx)
def vscf_aes256_gcm_alg_id(self, ctx):
"""Provide algorithm identificator."""
vscf_aes256_gcm_alg_id = self._lib.vscf_aes256_gcm_alg_id
vscf_aes256_gcm_alg_id.argtypes = [POINTER(vscf_aes256_gcm_t)]
vscf_aes256_gcm_alg_id.restype = c_int
return vscf_aes256_gcm_alg_id(ctx)
def vscf_aes256_gcm_produce_alg_info(self, ctx):
"""Produce object with algorithm information and configuration parameters."""
vscf_aes256_gcm_produce_alg_info = self._lib.vscf_aes256_gcm_produce_alg_info
vscf_aes256_gcm_produce_alg_info.argtypes = [POINTER(vscf_aes256_gcm_t)]
vscf_aes256_gcm_produce_alg_info.restype = POINTER(vscf_impl_t)
return vscf_aes256_gcm_produce_alg_info(ctx)
def vscf_aes256_gcm_restore_alg_info(self, ctx, alg_info):
"""Restore algorithm configuration from the given object."""
vscf_aes256_gcm_restore_alg_info = self._lib.vscf_aes256_gcm_restore_alg_info
vscf_aes256_gcm_restore_alg_info.argtypes = [POINTER(vscf_aes256_gcm_t), POINTER(vscf_impl_t)]
vscf_aes256_gcm_restore_alg_info.restype = c_int
return vscf_aes256_gcm_restore_alg_info(ctx, alg_info)
def vscf_aes256_gcm_encrypt(self, ctx, data, out):
"""Encrypt given data."""
vscf_aes256_gcm_encrypt = self._lib.vscf_aes256_gcm_encrypt
vscf_aes256_gcm_encrypt.argtypes = [POINTER(vscf_aes256_gcm_t), vsc_data_t, POINTER(vsc_buffer_t)]
vscf_aes256_gcm_encrypt.restype = c_int
return vscf_aes256_gcm_encrypt(ctx, data, out)
def vscf_aes256_gcm_encrypted_len(self, ctx, data_len):
"""Calculate required buffer length to hold the encrypted data."""
vscf_aes256_gcm_encrypted_len = self._lib.vscf_aes256_gcm_encrypted_len
vscf_aes256_gcm_encrypted_len.argtypes = [POINTER(vscf_aes256_gcm_t), c_size_t]
vscf_aes256_gcm_encrypted_len.restype = c_size_t
return vscf_aes256_gcm_encrypted_len(ctx, data_len)
def vscf_aes256_gcm_precise_encrypted_len(self, ctx, data_len):
"""Precise length calculation of encrypted data."""
vscf_aes256_gcm_precise_encrypted_len = self._lib.vscf_aes256_gcm_precise_encrypted_len
vscf_aes256_gcm_precise_encrypted_len.argtypes = [POINTER(vscf_aes256_gcm_t), c_size_t]
vscf_aes256_gcm_precise_encrypted_len.restype = c_size_t
return vscf_aes256_gcm_precise_encrypted_len(ctx, data_len)
def vscf_aes256_gcm_decrypt(self, ctx, data, out):
"""Decrypt given data."""
vscf_aes256_gcm_decrypt = self._lib.vscf_aes256_gcm_decrypt
vscf_aes256_gcm_decrypt.argtypes = [POINTER(vscf_aes256_gcm_t), vsc_data_t, POINTER(vsc_buffer_t)]
vscf_aes256_gcm_decrypt.restype = c_int
return vscf_aes256_gcm_decrypt(ctx, data, out)
def vscf_aes256_gcm_decrypted_len(self, ctx, data_len):
"""Calculate required buffer length to hold the decrypted data."""
vscf_aes256_gcm_decrypted_len = self._lib.vscf_aes256_gcm_decrypted_len
vscf_aes256_gcm_decrypted_len.argtypes = [POINTER(vscf_aes256_gcm_t), c_size_t]
vscf_aes256_gcm_decrypted_len.restype = c_size_t
return vscf_aes256_gcm_decrypted_len(ctx, data_len)
def vscf_aes256_gcm_set_nonce(self, ctx, nonce):
"""Setup IV or nonce."""
vscf_aes256_gcm_set_nonce = self._lib.vscf_aes256_gcm_set_nonce
vscf_aes256_gcm_set_nonce.argtypes = [POINTER(vscf_aes256_gcm_t), vsc_data_t]
vscf_aes256_gcm_set_nonce.restype = None
return vscf_aes256_gcm_set_nonce(ctx, nonce)
def vscf_aes256_gcm_set_key(self, ctx, key):
"""Set cipher encryption / decryption key."""
vscf_aes256_gcm_set_key = self._lib.vscf_aes256_gcm_set_key
vscf_aes256_gcm_set_key.argtypes = [POINTER(vscf_aes256_gcm_t), vsc_data_t]
vscf_aes256_gcm_set_key.restype = None
return vscf_aes256_gcm_set_key(ctx, key)
def vscf_aes256_gcm_start_encryption(self, ctx):
"""Start sequential encryption."""
vscf_aes256_gcm_start_encryption = self._lib.vscf_aes256_gcm_start_encryption
vscf_aes256_gcm_start_encryption.argtypes = [POINTER(vscf_aes256_gcm_t)]
vscf_aes256_gcm_start_encryption.restype = None
return vscf_aes256_gcm_start_encryption(ctx)
def vscf_aes256_gcm_start_decryption(self, ctx):
"""Start sequential decryption."""
vscf_aes256_gcm_start_decryption = self._lib.vscf_aes256_gcm_start_decryption
vscf_aes256_gcm_start_decryption.argtypes = [POINTER(vscf_aes256_gcm_t)]
vscf_aes256_gcm_start_decryption.restype = None
return vscf_aes256_gcm_start_decryption(ctx)
def vscf_aes256_gcm_update(self, ctx, data, out):
"""Process encryption or decryption of the given data chunk."""
vscf_aes256_gcm_update = self._lib.vscf_aes256_gcm_update
vscf_aes256_gcm_update.argtypes = [POINTER(vscf_aes256_gcm_t), vsc_data_t, POINTER(vsc_buffer_t)]
vscf_aes256_gcm_update.restype = None
return vscf_aes256_gcm_update(ctx, data, out)
def vscf_aes256_gcm_out_len(self, ctx, data_len):
"""Return buffer length required to hold an output of the methods
"update" or "finish" in an current mode.
Pass zero length to define buffer length of the method "finish"."""
vscf_aes256_gcm_out_len = self._lib.vscf_aes256_gcm_out_len
vscf_aes256_gcm_out_len.argtypes = [POINTER(vscf_aes256_gcm_t), c_size_t]
vscf_aes256_gcm_out_len.restype = c_size_t
return vscf_aes256_gcm_out_len(ctx, data_len)
def vscf_aes256_gcm_encrypted_out_len(self, ctx, data_len):
"""Return buffer length required to hold an output of the methods
"update" or "finish" in an encryption mode.
Pass zero length to define buffer length of the method "finish"."""
vscf_aes256_gcm_encrypted_out_len = self._lib.vscf_aes256_gcm_encrypted_out_len
vscf_aes256_gcm_encrypted_out_len.argtypes = [POINTER(vscf_aes256_gcm_t), c_size_t]
vscf_aes256_gcm_encrypted_out_len.restype = c_size_t
return vscf_aes256_gcm_encrypted_out_len(ctx, data_len)
def vscf_aes256_gcm_decrypted_out_len(self, ctx, data_len):
"""Return buffer length required to hold an output of the methods
"update" or "finish" in an decryption mode.
Pass zero length to define buffer length of the method "finish"."""
vscf_aes256_gcm_decrypted_out_len = self._lib.vscf_aes256_gcm_decrypted_out_len
vscf_aes256_gcm_decrypted_out_len.argtypes = [POINTER(vscf_aes256_gcm_t), c_size_t]
vscf_aes256_gcm_decrypted_out_len.restype = c_size_t
return vscf_aes256_gcm_decrypted_out_len(ctx, data_len)
def vscf_aes256_gcm_finish(self, ctx, out):
"""Accomplish encryption or decryption process."""
vscf_aes256_gcm_finish = self._lib.vscf_aes256_gcm_finish
vscf_aes256_gcm_finish.argtypes = [POINTER(vscf_aes256_gcm_t), POINTER(vsc_buffer_t)]
vscf_aes256_gcm_finish.restype = c_int
return vscf_aes256_gcm_finish(ctx, out)
def vscf_aes256_gcm_auth_encrypt(self, ctx, data, auth_data, out, tag):
"""Encrypt given data.
If 'tag' is not given, then it will be written to the 'enc'."""
vscf_aes256_gcm_auth_encrypt = self._lib.vscf_aes256_gcm_auth_encrypt
vscf_aes256_gcm_auth_encrypt.argtypes = [POINTER(vscf_aes256_gcm_t), vsc_data_t, vsc_data_t, POINTER(vsc_buffer_t), POINTER(vsc_buffer_t)]
vscf_aes256_gcm_auth_encrypt.restype = c_int
return vscf_aes256_gcm_auth_encrypt(ctx, data, auth_data, out, tag)
def vscf_aes256_gcm_auth_encrypted_len(self, ctx, data_len):
"""Calculate required buffer length to hold the authenticated encrypted data."""
vscf_aes256_gcm_auth_encrypted_len = self._lib.vscf_aes256_gcm_auth_encrypted_len
vscf_aes256_gcm_auth_encrypted_len.argtypes = [POINTER(vscf_aes256_gcm_t), c_size_t]
vscf_aes256_gcm_auth_encrypted_len.restype = c_size_t
return vscf_aes256_gcm_auth_encrypted_len(ctx, data_len)
def vscf_aes256_gcm_auth_decrypt(self, ctx, data, auth_data, tag, out):
"""Decrypt given data.
If 'tag' is not given, then it will be taken from the 'enc'."""
vscf_aes256_gcm_auth_decrypt = self._lib.vscf_aes256_gcm_auth_decrypt
vscf_aes256_gcm_auth_decrypt.argtypes = [POINTER(vscf_aes256_gcm_t), vsc_data_t, vsc_data_t, vsc_data_t, POINTER(vsc_buffer_t)]
vscf_aes256_gcm_auth_decrypt.restype = c_int
return vscf_aes256_gcm_auth_decrypt(ctx, data, auth_data, tag, out)
def vscf_aes256_gcm_auth_decrypted_len(self, ctx, data_len):
"""Calculate required buffer length to hold the authenticated decrypted data."""
vscf_aes256_gcm_auth_decrypted_len = self._lib.vscf_aes256_gcm_auth_decrypted_len
vscf_aes256_gcm_auth_decrypted_len.argtypes = [POINTER(vscf_aes256_gcm_t), c_size_t]
vscf_aes256_gcm_auth_decrypted_len.restype = c_size_t
return vscf_aes256_gcm_auth_decrypted_len(ctx, data_len)
def vscf_aes256_gcm_set_auth_data(self, ctx, auth_data):
"""Set additional data for for AEAD ciphers."""
vscf_aes256_gcm_set_auth_data = self._lib.vscf_aes256_gcm_set_auth_data
vscf_aes256_gcm_set_auth_data.argtypes = [POINTER(vscf_aes256_gcm_t), vsc_data_t]
vscf_aes256_gcm_set_auth_data.restype = None
return vscf_aes256_gcm_set_auth_data(ctx, auth_data)
def vscf_aes256_gcm_finish_auth_encryption(self, ctx, out, tag):
"""Accomplish an authenticated encryption and place tag separately.
Note, if authentication tag should be added to an encrypted data,
method "finish" can be used."""
vscf_aes256_gcm_finish_auth_encryption = self._lib.vscf_aes256_gcm_finish_auth_encryption
vscf_aes256_gcm_finish_auth_encryption.argtypes = [POINTER(vscf_aes256_gcm_t), POINTER(vsc_buffer_t), POINTER(vsc_buffer_t)]
vscf_aes256_gcm_finish_auth_encryption.restype = c_int
return vscf_aes256_gcm_finish_auth_encryption(ctx, out, tag)
def vscf_aes256_gcm_finish_auth_decryption(self, ctx, tag, out):
"""Accomplish an authenticated decryption with explicitly given tag.
Note, if authentication tag is a part of an encrypted data then,
method "finish" can be used for simplicity."""
vscf_aes256_gcm_finish_auth_decryption = self._lib.vscf_aes256_gcm_finish_auth_decryption
vscf_aes256_gcm_finish_auth_decryption.argtypes = [POINTER(vscf_aes256_gcm_t), vsc_data_t, POINTER(vsc_buffer_t)]
vscf_aes256_gcm_finish_auth_decryption.restype = c_int
return vscf_aes256_gcm_finish_auth_decryption(ctx, tag, out)
def vscf_aes256_gcm_shallow_copy(self, ctx):
vscf_aes256_gcm_shallow_copy = self._lib.vscf_aes256_gcm_shallow_copy
vscf_aes256_gcm_shallow_copy.argtypes = [POINTER(vscf_aes256_gcm_t)]
vscf_aes256_gcm_shallow_copy.restype = POINTER(vscf_aes256_gcm_t)
return vscf_aes256_gcm_shallow_copy(ctx)
def vscf_aes256_gcm_impl(self, ctx):
vscf_aes256_gcm_impl = self._lib.vscf_aes256_gcm_impl
vscf_aes256_gcm_impl.argtypes = [POINTER(vscf_aes256_gcm_t)]
vscf_aes256_gcm_impl.restype = POINTER(vscf_impl_t)
return vscf_aes256_gcm_impl(ctx)
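# Illustrative note (not part of the original wrapper): every method above follows the
# same ctypes pattern - look up the symbol on the loaded foundation library, declare
# argtypes/restype so ctypes marshals pointers and integers correctly, then call it.
# A typical (assumed) one-shot encryption sequence with this class would be
# vscf_aes256_gcm_new() -> set_key() -> set_nonce() -> encrypt() -> delete(); the
# vsc_data_t/vsc_buffer_t construction helpers from virgil_crypto_lib.common are
# intentionally not shown here.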
| 52.776557 | 146 | 0.754026 |
38dfa920b23e8b8e31867a1ea8d33ea33ef19a35 | 3,076 | py | Python | tests/operators/test_node_pod_operator.py | p-pekala/kedro-airflow-k8s | e619300ede95581d4acdbf43d3060a37594944b0 | ["Apache-2.0"] | null | null | null | tests/operators/test_node_pod_operator.py | p-pekala/kedro-airflow-k8s | e619300ede95581d4acdbf43d3060a37594944b0 | ["Apache-2.0"] | null | null | null | tests/operators/test_node_pod_operator.py | p-pekala/kedro-airflow-k8s | e619300ede95581d4acdbf43d3060a37594944b0 | ["Apache-2.0"] | 1 | 2021-05-11T09:50:57.000Z | 2021-05-11T09:50:57.000Z |
import unittest
from kedro_airflow_k8s.operators.node_pod import NodePodOperator
class TestNodePodOperator(unittest.TestCase):
def test_task_create(self):
task = NodePodOperator(
node_name="test_node_name",
namespace="airflow",
volume_disabled=False,
pvc_name="shared_storage",
image="registry.gitlab.com/test_image",
image_pull_policy="Always",
env="test-pipelines",
task_id="test-node-name",
startup_timeout=120,
volume_owner=100,
mlflow_enabled=False,
requests_cpu="500m",
requests_memory="2Gi",
limits_cpu="2",
limits_memory="10Gi",
node_selector_labels={
"size/k8s.io": "huge",
},
labels={
"running": "airflow"
},
pipeline="data_science_pipeline",
)
pod = task.create_pod_request_obj()
assert pod.metadata.name.startswith("test-node-name")
assert "test-node-name" != pod.metadata.name
assert pod.metadata.namespace == "airflow"
assert len(pod.spec.containers) == 1
container = pod.spec.containers[0]
assert container.image == "registry.gitlab.com/test_image"
assert container.image_pull_policy == "Always"
assert container.args == [
"kedro",
"run",
"-e",
"test-pipelines",
"--pipeline",
"data_science_pipeline",
"--node",
"test_node_name",
]
assert len(pod.spec.volumes) == 1
volume = pod.spec.volumes[0]
assert volume.name == "storage"
assert volume.persistent_volume_claim.claim_name == "shared_storage"
assert len(container.volume_mounts) == 1
volume_mount = container.volume_mounts[0]
assert volume_mount.mount_path == "/home/kedro/data"
assert volume_mount.name == "storage"
assert pod.spec.security_context.fs_group == 100
assert container.resources.limits == {"cpu": "2", "memory": "10Gi"}
assert container.resources.requests == {"cpu": "500m", "memory": "2Gi"}
assert pod.spec.node_selectors == {"size/k8s.io": "huge"}
assert pod.spec.labels == {"running": "airflow"}
def test_task_create_no_limits_and_requests(self):
task = NodePodOperator(
node_name="test_node_name",
namespace="airflow",
pvc_name="shared_storage",
image="registry.gitlab.com/test_image",
image_pull_policy="Always",
env="test-pipelines",
task_id="test-node-name",
volume_owner=100,
mlflow_enabled=False,
)
pod = task.create_pod_request_obj()
assert len(pod.spec.containers) == 1
container = pod.spec.containers[0]
assert container.resources.limits == {}
assert container.resources.requests == {}
assert pod.spec.node_selector is None
| 34.954545 | 79 | 0.579649 |
6371962883398e184b72e7b73e8bbb196c37767a | 250 | py | Python | 6 kyu/Mexican Wave.py | mwk0408/codewars_solutions | 9b4f502b5f159e68024d494e19a96a226acad5e5 | [
"MIT"
] | 6 | 2020-09-03T09:32:25.000Z | 2020-12-07T04:10:01.000Z | 6 kyu/Mexican Wave.py | mwk0408/codewars_solutions | 9b4f502b5f159e68024d494e19a96a226acad5e5 | [
"MIT"
] | 1 | 2021-12-13T15:30:21.000Z | 2021-12-13T15:30:21.000Z | 6 kyu/Mexican Wave.py | mwk0408/codewars_solutions | 9b4f502b5f159e68024d494e19a96a226acad5e5 | [
"MIT"
] | null | null | null | def wave(people):
temp=""
result=[]
for i in range(0,len(people)):
if people[i].isalpha():
temp+=people[0:i]+people[i].upper()+people[i+1:]
result.append(temp)
temp=""
return result
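# Illustrative usage check (not part of the original solution):
if __name__ == '__main__':
    assert wave('hello') == ['Hello', 'hEllo', 'heLlo', 'helLo', 'hellO']
    # non-alphabetic characters are skipped, so the result can be shorter than the input
    assert wave(' gap ') == [' Gap ', ' gAp ', ' gaP ']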
| 25 | 60 | 0.496 |
e39a97dbd6523b9fb2247eb1a593e105ec8c4ce3 | 28,790 | py | Python | queue_fair_adapter/queue_fair_adapter.py | Queue-Fair/python | 4846398d58809c4ff42d63b524c8162d5a39bb67 | ["MIT"] | null | null | null | queue_fair_adapter/queue_fair_adapter.py | Queue-Fair/python | 4846398d58809c4ff42d63b524c8162d5a39bb67 | ["MIT"] | null | null | null | queue_fair_adapter/queue_fair_adapter.py | Queue-Fair/python | 4846398d58809c4ff42d63b524c8162d5a39bb67 | ["MIT"] | null | null | null |
from queue_fair_adapter.queue_fair_config import QueueFairConfig
from queue_fair_adapter.queue_fair_logger import QueueFairLogger
import json
import urllib
import traceback
import hashlib
import hmac
import time
import shelve
import os
class QueueFairAdapter:
COOKIE_NAME_BASE = 'QueueFair-Pass-'
def __init__(self, service, requestedURL, userAgent,
remoteIPAddress, extra):
self.service = service
self.continuePage = True
self.parsing = False
self.protocol = 'https'
self.settings = None
self.adapterResult = None
self.adapterQueue = None
self.passedString = None
self.passedQueues = dict([])
self.uid = None
self.requestedURL = requestedURL
self.userAgent = userAgent
self.remoteIPAddress = remoteIPAddress
self.extra = extra
self.addedCacheControl = False
self.d = QueueFairConfig.DEBUG
def setUIDFromCookie(self):
cookieBase = 'QueueFair-Store-' + QueueFairConfig.ACCOUNT
uidCookie = self.service.getCookie(cookieBase)
if uidCookie == '':
return
i = uidCookie.find('=')
if i == -1:
i = uidCookie.find(':')
if i == -1:
if self.d:
self.log('separator not found in UID Cookie! ' + uidCookie)
return
self.uid = uidCookie[i+1:]
if self.d:
self.log('UID set to ' + self.uid)
def checkAndAddCacheControl(self):
if self.addedCacheControl:
return
self.service.addHeader('Cache-Control',
'no-store, max-age=0')
self.addedCacheControl = True
@staticmethod
def hash(secret, message):
signature = hmac.new(
bytes(secret, 'utf-8'),
msg=bytes(message, 'utf-8'),
digestmod=hashlib.sha256
).hexdigest().lower()
return signature
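    # Illustrative note (not part of the original adapter): hash() is a hex-encoded
    # HMAC-SHA256 of the message keyed with the queue secret; validateQuery() and
    # validateCookie() below recompute it over the processed user agent plus the
    # signed portion of the query string / cookie and compare it with the qfh value.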
def validateQuery(self, queue):
try:
parsedUrl = urllib.parse.urlparse(self.requestedURL)
qstr = parsedUrl.query
q = urllib.parse.parse_qs(qstr)
if self.d:
self.log('Validating Passed Query ' + qstr)
hpos = qstr.rfind('qfh=')
if hpos == -1:
if self.d:
self.log('No Hash In Query')
return False
if 'qfh' not in q:
if self.d:
self.log('Malformed hash')
return False
queryHash = q['qfh'][0]
qpos = qstr.rfind('qfqid=')
if qpos == -1:
if self.d:
self.log('No Queue Identifier')
return False
if 'qfts' not in q:
if self.d:
self.log('No Timestamp')
return False
queryTS = q['qfts'][0]
if not queryTS.isnumeric():
if self.d:
self.log('Timestamp Not Numeric')
return False
queryTS = int(queryTS)
if queryTS > (time.time() +
QueueFairConfig.QUERY_TIME_LIMIT_SECONDS):
if self.d:
self.log('Too Late ' +
str(queryTS) + ' ' + str(time.time()))
return False
if queryTS < (time.time() -
QueueFairConfig.QUERY_TIME_LIMIT_SECONDS):
if self.d:
self.log('Too Early ' + str(queryTS) + ' ' +
str(time.time()))
return False
check = qstr[qpos:hpos]
checkInput = QueueFairAdapter.processIdentifier(self.userAgent)
checkInput += check
checkHash = QueueFairAdapter.hash(queue['secret'], checkInput)
if checkHash != queryHash:
if self.d:
self.log('Failed Hash')
return False
return True
except Exception as exc:
if self.d:
self.log('Error validating query'+str(exc))
return False
def validateCookieFromQueue(self, queue, cookie):
return self.validateCookie(queue['secret'], int(queue['passedLifetimeMinutes']), cookie)
def validateCookie(self, secret, passedLifetimeMinutes, cookie):
try:
if self.d:
self.log('Validating cookie ' + cookie)
parsed = urllib.parse.parse_qs(cookie)
if 'qfh' not in parsed:
return False
mHash = parsed['qfh'][0]
hpos = cookie.rfind('qfh=')
check = cookie[0:hpos]
checkInput = QueueFairAdapter.processIdentifier(self.userAgent)
checkInput += check
checkHash = QueueFairAdapter.hash(secret, checkInput)
if mHash != checkHash:
if self.d:
self.log('Cookie Hash Mismatch Given ' + mHash +
' Should be ' + checkHash)
return False
tspos = int(parsed['qfts'][0])
if tspos < time.time() - passedLifetimeMinutes * 60:
if self.d:
                    self.log('Cookie timestamp too old ' +
                             str(time.time() - tspos))
return False
if self.d:
self.log('Cookie Validated ')
return True
except Exception as exc:
if self.d:
self.log('Cookie Validation failed with error '+str(exc))
return False
def checkQueryString(self):
urlParams = self.requestedURL
if self.d:
self.log('Checking URL for Passed String ' + urlParams)
q = urlParams.find('qfqid=')
if q == -1:
return
if self.d:
self.log('Passed string found')
i = urlParams.find('qfq=')
if i == -1:
return
if self.d:
self.log('Passed String with Queue Name found')
j = urlParams.find('&', i)
subStart = i + len('qfq=')
queueName = urlParams[subStart:j]
if self.d:
self.log('Queue name is ' + queueName)
for queue in self.settings['queues']:
if queue['name'] != queueName:
continue
if self.d:
self.log('Found queue for querystring ' + queueName)
value = urlParams
value = value[value.find('qfqid'):]
if not self.validateQuery(queue):
# This can happen if it's a stale query string
# too - check for valid cookie.
cName = QueueFairAdapter.COOKIE_NAME_BASE + queueName
queueCookie = self.service.getCookie(cName)
if '' != queueCookie:
if self.d:
self.log('Query validation failed but cookie ' +
queueCookie)
if self.validateCookieFromQueue(queue, queueCookie):
if self.d:
self.log('The cookie is valid. That\'s fine')
return
if self.d:
self.log('Query AND Cookie validation failed!!!')
else:
if self.d:
self.log('Bad queueCookie for ' +
queueName + ' ' + queueCookie)
if self.d:
                    self.log('Query not valid. Redirecting to error page')
loc = self.protocol + '://' + queue['queueServer'] + '/'
loc += queue['name'] + '?qfError=InvalidQuery'
self.redirect(loc, 1)
return
if self.d:
self.log('Query validation succeeded for ' + value)
self.passedString = value
self.setCookie(queueName, value,
int(queue['passedLifetimeMinutes']) * 60,
QueueFairAdapter.optional(queue, 'cookieDomain'))
if not self.continuePage:
return
if self.d:
self.log('Marking ' + queueName + ' as passed by queryString')
self.passedQueues[queueName] = True
def gotSettings(self):
if self.d:
self.log('Got client settings.')
self.checkQueryString()
if not self.continuePage:
return
self.parseSettings()
def isMatch(self, queue):
if queue is None:
return False
if 'activation' not in queue:
return False
if 'rules' not in queue['activation']:
return False
return self.isMatchArray(queue['activation']['rules'])
def isMatchArray(self, arr):
if arr is None:
return False
firstOp = True
state = False
i = 0
for rule in arr:
i = i+1
if not firstOp and rule['operator'] is not None:
if rule['operator'] == 'And' and not state:
return False
elif rule['operator'] == 'Or' and state:
return True
ruleMatch = self.isRuleMatch(rule)
if firstOp:
state = ruleMatch
firstOp = False
if self.d:
self.log(' Rule 1: ' + str(ruleMatch))
else:
if self.d:
                    self.log(' Rule ' + str(i) + ': ' + str(ruleMatch))
if rule['operator'] == 'And':
state = (state and ruleMatch)
if not state:
break
elif rule['operator'] == 'Or':
state = (state or ruleMatch)
if state:
break
if self.d:
self.log('Final result is ' + str(state))
return state
def isRuleMatch(self, rule):
comp = self.requestedURL
if rule['component'] == 'Domain':
comp = comp.replace('http://', '')
comp = comp.replace('https://', '')
comp = comp.split('?')[0]
comp = comp.split('#')[0]
comp = comp.split('/')[0]
comp = comp.split(':')[0]
elif rule['component'] == 'Path':
domain = comp.replace('http://', '')
domain = domain.replace('https://', '')
domain = domain.split('?')[0]
domain = domain.split('#')[0]
domain = domain.split('/')[0]
domain = domain.split(':')[0]
comp = comp[comp.find(domain) + len(domain):]
if comp.startswith(':'):
i = comp.find('/')
if i != -1:
comp = comp[i:]
else:
comp = ''
i = comp.find('#')
if i != -1:
comp = comp[0:i]
i = comp.find('?')
if i != -1:
comp = comp[0:i]
if comp == '':
comp = '/'
elif rule['component'] == 'Query':
if comp.find('?') == -1:
comp = ''
elif comp == '?':
comp = ''
else:
comp = comp[comp.find('?') + 1:]
elif rule['component'] == 'Cookie':
comp = self.service.getCookie(rule['name'])
test = rule['value']
if not rule['caseSensitive']:
comp = comp.lower()
test = test.lower()
if self.d:
self.log(' Testing ' + rule['component'] + ' ' + test +
' against ' + comp)
ret = False
if rule['match'] == 'Equal' and comp == test:
ret = True
elif (rule['match'] == 'Contain' and
comp is not None and
comp != '' and comp.find(test) != -1):
ret = True
elif rule['match'] == 'Exist':
if comp is None or '' == comp:
ret = False
else:
ret = True
if rule['negate']:
ret = not ret
return ret
def isPassed(self, queue):
if queue['name'] in self.passedQueues:
if self.d:
self.log('Queue ' + queue['name'] +
' marked as passed already.')
return True
_ = QueueFairAdapter.COOKIE_NAME_BASE + queue['name']
queueCookie = self.service.getCookie(_)
if queueCookie == '':
if self.d:
self.log('No cookie found for queue ' + queue['name'])
return False
if queueCookie.find(queue['name']) == -1:
if self.d:
self.log('Cookie value is invalid for ' + queue['name'])
return False
if not self.validateCookieFromQueue(queue, queueCookie):
if self.d:
self.log('Cookie failed validation ' + queueCookie)
self.setCookie(queue['name'], '', 0,
QueueFairAdapter.optional(queue, 'cookieDomain'))
return False
if self.d:
self.log('Found valid cookie for ' + queue['name'])
return True
def onMatch(self, queue):
if self.isPassed(queue):
if self.d:
self.log('Already passed ' + queue['name'] + '.')
return True
elif not self.continuePage:
return False
if self.d:
self.log('Checking at server ' + queue['displayName'])
self.consultAdapter(queue)
return False
def setCookie(self, queueName, value, lifetimeSeconds, cookieDomain):
if self.d:
self.log('Setting cookie for ' + queueName + ' to ' + value)
lifetimeSeconds = int(lifetimeSeconds)
cookieName = QueueFairAdapter.COOKIE_NAME_BASE + queueName
self.checkAndAddCacheControl()
self.service.setCookie(cookieName, value,
lifetimeSeconds, cookieDomain)
if lifetimeSeconds > 0:
self.passedQueues[queueName] = True
if QueueFairConfig.STRIP_PASSED_STRING:
loc = self.requestedURL
pos = loc.find('qfqid=')
if pos != -1:
if self.d:
self.log('Stripping passedString from URL')
loc = loc[0:pos - 1]
self.redirect(loc, 0)
def log(self, message):
QueueFairLogger.log(message)
def redirect(self, loc, sleepSecs):
if sleepSecs > 0:
time.sleep(sleepSecs)
self.checkAndAddCacheControl()
self.service.redirect(loc)
self.continuePage = False
def parseSettings(self):
if self.settings is None:
if self.d:
                self.log('ERROR: Settings not set.')
return
queues = self.settings['queues']
if len(queues) == 0:
if self.d:
                self.log('No queues found.')
return
self.parsing = True
if self.d:
self.log('Running through queue rules')
for queue in queues:
if queue['name'] in self.passedQueues:
if self.d:
self.log('Passed from array ' + queue['name'])
continue
if self.d:
self.log('Checking ' + queue['displayName'])
if self.isMatch(queue):
if self.d:
self.log('Got a match ' + queue['displayName'])
if not self.onMatch(queue):
if not self.continuePage:
return
if self.d:
self.log('Found matching unpassed queue ' +
queue['displayName'])
if QueueFairConfig.ADAPTER_MODE == 'simple':
return
else:
continue
if not self.continuePage:
return
# Passed.
self.passedQueues[queue['name']] = True
else:
if self.d:
self.log('Rules did not match ' + queue['displayName'])
if self.d:
self.log('All queues checked')
self.parsing = False
@staticmethod
def urlencode(param):
return urllib.parse.quote_plus(param)
@staticmethod
def urldecode(param):
return urllib.parse.unquote(param)
@staticmethod
def optional(coll, key):
if key not in coll:
return None
return coll[key]
def consultAdapter(self, queue):
if self.d:
self.log('Consulting Adapter Server for queue ' +
queue['name']+' for page '+self.requestedURL)
self.adapterQueue = queue
adapterMode = 'safe'
if 'adapterMode' in queue:
adapterMode = queue['adapterMode']
elif QueueFairConfig.ADAPTER_MODE is not None:
adapterMode = QueueFairConfig.ADAPTER_MODE
if self.d:
self.log('Adapter mode is ' + adapterMode)
if 'safe' == adapterMode:
url = self.protocol + '://' + queue['adapterServer']
url += '/adapter/' + queue['name']
url += '?ipaddress='
url += QueueFairAdapter.urlencode(self.remoteIPAddress)
if self.uid is not None:
url += '&uid=' + self.uid
url += '&identifier='
url += QueueFairAdapter.urlencode(
QueueFairAdapter.processIdentifier(self.userAgent))
if self.d:
self.log('Adapter URL ' + url)
js = QueueFairAdapter.urlToJSON(url)
if js is None:
self.error('No Settings JSON')
return
if self.d:
self.log('Downloaded JSON Settings ' + str(js))
self.adapterResult = js
self.gotAdapter()
if not self.continuePage:
return
else:
url = self.protocol + '://' + queue['queueServer'] + '/'
url += queue['name'] + '?target='
url += QueueFairAdapter.urlencode(self.requestedURL)
url = self.appendVariant(queue, url)
url = self.appendExtra(queue, url)
if self.d:
self.log('Redirecting to adapter server ' + url)
self.redirect(url, 0)
def gotAdapter(self):
if self.d:
self.log('Got adapter')
if not self.adapterResult:
if self.d:
self.log('ERROR: onAdapter() called without result')
return
if 'uid' in self.adapterResult:
if self.uid is not None and self.uid != self.adapterResult['uid']:
self.log(
'UID Cookie Mismatch - expected ' +
self.uid + ' but received ' + self.adapterResult['uid']
)
else:
self.uid = self.adapterResult['uid']
self.service.setCookie('QueueFair-Store-' +
QueueFairConfig.ACCOUNT,
'u:' + self.uid,
self.adapterResult['cookieSeconds'],
self.optional(self.adapterQueue,
'cookieDomain'))
if 'action' not in self.adapterResult:
if self.d:
self.log('ERROR: gotAdapter() called without result action')
return
if self.adapterResult['action'] == 'SendToQueue':
if self.d:
self.log('Sending to queue server')
queryParams = ''
target = self.requestedURL
if self.adapterQueue['dynamicTarget'] != 'disabled':
if self.adapterQueue['dynamicTarget'] == 'path':
i = target.find('?')
if i != -1:
target = target[0:i]
queryParams += 'target='
queryParams += QueueFairAdapter.urlencode(target)
if self.uid is not None:
if queryParams != '':
queryParams += '&'
queryParams += 'qfuid=' + self.uid
redirectLoc = self.adapterResult['location']
if queryParams != '':
redirectLoc = redirectLoc + '?' + queryParams
redirectLoc = self.appendVariant(self.adapterQueue, redirectLoc)
redirectLoc = self.appendExtra(self.adapterQueue, redirectLoc)
if self.d:
self.log('Redirecting to ' + redirectLoc)
self.redirect(redirectLoc, 0)
return
# SafeGuard etc
self.setCookie(self.adapterResult['queue'],
QueueFairAdapter.urldecode(
self.adapterResult['validation']),
int(self.adapterQueue['passedLifetimeMinutes']) * 60,
self.optional(self.adapterQueue, 'cookieDomain'))
if not self.continuePage:
return
if self.d:
self.log('Marking ' + self.adapterResult['queue'] +
' as passed by adapter')
self.passedQueues[self.adapterResult['queue']] = True
def appendVariant(self, queue, redirectLoc):
if self.d:
self.log('Looking for variant')
variant = self.getVariant(queue)
if variant is None:
if self.d:
self.log('No variant found')
return redirectLoc
if self.d:
self.log('Found variant ' + variant)
if redirectLoc.find('?') != -1:
redirectLoc += '&'
else:
redirectLoc += '?'
redirectLoc += 'qfv=' + QueueFairAdapter.urlencode(variant)
return redirectLoc
def appendExtra(self, queue, redirectLoc):
if self.extra == '' or self.extra is None:
return redirectLoc
self.log('Found extra ' + self.extra)
if redirectLoc.find('?') != -1:
redirectLoc += '&'
else:
redirectLoc += '?'
redirectLoc += 'qfx=' + QueueFairAdapter.urlencode(self.extra)
return redirectLoc
def getVariant(self, queue):
if self.d:
self.log('Getting variants for ' + queue['name'])
if 'activation' not in queue:
return None
if 'variantRules' not in queue['activation']:
return None
variantRules = queue['activation']['variantRules']
if self.d:
self.log('Checking variant rules for ' + queue['name'])
for variant in variantRules:
            variantName = variant['variant']
            rules = variant['rules']
            ret = self.isMatchArray(rules)
            if self.d:
                self.log('Variant match ' + variantName + ' ' + str(ret))
if ret:
return variantName
return None
@staticmethod
def processIdentifier(parameter):
if parameter is None:
return None
i = parameter.find('[')
if i == -1:
return parameter
if i < 20:
return parameter
return parameter[0:i]
@staticmethod
def urlToJSON(url):
return json.loads(urllib.request.urlopen(url).read())
def settingsURL(self):
ret = self.protocol + '://'
ret += QueueFairConfig.FILES_SERVER+'/'+QueueFairConfig.ACCOUNT+'/'
ret += QueueFairConfig.ACCOUNT_SECRET+'/queue-fair-settings.json'
return ret
@staticmethod
def create(filename):
try:
with open(filename, 'x') as _:
return False
except FileExistsError:
return True
def writeToShelf(self):
# Only one process may write to the shelf at a time,
# and there must be no reads while writing.
if QueueFairAdapter.create(QueueFairAdapter.getSettingsLockLoc()):
self.settings = QueueFairAdapter.urlToJSON(self.settingsURL())
if self.d:
self.log("Settings lock exists!")
return
try:
self.settings = QueueFairAdapter.urlToJSON(self.settingsURL())
d = shelve.open(QueueFairAdapter.getSettingsLoc(), 'c', None, True)
d['time'] = time.time()
d['settings'] = self.settings
d.close()
if self.d:
self.log("Written settings to shelf")
except Exception as exc:
if self.d:
self.log("Unexpected error storing settings from " +
self.settingsURL() + ": " + str(exc))
finally:
os.remove(QueueFairAdapter.getSettingsLockLoc())
def waitForSettings(self):
unlocked = False
for x in range(0, QueueFairConfig.READ_TIMEOUT):
if not os.path.exists(QueueFairAdapter.getSettingsLockLoc()):
unlocked = True
break
if self.d:
self.log('Sleeping '+str(x))
time.sleep(1)
if unlocked:
return
if self.d:
self.log('Deleting lock')
os.remove(QueueFairConfig.SETTINGS_FILE_CACHE_LOCATION+'/SettingsLock')
@staticmethod
def getSettingsLoc():
w = QueueFairConfig.SETTINGS_FILE_CACHE_LOCATION
return w + '/QueueFairStoredSettings'
@staticmethod
def getSettingsLockLoc():
return QueueFairConfig.SETTINGS_FILE_CACHE_LOCATION+'/SettingsLock'
def loadSettings(self):
if 'DELETE' in QueueFairConfig.ACCOUNT:
raise ValueError('QF bad account name - edit QueueFairConfig.py')
self.waitForSettings()
d = None
# You can have as many read processes as you like.
try:
d = shelve.open(QueueFairAdapter.getSettingsLoc(), 'r')
except Exception:
self.writeToShelf()
if self.d:
self.log('Created settings storage')
return
if 'time' not in d:
d.close()
self.writeToShelf()
if self.d:
self.log("Time not in shelf!.")
return
else:
if 'settings' in d:
if (time.time() - d['time'] <
QueueFairConfig.SETTINGS_FILE_CACHE_LIFETIME_MINUTES *
60):
self.settings = d['settings']
d.close()
if self.d:
self.log("Retrieved settings from cache.")
return
else:
d.close()
self.writeToShelf()
if self.d:
self.log("Refreshed cached settings.")
return
else:
d.close()
self.writeToShelf()
if self.d:
self.log("Time in shelf but not settings!")
return
def isContinue(self):
try:
if self.d:
self.log('----Adapter Starting for '+self.remoteIPAddress)
self.setUIDFromCookie()
self.loadSettings()
if self.settings is None:
return True
self.gotSettings()
if self.d:
self.log('----Adapter Ending for '+self.remoteIPAddress)
return self.continuePage
except Exception as exc:
print('QF ----Adapter Ending with Exception')
print(exc)
print(traceback.format_exc())
return True
| 33.360371 | 97 | 0.478986 |
100c9ed4d942f0afb0a68169e0b2ce0cef4fdb84 | 1,422 | py | Python | SOLVED/valid-parentheses.py | Roxxum/Coding-Challenges | 4212653e9687d002586249df8bb42d17b398f667 | [
"MIT"
] | null | null | null | SOLVED/valid-parentheses.py | Roxxum/Coding-Challenges | 4212653e9687d002586249df8bb42d17b398f667 | [
"MIT"
] | null | null | null | SOLVED/valid-parentheses.py | Roxxum/Coding-Challenges | 4212653e9687d002586249df8bb42d17b398f667 | [
"MIT"
] | null | null | null | """
Given a string s containing just the characters '(', ')', '{', '}', '[' and ']', determine if the input string is valid.
An input string is valid if:
Open brackets must be closed by the same type of brackets.
Open brackets must be closed in the correct order.
Example 1:
Input: s = "()"
Output: true
Example 2:
Input: s = "()[]{}"
Output: true
Example 3:
Input: s = "(]"
Output: false
Example 4:
Input: s = "([)]"
Output: false
Example 5:
Input: s = "{[]}"
Output: true
Constraints:
1 <= s.length <= 10^4
s consists of parentheses only '()[]{}'.
"""
# define an input for testing purposes
s = "([)]"
# actual code to submit
def solution(s):
pairs = {
")" : "(",
"]" : "[",
"}" : "{"}
door = {
"(" : "Open",
"[" : "Open",
"{" : "Open",
")" : "Closed",
"]" : "Closed",
"}" : "Closed"}
slist = list(s)
check = []
    for i in slist:
        if door[i] == "Open":
            check.append(i)
        elif door[i] == "Closed" and len(check) > 0:
            if pairs[i] == check[-1]:
                check.pop()
            else:
                # Mismatched closer: park it at the front so the stack can
                # never empty out and the string is reported invalid.
                check.insert(0, i)
        else:
            # Closer with nothing open: same trick, poison the stack.
            check.insert(0, i)
if len(check) == 0:
return True
else:
return False
# use print statement to check if it works
print(solution(s))
# My Submission: https://leetcode.com/submissions/detail/453584002/
| 18.710526 | 120 | 0.512658 |
2a7020eeaa07eb6df9e5c96f4d67b54c22e373ae | 1,737 | py | Python | zerorpc/socket.py | prakatmac/zerorpc-python | 46b90d1d7d00bef45d723b41cdf7383853959549 | [
"MIT"
] | 1 | 2017-05-03T14:44:41.000Z | 2017-05-03T14:44:41.000Z | zerorpc/socket.py | madscheme/zerorpc-python | 3428fdbd615dbc775dca019561a96a5f32638941 | [
"MIT"
] | null | null | null | zerorpc/socket.py | madscheme/zerorpc-python | 3428fdbd615dbc775dca019561a96a5f32638941 | [
"MIT"
] | 1 | 2021-09-08T09:56:24.000Z | 2021-09-08T09:56:24.000Z | # -*- coding: utf-8 -*-
# Open Source Initiative OSI - The MIT License (MIT):Licensing
#
# The MIT License (MIT)
# Copyright (c) 2012 DotCloud Inc (opensource@dotcloud.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .context import Context
from .events import Events
class SocketBase(object):
def __init__(self, zmq_socket_type, context=None):
self._context = context or Context.get_instance()
self._events = Events(zmq_socket_type, context)
def close(self):
self._events.close()
def connect(self, endpoint, resolve=True):
return self._events.connect(endpoint, resolve)
def bind(self, endpoint, resolve=True):
return self._events.bind(endpoint, resolve)
| 39.477273 | 81 | 0.747841 |
b08265f5bd91c48fc2a4d9340d90ea1db6a53ebc | 1,134 | py | Python | cacheback/queryset.py | coagulant/django-cacheback | b57b77af572a1c47ea8161f96b1e8a7b7cec0d00 | [
"MIT"
] | null | null | null | cacheback/queryset.py | coagulant/django-cacheback | b57b77af572a1c47ea8161f96b1e8a7b7cec0d00 | [
"MIT"
] | null | null | null | cacheback/queryset.py | coagulant/django-cacheback | b57b77af572a1c47ea8161f96b1e8a7b7cec0d00 | [
"MIT"
] | 1 | 2018-08-02T14:38:10.000Z | 2018-08-02T14:38:10.000Z | from cacheback.base import Job
class QuerySetJob(Job):
"""
Helper class for wrapping ORM reads
"""
def __init__(self, model, lifetime=None, fetch_on_miss=None):
"""
:model: The model class to use
"""
super(QuerySetJob, self).__init__()
self.model = model
if lifetime is not None:
self.lifetime = lifetime
if fetch_on_miss is not None:
self.fetch_on_miss = fetch_on_miss
def key(self, *args, **kwargs):
return "%s-%s" % (
self.model.__name__,
super(QuerySetJob, self).key(*args, **kwargs)
)
def get_constructor_kwargs(self):
return {'model': self.model,
'lifetime': self.lifetime}
class QuerySetGetJob(QuerySetJob):
"""
For ORM reads that use the ``get`` method.
"""
def fetch(self, *args, **kwargs):
return self.model.objects.get(**kwargs)
class QuerySetFilterJob(QuerySetJob):
"""
For ORM reads that use the ``filter`` method.
"""
def fetch(self, *args, **kwargs):
return self.model.objects.filter(**kwargs)
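# Illustrative usage sketch (not part of the original module). "Product" is a
# hypothetical Django model, and the get() entry point is assumed to come from
# the cacheback Job base class:
#
#     product = QuerySetGetJob(Product, lifetime=300).get(pk=42)
#     in_stock = QuerySetFilterJob(Product, lifetime=600).get(in_stock=True)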
| 25.2 | 65 | 0.585538 |
b243e82109ded742382d198c8f27851cddc0e66d | 804 | py | Python | tests/unit/control/test_sub_categories.py | code-R/retail_app | ea7d268a4994d12f8ef6ed609e8593e5337de74f | [
"MIT"
] | 2 | 2018-04-16T02:40:26.000Z | 2019-11-29T15:33:22.000Z | tests/unit/control/test_sub_categories.py | code-R/retail_app | ea7d268a4994d12f8ef6ed609e8593e5337de74f | [
"MIT"
] | null | null | null | tests/unit/control/test_sub_categories.py | code-R/retail_app | ea7d268a4994d12f8ef6ed609e8593e5337de74f | [
"MIT"
] | null | null | null | from sqlalchemy.orm import sessionmaker
from retailstore.control import sub_categories
from retailstore.db.sqlalchemy.models import SubCategory
from retailstore.serializers.schemas import SubCategorySchema
def test_collection_properties(mocker):
session = mocker.MagicMock(sessionmaker)
api = sub_categories.CollectionResource(session)
assert isinstance(api.get_schema, SubCategorySchema)
assert isinstance(api.post_schema, SubCategorySchema)
assert api.get_schema.many
assert api.orm_model == SubCategory
def test_item_properties(mocker):
session = mocker.MagicMock(sessionmaker)
api = sub_categories.ItemResource(session)
assert isinstance(api.schema, SubCategorySchema)
assert api.resource_key == 'sub_category_id'
assert api.orm_model == SubCategory
| 33.5 | 61 | 0.802239 |
3b7851fc4da3d494b4f3e4da84a0cae9440ff1ac | 2,122 | py | Python | tests/graphs/algorithms/test_matching.py | ref-humbold/AlgoLib_Python | 05f725504656ec93b879374a8cd87464d88fff77 | [
"Apache-2.0"
] | null | null | null | tests/graphs/algorithms/test_matching.py | ref-humbold/AlgoLib_Python | 05f725504656ec93b879374a8cd87464d88fff77 | [
"Apache-2.0"
] | null | null | null | tests/graphs/algorithms/test_matching.py | ref-humbold/AlgoLib_Python | 05f725504656ec93b879374a8cd87464d88fff77 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""Tests: Hopcroft-Karp algorithm for matching in bipartite graph"""
import unittest
from assertpy import assert_that
from algolib.graphs import MultipartiteGraph
from algolib.graphs.algorithms import match
class MatchingTest(unittest.TestCase):
@staticmethod
def test__match__when_matching_exists__then_maximal_matching():
# given
graph = MultipartiteGraph(2, [[0, 2, 4, 6], [1, 3, 5, 7]])
graph.add_edge_between(graph.get_vertex(0), graph.get_vertex(3))
graph.add_edge_between(graph.get_vertex(0), graph.get_vertex(5))
graph.add_edge_between(graph.get_vertex(1), graph.get_vertex(2))
graph.add_edge_between(graph.get_vertex(3), graph.get_vertex(4))
graph.add_edge_between(graph.get_vertex(3), graph.get_vertex(6))
graph.add_edge_between(graph.get_vertex(6), graph.get_vertex(7))
# when
result = match(graph)
# then
assert_that(result).is_equal_to({graph.get_vertex(0): graph.get_vertex(5),
graph.get_vertex(1): graph.get_vertex(2),
graph.get_vertex(2): graph.get_vertex(1),
graph.get_vertex(3): graph.get_vertex(4),
graph.get_vertex(4): graph.get_vertex(3),
graph.get_vertex(5): graph.get_vertex(0),
graph.get_vertex(6): graph.get_vertex(7),
graph.get_vertex(7): graph.get_vertex(6)})
@staticmethod
def test__match__when_vertices_only_in_group_0__then_empty():
# given
graph = MultipartiteGraph(2, [[0, 1, 2, 3, 4]])
# when
result = match(graph)
# then
assert_that(result).is_empty()
@staticmethod
def test__match__when_vertices_only_in_group_1__then_empty():
# given
graph = MultipartiteGraph(2, [[], [0, 1, 2, 3, 4]])
# when
result = match(graph)
# then
assert_that(result).is_empty()
| 41.607843 | 83 | 0.590009 |
fb4b42d1db76a10439405eb19b6792d7d57c8cd3 | 6,582 | py | Python | utils.py | BL-Lac149597870/drugVQA | 604703d66457c958ddc9eeb35268391edb6c4996 | [
"MIT"
] | null | null | null | utils.py | BL-Lac149597870/drugVQA | 604703d66457c958ddc9eeb35268391edb6c4996 | [
"MIT"
] | null | null | null | utils.py | BL-Lac149597870/drugVQA | 604703d66457c958ddc9eeb35268391edb6c4996 | [
"MIT"
] | null | null | null | import numpy as np
import re
import torch
from torch.autograd import Variable
# from torch.utils.data import Dataset, DataLoader
def create_variable(tensor):
# Do cuda() before wrapping with variable
if torch.cuda.is_available():
return Variable(tensor.cuda())
else:
return Variable(tensor)
def replace_halogen(string):
"""Regex to replace Br and Cl with single letters"""
br = re.compile('Br')
cl = re.compile('Cl')
string = br.sub('R', string)
string = cl.sub('L', string)
return string
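# Worked example (added for clarity): the two-character halogen tokens collapse
# to single letters, so every SMILES position maps to one vocabulary entry:
#   replace_halogen('BrCCCl')  ->  'RCCL'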
# Create necessary variables, lengths, and target
def make_variables(lines, properties,letters):
sequence_and_length = [line2voc_arr(line,letters) for line in lines]
vectorized_seqs = [sl[0] for sl in sequence_and_length]
seq_lengths = torch.LongTensor([sl[1] for sl in sequence_and_length])
return pad_sequences(vectorized_seqs, seq_lengths, properties)
def make_variables_seq(lines,letters):
sequence_and_length = [line2voc_arr(line,letters) for line in lines]
vectorized_seqs = [sl[0] for sl in sequence_and_length]
seq_lengths = torch.LongTensor([sl[1] for sl in sequence_and_length])
return pad_sequences_seq(vectorized_seqs, seq_lengths)
def line2voc_arr(line,letters):
arr = []
regex = '(\[[^\[\]]{1,10}\])'
line = replace_halogen(line)
char_list = re.split(regex, line)
for li, char in enumerate(char_list):
if char.startswith('['):
arr.append(letterToIndex(char,letters))
else:
chars = [unit for unit in char]
for i, unit in enumerate(chars):
arr.append(letterToIndex(unit,letters))
return arr, len(arr)
def letterToIndex(letter,smiles_letters):
return smiles_letters.index(letter)
# pad sequences and sort the tensor
def pad_sequences(vectorized_seqs, seq_lengths, properties):
seq_tensor = torch.zeros((len(vectorized_seqs), seq_lengths.max())).long()
for idx, (seq, seq_len) in enumerate(zip(vectorized_seqs, seq_lengths)):
seq_tensor[idx, :seq_len] = torch.LongTensor(seq)
# Sort tensors by their length
seq_lengths, perm_idx = seq_lengths.sort(0, descending=True)
seq_tensor = seq_tensor[perm_idx]
# Also sort the target (countries) in the same order
target = properties.double()
if len(properties):
target = target[perm_idx]
# Return variables
# DataParallel requires everything to be a Variable
return create_variable(seq_tensor),create_variable(seq_lengths),create_variable(target)
def pad_sequences_seq(vectorized_seqs, seq_lengths):
seq_tensor = torch.zeros((len(vectorized_seqs), seq_lengths.max())).long()
for idx, (seq, seq_len) in enumerate(zip(vectorized_seqs, seq_lengths)):
seq_tensor[idx, :seq_len] = torch.LongTensor(seq)
# Sort tensors by their length
seq_lengths, perm_idx = seq_lengths.sort(0, descending=True)
# print(seq_tensor)
seq_tensor = seq_tensor[perm_idx]
# Return variables
# DataParallel requires everything to be a Variable
return create_variable(seq_tensor), create_variable(seq_lengths)
def construct_vocabulary(smiles_list,fname):
"""Returns all the characters present in a SMILES file.
Uses regex to find characters/tokens of the format '[x]'."""
add_chars = set()
for i, smiles in enumerate(smiles_list):
regex = '(\[[^\[\]]{1,10}\])'
        smiles = replace_halogen(smiles)
char_list = re.split(regex, smiles)
for char in char_list:
if char.startswith('['):
add_chars.add(char)
else:
chars = [unit for unit in char]
[add_chars.add(unit) for unit in chars]
print("Number of characters: {}".format(len(add_chars)))
with open(fname, 'w') as f:
f.write('<pad>' + "\n")
for char in add_chars:
f.write(char + "\n")
return add_chars
def readLinesStrip(lines):
for i in range(len(lines)):
lines[i] = lines[i].rstrip('\n')
return lines
def getProteinSeq(path,contactMapName):
proteins = open(path+"/"+contactMapName).readlines()
proteins = readLinesStrip(proteins)
seq = proteins[1]
return seq
def getProtein(path,contactMapName,contactMap = True):
proteins = open(path+"/"+contactMapName).readlines()
proteins = readLinesStrip(proteins)
seq = proteins[1]
if(contactMap):
contactMap = []
for i in range(2,len(proteins)):
contactMap.append(proteins[i])
return seq,contactMap
else:
return seq
def getTrainDataSet(trainFoldPath):
with open(trainFoldPath, 'r') as f:
trainCpi_list = f.read().strip().split('\n')
trainDataSet = [cpi.strip().split() for cpi in trainCpi_list]
return trainDataSet#[[smiles, sequence, interaction],.....]
def getTestProteinList(testFoldPath):
testProteinList = readLinesStrip(open(testFoldPath).readlines())[0].split()
return testProteinList#['kpcb_2i0eA_full','fabp4_2nnqA_full',....]
def getSeqContactDict(contactPath,contactDictPath):# make a seq-contactMap dict
contactDict = open(contactDictPath).readlines()
seqContactDict = {}
for data in contactDict:
_,contactMapName = data.strip().split(':')
seq,contactMap = getProtein(contactPath,contactMapName)
contactmap_np = [list(map(float, x.strip(' ').split(' '))) for x in contactMap]
feature2D = np.expand_dims(contactmap_np, axis=0)
feature2D = torch.FloatTensor(feature2D)
seqContactDict[seq] = feature2D
return seqContactDict
def getLetters(path):
with open(path, 'r') as f:
chars = f.read().split()
return chars
def getDataDict(testProteinList,activePath,decoyPath,contactPath):
dataDict = {}
for x in testProteinList:#'xiap_2jk7A_full'
xData = []
protein = x.split('_')[0]
# print(protein)
proteinActPath = activePath+"/"+protein+"_actives_final.ism"
proteinDecPath = decoyPath+"/"+protein+"_decoys_final.ism"
act = open(proteinActPath,'r').readlines()
dec = open(proteinDecPath,'r').readlines()
actives = [[x.split(' ')[0],1] for x in act] ######
decoys = [[x.split(' ')[0],0] for x in dec]# test
seq = getProtein(contactPath,x,contactMap = False)
for i in range(len(actives)):
xData.append([actives[i][0],seq,actives[i][1]])
for i in range(len(decoys)):
xData.append([decoys[i][0],seq,decoys[i][1]])
# print(len(xData))
dataDict[x] = xData
return dataDict | 40.881988 | 91 | 0.665755 |
0c5a0e8b85cf7c725ad7810e9d1977448f4eaf63 | 850 | py | Python | src/itemsapp/migrations/0021_auto_20211011_1728.py | robertsmoto/sodavault | 200e843be7abe6cc447647bba55c7c1309092e5e | [
"BSD-3-Clause"
] | null | null | null | src/itemsapp/migrations/0021_auto_20211011_1728.py | robertsmoto/sodavault | 200e843be7abe6cc447647bba55c7c1309092e5e | [
"BSD-3-Clause"
] | null | null | null | src/itemsapp/migrations/0021_auto_20211011_1728.py | robertsmoto/sodavault | 200e843be7abe6cc447647bba55c7c1309092e5e | [
"BSD-3-Clause"
] | null | null | null | # Generated by Django 3.2.3 on 2021-10-11 17:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('itemsapp', '0020_groupchildren'),
]
operations = [
migrations.AddField(
model_name='item',
name='categories',
field=models.ManyToManyField(blank=True, related_name='category_items', to='itemsapp.Category'),
),
migrations.AddField(
model_name='item',
name='departments',
field=models.ManyToManyField(blank=True, related_name='department_items', to='itemsapp.Department'),
),
migrations.AddField(
model_name='item',
name='tags',
field=models.ManyToManyField(blank=True, related_name='tag_item', to='itemsapp.Tag'),
),
]
| 29.310345 | 112 | 0.602353 |
3974467614d3004877313d4ca3b7efdea25f0322 | 258 | py | Python | classifiers/chapter7/__init__.py | fulequn/DLAction | da2ff080f7a65f89010a5829b86fc1b45beb9dc8 | [
"MIT"
] | null | null | null | classifiers/chapter7/__init__.py | fulequn/DLAction | da2ff080f7a65f89010a5829b86fc1b45beb9dc8 | [
"MIT"
] | null | null | null | classifiers/chapter7/__init__.py | fulequn/DLAction | da2ff080f7a65f89010a5829b86fc1b45beb9dc8 | [
"MIT"
] | null | null | null | from .layers import *
from .dropout_layers import *
from .updater import *
from .bn_layers import *
from .image_utils import *
from .rnn import *
from .rnn_layers import *
from .cnn_layers import *
from .coco_utils import *
from .captioning_trainer import *
| 23.454545 | 33 | 0.767442 |
87b5f015768e37b598a9a02cad758182d184e447 | 945 | py | Python | simulation/python_standard_lib/test/support/logging_helper.py | john-grando/pyExpandObjects | c08b1d1bc45684bc71c0f49b4d2f22c707cd4aa4 | [
"BSD-3-Clause"
] | 1 | 2020-10-25T16:33:22.000Z | 2020-10-25T16:33:22.000Z | Lib/test/support/logging_helper.py | Krrishdhaneja/cpython | 9ae9ad8ba35cdcece7ded73cd2207e4f8cb85578 | [
"0BSD"
] | 1 | 2021-02-03T01:56:56.000Z | 2021-02-03T01:56:56.000Z | Lib/test/support/logging_helper.py | Krrishdhaneja/cpython | 9ae9ad8ba35cdcece7ded73cd2207e4f8cb85578 | [
"0BSD"
] | 1 | 2022-01-11T18:31:05.000Z | 2022-01-11T18:31:05.000Z | import logging.handlers
class TestHandler(logging.handlers.BufferingHandler):
def __init__(self, matcher):
# BufferingHandler takes a "capacity" argument
# so as to know when to flush. As we're overriding
# shouldFlush anyway, we can set a capacity of zero.
# You can call flush() manually to clear out the
# buffer.
logging.handlers.BufferingHandler.__init__(self, 0)
self.matcher = matcher
def shouldFlush(self):
return False
def emit(self, record):
self.format(record)
self.buffer.append(record.__dict__)
def matches(self, **kwargs):
"""
Look for a saved dict whose keys/values match the supplied arguments.
"""
result = False
for d in self.buffer:
if self.matcher.matches(d, **kwargs):
result = True
break
return result
| 31.5 | 78 | 0.591534 |
1952a63d3c02fce1e1eafacfae6d42440cd5f221 | 4,365 | py | Python | computer-vision/worker.py | vivCoding/healtheye | 159d5db62773f75bd695eb1eafd80e1ca802ab46 | [
"MIT"
] | 3 | 2021-04-20T14:19:49.000Z | 2022-03-15T09:30:58.000Z | computer-vision/worker.py | vivCoding/healtheye | 159d5db62773f75bd695eb1eafd80e1ca802ab46 | [
"MIT"
] | null | null | null | computer-vision/worker.py | vivCoding/healtheye | 159d5db62773f75bd695eb1eafd80e1ca802ab46 | [
"MIT"
] | 2 | 2021-04-08T19:37:16.000Z | 2021-06-23T00:59:56.000Z | import queue
import threading
import time
import cv2
import os
import shutil
from draw_detections import draw_objects
import requests
import json
import datetime
class Worker:
def __init__(self, vision, frame_delay=-1):
self.vision = vision
self.frame_delay = frame_delay
self._queue = queue.Queue()
self._thread = threading.Thread(target=self.process_frames)
self._imshow_queue = queue.Queue()
self._dbqueue = queue.Queue()
self._dbthread = threading.Thread(target=self.send_to_db)
self.running = False
self._transactions = 0
self.temp_folder = "temp"
self.max_temp = 30
def add_frame(self, frame):
self._queue.put(frame)
# print ("added frame")
self.start()
self.show_frames()
def process_frames(self):
if not os.path.exists(self.temp_folder):
os.mkdir(self.temp_folder)
else:
shutil.rmtree(self.temp_folder)
os.mkdir(self.temp_folder)
while self.running:
frame = self._queue.get()
file_path = os.path.join(self.temp_folder, str(self._transactions) + ".png")
cv2.imwrite(file_path, frame)
# predictions = []
# people_count = 0
# violations = 0
predictions, people_count, violations = self.vision.analyzeFrame(file_path)
print ("Process:", self._transactions, ", Queued:", self._queue.qsize(), ", People:", people_count, ", Violations:", violations, end="\r")
self._transactions += 1
self._imshow_queue.put([frame, predictions])
self._dbqueue.put([people_count, violations])
if self._transactions >= self.max_temp:
shutil.rmtree(self.temp_folder)
os.mkdir(self.temp_folder)
def start(self):
if not self.running:
self.running = True
self._thread.start()
self._dbthread.start()
def stop(self):
self.running = False
self._thread.join()
self._dbthread.join()
print ("\n")
def join(self):
while self._queue.qsize() > 0 or self._imshow_queue.qsize() > 0 or self._dbqueue.qsize() > 0:
self.show_frames()
# print ("\n", self._dbqueue.qsize(), self._imshow_queue.qsize())
time.sleep(self.frame_delay)
self.running = False
self._thread.join()
self._dbthread.join()
print ("\n")
def show_frames(self):
if self._imshow_queue.qsize() > 0:
data = self._imshow_queue.get()
frame = data[0]
people = data[1]
draw_objects(frame, people)
def send_to_db(self):
location_name = os.getenv("LOCATION_NAME", "no location specified")
location_latitude = os.getenv("LOCATION_LAT", 0)
location_longitude = os.getenv("LOCATION_LONG", 0)
simulated = os.getenv("SIMULATED", "false") == "true"
send_to_db = os.getenv("SEND_TO_DB", "false") == "true"
db_endpoint = os.getenv("DB_ENDPOINT", "")
if simulated:
current_hour = 12
current_min = 0
while self.running and send_to_db:
toadd = self._dbqueue.get()
# this is just to simulate real data
if simulated:
current_min += 1
                if current_min >= 60:
                    current_min = 0
                    current_hour += 1
                if current_hour >= 24:
                    current_hour = 0
new_time = datetime.datetime(2021, 4, 1, current_hour, current_min, 21).strftime("%Y-%m-%d %H:%M:%S")
data = {
"people": toadd[0],
"violations": toadd[1],
"time": new_time if simulated else time.strftime("%Y-%m-%d %H:%M:%S"),
"location": {
"name": location_name,
"latitude": location_latitude,
"longitude": location_longitude
}
}
r = requests.post(db_endpoint, json=data)
resp = r.json()
if r.status_code != 200:
print ("error sending to database!")
elif resp.get("status", "error") != "ok":
print ("\n", resp.get("status", "error")) | 36.680672 | 150 | 0.555097 |
b20f3a810c36abc4aae095ecb403606465996519 | 3,338 | py | Python | model.py | JWSoh/DUBD | af2230e947870eebeae80f913b11c74b8ba162cd | [
"MIT"
] | 33 | 2021-01-19T05:14:45.000Z | 2022-03-31T09:38:21.000Z | model.py | JWSoh/DUBD | af2230e947870eebeae80f913b11c74b8ba162cd | [
"MIT"
] | 3 | 2021-01-19T08:07:03.000Z | 2021-08-23T07:26:14.000Z | model.py | JWSoh/DUBD | af2230e947870eebeae80f913b11c74b8ba162cd | [
"MIT"
] | 3 | 2021-03-06T10:11:40.000Z | 2021-12-04T09:28:42.000Z | from ops import *
class Denoiser(object):
def __init__(self, x, sigma, name, reuse=False):
self.input = x
self.sigma= sigma
self.name = name
self.reuse = reuse
self.noise_encoder()
self.build_model()
def build_model(self):
print('Build Model {}'.format(self.name))
with tf.variable_scope(self.name, reuse=self.reuse):
self.conv1 = conv2d(self.input, 64, [3, 3], scope='conv1', activation=None)
self.head = self.conv1
for idx in range(5):
self.head = self.RIRblock(self.head, 5, 'RIRBlock' + repr(idx))
self.conv2 = conv2d(self.head, 64, [3, 3], scope='conv2', activation=None)
self.residual = tf.add(self.conv1, self.conv2)
self.conv3= conv2d(self.residual, 3, [3, 3], scope='conv3', activation=None)
self.output = tf.add(self.conv3, self.input)
tf.add_to_collection('InNOut', self.input)
tf.add_to_collection('InNOut', self.output)
def RIRblock(self, x, num, scope):
with tf.variable_scope(scope):
head = x
for idx in range(num):
head = self.resblock(head, 'RBlock' + repr(idx))
out = conv2d(head, 64, [3, 3], scope='conv_out')
out = out*self.gamma + self.beta
return tf.add(out, x)
def resblock(self, x, scope):
with tf.variable_scope(scope):
net1 = conv2d(x, 64, [3, 3], dilation=1, scope='conv1', activation='ReLU')
out = conv2d(net1, 64, [3, 3], dilation=1, scope='conv2', activation=None)
return tf.add(out, x)
def noise_encoder(self):
with tf.variable_scope('Noise_ENC'):
net = conv2d(self.sigma, 128, [1,1], scope='linear', activation= 'ReLU')
self.gamma = conv2d(net, 64,[1,1], scope='gamma', activation =None)
self.beta = conv2d(net, 64,[1,1], scope='beta', activation =None)
class Estimator(object):
def __init__(self, x, name, reuse=False):
self.input = x
self.name = name
self.reuse = reuse
self.build_model()
def build_model(self):
print('Build Model {}'.format(self.name))
with tf.variable_scope(self.name, reuse=self.reuse):
self.net = conv2d(self.input, 64, [3, 3], strides=2, dilation=1, scope='conv1', activation=None)
self.net = tf.nn.relu(self.net)
self.net = conv2d(self.net, 64, [3, 3], strides=1, dilation=1,scope='conv2', activation=None)
self.net = tf.nn.relu(self.net)
self.net = conv2d(self.net, 64, [3, 3], strides=2, dilation=1, scope='conv3', activation=None)
self.net = tf.nn.relu(self.net)
self.net1 = conv2d(self.net, 64, [3, 3], strides=1, dilation=1, scope='conv4', activation=None)
self.net = tf.nn.relu(self.net1)
self.net2 = conv2d(self.net, 64, [3, 3], strides=1, dilation=1, scope='conv5', activation=None)
self.net = tf.nn.relu(self.net2)
self.net = conv2d(self.net, 3, [3, 3], dilation=1, scope='conv_out', activation=None)
self.output=tf.image.resize_bilinear(self.net, tf.shape(self.input)[1:-1])
tf.add_to_collection('InNOut', self.input)
tf.add_to_collection('InNOut', self.output) | 37.505618 | 108 | 0.581186 |
3269048563011c3524581dd4a07d13bc2c11e3dd | 3,273 | py | Python | s2vegetation/download_data.py | balakumaran247/S2_Vegetation | 402844869e5bb54a556d8b8481b959bdcc4f733e | [
"MIT"
] | 2 | 2021-07-18T13:12:08.000Z | 2021-10-04T18:06:22.000Z | s2vegetation/download_data.py | balakumaran247/S2_Vegetation | 402844869e5bb54a556d8b8481b959bdcc4f733e | [
"MIT"
] | null | null | null | s2vegetation/download_data.py | balakumaran247/S2_Vegetation | 402844869e5bb54a556d8b8481b959bdcc4f733e | [
"MIT"
] | 1 | 2021-07-18T13:12:11.000Z | 2021-07-18T13:12:11.000Z | from sentinelsat import SentinelAPI
import zipfile
import os, sys, shutil
def check_login(username, password, latitude, longitude):
''' Checks the login and location details, SentinelAPI queries the Copernicus database '''
if username==None or password==None:
print('\n Enter Login Details for the Copernicus SciHub\n if not registered, go to:\n https://scihub.copernicus.eu\n')
username = input('\n Username: ')
password = input('\n Password: ')
print('\n')
if latitude==None or longitude==None:
print('Latitude and Longitude in decimal degrees\n')
try:
latitude = float(input('\n Latitude: '))
longitude = float(input('\n Longitude: '))
except:
print('\n Latitude and Longitude are to be entered in decimal degrees\n Program Terminated\n')
sys.exit()
print('\n')
try:
api = SentinelAPI(username,
password, 'https://scihub.copernicus.eu/dhus')
        # WKT coordinate order is (longitude latitude), i.e. (x y)
        footprint = f'POINT ({longitude} {latitude})'
data_populate = api.query(footprint, date=('NOW-12MONTHS','NOW'),
order_by='cloudcoverpercentage',
platformname='Sentinel-2',
processinglevel='Level-2A',
cloudcoverpercentage=(0,30))
data_database = api.to_geodataframe(data_populate)
return api, data_database
except:
print('\n Incorrect Login Details.\n Program Terminated.')
sys.exit()
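# Illustrative call (added for clarity); the credentials and coordinates below
# are placeholders, not real values:
#   api, products = check_login('scihub_user', 'scihub_pass', 12.97, 77.59)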
def download_data (username, password, latitude, longitude):
''' download the lowest cloudcoverpercentage tile and extract to data directory'''
api, data_database = check_login(username, password, latitude, longitude)
data_database_sorted = data_database.sort_values('cloudcoverpercentage',
ascending=True).reset_index()
# clearing data directory contents before download
for filename in os.listdir(os.path.join('.', 'data')):
if filename != '.gitignore':
file_path = os.path.join('.','data',filename)
if os.path.isfile(file_path) or os.path.islink(file_path):
os.unlink(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
for item in range(len(data_database_sorted)):
try:
print("\n Fetching " +data_database_sorted['index'].iloc[item]+ " from SciHub...\n")
data_download = api.download(data_database_sorted['index'].iloc[item],
directory_path=os.path.join('.', 'data'))
print("\ndownload complete!\n")
break
except:
continue
# extracting the downloaded file
try:
zip = zipfile.ZipFile(os.path.join('.', 'data',
data_download['title'] + '.zip'))
zip.extractall(os.path.join('.', 'data'))
except:
print('\n Data could not be retrieved.\n Program Terminated.\n')
sys.exit()
return data_download
| 42.506494 | 127 | 0.563397 |
ce4a58162c7f3a0d9205cc8da413858b8c379735 | 4,180 | py | Python | homeassistant/components/daikin/__init__.py | shanbs/home-assistant | 818776d2b4f11e4f51992dc88bc0a6f9055833b2 | [
"Apache-2.0"
] | 1 | 2019-02-18T03:16:32.000Z | 2019-02-18T03:16:32.000Z | homeassistant/components/daikin/__init__.py | shanbs/home-assistant | 818776d2b4f11e4f51992dc88bc0a6f9055833b2 | [
"Apache-2.0"
] | 3 | 2021-09-08T03:29:36.000Z | 2022-03-12T00:59:48.000Z | homeassistant/components/daikin/__init__.py | shanbs/home-assistant | 818776d2b4f11e4f51992dc88bc0a6f9055833b2 | [
"Apache-2.0"
] | 1 | 2019-09-28T07:06:08.000Z | 2019-09-28T07:06:08.000Z | """Platform for the Daikin AC."""
import asyncio
from datetime import timedelta
import logging
from socket import timeout
import async_timeout
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_HOSTS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util import Throttle
from . import config_flow  # noqa: F401 pylint: disable=unused-import
from .const import KEY_HOST
REQUIREMENTS = ['pydaikin==0.9']
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'daikin'
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
COMPONENT_TYPES = ['climate', 'sensor']
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(
CONF_HOSTS, default=[]
): vol.All(cv.ensure_list, [cv.string]),
})
}, extra=vol.ALLOW_EXTRA)
async def async_setup(hass, config):
"""Establish connection with Daikin."""
if DOMAIN not in config:
return True
hosts = config[DOMAIN].get(CONF_HOSTS)
if not hosts:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={'source': SOURCE_IMPORT}))
for host in hosts:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={'source': SOURCE_IMPORT},
data={
KEY_HOST: host,
}))
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Establish connection with Daikin."""
conf = entry.data
daikin_api = await daikin_api_setup(hass, conf[KEY_HOST])
if not daikin_api:
return False
hass.data.setdefault(DOMAIN, {}).update({entry.entry_id: daikin_api})
await asyncio.wait([
hass.config_entries.async_forward_entry_setup(entry, component)
for component in COMPONENT_TYPES
])
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
await asyncio.wait([
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in COMPONENT_TYPES
])
hass.data[DOMAIN].pop(config_entry.entry_id)
if not hass.data[DOMAIN]:
hass.data.pop(DOMAIN)
return True
async def daikin_api_setup(hass, host):
"""Create a Daikin instance only once."""
from pydaikin.appliance import Appliance
try:
with async_timeout.timeout(10):
device = await hass.async_add_executor_job(Appliance, host)
except asyncio.TimeoutError:
_LOGGER.error("Connection to Daikin could not be established")
return None
except Exception: # pylint: disable=broad-except
_LOGGER.error("Unexpected error creating device")
return None
name = device.values['name']
api = DaikinApi(device, name)
return api
class DaikinApi:
"""Keep the Daikin instance in one place and centralize the update."""
def __init__(self, device, name):
"""Initialize the Daikin Handle."""
self.device = device
self.name = name
self.ip_address = device.ip
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self, **kwargs):
"""Pull the latest data from Daikin."""
try:
self.device.update_status()
except timeout:
_LOGGER.warning(
"Connection failed for %s", self.ip_address
)
@property
def mac(self):
"""Return mac-address of device."""
return self.device.values.get(CONNECTION_NETWORK_MAC)
@property
def device_info(self):
"""Return a device description for device registry."""
info = self.device.values
return {
'connections': {(CONNECTION_NETWORK_MAC, self.mac)},
            'identifiers': self.mac,
'manufacturer': 'Daikin',
'model': info.get('model'),
'name': info.get('name'),
'sw_version': info.get('ver').replace('_', '.'),
}
| 29.43662 | 79 | 0.656699 |
24634911d739b060d6fd93f0f8f0196e1d4667c3 | 2,770 | py | Python | cvtools/io.py | sineatos/SpatialPyramidMatchExp | 8135cb3be5f96097143a84931bee676e75ea2e2b | [
"Apache-2.0"
] | null | null | null | cvtools/io.py | sineatos/SpatialPyramidMatchExp | 8135cb3be5f96097143a84931bee676e75ea2e2b | [
"Apache-2.0"
] | null | null | null | cvtools/io.py | sineatos/SpatialPyramidMatchExp | 8135cb3be5f96097143a84931bee676e75ea2e2b | [
"Apache-2.0"
] | null | null | null | # -*- encoding:UTF-8 -*-
"""
该模块包含一些读写相关的函数
"""
import pickle
import os
import os.path as ospath
import re
from PIL import Image
import cv2
def save_data(data, path_prefix="data", filename="data.bin", mode="wb"):
"""保存数据
:param data: 数据对象
:param path_prefix: 保存的目录名
:param filename: 数据的文件名
:param mode: 写模式
:return: 如果保存成功返回文件路径
"""
os.makedirs(path_prefix, exist_ok=True)
full_filename = ospath.join(path_prefix, filename)
with open(full_filename, mode) as f:
pickle.dump(data, f)
return full_filename
def load_data(path_prefix="data", filename="data.bin", mode="rb"):
"""导入数据
:param path_prefix: 保存的目录名
:param filename: 数据的文件名
:param mode: 读模式
:return: 返回数据对象
"""
full_filename = ospath.join(path_prefix, filename)
with open(full_filename, mode) as f:
return pickle.load(f)
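# Minimal round-trip sketch (added for illustration); "features" stands in for
# any picklable object:
#   path = save_data(features, path_prefix='data', filename='features.bin')
#   features = load_data(path_prefix='data', filename='features.bin')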
def load_pil_images(folder_path, suffixes=('.jpg', '.png',), recursive=False):
    """Load all images in a directory.
    :param folder_path: directory to read images from
    :param suffixes: tuple of accepted image file extensions (with leading dot)
    :param recursive: whether to read sub-directories recursively, default False
    :return: a list of PIL.Image objects
    """
    images = []
    for root, dirs, files in os.walk(folder_path):
        for file in files:
            pre, suf = ospath.splitext(file)
            if suf in suffixes:
                # splitext keeps the leading dot; join the file name with its
                # directory before opening it.
                image = Image.open(ospath.join(root, file))
                images.append(image)
        if not recursive:
            break
    return images
def get_images_name(folder_path, suffixes=('.jpg', '.png',), recursive=False):
"""迭代读入一个目录中的所有图片的路径。
:param folder_path: 要读取图片的目录
:param suffixes: 接受图片的后缀名元组
:param recursive: 是否递归读取,默认否
:return: 一个迭代器,迭代的时候,每一次返回一个图片的路径名
"""
for root, dirs, files in os.walk(folder_path):
for file in files:
pre, suf = ospath.splitext(file)
if suf in suffixes:
yield ospath.join(root, file)
if not recursive:
break
def get_image_label_in_filename(paths, label_re=r'^(.*)_.*$'):
"""
    Extract image labels from file names. The method first gets the file name
    from each path with os.path.basename, then pulls the label out of the file
    name with a regular expression.
    Note: the extracted label is the content of the first capture group.
    :param paths: list of file paths
    :param label_re: regex string; by default file names follow "label_other-text.suffix"
    :return: list of image labels
"""
labels = []
for path in paths:
filename = ospath.basename(path)
mo = re.match(label_re, filename)
labels.append(mo.group(1))
return labels
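# Worked example (added for clarity): with the default pattern r'^(.*)_.*$',
# a path such as 'dataset/cat_001.jpg' yields the label 'cat':
#   get_image_label_in_filename(['dataset/cat_001.jpg'])  ->  ['cat']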
def load_image2ndarray(paths):
"""
    Load each image from its path and convert it to a numpy.ndarray; the input
    is an iterable of image paths.
    :param paths: list of image paths
    :return: list of numpy.ndarray objects
"""
return [cv2.imread(path) for path in paths]
__all__ = ['save_data', 'load_data', 'load_pil_images', 'get_images_name', 'get_image_label_in_filename',
'load_image2ndarray']
| 26.893204 | 105 | 0.641877 |
ebb9f9eb66862084e42842b4121b5fad961ca251 | 13,325 | py | Python | muskit/tasks/svs.py | pppku/Muskits | 9f430db6cd3718e210a72df016084e63907f2559 | [
"Apache-2.0"
] | null | null | null | muskit/tasks/svs.py | pppku/Muskits | 9f430db6cd3718e210a72df016084e63907f2559 | [
"Apache-2.0"
] | null | null | null | muskit/tasks/svs.py | pppku/Muskits | 9f430db6cd3718e210a72df016084e63907f2559 | [
"Apache-2.0"
] | null | null | null | import argparse
import logging
from typing import Callable
from typing import Collection
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
import numpy as np
import torch
from typeguard import check_argument_types
from typeguard import check_return_type
from muskit.layers.abs_normalize import AbsNormalize
from muskit.layers.global_mvn import GlobalMVN
from muskit.tasks.abs_task import AbsTask
from muskit.train.class_choices import ClassChoices
from muskit.train.collate_fn import CommonCollateFn
from muskit.train.preprocessor import CommonPreprocessor
from muskit.train.trainer import Trainer
from muskit.svs.abs_svs import AbsSVS
from muskit.svs.muskit_model import MuskitSVSModel
from muskit.svs.feats_extract.abs_feats_extract import AbsFeatsExtract
from muskit.svs.feats_extract.dio import Dio
from muskit.svs.feats_extract.score_feats_extract import FrameLabelAggregate
from muskit.svs.feats_extract.energy import Energy
from muskit.svs.feats_extract.log_mel_fbank import LogMelFbank
from muskit.svs.feats_extract.log_spectrogram import LogSpectrogram
from muskit.svs.encoder_decoder.transformer.transformer import Transformer
from muskit.svs.bytesing.bytesing import ByteSing
from muskit.svs.naive_rnn.naive_rnn import NaiveRNN
from muskit.utils.get_default_kwargs import get_default_kwargs
from muskit.utils.nested_dict_action import NestedDictAction
from muskit.utils.types import int_or_none
from muskit.utils.types import str2bool
from muskit.utils.types import str_or_none
feats_extractor_choices = ClassChoices(
"feats_extract",
classes=dict(fbank=LogMelFbank, spectrogram=LogSpectrogram),
type_check=AbsFeatsExtract,
default="fbank",
)
score_feats_extractor_choices = ClassChoices(
"score_feats_extract",
classes=dict(score_feats_extract=FrameLabelAggregate),
type_check=AbsFeatsExtract,
default="fbank",
)
pitch_extractor_choices = ClassChoices(
"pitch_extract",
classes=dict(dio=Dio),
type_check=AbsFeatsExtract,
default=None,
optional=True,
)
energy_extractor_choices = ClassChoices(
"energy_extract",
classes=dict(energy=Energy),
type_check=AbsFeatsExtract,
default=None,
optional=True,
)
normalize_choices = ClassChoices(
"normalize",
classes=dict(global_mvn=GlobalMVN),
type_check=AbsNormalize,
default="global_mvn",
optional=True,
)
pitch_normalize_choices = ClassChoices(
"pitch_normalize",
classes=dict(global_mvn=GlobalMVN),
type_check=AbsNormalize,
default=None,
optional=True,
)
energy_normalize_choices = ClassChoices(
"energy_normalize",
classes=dict(global_mvn=GlobalMVN),
type_check=AbsNormalize,
default=None,
optional=True,
)
svs_choices = ClassChoices(
"svs",
classes=dict(
transformer=Transformer,
bytesing=ByteSing,
naive_rnn=NaiveRNN,
),
type_check=AbsSVS,
default="transformer",
)
class SVSTask(AbsTask):
num_optimizers: int = 1
# Add variable objects configurations
class_choices_list = [
# --score_extractor and --score_extractor_conf
score_feats_extractor_choices,
# --feats_extractor and --feats_extractor_conf
feats_extractor_choices,
# --normalize and --normalize_conf
normalize_choices,
# --svs and --svs_conf
svs_choices,
# --pitch_extract and --pitch_extract_conf
pitch_extractor_choices,
# --pitch_normalize and --pitch_normalize_conf
pitch_normalize_choices,
# --energy_extract and --energy_extract_conf
energy_extractor_choices,
# --energy_normalize and --energy_normalize_conf
energy_normalize_choices,
]
# If you need to modify train() or eval() procedures, change Trainer class here
trainer = Trainer
@classmethod
def add_task_arguments(cls, parser: argparse.ArgumentParser):
# NOTE(kamo): Use '_' instead of '-' to avoid confusion
assert check_argument_types()
group = parser.add_argument_group(description="Task related")
# NOTE(kamo): add_arguments(..., required=True) can't be used
# to provide --print_config mode. Instead of it, do as
required = parser.get_default("required")
required += ["token_list"]
group.add_argument(
"--token_list",
type=str_or_none,
default=None,
help="A text mapping int-id to token",
)
group.add_argument(
"--odim",
type=int_or_none,
default=None,
help="The number of dimension of output feature",
)
group.add_argument(
"--model_conf",
action=NestedDictAction,
default=get_default_kwargs(MuskitSVSModel),
help="The keyword arguments for model class.",
)
group = parser.add_argument_group(description="Preprocess related")
group.add_argument(
"--use_preprocessor",
type=str2bool,
default=True,
help="Apply preprocessing to data or not",
)
group.add_argument(
"--token_type",
type=str,
default="phn",
choices=["bpe", "char", "word", "phn"],
help="The text will be tokenized in the specified level token",
)
group.add_argument(
"--bpemodel",
type=str_or_none,
default=None,
help="The model file of sentencepiece",
)
parser.add_argument(
"--non_linguistic_symbols",
type=str_or_none,
help="non_linguistic_symbols file path",
)
parser.add_argument(
"--cleaner",
type=str_or_none,
choices=[None, "tacotron", "jaconv", "vietnamese"],
default=None,
help="Apply text cleaning",
)
parser.add_argument(
"--g2p",
type=str_or_none,
choices=[
None,
"g2p_en",
"g2p_en_no_space",
"pyopenjtalk",
"pyopenjtalk_kana",
"pyopenjtalk_accent",
"pyopenjtalk_accent_with_pause",
"pypinyin_g2p",
"pypinyin_g2p_phone",
"espeak_ng_arabic",
],
default=None,
help="Specify g2p method if --token_type=phn",
)
parser.add_argument(
"--fs",
type=int,
default=16000,
help="sample rate",
)
for class_choices in cls.class_choices_list:
# Append --<name> and --<name>_conf.
# e.g. --encoder and --encoder_conf
class_choices.add_arguments(group)
@classmethod
def build_collate_fn(
cls, args: argparse.Namespace, train: bool
) -> Callable[
[Collection[Tuple[str, Dict[str, np.ndarray]]]],
Tuple[List[str], Dict[str, torch.Tensor]],
]:
assert check_argument_types()
return CommonCollateFn(
float_pad_value=0.0, int_pad_value=0, not_sequence=["spembs"]
)
@classmethod
def build_preprocess_fn(
cls, args: argparse.Namespace, train: bool
) -> Optional[Callable[[str, Dict[str, np.array]], Dict[str, np.ndarray]]]:
assert check_argument_types()
if args.use_preprocessor:
retval = CommonPreprocessor(
train=train,
token_type=args.token_type,
token_list=args.token_list,
bpemodel=args.bpemodel,
non_linguistic_symbols=args.non_linguistic_symbols,
text_cleaner=args.cleaner,
g2p_type=args.g2p,
fs=args.fs,
)
else:
retval = None
assert check_return_type(retval)
return retval
@classmethod
def required_data_names(
cls, train: bool = True, inference: bool = False
) -> Tuple[str, ...]:
if not inference:
retval = ("text", "singing", "midi", "label")
else:
# Inference mode
retval = ("text", "midi", "label")
return retval
@classmethod
def optional_data_names(
cls, train: bool = True, inference: bool = False
) -> Tuple[str, ...]:
if not inference:
retval = ("spembs", "durations", "pitch", "energy")
else:
# Inference mode
retval = ("spembs", "singing", "durations")
return retval
@classmethod
def build_model(cls, args: argparse.Namespace) -> MuskitSVSModel:
assert check_argument_types()
if isinstance(args.token_list, str):
with open(args.token_list, encoding="utf-8") as f:
token_list = [line.rstrip() for line in f]
# "args" is saved as it is in a yaml file by BaseTask.main().
# Overwriting token_list to keep it as "portable".
args.token_list = token_list.copy()
elif isinstance(args.token_list, (tuple, list)):
token_list = args.token_list.copy()
else:
raise RuntimeError("token_list must be str or dict")
vocab_size = len(token_list)
logging.info(f"Vocabulary size: {vocab_size }")
# 1. feats_extract
if args.odim is None:
# Extract features in the model
feats_extract_class = feats_extractor_choices.get_class(args.feats_extract)
feats_extract = feats_extract_class(**args.feats_extract_conf)
odim = feats_extract.output_size()
else:
# Give features from data-loader
args.feats_extract = None
args.feats_extract_conf = None
feats_extract = None
odim = args.odim
# 2. Normalization layer
if args.normalize is not None:
normalize_class = normalize_choices.get_class(args.normalize)
normalize = normalize_class(**args.normalize_conf)
else:
normalize = None
# 3. SVS
svs_class = svs_choices.get_class(args.svs)
svs = svs_class(idim=vocab_size, odim=odim, **args.svs_conf)
# 4. Extra components
score_feats_extract = None
pitch_extract = None
energy_extract = None
pitch_normalize = None
energy_normalize = None
# logging.info(f'args.pitch_extract:{args.pitch_extract}')
if getattr(args, "score_feats_extract", None) is not None:
score_feats_extract_class = score_feats_extractor_choices.get_class(args.score_feats_extract)
score_feats_extract = score_feats_extract_class(**args.score_feats_extract_conf)
if getattr(args, "pitch_extract", None) is not None:
pitch_extract_class = pitch_extractor_choices.get_class(args.pitch_extract)
if args.pitch_extract_conf.get("reduction_factor", None) is not None:
assert args.pitch_extract_conf.get(
"reduction_factor", None
) == args.svs_conf.get("reduction_factor", 1)
else:
args.pitch_extract_conf["reduction_factor"] = args.svs_conf.get(
"reduction_factor", 1
)
pitch_extract = pitch_extract_class(**args.pitch_extract_conf)
# logging.info(f'pitch_extract:{pitch_extract}')
if getattr(args, "energy_extract", None) is not None:
if args.energy_extract_conf.get("reduction_factor", None) is not None:
assert args.energy_extract_conf.get(
"reduction_factor", None
) == args.svs_conf.get("reduction_factor", 1)
else:
args.energy_extract_conf["reduction_factor"] = args.svs_conf.get(
"reduction_factor", 1
)
energy_extract_class = energy_extractor_choices.get_class(
args.energy_extract
)
energy_extract = energy_extract_class(**args.energy_extract_conf)
if getattr(args, "pitch_normalize", None) is not None:
pitch_normalize_class = pitch_normalize_choices.get_class(
args.pitch_normalize
)
pitch_normalize = pitch_normalize_class(**args.pitch_normalize_conf)
if getattr(args, "energy_normalize", None) is not None:
energy_normalize_class = energy_normalize_choices.get_class(
args.energy_normalize
)
energy_normalize = energy_normalize_class(**args.energy_normalize_conf)
# 5. Build model
model = MuskitSVSModel(
text_extract=score_feats_extract,
feats_extract=feats_extract,
score_feats_extract=score_feats_extract,
durations_extract=score_feats_extract,
pitch_extract=pitch_extract,
tempo_extract=score_feats_extract,
energy_extract=energy_extract,
normalize=normalize,
pitch_normalize=pitch_normalize,
energy_normalize=energy_normalize,
svs=svs,
**args.model_conf,
)
assert check_return_type(model)
return model
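# Typical entry point for this task (a sketch; the actual command-line driver
# is inherited from the task base class, cf. the BaseTask.main() note above):
#
#     if __name__ == "__main__":
#         SVSTask.main()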
| 35.344828 | 105 | 0.630919 |
d0f52735d827a973a418d0d16af6c9f36ec80881 | 279 | py | Python | publictitles/health.py | LandRegistry/public-titles | 1d52e5dd80e4632d98f40356262819bbf5c907ed | [
"MIT"
] | null | null | null | publictitles/health.py | LandRegistry/public-titles | 1d52e5dd80e4632d98f40356262819bbf5c907ed | [
"MIT"
] | 1 | 2021-06-01T22:00:40.000Z | 2021-06-01T22:00:40.000Z | publictitles/health.py | LandRegistry/public-titles | 1d52e5dd80e4632d98f40356262819bbf5c907ed | [
"MIT"
] | 1 | 2021-04-11T06:06:27.000Z | 2021-04-11T06:06:27.000Z | from healthcheck import HealthCheck
class Health(object):
def __init__(self, app, endpoint='/health', checks=[]):
self.health = HealthCheck(app, endpoint)
# extra health checks
[self.health.add_check(check) for check in checks if callable(check)]
| 25.363636 | 77 | 0.684588 |
34ceaa092df8f54b0f45918190f83280f35b2d2f | 546 | py | Python | problems/balanced-brackets/balanced-brackets2.py | vidyadeepa/the-coding-interview | 90171b77b6884176a6c28bdccb5d45bd6929b489 | [
"MIT"
] | 1,571 | 2015-12-09T14:08:47.000Z | 2022-03-30T21:34:36.000Z | problems/balanced-brackets/balanced-brackets2.py | vidyadeepa/the-coding-interview | 90171b77b6884176a6c28bdccb5d45bd6929b489 | [
"MIT"
] | 117 | 2015-10-22T05:59:19.000Z | 2021-09-17T00:14:38.000Z | problems/balanced-brackets/balanced-brackets2.py | vidyadeepa/the-coding-interview | 90171b77b6884176a6c28bdccb5d45bd6929b489 | [
"MIT"
] | 452 | 2015-10-21T23:00:58.000Z | 2022-03-18T21:16:50.000Z | def bb(s):
"""
Use a stack to keep track of the brackets, yo!
Runtime: O(n)
"""
brackets = []
matching = {")":"(", "]":"[", "}":"{"}
for p in s:
if p in matching.values():
brackets.append(p)
else:
try:
top = brackets[-1]
if top == matching[p]:
brackets.pop()
except:
return False
return not brackets
print bb('()[]{}(([])){[()][]}') # True
print bb('())[]{}') # False
print bb('[(])') # False
| 23.73913 | 50 | 0.40293 |
1ce9aa8d98ba548ca309153d76d1c359e21e5874 | 1,108 | py | Python | script.py | Dishtermaster/AMR-Url_Uploader | fbf746a6ae3c56b1b88e92c44c3cc9fd2938e0bd | [
"MIT"
] | 1 | 2021-11-08T04:39:11.000Z | 2021-11-08T04:39:11.000Z | script.py | Dishtermaster/AMR-Url_Uploader | fbf746a6ae3c56b1b88e92c44c3cc9fd2938e0bd | [
"MIT"
] | null | null | null | script.py | Dishtermaster/AMR-Url_Uploader | fbf746a6ae3c56b1b88e92c44c3cc9fd2938e0bd | [
"MIT"
] | 7 | 2021-07-14T09:49:07.000Z | 2022-02-08T14:44:28.000Z | class script(object):
START_TEXT = ""
HELP_USER = ""
ABOUT_TEXT = ""
FORMAT_SELECTION = """<b>Choose appropriate option</b> <a href='{}'>⬇️</a>
🎞 - Stream format
📁 - File format
<i>NOTE : Taking high resolutions may result in files above 2GB and hence cannot Upload to TG. So better select a medium resolution.</i> 😇
"""
UPGRADE_TEXT = "PING at @sherrvish"
DOWNLOAD_START = "Trying to download to my server. This may take a while 😴"
UPLOAD_START = "Uploading Now ⬆️"
RCHD_TG_API_LIMIT = "Downloaded in {} seconds.\nDetected File Size: {}\nSorry. But, I cannot upload files greater than 1.95GB due to Telegram API limitations."
AFTER_SUCCESSFUL_UPLOAD_MSG_WITH_TS = ""
SAVED_CUSTOM_THUMB_NAIL = ""
DEL_ETED_CUSTOM_THUMB_NAIL = ""
SHOW_THUMB = ""
NO_THUMB = "SED😕 No saved thumbnails Found!!"
CUSTOM_CAPTION_UL_FILE = "<b>{newname}\n\n©️ @All_Movie_Rockers</b>"
TIMEOUT = "<b><i>Sorry for the delay. It'll help reduce the flood wait</i> 😇\n\nWait for {} sec and try again.</b>"
| 25.767442 | 163 | 0.636282 |
f9d48c55a8069ef01494b706ecfbfc5cc842be51 | 8,403 | py | Python | py/test/selenium/webdriver/common/executing_async_javascript_tests.py | GQAssurance/selenium | fc93242e17385966cd2ad9088e1044ed6e8bf148 | [
"Apache-2.0"
] | 1 | 2019-09-24T11:34:34.000Z | 2019-09-24T11:34:34.000Z | py/test/selenium/webdriver/common/executing_async_javascript_tests.py | GQAssurance/selenium | fc93242e17385966cd2ad9088e1044ed6e8bf148 | [
"Apache-2.0"
] | null | null | null | py/test/selenium/webdriver/common/executing_async_javascript_tests.py | GQAssurance/selenium | fc93242e17385966cd2ad9088e1044ed6e8bf148 | [
"Apache-2.0"
] | 1 | 2019-09-15T11:54:10.000Z | 2019-09-15T11:54:10.000Z | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from selenium.webdriver.common.by import By
from selenium.common.exceptions import WebDriverException
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.remote.webelement import WebElement
@pytest.fixture(autouse=True)
def reset_timeouts(driver):
driver.set_script_timeout(5)
yield
driver.set_script_timeout(30)
def testShouldNotTimeoutIfCallbackInvokedImmediately(driver, pages):
pages.load("ajaxy_page.html")
result = driver.execute_async_script("arguments[arguments.length - 1](123);")
assert type(result) == int
assert 123 == result
def testShouldBeAbleToReturnJavascriptPrimitivesFromAsyncScripts_NeitherNoneNorUndefined(driver, pages):
pages.load("ajaxy_page.html")
assert 123 == driver.execute_async_script("arguments[arguments.length - 1](123);")
assert "abc" == driver.execute_async_script("arguments[arguments.length - 1]('abc');")
assert not bool(driver.execute_async_script("arguments[arguments.length - 1](false);"))
assert bool(driver.execute_async_script("arguments[arguments.length - 1](true);"))
def testShouldBeAbleToReturnJavascriptPrimitivesFromAsyncScripts_NullAndUndefined(driver, pages):
pages.load("ajaxy_page.html")
assert driver.execute_async_script("arguments[arguments.length - 1](null)") is None
assert driver.execute_async_script("arguments[arguments.length - 1]()") is None
def testShouldBeAbleToReturnAnArrayLiteralFromAnAsyncScript(driver, pages):
pages.load("ajaxy_page.html")
result = driver.execute_async_script("arguments[arguments.length - 1]([]);")
assert "Expected not to be null!", result is not None
assert type(result) == list
assert len(result) == 0
def testShouldBeAbleToReturnAnArrayObjectFromAnAsyncScript(driver, pages):
pages.load("ajaxy_page.html")
result = driver.execute_async_script("arguments[arguments.length - 1](new Array());")
assert "Expected not to be null!", result is not None
assert type(result) == list
assert len(result) == 0
def testShouldBeAbleToReturnArraysOfPrimitivesFromAsyncScripts(driver, pages):
pages.load("ajaxy_page.html")
result = driver.execute_async_script(
"arguments[arguments.length - 1]([null, 123, 'abc', true, false]);")
assert result is not None
assert type(result) == list
assert not bool(result.pop())
assert bool(result.pop())
assert "abc" == result.pop()
assert 123 == result.pop()
assert result.pop() is None
assert len(result) == 0
def testShouldBeAbleToReturnWebElementsFromAsyncScripts(driver, pages):
pages.load("ajaxy_page.html")
result = driver.execute_async_script("arguments[arguments.length - 1](document.body);")
assert isinstance(result, WebElement)
assert "body" == result.tag_name.lower()
def testShouldBeAbleToReturnArraysOfWebElementsFromAsyncScripts(driver, pages):
pages.load("ajaxy_page.html")
result = driver.execute_async_script(
"arguments[arguments.length - 1]([document.body, document.body]);")
assert result is not None
assert type(result) == list
list_ = result
assert 2 == len(list_)
assert isinstance(list_[0], WebElement)
assert isinstance(list_[1], WebElement)
assert "body" == list_[0].tag_name
# assert list_[0] == list_[1]
def testShouldTimeoutIfScriptDoesNotInvokeCallback(driver, pages):
pages.load("ajaxy_page.html")
with pytest.raises(TimeoutException):
# Script is expected to be async and explicitly callback, so this should timeout.
driver.execute_async_script("return 1 + 2;")
def testShouldTimeoutIfScriptDoesNotInvokeCallbackWithAZeroTimeout(driver, pages):
pages.load("ajaxy_page.html")
with pytest.raises(TimeoutException):
driver.execute_async_script("window.setTimeout(function() {}, 0);")
@pytest.mark.xfail_marionette
@pytest.mark.xfail_remote
def testShouldNotTimeoutIfScriptCallsbackInsideAZeroTimeout(driver, pages):
pages.load("ajaxy_page.html")
driver.execute_async_script(
"""var callback = arguments[arguments.length - 1];
window.setTimeout(function() { callback(123); }, 0)""")
def testShouldTimeoutIfScriptDoesNotInvokeCallbackWithLongTimeout(driver, pages):
driver.set_script_timeout(0.5)
pages.load("ajaxy_page.html")
with pytest.raises(TimeoutException):
driver.execute_async_script(
"""var callback = arguments[arguments.length - 1];
window.setTimeout(callback, 1500);""")
def testShouldDetectPageLoadsWhileWaitingOnAnAsyncScriptAndReturnAnError(driver, pages):
pages.load("ajaxy_page.html")
driver.set_script_timeout(0.1)
with pytest.raises(WebDriverException):
url = pages.url("dynamic.html")
driver.execute_async_script("window.location = '{0}';".format(url))
def testShouldCatchErrorsWhenExecutingInitialScript(driver, pages):
pages.load("ajaxy_page.html")
with pytest.raises(WebDriverException):
driver.execute_async_script("throw Error('you should catch this!');")
def testShouldBeAbleToExecuteAsynchronousScripts(driver, pages):
pages.load("ajaxy_page.html")
typer = driver.find_element(by=By.NAME, value="typer")
typer.send_keys("bob")
assert "bob" == typer.get_attribute("value")
driver.find_element(by=By.ID, value="red").click()
driver.find_element(by=By.NAME, value="submit").click()
assert 1 == len(driver.find_elements(by=By.TAG_NAME, value='div')), \
"There should only be 1 DIV at this point, which is used for the butter message"
driver.set_script_timeout(10)
text = driver.execute_async_script(
"""var callback = arguments[arguments.length - 1];
window.registerListener(arguments[arguments.length - 1]);""")
assert "bob" == text
assert "" == typer.get_attribute("value")
assert 2 == len(driver.find_elements(by=By.TAG_NAME, value='div')), \
"There should be 1 DIV (for the butter message) + 1 DIV (for the new label)"
def testShouldBeAbleToPassMultipleArgumentsToAsyncScripts(driver, pages):
pages.load("ajaxy_page.html")
result = driver.execute_async_script("""
arguments[arguments.length - 1](arguments[0] + arguments[1]);""", 1, 2)
assert 3 == result
# TODO DavidBurns Disabled till Java WebServer is used
# def testShouldBeAbleToMakeXMLHttpRequestsAndWaitForTheResponse(driver, pages):
# script = """
# var url = arguments[0];
# var callback = arguments[arguments.length - 1];
# // Adapted from http://www.quirksmode.org/js/xmlhttp.html
# var XMLHttpFactories = [
# function () return new XMLHttpRequest(),
# function () return new ActiveXObject('Msxml2.XMLHTTP'),
# function () return new ActiveXObject('Msxml3.XMLHTTP'),
# function () return new ActiveXObject('Microsoft.XMLHTTP')
# ];
# var xhr = false;
# while (!xhr && XMLHttpFactories.length)
# try{
# xhr = XMLHttpFactories.shift().call();
# }catch (e)
#
# if (!xhr) throw Error('unable to create XHR object');
# xhr.open('GET', url, true);
# xhr.onreadystatechange = function()
# if (xhr.readyState == 4) callback(xhr.responseText);
#
# xhr.send('');""" # empty string to stop firefox 3 from choking
#
# pages.load("ajaxy_page.html")
# driver.set_script_timeout(3)
# response = driver.execute_async_script(script, pages.sleepingPage + "?time=2")
# htm = "<html><head><title>Done</title></head><body>Slept for 2s</body></html>"
# assert response.strip() == htm
| 39.266355 | 104 | 0.715816 |
d0f5ef9f5cd56a162a31acd00e6725819c2d1c9b | 12,256 | py | Python | render_functions.py | matteobarbieri/libtcod-tutorial | 2be59978483d1c754b736a0fe96c9554e9ba8547 | [
"MIT"
] | 1 | 2019-03-09T14:20:51.000Z | 2019-03-09T14:20:51.000Z | render_functions.py | matteobarbieri/libtcod-tutorial | 2be59978483d1c754b736a0fe96c9554e9ba8547 | [
"MIT"
] | null | null | null | render_functions.py | matteobarbieri/libtcod-tutorial | 2be59978483d1c754b736a0fe96c9554e9ba8547 | [
"MIT"
] | null | null | null | # import libtcodpy as libtcod
import tcod as libtcod
from enum import Enum, auto
from game_state import GamePhase
from menus import (
character_screen, inventory_menu, item_submenu)
class RenderOrder(Enum):
STAIRS = auto()
CORPSE = auto()
ITEM = auto()
ACTOR = auto()
def check_if_still_in_sight(fov_map, entity):
"""
    Check if an entity is in the field of view and return it if it is,
    otherwise return None.
"""
if libtcod.map_is_in_fov(fov_map, entity.x, entity.y):
return entity
else:
return None
def get_entity_under_mouse(mouse, entities, fov_map, top_x, top_y):
(x, y) = (mouse.cx, mouse.cy)
entities_list = [
entity for entity in entities if
entity.x == (top_x + x) and # noqa
entity.y == (top_y + y) and # noqa
libtcod.map_is_in_fov(fov_map, entity.x, entity.y)] # noqa
if entities_list:
sorted(entities_list, key=lambda e: e.render_order.value)
return entities_list[-1] # The last one
else:
return None
def get_names_under_mouse(mouse, entities, fov_map, top_x, top_y):
(x, y) = (mouse.cx, mouse.cy)
names = [
entity.name for entity in entities if
entity.x == (top_x + x) and # noqa
entity.y == (top_y + y) and # noqa
libtcod.map_is_in_fov(fov_map, entity.x, entity.y)] # noqa
names = ', '.join(names)
return names.capitalize()
def render_entity_label(terrain_layer, entity, top_x, top_y):
# Print the name of the entity on the top left tile
libtcod.console_put_char(
terrain_layer,
entity.x-top_x-1, entity.y-top_y-1,
'\\', libtcod.BKGND_DEFAULT)
libtcod.console_print_ex(
terrain_layer,
# 0,
# top_x - entity.x - 1, top_y - entity.y - 1,
entity.x - top_x - 1, entity.y - top_y - 2,
libtcod.BKGND_NONE,
libtcod.LEFT,
'{}'.format(entity.name))
def render_entity_frame(entity_frame, entity):
# Draw a rectangle of the background color for the full
# length of the bar
# libtcod.console_set_default_background(entity_frame, libtcod.red)
# libtcod.console_rect(entity_frame, 3, 3, 7, 2,
# False, libtcod.BKGND_SCREEN)
# Extract width and height
w = entity_frame.width
h = entity_frame.height
# Draw frame
entity_frame.draw_frame(
1, 1,
w-2, h-2,
'Info')
# Print the entiy's name
entity_frame.print(
3, 3, '{}'.format(entity.name))
# Draw entity graphics
# TODO
# Mockup for entity detail
# entity_frame.draw_rect(
# 3, 5, 10, 10, 0, bg=libtcod.red)
entity_frame.draw_rect(
3, 5, 10, 10, 0, bg=entity.color)
def render_bar(panel, x, y, total_width,
name, value, maximum,
bar_color, back_color):
# Compute bar width, based on current value and maximum
bar_width = int(float(value) / maximum * total_width)
# Draw a rectangle of the background color for the full
# length of the bar
libtcod.console_set_default_background(panel, back_color)
libtcod.console_rect(panel, x, y, total_width, 1,
False, libtcod.BKGND_SCREEN)
# Now draw the 'active' part of the bar
libtcod.console_set_default_background(panel, bar_color)
if bar_width > 0:
libtcod.console_rect(panel, x, y, bar_width, 1,
False, libtcod.BKGND_SCREEN)
# Draw the event log
libtcod.console_set_default_foreground(panel, libtcod.white)
libtcod.console_print_ex(
panel, int(x + total_width / 2), y,
libtcod.BKGND_NONE,
libtcod.CENTER,
'{0}: {1}/{2}'.format(name, value, maximum))
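# Worked example of the bar arithmetic above (illustrative numbers): with
# value=34, maximum=50 and total_width=20, bar_width = int(34/50 * 20) = 13,
# so 13 of the 20 cells are drawn in bar_color and the rest keep back_color.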
def draw_entity(terrain_layer, entity,
fov_map, game_map, top_x=0, top_y=0):
# Only draw entities that are in player's fov
if (libtcod.map_is_in_fov(fov_map, entity.x, entity.y) or
(entity.stairs and game_map.tiles[entity.x][entity.y].explored)):
# (entity.c['stairs'] and game_map.tiles[entity.x][entity.y].explored):
# TODO include case for doors
# print("Bgcolor: {}".format(bg_color))
libtcod.console_put_char(
terrain_layer,
entity.x-top_x,
entity.y-top_y,
entity.char,
libtcod.BKGND_NONE)
libtcod.console_set_char_foreground(
terrain_layer, entity.x-top_x, entity.y-top_y, entity.color)
def render_all(terrain_layer, panel, entity_frame, inventory_frame,
main_window,
player,
game_map, fov_map, fov_recompute,
redraw_terrain, redraw_entities, message_log,
constants, mouse,
game_state, current_turn):
    ### Extract variables from constants dict
screen_width = constants['screen_width']
screen_height = constants['screen_height']
panel_height = constants['panel_height']
bar_width = constants['bar_width']
panel_y = constants['panel_y']
terrain_layer_width = constants['terrain_layer_width']
terrain_layer_height = constants['terrain_layer_height']
frame_width = constants['frame_width']
frame_height = constants['frame_height']
# TODO tmp workaround
game_phase = game_state.game_phase
#########################################
######### Render terrain first ##########
#########################################
# First compute the part of visible map, based on the player's position
# Compute top left corner coordinates
top_x = int(player.x - screen_width/2)
top_x = max(0, top_x)
top_x = min(game_map.width - screen_width, top_x)
top_y = int(player.y - screen_height/2)
top_y = max(0, top_y)
top_y = min(game_map.height - screen_height + panel_height, top_y)
# Only redraw terrain if needed
if redraw_terrain:
# Clear the console before drawing on it
libtcod.console_clear(terrain_layer)
for y in range(top_y, top_y + screen_height - panel_height):
for x in range(top_x, top_x + screen_width):
visible = libtcod.map_is_in_fov(fov_map, x, y)
if visible:
# Render it as visible
# game_map.tiles[x][y].render_at(terrain_layer, x, y, visible)
game_map.tiles[x][y].render_at(
terrain_layer, x-top_x, y-top_y, visible)
game_map.tiles[x][y].explored = True
elif game_map.tiles[x][y].explored:
# Render as currently out of sight
game_map.tiles[x][y].render_at(
terrain_layer, x-top_x, y-top_y, visible)
if game_state.entity_targeted:
visible = libtcod.map_is_in_fov(
fov_map,
game_state.entity_targeted.x, game_state.entity_targeted.y)
if visible:
# print("Targeted {} at ({}, {})".format(
# entity_targeted.name, entity_targeted.x, entity_targeted.y))
libtcod.console_set_char_background(
terrain_layer,
game_state.entity_targeted.x-top_x,
game_state.entity_targeted.y-top_y,
libtcod.red, libtcod.BKGND_SET)
#########################################
########### Render entities ############
#########################################
# if redraw_terrain or redraw_entities:
if redraw_terrain:
# libtcod.console_clear(entities_layer)
# Sort entities by their associated render order
entities_in_render_order = sorted(
game_map.entities, key=lambda x: x.render_order.value)
# Draw all entities in the list in the correct order
for entity in entities_in_render_order:
draw_entity(terrain_layer, entity,
fov_map, game_map, top_x, top_y)
# # Blit terrain layer on root console
# libtcod.console_blit(
# terrain_layer,
# 0, 0, screen_width, screen_height,
# 0,
# 0, 0)
#########################################
############ Render panel ##############
#########################################
# Now render the health bar
libtcod.console_set_default_background(panel, libtcod.black)
libtcod.console_clear(panel)
# Print the game messages, one line at a time
y = 1
for message in message_log.messages:
libtcod.console_set_default_foreground(panel, message.color)
libtcod.console_print_ex(
panel,
message_log.x,
y,
libtcod.BKGND_NONE,
libtcod.LEFT,
message.text)
y += 1
# Render the HP bar
render_bar(
panel, 1, 1, bar_width,
'HP', player.c['fighter'].hp, player.c['fighter'].max_hp,
libtcod.light_red, libtcod.darker_red)
# Show current dungeon level
libtcod.console_print_ex(panel, 1, 3, libtcod.BKGND_NONE, libtcod.LEFT,
'Dungeon level: {0}'.format(
game_map.dungeon_level))
# Show current dungeon level
libtcod.console_print_ex(panel, 1, 5, libtcod.BKGND_NONE, libtcod.LEFT,
'Time: {0}'.format(
current_turn))
# Show info about entities under mouse cursor
libtcod.console_set_default_foreground(panel, libtcod.light_gray)
libtcod.console_print_ex(
panel,
1,
0,
libtcod.BKGND_NONE,
libtcod.LEFT,
get_names_under_mouse(
mouse, game_map.entities, fov_map, top_x, top_y))
# Blit panel console on root console
libtcod.console_blit(
panel, 0, 0,
screen_width, panel_height,
0,
0, panel_y)
#########################################
### Blit terrain layer on root console ##
#########################################
libtcod.console_blit(
terrain_layer,
0, 0, terrain_layer_width, terrain_layer_height,
main_window,
0, 0)
#########################################
######### Render entity label ###########
#########################################
entity_under_mouse = get_entity_under_mouse(
mouse, game_map.entities, fov_map, top_x, top_y)
if entity_under_mouse:
render_entity_label(
main_window, entity_under_mouse,
top_x, top_y)
#########################################
######### Render entity frame ##########
#########################################
# Render the focused entity
if game_phase == GamePhase.ENTITY_INFO:
render_entity_frame(entity_frame, game_state.entity_focused)
# Render the selected inventory item
if game_phase == GamePhase.INVENTORY_ITEM_MENU:
render_entity_frame(entity_frame, game_state.selected_inventory_item)
# Blit the frame on the console below (main window)
if game_phase in (GamePhase.ENTITY_INFO, GamePhase.INVENTORY_ITEM_MENU):
libtcod.console_blit(
entity_frame,
0, 0, frame_width, frame_height,
main_window,
screen_width - frame_width, 0)
# Finally blit main window console on root console
libtcod.console_blit(
main_window,
0, 0, terrain_layer_width, terrain_layer_height,
0,
0, 0)
# Show inventory menu
if game_phase in (GamePhase.INVENTORY_MENU, GamePhase.INVENTORY_ITEM_MENU):
inventory_title = 'Inventory'
inventory_menu(
terrain_layer, inventory_title, player,
inventory_frame, screen_width, screen_height)
# Inventory item submenu
if game_phase == GamePhase.INVENTORY_ITEM_MENU:
item_submenu(
terrain_layer, 'Actions', player,
game_state.selected_inventory_item,
screen_width, screen_height)
# Show character screen
elif game_phase == GamePhase.CHARACTER_SCREEN:
character_screen(player, 30, 10, screen_width, screen_height)
return top_x, top_y
| 32.08377 | 82 | 0.585836 |
db81e8f5f9c06936366eec228951634ee87bc889 | 482 | py | Python | pyriemann/__init__.py | qbarthelemy/pyRiemann | b35873b0a6cf9d81a1db09bbedb72a2fefe7d0c3 | [
"BSD-3-Clause"
] | 301 | 2015-04-19T20:23:21.000Z | 2021-04-28T06:42:46.000Z | pyriemann/__init__.py | qbarthelemy/pyRiemann | b35873b0a6cf9d81a1db09bbedb72a2fefe7d0c3 | [
"BSD-3-Clause"
] | 98 | 2015-04-19T16:09:18.000Z | 2021-04-29T15:21:52.000Z | pyriemann/__init__.py | vishalbelsare/pyRiemann | a55b09e317975f7eaaeffd4e6f2977f4174d3d2d | [
"BSD-3-Clause"
] | 113 | 2015-05-13T07:40:48.000Z | 2021-04-26T01:29:49.000Z | from ._version import __version__
from . import classification
from . import tangentspace
from . import channelselection
from . import estimation
from . import spatialfilters
from . import clustering
from . import stats
from . import embedding
from . import preprocessing
__all__ = [
'__version__',
'classification',
'tangentspace',
'channelselection',
'estimation',
'spatialfilters',
'clustering',
'stats',
'embedding',
'preprocessing',
]
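# Usage sketch (the class names below come from the subpackages re-exported
# above and are shown for illustration only):
#
#     import pyriemann
#     cov_est = pyriemann.estimation.Covariances()   # SPD covariance estimator
#     clf = pyriemann.classification.MDM()           # Riemannian classifier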
| 19.28 | 33 | 0.711618 |
3819437cc1c1604a2735450c4e4d80769d44321f | 2,003 | py | Python | Result/Potential.py | atily17/research | 0e762e03747995c8a7d1d8a2ec42be31a17209dc | [
"BSD-3-Clause"
] | null | null | null | Result/Potential.py | atily17/research | 0e762e03747995c8a7d1d8a2ec42be31a17209dc | [
"BSD-3-Clause"
] | null | null | null | Result/Potential.py | atily17/research | 0e762e03747995c8a7d1d8a2ec42be31a17209dc | [
"BSD-3-Clause"
] | 1 | 2022-02-25T06:38:29.000Z | 2022-02-25T06:38:29.000Z | import numpy as np
import matplotlib.cm as cm
import matplotlib.pyplot as plt
class Potential(object):
def __init__(self, potentials, grid):
nodes = grid.node.nodes
self.potentials = [{ "potential":potentials[i], "point":nodes[i]["point"]} for i in range(len(nodes))]
def print(self):
print("-----Potential-----")
for i in range(len(self.potentials)):
print(self.potentials[i])
def plot(self, problem, plottype = "normal", sizeRate=10, zeroOrder=-35):
size = np.array([problem.domain.right-problem.domain.left, problem.domain.up-problem.domain.down])
size_normalize=size[0]+size[1]
size = size/size_normalize * sizeRate
fig=plt.figure(figsize=size)
plt.xlim(problem.domain.left,problem.domain.right)
plt.ylim(problem.domain.down,problem.domain.up)
ax =fig.add_subplot(1,1,1)
domain = plt.Polygon(problem.domain.vertexes, zorder=1, fc = "#CCCCFF", ec = "#CCCCFF")
ax.add_patch(domain)
ax.set_axisbelow(True)
co = np.array([[self.potentials[i]["point"][0],self.potentials[i]["point"][1]] for i in range(len(self.potentials))])
val = np.array([self.potentials[i]["potential"] for i in range(len(self.potentials))])
pl =co[val>10**(zeroOrder)];
c0 =co[(val<10**(zeroOrder)) & (val>-10**(zeroOrder))]
mi =co[val<-10**(zeroOrder)];
if (plottype == "normal"):
cmap = plt.scatter(co[:,0],co[:,1] , c=val , cmap=cm.hsv, zorder=2, marker='.')
elif (plottype == "log"):
plt.scatter(pl[:,0],pl[:,1] , c=np.log10(val[val>10**(zeroOrder)]) , cmap=cm.Reds, zorder=2, marker='.')
plt.scatter(c0[:,0],c0[:,1] , c="#FFFFFF", zorder=2, marker='.')
plt.scatter(mi[:,0],mi[:,1] , c=np.log10(-val[val<-10**(zeroOrder)]), cmap=cm.Blues, zorder=2, marker='.')
fig.colorbar(cmap)
plt.show() | 44.511111 | 127 | 0.574638 |
8efcf5e0cf4dbf942a10235f1f4f65f0e9f535d6 | 686 | py | Python | packages/w3af/w3af/plugins/attack/db/sqlmap/waf/secureiis.py | ZooAtmosphereGroup/HelloPackages | 0ccffd33bf927b13d28c8f715ed35004c33465d9 | [
"Apache-2.0"
] | 3 | 2019-04-09T22:59:33.000Z | 2019-06-14T09:23:24.000Z | tools/w3af/w3af/plugins/attack/db/sqlmap/waf/secureiis.py | sravani-m/Web-Application-Security-Framework | d9f71538f5cba6fe1d8eabcb26c557565472f6a6 | [
"MIT"
] | null | null | null | tools/w3af/w3af/plugins/attack/db/sqlmap/waf/secureiis.py | sravani-m/Web-Application-Security-Framework | d9f71538f5cba6fe1d8eabcb26c557565472f6a6 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import re
from lib.core.settings import WAF_ATTACK_VECTORS
__product__ = "SecureIIS Web Server Security (BeyondTrust)"
def detect(get_page):
retval = False
for vector in WAF_ATTACK_VECTORS:
page, _, _ = get_page(get=vector)
retval = re.search(r"SecureIIS[^<]+Web Server Protection", page or "") is not None
retval |= "http://www.eeye.com/SecureIIS/" in (page or "")
retval |= re.search(r"\?subject=[^>]*SecureIIS Error", page or "") is not None
if retval:
break
return retval
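# Quick self-check (sketch): "fake_get_page" is a stand-in for sqlmap's page
# fetcher, which returns a (page, headers, code) tuple for each attack vector.
#
#     def fake_get_page(get=None):
#         return "blocked by SecureIIS Web Server Protection", None, None
#
#     assert detect(fake_get_page) is True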
| 26.384615 | 90 | 0.654519 |
eb46a7548dfa4a4eaea0f60bfba5ff068eb69273 | 1,360 | py | Python | src/ralph/discovery/management/commands/venturetree.py | deejay1/ralph | 26b7c66912590093e0087ba801e9108290ad0d63 | [
"Apache-2.0"
] | 1 | 2018-09-01T14:14:08.000Z | 2018-09-01T14:14:08.000Z | src/ralph/discovery/management/commands/venturetree.py | srikanth4372/sample | 127b5742ae464d42909a14d71e3c10c241ec3a23 | [
"Apache-2.0"
] | 1 | 2019-08-14T10:03:45.000Z | 2019-08-14T10:03:45.000Z | src/ralph/discovery/management/commands/venturetree.py | srikanth4372/sample | 127b5742ae464d42909a14d71e3c10c241ec3a23 | [
"Apache-2.0"
] | 1 | 2019-08-14T09:59:42.000Z | 2019-08-14T09:59:42.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import textwrap
import re
from django.core.management.base import BaseCommand
from ralph.business.models import Venture
class Command(BaseCommand):
"""Generate a tree of all ventures in a dot format."""
help = textwrap.dedent(__doc__).strip()
requires_model_validation = False
option_list = BaseCommand.option_list
def handle(self, **options):
def norm(v):
return re.sub(r'[^a-zA-Z0-9]', '_', v.symbol).lower()
print('digraph Ventures {')
print(' overlap=prism;')
print(' root [label="Ventures"];')
for v in Venture.objects.all():
for c in v.child_set.all():
print(' %s -> %s;' % (norm(v), norm(c)))
if v.parent is None:
print(' root -> %s;' % norm(v))
attrs = {
'label': '%s\\n[%s]' % (v.name, v.symbol),
'shape': 'box' if v.show_in_ralph else 'ellipse',
'style': 'filled' if v.is_infrastructure else '',
}
a = ','.join('%s="%s"' % a for a in attrs.iteritems())
print((' %s [%s];' % (norm(v), a)).encode('utf8'))
print('}')
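# Typical use (sketch): run the management command and feed the dot output to
# Graphviz, e.g.
#
#     ./manage.py venturetree | dot -Tpng -o ventures.png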
| 31.627907 | 66 | 0.568382 |
a218fd3d9fd7dfc48e99379972f8687d1e8a58b4 | 5,542 | py | Python | emsapi/models/adi_ems_web_api_v2_dto_profile_profile_results_event_record_py3.py | ge-flight-analytics/emsapi-python | 2e3a53529758f1bd7a2a850119b1cc1b5ac552e3 | [
"MIT"
] | null | null | null | emsapi/models/adi_ems_web_api_v2_dto_profile_profile_results_event_record_py3.py | ge-flight-analytics/emsapi-python | 2e3a53529758f1bd7a2a850119b1cc1b5ac552e3 | [
"MIT"
] | 2 | 2020-01-16T00:04:35.000Z | 2021-05-26T21:04:06.000Z | emsapi/models/adi_ems_web_api_v2_dto_profile_profile_results_event_record_py3.py | ge-flight-analytics/emsapi-python | 2e3a53529758f1bd7a2a850119b1cc1b5ac552e3 | [
"MIT"
] | 1 | 2021-02-23T08:25:12.000Z | 2021-02-23T08:25:12.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AdiEmsWebApiV2DtoProfileProfileResultsEventRecord(Model):
"""Encapsulates information about an event result stored in the database.
All required parameters must be populated in order to send to Azure.
:param record_number: Required. The unique integer ID of the event in the
database
:type record_number: int
:param event_type: Required. The unique ID of the event definition that
generated the event
:type event_type: int
:param phase_of_flight: Required. The phase of flight where the event
occurred (which is a value from the EMS phase of flight list)
:type phase_of_flight: int
:param severity: Required. The event severity (which is a value from the
EMS severity list)
:type severity: int
:param status: Required. The status of the event (which is a value from
the EMS status list). Typically this defaults to
0 for new events, but in some data-merge scenarios we need to import a
non-default value
from a remote system
:type status: int
:param false_positive: Required. The false positive value for the event (a
value from the EMS false positive list).
Typically this defaults to 0 for new events, but in some data-merge
scenarios we
need to import a non-default value from a remote system
:type false_positive: int
:param start_time: Required. The start offset for the event
:type start_time: float
:param end_time: Required. The end offset for the event
:type end_time: float
:param global_measurements: Required. The global event measurement results
(defined for all events)
:type global_measurements:
list[~emsapi.models.AdiEmsWebApiV2DtoProfileProfileResultValue]
:param global_timepoints: Required. The global event timepoint results
(defined for all events)
:type global_timepoints:
list[~emsapi.models.AdiEmsWebApiV2DtoProfileProfileResultValue]
:param local_measurements: Required. The event-specific measurement
results (different for each event type)
:type local_measurements:
list[~emsapi.models.AdiEmsWebApiV2DtoProfileProfileResultValue]
:param local_timepoints: Required. The event-specific timepoint results
(different for each event type)
:type local_timepoints:
list[~emsapi.models.AdiEmsWebApiV2DtoProfileProfileResultValue]
:param comments: Required. The event comments. Usually this is empty, but
it's required for some data-merge
scenarios.
:type comments:
list[~emsapi.models.AdiEmsWebApiV2DtoProfileProfileResultComment]
"""
_validation = {
'record_number': {'required': True},
'event_type': {'required': True},
'phase_of_flight': {'required': True},
'severity': {'required': True},
'status': {'required': True},
'false_positive': {'required': True},
'start_time': {'required': True},
'end_time': {'required': True},
'global_measurements': {'required': True},
'global_timepoints': {'required': True},
'local_measurements': {'required': True},
'local_timepoints': {'required': True},
'comments': {'required': True},
}
_attribute_map = {
'record_number': {'key': 'recordNumber', 'type': 'int'},
'event_type': {'key': 'eventType', 'type': 'int'},
'phase_of_flight': {'key': 'phaseOfFlight', 'type': 'int'},
'severity': {'key': 'severity', 'type': 'int'},
'status': {'key': 'status', 'type': 'int'},
'false_positive': {'key': 'falsePositive', 'type': 'int'},
'start_time': {'key': 'startTime', 'type': 'float'},
'end_time': {'key': 'endTime', 'type': 'float'},
'global_measurements': {'key': 'globalMeasurements', 'type': '[AdiEmsWebApiV2DtoProfileProfileResultValue]'},
'global_timepoints': {'key': 'globalTimepoints', 'type': '[AdiEmsWebApiV2DtoProfileProfileResultValue]'},
'local_measurements': {'key': 'localMeasurements', 'type': '[AdiEmsWebApiV2DtoProfileProfileResultValue]'},
'local_timepoints': {'key': 'localTimepoints', 'type': '[AdiEmsWebApiV2DtoProfileProfileResultValue]'},
'comments': {'key': 'comments', 'type': '[AdiEmsWebApiV2DtoProfileProfileResultComment]'},
}
def __init__(self, *, record_number: int, event_type: int, phase_of_flight: int, severity: int, status: int, false_positive: int, start_time: float, end_time: float, global_measurements, global_timepoints, local_measurements, local_timepoints, comments, **kwargs) -> None:
super(AdiEmsWebApiV2DtoProfileProfileResultsEventRecord, self).__init__(**kwargs)
self.record_number = record_number
self.event_type = event_type
self.phase_of_flight = phase_of_flight
self.severity = severity
self.status = status
self.false_positive = false_positive
self.start_time = start_time
self.end_time = end_time
self.global_measurements = global_measurements
self.global_timepoints = global_timepoints
self.local_measurements = local_measurements
self.local_timepoints = local_timepoints
self.comments = comments
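# Construction sketch (field values are illustrative only):
#
#     record = AdiEmsWebApiV2DtoProfileProfileResultsEventRecord(
#         record_number=1, event_type=42, phase_of_flight=7, severity=2,
#         status=0, false_positive=0, start_time=120.5, end_time=130.25,
#         global_measurements=[], global_timepoints=[],
#         local_measurements=[], local_timepoints=[], comments=[],
#     )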
| 48.614035 | 276 | 0.675208 |
621198b41a879ee808f725464ce977b9dfad7542 | 8,531 | py | Python | train.py | HubBucket-Team/minigo | 18d43c0950d3623ad33b9035ab91952b79f8c89c | [
"Apache-2.0"
] | 1 | 2019-10-10T06:09:15.000Z | 2019-10-10T06:09:15.000Z | train.py | VonRosenchild/minigo | 18d43c0950d3623ad33b9035ab91952b79f8c89c | [
"Apache-2.0"
] | null | null | null | train.py | VonRosenchild/minigo | 18d43c0950d3623ad33b9035ab91952b79f8c89c | [
"Apache-2.0"
] | 1 | 2019-10-10T06:09:19.000Z | 2019-10-10T06:09:19.000Z | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Train a network.
Usage:
BOARD_SIZE=19 python train.py tfrecord1 tfrecord2 tfrecord3
"""
import logging
from absl import app, flags
import numpy as np
import tensorflow as tf
import bigtable_input
import dual_net
import preprocessing
import utils
# See www.moderndescartes.com/essays/shuffle_viz for discussion on sizing
flags.DEFINE_integer('shuffle_buffer_size', 2000,
'Size of buffer used to shuffle train examples.')
flags.DEFINE_boolean('shuffle_examples', True,
'Whether to shuffle training examples.')
flags.DEFINE_integer('steps_to_train', None,
'Number of training steps to take. If not set, iterates '
'once over training data.')
flags.DEFINE_integer('window_size', 500000,
'Number of games to include in the window')
flags.DEFINE_float('filter_amount', 1.0,
'Fraction of positions to filter from golden chunks,'
'default, 1.0 (no filter)')
flags.DEFINE_string('export_path', None,
'Where to export the model after training.')
flags.DEFINE_bool('use_bt', False,
'Whether to use Bigtable as input. '
'(Only supported with --use_tpu, currently.)')
flags.DEFINE_bool('freeze', False,
'Whether to freeze the graph at the end of training.')
flags.register_multi_flags_validator(
['use_bt', 'use_tpu'],
lambda flags: flags['use_tpu'] if flags['use_bt'] else True,
'`use_bt` flag only valid with `use_tpu` as well')
@flags.multi_flags_validator(
['use_bt', 'cbt_project', 'cbt_instance', 'cbt_table'],
message='Cloud Bigtable configuration flags not correct')
def _bt_checker(flags_dict):
if not flags_dict['use_bt']:
return True
return (flags_dict['cbt_project']
and flags_dict['cbt_instance']
and flags_dict['cbt_table'])
# From dual_net.py
flags.declare_key_flag('work_dir')
flags.declare_key_flag('train_batch_size')
flags.declare_key_flag('num_tpu_cores')
flags.declare_key_flag('use_tpu')
FLAGS = flags.FLAGS
class EchoStepCounterHook(tf.train.StepCounterHook):
"""A hook that logs steps per second."""
def _log_and_record(self, elapsed_steps, elapsed_time, global_step):
s_per_sec = elapsed_steps / elapsed_time
logging.info("{}: {:.3f} steps per second".format(global_step, s_per_sec))
super()._log_and_record(elapsed_steps, elapsed_time, global_step)
def compute_update_ratio(weight_tensors, before_weights, after_weights):
"""Compute the ratio of gradient norm to weight norm."""
deltas = [after - before for after,
before in zip(after_weights, before_weights)]
delta_norms = [np.linalg.norm(d.ravel()) for d in deltas]
weight_norms = [np.linalg.norm(w.ravel()) for w in before_weights]
ratios = [d / w for d, w in zip(delta_norms, weight_norms)]
all_summaries = [
tf.Summary.Value(tag='update_ratios/' +
tensor.name, simple_value=ratio)
for tensor, ratio in zip(weight_tensors, ratios)]
return tf.Summary(value=all_summaries)
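# Worked example of the summarized quantity (illustrative numbers): for a
# weight tensor w_before = [3.0, 4.0] (norm 5.0) updated to w_after =
# [3.3, 4.4], the delta norm is 0.5, so the logged update ratio is
# 0.5 / 5.0 = 0.1.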
class UpdateRatioSessionHook(tf.train.SessionRunHook):
"""A hook that computes ||grad|| / ||weights|| (using frobenius norm)."""
def __init__(self, output_dir, every_n_steps=1000):
self.output_dir = output_dir
self.every_n_steps = every_n_steps
self.before_weights = None
self.file_writer = None
self.weight_tensors = None
self.global_step = None
def begin(self):
# These calls only works because the SessionRunHook api guarantees this
# will get called within a graph context containing our model graph.
self.file_writer = tf.summary.FileWriterCache.get(self.output_dir)
self.weight_tensors = tf.trainable_variables()
self.global_step = tf.train.get_or_create_global_step()
def before_run(self, run_context):
global_step = run_context.session.run(self.global_step)
if global_step % self.every_n_steps == 0:
self.before_weights = run_context.session.run(self.weight_tensors)
def after_run(self, run_context, run_values):
global_step = run_context.session.run(self.global_step)
if self.before_weights is not None:
after_weights = run_context.session.run(self.weight_tensors)
weight_update_summaries = compute_update_ratio(
self.weight_tensors, self.before_weights, after_weights)
self.file_writer.add_summary(
weight_update_summaries, global_step)
self.before_weights = None
def train(*tf_records: "Records to train on"):
"""Train on examples."""
tf.logging.set_verbosity(tf.logging.INFO)
estimator = dual_net.get_estimator()
effective_batch_size = FLAGS.train_batch_size
if FLAGS.use_tpu:
effective_batch_size *= FLAGS.num_tpu_cores
if FLAGS.use_tpu:
if FLAGS.use_bt:
def _input_fn(params):
games = bigtable_input.GameQueue(
FLAGS.cbt_project, FLAGS.cbt_instance, FLAGS.cbt_table)
games_nr = bigtable_input.GameQueue(
FLAGS.cbt_project, FLAGS.cbt_instance, FLAGS.cbt_table + '-nr')
return preprocessing.get_tpu_bt_input_tensors(
games,
games_nr,
params['batch_size'],
number_of_games=FLAGS.window_size,
random_rotation=True)
else:
def _input_fn(params):
return preprocessing.get_tpu_input_tensors(
params['batch_size'],
tf_records,
random_rotation=True)
# Hooks are broken with TPUestimator at the moment.
hooks = []
else:
def _input_fn():
return preprocessing.get_input_tensors(
FLAGS.train_batch_size,
tf_records,
filter_amount=FLAGS.filter_amount,
shuffle_examples=FLAGS.shuffle_examples,
shuffle_buffer_size=FLAGS.shuffle_buffer_size,
random_rotation=True)
hooks = [UpdateRatioSessionHook(FLAGS.work_dir),
EchoStepCounterHook(output_dir=FLAGS.work_dir)]
steps = FLAGS.steps_to_train
logging.info("Training, steps = %s, batch = %s -> %s examples",
steps or '?', effective_batch_size,
(steps * effective_batch_size) if steps else '?')
if FLAGS.use_bt:
games = bigtable_input.GameQueue(
FLAGS.cbt_project, FLAGS.cbt_instance, FLAGS.cbt_table)
if not games.read_wait_cell():
games.require_fresh_games(20000)
latest_game = games.latest_game_number
index_from = max(latest_game, games.read_wait_cell())
print("== Last game before training:", latest_game, flush=True)
print("== Wait cell:", games.read_wait_cell(), flush=True)
try:
estimator.train(_input_fn, steps=steps, hooks=hooks)
if FLAGS.use_bt:
bigtable_input.set_fresh_watermark(games, index_from,
FLAGS.window_size)
except:
if FLAGS.use_bt:
games.require_fresh_games(0)
raise
def main(argv):
"""Train on examples and export the updated model weights."""
tf_records = argv[1:]
logging.info("Training on %s records: %s to %s",
len(tf_records), tf_records[0], tf_records[-1])
with utils.logged_timer("Training"):
train(*tf_records)
if FLAGS.export_path:
dual_net.export_model(FLAGS.export_path)
if FLAGS.freeze:
if FLAGS.use_tpu:
dual_net.freeze_graph_tpu(FLAGS.export_path)
else:
dual_net.freeze_graph(FLAGS.export_path)
if __name__ == "__main__":
app.run(main)
| 36.771552 | 83 | 0.653851 |
a20a0ee7697d4f6a78008da7f79e7610ffb39f61 | 10,141 | py | Python | Vaccine_page/vaccine_heatmaps.py | ScilifelabDataCentre/covid-portal-visualisations | 065084278b0452d003291115ab597d573aeb39ed | [
"MIT"
] | null | null | null | Vaccine_page/vaccine_heatmaps.py | ScilifelabDataCentre/covid-portal-visualisations | 065084278b0452d003291115ab597d573aeb39ed | [
"MIT"
] | null | null | null | Vaccine_page/vaccine_heatmaps.py | ScilifelabDataCentre/covid-portal-visualisations | 065084278b0452d003291115ab597d573aeb39ed | [
"MIT"
] | null | null | null | import argparse
import plotly.express as px
import plotly.graph_objects as go
import pandas as pd
import numpy as np # won't need this when data on 3rd dose for 12-17 year olds becomes available
import os
from vaccine_dataprep_Swedentots import (
first_two_vacc_dose_lan,
third_vacc_dose_lan,
fourth_vacc_dose,
)
aparser = argparse.ArgumentParser(description="Generate text insert json")
aparser.add_argument("--output-dir", nargs="?", default="vaccine_plots",
help="Output directory where the files will be saved")
args = aparser.parse_args()
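# Typical invocation (sketch):
#
#     python vaccine_heatmaps.py --output-dir vaccine_plots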
## Need 3 sets of data - for one dose, two doses, and three doses
# Don't have population size data for these age groups (at least right now), so can't do population level calculations
## data for 3rd dose is held separately - work with data for 1st 2 doses first
first_two_vacc_dose_lan = first_two_vacc_dose_lan[(first_two_vacc_dose_lan["Region"] == "Sweden")]
# Need to change terminology used for the '90 or older' age group
first_two_vacc_dose_lan = first_two_vacc_dose_lan.replace("90 eller äldre", "90+")
# We drop the 'totals' in the dataset as we don't want them
first_two_vacc_dose_lan.drop(
first_two_vacc_dose_lan[(first_two_vacc_dose_lan["Åldersgrupp"] == "Totalt")].index,
inplace=True,
)
# Recalculate as a percentage for each age group.
first_two_vacc_dose_lan["Procent vaccinerade"] = (
first_two_vacc_dose_lan["Andel vaccinerade"] * 100
)
# Separate data for one and two doses
# one dose
one_dose = first_two_vacc_dose_lan[
(first_two_vacc_dose_lan["Vaccinationsstatus"] == "Minst 1 dos")
]
one_dose = one_dose[["Åldersgrupp", "Procent vaccinerade", "Vaccinationsstatus"]]
one_dose.reset_index(drop=True, inplace=True)
# data for two doses
two_doses = first_two_vacc_dose_lan[
(first_two_vacc_dose_lan["Vaccinationsstatus"] == "Minst 2 doser")
]
two_doses = two_doses[["Åldersgrupp", "Procent vaccinerade", "Vaccinationsstatus"]]
two_doses.reset_index(drop=True, inplace=True)
## Sort data for three doses. Note - data only currently available for 18+ (from 12 for 1 & 2 dose)
# Limit data to just Sweden and modify for the 90+ age group
third_vacc_dose_lan = third_vacc_dose_lan[(third_vacc_dose_lan["Region"] == "Sweden")]
third_vacc_dose_lan = third_vacc_dose_lan.replace("90 eller äldre", "90+")
# Calculate values as percentages
third_vacc_dose_lan.drop(
third_vacc_dose_lan[(third_vacc_dose_lan["Åldersgrupp"] == "Totalt")].index,
inplace=True,
)
third_vacc_dose_lan["Procent vaccinerade"] = (
third_vacc_dose_lan["Andel vaccinerade"] * 100
)
third_vacc_dose_lan = third_vacc_dose_lan[
["Åldersgrupp", "Procent vaccinerade", "Vaccinationsstatus"]
]
# For now, we need to add two age categories for the third dose (12-15, 16-17)
## REMOVE THIS ROW WHEN THESE AGE CATEGORIES ARE AVAILABLE FOR THIRD DOSE DATA
top_row = pd.DataFrame(
{
"Åldersgrupp": ["12-15", "16-17"],
"Procent vaccinerade": [np.nan, np.nan],
"Vaccinationsstatus": ["3 doser", "3 doser"],
}
)
third_dose = pd.concat([top_row, third_vacc_dose_lan]).reset_index(drop=True)
# Add fourth dose (already as percentages from dataprep, so not needed)
# do need to add additional age group rows (until more are added and the 90+ label changes)
# Also need to eliminate 'totalt' row
fourth_vacc_dose = fourth_vacc_dose.replace("90 eller äldre", "90+")
# REMOVE BELOW AS MORE AGE CATEGORIES ARE ADDED
top_row_fourth = pd.DataFrame(
{
"Åldersgrupp": [
"12-15",
"16-17",
"18-29",
"30-39",
"40-49",
"50-59",
"60-69",
],
"Procent vaccinerade": [
np.nan,
np.nan,
np.nan,
np.nan,
np.nan,
np.nan,
np.nan,
],
"Vaccinationsstatus": [
"4 doser",
"4 doser",
"4 doser",
"4 doser",
"4 doser",
"4 doser",
"4 doser",
],
}
)
fourth_dose = pd.concat([top_row_fourth, fourth_vacc_dose]).reset_index(drop=True)
fourth_dose = fourth_dose[fourth_dose.Åldersgrupp != "Totalt"]
fourth_dose = fourth_dose[fourth_dose.Åldersgrupp != "65-69"]
## Prepare dataframe for heatmap (all data in one place)
heatmap_data = pd.concat(
[one_dose, two_doses, third_dose, fourth_dose],
axis=0,
)
heatmap_data["Vaccinationsstatus"] = heatmap_data["Vaccinationsstatus"].replace(
{
"Minst 1 dos": "1",
"Minst 2 doser": "2",
"3 doser": "3",
"4 doser": "4",
}
)
## Make heatmap figures (one small for front of portal, and one larger for page)
## Same data will be included in both
colours = px.colors.diverging.RdBu
fig_small = go.Figure(
data=go.Heatmap(
z=heatmap_data["Procent vaccinerade"],
zmin=0,
zmax=100,
x=heatmap_data["Vaccinationsstatus"],
y=heatmap_data["Åldersgrupp"],
xgap=1,
ygap=1,
colorbar={
"title": "<b>Percentage of <br>Population Vaccinated<br> </b>",
"yanchor": "top",
"y": 1.0,
"lenmode": "fraction",
"len": 0.95,
"tickvals": [
5,
15,
25,
35,
45,
55,
65,
75,
85,
95,
],
"ticktext": [
"00.00-9.99%",
"10.00-19.99%",
"20.00-29.99%",
"30.00-39.99%",
"40.00-49.99%",
"50.00-59.99%",
"60.00-69.99%",
"70.00-79.99%",
"80.00-89.99%",
"90.00-100.00%",
],
},
colorscale=[
[0.0, colours[10]],
[0.1, colours[10]],
[0.1, colours[9]],
[0.2, colours[9]],
[0.2, colours[8]],
[0.3, colours[8]],
[0.3, colours[7]],
[0.4, colours[7]],
[0.4, colours[6]],
[0.5, colours[6]],
[0.5, "rgb(255,255,204)"],
[0.6, "rgb(255,255,204)"],
[0.6, colours[4]],
[0.7, colours[4]],
[0.7, colours[3]],
[0.8, colours[3]],
[0.8, colours[2]],
[0.9, colours[2]],
[0.9, colours[1]],
[1.0, colours[1]],
],
hovertemplate="<extra></extra>Vaccine Doses Received: %{x} <br>Age Category: %{y}<br>Percentage Vaccinated: %{z:.2f}%",
)
)
fig_small.update_layout(
hoverlabel={
"bgcolor": "white",
"font_size": 12,
}
)
fig_small.update_layout(margin={"r": 0, "t": 0, "l": 0, "b": 0})
fig_small.update_layout(
title=" ",
plot_bgcolor="white",
yaxis={
"title": "<b>Age Group</b>",
"linecolor": "black",
},
font={"size": 12},
# width=2000, # Don't set width/height, it's set in Portal
# height=300, # It's the legend length and font that make this heatmap 'small'
xaxis={
"title": "<b>Doses Received</b>",
"tickangle": 0,
"zeroline": True,
"linecolor": "black",
},
)
# fig_small.show()
if not os.path.isdir(args.output_dir):
os.mkdir(args.output_dir)
fig_small.write_json(os.path.join(args.output_dir, "vaccine_heatmap_small.json"))
# fig_small.write_image("Plots/vaccine_heatmap_small.png")
# Now make the larger version
fig = go.Figure(
data=go.Heatmap(
z=heatmap_data["Procent vaccinerade"],
zmin=0,
zmax=100,
x=heatmap_data["Vaccinationsstatus"],
y=heatmap_data["Åldersgrupp"],
xgap=1,
ygap=1,
colorbar={
"title": "<b>Percentage of <br>Population Vaccinated<br> </b>",
"yanchor": "top",
"y": 1.0,
"lenmode": "fraction",
"len": 0.5,
"tickvals": [
5,
15,
25,
35,
45,
55,
65,
75,
85,
95,
],
"ticktext": [
"00.00-9.99%",
"10.00-19.99%",
"20.00-29.99%",
"30.00-39.99%",
"40.00-49.99%",
"50.00-59.99%",
"60.00-69.99%",
"70.00-79.99%",
"80.00-89.99%",
"90.00-100.00%",
],
},
colorscale=[
[0.0, colours[10]],
[0.1, colours[10]],
[0.1, colours[9]],
[0.2, colours[9]],
[0.2, colours[8]],
[0.3, colours[8]],
[0.3, colours[7]],
[0.4, colours[7]],
[0.4, colours[6]],
[0.5, colours[6]],
[0.5, "rgb(255,255,204)"],
[0.6, "rgb(255,255,204)"],
[0.6, colours[4]],
[0.7, colours[4]],
[0.7, colours[3]],
[0.8, colours[3]],
[0.8, colours[2]],
[0.9, colours[2]],
[0.9, colours[1]],
[1.0, colours[1]],
],
hovertemplate="<extra></extra>Vaccine Doses Received: %{x} <br>Age Category: %{y}<br>Percentage Vaccinated: %{z:.2f}%",
)
)
fig.update_layout(
hoverlabel={
"bgcolor": "white",
"font_size": 14,
}
)
fig.update_layout(margin={"r": 0, "t": 0, "l": 0, "b": 0})
fig.update_layout(
title=" ",
plot_bgcolor="white",
yaxis={
"title": "<b>Age Group</b>",
"linecolor": "black",
},
font={"size": 14},
# width=2000, # width/height not set - will depend on portal space
# height=1000, # it's the legend length and font etc. that make this 'larger'
xaxis={
"title": "<b>Doses Received</b>",
"tickangle": 0,
"zeroline": True,
"linecolor": "black",
},
)
# fig.show()
fig.write_json(os.path.join(args.output_dir, "vaccine_heatmap.json"))
# fig.write_image("Plots/vaccine_heatmap.png")
| 30.002959 | 127 | 0.540381 |
6b8708a88d49a1db7103eb5941c6f3c61b6921bd | 1,862 | py | Python | step/tests.py | Juru-10/SSF | 794a2b4ba3bcccb073533ff4ff088085c6a2b080 | [
"MIT"
] | null | null | null | step/tests.py | Juru-10/SSF | 794a2b4ba3bcccb073533ff4ff088085c6a2b080 | [
"MIT"
] | 5 | 2021-02-08T20:30:20.000Z | 2021-09-08T00:58:40.000Z | step/tests.py | Juru-10/SSF | 794a2b4ba3bcccb073533ff4ff088085c6a2b080 | [
"MIT"
] | null | null | null | from django.test import TestCase
from .models import User,School,Level,Guide,Student
import datetime as dt
class SchoolTest(TestCase):
"""Test model for class School."""
def setUp(self):
self.juru = School(name = 'Test',location = 'Test')
def test_instance(self):
self.assertTrue(isinstance(self.juru,School))
def test_save(self):
self.juru.save_school()
schools = School.objects.all()
self.assertTrue(len(schools) >0 )
class LevelTest(TestCase):
"""Test model for class Level."""
def setUp(self):
self.juru = School(name = 'Test',location = 'Test')
self.juru.save_school()
self.new_level = Level(school = self.juru, name = 'test')
def tearDown(self):
School.objects.all().delete()
Level.objects.all().delete()
Student.objects.all().delete()
    def test_save(self):
        self.new_level.save_level()
        levels = Level.objects.all()
        self.assertTrue(len(levels) > 0)
class GuideTest(TestCase):
"""Test model for class Guide."""
def setUp(self):
self.juru = School(name = 'Test',location = 'Test')
self.juru.save_school()
self.new_guide = Guide(school = self.juru, fname = 'Test', lname = 'test', username = 'test', password = 'test')
    def test_save(self):
        # assumes Guide exposes save_guide(), mirroring save_school()/save_level()
        self.new_guide.save_guide()
        guides = Guide.objects.all()
        self.assertTrue(len(guides) > 0)
class StudentTest(TestCase):
"""Test model for class Student."""
def setUp(self):
self.juru = Level(name = 'Test')
self.juru.save_level()
self.new_student = Student(level = self.juru, fname = 'Test', lname = 'test', email = 'test', ID = 'test')
    def test_save(self):
        self.new_student.save_student()
        students = Student.objects.all()
        self.assertTrue(len(students) > 0)
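# These model tests would typically be run with Django's test runner, e.g.
# (assuming the app is registered as 'step'): python manage.py test step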
| 28.212121 | 120 | 0.616004 |
d4ad2561519f860c54478180714d48d9caeee23f | 2,376 | py | Python | projects/PartialReID/train_net.py | NTU-ROSE/fast-reid | f4551a128ba17ef201301ccf3c986edae014cabd | [
"Apache-2.0"
] | 2,194 | 2020-04-06T01:37:56.000Z | 2022-03-30T22:17:28.000Z | projects/PartialReID/train_net.py | NTU-ROSE/fast-reid | f4551a128ba17ef201301ccf3c986edae014cabd | [
"Apache-2.0"
] | 542 | 2020-04-14T08:00:05.000Z | 2022-03-29T07:39:40.000Z | projects/PartialReID/train_net.py | NTU-ROSE/fast-reid | f4551a128ba17ef201301ccf3c986edae014cabd | [
"Apache-2.0"
] | 667 | 2020-04-08T02:06:03.000Z | 2022-03-29T00:57:32.000Z | #!/usr/bin/env python
# encoding: utf-8
"""
@author: sherlock
@contact: sherlockliao01@gmail.com
"""
import logging
import os
import sys
sys.path.append('.')
from fastreid.config import get_cfg
from fastreid.engine import DefaultTrainer, default_argument_parser, default_setup, launch
from fastreid.utils.checkpoint import Checkpointer
from fastreid.engine import hooks
from partialreid import *
class Trainer(DefaultTrainer):
@classmethod
def build_evaluator(cls, cfg, dataset_name, output_dir=None):
data_loader, num_query = cls.build_test_loader(cfg, dataset_name)
return data_loader, DsrEvaluator(cfg, num_query, output_dir)
def setup(args):
"""
Create configs and perform basic setups.
"""
cfg = get_cfg()
add_partialreid_config(cfg)
cfg.merge_from_file(args.config_file)
cfg.merge_from_list(args.opts)
cfg.freeze()
default_setup(cfg, args)
return cfg
def main(args):
cfg = setup(args)
if args.eval_only:
logger = logging.getLogger("fastreid.trainer")
cfg.defrost()
cfg.MODEL.BACKBONE.PRETRAIN = False
model = Trainer.build_model(cfg)
Checkpointer(model).load(cfg.MODEL.WEIGHTS) # load trained model
if cfg.TEST.PRECISE_BN.ENABLED and hooks.get_bn_modules(model):
prebn_cfg = cfg.clone()
prebn_cfg.DATALOADER.NUM_WORKERS = 0 # save some memory and time for PreciseBN
prebn_cfg.DATASETS.NAMES = tuple([cfg.TEST.PRECISE_BN.DATASET]) # set dataset name for PreciseBN
logger.info("Prepare precise BN dataset")
hooks.PreciseBN(
# Run at the same freq as (but before) evaluation.
model,
# Build a new data loader to not affect training
Trainer.build_train_loader(prebn_cfg),
cfg.TEST.PRECISE_BN.NUM_ITER,
).update_stats()
res = Trainer.test(cfg, model)
return res
trainer = Trainer(cfg)
trainer.resume_or_load(resume=args.resume)
return trainer.train()
if __name__ == "__main__":
args = default_argument_parser().parse_args()
print("Command Line Args:", args)
launch(
main,
args.num_gpus,
num_machines=args.num_machines,
machine_rank=args.machine_rank,
dist_url=args.dist_url,
args=(args,),
)
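# Example invocations (a sketch; the config file path and GPU count are assumptions,
# the flags themselves come from default_argument_parser above):
#   python projects/PartialReID/train_net.py --config-file <partialreid_cfg.yml> --num-gpus 1
#   python projects/PartialReID/train_net.py --config-file <partialreid_cfg.yml> --eval-only MODEL.WEIGHTS <checkpoint.pth>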
| 28.285714 | 109 | 0.664562 |
f24445c2ca7090df81e97b2f2c080fcc71b2f33c | 157 | py | Python | django/token_auth/urls.py | trckr/trckr-backend | c13e71035bf0d5428ed9584c86e1c82215ec8cb8 | [
"MIT"
] | 4 | 2018-03-31T13:56:46.000Z | 2021-09-07T19:21:58.000Z | django/token_auth/urls.py | trckr/trckr-backend | c13e71035bf0d5428ed9584c86e1c82215ec8cb8 | [
"MIT"
] | null | null | null | django/token_auth/urls.py | trckr/trckr-backend | c13e71035bf0d5428ed9584c86e1c82215ec8cb8 | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
urlpatterns = [
path('invalidate/', views.InvalidateAuthToken.as_view(), name='token-invalidation')
]
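# These routes are mounted from the project-level urls.py; a minimal sketch
# (the 'token-auth/' prefix is an assumption, not part of this app):
#   path('token-auth/', include('token_auth.urls'))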
| 19.625 | 87 | 0.738854 |
18bc834b6503845280a79e7227b2bc5deaee8dbe | 3,109 | py | Python | python/tink/streaming_aead/_rewindable_input_stream.py | fax001/tink | 9f30c97cb84b10bbba6978bc9c12c86478024050 | [
"Apache-2.0"
] | 1 | 2022-03-15T03:21:44.000Z | 2022-03-15T03:21:44.000Z | python/tink/streaming_aead/_rewindable_input_stream.py | fax001/tink | 9f30c97cb84b10bbba6978bc9c12c86478024050 | [
"Apache-2.0"
] | 1 | 2022-03-02T13:25:38.000Z | 2022-03-02T13:25:38.000Z | python/tink/streaming_aead/_rewindable_input_stream.py | fax001/tink | 9f30c97cb84b10bbba6978bc9c12c86478024050 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A Raw Input stream wrapper that supports rewinding."""
import io
from typing import Optional, BinaryIO
class RewindableInputStream(io.RawIOBase):
"""Implements a readable io.RawIOBase wrapper that supports rewinding.
The wrapped input_stream can either be a io.RawIOBase or io.BufferedIOBase.
"""
def __init__(self, input_stream: BinaryIO):
super().__init__()
if not input_stream.readable():
raise ValueError('input_stream must be readable')
self._input_stream = input_stream
self._buffer = bytearray()
self._pos = 0
self._rewindable = True
def read(self, size: int = -1) -> Optional[bytes]:
"""Read and return up to size bytes when size >= 0.
If input_stream.read returns None to indicate "No data at the moment", this
function may return None as well. But it will eventually return
some data, or return b'' if EOF is reached.
Args:
size: Maximum number of bytes to be returned, if >= 0. If size is smaller
than 0 or None, return the whole content of the file.
Returns:
bytes read. b'' is returned on EOF, and None if there is currently
no data available, but EOF is not reached yet.
"""
if size is None or size < 0:
return self.readall() # implemented in io.RawIOBase
if self._pos < len(self._buffer):
# buffer has some data left. Return up to 'size' bytes from the buffer
new_pos = min(len(self._buffer), self._pos + size)
b = self._buffer[self._pos:new_pos]
self._pos = new_pos
return bytes(b)
# no data left in buffer
if not self._rewindable and self._buffer:
# buffer is not needed anymore
self._buffer = bytearray()
self._pos = 0
try:
data = self._input_stream.read(size)
except BlockingIOError:
# self._input_stream is a BufferedIOBase and has currently no data
return None
if data is None:
# self._input_stream is a RawIOBase and has currently no data
return None
if self._rewindable:
self._buffer.extend(data)
self._pos += len(data)
return data
def rewind(self) -> None:
if not self._rewindable:
raise ValueError('rewind is disabled')
self._pos = 0
def disable_rewind(self) -> None:
self._rewindable = False
def readable(self) -> bool:
return True
def close(self) -> None:
"""Close the stream and the wrapped input_stream."""
if self.closed: # pylint:disable=using-constant-test
return
self._input_stream.close()
super().close()
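# Minimal usage sketch (illustration only, not part of the Tink API surface):
#   stream = RewindableInputStream(open('data.bin', 'rb'))
#   header = stream.read(16)    # read the beginning of the stream
#   stream.rewind()             # position resets, the 16 bytes will be re-read
#   stream.disable_rewind()     # from now on the internal buffer can be dropped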
| 33.793478 | 79 | 0.689289 |
945d6bac61e08d31c9bf1fafa700f62639cd1d27 | 3,453 | py | Python | PythonDjangoPortfolio/.d_settings.py | jffc-dev/Python-Django-Portfolio | aca1aae3493f47535d01ced47d32b13a00bbc8e4 | [
"MIT"
] | null | null | null | PythonDjangoPortfolio/.d_settings.py | jffc-dev/Python-Django-Portfolio | aca1aae3493f47535d01ced47d32b13a00bbc8e4 | [
"MIT"
] | null | null | null | PythonDjangoPortfolio/.d_settings.py | jffc-dev/Python-Django-Portfolio | aca1aae3493f47535d01ced47d32b13a00bbc8e4 | [
"MIT"
] | null | null | null | """
Django settings for PythonDjangoPortfolio project.
Generated by 'django-admin startproject' using Django 3.2.11.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
from PythonDjangoPortfolio import db as db
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-62m&$174v-x3$_xn9ixr3o-e=$eb^1-*)w&14m^re_1o_%o9m2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'Portfolio',
'Helpers',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'PythonDjangoPortfolio.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'PythonDjangoPortfolio.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = db.POSTGRESQL
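# db.POSTGRESQL is expected to be a standard Django DATABASES dict kept out of
# this settings module; a sketch of the assumed shape (all values are placeholders):
#   POSTGRESQL = {
#       'default': {
#           'ENGINE': 'django.db.backends.postgresql',
#           'NAME': '<db-name>', 'USER': '<user>', 'PASSWORD': '<password>',
#           'HOST': 'localhost', 'PORT': '5432',
#       }
#   }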
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
'static'
]
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
MEDIA_URL = '/media/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| 25.389706 | 91 | 0.711845 |
95082a519c6713365aa13bce3fa18a1cd77574ce | 9,144 | py | Python | research/object_detection/data_decoders/tf_example_decoder.py | nanmon/red-convolucional | 1cbbcb162f77a04d7922a5ab77c60bbadfa1f0e5 | [
"Apache-2.0"
] | null | null | null | research/object_detection/data_decoders/tf_example_decoder.py | nanmon/red-convolucional | 1cbbcb162f77a04d7922a5ab77c60bbadfa1f0e5 | [
"Apache-2.0"
] | null | null | null | research/object_detection/data_decoders/tf_example_decoder.py | nanmon/red-convolucional | 1cbbcb162f77a04d7922a5ab77c60bbadfa1f0e5 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensorflow Example proto decoder for object detection.
A decoder to decode string tensors containing serialized tensorflow.Example
protos for object detection.
"""
import tensorflow as tf
from object_detection.core import data_decoder
from object_detection.core import standard_fields as fields
from object_detection.utils import label_map_util
slim_example_decoder = tf.contrib.slim.tfexample_decoder
class TfExampleDecoder(data_decoder.DataDecoder):
"""Tensorflow Example proto decoder."""
def __init__(self,
load_instance_masks=False,
label_map_proto_file=None,
use_display_name=False):
"""Constructor sets keys_to_features and items_to_handlers.
Args:
load_instance_masks: whether or not to load and handle instance masks.
label_map_proto_file: a file path to a
object_detection.protos.StringIntLabelMap proto. If provided, then the
mapped IDs of 'image/object/class/text' will take precedence over the
existing 'image/object/class/label' ID. Also, if provided, it is
assumed that 'image/object/class/text' will be in the data.
use_display_name: whether or not to use the `display_name` for label
mapping (instead of `name`). Only used if label_map_proto_file is
provided.
"""
self.keys_to_features = {
'image/encoded':
tf.FixedLenFeature((), tf.string, default_value=''),
'image/format':
tf.FixedLenFeature((), tf.string, default_value='jpeg'),
'image/filename':
tf.FixedLenFeature((), tf.string, default_value=''),
'image/key/sha256':
tf.FixedLenFeature((), tf.string, default_value=''),
'image/source_id':
tf.FixedLenFeature((), tf.string, default_value=''),
'image/height':
tf.FixedLenFeature((), tf.int64, 1),
'image/width':
tf.FixedLenFeature((), tf.int64, 1),
# Object boxes and classes.
'image/object/bbox/xmin':
tf.VarLenFeature(tf.float32),
'image/object/bbox/xmax':
tf.VarLenFeature(tf.float32),
'image/object/bbox/ymin':
tf.VarLenFeature(tf.float32),
'image/object/bbox/ymax':
tf.VarLenFeature(tf.float32),
'image/object/class/label':
tf.VarLenFeature(tf.int64),
'image/object/class/text':
tf.VarLenFeature(tf.string),
'image/object/area':
tf.VarLenFeature(tf.float32),
'image/object/is_crowd':
tf.VarLenFeature(tf.int64),
'image/object/difficult':
tf.VarLenFeature(tf.int64),
'image/object/group_of':
tf.VarLenFeature(tf.int64),
}
self.items_to_handlers = {
fields.InputDataFields.image: slim_example_decoder.Image(
image_key='image/encoded', format_key='image/format', channels=3),
fields.InputDataFields.source_id: (
slim_example_decoder.Tensor('image/source_id')),
fields.InputDataFields.key: (
slim_example_decoder.Tensor('image/key/sha256')),
fields.InputDataFields.filename: (
slim_example_decoder.Tensor('image/filename')),
# Object boxes and classes.
fields.InputDataFields.groundtruth_boxes: (
slim_example_decoder.BoundingBox(
['ymin', 'xmin', 'ymax', 'xmax'], 'image/object/bbox/')),
fields.InputDataFields.groundtruth_area: slim_example_decoder.Tensor(
'image/object/area'),
fields.InputDataFields.groundtruth_is_crowd: (
slim_example_decoder.Tensor('image/object/is_crowd')),
fields.InputDataFields.groundtruth_difficult: (
slim_example_decoder.Tensor('image/object/difficult')),
fields.InputDataFields.groundtruth_group_of: (
slim_example_decoder.Tensor('image/object/group_of'))
}
if load_instance_masks:
self.keys_to_features['image/object/mask'] = tf.VarLenFeature(tf.float32)
self.items_to_handlers[
fields.InputDataFields.groundtruth_instance_masks] = (
slim_example_decoder.ItemHandlerCallback(
['image/object/mask', 'image/height', 'image/width'],
self._reshape_instance_masks))
if label_map_proto_file:
label_map = label_map_util.get_label_map_dict(label_map_proto_file,
use_display_name)
# We use a default_value of -1, but we expect all labels to be contained
# in the label map.
table = tf.contrib.lookup.HashTable(
initializer=tf.contrib.lookup.KeyValueTensorInitializer(
keys=tf.constant(list(label_map.keys())),
values=tf.constant(list(label_map.values()), dtype=tf.int64)),
default_value=-1)
# If the label_map_proto is provided, try to use it in conjunction with
# the class text, and fall back to a materialized ID.
label_handler = slim_example_decoder.BackupHandler(
slim_example_decoder.LookupTensor(
'image/object/class/text', table, default_value=''),
slim_example_decoder.Tensor('image/object/class/label'))
else:
label_handler = slim_example_decoder.Tensor('image/object/class/label')
self.items_to_handlers[
fields.InputDataFields.groundtruth_classes] = label_handler
def decode(self, tf_example_string_tensor):
"""Decodes serialized tensorflow example and returns a tensor dictionary.
Args:
tf_example_string_tensor: a string tensor holding a serialized tensorflow
example proto.
Returns:
A dictionary of the following tensors.
fields.InputDataFields.image - 3D uint8 tensor of shape [None, None, 3]
containing image.
fields.InputDataFields.source_id - string tensor containing original
image id.
fields.InputDataFields.key - string tensor with unique sha256 hash key.
fields.InputDataFields.filename - string tensor with original dataset
filename.
fields.InputDataFields.groundtruth_boxes - 2D float32 tensor of shape
[None, 4] containing box corners.
fields.InputDataFields.groundtruth_classes - 1D int64 tensor of shape
[None] containing classes for the boxes.
fields.InputDataFields.groundtruth_area - 1D float32 tensor of shape
[None] containing containing object mask area in pixel squared.
fields.InputDataFields.groundtruth_is_crowd - 1D bool tensor of shape
[None] indicating if the boxes enclose a crowd.
Optional:
fields.InputDataFields.groundtruth_difficult - 1D bool tensor of shape
[None] indicating if the boxes represent `difficult` instances.
fields.InputDataFields.groundtruth_group_of - 1D bool tensor of shape
[None] indicating if the boxes represent `group_of` instances.
fields.InputDataFields.groundtruth_instance_masks - 3D int64 tensor of
shape [None, None, None] containing instance masks.
"""
serialized_example = tf.reshape(tf_example_string_tensor, shape=[])
decoder = slim_example_decoder.TFExampleDecoder(self.keys_to_features,
self.items_to_handlers)
keys = decoder.list_items()
tensors = decoder.decode(serialized_example, items=keys)
tensor_dict = dict(zip(keys, tensors))
is_crowd = fields.InputDataFields.groundtruth_is_crowd
tensor_dict[is_crowd] = tf.cast(tensor_dict[is_crowd], dtype=tf.bool)
tensor_dict[fields.InputDataFields.image].set_shape([None, None, 3])
return tensor_dict
def _reshape_instance_masks(self, keys_to_tensors):
"""Reshape instance segmentation masks.
The instance segmentation masks are reshaped to [num_instances, height,
width] and cast to boolean type to save memory.
Args:
keys_to_tensors: a dictionary from keys to tensors.
Returns:
A 3-D float tensor of shape [num_instances, height, width] with values
in {0, 1}.
"""
height = keys_to_tensors['image/height']
width = keys_to_tensors['image/width']
to_shape = tf.cast(tf.stack([-1, height, width]), tf.int32)
masks = keys_to_tensors['image/object/mask']
if isinstance(masks, tf.SparseTensor):
masks = tf.sparse_tensor_to_dense(masks)
masks = tf.reshape(tf.to_float(tf.greater(masks, 0.0)), to_shape)
return tf.cast(masks, tf.float32)
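# Usage sketch (illustrative only; the label map path is an assumption):
#   decoder = TfExampleDecoder(load_instance_masks=False,
#                              label_map_proto_file='data/label_map.pbtxt')
#   serialized = tf.placeholder(tf.string, shape=[])
#   tensor_dict = decoder.decode(serialized)
#   # tensor_dict[fields.InputDataFields.groundtruth_boxes] -> [N, 4] float32 tensor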
| 45.492537 | 80 | 0.675962 |
9d96168d2396f9b55677051607cf168d38a09bcc | 11,293 | py | Python | kojen/cgen.py | kohjaen/kojen | e61855e48617e691d1fa0ddac4fdabac6b6a1eff | [
"MIT"
] | 3 | 2020-07-12T08:17:42.000Z | 2022-02-11T15:44:49.000Z | kojen/cgen.py | kohjaen/kojen | e61855e48617e691d1fa0ddac4fdabac6b6a1eff | [
"MIT"
] | null | null | null | kojen/cgen.py | kohjaen/kojen | e61855e48617e691d1fa0ddac4fdabac6b6a1eff | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'eugene'
from collections import OrderedDict
import os, shutil
try:
from .preservative import *
except (ModuleNotFoundError, ImportError) as e:
from preservative import *
'''
MIT License
Copyright (c) 2015 Eugene Grobbelaar (email : koh.jaen@yahoo.de)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
''' This forms the base for some sorts of code generation.
Step 1) Load template files to memory
Step 2) Search and replace passed-in tags in memory (including filenames).
'''
# Code Model -> Just a bunch of lines, mapped to filenames.
class CCodeModel:
def __init__(self):
self.filenames_to_lines = OrderedDict()
def Merge(self, codemodel):
"""
Will merge the input codemodel with this.
@param codemodel: a CCodeModel object
"""
self.filenames_to_lines.update(codemodel.filenames_to_lines)
'''------------------------------------------------------------------------------------------------------'''
alpha = 97
def __getnextalphabet__():
global alpha
alpha = alpha + 1
if alpha == 120:
alpha = 65
if alpha == 91:
alpha = 97
def __resetalphabet__():
global alpha
alpha = 97
def even_space(str, nospaces=35):
return str + (nospaces - len(str)) * " "
def camel_case(str):
return str.title()
def camel_case_small(str):
return str[0].lower() + str[1:]
def caps(str):
return str.upper()
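# Quick illustration of the small string helpers above:
#   even_space("name", 10)        -> pads "name" to 10 characters with trailing spaces
#   camel_case("my signal")       -> "My Signal"
#   camel_case_small("MySignal")  -> "mySignal"
#   caps("mysignal")              -> "MYSIGNAL"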
'''------------------------------------------------------------------------------------------------------'''
class CBASEGenerator:
def __init__(self, inputfiledir, outputfiledir, language=None, author='Anonymous', group='', brief='',namespace_to_folders = False):
self.input_template_file_dir = inputfiledir
self.output_gen_file_dir = outputfiledir
self.language = language
self.author = author
self.group = group
self.brief = brief
self.NAMESPACE_TO_GO_TO_OWN_FOLDER = namespace_to_folders
# Does the input exist
if not os.path.exists(inputfiledir):
raise Exception("Directory '" + inputfiledir + "' does not exist.")
else:
files = os.listdir(inputfiledir)
# Is the input empty
if not files:
raise Exception("Directory '" + inputfiledir + "' is empty.")
else:
# Check the output dir
if not os.path.exists(outputfiledir):
os.makedirs(outputfiledir)
print("Directory '" + outputfiledir + "' does not exist...created.")
def __generate_filenames_from_templates__(self,file, dict_to_replace_filenames):
for tag, desired_text in dict_to_replace_filenames.items():
file = file.replace(tag, desired_text)
return file
    ''' This will collapse runs of consecutive newlines directly after each other, leaving only one '''
def __filter_multiple_newlines(self,list_of_lines_in_file):
last = ''
for i in range(len(list_of_lines_in_file)):
was_filtered = False
current = list_of_lines_in_file[i].replace(" ", "")
if i > 0:
if current == last and last == '\n':
list_of_lines_in_file[i] = ''
was_filtered = True
if not was_filtered:
last = current
return list_of_lines_in_file
def __loadtemplates_firstfiltering_FILE__(self, filepath, dict_to_replace_lines, dict_to_replace_filenames, filter_files_containing_in_name = ""):
result = CCodeModel()
if os.path.exists(filepath):
file_without_path = os.path.basename(filepath)
with open(filepath) as f:
lines = []
# Replace the key:value pairs per line...
for line in f:
for tag, desired_text in dict_to_replace_lines.items():
desired_text = self.__preserve_leading_tagwhitespace_in_multiline_searchandreplace(line, tag, desired_text)
line = line.replace(tag, desired_text)
# split multi-line-in-one-string to multi line. Code preservation does not work otherwise.
if line.count('\n') > 1:
lines_in_line = line.rstrip('\n').split('\n')
for l in lines_in_line:
lines.append(l + '\n') # could do
else:
lines.append(line)
# Replace the key:value pairs per filename...
for tag, desired_text in dict_to_replace_filenames.items():
file_without_path = file_without_path.replace(tag, desired_text)
# Remove multiple newlines
lines = self.__filter_multiple_newlines(lines)
result.filenames_to_lines[file_without_path] = lines
return result
def __loadtemplates_firstfiltering__(self, dict_to_replace_lines, dict_to_replace_filenames, filter_files_containing_in_name = ""):
"""
Load Template and do 1st round of filtering. The filtering will replace the TAG
@param dict_to_replace_lines: a dictionary of keys:values to replace per line
@param dict_to_replace_filenames: a dictionary of keys:values to replace per templatefilename. This includes extension.
        @param filter_files_containing_in_name: will process only files that contain this text in the name... or "" for all.
@return: CCodeModel, a dictionary -> {filename,[lines]}
"""
template_file_found = False
result = CCodeModel()
CWD = self.input_template_file_dir
for root, dirs, files in os.walk(CWD):
for file in files:
if (file.lower().find(filter_files_containing_in_name.lower()) > -1 or not filter_files_containing_in_name.strip()) and not file.lower().find(".removed") > -1 :
template_file_found = True
cm = self.__loadtemplates_firstfiltering_FILE__(os.path.join(root, file), dict_to_replace_lines, dict_to_replace_filenames, filter_files_containing_in_name)
result.Merge(cm)
if not template_file_found:
raise Exception("Directory '" + self.input_template_file_dir + "' contains no templates.")
return result
def __preserve_leading_tagwhitespace_in_multiline_searchandreplace(self, line, tag, desired_text):
"""
For the case where the 'desired_text' that should replace the 'tag' in the 'line', if it is a multi-line
replace, it will keep the leading spaces across all lines...otherwise simply returns the input desired_text
@param line:
@param tag:
@param desired_text:
@return:
"""
if line.find(tag) != -1:
desired_text_as_lines = desired_text.rstrip('\n').split('\n')
if len(desired_text_as_lines) > 1:
leading_spaces = (len(line) - len(line.lstrip(' '))) * " "
desired_text = ""
for d in desired_text_as_lines:
if not desired_text:
desired_text = d + "\n"
else:
desired_text = desired_text + leading_spaces + d + "\n"
desired_text = desired_text.rstrip('\n')
return desired_text
def __createoutput__(self, filenames_to_lines):
for f in filenames_to_lines:
print("+++++++++ ", f)
filename = os.path.join(self.output_gen_file_dir, f)
os.makedirs(os.path.dirname(filename), exist_ok=True)
with open(filename, 'w') as writer:
for line in filenames_to_lines[f]:
line = line.replace('\t'," ") # Last filter! Convert tabs to 4 spaces...
writer.write(line)
'''Will use the base-class configured 'output directory' if no preserve directory is passed in. '''
def __preserve_usertags_in_files__(self, codemodel, preserve_dir = ""):
        # Round-trip code preservation. Will load the code to preserve upon creation (if the output dir is not empty / the same as the one in the compile path).
        # TCP gen might have a different output directory (typically COG will put files into an intermediate dir, and then copy them elsewhere).
## Preserve only files...
copy_filename_to_lines = codemodel.filenames_to_lines.copy() # prevent mutation whilst iteration.
for filename_nopath in copy_filename_to_lines:
file_to_preserve = ""
if preserve_dir == "":
file_to_preserve = os.path.join(self.output_gen_file_dir, filename_nopath)
else:
file_to_preserve = os.path.join(preserve_dir, filename_nopath)
preservation = Preservative(file_to_preserve)
preservation.Emplace(codemodel.filenames_to_lines)
## Preserve the entire directory
# preservation = None
# if preserve_dir == "":
# preservation = Preservative(self.output_gen_file_dir)
# else:
# preservation = Preservative(preserve_dir)
# preservation.Emplace(codemodel.filenames_to_lines)
'''------------------------------------------------------------------------------------------------------'''
def FileCopyUtil(dir_from, dir_to, list_of_filenames):
"""
Will copy each file from list_of_filenames in dir_from to dir_to.
Will create dir_to (even if its a tree) if it does not exist.
@param dir_from: The directory from, where the list of files reside.
@param dir_to: The directory the list of files should be copied to.
@param list_of_filenames: The list [] of filenames to be copied.
"""
try:
os.makedirs(dir_to, exist_ok=True)
for filename in list_of_filenames:
try:
shutil.copy(os.path.join(dir_from, filename), os.path.join(dir_to, filename))
except OSError:
print("Copy of the file %s failed" % os.path.join(dir_from, filename))
except OSError:
print("Creation of the directory %s failed" % dir_to) | 42.454887 | 176 | 0.619853 |
3d234db44ccc9e505ca50662dfbe06091e5327ff | 2,788 | py | Python | ml/equationGen.py | Shivams9/pythoncodecamp | e6cd27f4704a407ee360414a8c9236b254117a59 | [
"MIT"
] | 6 | 2021-08-04T08:15:22.000Z | 2022-02-02T11:15:56.000Z | ML/equationGen.py | Maurya232Abhishek/Python-repository-for-basics | 3dcec5c529a0847df07c9dcc1424675754ce6376 | [
"MIT"
] | 14 | 2021-08-02T06:28:00.000Z | 2022-03-25T10:44:15.000Z | ML/equationGen.py | Maurya232Abhishek/Python-repository-for-basics | 3dcec5c529a0847df07c9dcc1424675754ce6376 | [
"MIT"
] | 6 | 2021-07-16T04:56:41.000Z | 2022-02-16T04:40:06.000Z | #from sympy import symbols,diff
import cv2
import matplotlib.pyplot as plt
from sklearn.metrics import r2_score
import numpy as np
"""class PredictorImage:
def __init__(self,pic,label):
self.pic = pic
self.label = label"""
def readimg(path):
a= cv2.imread(path)
return a
def showimg(img,imgname):
cv2.imshow(imgname,img)
cv2.waitKey(0)
def f(a):  # reduce a binary pixel vector to a single comparison value
sum=0
for i in range(len(a)):
if a[i] == 1:
sum += (i+1)**2
sum +=1
return sum
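# f() turns a 0/1 vector into one scalar by summing (i+1)**2 over the set
# positions, plus a small offset term; e.g. set positions 0 and 2 contribute
# 1**2 + 3**2 = 10 before the offset. imgvalue() below uses it to reduce a
# thresholded image to a single comparable number.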
def getThreshold(pic):
mr,mc,mz=pic.shape
sum = 0
for r in range(mr):
for c in range(mc):
avg = (int(pic[r][c][0])+int(pic[r][c][1])+int(pic[r][c][2]))//3
sum += avg
return int(sum//(mr*mc))
def blackwhite(img):
pic = img.copy()
t= getThreshold(pic)
mr,mc,mz=pic.shape
for r in range(mr):
for c in range(mc):
avg = (int(pic[r][c][0]) + int(pic[r][c][1]) + int(pic[r][c][2])) // 3
if avg <= t:
pic[r][c]=[0,0,0]
else:
pic[r][c]=[255,255,255]
return pic
def grayscale(img):
pic = img.copy()
mr,mc,mz=pic.shape
for r in range(mr):
for c in range(mc):
            avg = (int(pic[r][c][0]) + int(pic[r][c][1]) + int(pic[r][c][2])) // 3  # parenthesise the sum so the average covers all three channels
pic[r][c] = [avg,avg,avg]
return pic
def onedarray(pic):
mr,mc,mz=pic.shape
l=[]
#count =1;
for r in range(mr):
for c in range(mc):
#print(count)
if pic[r][c][1] == 255:
l.append(0)
else:
l.append(1)
#count +=1
return l
def imgvalue(img):
bw = blackwhite(img)
oned = onedarray(bw)
return f(oned)
def classification(n,imgvalue1,imgvalue2,imgvalue3,imgvalue4,imgvalue5):
l=[]
for i in range(len(n)):
if n[i] <= imgvalue4:
l.append(4)
elif n[i] <= imgvalue2:
l.append(2)
elif n[i] <= imgvalue3:
l.append(3)
elif n[i] <= imgvalue5:
l.append(5)
elif n[i] <= imgvalue1:
l.append(1)
return l
#listofpics=[PredictorImage(readimg("one.png",1))]
pic1 = readimg("one.PNG")
showimg(pic1,"One")
pic2 = readimg("two.PNG")
pic3 = readimg("three.PNG")
pic4 = readimg("four.PNG")
pic5 = readimg("five.PNG")
showimg(pic5,"five")
print("1",imgvalue(pic1))
print("2",imgvalue(pic2))
print("3",imgvalue(pic3))
print("4",imgvalue(pic4))
print("5",imgvalue(pic5))
l = [1,2,3,4,5]
p = [imgvalue(pic1),imgvalue(pic2),imgvalue(pic3),imgvalue(pic4),imgvalue(pic5)]
imgv = np.linspace(4646160000,7994260792,200)
c=classification(imgv,p[0],p[1],p[2],p[3],p[4])
print(len(c))
print(len(imgv))
plt.plot(imgv,c,color="red",marker="o")
plt.show()
| 25.577982 | 86 | 0.539096 |
b6c30cb572e9faefadc8d9f59113a1efbf8f7af6 | 414 | py | Python | main/migrations/0015_auto_20190719_0743.py | gda2048/thefirst | f0a74c0a53d507297c58eb267152f6b17339ac02 | [
"Apache-2.0"
] | 5 | 2019-08-19T14:49:29.000Z | 2019-12-19T19:03:54.000Z | main/migrations/0015_auto_20190719_0743.py | Sirkirill/PhychoBlog | f0a74c0a53d507297c58eb267152f6b17339ac02 | [
"Apache-2.0"
] | 10 | 2020-02-12T00:46:12.000Z | 2022-02-10T09:16:47.000Z | main/migrations/0015_auto_20190719_0743.py | Sirkirill/PhychoBlog | f0a74c0a53d507297c58eb267152f6b17339ac02 | [
"Apache-2.0"
] | 1 | 2019-10-10T13:04:11.000Z | 2019-10-10T13:04:11.000Z | # Generated by Django 2.2.3 on 2019-07-19 07:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0014_auto_20190719_0742'),
]
operations = [
migrations.AlterField(
model_name='achievement',
name='priority',
field=models.IntegerField(default=2, verbose_name='Приоритет'),
),
]
| 23 | 75 | 0.615942 |
7912de6de99984a6a6d22a2cdf088ad7c95fa135 | 1,406 | py | Python | Python/kraken/ui/DataTypeWidgets/ScalarWidgetImpl.py | FabricExile/Kraken | d8c1f5189cb191945e2c18a1369c458d05305afc | [
"BSD-3-Clause"
] | 7 | 2017-12-04T16:57:42.000Z | 2021-09-07T07:02:38.000Z | Python/kraken/ui/DataTypeWidgets/ScalarWidgetImpl.py | xtvjxk123456/Kraken | d8c1f5189cb191945e2c18a1369c458d05305afc | [
"BSD-3-Clause"
] | null | null | null | Python/kraken/ui/DataTypeWidgets/ScalarWidgetImpl.py | xtvjxk123456/Kraken | d8c1f5189cb191945e2c18a1369c458d05305afc | [
"BSD-3-Clause"
] | 6 | 2017-11-14T06:50:48.000Z | 2021-08-21T22:47:29.000Z | from PySide import QtGui, QtCore
from AttributeWidgetImpl import AttributeWidget
class ScalarWidget(AttributeWidget):
def __init__(self, attribute, parentWidget=None, addNotificationListener = True):
super(ScalarWidget, self).__init__(attribute, parentWidget=parentWidget, addNotificationListener = addNotificationListener)
hbox = QtGui.QHBoxLayout()
self._widget = QtGui.QLineEdit(self)
validator = QtGui.QDoubleValidator(self)
validator.setDecimals(3)
self._widget.setValidator(validator)
hbox.addWidget(self._widget, 1)
hbox.addStretch(0)
hbox.setContentsMargins(0, 0, 0, 0)
self.setLayout(hbox)
self.setSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
self.updateWidgetValue()
if self.isEditable():
self._widget.editingFinished.connect(self._invokeSetter)
else:
self._widget.setReadOnly(True)
def getWidgetValue(self):
return float(self._widget.text())
def setWidgetValue(self, value):
self._widget.setText(str(round(value, 4)))
@classmethod
def canDisplay(cls, attribute):
return(
attribute.getDataType() == 'Scalar' or
attribute.getDataType() == 'Float32' or
attribute.getDataType() == 'Float64'
)
ScalarWidget.registerPortWidget()
| 31.244444 | 131 | 0.667141 |
9ce6597ab4af34316217df467ab1d52694f75742 | 3,325 | py | Python | scrape_mars.py | dosterman09/web-scraping-challenge | 53d4020bb67c7e0b9c0693bc9804048d7b499f42 | [
"ADSL"
] | null | null | null | scrape_mars.py | dosterman09/web-scraping-challenge | 53d4020bb67c7e0b9c0693bc9804048d7b499f42 | [
"ADSL"
] | null | null | null | scrape_mars.py | dosterman09/web-scraping-challenge | 53d4020bb67c7e0b9c0693bc9804048d7b499f42 | [
"ADSL"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# Scraping Mars!
# In[1]:
import pandas as pd
import datetime as dt
from flask import Flask
import requests
from splinter import Browser
from bs4 import BeautifulSoup
from webdriver_manager.chrome import ChromeDriverManager
def scrape():
#create empty dictionary
mars_info_dict = {}
# In[2]:
executable_path = {'executable_path': ChromeDriverManager().install()}
browser = Browser('chrome', **executable_path, headless=False)
# Mars News
# In[3]:
#Nasa Mars News Site
url = "https://redplanetscience.com/"
browser.visit(url)
# In[4]:
#Scrape using BeautifulSoup
html = browser.html
news_soup = BeautifulSoup(html, 'html.parser')
# In[5]:
#Retrieve Title and News Paragraph
article = news_soup.find("div", class_='list_text')
news_title = article.find("div", class_="content_title").text
news_p = article.find("div", class_ ="article_teaser_body").text
print(news_title)
print(news_p)
#add to dict
mars_info_dict['news_title'] = news_title
# Mars Space Image
# In[6]:
mars_image_url = 'https://spaceimages-mars.com/'
browser.visit(mars_image_url)
# In[7]:
#Scrape using BeautifulSoup
html = browser.html
image_soup = BeautifulSoup(html, 'html.parser')
# In[8]:
image_soup = image_soup.find('img', class_='headerimage')['src']
    mars_image_url = f'https://spaceimages-mars.com/{image_soup}'  # join the base URL with the scraped relative src, not with itself
mars_image_url
print(mars_image_url)
#add dict
mars_info_dict['mars_image_url'] = mars_image_url
# Mars Facts
# In[9]:
mars_facts = 'https://galaxyfacts-mars.com/'
#pandas to read html
tables = pd.read_html(mars_facts)
#Find Mars Facts DataFrame
df = tables[1]
#Assign the columns
df.columns = ['Description', 'Value']
html_table = df.to_html(table_id="html_tbl_css",justify='left',index=False)
#add parameter
data = df.to_dict(orient='records')
df
# In[10]:
facts_url = "https://galaxyfacts-mars.com/"
browser.visit(facts_url)
mars_data = pd.read_html(facts_url)
mars_data = pd.DataFrame(mars_data[0])
mars_facts = mars_data.to_html(header = False, index = False)
print(mars_facts)
#add dict
mars_info_dict['mars_facts'] = mars_facts
# In[11]:
url_hemisphere = "https://marshemispheres.com/"
browser.visit(url_hemisphere)
html_hemisphere = browser.html
soup = BeautifulSoup(html_hemisphere, "html.parser")
# In[12]:
# Scrape all items that contain mars hemispheres information
hemispheres = soup.find_all("div", class_="item")
# Create empty list
hemispheres_info = []
# main url for loop
hemispheres_url = "https://marshemispheres.com/"
# Loop through the list of all hemispheres information
for i in hemispheres:
title = i.find("h3").text
hemispheres_img = i.find("a", class_="itemLink product-item")["href"]
# Visit the link that contains image
browser.visit(hemispheres_url + hemispheres_img)
# HTML Object
image_html = browser.html
web_info = BeautifulSoup(image_html, "html.parser")
# Create full image url
img_url = hemispheres_url + web_info.find("img", class_="wide-image")["src"]
hemispheres_info.append({"title" : title, "img_url" : img_url})
        # Display titles and image url
print("")
print(title)
print(img_url)
print("-----------------------------------------")
#add dict
    mars_info_dict['hemispheres'] = hemispheres_info
# In[ ]:
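# (Sketch) a Flask-backed scraper would normally close the browser and return the
# collected dictionary at this point; the key names above are this script's own choices:
#   browser.quit()
#   return mars_info_dict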
| 18.785311 | 80 | 0.713684 |
8bb2a7194d7ce4bd989bcba79bbec8bf75ba9c1c | 1,340 | py | Python | Flappy Bird/gameVariables.py | Mechatronixyt/Python-Games | 243c26deef4303f49b1abdda97f32c3492739edb | [
"MIT"
] | 1 | 2021-03-17T11:34:39.000Z | 2021-03-17T11:34:39.000Z | Flappy Bird/gameVariables.py | Mechatronixyt/Python-Games | 243c26deef4303f49b1abdda97f32c3492739edb | [
"MIT"
] | null | null | null | Flappy Bird/gameVariables.py | Mechatronixyt/Python-Games | 243c26deef4303f49b1abdda97f32c3492739edb | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import pygame
from pygame.locals import *
#Global variables for the game
gameWidth = 300             #Game window width
gameHeight = 500            #Game window height
FPS = 60 #Frames per second
birdHeight = 35 #Height of the bird
birdWidth = 48 #Width of the bird
jumpSteps = 15 #Pixels to move
jumpPixels = 4 #Pixels per frame
dropPixels = 3 #Pixels per frame
groundHeight = 73 #Height of the ground
pipeWidth = 52 #Width of a pipe
pipeHeight = 320 #Max Height of a pipe
pipesSpace = 4 * birdHeight #Space between pipes
pipesAddInterval = 2000 #Milliseconds
pixelsFrame = 2 #Pixels per frame
getNewPipe = USEREVENT + 1 #Custom event
pygame.init() #Initialize pygame
screenResolution = pygame.display.Info() #Get screen resolution
pygame.quit() #Close pygame
gameScore = 0               #Game score
waitClick = True
| 43.225806 | 67 | 0.468657 |
213e1e83e770614abfa29cb8c3bed63c81f80607 | 1,555 | py | Python | section_3/lesson_6_step_9_lang/examples/one/conftest.py | aderny-twc/selenium_and_python | ff18cf38bd7c266adbb37cf894548f54b1bf4633 | [
"MIT"
] | null | null | null | section_3/lesson_6_step_9_lang/examples/one/conftest.py | aderny-twc/selenium_and_python | ff18cf38bd7c266adbb37cf894548f54b1bf4633 | [
"MIT"
] | null | null | null | section_3/lesson_6_step_9_lang/examples/one/conftest.py | aderny-twc/selenium_and_python | ff18cf38bd7c266adbb37cf894548f54b1bf4633 | [
"MIT"
] | null | null | null | import pytest
import time
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
def pytest_addoption(parser):
parser.addoption('--browser_name', action='store', default="chrome",
help="Choose browser: chrome or firefox")
parser.addoption('--language', action='store', default='ru',
help="Choose language. For example use --language=es")
parser.addoption('--wait', action='store', default=0,
help="Select a waiting time (in seconds) to make sure the test is working correctly. For example use --wait=30")
@pytest.fixture(scope="function")
def browser(request):
browser_name = request.config.getoption("browser_name")
browser_language = request.config.getoption("language")
wait_time = request.config.getoption("wait")
browser = None
if browser_name == "chrome":
print("\nstart chrome browser for test..")
options = Options()
options.add_experimental_option('prefs', {'intl.accept_languages': browser_language})
browser = webdriver.Chrome(options=options)
elif browser_name == "firefox":
print("\nstart firefox browser for test..")
fp = webdriver.FirefoxProfile()
fp.set_preference("intl.accept_languages", browser_language)
browser = webdriver.Firefox(firefox_profile=fp)
else:
raise pytest.UsageError("--browser_name should be chrome or firefox")
yield browser
time.sleep(int(wait_time))
print("\nquit browser..")
browser.quit()
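# Example invocations using the options registered above (the test module name is an assumption):
#   pytest -s test_items.py --browser_name=chrome --language=es
#   pytest -s test_items.py --browser_name=firefox --language=fr --wait=30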
| 40.921053 | 133 | 0.679743 |
96ac836306d109069ab980a700a0c4c0f4da7969 | 77,369 | py | Python | harvest.py | kermitt2/article-dataset-builder | b97fbc063972658e05ffbd268dd5a3a82b12f629 | [
"Apache-2.0"
] | 13 | 2020-03-22T21:45:19.000Z | 2022-03-24T09:28:25.000Z | harvest.py | kermitt2/article-dataset-builder | b97fbc063972658e05ffbd268dd5a3a82b12f629 | [
"Apache-2.0"
] | 3 | 2020-04-09T13:27:30.000Z | 2021-11-01T20:12:41.000Z | harvest.py | kermitt2/article-dataset-builder | b97fbc063972658e05ffbd268dd5a3a82b12f629 | [
"Apache-2.0"
] | 1 | 2020-09-21T18:20:24.000Z | 2020-09-21T18:20:24.000Z | import argparse
import os
import io
import sys
import urllib3
from urllib import parse
from concurrent.futures import ThreadPoolExecutor, as_completed
import argparse
import boto3
import botocore
import magic
import requests
import shutil
import gzip
import tarfile
import json
import pickle
import subprocess
import S3
import csv
import time
import uuid
import lmdb
from tqdm import tqdm
import logging
import logging.handlers
from random import randint, choices
map_size = 100 * 1024 * 1024 * 1024
logging.basicConfig(filename='harvester.log', filemode='w', level=logging.DEBUG)
urllib3.disable_warnings()
class Harverster(object):
"""
What:
- Harvester for article set (list of DOI, PMID, PMC ID or basic metadata provided in a csv file, e.g. CORD-19 csv metadata file)
with robust parallel PDF download
    - Perform some metadata enrichment/aggregation via biblio-glutton/CrossRef API and output consolidated metadata in a json file
- Perform Grobid full processing of PDF (including bibliographical reference consolidation and OA access resolution)
Optionally:
- generate thumbnails for article (first page)
- load stuff on S3 instead of local file
- generate json PDF annotation (with coordinates) for inline reference markers and bibliographical references
Usage: see the Readme.md file
"""
def __init__(self, config_path='./config.json', thumbnail=False, sample=None, dump_metadata=False, annotation=False, only_download=False, full_diagnostic=False):
# boolean indicating if we only want to download the raw files without structuring them into XML
self.only_download = only_download
self.full_diagnostic = full_diagnostic
self.config = None
self._load_config(config_path)
        # stable resources like identifier mappings and archive download mappings are stored here
self.resource_path = "./resources"
# the file where all the metadata are stored
self.dump_file = dump_metadata
# boolean indicating if we want to generate thumbnails of front page of PDF
self.thumbnail = thumbnail
self.annotation = annotation
        # if a sample value is provided, only the indicated number of PDF are harvested
self.sample = sample
self.s3 = None
if self.config["bucket_name"] is not None and len(self.config["bucket_name"]) > 0:
self.s3 = S3.S3(self.config)
# in case we use a local folder filled with Elsevier COVID-19 Open Access PDF from their ftp server
self.elsevier_oa_map = None
self._init_local_file_map()
# the following lmdb map gives for every PMC ID where to download the archive file containing NLM and PDF files
self.env_pmc_oa = None
# standard lmdb environment for storing biblio entries by uuid
self.env_entries = None
# lmdb environment for storing mapping between sha/doi/pmcid and uuid
self.env_uuid = None
self._init_lmdb()
self.dump_file_name = "consolidated_metadata.json"
def _load_config(self, path='./config.json'):
"""
Load the json configuration
"""
config_json = open(path).read()
self.config = json.loads(config_json)
# test if GROBID is up and running, except if we just want to download raw files
if not self.only_download and not self.full_diagnostic:
the_url = _grobid_url(self.config['grobid_base'], self.config['grobid_port'])
the_url += "isalive"
r = requests.get(the_url)
if r.status_code != 200:
logging.warning('GROBID server does not appear up and running ' + str(r.status_code))
else:
logging.info("GROBID server is up and running")
def _init_local_file_map(self):
# build the local file map, if any, for the Elsevier COVID-19 OA set
# TBD: this might better go to its own LMDB map than staying in memory like this!
if self.config["cord19_elsevier_pdf_path"] is not None and len(self.config["cord19_elsevier_pdf_path"])>0 and self.elsevier_oa_map is None:
# init map
self.elsevier_oa_map = {}
if not "cord19_elsevier_map_path" in self.config or len(self.config["cord19_elsevier_map_path"])==0:
return
if os.path.isfile(os.path.join(self.resource_path, self.config["cord19_elsevier_map_path"])):
with gzip.open(os.path.join(self.resource_path, self.config["cord19_elsevier_map_path"]), mode="rt") as csv_file:
csv_reader = csv.DictReader(csv_file)
for row in csv_reader:
if row["doi"] is not None and len(row["doi"])>0:
self.elsevier_oa_map[row["doi"].lower()] = row["pdf"]
if row["pii"] is not None and len(row["pii"])>0:
self.elsevier_oa_map[row["pii"]] = row["pdf"]
def _init_lmdb(self):
# create the data path if it does not exist
if not os.path.isdir(self.config["data_path"]):
try:
os.makedirs(self.config["data_path"])
except OSError:
logging.warning("Creation of the directory %s failed" % self.config["data_path"])
else:
logging.info("Successfully created the directory %s" % self.config["data_path"])
# open in write mode
envFilePath = os.path.join(self.config["data_path"], 'entries')
self.env_entries = lmdb.open(envFilePath, map_size=map_size)
envFilePath = os.path.join(self.config["data_path"], 'uuid')
self.env_uuid = lmdb.open(envFilePath, map_size=map_size)
# build the PMC map information, in particular for downloading the archive file containing the PDF and XML
# files (PDF not always present)
resource_file = os.path.join(self.resource_path, "oa_file_list.txt")
        # if the file is not present, it is downloaded from ftp://ftp.ncbi.nlm.nih.gov/pub/pmc/oa_file_list.txt
        # (also available at https://ftp.ncbi.nlm.nih.gov/pub/pmc/oa_file_list.txt)
if not os.path.isfile(resource_file):
url = "ftp://ftp.ncbi.nlm.nih.gov/pub/pmc/oa_file_list.txt"
logging.debug("Downloading PMC resource file: " + url)
_download(url, resource_file)
envFilePath = os.path.join(self.resource_path, 'pmc_oa')
if os.path.isfile(resource_file) and not os.path.isdir(envFilePath):
# open in write mode
self.env_pmc_oa = lmdb.open(envFilePath, map_size=map_size)
txn = self.env_pmc_oa.begin(write=True)
nb_lines = 0
            # get the number of lines in the file
with open(resource_file, "r") as fp:
for line in fp:
nb_lines += 1
# fill this lmdb map
print("building PMC resource map - done only one time")
with open(resource_file, "r") as fp:
count = 0
for line in tqdm(fp, total=nb_lines):
if count == 0:
#skip first line which is just a time stamp
count += 1
continue
row = line.split('\t')
subpath = row[0]
pmcid = row[2]
# pmid is optional
pmid= row[3]
license = row[4]
localInfo = {}
localInfo["subpath"] = subpath
localInfo["pmid"] = pmid
localInfo["license"] = license
txn.put(pmcid.encode(encoding='UTF-8'), _serialize_pickle(localInfo))
count += 1
txn.commit()
self.env_pmc_oa.close()
# open in read mode only
self.env_pmc_oa = lmdb.open(envFilePath, readonly=True, lock=False)
def unpaywalling_doi(self, doi):
"""
Check the Open Access availability of the DOI via Unpaywall, return the best download URL or None otherwise.
We need to use the Unpaywall API to get fresh information, because biblio-glutton is based on the
        Unpaywall dataset dump which has a 7-month gap.
"""
response = requests.get(self.config["unpaywall_base"] + doi,
params={'email': self.config["unpaywall_email"]}, verify=False, timeout=10).json()
if response['best_oa_location'] and 'url_for_pdf' in response['best_oa_location'] and response['best_oa_location']['url_for_pdf']:
return response['best_oa_location']['url_for_pdf']
elif 'url' in response['best_oa_location'] and response['best_oa_location']['url'].startswith(self.config['pmc_base_web']):
return response['best_oa_location']['url']+"/pdf/"
        # we have a look at the other "oa_locations", which might have a `url_for_pdf` ('best_oa_location' does not always have a
        # `url_for_pdf`, for example for Elsevier OA articles)
for other_oa_location in response['oa_locations']:
# for a PMC file, we can concatenate /pdf/ to the base, eg https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7029158/pdf/
# but the downloader will have to use a good User-Agent and follow redirection
#if other_oa_location['url'].startswith(self.config['pmc_base_web']):
if 'url_for_pdf' in other_oa_location and other_oa_location['url_for_pdf'] != None:
if other_oa_location['url_for_pdf'].find('europepmc.org/articles/pmc') != -1 or other_oa_location['url_for_pdf'].find('ncbi.nlm.nih.gov/pmc/articles') != -1:
return other_oa_location['url']+"/pdf/"
# last choice, non PMC url to pdf
for other_oa_location in response['oa_locations']:
if 'url_for_pdf' in other_oa_location and other_oa_location['url_for_pdf'] != None:
return other_oa_location['url_for_pdf']
return None
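    # Example of what this method consumes (sketch): GET <unpaywall_base><doi>?email=...
    # returns a JSON record whose 'best_oa_location'/'oa_locations' entries may carry
    # 'url' and 'url_for_pdf' fields, which are ranked by the logic above.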
def elsevier_oa_check(self, doi=None, pii=None):
# this is a list of OA articles from Elsevier, e.g. COVID papers, if successful it will return the path
# to the local PDF corresponding to this article
# we can download these pdf set from their dedicated ftp, and make them available locally for this dataset builder
# note: also direct download link for pdf - but maybe some risks to be blocked?
# https://www.sciencedirect.com/science/article/pii/S0924857920300674/pdfft?isDTMRedir=true&download=true
# their API is not even up to date: https://api.elsevier.com/content/article/pii/S0924857920300674
# still described as closed access
if self.elsevier_oa_map is None:
return None
if doi is None and pii is None:
return None
if self.config["cord19_elsevier_pdf_path"] is None or len(self.config["cord19_elsevier_pdf_path"]) == 0:
return None
'''
if doi is not None:
print(doi)
if doi.lower() in self.elsevier_oa_map:
print(self.elsevier_oa_map[doi.lower()])
'''
if doi is not None and doi.lower() in self.elsevier_oa_map:
return os.path.join(self.config["cord19_elsevier_pdf_path"],self.elsevier_oa_map[doi.lower()])
if pii is not None and pii in self.elsevier_oa_map:
return os.path.join(self.config["cord19_elsevier_pdf_path"],self.elsevier_oa_map[pii])
def pmc_oa_check(self, pmcid):
try:
with self.env_pmc_oa.begin() as txn:
pmc_info_object = txn.get(pmcid.encode(encoding='UTF-8'))
if pmc_info_object:
try:
pmc_info = _deserialize_pickle(pmc_info_object)
except:
logging.error("omg _deserialize_pickle failed?")
if "license" in pmc_info:
license = pmc_info["license"]
license = license.replace("\n","")
else:
license = ""
if "subpath" in pmc_info:
subpath = pmc_info["subpath"];
return os.path.join(self.config["pmc_base_ftp"],subpath), license
except lmdb.Error:
logging.error("lmdb pmc os look-up failed")
return None, None
def biblio_glutton_lookup(self, doi=None, pmcid=None, pmid=None, istex_id=None, istex_ark=None):
"""
        Lookup on biblio_glutton with the provided strong identifiers, return the full aggregated biblio_glutton record
"""
if not "biblio_glutton_base" in self.config or len(self.config["biblio_glutton_base"]) == 0:
return None
biblio_glutton_url = _biblio_glutton_url(self.config["biblio_glutton_base"])
success = False
jsonResult = None
if doi is not None and len(doi)>0:
response = requests.get(biblio_glutton_url, params={'doi': doi}, verify=False, timeout=5)
success = (response.status_code == 200)
if success:
jsonResult = response.json()
if not success and pmid is not None and len(pmid)>0:
response = requests.get(biblio_glutton_url + "pmid=" + pmid, verify=False, timeout=5)
success = (response.status_code == 200)
if success:
jsonResult = response.json()
if not success and pmcid is not None and len(pmcid)>0:
response = requests.get(biblio_glutton_url + "pmc=" + pmcid, verify=False, timeout=5)
success = (response.status_code == 200)
if success:
jsonResult = response.json()
if not success and istex_id is not None and len(istex_id)>0:
response = requests.get(biblio_glutton_url + "istexid=" + istex_id, verify=False, timeout=5)
success = (response.status_code == 200)
if success:
jsonResult = response.json()
if not success and doi is not None and len(doi)>0:
# let's call crossref as fallback for the X-months gap
# https://api.crossref.org/works/10.1037/0003-066X.59.1.29
user_agent = {'User-agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:81.0) Gecko/20100101 Firefox/81.0 (mailto:'
+ self.config['crossref_email'] + ')'}
response = requests.get(self.config['crossref_base']+"/works/"+doi, headers=user_agent, verify=False, timeout=5)
if response.status_code == 200:
jsonResult = response.json()['message']
# filter out references and re-set doi, in case there are obtained via crossref
if "reference" in jsonResult:
del jsonResult["reference"]
else:
success = False
jsonResult = None
return jsonResult
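    # Illustration of the queries issued above (the exact path prefix is built by
    # _biblio_glutton_url(), typically something like <base>/service/lookup?):
    #   ...lookup?doi=10.1038/nature12373
    #   ...lookup?pmid=31978945
    # The identifier values here are arbitrary examples.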
def reset(self, dump_file=False):
"""
Remove the local files and lmdb keeping track of the state of advancement of the harvesting and
of the failed entries
"""
# close environments
self.env_entries.close()
self.env_uuid.close()
# clean any possibly remaining tmp files
for f in os.listdir(self.config["data_path"]):
if f.endswith(".pdf") or f.endswith(".png") or f.endswith(".nxml") or f.endswith(".xml") or f.endswith(".tar.gz") or f.endswith(".json"):
os.remove(os.path.join(self.config["data_path"], f))
            # clean any existing data sub-directories
path = os.path.join(self.config["data_path"], f)
if os.path.isdir(path):
try:
shutil.rmtree(path)
except OSError as e:
logging.error("Error: %s - %s." % (e.filename, e.strerror))
# clean the metadata file if present
if self.dump_file:
if os.path.isfile(self.dump_file_name):
os.remove(self.dump_file_name)
# re-init the environments
self._init_lmdb()
def dump_metadata(self):
if self.dump_file_name is None:
self.dump_file_name = "consolidated_metadata.json"
# init lmdb transactions
txn = self.env_entries.begin(write=True)
nb_total = txn.stat()['entries']
print("number of harvested entries:", nb_total)
with open(self.dump_file_name,'w') as file_out:
# iterate over lmdb
cursor = txn.cursor()
for key, value in cursor:
if txn.get(key) is None:
continue
local_entry = _deserialize_pickle(txn.get(key))
file_out.write(json.dumps(local_entry, sort_keys=True))
file_out.write("\n")
# we need to upload to S3 the consolidated metadata file, if S3 has been set
if self.s3 is not None:
if os.path.isfile(self.dump_file_name):
self.s3.upload_file_to_s3(self.dump_file_name, ".", storage_class='ONEZONE_IA')
def run_grobid(self, pdf_file, output=None, annotation_output=None):
# normal fulltext TEI file
logging.debug("run grobid:" + pdf_file + " -> " + output)
if output is not None:
files = {
'input': (
pdf_file,
open(pdf_file, 'rb'),
'application/pdf',
{'Expires': '0'}
)
}
the_url = _grobid_url(self.config['grobid_base'], self.config['grobid_port'])
the_url += "processFulltextDocument"
# set the GROBID parameters
the_data = {}
the_data['generateIDs'] = '1'
the_data['consolidateHeader'] = '1'
the_data['consolidateCitations'] = '0'
the_data['includeRawCitations'] = '1'
the_data['includeRawAffiliations'] = '1'
the_data['teiCoordinates'] = ['ref', 'biblStruct', 'persName', 'figure', 'formula', 's']
r = requests.request(
"POST",
the_url,
headers={'Accept': 'application/xml'},
files=files,
data=the_data,
timeout=60
)
status = r.status_code
if status == 503:
time.sleep(self.config['sleep_time'])
return self.process_pdf(pdf_file, output, None)
elif status != 200:
logging.error('Processing failed with error ' + str(status))
else:
# writing TEI file
try:
with io.open(output,'w',encoding='utf8') as tei_file:
tei_file.write(r.text)
except OSError:
logging.error("Writing resulting TEI XML file %s failed" % output)
# reference annotation file
if annotation_output is not None:
# we have to re-open the PDF file
files = {
'input': (
pdf_file,
open(pdf_file, 'rb'),
'application/pdf',
{'Expires': '0'}
)
}
the_url = _grobid_url(self.config['grobid_base'], self.config['grobid_port'])
the_url += "referenceAnnotations"
# set the GROBID parameters
the_data = {}
the_data['consolidateCitations'] = '1'
r = requests.request(
"POST",
the_url,
headers={'Accept': 'application/json'},
files=files,
data=the_data,
timeout=60
)
status = r.status_code
if status == 503:
time.sleep(self.config['sleep_time'])
return self.process_pdf(pdf_file, None, annotation_output)
elif status != 200:
logging.error('Processing failed with error ' + str(status))
else:
# writing TEI file
try:
with io.open(annotation_output,'w',encoding='utf8') as json_file:
json_file.write(r.text)
except OSError:
logging.error("Writing resulting JSON file %s failed" % annotation_output)
def harvest_dois(self, dois_file):
with open(dois_file, 'rt') as fp:
line_count = 0 # total count of articles
i = 0 # counter for article per batch
identifiers = []
dois = []
for count, line in enumerate(fp):
if len(line.strip()) == 0:
continue
if i == self.config["batch_size"]:
with ThreadPoolExecutor(max_workers=self.config["batch_size"]) as executor:
# branch to the right entry processor, depending on the input csv
executor.map(self.processEntryDOI, identifiers, dois, timeout=50)
# reinit
i = 0
identifiers = []
dois = []
the_doi = line.strip()
the_doi = _clean_doi(the_doi)
# check if the entry has already been processed
identifier = self.getUUIDByStrongIdentifier(the_doi)
if identifier is None:
# we need a new identifier
identifier = str(uuid.uuid4())
identifiers.append(identifier)
dois.append(the_doi)
line_count += 1
i += 1
# we need to process the last incomplete batch, if not empty
if len(identifiers) > 0:
with ThreadPoolExecutor(max_workers=self.config["batch_size"]) as executor:
# branch to the right entry processor, depending on the input csv
executor.map(self.processEntryDOI, identifiers, dois, timeout=50)
print("processed", str(line_count), "articles")
def harvest_cord19(self, metadata_csv_file):
# first get the number of entries to be able to display a progress bar
total_entries = 0
with open(metadata_csv_file, mode='r') as csv_file:
csv_reader = csv.DictReader(csv_file)
for row in csv_reader:
total_entries += 1
# format is:
# cord_uid,sha,source_x,title,doi,pmcid,pubmed_id,license,abstract,publish_time,authors,journal,Microsoft Academic Paper ID,
# WHO #Covidence,has_full_text,full_text_file,url
print("harvesting CORD-19 full texts")
with open(metadata_csv_file, mode='r') as csv_file:
csv_reader = csv.DictReader(csv_file)
line_count = 0 # total count of articles
i = 0 # counter for article per batch
identifiers = []
rows = []
for row in tqdm(csv_reader, total=total_entries):
if i == self.config["batch_size"]:
with ThreadPoolExecutor(max_workers=self.config["batch_size"]) as executor:
# branch to the right entry processor, depending on the input csv
executor.map(self.processEntryCord19, identifiers, rows, timeout=50)
# reinit
i = 0
identifiers = []
rows = []
# check if the entry has already been processed
# since the 27.03.2020 update we can use the cord_uid as identifier, and keep doi of course as fallback
# we don't use the sha as identifier, just keep it in the metadata
if row["cord_uid"] and len(row["cord_uid"])>0:
# in the current version, there is always a cord_uid normally
if self.getUUIDByStrongIdentifier(row["cord_uid"]) is not None:
line_count += 1
continue
if row["doi"] and len(row["doi"])>0:
if self.getUUIDByStrongIdentifier(row["doi"]) is not None:
line_count += 1
continue
# we use cord_uid as identifier
identifier = row["cord_uid"]
identifiers.append(identifier)
rows.append(row)
line_count += 1
i += 1
# we need to process the last incomplete batch, if not empty
if len(identifiers) >0:
with ThreadPoolExecutor(max_workers=self.config["batch_size"]) as executor:
# branch to the right entry processor, depending on the input csv
executor.map(self.processEntryCord19, identifiers, rows, timeout=50)
print("processed", str(line_count), "articles from CORD-19")
def harvest_pmids(self, pmids_file):
with open(pmids_file, 'rt') as fp:
line_count = 0 # total count of articles
i = 0 # counter for article per batch
identifiers = []
pmids = []
for count, line in enumerate(fp):
if len(line.strip()) == 0:
continue
if i == self.config["batch_size"]:
with ThreadPoolExecutor(max_workers=self.config["batch_size"]) as executor:
executor.map(self.processEntryPMID, identifiers, pmids, timeout=50)
# reinit
i = 0
identifiers = []
pmids = []
the_pmid = line.strip()
# check if the entry has already been processed
identifier = self.getUUIDByStrongIdentifier(the_pmid)
if identifier is None:
# we need a new identifier
identifier = str(uuid.uuid4())
identifiers.append(identifier)
pmids.append(the_pmid)
line_count += 1
i += 1
# we need to process the last incomplete batch, if not empty
if len(identifiers) > 0:
with ThreadPoolExecutor(max_workers=self.config["batch_size"]) as executor:
executor.map(self.processEntryPMID, identifiers, pmids, timeout=50)
print("processed", str(line_count), "article PMID")
def harvest_pmcids(self, pmcids_file):
with open(pmcids_file, 'rt') as fp:
line_count = 0 # total count of articles
i = 0 # counter for article per batch
identifiers = []
pmcids = []
for count, line in enumerate(fp):
if len(line.strip()) == 0:
continue
if i == self.config["batch_size"]:
with ThreadPoolExecutor(max_workers=self.config["batch_size"]) as executor:
executor.map(self.processEntryPMCID, identifiers, pmcids, timeout=50)
# reinit
i = 0
identifiers = []
pmcids = []
the_pmcid = line.strip()
if the_pmcid == 'pmc':
continue
# check if the entry has already been processed
identifier = self.getUUIDByStrongIdentifier(the_pmcid)
if identifier is None:
# we need a new identifier
identifier = str(uuid.uuid4())
identifiers.append(identifier)
pmcids.append(the_pmcid)
line_count += 1
i += 1
# we need to process the last incomplete batch, if not empty
if len(identifiers) > 0:
with ThreadPoolExecutor(max_workers=self.config["batch_size"]) as executor:
executor.map(self.processEntryPMCID, identifiers, pmcids, timeout=50)
print("processed", str(line_count), "article PMC ID")
def processEntryDOI(self, identifier, doi):
localJson = None
# if the entry has already been processed (partially or completely), we reuse the entry
with self.env_entries.begin(write=False) as txn:
value = txn.get(identifier.encode(encoding='UTF-8'))
if value is not None:
localJson = _deserialize_pickle(value)
if localJson is None:
localJson = self.biblio_glutton_lookup(doi=doi, pmcid=None, pmid=None, istex_id=None, istex_ark=None)
if localJson is None:
localJson = {}
localJson['DOI'] = doi
localJson["id"] = identifier
logging.debug("processing " + localJson['DOI'] + " as " + identifier)
localJson = _initProcessStateInformation(localJson)
self.updateIdentifierMap(localJson)
self.processTask(localJson)
def processEntryPMID(self, identifier, pmid):
localJson = None
# if the entry has already been processed (partially or completely), we reuse the entry
with self.env_entries.begin(write=False) as txn:
value = txn.get(identifier.encode(encoding='UTF-8'))
if value is not None:
localJson = _deserialize_pickle(value)
if localJson is None:
localJson = self.biblio_glutton_lookup(doi=None, pmcid=None, pmid=pmid, istex_id=None, istex_ark=None)
if localJson is None:
localJson = {}
localJson['pmid'] = pmid
localJson["id"] = identifier
logging.debug("processing " + localJson['pmid'] + " as " + identifier)
localJson = _initProcessStateInformation(localJson)
self.updateIdentifierMap(localJson)
self.processTask(localJson)
def processEntryPMCID(self, identifier, pmcid):
localJson = None
# if the entry has already been processed (partially or completely), we reuse the entry
with self.env_entries.begin(write=False) as txn:
value = txn.get(identifier.encode(encoding='UTF-8'))
if value is not None:
localJson = _deserialize_pickle(value)
if localJson is None:
localJson = self.biblio_glutton_lookup(doi=None, pmcid=pmcid, pmid=None, istex_id=None, istex_ark=None)
if localJson is None:
localJson = {}
localJson['pmcid'] = pmcid
localJson["id"] = identifier
logging.debug("processing " + localJson['pmcid'] + " as " + identifier)
localJson = _initProcessStateInformation(localJson)
self.updateIdentifierMap(localJson)
self.processTask(localJson)
def processEntryCord19(self, identifier, row):
# cord_uid,sha,source_x,title,doi,pmcid,pubmed_id,license,abstract,publish_time,authors,journal,Microsoft Academic Paper ID,
# WHO #Covidence,has_full_text,full_text_file,url
localJson = None
# if the entry has already been processed (partially or completely), we reuse the entry
with self.env_entries.begin(write=False) as txn:
value = txn.get(identifier.encode(encoding='UTF-8'))
if value is not None:
localJson = _deserialize_pickle(value)
# check if the json is already in the legacy repo
'''
if "legacy_data_path" in self.config and len(self.config["legacy_data_path"].strip())>0:
dest_path = generateStoragePath(identifier)
old_json_filename = os.path.join(self.config["legacy_data_path"], dest_path, identifier+".json")
if os.path.exists(old_json_filename) and _is_valid_file(old_pdf_filename, "json"):
localJson = json.load(old_json_filename)
'''
if localJson is None:
try:
localJson = self.biblio_glutton_lookup(doi=_clean_doi(row["doi"]), pmcid=row["pmcid"], pmid=row["pubmed_id"], istex_id=None, istex_ark=None)
except:
logging.debug("biblio-glutton call fails")
localJson = None
if localJson is None:
localJson = {}
localJson['title'] = row["title"]
localJson['year']= row["publish_time"]
# in the case of CORD-19, we can refresh some metadata even if the entry has already been processed, so that we can update
# the loaded set from one weekly release to another one
localJson["id"] = identifier
# add the CORD-19 sha, though it won't be used
if "sha" in row:
localJson["cord_sha"] = row["sha"]
if "license" in row and row["license"] is not None and len(row["license"])>0:
localJson["license-simplified"] = row["license"]
if "abstract" in row and row["abstract"] is not None and len(row["abstract"])>0:
localJson["abstract"] = row["abstract"]
if "mag_id" in row and row["mag_id"] is not None and len(row["mag_id"])>0:
localJson["MAG_ID"] = row["mag_id"]
if "who_covidence_id" in row and row["who_covidence_id"] is not None and len(row["who_covidence_id"])>0:
localJson["WHO_Covidence"] = row["who_covidence_id"]
if 'doi' in row and 'DOI' not in localJson and row["doi"] is not None and len(row["doi"])>0:
localJson['DOI'] = row["doi"]
# add possible missing information in the metadata entry
if "pmcid" in row and row["pmcid"] is not None and len(row["pmcid"])>0 and 'pmcid' not in localJson:
localJson['pmcid'] = row["pmcid"]
if "pubmed_id" in row and row["pubmed_id"] is not None and len(row["pubmed_id"])>0 and 'pmid' not in localJson:
localJson['pmid'] = row["pubmed_id"]
if "arxiv_id" in row and row["arxiv_id"] is not None and len(row["arxiv_id"])>0 and 'arxiv_id' not in localJson:
localJson['arxiv_id'] = row["arxiv_id"]
localJson = _initProcessStateInformation(localJson)
# update uuid lookup map
with self.env_uuid.begin(write=True) as txn_uuid:
txn_uuid.put(row["cord_uid"].encode(encoding='UTF-8'), identifier.encode(encoding='UTF-8'))
self.updateIdentifierMap(localJson)
self.processTask(localJson)
def updateIdentifierMap(self, localJson):
if "DOI" in localJson:
with self.env_uuid.begin(write=True) as txn_uuid:
txn_uuid.put(localJson['DOI'].encode(encoding='UTF-8'), localJson["id"].encode(encoding='UTF-8'))
if "pmcid" in localJson:
with self.env_uuid.begin(write=True) as txn_uuid:
txn_uuid.put(localJson['pmcid'].encode(encoding='UTF-8'), localJson["id"].encode(encoding='UTF-8'))
if "pmid" in localJson:
with self.env_uuid.begin(write=True) as txn_uuid:
txn_uuid.put(localJson['pmid'].encode(encoding='UTF-8'), localJson["id"].encode(encoding='UTF-8'))
# store the identifier itself too, for keeping track of already seen identifiers
if "id" in localJson:
with self.env_uuid.begin(write=True) as txn_uuid:
txn_uuid.put(localJson['id'].encode(encoding='UTF-8'), localJson["id"].encode(encoding='UTF-8'))
def processTask(self, localJson):
identifier = localJson["id"]
# call Unpaywall
localUrl = None
if not localJson["has_valid_oa_url"] or not localJson["has_valid_pdf"]:
# for CORD-19, we test if we have an Elsevier OA publication, if yes we can check the local PDF store
# obtained from the Elsevier COVID-19 ftp
if "pii" in localJson:
local_pii = localJson['pii']
else:
local_pii = None
if "DOI" in localJson:
local_doi = localJson['DOI'].lower()
else:
local_doi = None
local_elsevier = self.elsevier_oa_check(doi=local_doi,pii=local_pii)
if local_elsevier is not None and os.path.isfile(local_elsevier):
localUrl = "file://" + local_elsevier
# check if the PDF and metadata are available in the legacy repo
if localUrl is None and "legacy_data_path" in self.config and len(self.config["legacy_data_path"].strip())>0:
dest_path = generateStoragePath(identifier)
old_pdf_filename = os.path.join(self.config["legacy_data_path"], dest_path, identifier+".pdf")
if os.path.exists(old_pdf_filename) and _is_valid_file(old_pdf_filename, "pdf"):
localUrl = "file://" + old_pdf_filename
# for PMC, we can use NIH ftp server for retrieving the PDF and XML NLM file
if localUrl is None:
if "pmcid" in localJson:
localUrl, _ = self.pmc_oa_check(pmcid=localJson["pmcid"])
if localUrl is None:
logging.debug("no PMC oa valid url: " + localJson["pmcid"])
if localUrl is None:
try:
localUrl = self.unpaywalling_doi(localJson['DOI'])
except:
logging.debug("Unpaywall API call for finding Open URL not succesful")
if localUrl is None:
if "pmcid" in localJson:
localUrl, _ = self.pmc_oa_check(pmcid=localJson["pmcid"])
if localUrl is None:
logging.debug("no PMC oa valid url: " + localJson["pmcid"])
if localUrl is None or len(localUrl) == 0:
if "oaLink" in localJson:
# we can try to use the OA link from biblio-glutton as fallback (though not very optimistic on this!)
localUrl = localJson["oaLink"]
else:
localJson["oaLink"] = localUrl
if "oaLink" in localJson and localJson["oaLink"] is not None and len(localJson["oaLink"])>0:
localJson["has_valid_oa_url"] = True
if "oaLink" in localJson:
logging.debug("OA link: " + localJson["oaLink"])
# let's try to get this damn PDF
pdf_filename = os.path.join(self.config["data_path"], identifier+".pdf")
if not localJson["has_valid_pdf"]:
if "oaLink" in localJson:
# if a legacy directory/repo is defined in the config, we can do a quick look-up there to check whether a PDF
# is already available/downloaded with the same identifier
if "legacy_data_path" in self.config and len(self.config["legacy_data_path"].strip())>0:
dest_path = generateStoragePath(identifier)
old_pdf_filename = os.path.join(self.config["legacy_data_path"], dest_path, identifier+".pdf")
if os.path.exists(old_pdf_filename) and _is_valid_file(old_pdf_filename, "pdf"):
# an existing PDF has been archived for this unique identifier, let's reuse it
shutil.copy(old_pdf_filename, pdf_filename)
localJson["has_valid_pdf"] = True
# set back the original online url
try:
localJson["oaLink"] = self.unpaywalling_doi(localJson['DOI'])
except:
logging.debug("Unpaywall API call for finding Open URL not succesful")
# check if we also have an NLM file already downloaded
old_nlm_filename = os.path.join(self.config["legacy_data_path"], dest_path, identifier+".nxml")
if os.path.exists(old_nlm_filename): #and _is_valid_file(old_nlm_filename, "xml"):
# an existing NLM file has been archived for this unique identifier, let's reuse it
nlm_filename = os.path.join(self.config["data_path"], identifier+".nxml")
shutil.copy(old_nlm_filename, nlm_filename)
# set back the original online url
try:
localJson["oaLink"] = self.unpaywalling_doi(localJson['DOI'])
except:
logging.debug("Unpaywall API call for finding Open URL not succesful")
if not localJson["has_valid_pdf"]:
localUrl = localJson["oaLink"]
if localUrl is not None and len(localUrl)>0:
if localUrl.startswith("file://") and os.path.isfile(localUrl.replace("file://","")):
shutil.copyfile(localUrl.replace("file://",""), pdf_filename)
elif localUrl.endswith(".tar.gz"):
archive_file = os.path.join(self.config["data_path"], identifier+".tar.gz")
_download(localUrl, archive_file)
_manage_pmc_archives(archive_file)
else:
_download(localUrl, pdf_filename)
if _is_valid_file(pdf_filename, "pdf"):
localJson["has_valid_pdf"] = True
# GROBIDification if a PDF is available and we don't limit ourselves to download only
if not localJson["has_valid_tei"] and not self.only_download:
tei_filename = os.path.join(self.config["data_path"], identifier+".grobid.tei.xml")
annotation_filename = None
if self.annotation:
annotation_filename = os.path.join(self.config["data_path"], identifier+"-ref-annotations.json")
if localJson["has_valid_pdf"]:
# GROBIDification with full biblio consolidation
if not os.path.exists(pdf_filename):
dest_path = generateStoragePath(identifier)
pdf_filename = os.path.join(self.config["data_path"], dest_path, identifier+".pdf")
try:
self.run_grobid(pdf_filename, tei_filename, annotation_filename)
except:
logging.debug("Grobid call failed")
if _is_valid_file(tei_filename, "xml"):
localJson["has_valid_tei"] = True
if self.annotation and _is_valid_file(annotation_filename, "json"):
localJson["has_valid_ref_annotation"] = True
# thumbnail if requested
if not localJson["has_valid_thumbnail"] and self.thumbnail:
if localJson["has_valid_pdf"]:
if not os.path.exists(pdf_filename):
dest_path = generateStoragePath(identifier)
pdf_filename = os.path.join(self.config["data_path"], dest_path, identifier+".pdf")
generate_thumbnail(pdf_filename)
if _is_valid_file(pdf_filename.replace('.pdf', '-thumb-small.png'), "png"):
localJson["has_valid_thumbnail"] = True
# indicate where the produced resources are
dest_path = generateStoragePath(localJson['id'])
localJson["data_path"] = dest_path
# write the consolidated metadata in the working data directory
with open(os.path.join(self.config["data_path"],identifier+".json"), "w") as file_out:
jsonStr = json.dumps(localJson, sort_keys=True)
file_out.write(jsonStr)
# and in the entry lmdb for the final dump (avoid retrieving the article metadata over S3 if set)
with self.env_entries.begin(write=True) as txn2:
txn2.put(identifier.encode(encoding='UTF-8'), _serialize_pickle(localJson))
# finalize by moving the downloaded and generated files to storage
self.manageFiles(localJson)
def manageFiles(self, local_entry):
"""
If S3 is the target storage, we upload the data for an article to the specified S3 bucket
and keep it clean behind us in the local data path.
Otherwise we simply move the data files under a tree structure adapted to a large number of files
"""
local_filename_pdf = os.path.join(self.config["data_path"], local_entry['id']+".pdf")
local_filename_nxml = os.path.join(self.config["data_path"], local_entry['id']+".nxml")
local_filename_tei = os.path.join(self.config["data_path"], local_entry['id']+".grobid.tei.xml")
local_filename_json = os.path.join(self.config["data_path"], local_entry['id']+".json")
local_filename_ref = os.path.join(self.config["data_path"], local_entry['id']+"-ref-annotations.json")
dest_path = generateStoragePath(local_entry['id'])
thumb_file_small = local_filename_pdf.replace('.pdf', '-thumb-small.png')
thumb_file_medium = local_filename_pdf.replace('.pdf', '-thumb-medium.png')
thumb_file_large = local_filename_pdf.replace('.pdf', '-thumb-large.png')
if self.s3 is not None:
# upload to S3
# upload of an individual file is already parallelized (multipart upload),
# so we don't additionally parallelize across files
if os.path.isfile(local_filename_pdf) and _is_valid_file(local_filename_pdf, "pdf"):
self.s3.upload_file_to_s3(local_filename_pdf, dest_path, storage_class='ONEZONE_IA')
if os.path.isfile(local_filename_nxml):
self.s3.upload_file_to_s3(local_filename_nxml, dest_path, storage_class='ONEZONE_IA')
if os.path.isfile(local_filename_tei):
self.s3.upload_file_to_s3(local_filename_tei, dest_path, storage_class='ONEZONE_IA')
if os.path.isfile(local_filename_json):
self.s3.upload_file_to_s3(local_filename_json, dest_path, storage_class='ONEZONE_IA')
if os.path.isfile(local_filename_ref):
self.s3.upload_file_to_s3(local_filename_ref, dest_path, storage_class='ONEZONE_IA')
if (self.thumbnail):
if os.path.isfile(thumb_file_small):
self.s3.upload_file_to_s3(thumb_file_small, dest_path, storage_class='ONEZONE_IA')
if os.path.isfile(thumb_file_medium):
self.s3.upload_file_to_s3(thumb_file_medium, dest_path, storage_class='ONEZONE_IA')
if os.path.isfile(thumb_file_large):
self.s3.upload_file_to_s3(thumb_file_large, dest_path, storage_class='ONEZONE_IA')
else:
# save under the local storage indicated by data_path in the config json
try:
local_dest_path = os.path.join(self.config["data_path"], dest_path)
os.makedirs(os.path.dirname(local_dest_path), exist_ok=True)
if os.path.isfile(local_filename_pdf) and _is_valid_file(local_filename_pdf, "pdf"):
shutil.copyfile(local_filename_pdf, os.path.join(local_dest_path, local_entry['id']+".pdf"))
if os.path.isfile(local_filename_nxml):
shutil.copyfile(local_filename_nxml, os.path.join(local_dest_path, local_entry['id']+".nxml"))
if os.path.isfile(local_filename_tei):
shutil.copyfile(local_filename_tei, os.path.join(local_dest_path, local_entry['id']+".grobid.tei.xml"))
if os.path.isfile(local_filename_json):
shutil.copyfile(local_filename_json, os.path.join(local_dest_path, local_entry['id']+".json"))
if os.path.isfile(local_filename_ref):
shutil.copyfile(local_filename_ref, os.path.join(local_dest_path, local_entry['id']+"-ref-annotations.json"))
if (self.thumbnail):
if os.path.isfile(thumb_file_small):
shutil.copyfile(thumb_file_small, os.path.join(local_dest_path, local_entry['id']+"-thumb-small.png"))
if os.path.isfile(thumb_file_medium):
shutil.copyfile(thumb_file_medium, os.path.join(local_dest_path, local_entry['id']+"-thumb-medium.png"))
if os.path.isfile(thumb_file_large):
shutil.copyfile(thumb_file_large, os.path.join(local_dest_path, local_entry['id']+"-thumb-large.png"))
except IOError as e:
logging.error("invalid path " + str(e))
# clean pdf and thumbnail files
try:
if os.path.isfile(local_filename_pdf):
os.remove(local_filename_pdf)
if os.path.isfile(local_filename_nxml):
os.remove(local_filename_nxml)
if os.path.isfile(local_filename_tei):
os.remove(local_filename_tei)
if os.path.isfile(local_filename_json):
os.remove(local_filename_json)
if os.path.isfile(local_filename_ref):
os.remove(local_filename_ref)
if (self.thumbnail):
if os.path.isfile(thumb_file_small):
os.remove(thumb_file_small)
if os.path.isfile(thumb_file_medium):
os.remove(thumb_file_medium)
if os.path.isfile(thumb_file_large):
os.remove(thumb_file_large)
except IOError as e:
logging.error("temporary file cleaning failed: " + str(e))
def getUUIDByStrongIdentifier(self, strong_identifier):
"""
Strong identifiers depend on the data to be processed but typically include DOI, sha, PMID, PMCID
"""
txn = self.env_uuid.begin()
return txn.get(strong_identifier.encode(encoding='UTF-8'))
def diagnostic(self, full=False, metadata_csv_file=None, cord19=False):
"""
Print a report on failures stored during the harvesting process
"""
nb_total = 0
nb_invalid_oa_url = 0
nb_invalid_pdf = 0
nb_invalid_tei = 0
nb_total_valid = 0
with self.env_entries.begin(write=True) as txn:
cursor = txn.cursor()
for key, value in cursor:
nb_total += 1
localJson = _deserialize_pickle(value)
if not localJson["has_valid_oa_url"]:
nb_invalid_oa_url += 1
nb_invalid_pdf += 1
nb_invalid_tei += 1
elif not localJson["has_valid_pdf"]:
nb_invalid_pdf += 1
nb_invalid_tei += 1
elif not localJson["has_valid_tei"]:
nb_invalid_tei += 1
else:
nb_total_valid += 1
print("---")
print("total entries:", nb_total)
print("---")
print("total valid entries:", nb_total_valid, "entries with valid OA URL and PDF and TEI XML")
print("---")
print("total invalid OA URL:", nb_invalid_oa_url)
print("total entries with valid OA URL:", str(nb_total-nb_invalid_oa_url))
print("---")
print("total invalid PDF:", nb_invalid_pdf)
print("total entries with successfully downloaded PDF:", str(nb_total-nb_invalid_pdf))
print("---")
print("total invalid TEI:", nb_invalid_tei)
print("total entries with successfully converted TEI XML:", str(nb_total-nb_invalid_tei))
print("---")
if full:
# check if we have the identifier map entries not present in the metadata map (this would indicate
# some sort of silent failure in the process, having no aggregated metadata saved)
nb_missing_metadata_entry = 0
nb_total_identifiers = 0
identifiers = set()
# iterate over the identifier lmdb
with self.env_uuid.begin(write=True) as txn:
cursor = txn.cursor()
for key, value in cursor:
decoded_value = value.decode(encoding='UTF-8')
if decoded_value not in identifiers:
identifiers.add(decoded_value)
nb_total_identifiers += 1
# do we have a corresponding entry?
with self.env_entries.begin(write=False) as txn2:
metadata_object = txn2.get(value)
if not metadata_object:
nb_missing_metadata_entry += 1
print("total identifiers:", nb_total_identifiers)
print("total missing entries in metadata map:", str(nb_missing_metadata_entry))
print("---")
# check the presence of the TEI files, from Grobid, Pub2TEI and the entries with at least one
# TEI XML file - walk through the data directory
nb_tei_present = 0
nb_grobid_tei_present = 0
nb_pub2tei_tei_present = 0
for root, dirs, files in os.walk(self.config["data_path"]):
for the_file in files:
if the_file.endswith(".json"):
# we have an entry normally, check if we have a TEI file
grobid_tei_file = os.path.join(root,the_file.replace(".json", ".grobid.tei.xml"))
pub2tei_tei_file = os.path.join(root,the_file.replace(".json", ".pub2tei.tei.xml"))
if os.path.isfile(grobid_tei_file) or os.path.isfile(pub2tei_tei_file):
nb_tei_present += 1
if os.path.isfile(grobid_tei_file):
nb_grobid_tei_present += 1
if os.path.isfile(pub2tei_tei_file):
nb_pub2tei_tei_present += 1
print("total entries with GROBID TEI file:", str(nb_grobid_tei_present))
print("total entries with Pub2TEI TEI file:", str(nb_pub2tei_tei_present))
print("total entries with at least one TEI file:", str(nb_tei_present))
print("---")
if metadata_csv_file != None and cord19:
# adding some statistics on the CORD-19 entries
# first get the number of entries to be able to display a progress bar
nb_lines = 0
with open(metadata_csv_file, mode='r') as csv_file:
csv_reader = csv.DictReader(csv_file)
for row in csv_reader:
nb_lines += 1
collection = {}
collection["name"] = "CORD-19"
collection["description"] = "Collection of Open Access research publications on COVID-19"
collection["version"] = "version of the collection - to be edited"
collection["harvester"] = "article-dataset-builder"
collection["documents"] = {}
collection["documents"]["distribution_entries_per_year"] = {}
collection["documents"]["distribution_harvested_per_year"] = {}
print("generating collection description/statistics on CORD-19 entries...")
total_entries = 0
total_distinct_entries = 0
total_harvested_entries = 0
distribution_years = {}
distribution_years_harvested = {}
# not memory friendly, but it's okay on a modern computer... otherwise we would use another temporary lmdb
cord_ids = []
# format is:
# cord_uid,sha,source_x,title,doi,pmcid,pubmed_id,license,abstract,publish_time,authors,journal,Microsoft Academic Paper ID,
# WHO #Covidence,has_full_text,full_text_file,url
pbar = tqdm(total = nb_lines)
nb_lines = 0
with open(metadata_csv_file, mode='r') as csv_file:
csv_reader = csv.DictReader(csv_file)
line_count = 0 # total count of articles
i = 0 # counter for article per batch
identifiers = []
rows = []
for row in csv_reader:
nb_lines += 1
total_entries += 1
if nb_lines % 100 == 0:
pbar.update(100)
if row["cord_uid"] == None or len(row["cord_uid"]) == 0:
continue
# is it indexed?
cord_id = row["cord_uid"]
if cord_id in cord_ids:
# this is a duplicate
continue
cord_ids.append(cord_id)
total_distinct_entries += 1
# check if we have a full text for the entry (nlm/tei or pdf)
harvested = False
resource_path = generateStoragePath(cord_id)
if os.path.isfile(os.path.join(resource_path, cord_id+".pdf")) or \
os.path.isfile(os.path.join(resource_path, cord_id+".nxml")) or \
os.path.isfile(os.path.join(resource_path, cord_id+".grobid.tei.xml")):
total_harvested_entries += 1
harvested = True
# publishing date has ISO 8601 style format: 2000-08-15
if row["publish_time"]:
year = row["publish_time"].split("-")[0]
if not year in distribution_years:
distribution_years[year] = 1
else:
distribution_years[year] += 1
if harvested:
if not year in distribution_years_harvested:
distribution_years_harvested[year] = 1
else:
distribution_years_harvested[year] += 1
print("Collection description and statistics generated in file: ./collection.json")
collection["documents"]["total_entries"] = total_entries
collection["documents"]["total_distinct_entries"] = total_distinct_entries
collection["documents"]["total_harvested_entries"] = total_harvested_entries
for year in distribution_years:
collection["documents"]["distribution_entries_per_year"][year] = distribution_years[year]
for year in distribution_years_harvested:
collection["documents"]["distribution_harvested_per_year"][year] = distribution_years_harvested[year]
with open('collection.json', 'w') as outfile:
json.dump(collection, outfile, indent=4)
def reprocessFailed(self):
localJsons = []
i = 0
# iterate over the entry lmdb
with self.env_entries.begin(write=False) as txn:
cursor = txn.cursor()
for key, value in cursor:
if i == self.config["batch_size"]:
with ThreadPoolExecutor(max_workers=self.config["batch_size"]) as executor:
executor.map(self.processTask, localJsons, timeout=50)
# reinit
i = 0
localJsons = []
localJson = _deserialize_pickle(value)
if not localJson["has_valid_oa_url"] or not localJson["has_valid_pdf"] or not localJson["has_valid_tei"]:
localJsons.append(localJson)
i += 1
logging.debug("re-processing " + localJson["id"])
elif self.thumbnail and not localJson["has_valid_thumbnail"]:
localJsons.append(localJson)
i += 1
logging.debug("re-processing for thumbnails " + localJson["id"])
elif self.annotation and not localJson["has_valid_ref_annotation"]:
localJsons.append(localJson)
i += 1
logging.debug("re-processing for PDF annotations " + localJson["id"])
# we need to process the latest incomplete batch (if not empty)
if len(localJsons)>0:
with ThreadPoolExecutor(max_workers=self.config["batch_size"]) as executor:
executor.map(self.processTask, localJsons, timeout=50)
def _serialize_pickle(a):
return pickle.dumps(a)
def _deserialize_pickle(serialized):
return pickle.loads(serialized)
def _clean_doi(doi):
if doi.startswith("https://doi.org/10."):
doi = doi.replace("https://doi.org/", "")
elif doi.startswith("http://dx.doi.org/10."):
doi = doi.replace("http://dx.doi.org/", "")
return doi.strip().lower()
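# Example of the normalization above (hypothetical DOI): _clean_doi("https://doi.org/10.1000/XYZ123")
# returns "10.1000/xyz123" (prefix stripped, whitespace trimmed, lower-cased).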
def _check_compression(file):
'''
check if a file is compressed, if yes decompress and replace by the decompressed version
'''
if os.path.isfile(file):
if os.path.getsize(file) == 0:
return False
file_type = magic.from_file(file, mime=True)
if file_type == 'application/gzip':
success = False
# decompressed in tmp file
with gzip.open(file, 'rb') as f_in:
with open(file+'.decompressed', 'wb') as f_out:
try:
shutil.copyfileobj(f_in, f_out)
except OSError:
logging.error("Decompression file failed: " + f_in)
else:
success = True
# replace the file
if success:
try:
shutil.copyfile(file+'.decompressed', file)
except OSError:
logging.error("Replacement of decompressed file failed: " + file)
success = False
# delete the tmp file
if os.path.isfile(file+'.decompressed'):
try:
os.remove(file+'.decompressed')
except OSError:
logging.error("Deletion of temp decompressed file failed: " + file+'.decompressed')
return success
else:
return True
return False
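# Behavior sketch (hypothetical path): _check_compression("/tmp/file.pdf") returns False for a missing or
# empty file, transparently replaces a gzip-compressed download with its decompressed content, and returns
# True when the file is already uncompressed.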
def _get_random_user_agent():
'''
This is a simple random/rotating user agent covering different devices and web clients/browsers
Note: rotating the user agent without rotating the IP address (via proxies) might not be a good idea if the same server
is harvested - but in our case we are harvesting a large variety of different Open Access servers
'''
user_agents = ["Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:81.0) Gecko/20100101 Firefox/81.0",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36"]
weights = [0.2, 0.3, 0.5]
user_agent = choices(user_agents, weights=weights, k=1)
return user_agent[0]
def _is_valid_file(file, mime_type):
target_mime = []
if mime_type == 'xml':
target_mime.append("application/xml")
target_mime.append("text/xml")
elif mime_type == 'png':
target_mime.append("image/png")
else:
target_mime.append("application/"+mime_type)
file_type = ""
if os.path.isfile(file):
if os.path.getsize(file) == 0:
return False
file_type = magic.from_file(file, mime=True)
return file_type in target_mime
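# Example (hypothetical path): _is_valid_file("/tmp/doc.pdf", "pdf") is True only when the file exists,
# is non-empty and python-magic reports an "application/pdf" MIME type.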
def _initProcessStateInformation(json_entry):
# init process information
if not "has_valid_pdf" in json_entry:
json_entry["has_valid_pdf"] = False
if not "has_valid_oa_url" in json_entry:
json_entry["has_valid_oa_url"] = False
if not "has_valid_tei" in json_entry:
json_entry["has_valid_tei"] = False
if not "has_valid_ref_annotation" in json_entry:
json_entry["has_valid_ref_annotation"] = False
if not "has_valid_thumbnail" in json_entry:
json_entry["has_valid_thumbnail"] = False
return json_entry
def _biblio_glutton_url(biblio_glutton_url):
res = biblio_glutton_url
if biblio_glutton_url.endswith("/"):
res = biblio_glutton_url[:-1]
return res+"/service/lookup?"
def _grobid_url(grobid_base, grobid_port):
the_url = 'http://'+grobid_base
if grobid_port is not None and len(grobid_port)>0:
the_url += ":"+grobid_port
the_url += "/api/"
return the_url
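# Example (hypothetical values): _grobid_url("localhost", "8070") returns "http://localhost:8070/api/",
# to which a service name such as "processFulltextDocument" is appended by the caller.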
def _download(url, filename):
result = _download_wget(url, filename)
if result != "success":
result = _download_requests(url, filename)
return result
def _download_wget(url, filename):
"""
Download by calling system wget, which proved more robust here than Python requests for parallel downloads
"""
result = "fail"
# This is the most robust and reliable way to download files I found with Python... to rely on system wget :)
#cmd = "wget -c --quiet" + " -O " + filename + ' --connect-timeout=10 --waitretry=10 ' + \
cmd = "wget -c --quiet" + " -O " + filename + ' --timeout=15 --waitretry=0 --tries=5 --retry-connrefused ' + \
'--header="User-Agent: ' + _get_random_user_agent()+ '" ' + \
'--header="Accept: application/pdf, text/html;q=0.9,*/*;q=0.8" --header="Accept-Encoding: gzip, deflate" ' + \
'--no-check-certificate ' + \
'"' + url + '"'
logging.debug(cmd)
try:
result = subprocess.check_call(cmd, shell=True)
# if the used version of wget does not decompress automatically, the following ensures it is done
result_compression = _check_compression(filename)
if not result_compression:
# decompression failed, or file is invalid
if os.path.isfile(filename):
try:
os.remove(filename)
except OSError:
logging.error("Deletion of invalid compressed file failed: " + filename)
result = "fail"
# ensure cleaning
if os.path.isfile(filename+'.decompressed'):
try:
os.remove(filename+'.decompressed')
except OSError:
logging.error("Final deletion of temp decompressed file failed: " + filename+'.decompressed')
else:
result = "success"
except subprocess.CalledProcessError as e:
logging.debug("e.returncode " + e.returncode)
logging.debug("e.output " + e.output)
logging.debug("wget command was: " + cmd)
#if e.output is not None and e.output.startswith('error: {'):
if e.output is not None:
error = json.loads(e.output[7:]) # Skip "error: "
logging.debug("error code: " + error['code'])
logging.debug("error message: " + error['message'])
result = "fail"
except Exception as e:
logging.error("Unexpected error wget process: " + str(e))
result = "fail"
return str(result)
def _download_requests(url, filename):
"""
Download with Python requests, which handles compression well but is less robust and parallelizes poorly
"""
HEADERS = {"""User-Agent""": _get_random_user_agent()}
result = "fail"
try:
file_data = requests.get(url, allow_redirects=True, headers=HEADERS, verify=False, timeout=30)
if file_data.status_code == 200:
with open(filename, 'wb') as f_out:
f_out.write(file_data.content)
result = "success"
except Exception:
logging.exception("Download failed for {0} with requests".format(url))
return result
def _manage_pmc_archives(filename):
# check if the filename exists and we have downloaded an archive rather than a PDF (PMC FTP case)
if os.path.exists(filename) and os.path.isfile(filename) and filename.endswith(".tar.gz"):
try:
# for PMC we still have to extract the PDF from archive
#print(filename, "is an archive")
thedir = os.path.dirname(filename)
# we need to extract the PDF, the NLM extra file, change file name and remove the tar file
tar = tarfile.open(filename)
pdf_found = False
# extract the relevant files into a unique temporary subdirectory, so that files sharing the same name
# in different archives cannot be extracted to the same place
basename = os.path.basename(filename)
tmp_subdir = basename[0:6]
for member in tar.getmembers():
if not pdf_found and member.isfile() and (member.name.endswith(".pdf") or member.name.endswith(".PDF")):
member.name = os.path.basename(member.name)
# create unique subdirectory
if not os.path.exists(os.path.join(thedir,tmp_subdir)):
os.mkdir(os.path.join(thedir,tmp_subdir))
f = tar.extract(member, path=os.path.join(thedir,tmp_subdir))
#print("extracted file:", member.name)
# be sure that the file exists (corrupted archives are not a legend)
if os.path.isfile(os.path.join(thedir,tmp_subdir,member.name)):
os.rename(os.path.join(thedir,tmp_subdir,member.name), filename.replace(".tar.gz", ".pdf"))
pdf_found = True
# delete temporary unique subdirectory
try:
shutil.rmtree(os.path.join(thedir,tmp_subdir))
except OSError:
logging.error("Deletion of tmp dir failed: " + os.path.join(thedir,tmp_subdir))
#break
if member.isfile() and member.name.endswith(".nxml"):
member.name = os.path.basename(member.name)
# create unique subdirectory
if not os.path.exists(os.path.join(thedir,tmp_subdir)):
os.mkdir(os.path.join(thedir,tmp_subdir))
f = tar.extract(member, path=os.path.join(thedir,tmp_subdir))
#print("extracted file:", member.name)
# be sure that the file exists (corrupted archives are not a legend)
if os.path.isfile(os.path.join(thedir,tmp_subdir,member.name)):
os.rename(os.path.join(thedir,tmp_subdir,member.name), filename.replace(".tar.gz", ".nxml"))
# delete temporary unique subdirectory
try:
shutil.rmtree(os.path.join(thedir,tmp_subdir))
except OSError:
logging.error("Deletion of tmp dir failed: " + os.path.join(thedir,tmp_subdir))
tar.close()
if not pdf_found:
logging.warning("warning: no pdf found in archive: " + filename)
if os.path.isfile(filename):
try:
os.remove(filename)
except OSError:
logging.error("Deletion of PMC archive file failed: " + filename)
except Exception as e:
# a bit of bad practice
logging.error("Unexpected error " + str(e))
pass
def generate_thumbnail(pdfFile):
"""
Generate PNG thumbnails (3 different sizes) for the front page of a PDF.
Use ImageMagick for this.
"""
thumb_file = pdfFile.replace('.pdf', '-thumb-small.png')
cmd = 'convert -quiet -density 200 -thumbnail x150 -flatten ' + pdfFile+'[0] ' + thumb_file
try:
subprocess.check_call(cmd, shell=True)
except subprocess.CalledProcessError as e:
logging.error("e.returncode: " + e.returncode)
thumb_file = pdfFile.replace('.pdf', '-thumb-medium.png')
cmd = 'convert -quiet -density 200 -thumbnail x300 -flatten ' + pdfFile+'[0] ' + thumb_file
try:
subprocess.check_call(cmd, shell=True)
except subprocess.CalledProcessError as e:
logging.error("e.returncode: " + e.returncode)
thumb_file = pdfFile.replace('.pdf', '-thumb-large.png')
cmd = 'convert -quiet -density 200 -thumbnail x500 -flatten ' + pdfFile+'[0] ' + thumb_file
try:
subprocess.check_call(cmd, shell=True)
except subprocess.CalledProcessError as e:
logging.error("e.returncode: " + e.returncode)
def generateStoragePath(identifier):
'''
Convert an identifier into a path, using two-character prefixes as directory names:
123456789 -> 12/34/56/78/123456789/
'''
return os.path.join(identifier[:2], identifier[2:4], identifier[4:6], identifier[6:8], identifier, "")
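# Example: generateStoragePath("123456789") returns "12/34/56/78/123456789/", so all files of one entry
# are grouped under a shallow four-level prefix tree.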
def test():
harvester = Harverster()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description = "COVIDataset harvester")
parser.add_argument("--dois", default=None, help="path to a file describing a dataset articles as a simple list of DOI (one per line)")
parser.add_argument("--cord19", default=None, help="path to the csv file describing the CORD-19 dataset articles")
parser.add_argument("--pmids", default=None, help="path to a file describing a dataset articles as a simple list of PMID (one per line)")
parser.add_argument("--pmcids", default=None, help="path to a file describing a dataset articles as a simple list of PMC ID (one per line)")
parser.add_argument("--config", default="./config.json", help="path to the config file, default is ./config.json")
parser.add_argument("--reset", action="store_true", help="ignore previous processing states, and re-init the harvesting process from the beginning")
parser.add_argument("--reprocess", action="store_true", help="reprocessed existing failed entries")
parser.add_argument("--thumbnail", action="store_true", help="generate thumbnail files for the front page of the harvested PDF")
parser.add_argument("--annotation", action="store_true", help="generate bibliographical annotations with coordinates for the harvested PDF")
parser.add_argument("--diagnostic", action="store_true", help="perform a full consistency diagnostic on the harvesting and transformation process")
#parser.add_argument("--sample", type=int, default=None, help="harvest only a random sample of indicated size")
parser.add_argument("--dump", action="store_true", help="write all the consolidated metadata in json in the file consolidated_metadata.json")
parser.add_argument("--download", action="store_true", help="only download the raw files (PDF, NLM/JATS) without processing them")
args = parser.parse_args()
dois_path = args.dois
pmids_path = args.pmids
pmcids_path = args.pmcids
csv_cord19 = args.cord19
config_path = args.config
reset = args.reset
dump = args.dump
thumbnail = args.thumbnail
annotation = args.annotation
reprocess = args.reprocess
full_diagnostic = args.diagnostic
only_download = args.download
#sample = args.sample
harvester = Harverster(config_path=config_path,
thumbnail=thumbnail,
sample=None,
dump_metadata=dump,
annotation=annotation,
only_download=only_download,
full_diagnostic=full_diagnostic)
if reset:
if input("You asked to reset the existing harvesting, this will removed all the already downloaded data files... are you sure? (y/n) ") == "y":
harvester.reset(True)
else:
print("skipping reset...")
start_time = time.time()
if full_diagnostic:
harvester.diagnostic(full=full_diagnostic)
elif dump :
harvester.dump_metadata()
elif reprocess:
harvester.reprocessFailed()
elif csv_cord19:
if not os.path.isfile(csv_cord19):
print("error: the indicated cvs file path is not valid:", csv_cord19)
sys.exit(0)
harvester.harvest_cord19(csv_cord19)
elif dois_path:
if not os.path.isfile(dois_path):
print("error: the indicated DOI file path is not valid:", dois_path)
sys.exit(0)
harvester.harvest_dois(dois_path)
elif pmids_path:
if not os.path.isfile(pmids_path):
print("error: the indicated PMID file path is not valid:", pmids_path)
sys.exit(0)
harvester.harvest_pmids(pmids_path)
elif pmcids_path:
if not os.path.isfile(pmcids_path):
print("error: the indicated PMC ID file path is not valid:", pmcids_path)
sys.exit(0)
harvester.harvest_pmcids(pmcids_path)
runtime = round(time.time() - start_time, 3)
print("runtime: %s seconds " % (runtime))
| 47.553165 | 173 | 0.584679 |
5b8212dd2907f325c3d09c675c764dbd7e936f89 | 1,726 | py | Python | src/python/demo/reddit/bp.py | grongierisc/interoperability-embedded-python | 6885c7249ea902a30d17a9dad1bde3d1e0223e8a | ["MIT"] | stars: null | issues: 1 | forks: 1
from grongier.pex import BusinessProcess
from message import PostMessage
from obj import PostClass
import iris
class FilterPostRoutingRule(BusinessProcess):
"""
This process receives a PostMessage containing a reddit post.
It then determines whether the post is about a dog, a cat or neither, and
fills in the right information inside the PostMessage before sending it to
the FileOperation operation.
"""
def on_init(self):
if not hasattr(self,'target'):
self.target = "Python.FileOperation"
return
def iris_to_python(self, request:'iris.dc.Demo.PostMessage'):
request = PostMessage(post=PostClass(title=request.Post.Title,
selftext=request.Post.Selftext,
author=request.Post.Author,
url=request.Post.Url,
created_utc=request.Post.CreatedUTC,
original_json=request.Post.OriginalJSON))
return self.on_python_message(request)
def on_python_message(self, request: PostMessage):
if 'dog'.upper() in request.post.selftext.upper():
request.to_email_address = 'dog@company.com'
request.found = 'Dog'
if 'cat'.upper() in request.post.selftext.upper():
request.to_email_address = 'cat@company.com'
request.found = 'Cat'
if request.found is not None:
self.send_request_sync(self.target,request)
rsp = iris.cls('Ens.StringResponse')._New(f"{request.post.title}")
return rsp
else:
return
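# Hypothetical sketch of exercising this process outside an interoperability production (the message values
# below are assumptions; in a real production the framework instantiates and drives the component):
#   bp = FilterPostRoutingRule()
#   bp.on_init()
#   msg = PostMessage(post=PostClass(title="Lost cat", selftext="Our cat ran away", author="u1",
#                                    url="http://example.org", created_utc="0", original_json="{}"))
#   bp.on_python_message(msg)   # routes to cat@company.com and forwards the message to Python.FileOperation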
| 37.521739 | 86 | 0.585747 |
616e8573b1e842feb915ffe052b39a20f315b87b | 3,545 | py | Python | venv/lib/python3.6/site-packages/oslo_config/sphinxconfiggen.py | boogieLing/r0_es | 14ac336a40c4f87b8bd3bd62a60158b437690c35 | ["MIT"] | stars: 110 | issues: 1 | forks: 115
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from sphinx.util import logging
from oslo_config import generator
LOG = logging.getLogger(__name__)
def generate_sample(app):
if not app.config.config_generator_config_file:
LOG.warning("No config_generator_config_file is specified, "
"skipping sample config generation")
return
# Decided to update the existing config option
# config_generator_config_file to support a value that is a list of
# tuples, containing the file names as (input, output).
# We need to retain support for the option referring to a single string,
# and using the sample_config_basename for the output file in that case.
# After we release support for both forms of the option, we can update
# projects to always use the list of tuples, then remove
# sample_config_basename and the support for config_generator_config_file
# being a single string.
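# For illustration, both accepted forms in a project's conf.py (file names below are hypothetical):
#   config_generator_config_file = 'config-generator/myservice.conf'
#   config_generator_config_file = [('config-generator/myservice.conf', 'myservice')]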
if isinstance(app.config.config_generator_config_file, list):
for config_file, base_name in app.config.config_generator_config_file:
if base_name is None:
base_name = _get_default_basename(config_file)
_generate_sample(app, config_file, base_name)
else:
_generate_sample(app,
app.config.config_generator_config_file,
app.config.sample_config_basename)
def _get_default_basename(config_file):
return os.path.splitext(os.path.basename(config_file))[0]
def _generate_sample(app, config_file, base_name):
def info(msg):
LOG.info('[%s] %s' % (__name__, msg))
# If we are given a file that isn't an absolute path, look for it
# in the source directory if it doesn't exist.
candidates = [
config_file,
os.path.join(app.srcdir, config_file,),
]
for c in candidates:
if os.path.isfile(c):
info('reading config generator instructions from %s' % c)
config_path = c
break
else:
raise ValueError(
"Could not find config_generator_config_file %r" %
app.config.config_generator_config_file)
if base_name:
out_file = os.path.join(app.srcdir, base_name) + '.conf.sample'
if not os.path.isdir(os.path.dirname(os.path.abspath(out_file))):
os.mkdir(os.path.dirname(os.path.abspath(out_file)))
else:
file_name = 'sample.config'
out_file = os.path.join(app.srcdir, file_name)
info('writing sample configuration to %s' % out_file)
generator.main(args=['--config-file', config_path,
'--output-file', out_file])
def setup(app):
app.add_config_value('config_generator_config_file', None, 'env')
app.add_config_value('sample_config_basename', None, 'env')
app.connect('builder-inited', generate_sample)
return {
'parallel_read_safe': True,
'parallel_write_safe': True,
}
| 36.173469 | 78 | 0.684344 |
41bc89acd59ea4b7c4624f266df289d7f575b665 | 15,758 | py | Python | inbm-vision/vision-agent/vision/tests/unit/test_registry_manager.py | ahameedx/intel-inb-manageability | aca445fa4cef0b608e6e88e74476547e10c06073 | ["Apache-2.0"] | stars: 5 | issues: 45 | forks: 4
from datetime import datetime
from unittest import TestCase
from vision.constant import *
from vision.configuration_constant import *
from vision.data_handler.idata_handler import IDataHandler
from vision.registry_manager import RegistryManager
from mock import Mock, patch
mock_node_info = {'bootFwDate': "2018-10-9", 'bootFwVersion': '1.5.9',
'bootFwVendor': 'Dell Inc.', 'osType': 'Linux',
'osVersion': 'Ubuntu 16.04.6 LTS',
'osReleaseDate': '2020-7-9',
'manufacturer': 'Dell Inc.',
'dmVerityEnabled': False,
'measuredBootEnabled': None,
'flashless': 'false',
'is_xlink_secure': False,
'stepping': 'A0',
'sku': '3400VE',
'model': 'Intel Keem Bay HDDL2',
'product': 'intel',
'serialNumber': 'c0428202080d709',
'version': 'bit-creek-2.13.2-r1.aarch64',
'guid': None,
'is_provisioned': False}
mock_node_id_one = '000732767ffb-17629184'
mock_node_id_two = '000732767ffb-17825792'
mock_guid = 12345
class TestRegistryManager(TestCase):
@patch('inbm_vision_lib.timer.Timer.start')
def setUp(self, mock_start):
mock_data_handler: IDataHandler = Mock()
self.new_registry_manager = RegistryManager(data_handler=mock_data_handler)
self.mock_heartbeat_timestamp = Mock()
self.mock_registry = Mock()
self.mock_registry.device_id = "example_deviceID"
self.mock_registry.status.heartbeat_retries = 0
self.mock_registry.status.heartbeat_timestamp = self.mock_heartbeat_timestamp
self.mock_vision = Mock()
self.mock_vision.send_node_register_response
self.mock_vision.create_telemetry_event
self.assertEqual(mock_start.call_count, 2)
def test_init(self):
self.assertIsNotNone(self.new_registry_manager)
@patch('vision.registry_manager.RegistryManager.get_device', return_value=(None, None))
@patch('inbm_vision_lib.timer.Timer.start')
def test_add(self, t_start, g_device):
new_registry_manager = RegistryManager(data_handler=self.mock_vision)
new_registry_manager.add(mock_node_info, mock_node_id_one)
self.assertEqual(t_start.call_count, 2)
g_device.assert_called_once()
self.mock_vision.send_node_register_response.assert_called_once()
self.mock_vision.create_telemetry_event.assert_called_once()
self.assertIsNotNone(new_registry_manager)
self.assertEquals(len(new_registry_manager._registries), 1)
def test_get_all_active_nodes(self):
self.new_registry_manager.add(mock_node_info, mock_node_id_one)
self.new_registry_manager.add(mock_node_info, mock_node_id_two)
targets = self.new_registry_manager._get_all_active_nodes()
self.assertEqual(len(targets), 2)
def test_get_target_ids(self):
self.new_registry_manager.add(mock_node_info, mock_node_id_one)
self.new_registry_manager.add(mock_node_info, mock_node_id_two)
targets = ['000732767ffb-17629184', '000732767ffb-17825792']
self.assertEqual(self.new_registry_manager.get_target_ids(targets), targets)
@patch('vision.registry_manager.RegistryManager.get_device', return_value=(None, None))
@patch('inbm_vision_lib.timer.Timer.start')
def test_add_registry_success(self, t_start, g_device):
new_registry_manager = RegistryManager(data_handler=self.mock_vision)
new_registry_manager._add_registry(self.mock_registry)
self.assertEqual(t_start.call_count, 2)
g_device.assert_called_once()
self.mock_vision.send_node_register_response.assert_called_once()
self.mock_vision.create_telemetry_event.assert_called_once()
self.assertIsNotNone(new_registry_manager)
self.assertEquals(len(new_registry_manager._registries), 1)
@patch('vision.registry_manager.RegistryManager.delete_registry')
@patch('vision.registry_manager.RegistryManager.get_device', return_value=(Mock(), 0))
@patch('inbm_vision_lib.timer.Timer.start')
def test_add_registry_node_exist_in_list(self, t_start, g_device, delete_reg):
new_registry_manager = RegistryManager(data_handler=self.mock_vision)
new_registry_manager._add_registry(self.mock_registry)
self.assertEqual(t_start.call_count, 2)
g_device.assert_called_once()
delete_reg.assert_called_once()
self.assertIsNotNone(new_registry_manager)
self.assertEquals(len(new_registry_manager._registries), 1)
@patch('vision.registry_manager.RegistryManager.get_device', return_value=(Mock(), 0))
@patch('inbm_vision_lib.timer.Timer.start')
def test_add_registry_with_different_boot_fw_date_replace_node_exist_in_list(self, t_start, g_device):
self.mock_registry.boot_fw_date = datetime(year=1, month=1, day=1, second=0)
new_registry_manager = RegistryManager(data_handler=self.mock_vision)
new_registry_manager._registries.append(self.mock_registry)
self.mock_registry.boot_fw_date = datetime(year=2, month=2, day=2, second=0)
new_registry_manager._add_registry(self.mock_registry)
self.assertEqual(t_start.call_count, 2)
g_device.assert_called_once()
assert self.mock_vision.create_telemetry_event.call_count == 2
self.assertIsNotNone(new_registry_manager)
self.assertEquals(len(new_registry_manager._registries), 1)
@patch('vision.registry_manager.RegistryManager.get_device', return_value=(Mock(), 0))
@patch('inbm_vision_lib.timer.Timer.start')
def test_add_registry_with_different_boot_fw_version_replace_node_exist_in_list(self, t_start, g_device):
self.mock_registry.boot_fw_version = "KMB-BETA"
new_registry_manager = RegistryManager(data_handler=self.mock_vision)
new_registry_manager._registries.append(self.mock_registry)
self.mock_registry.boot_fw_version = "KMB-GOLD2"
new_registry_manager._add_registry(self.mock_registry)
self.assertEqual(t_start.call_count, 2)
g_device.assert_called_once()
assert self.mock_vision.create_telemetry_event.call_count == 2
self.assertIsNotNone(new_registry_manager)
        self.assertEqual(len(new_registry_manager._registries), 1)
@patch('vision.registry_manager.RegistryManager.get_device', return_value=(Mock(), 0))
@patch('inbm_vision_lib.timer.Timer.start')
def test_add_registry_with_different_os_version_replace_node_exist_in_list(self, t_start, g_device):
self.mock_registry.os_version = "1"
new_registry_manager = RegistryManager(data_handler=self.mock_vision)
new_registry_manager._registries.append(self.mock_registry)
self.mock_registry.os_version = "2"
new_registry_manager._add_registry(self.mock_registry)
self.assertEqual(t_start.call_count, 2)
g_device.assert_called_once()
assert self.mock_vision.create_telemetry_event.call_count == 2
self.assertIsNotNone(new_registry_manager)
        self.assertEqual(len(new_registry_manager._registries), 1)
@patch('vision.registry_manager.RegistryManager.get_device', return_value=(Mock(), 0))
@patch('inbm_vision_lib.timer.Timer.start')
def test_add_registry_with_different_os_release_date_replace_node_exist_in_list(self, t_start, g_device):
self.mock_registry.os_release_date = datetime(year=1, month=1, day=1, second=0)
new_registry_manager = RegistryManager(data_handler=self.mock_vision)
new_registry_manager._registries.append(self.mock_registry)
self.mock_registry.os_release_date = datetime(year=2, month=2, day=2, second=0)
new_registry_manager._add_registry(self.mock_registry)
self.assertEqual(t_start.call_count, 2)
g_device.assert_called_once()
assert self.mock_vision.create_telemetry_event.call_count == 2
self.assertIsNotNone(new_registry_manager)
        self.assertEqual(len(new_registry_manager._registries), 1)
def test_delete_registry_success(self):
self.new_registry_manager._registries = [self.mock_registry]
self.new_registry_manager.delete_registry(self.mock_registry, 0)
self.assertIsNotNone(self.new_registry_manager)
        self.assertEqual(len(self.new_registry_manager._registries), 0)
def test_get_device_success(self):
self.new_registry_manager._registries = [self.mock_registry]
return_device, device_index = self.new_registry_manager.get_device("example_deviceID")
self.assertIsNotNone(self.new_registry_manager, device_index)
        self.assertEqual(len(self.new_registry_manager._registries), 1)
self.assertIsNotNone(return_device)
        self.assertEqual(self.mock_registry, return_device)
def test_get_device_fail(self):
self.new_registry_manager._registries = [self.mock_registry]
return_device, device_index = self.new_registry_manager.get_device("example_deviceID123")
self.assertIsNotNone(self.new_registry_manager)
        self.assertEqual(len(self.new_registry_manager._registries), 1)
self.assertIsNone(return_device, device_index)
@patch('inbm_vision_lib.timer.Timer.start')
def test_calculate_time_interval(self, t_start):
previous_datetime = datetime(year=1, month=1, day=1, second=0)
current_datetime = datetime(year=1, month=1, day=1, second=10)
time_interval = self.new_registry_manager._calculate_time_interval(
previous_datetime, current_datetime)
self.assertIsNotNone(self.new_registry_manager)
        self.assertEqual(time_interval, 10)
@patch('vision.registry_manager.RegistryManager._calculate_time_interval',
return_value=CONFIG_HEARTBEAT_CHECK_INTERVAL_SECS.default_value - 1)
def test_is_heartbeat_status_active(self, cal):
self.assertIsNotNone(self.new_registry_manager)
self.assertTrue(self.new_registry_manager._is_heartbeat_active(Mock()))
@patch('vision.registry_manager.RegistryManager._calculate_time_interval',
return_value=CONFIG_HEARTBEAT_CHECK_INTERVAL_SECS.default_value + 1)
def test_is_heartbeat_status_idle(self, cal):
self.assertIsNotNone(self.new_registry_manager)
self.assertFalse(self.new_registry_manager._is_heartbeat_active(Mock()))
@patch('vision.registry_manager.RegistryManager._update_heartbeat_status')
@patch('vision.registry_manager.RegistryManager._is_heartbeat_active',
return_value=True)
def test_check_heartbeat_active(self, is_hb, upd_hb):
self.new_registry_manager._registries = [self.mock_registry]
self.new_registry_manager.check_heartbeat()
is_hb.assert_called_once()
upd_hb.assert_called_once()
self.assertIsNotNone(self.new_registry_manager)
@patch('vision.registry_manager.RegistryManager._handle_inactive_heartbeat')
@patch('vision.registry_manager.RegistryManager._is_heartbeat_active', return_value=False)
def test_check_heartbeat_inactive(self, is_hb, handle_hb):
self.new_registry_manager._registries = [self.mock_registry]
self.new_registry_manager.check_heartbeat()
is_hb.assert_called_once()
handle_hb.assert_called_once()
self.assertIsNotNone(self.new_registry_manager)
@patch('vision.registry_manager.RegistryManager._update_heartbeat_status')
def test_handle_inactive_heartbeat_add_retries(self, upd_hb):
self.new_registry_manager._registries = [self.mock_registry]
self.new_registry_manager._handle_inactive_heartbeat(self.mock_registry)
upd_hb.assert_called_once()
        self.assertEqual(self.mock_registry.status.heartbeat_retries, 1)
self.assertIsNotNone(self.new_registry_manager)
@patch('vision.registry_manager.RegistryManager._update_heartbeat_status')
@patch('inbm_vision_lib.timer.Timer.start')
def test_handle_inactive_heartbeat_send_is_alive(self, t_start, upd_hb):
self.mock_registry.status.heartbeat_retries = 2
self.mock_vision.send_is_alive
new_registry_manager = RegistryManager(data_handler=self.mock_vision)
new_registry_manager._registries = [self.mock_registry]
new_registry_manager._handle_inactive_heartbeat(self.mock_registry)
self.assertEqual(t_start.call_count, 3)
upd_hb.assert_called_once()
self.mock_vision.send_is_alive.assert_called_once()
        self.assertEqual(self.mock_registry.status.heartbeat_retries, 3)
self.assertIsNotNone(new_registry_manager)
def test_check_heartbeat_skip(self):
self.new_registry_manager.check_heartbeat()
        self.assertEqual(len(self.new_registry_manager._registries), 0)
@patch('vision.registry_manager.RegistryManager.check_heartbeat')
@patch('inbm_vision_lib.timer.Timer.start')
def test_start_heartbeat_timer(self, t_start, manager_check_heartbeat):
mock_data_handler: IDataHandler = Mock()
new_registry_manager = RegistryManager(data_handler=mock_data_handler)
new_registry_manager._start_heartbeat_timer()
self.assertEqual(t_start.call_count, 3)
manager_check_heartbeat.assert_called_once()
@patch('inbm_vision_lib.timer.Timer.stop')
def test_stop(self, t_stop) -> None:
self.new_registry_manager.stop()
self.assertEqual(t_stop.call_count, 2)
def test_update_heartbeat_status(self):
self.mock_registry.status.heartbeat_status = "Idle"
self.new_registry_manager._registries = [self.mock_registry]
self.new_registry_manager._update_heartbeat_status(self.mock_registry, "Active")
        self.assertEqual(self.mock_registry.status.heartbeat_status, "Active")
def test_update_heartbeat_timestamp_pass(self):
self.new_registry_manager._registries = [self.mock_registry]
self.new_registry_manager.update_heartbeat_timestamp("example_deviceID")
@patch('inbm_vision_lib.timer.Timer.start')
def test_update_heartbeat_timestamp_send_reregister_request(self, t_start):
mock_data_handler: IDataHandler = Mock()
mock_data_handler.create_telemetry_event
mock_data_handler.send_reregister_request
new_registry_manager = RegistryManager(data_handler=mock_data_handler)
new_registry_manager.update_heartbeat_timestamp("example_deviceID")
mock_data_handler.create_telemetry_event.assert_called_once() # type: ignore
mock_data_handler.send_reregister_request.assert_called_once() # type: ignore
self.assertEqual(t_start.call_count, 2)
@patch('vision.registry_manager.RegistryManager.delete_registry')
def test_manage_is_alive_response_delete_device(self, del_dv):
self.mock_registry.status.heartbeat_retries = 4
self.mock_registry.status.heartbeat_status = HEARTBEAT_IDLE_STATE
self.new_registry_manager._registries = [self.mock_registry]
self.new_registry_manager.manage_is_alive_response(self.mock_registry.device_id)
del_dv.assert_called_once()
self.assertIsNotNone(self.new_registry_manager)
@patch('vision.registry_manager.RegistryManager.delete_registry')
def test_manage_is_alive_response_device_not_found(self, del_dv):
self.mock_registry.status.heartbeat_retries = 4
self.mock_registry.status.heartbeat_status = HEARTBEAT_IDLE_STATE
self.new_registry_manager._registries = [self.mock_registry]
self.new_registry_manager.manage_is_alive_response("example_deviceID_123")
del_dv.assert_not_called()
self.assertIsNotNone(self.new_registry_manager)
| 52.526667 | 109 | 0.748699 |
6fa4539a976fb68bc43f237b6e52f93cca1a5793 | 11,485 | py | Python | weasyl/login.py | greysteil/wzl-test | 0f863b9e7c58e5861437618bd590126ca323140c | [
"Apache-2.0"
] | null | null | null | weasyl/login.py | greysteil/wzl-test | 0f863b9e7c58e5861437618bd590126ca323140c | [
"Apache-2.0"
] | 19 | 2018-01-02T07:27:22.000Z | 2019-01-23T05:20:06.000Z | weasyl/login.py | greysteil/wzl-test | 0f863b9e7c58e5861437618bd590126ca323140c | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
import arrow
import bcrypt
from sqlalchemy.sql.expression import select
from libweasyl import security
from libweasyl import staff
from weasyl import define as d
from weasyl import macro as m
from weasyl import emailer
from weasyl import moderation
from weasyl.error import WeasylError
_EMAIL = 100
_PASSWORD = 10
_USERNAME = 25
def signin(userid):
# Update the last login record for the user
d.execute("UPDATE login SET last_login = %i WHERE userid = %i", [d.get_time(), userid])
# Log the successful login and increment the login count
d.append_to_log('login.success', userid=userid, ip=d.get_address())
d.metric('increment', 'logins')
# set the userid on the session
sess = d.get_weasyl_session()
sess.userid = userid
sess.save = True
def signout(request):
sess = request.weasyl_session
# unset SFW-mode cookie on logout
request.delete_cookie_on_response("sfwmode")
sess.userid = None
sess.save = True
def authenticate_bcrypt(username, password, session=True):
"""
Return a result tuple of the form (userid, error); `error` is None if the
login was successful. Pass `session` as False to authenticate a user without
creating a new session.
Possible errors are:
- "invalid"
- "unexpected"
- "address"
- "banned"
- "suspended"
- "2fa" - Indicates the user has opted-in to 2FA. Additional authentication required.
"""
# Check that the user entered potentially valid values for `username` and
# `password` before attempting to authenticate them
if not username or not password:
return 0, "invalid"
    # Select the authentication data necessary to check that the user-entered
# credentials are valid
query = d.execute("SELECT ab.userid, ab.hashsum, lo.settings, lo.twofa_secret FROM authbcrypt ab"
" RIGHT JOIN login lo USING (userid)"
" WHERE lo.login_name = '%s'", [d.get_sysname(username)], ["single"])
if not query:
return 0, "invalid"
USERID, HASHSUM, SETTINGS, TWOFA = query
HASHSUM = HASHSUM.encode('utf-8')
d.metric('increment', 'attemptedlogins')
unicode_success = bcrypt.checkpw(password.encode('utf-8'), HASHSUM)
if not unicode_success and not bcrypt.checkpw(d.plaintext(password).encode('utf-8'), HASHSUM):
# Log the failed login attempt in a security log if the account the user
# attempted to log into is a privileged account
if USERID in staff.MODS:
d.append_to_log('login.fail', userid=USERID, ip=d.get_address())
d.metric('increment', 'failedlogins')
# Return a zero userid and an error code (indicating the entered password
# was incorrect)
return 0, "invalid"
elif "b" in SETTINGS:
# Return the proper userid and an error code (indicating the user's account
# has been banned)
return USERID, "banned"
elif "s" in SETTINGS:
suspension = moderation.get_suspension(USERID)
if d.get_time() > suspension.release:
d.execute("UPDATE login SET settings = REPLACE(settings, 's', '') WHERE userid = %i", [USERID])
d.execute("DELETE FROM suspension WHERE userid = %i", [USERID])
d.get_login_settings.invalidate(USERID)
else:
# Return the proper userid and an error code (indicating the user's
# account has been temporarily suspended)
return USERID, "suspended"
# Attempt to create a new session if `session` is True, then log the signin
# if it succeeded.
if session:
# If the user's record has ``login.twofa_secret`` set (not nulled), return that password authentication succeeded.
if TWOFA:
return USERID, "2fa"
else:
signin(USERID)
status = None
if not unicode_success:
# Oops; the user's password was stored badly, but they did successfully authenticate.
status = 'unicode-failure'
# Either way, authentication succeeded, so return the userid and a status.
return USERID, status
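# ---------------------------------------------------------------------------
# Editor's illustrative sketch (not part of the original Weasyl module): shows
# how a caller might branch on the (userid, error) pair documented in
# authenticate_bcrypt(). The handler name and messages below are made up.
def _example_signin_handler(username, password):
    userid, error = authenticate_bcrypt(username, password)
    if error == "invalid":
        return "Incorrect username or password."
    elif error in ("banned", "suspended"):
        return "Account %d is not allowed to sign in (%s)." % (userid, error)
    elif error == "2fa":
        # Password was correct; the caller must now collect the second factor.
        return "Enter your two-factor authentication code."
    # error is None or 'unicode-failure'; either way the session is now active.
    return "Signed in as user %d." % userid
# ---------------------------------------------------------------------------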
def passhash(password):
return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(m.MACRO_BCRYPT_ROUNDS))
def password_secure(password):
"""
Return True if the password meets requirements, else False.
"""
return len(password) >= _PASSWORD
# form
# username email month
# password emailcheck year
# passcheck day
def create(form):
# Normalize form data
username = d.plaintext(form.username[:_USERNAME])
sysname = d.get_sysname(username)
email = emailer.normalize_address(form.email)
emailcheck = emailer.normalize_address(form.emailcheck)
password = form.password
passcheck = form.passcheck
if form.day and form.month and form.year:
try:
birthday = arrow.Arrow(int(form.year), int(form.month), int(form.day))
except ValueError:
raise WeasylError("birthdayInvalid")
else:
birthday = None
# Check mismatched form data
if password != passcheck:
raise WeasylError("passwordMismatch")
if email != emailcheck:
raise WeasylError("emailMismatch")
# Check invalid form data
if birthday is None or d.age_in_years(birthday) < 13:
raise WeasylError("birthdayInvalid")
if not password_secure(password):
raise WeasylError("passwordInsecure")
if not email:
raise WeasylError("emailInvalid")
if is_email_blacklisted(email):
raise WeasylError("emailBlacklisted")
if not sysname or ";" in username:
raise WeasylError("usernameInvalid")
if sysname in ["admin", "administrator", "mod", "moderator", "weasyl",
"weasyladmin", "weasylmod", "staff", "security"]:
raise WeasylError("usernameInvalid")
if email_exists(email):
raise WeasylError("emailExists")
if username_exists(sysname):
raise WeasylError("usernameExists")
# Create pending account
token = security.generate_key(40)
d.engine.execute(d.meta.tables["logincreate"].insert(), {
"token": token,
"username": username,
"login_name": sysname,
"hashpass": passhash(password),
"email": email,
"birthday": birthday,
"unixtime": arrow.now(),
})
# Queue verification email
emailer.append([email], None, "Weasyl Account Creation", d.render(
"email/verify_account.html", [token, sysname]))
d.metric('increment', 'createdusers')
def verify(token):
lo = d.meta.tables["login"]
lc = d.meta.tables["logincreate"]
query = d.engine.execute(lc.select().where(lc.c.token == token)).first()
if not query:
raise WeasylError("logincreateRecordMissing")
db = d.connect()
with db.begin():
# Create login record
userid = db.scalar(lo.insert().returning(lo.c.userid), {
"login_name": d.get_sysname(query.username),
"last_login": arrow.now(),
"email": query.email,
})
# Create profile records
db.execute(d.meta.tables["authbcrypt"].insert(), {
"userid": userid,
"hashsum": query.hashpass,
})
db.execute(d.meta.tables["profile"].insert(), {
"userid": userid,
"username": query.username,
"full_name": query.username,
"unixtime": arrow.now(),
"config": "kscftj",
})
db.execute(d.meta.tables["userinfo"].insert(), {
"userid": userid,
"birthday": query.birthday,
})
db.execute(d.meta.tables["userstats"].insert(), {
"userid": userid,
})
db.execute(d.meta.tables["welcomecount"].insert(), {
"userid": userid,
})
# Update logincreate records
db.execute(lc.delete().where(lc.c.token == token))
d.metric('increment', 'verifiedusers')
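# ---------------------------------------------------------------------------
# Editor's illustrative sketch (not part of the original module): the two-step
# signup flow pairs create() with verify(). The form argument is an assumption
# standing in for the submitted registration form, and in practice the token
# arrives via the emailed verification link rather than a direct lookup.
def _example_signup_flow(form):
    create(form)  # stores a pending row in "logincreate" and emails a token
    token = get_account_verification_token(email=form.email)
    verify(token)  # promotes the pending row into real login/profile records
# ---------------------------------------------------------------------------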
def email_exists(email):
return d.engine.scalar("""
SELECT
EXISTS (SELECT 0 FROM login WHERE email = %(email)s) OR
EXISTS (SELECT 0 FROM logincreate WHERE email = %(email)s)
""", email=email)
def username_exists(login_name):
return d.engine.scalar("""
SELECT
EXISTS (SELECT 0 FROM login WHERE login_name = %(name)s) OR
EXISTS (SELECT 0 FROM useralias WHERE alias_name = %(name)s) OR
EXISTS (SELECT 0 FROM logincreate WHERE login_name = %(name)s)
""", name=login_name)
def update_unicode_password(userid, password, password_confirm):
if password != password_confirm:
raise WeasylError('passwordMismatch')
if not password_secure(password):
raise WeasylError('passwordInsecure')
hashpw = d.engine.scalar("""
SELECT hashsum FROM authbcrypt WHERE userid = %(userid)s
""", userid=userid).encode('utf-8')
if bcrypt.checkpw(password.encode('utf-8'), hashpw):
return
if not bcrypt.checkpw(d.plaintext(password).encode('utf-8'), hashpw):
raise WeasylError('passwordIncorrect')
d.engine.execute("""
UPDATE authbcrypt SET hashsum = %(hashsum)s WHERE userid = %(userid)s
""", userid=userid, hashsum=passhash(password))
def get_account_verification_token(email=None, username=None):
email = email and emailer.normalize_address(email)
username = username and d.get_sysname(username)
logincreate = d.meta.tables['logincreate']
statement = select([logincreate.c.token])
if email:
statement = statement.where(logincreate.c.email.ilike(email))
else:
statement = statement.where(logincreate.c.login_name == username)
return d.engine.scalar(statement)
def is_email_blacklisted(address):
"""
Determines if a supplied email address is present in the 'emailblacklist' table.
Parameters:
address: The email address to split out the domain from.
Returns:
Boolean True if present on the blacklist, or False otherwise.
"""
local, domain = address.rsplit("@", 1)
return d.engine.scalar(
"SELECT EXISTS (SELECT 0 FROM emailblacklist WHERE domain_name = %(domain_name)s)",
domain_name=domain,
)
def verify_email_change(userid, token):
"""
Verify a user's email change request, updating the `login` record if it validates.
Compare a supplied token against the record within the `emailverify` table, and provided
a match exists, copy the email within into the user's account record.
Parameters:
userid: The userid of the account to attempt to update.
token: The security token to search for.
Returns: The newly set email address when verification of the `token` was successful; raises
a WeasylError upon unsuccessful verification.
"""
# Sanity checks: Must have userid and token
if not userid or not token:
raise WeasylError("Unexpected")
query_result = d.engine.scalar("""
DELETE FROM emailverify
WHERE userid = %(userid)s AND token = %(token)s
RETURNING email
""", userid=userid, token=token)
if not query_result:
raise WeasylError("ChangeEmailVerificationTokenIncorrect")
else:
d.engine.execute("""
UPDATE login
SET email = %(email)s
WHERE userid = %(userid)s
""", userid=userid, email=query_result)
return query_result
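# ---------------------------------------------------------------------------
# Editor's illustrative sketch (hypothetical view code, not part of the
# original module): a typical call site for verify_email_change().
def _example_confirm_email_change(userid, token):
    try:
        new_email = verify_email_change(userid, token)
    except WeasylError:
        return "That verification link is invalid or has already been used."
    return "Your email address has been changed to %s." % new_email
# ---------------------------------------------------------------------------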
| 33.193642 | 122 | 0.647801 |
caeca3e6a5e7afb04d9647091bd84f34aae13814 | 1,267 | py | Python | run_adapters.py | atharva-naik/cartography_model_cap | 02241703e22590c9d8bda126433d4f514eb62201 | [
"MIT"
] | null | null | null | run_adapters.py | atharva-naik/cartography_model_cap | 02241703e22590c9d8bda126433d4f514eb62201 | [
"MIT"
] | null | null | null | run_adapters.py | atharva-naik/cartography_model_cap | 02241703e22590c9d8bda126433d4f514eb62201 | [
"MIT"
] | null | null | null | import os
# comment this out except for KGP servers.
# os.environ['OPENBLAS_NUM_THREADS'] = "20"
import sys
from cartography_adapters import get_cli_args, pprint_args, TrainingDynamics
def main():
adapter=False
notebook=False
    # get command-line arguments
cli_args = get_cli_args()
# print arguments.
pprint_args(cli_args)
# if not notebook:
# td = TrainingDynamics("roberta", "roberta-base", "../roberta-base-tok")
# else:
if adapter: print("\x1b[32;1musing adapters\x1b[0m")
td = TrainingDynamics(
"roberta", "roberta-base",
"../roberta-base-tok", use_adapter=adapter
)
td.train(
"./data/MNLI/original/multinli_1.0_train.jsonl",
"./data/MNLI/original/multinli_1.0_dev_matched.jsonl"
)
# if notebook:
# td.train(
# "/content/drive/MyDrive/SDM/data/MNLI/original/multinli_1.0_train.jsonl",
# "/content/drive/MyDrive/SDM/data/MNLI/original/multinli_1.0_dev_matched.jsonl"
# )
# else:
# td.train(
# "./data/MNLI/original/multinli_1.0_train.jsonl",
# "./data/MNLI/original/multinli_1.0_dev_matched.jsonl"
# )
# hello_world(**vars(cli_args))
if __name__ == "__main__":
main() | 31.675 | 92 | 0.632202 |
64bac46264d676cee090102103eac6eaf77c2faa | 19,466 | py | Python | tests/helpers/test_template.py | smilepc/Home-assistant | db3bfad0b5e0815ba1e255d4d646af7c99caef8b | [
"MIT"
] | null | null | null | tests/helpers/test_template.py | smilepc/Home-assistant | db3bfad0b5e0815ba1e255d4d646af7c99caef8b | [
"MIT"
] | null | null | null | tests/helpers/test_template.py | smilepc/Home-assistant | db3bfad0b5e0815ba1e255d4d646af7c99caef8b | [
"MIT"
] | null | null | null | """Test Home Assistant template helper methods."""
# pylint: disable=too-many-public-methods
import unittest
from unittest.mock import patch
from homeassistant.components import group
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import template
from homeassistant.util.unit_system import UnitSystem
from homeassistant.const import (
LENGTH_METERS,
TEMP_CELSIUS,
MASS_GRAMS,
VOLUME_LITERS,
)
import homeassistant.util.dt as dt_util
from tests.common import get_test_home_assistant
class TestUtilTemplate(unittest.TestCase):
"""Test the Template."""
def setUp(self): # pylint: disable=invalid-name
"""Setup the tests."""
self.hass = get_test_home_assistant()
self.hass.config.units = UnitSystem('custom', TEMP_CELSIUS,
LENGTH_METERS, VOLUME_LITERS,
MASS_GRAMS)
def tearDown(self): # pylint: disable=invalid-name
"""Stop down stuff we started."""
self.hass.stop()
def test_referring_states_by_entity_id(self):
"""."""
self.hass.states.set('test.object', 'happy')
self.assertEqual(
'happy',
template.render(self.hass, '{{ states.test.object.state }}'))
def test_iterating_all_states(self):
"""."""
self.hass.states.set('test.object', 'happy')
self.hass.states.set('sensor.temperature', 10)
self.assertEqual(
'10happy',
template.render(
self.hass,
'{% for state in states %}{{ state.state }}{% endfor %}'))
def test_iterating_domain_states(self):
"""."""
self.hass.states.set('test.object', 'happy')
self.hass.states.set('sensor.back_door', 'open')
self.hass.states.set('sensor.temperature', 10)
self.assertEqual(
'open10',
template.render(
self.hass,
"""
{% for state in states.sensor %}{{ state.state }}{% endfor %}
"""))
def test_float(self):
"""."""
self.hass.states.set('sensor.temperature', '12')
self.assertEqual(
'12.0',
template.render(
self.hass,
'{{ float(states.sensor.temperature.state) }}'))
self.assertEqual(
'True',
template.render(
self.hass,
'{{ float(states.sensor.temperature.state) > 11 }}'))
def test_rounding_value(self):
"""."""
self.hass.states.set('sensor.temperature', 12.78)
self.assertEqual(
'12.8',
template.render(
self.hass,
'{{ states.sensor.temperature.state | round(1) }}'))
self.assertEqual(
'128',
template.render(
self.hass,
'{{ states.sensor.temperature.state | multiply(10) | round }}'
))
def test_rounding_value_get_original_value_on_error(self):
"""."""
self.assertEqual(
'None',
template.render(
self.hass,
'{{ None | round }}'
))
self.assertEqual(
'no_number',
template.render(
self.hass,
'{{ "no_number" | round }}'
))
def test_multiply(self):
"""."""
tests = {
None: 'None',
10: '100',
'"abcd"': 'abcd'
}
for inp, out in tests.items():
self.assertEqual(
out,
template.render(self.hass,
'{{ %s | multiply(10) | round }}' % inp))
def test_timestamp_custom(self):
"""Test the timestamps to custom filter."""
tests = [
(None, None, None, 'None'),
(1469119144, None, True, '2016-07-21 16:39:04'),
(1469119144, '%Y', True, '2016'),
(1469119144, 'invalid', True, 'invalid'),
(dt_util.as_timestamp(dt_util.utcnow()), None, False,
dt_util.now().strftime('%Y-%m-%d %H:%M:%S'))
]
for inp, fmt, local, out in tests:
            if fmt and local:
                # both a format string and an explicit local flag were supplied
                fil = 'timestamp_custom(\'{0}\', {1})'.format(fmt, local)
            elif fmt:
                fil = 'timestamp_custom(\'{}\')'.format(fmt)
            else:
                fil = 'timestamp_custom'
self.assertEqual(
out,
template.render(self.hass, '{{ %s | %s }}' % (inp, fil))
)
def test_timestamp_local(self):
"""Test the timestamps to local filter."""
tests = {
None: 'None',
1469119144: '2016-07-21 16:39:04',
}
for inp, out in tests.items():
self.assertEqual(
out,
template.render(self.hass,
'{{ %s | timestamp_local }}' % inp))
def test_timestamp_utc(self):
"""Test the timestamps to local filter."""
tests = {
None: 'None',
1469119144: '2016-07-21 16:39:04',
dt_util.as_timestamp(dt_util.utcnow()):
dt_util.now().strftime('%Y-%m-%d %H:%M:%S')
}
for inp, out in tests.items():
self.assertEqual(
out,
template.render(self.hass,
'{{ %s | timestamp_utc }}' % inp))
def test_passing_vars_as_keywords(self):
"""."""
self.assertEqual(
'127', template.render(self.hass, '{{ hello }}', hello=127))
def test_passing_vars_as_vars(self):
"""."""
self.assertEqual(
'127', template.render(self.hass, '{{ hello }}', {'hello': 127}))
def test_render_with_possible_json_value_with_valid_json(self):
"""."""
self.assertEqual(
'world',
template.render_with_possible_json_value(
self.hass, '{{ value_json.hello }}', '{"hello": "world"}'))
def test_render_with_possible_json_value_with_invalid_json(self):
"""."""
self.assertEqual(
'',
template.render_with_possible_json_value(
self.hass, '{{ value_json }}', '{ I AM NOT JSON }'))
def test_render_with_possible_json_value_with_template_error(self):
"""."""
self.assertEqual(
'hello',
template.render_with_possible_json_value(
self.hass, '{{ value_json', 'hello'))
def test_render_with_possible_json_value_with_template_error_value(self):
"""."""
self.assertEqual(
'-',
template.render_with_possible_json_value(
self.hass, '{{ value_json', 'hello', '-'))
def test_raise_exception_on_error(self):
"""."""
with self.assertRaises(TemplateError):
template.render(self.hass, '{{ invalid_syntax')
def test_if_state_exists(self):
"""."""
self.hass.states.set('test.object', 'available')
self.assertEqual(
'exists',
template.render(
self.hass,
"""
{% if states.test.object %}exists{% else %}not exists{% endif %}
"""))
def test_is_state(self):
"""."""
self.hass.states.set('test.object', 'available')
self.assertEqual(
'yes',
template.render(
self.hass,
"""
{% if is_state("test.object", "available") %}yes{% else %}no{% endif %}
"""))
def test_is_state_attr(self):
"""."""
self.hass.states.set('test.object', 'available', {'mode': 'on'})
self.assertEqual(
'yes',
template.render(
self.hass,
"""
{% if is_state_attr("test.object", "mode", "on") %}yes{% else %}no{% endif %}
"""))
def test_states_function(self):
"""."""
self.hass.states.set('test.object', 'available')
self.assertEqual(
'available',
template.render(self.hass, '{{ states("test.object") }}'))
self.assertEqual(
'unknown',
template.render(self.hass, '{{ states("test.object2") }}'))
@patch('homeassistant.core.dt_util.utcnow', return_value=dt_util.utcnow())
@patch('homeassistant.helpers.template.TemplateEnvironment.'
'is_safe_callable', return_value=True)
def test_now(self, mock_is_safe, mock_utcnow):
"""."""
self.assertEqual(
dt_util.utcnow().isoformat(),
template.render(self.hass, '{{ now.isoformat() }}'))
@patch('homeassistant.core.dt_util.utcnow', return_value=dt_util.utcnow())
@patch('homeassistant.helpers.template.TemplateEnvironment.'
'is_safe_callable', return_value=True)
def test_utcnow(self, mock_is_safe, mock_utcnow):
"""."""
self.assertEqual(
dt_util.utcnow().isoformat(),
template.render(self.hass, '{{ utcnow.isoformat() }}'))
def test_utcnow_is_exactly_now(self):
"""."""
self.assertEqual(
'True',
template.render(self.hass, '{{ utcnow == now }}'))
def test_distance_function_with_1_state(self):
"""."""
self.hass.states.set('test.object', 'happy', {
'latitude': 32.87336,
'longitude': -117.22943,
})
self.assertEqual(
'187',
template.render(
self.hass, '{{ distance(states.test.object) | round }}'))
def test_distance_function_with_2_states(self):
"""."""
self.hass.states.set('test.object', 'happy', {
'latitude': 32.87336,
'longitude': -117.22943,
})
self.hass.states.set('test.object_2', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
self.assertEqual(
'187',
template.render(
self.hass,
'{{ distance(states.test.object, states.test.object_2)'
'| round }}'))
def test_distance_function_with_1_coord(self):
"""."""
self.assertEqual(
'187',
template.render(
self.hass, '{{ distance("32.87336", "-117.22943") | round }}'))
def test_distance_function_with_2_coords(self):
"""."""
self.assertEqual(
'187',
template.render(
self.hass,
'{{ distance("32.87336", "-117.22943", %s, %s) | round }}'
% (self.hass.config.latitude, self.hass.config.longitude)))
def test_distance_function_with_1_state_1_coord(self):
"""."""
self.hass.states.set('test.object_2', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
self.assertEqual(
'187',
template.render(
self.hass,
'{{ distance("32.87336", "-117.22943", states.test.object_2) '
'| round }}'))
self.assertEqual(
'187',
template.render(
self.hass,
'{{ distance(states.test.object_2, "32.87336", "-117.22943") '
'| round }}'))
def test_distance_function_return_None_if_invalid_state(self):
"""."""
self.hass.states.set('test.object_2', 'happy', {
'latitude': 10,
})
self.assertEqual(
'None',
template.render(
self.hass,
'{{ distance(states.test.object_2) | round }}'))
def test_distance_function_return_None_if_invalid_coord(self):
"""."""
self.assertEqual(
'None',
template.render(
self.hass,
'{{ distance("123", "abc") }}'))
self.assertEqual(
'None',
template.render(
self.hass,
'{{ distance("123") }}'))
self.hass.states.set('test.object_2', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
self.assertEqual(
'None',
template.render(
self.hass,
'{{ distance("123", states.test_object_2) }}'))
def test_closest_function_home_vs_domain(self):
"""."""
self.hass.states.set('test_domain.object', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('not_test_domain.but_closer', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
self.assertEqual(
'test_domain.object',
template.render(self.hass,
'{{ closest(states.test_domain).entity_id }}'))
def test_closest_function_home_vs_all_states(self):
"""."""
self.hass.states.set('test_domain.object', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('test_domain_2.and_closer', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
self.assertEqual(
'test_domain_2.and_closer',
template.render(self.hass,
'{{ closest(states).entity_id }}'))
def test_closest_function_home_vs_group_entity_id(self):
"""."""
self.hass.states.set('test_domain.object', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('not_in_group.but_closer', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
group.Group(self.hass, 'location group', ['test_domain.object'])
self.assertEqual(
'test_domain.object',
template.render(self.hass,
'{{ closest("group.location_group").entity_id }}'))
def test_closest_function_home_vs_group_state(self):
"""."""
self.hass.states.set('test_domain.object', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('not_in_group.but_closer', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
group.Group(self.hass, 'location group', ['test_domain.object'])
self.assertEqual(
'test_domain.object',
template.render(
self.hass,
'{{ closest(states.group.location_group).entity_id }}'))
def test_closest_function_to_coord(self):
"""."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('test_domain.closest_zone', 'happy', {
'latitude': self.hass.config.latitude + 0.2,
'longitude': self.hass.config.longitude + 0.2,
})
self.hass.states.set('zone.far_away', 'zoning', {
'latitude': self.hass.config.latitude + 0.3,
'longitude': self.hass.config.longitude + 0.3,
})
self.assertEqual(
'test_domain.closest_zone',
template.render(
self.hass,
'{{ closest("%s", %s, states.test_domain).entity_id }}'
% (self.hass.config.latitude + 0.3,
self.hass.config.longitude + 0.3))
)
def test_closest_function_to_entity_id(self):
"""."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('test_domain.closest_zone', 'happy', {
'latitude': self.hass.config.latitude + 0.2,
'longitude': self.hass.config.longitude + 0.2,
})
self.hass.states.set('zone.far_away', 'zoning', {
'latitude': self.hass.config.latitude + 0.3,
'longitude': self.hass.config.longitude + 0.3,
})
self.assertEqual(
'test_domain.closest_zone',
template.render(
self.hass,
'{{ closest("zone.far_away", states.test_domain).entity_id }}')
)
def test_closest_function_to_state(self):
"""."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('test_domain.closest_zone', 'happy', {
'latitude': self.hass.config.latitude + 0.2,
'longitude': self.hass.config.longitude + 0.2,
})
self.hass.states.set('zone.far_away', 'zoning', {
'latitude': self.hass.config.latitude + 0.3,
'longitude': self.hass.config.longitude + 0.3,
})
self.assertEqual(
'test_domain.closest_zone',
template.render(
self.hass,
'{{ closest(states.zone.far_away, '
'states.test_domain).entity_id }}')
)
def test_closest_function_invalid_state(self):
"""."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
for state in ('states.zone.non_existing', '"zone.non_existing"'):
self.assertEqual(
'None',
template.render(
self.hass, '{{ closest(%s, states) }}' % state))
def test_closest_function_state_with_invalid_location(self):
"""."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': 'invalid latitude',
'longitude': self.hass.config.longitude + 0.1,
})
self.assertEqual(
'None',
template.render(
self.hass,
'{{ closest(states.test_domain.closest_home, '
'states) }}'))
def test_closest_function_invalid_coordinates(self):
"""."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.assertEqual(
'None',
template.render(self.hass,
'{{ closest("invalid", "coord", states) }}'))
def test_closest_function_no_location_states(self):
"""."""
self.assertEqual('None',
template.render(self.hass, '{{ closest(states) }}'))
| 33.105442 | 79 | 0.522501 |
b91344661c0ad3020b2a53f00c09b83ee3c4072b | 1,000 | py | Python | ga_reports/users/admin.py | MikaelSantilio/ga-reports | c92f3053fbf0d2f88d5cb57cf625c1e0e82a36e9 | [
"MIT"
] | 1 | 2021-05-19T16:37:01.000Z | 2021-05-19T16:37:01.000Z | ga_reports/users/admin.py | MikaelSantilio/ga-reports | c92f3053fbf0d2f88d5cb57cf625c1e0e82a36e9 | [
"MIT"
] | 4 | 2021-05-12T05:52:49.000Z | 2022-03-31T09:08:22.000Z | ga_reports/users/admin.py | MikaelSantilio/ga-reports | c92f3053fbf0d2f88d5cb57cf625c1e0e82a36e9 | [
"MIT"
] | null | null | null | from django.contrib import admin
from django.contrib.auth import admin as auth_admin
from django.contrib.auth import get_user_model
from django.utils.translation import gettext_lazy as _
from ga_reports.users.forms import UserChangeForm, UserCreationForm
User = get_user_model()
@admin.register(User)
class UserAdmin(auth_admin.UserAdmin):
form = UserChangeForm
add_form = UserCreationForm
fieldsets = (
(None, {"fields": ("username", "password")}),
(_("Personal info"), {"fields": ("name", "email")}),
(
_("Permissions"),
{
"fields": (
"is_active",
"is_staff",
"is_superuser",
"groups",
"user_permissions",
),
},
),
(_("Important dates"), {"fields": ("last_login", "date_joined")}),
)
list_display = ["username", "name", "is_superuser"]
search_fields = ["name"]
| 28.571429 | 74 | 0.557 |
57628cfc23fd2fcd20f73cdc537ff320d10f83e7 | 2,931 | py | Python | configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py | kiyoon/Video-Swin-Transformer | 7a0d40ced8fb52c064d1cd11ffa8b0c3bbb77607 | [
"Apache-2.0"
] | 648 | 2021-06-24T19:33:09.000Z | 2022-03-31T06:27:24.000Z | configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py | jayleicn/mmaction2-1 | 0a6fde1abb8403f1f68b568f5b4694c6f828e27e | [
"Apache-2.0"
] | 53 | 2021-07-01T03:07:52.000Z | 2022-03-27T16:15:29.000Z | configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py | jayleicn/mmaction2-1 | 0a6fde1abb8403f1f68b568f5b4694c6f828e27e | [
"Apache-2.0"
] | 117 | 2021-06-25T01:22:32.000Z | 2022-03-31T08:33:55.000Z | _base_ = [
'../../_base_/models/tsm_mobilenet_v2.py',
'../../_base_/schedules/sgd_tsm_mobilenet_v2_100e.py',
'../../_base_/default_runtime.py'
]
# dataset settings
dataset_type = 'RawframeDataset'
data_root = 'data/kinetics400/rawframes_train'
data_root_val = 'data/kinetics400/rawframes_val'
ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt'
ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt'
ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)
train_pipeline = [
dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),
dict(type='RawFrameDecode'),
dict(type='Resize', scale=(-1, 256)),
dict(
type='MultiScaleCrop',
input_size=224,
scales=(1, 0.875, 0.75, 0.66),
random_crop=False,
max_wh_scale_gap=1,
num_fixed_crops=13),
dict(type='Resize', scale=(224, 224), keep_ratio=False),
dict(type='Flip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='FormatShape', input_format='NCHW'),
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs', 'label'])
]
val_pipeline = [
dict(
type='DenseSampleFrames',
clip_len=1,
frame_interval=1,
num_clips=8,
test_mode=True),
dict(type='RawFrameDecode'),
dict(type='Resize', scale=(-1, 256)),
dict(type='CenterCrop', crop_size=224),
dict(type='Normalize', **img_norm_cfg),
dict(type='FormatShape', input_format='NCHW'),
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs'])
]
test_pipeline = [
dict(
type='DenseSampleFrames',
clip_len=1,
frame_interval=1,
num_clips=8,
test_mode=True),
dict(type='RawFrameDecode'),
dict(type='Resize', scale=(-1, 256)),
dict(type='ThreeCrop', crop_size=256),
dict(type='Normalize', **img_norm_cfg),
dict(type='FormatShape', input_format='NCHW'),
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs'])
]
data = dict(
videos_per_gpu=8,
workers_per_gpu=4,
train=dict(
type=dataset_type,
ann_file=ann_file_train,
data_prefix=data_root,
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=ann_file_val,
data_prefix=data_root_val,
pipeline=val_pipeline),
test=dict(
type=dataset_type,
ann_file=ann_file_test,
data_prefix=data_root_val,
pipeline=test_pipeline))
evaluation = dict(
interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy'])
# runtime settings
checkpoint_config = dict(interval=1)
work_dir = './work_dirs/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/'
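# ---------------------------------------------------------------------------
# Editor's note (illustrative, kept as a comment so the config stays loadable):
# MMCV's Config.fromfile() is the usual way to materialise this file, merging
# in the _base_ entries above. The path below is an assumption about the repo
# layout.
#
#     from mmcv import Config
#     cfg = Config.fromfile('configs/recognition/tsm/'
#                           'tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py')
#     print(cfg.data.videos_per_gpu)    # 8, as set in the data dict above
#     print(cfg.evaluation['metrics'])  # ['top_k_accuracy', 'mean_class_accuracy']
# ---------------------------------------------------------------------------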
| 33.306818 | 78 | 0.657455 |
55460cd1c553515059ba1a8f678bf082be2cc1d2 | 1,117 | py | Python | strawberry/enum.py | skalarsystems/strawberry | 95525e10966bb61c37b68fd438dd07ef15a0a811 | [
"MIT"
] | 1 | 2020-10-22T01:22:48.000Z | 2020-10-22T01:22:48.000Z | strawberry/enum.py | skalarsystems/strawberry | 95525e10966bb61c37b68fd438dd07ef15a0a811 | [
"MIT"
] | null | null | null | strawberry/enum.py | skalarsystems/strawberry | 95525e10966bb61c37b68fd438dd07ef15a0a811 | [
"MIT"
] | null | null | null | import dataclasses
from enum import EnumMeta
from typing import Any, List, Optional
from .exceptions import NotAnEnum
@dataclasses.dataclass
class EnumValue:
name: str
value: Any
@dataclasses.dataclass
class EnumDefinition:
name: str
values: List[EnumValue]
description: Optional[str]
def _process_enum(cls, name=None, description=None):
if not isinstance(cls, EnumMeta):
raise NotAnEnum()
if not name:
name = cls.__name__
description = description
values = [EnumValue(item.name, item.value) for item in cls] # type: ignore
cls._enum_definition = EnumDefinition( # type: ignore
name=name,
values=values,
description=description,
)
return cls
def enum(_cls=None, *, name=None, description=None):
"""Registers the enum in the GraphQL type system.
If name is passed, the name of the GraphQL type will be
the value passed of name instead of the Enum class name.
"""
def wrap(cls):
return _process_enum(cls, name, description)
if not _cls:
return wrap
return wrap(_cls)
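# ---------------------------------------------------------------------------
# Editor's illustrative sketch (not part of the original module): how the
# decorator above is typically applied. The IceCreamFlavour enum is made up.
def _example_usage():
    from enum import Enum
    @enum(description="Available flavours")
    class IceCreamFlavour(Enum):
        VANILLA = "vanilla"
        CHOCOLATE = "chocolate"
    definition = IceCreamFlavour._enum_definition
    assert definition.name == "IceCreamFlavour"
    assert [v.value for v in definition.values] == ["vanilla", "chocolate"]
    return definition
# ---------------------------------------------------------------------------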
| 20.309091 | 79 | 0.675918 |
31da83c692f25421886a6cd097a6eed35bc162d5 | 2,376 | py | Python | panoptes_cli/commands/user.py | sarpu/panoptes-cli | cb2e6fc3a17644055102f396344f8390c3878d3f | [
"Apache-2.0"
] | 16 | 2016-06-16T16:02:00.000Z | 2021-07-01T13:22:18.000Z | panoptes_cli/commands/user.py | sarpu/panoptes-cli | cb2e6fc3a17644055102f396344f8390c3878d3f | [
"Apache-2.0"
] | 106 | 2016-01-18T10:26:21.000Z | 2022-03-24T10:48:27.000Z | panoptes_cli/commands/user.py | sarpu/panoptes-cli | cb2e6fc3a17644055102f396344f8390c3878d3f | [
"Apache-2.0"
] | 5 | 2016-01-19T09:47:23.000Z | 2020-12-19T10:03:00.000Z | import yaml
import click
from panoptes_cli.scripts.panoptes import cli
from panoptes_client import Panoptes, User
@cli.group()
def user():
"""Contains commands for retrieving information about users."""
pass
@user.command()
@click.option(
'--email',
'-e',
help='Search for users by email address (only works if you\'re an admin).',
type=str,
)
@click.option(
'--login',
'-l',
help='Search for users by login name.',
type=str,
)
@click.argument('user-id', required=False, type=int)
def info(user_id, email, login):
"""
Displays information about a user. Defaults to the current user if no ID or
search criteria are given.
"""
if (user_id and email) or (user_id and login) or (email and login):
click.echo(
'Error: At most only one of user ID, login, or email may be '
'specified.',
err=True,
)
return -1
if user_id:
user = User.find(user_id)
elif email:
try:
user = next(User.where(email=email))
except StopIteration:
user = None
if getattr(user, 'email', '') != email:
click.echo('User not found', err=True)
return -1
else:
if not login:
login = Panoptes.client().username
try:
user = next(User.where(login=login))
except StopIteration:
user = None
if getattr(user, 'login', '') != login:
click.echo('User not found', err=True)
return -1
click.echo(yaml.dump(user.raw))
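# ---------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original CLI): the command can
# also be exercised programmatically with Click's test runner. This assumes an
# already-configured, authenticated Panoptes client; the login name is made up.
#
#     from click.testing import CliRunner
#     result = CliRunner().invoke(info, ['--login', 'example_user'])
#     print(result.output)   # YAML dump of the matching user, or an error
# ---------------------------------------------------------------------------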
@user.command()
@click.option(
'--force',
'-f',
is_flag=True,
help='Delete without asking for confirmation.',
)
@click.argument('user-ids', required=True, nargs=-1, type=int)
def delete(force, user_ids):
"""
Deletes a user. Only works if you're an admin.
"""
for user_id in user_ids:
user = User.find(user_id)
if not force:
click.confirm('Delete user {} ({})?'.format(
user_id,
user.login,
), abort=True)
user.delete()
@user.command()
def token():
"""
Returns the current oauth token and its expiration date.
"""
click.echo("Token: {}".format(Panoptes.client().get_bearer_token()))
click.echo("Expiry time: {}".format(Panoptes.client().bearer_expires)) | 24.75 | 79 | 0.577441 |
60101b7ff88352463e24808b721dc9529ccb1877 | 2,373 | py | Python | pypureclient/flasharray/FA_2_10/api/__init__.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 14 | 2018-12-07T18:30:27.000Z | 2022-02-22T09:12:33.000Z | pypureclient/flasharray/FA_2_10/api/__init__.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 28 | 2019-09-17T21:03:52.000Z | 2022-03-29T22:07:35.000Z | pypureclient/flasharray/FA_2_10/api/__init__.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 15 | 2020-06-11T15:50:08.000Z | 2022-03-21T09:27:25.000Z | from __future__ import absolute_import
# import apis into api package
from .api_clients_api import APIClientsApi
from .active_directory_api import ActiveDirectoryApi
from .administrators_api import AdministratorsApi
from .alert_watchers_api import AlertWatchersApi
from .alerts_api import AlertsApi
from .apps_api import AppsApi
from .array_connections_api import ArrayConnectionsApi
from .arrays_api import ArraysApi
from .audits_api import AuditsApi
from .authorization_api import AuthorizationApi
from .certificates_api import CertificatesApi
from .connections_api import ConnectionsApi
from .controllers_api import ControllersApi
from .dns_api import DNSApi
from .directories_api import DirectoriesApi
from .directory_exports_api import DirectoryExportsApi
from .directory_quotas_api import DirectoryQuotasApi
from .directory_services_api import DirectoryServicesApi
from .directory_snapshots_api import DirectorySnapshotsApi
from .drives_api import DrivesApi
from .file_systems_api import FileSystemsApi
from .hardware_api import HardwareApi
from .host_groups_api import HostGroupsApi
from .hosts_api import HostsApi
from .kmip_api import KMIPApi
from .maintenance_windows_api import MaintenanceWindowsApi
from .network_interfaces_api import NetworkInterfacesApi
from .offloads_api import OffloadsApi
from .pod_replica_links_api import PodReplicaLinksApi
from .pods_api import PodsApi
from .policies_api import PoliciesApi
from .ports_api import PortsApi
from .protection_group_snapshots_api import ProtectionGroupSnapshotsApi
from .protection_groups_api import ProtectionGroupsApi
from .remote_pods_api import RemotePodsApi
from .remote_protection_group_snapshots_api import RemoteProtectionGroupSnapshotsApi
from .remote_protection_groups_api import RemoteProtectionGroupsApi
from .remote_volume_snapshots_api import RemoteVolumeSnapshotsApi
from .smi_s_api import SMISApi
from .smtp_api import SMTPApi
from .snmp_agents_api import SNMPAgentsApi
from .snmp_managers_api import SNMPManagersApi
from .sessions_api import SessionsApi
from .software_api import SoftwareApi
from .subnets_api import SubnetsApi
from .support_api import SupportApi
from .syslog_api import SyslogApi
from .virtual_machines_api import VirtualMachinesApi
from .volume_groups_api import VolumeGroupsApi
from .volume_snapshots_api import VolumeSnapshotsApi
from .volumes_api import VolumesApi
| 42.375 | 84 | 0.887063 |
cb20a3ac34047a12d0fb23babaac403b74033fa7 | 8,958 | py | Python | docs/_api/conf.py | arjenroodselaar/skidl | 0bf801bd3b74e6ef94bd9aa1b68eef756b568276 | [
"MIT"
] | 2 | 2022-02-27T14:31:52.000Z | 2022-02-27T14:31:56.000Z | docs/_api/conf.py | arjenroodselaar/skidl | 0bf801bd3b74e6ef94bd9aa1b68eef756b568276 | [
"MIT"
] | null | null | null | docs/_api/conf.py | arjenroodselaar/skidl | 0bf801bd3b74e6ef94bd9aa1b68eef756b568276 | [
"MIT"
] | 1 | 2020-09-21T23:31:41.000Z | 2020-09-21T23:31:41.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# skidl documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
skidl_root = os.path.join(project_root, '../..', 'skidl')
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
sys.path.insert(1, skidl_root)
import skidl.pckg_info as pckg_info
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon', 'sphinx.ext.viewcode', 'sphinx.ext.githubpages', 'recommonmark']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = {'.rst': 'restructuredtext', '.md': 'markdown'}
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document. Don't use index.rst because that will conflict
# with the index.rst file generated with Jekyll.
master_doc = 'api'
# General information about the project.
project = u'skidl'
copyright = u'2016-2019, XESS Corp.'
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = pckg_info.__version__
# The full version, including alpha/beta/rc tags.
release = pckg_info.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'alabaster'
html_theme = 'agogo'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
html_show_copyright = False
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'skidldoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'skidl.tex',
u'skidl Documentation',
u'XESS Corp.', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'skidl',
u'skidl Documentation',
[u'XESS Corp.'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'skidl',
u'skidl Documentation',
u'XESS Corp.',
'skidl',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# autodoc options.
autodoc_member_order = 'bysource'
autoclass_content = "both"
autodoc_default_flags = {"members", "undoc-members", "private-members", "special-members"}
autodoc_default_options = {"exclude-members": "__dict__, __module__, __weakref__"}
| 31.212544 | 123 | 0.716231 |
49e5effeae40239e5c1b63e42efe2a2e1e8b2fe1 | 4,092 | py | Python | calliope/core/io.py | guidogz/Calliope_guido | 148ee39c3671e55ad3a1a2da216ee23112d16abf | [
"Apache-2.0"
] | null | null | null | calliope/core/io.py | guidogz/Calliope_guido | 148ee39c3671e55ad3a1a2da216ee23112d16abf | [
"Apache-2.0"
] | null | null | null | calliope/core/io.py | guidogz/Calliope_guido | 148ee39c3671e55ad3a1a2da216ee23112d16abf | [
"Apache-2.0"
] | null | null | null | """
Copyright (C) 2013-2019 Calliope contributors listed in AUTHORS.
Licensed under the Apache 2.0 License (see LICENSE file).
io.py
~~~~~
Functions to read and save model results.
"""
import os
import xarray as xr
from calliope._version import __version__
from calliope import exceptions
from calliope.core.util.dataset import split_loc_techs
def read_netcdf(path):
"""Read model_data from NetCDF file"""
with xr.open_dataset(path) as model_data:
model_data.load()
calliope_version = model_data.attrs.get('calliope_version', False)
if calliope_version:
        if str(calliope_version) not in __version__:
exceptions.warn(
'This model data was created with Calliope version {}, '
'but you are running {}. Proceed with caution!'.format(calliope_version, __version__)
)
# FIXME some checks for consistency
# use check_dataset from the checks module
# also check the old checking from 0.5.x
return model_data
def save_netcdf(model_data, path, model=None):
encoding = {k: {'zlib': True, 'complevel': 4} for k in model_data.data_vars}
original_model_data_attrs = model_data.attrs
model_data_attrs = model_data.attrs.copy()
if model is not None and hasattr(model, '_model_run'):
# Attach _model_run and _debug_data to _model_data
model_run_to_save = model._model_run.copy()
if 'timeseries_data' in model_run_to_save:
del model_run_to_save['timeseries_data'] # Can't be serialised!
model_data_attrs['_model_run'] = model_run_to_save.to_yaml()
model_data_attrs['_debug_data'] = model._debug_data.to_yaml()
# Convert boolean attrs to ints
bool_attrs = [
k for k, v in model_data_attrs.items()
if isinstance(v, bool)
]
for k in bool_attrs:
model_data_attrs[k] = int(model_data_attrs[k])
# Convert None attrs to 'None'
none_attrs = [
k for k, v in model_data_attrs.items()
if v is None
]
for k in none_attrs:
model_data_attrs[k] = 'None'
# Convert `object` dtype coords to string
# FIXME: remove once xarray issue https://github.com/pydata/xarray/issues/2404 is resolved
for k, v in model_data.coords.items():
if v.dtype == 'O':
model_data[k] = v.astype('<U{}'.format(max([len(i.item()) for i in v])))
try:
model_data.attrs = model_data_attrs
model_data.to_netcdf(path, format='netCDF4', encoding=encoding)
model_data.close() # Force-close NetCDF file after writing
finally: # Revert model_data.attrs back
model_data.attrs = original_model_data_attrs
def save_csv(model_data, path, dropna=True):
"""
If termination condition was not optimal, filters inputs only, and
warns that results will not be saved.
"""
os.makedirs(path, exist_ok=False)
# a MILP model which optimises to within the MIP gap, but does not fully
# converge on the LP relaxation, may return as 'feasible', not 'optimal'
if ('termination_condition' not in model_data.attrs or
model_data.attrs['termination_condition'] in ['optimal', 'feasible']):
data_vars = model_data.data_vars
else:
data_vars = model_data.filter_by_attrs(is_result=0).data_vars
exceptions.warn(
'Model termination condition was not optimal, saving inputs only.'
)
for var in data_vars:
in_out = 'results' if model_data[var].attrs['is_result'] else 'inputs'
out_path = os.path.join(path, '{}_{}.csv'.format(in_out, var))
series = split_loc_techs(model_data[var], return_as='Series')
if dropna:
series = series.dropna()
series.to_csv(out_path, header=True)
def save_lp(model, path):
if not model.run_config['backend'] == 'pyomo':
raise IOError('Only the pyomo backend can save to LP.')
if not hasattr(model, '_backend_model'):
model.run(build_only=True)
model._backend_model.write(path, format='lp', io_options={'symbolic_solver_labels': True})
| 34.386555 | 101 | 0.672532 |
f8fff395415e6ca64413af4bcdf10841371e5e2b | 10,107 | py | Python | src/oci/object_storage/models/create_preauthenticated_request_details.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 249 | 2017-09-11T22:06:05.000Z | 2022-03-04T17:09:29.000Z | src/oci/object_storage/models/create_preauthenticated_request_details.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 228 | 2017-09-11T23:07:26.000Z | 2022-03-23T10:58:50.000Z | src/oci/object_storage/models/create_preauthenticated_request_details.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 224 | 2017-09-27T07:32:43.000Z | 2022-03-25T16:55:42.000Z | # coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class CreatePreauthenticatedRequestDetails(object):
"""
CreatePreauthenticatedRequestDetails model.
"""
#: A constant which can be used with the access_type property of a CreatePreauthenticatedRequestDetails.
#: This constant has a value of "ObjectRead"
ACCESS_TYPE_OBJECT_READ = "ObjectRead"
#: A constant which can be used with the access_type property of a CreatePreauthenticatedRequestDetails.
#: This constant has a value of "ObjectWrite"
ACCESS_TYPE_OBJECT_WRITE = "ObjectWrite"
#: A constant which can be used with the access_type property of a CreatePreauthenticatedRequestDetails.
#: This constant has a value of "ObjectReadWrite"
ACCESS_TYPE_OBJECT_READ_WRITE = "ObjectReadWrite"
#: A constant which can be used with the access_type property of a CreatePreauthenticatedRequestDetails.
#: This constant has a value of "AnyObjectWrite"
ACCESS_TYPE_ANY_OBJECT_WRITE = "AnyObjectWrite"
#: A constant which can be used with the access_type property of a CreatePreauthenticatedRequestDetails.
#: This constant has a value of "AnyObjectRead"
ACCESS_TYPE_ANY_OBJECT_READ = "AnyObjectRead"
#: A constant which can be used with the access_type property of a CreatePreauthenticatedRequestDetails.
#: This constant has a value of "AnyObjectReadWrite"
ACCESS_TYPE_ANY_OBJECT_READ_WRITE = "AnyObjectReadWrite"
def __init__(self, **kwargs):
"""
Initializes a new CreatePreauthenticatedRequestDetails object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param name:
The value to assign to the name property of this CreatePreauthenticatedRequestDetails.
:type name: str
:param bucket_listing_action:
The value to assign to the bucket_listing_action property of this CreatePreauthenticatedRequestDetails.
:type bucket_listing_action: str
:param object_name:
The value to assign to the object_name property of this CreatePreauthenticatedRequestDetails.
:type object_name: str
:param access_type:
The value to assign to the access_type property of this CreatePreauthenticatedRequestDetails.
Allowed values for this property are: "ObjectRead", "ObjectWrite", "ObjectReadWrite", "AnyObjectWrite", "AnyObjectRead", "AnyObjectReadWrite"
:type access_type: str
:param time_expires:
The value to assign to the time_expires property of this CreatePreauthenticatedRequestDetails.
:type time_expires: datetime
"""
self.swagger_types = {
'name': 'str',
'bucket_listing_action': 'str',
'object_name': 'str',
'access_type': 'str',
'time_expires': 'datetime'
}
self.attribute_map = {
'name': 'name',
'bucket_listing_action': 'bucketListingAction',
'object_name': 'objectName',
'access_type': 'accessType',
'time_expires': 'timeExpires'
}
self._name = None
self._bucket_listing_action = None
self._object_name = None
self._access_type = None
self._time_expires = None
@property
def name(self):
"""
**[Required]** Gets the name of this CreatePreauthenticatedRequestDetails.
A user-specified name for the pre-authenticated request. Names can be helpful in managing pre-authenticated requests.
Avoid entering confidential information.
:return: The name of this CreatePreauthenticatedRequestDetails.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this CreatePreauthenticatedRequestDetails.
A user-specified name for the pre-authenticated request. Names can be helpful in managing pre-authenticated requests.
Avoid entering confidential information.
:param name: The name of this CreatePreauthenticatedRequestDetails.
:type: str
"""
self._name = name
@property
def bucket_listing_action(self):
"""
Gets the bucket_listing_action of this CreatePreauthenticatedRequestDetails.
Specifies whether a list operation is allowed on a PAR with accessType \"AnyObjectRead\" or \"AnyObjectReadWrite\".
Deny: Prevents the user from performing a list operation.
ListObjects: Authorizes the user to perform a list operation.
:return: The bucket_listing_action of this CreatePreauthenticatedRequestDetails.
:rtype: str
"""
return self._bucket_listing_action
@bucket_listing_action.setter
def bucket_listing_action(self, bucket_listing_action):
"""
Sets the bucket_listing_action of this CreatePreauthenticatedRequestDetails.
Specifies whether a list operation is allowed on a PAR with accessType \"AnyObjectRead\" or \"AnyObjectReadWrite\".
Deny: Prevents the user from performing a list operation.
ListObjects: Authorizes the user to perform a list operation.
:param bucket_listing_action: The bucket_listing_action of this CreatePreauthenticatedRequestDetails.
:type: str
"""
self._bucket_listing_action = bucket_listing_action
@property
def object_name(self):
"""
Gets the object_name of this CreatePreauthenticatedRequestDetails.
The name of the object that is being granted access to by the pre-authenticated request. Avoid entering confidential
information. The object name can be null and if so, the pre-authenticated request grants access to the entire bucket
if the access type allows that. The object name can be a prefix as well, in that case pre-authenticated request
grants access to all the objects within the bucket starting with that prefix provided that we have the correct access type.
:return: The object_name of this CreatePreauthenticatedRequestDetails.
:rtype: str
"""
return self._object_name
@object_name.setter
def object_name(self, object_name):
"""
Sets the object_name of this CreatePreauthenticatedRequestDetails.
The name of the object that is being granted access to by the pre-authenticated request. Avoid entering confidential
information. The object name can be null and if so, the pre-authenticated request grants access to the entire bucket
if the access type allows that. The object name can be a prefix as well, in that case pre-authenticated request
grants access to all the objects within the bucket starting with that prefix provided that we have the correct access type.
:param object_name: The object_name of this CreatePreauthenticatedRequestDetails.
:type: str
"""
self._object_name = object_name
@property
def access_type(self):
"""
**[Required]** Gets the access_type of this CreatePreauthenticatedRequestDetails.
The operation that can be performed on this resource.
Allowed values for this property are: "ObjectRead", "ObjectWrite", "ObjectReadWrite", "AnyObjectWrite", "AnyObjectRead", "AnyObjectReadWrite"
:return: The access_type of this CreatePreauthenticatedRequestDetails.
:rtype: str
"""
return self._access_type
@access_type.setter
def access_type(self, access_type):
"""
Sets the access_type of this CreatePreauthenticatedRequestDetails.
The operation that can be performed on this resource.
:param access_type: The access_type of this CreatePreauthenticatedRequestDetails.
:type: str
"""
allowed_values = ["ObjectRead", "ObjectWrite", "ObjectReadWrite", "AnyObjectWrite", "AnyObjectRead", "AnyObjectReadWrite"]
if not value_allowed_none_or_none_sentinel(access_type, allowed_values):
raise ValueError(
"Invalid value for `access_type`, must be None or one of {0}"
.format(allowed_values)
)
self._access_type = access_type
@property
def time_expires(self):
"""
**[Required]** Gets the time_expires of this CreatePreauthenticatedRequestDetails.
The expiration date for the pre-authenticated request as per `RFC 3339`__.
After this date the pre-authenticated request will no longer be valid.
__ https://tools.ietf.org/html/rfc3339
:return: The time_expires of this CreatePreauthenticatedRequestDetails.
:rtype: datetime
"""
return self._time_expires
@time_expires.setter
def time_expires(self, time_expires):
"""
Sets the time_expires of this CreatePreauthenticatedRequestDetails.
The expiration date for the pre-authenticated request as per `RFC 3339`__.
After this date the pre-authenticated request will no longer be valid.
__ https://tools.ietf.org/html/rfc3339
:param time_expires: The time_expires of this CreatePreauthenticatedRequestDetails.
:type: datetime
"""
self._time_expires = time_expires
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| 41.085366 | 245 | 0.70001 |
a50f1eaba9456b55a44888e38d204d8e8b908958 | 610 | py | Python | duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/numtrees_200/rule_164.py | apcarrik/kaggle | 6e2d4db58017323e7ba5510bcc2598e01a4ee7bf | [
"MIT"
] | null | null | null | duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/numtrees_200/rule_164.py | apcarrik/kaggle | 6e2d4db58017323e7ba5510bcc2598e01a4ee7bf | [
"MIT"
] | null | null | null | duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/numtrees_200/rule_164.py | apcarrik/kaggle | 6e2d4db58017323e7ba5510bcc2598e01a4ee7bf | [
"MIT"
] | null | null | null | def findDecision(obj): #obj[0]: Driving_to, obj[1]: Passanger, obj[2]: Weather, obj[3]: Temperature, obj[4]: Time, obj[5]: Coupon, obj[6]: Coupon_validity, obj[7]: Gender, obj[8]: Age, obj[9]: Maritalstatus, obj[10]: Children, obj[11]: Education, obj[12]: Occupation, obj[13]: Income, obj[14]: Bar, obj[15]: Coffeehouse, obj[16]: Carryaway, obj[17]: Restaurantlessthan20, obj[18]: Restaurant20to50, obj[19]: Direction_same, obj[20]: Distance
# {"feature": "Occupation", "instances": 5, "metric_value": 0.971, "depth": 1}
if obj[12]>2:
return 'True'
elif obj[12]<=2:
return 'False'
else: return 'False'
| 76.25 | 441 | 0.678689 |
29a7585669258f41406d1cf3b496dcee5376dabd | 309 | py | Python | models/Status.py | FanchiniRudolf/gamification-back-end | aca5c465d5ef0e695049221564f8725112478afa | [
"MIT"
] | null | null | null | models/Status.py | FanchiniRudolf/gamification-back-end | aca5c465d5ef0e695049221564f8725112478afa | [
"MIT"
] | null | null | null | models/Status.py | FanchiniRudolf/gamification-back-end | aca5c465d5ef0e695049221564f8725112478afa | [
"MIT"
] | null | null | null | from core.Model import *
from core.Utils import Utils
class Status(Base, Model):
# STATUS
PENDING = 1
PROCESSING = 2
ERROR = 3
SEND = 4
__tablename__ = "status"
id = Column(BigInteger, primary_key=True, autoincrement=True)
description = Column(String(100), nullable=False)
| 19.3125 | 65 | 0.669903 |
f832fb18541439675fa6590755fafe7de89361a9 | 793 | py | Python | stream_reader/obs_reader.py | anarkia7115/pubmed_parser | c671e324f68345ee6afc23b8c2f762cd35354406 | [
"MIT"
] | null | null | null | stream_reader/obs_reader.py | anarkia7115/pubmed_parser | c671e324f68345ee6afc23b8c2f762cd35354406 | [
"MIT"
] | null | null | null | stream_reader/obs_reader.py | anarkia7115/pubmed_parser | c671e324f68345ee6afc23b8c2f762cd35354406 | [
"MIT"
] | null | null | null | import os
import configparser
import boto3
config = configparser.ConfigParser()
config.read("config.ini")
class ObsReader(object):
def __init__(self):
self.s3_client = boto3.client(
service_name='s3',
aws_access_key_id=os.getenv("AK"),
aws_secret_access_key=os.getenv("SK"),
endpoint_url=config['OBS']['endpoint'],
)
self.bucket = config['OBS']['bucket']
def read_chunk(self, obs_key, chunk_size):
obj = self.s3_client.get_object(
Bucket=self.bucket,
Key=obs_key)
return obj.get("Body").read(chunk_size)
def read_obj(self, obs_key):
obj = self.s3_client.get_object(
Bucket=self.bucket,
Key=obs_key)
return obj.get("Body")
| 24.78125 | 51 | 0.601513 |
f0a86ca7a7a0d960778bffbf51f75eee0fbbf3f1 | 12,861 | py | Python | tests/frontier/test_ethash.py | ethereum/eth1.0-specs | ac7d0edbfcc5a21ef869eb1e7a25e2f6df4a2eac | [
"CC0-1.0"
] | 152 | 2020-08-12T15:22:13.000Z | 2021-08-17T17:13:57.000Z | tests/frontier/test_ethash.py | voith/eth1.0-specs | 53281c10f8cbdade5bd9a318f739a619044d4c8d | [
"CC0-1.0"
] | 260 | 2020-09-03T14:00:20.000Z | 2021-08-16T20:10:59.000Z | tests/frontier/test_ethash.py | voith/eth1.0-specs | 53281c10f8cbdade5bd9a318f739a619044d4c8d | [
"CC0-1.0"
] | 72 | 2020-09-09T19:44:12.000Z | 2021-08-14T23:02:11.000Z | import json
import pkgutil
import shutil
import subprocess
from random import randint
from typing import Any, Dict, List, Tuple, cast
import pytest
from ethereum import crypto
from ethereum.base_types import U256_CEIL_VALUE, Uint
from ethereum.crypto import keccak256
from ethereum.ethash import (
EPOCH_SIZE,
HASH_BYTES,
MIX_BYTES,
cache_size,
dataset_size,
epoch,
generate_cache,
generate_dataset_item,
generate_seed,
hashimoto_light,
)
from ethereum.frontier import rlp
from ethereum.frontier.eth_types import Header
from ethereum.frontier.spec import (
generate_header_hash_for_pow,
validate_proof_of_work,
)
from ethereum.frontier.utils.json import json_to_header
from ethereum.utils.hexadecimal import (
hex_to_bytes,
hex_to_bytes8,
hex_to_bytes32,
)
from ethereum.utils.numeric import is_prime, le_uint32_sequence_to_bytes
@pytest.mark.parametrize(
"block_number, expected_epoch",
[
(Uint(0), Uint(0)),
(Uint(29999), Uint(0)),
(Uint(30000), Uint(1)),
],
)
def test_epoch(block_number: Uint, expected_epoch: Uint) -> None:
assert epoch(block_number) == expected_epoch
def test_epoch_start_and_end_blocks_have_same_epoch() -> None:
for _ in range(100):
block_number = Uint(randint(10 ** 9, 2 * (10 ** 9)))
epoch_start_block_number = (block_number // EPOCH_SIZE) * EPOCH_SIZE
epoch_end_block_number = epoch_start_block_number + EPOCH_SIZE - 1
assert (
epoch(block_number)
== epoch(epoch_start_block_number)
== epoch(epoch_end_block_number)
)
def test_cache_size_1st_epoch() -> None:
assert (
cache_size(Uint(0)) == cache_size(Uint(0) + EPOCH_SIZE - 1) == 16776896
)
assert is_prime(cache_size(Uint(0)) // HASH_BYTES)
def test_cache_size_2048_epochs() -> None:
cache_size_2048_epochs = json.loads(
cast(
bytes,
pkgutil.get_data(
"ethereum", "assets/cache_sizes_2048_epochs.json"
),
).decode()
)
assert len(cache_size_2048_epochs) == 2048
for epoch_number in range(2048):
assert (
cache_size(Uint(epoch_number * EPOCH_SIZE))
== cache_size_2048_epochs[epoch_number]
)
def test_epoch_start_and_end_blocks_have_same_cache_size() -> None:
for _ in range(100):
block_number = Uint(randint(10 ** 9, 2 * (10 ** 9)))
epoch_start_block_number = (block_number // EPOCH_SIZE) * EPOCH_SIZE
epoch_end_block_number = epoch_start_block_number + EPOCH_SIZE - 1
assert (
cache_size(block_number)
== cache_size(epoch_start_block_number)
== cache_size(epoch_end_block_number)
)
def test_dataset_size_1st_epoch() -> None:
assert (
dataset_size(Uint(0))
== dataset_size(Uint(0 + EPOCH_SIZE - 1))
== 1073739904
)
assert is_prime(dataset_size(Uint(0)) // MIX_BYTES)
def test_dataset_size_2048_epochs() -> None:
dataset_size_2048_epochs = json.loads(
cast(
bytes,
pkgutil.get_data(
"ethereum", "assets/dataset_sizes_2048_epochs.json"
),
).decode()
)
assert len(dataset_size_2048_epochs) == 2048
for epoch_number in range(2048):
assert (
dataset_size(Uint(epoch_number * EPOCH_SIZE))
== dataset_size_2048_epochs[epoch_number]
)
def test_epoch_start_and_end_blocks_have_same_dataset_size() -> None:
for _ in range(100):
block_number = Uint(randint(10 ** 9, 2 * (10 ** 9)))
epoch_start_block_number = (block_number // EPOCH_SIZE) * EPOCH_SIZE
epoch_end_block_number = epoch_start_block_number + EPOCH_SIZE - 1
assert (
dataset_size(block_number)
== dataset_size(epoch_start_block_number)
== dataset_size(epoch_end_block_number)
)
def test_seed() -> None:
assert (
generate_seed(Uint(0))
== generate_seed(Uint(0 + EPOCH_SIZE - 1))
== b"\x00" * 32
)
assert (
generate_seed(Uint(EPOCH_SIZE))
== generate_seed(Uint(2 * EPOCH_SIZE - 1))
== keccak256(b"\x00" * 32)
)
    # NOTE: The bytes value below was obtained by computing the seed for the same block number with Geth.
assert (
generate_seed(Uint(12345678))
== b"[\x8c\xa5\xaaC\x05\xae\xed<\x87\x1d\xbc\xabQBGj\xfd;\x9cJ\x98\xf6Dq\\z\xaao\x1c\xf7\x03"
)
def test_epoch_start_and_end_blocks_have_same_seed() -> None:
for _ in range(100):
block_number = Uint(randint(10000, 20000))
epoch_start_block_number = (block_number // EPOCH_SIZE) * EPOCH_SIZE
epoch_end_block_number = epoch_start_block_number + EPOCH_SIZE - 1
assert (
generate_seed(epoch_start_block_number)
== generate_seed(block_number)
== generate_seed(epoch_end_block_number)
)
def test_ethtest_fixtures() -> None:
ethereum_tests = load_pow_test_fixtures()
for test in ethereum_tests:
header = test["header"]
assert header.nonce == test["nonce"]
assert header.mix_digest == test["mix_digest"]
assert generate_seed(header.number) == test["seed"]
assert cache_size(header.number) == test["cache_size"]
assert dataset_size(header.number) == test["dataset_size"]
header_hash = generate_header_hash_for_pow(header)
assert header_hash == test["header_hash"]
cache = generate_cache(header.number)
cache_hash = crypto.keccak256(
b"".join(
le_uint32_sequence_to_bytes(cache_item) for cache_item in cache
)
)
assert cache_hash == test["cache_hash"]
mix_digest, result = hashimoto_light(
header_hash, header.nonce, cache, dataset_size(header.number)
)
assert mix_digest == test["mix_digest"]
assert result == test["result"]
def load_pow_test_fixtures() -> List[Dict[str, Any]]:
with open(
"tests/fixtures/PoWTests/ethash_tests.json"
) as pow_test_file_handler:
return [
{
"nonce": hex_to_bytes8(raw_fixture["nonce"]),
"mix_digest": hex_to_bytes32(raw_fixture["mixHash"]),
"header": rlp.decode_to_header(
hex_to_bytes(raw_fixture["header"])
),
"seed": hex_to_bytes32(raw_fixture["seed"]),
"result": hex_to_bytes32(raw_fixture["result"]),
"cache_size": Uint(raw_fixture["cache_size"]),
"dataset_size": Uint(raw_fixture["full_size"]),
"header_hash": hex_to_bytes32(raw_fixture["header_hash"]),
"cache_hash": hex_to_bytes32(raw_fixture["cache_hash"]),
}
for raw_fixture in json.load(pow_test_file_handler).values()
]
@pytest.mark.slow
@pytest.mark.parametrize(
"block_number, block_difficulty, header_hash, nonce, expected_mix_digest, expected_result",
[
[
Uint(1),
Uint(17171480576),
"0x85913a3057ea8bec78cd916871ca73802e77724e014dda65add3405d02240eb7",
"0x539bd4979fef1ec4",
"0x969b900de27b6ac6a67742365dd65f55a0526c41fd18e1b16f1a1215c2e66f59",
"0x000000002bc095dd4de049873e6302c3f14a7f2e5b5a1f60cdf1f1798164d610",
],
[
Uint(5),
Uint(17154711556),
"0xfe557bbc2346abe74c4e66b1843df7a884f83e3594a210d96594c455c32d33c1",
"0xfba9d0cff9dc5cf3",
"0x17b85b5ec310c4868249fa2f378c83b4f330e2d897e5373a8195946c71d1d19e",
"0x000000000767f35d1d21220cb5c53e060afd84fadd622db784f0d4b0541c034a",
],
[
Uint(123456),
Uint(4505282870523),
"0xad896938ef53ff923b4336d03573d52c69097dabf8734d71b9546d31db603121",
"0xf4b883fed83092b2",
"0x84d4162717b039a996ffaf59a54158443c62201b76170b02dbad626cca3226d5",
"0x00000000000fb25dfcfe2fcdc9a63c892ce795aba4380513a9705489bf247b07",
],
[
Uint(1000865),
Uint(12652630789208),
"0xcc868f6114e4cadc3876e4ca4e0705b2bcb76955f459bb019a80d72a512eefdb",
"0xc6613bcf40e716d6",
"0xce47e0609103ac85d56bf1637e51afd28e29431f47c11df47db80a63d95efbae",
"0x000000000015de37404be3c9beda75e12ae41ef7c937dcd52130cfc3b389bf42",
],
],
)
def test_pow_random_blocks(
block_number: Uint,
block_difficulty: Uint,
header_hash: str,
nonce: str,
expected_mix_digest: str,
expected_result: str,
) -> None:
mix_digest, result = hashimoto_light(
hex_to_bytes32(header_hash),
hex_to_bytes8(nonce),
generate_cache(block_number),
dataset_size(block_number),
)
assert mix_digest == hex_to_bytes32(expected_mix_digest)
assert result == hex_to_bytes(expected_result)
assert Uint.from_be_bytes(result) <= U256_CEIL_VALUE // (block_difficulty)
@pytest.mark.slow
@pytest.mark.parametrize(
"block_file_name",
[
"block_1.json",
"block_1234567.json",
"block_12964999.json",
],
)
def test_pow_validation_block_headers(block_file_name: str) -> None:
block_str_data = cast(
bytes, pkgutil.get_data("ethereum", f"assets/blocks/{block_file_name}")
).decode()
block_json_data = json.loads(block_str_data)
header: Header = json_to_header(block_json_data)
validate_proof_of_work(header)
# TODO: Once there is a method to download blocks, test the proof-of-work
# validation for the following blocks in each hardfork (except London as the
# current PoW algo won't work from London):
# * Start of hardfork
# * two random blocks inside the hardfork
# * End of hardfork
#
# Geth DAG related functionalities for fuzz testing
#
def generate_dag_via_geth(
geth_path: str, block_number: Uint, dag_dump_dir: str
) -> None:
subprocess.call([geth_path, "makedag", str(block_number), dag_dump_dir])
def fetch_dag_data(dag_dump_dir: str, epoch_seed: bytes) -> Tuple[bytes, ...]:
dag_file_path = f"{dag_dump_dir}/full-R23-{epoch_seed.hex()[:16]}"
with open(dag_file_path, "rb") as fp:
dag_dataset = fp.read()
# The first 8 bytes are Magic Bytes and can be ignored.
dag_dataset = dag_dataset[8:]
dag_dataset_items = []
for i in range(0, len(dag_dataset), HASH_BYTES):
dag_dataset_items.append(dag_dataset[i : i + HASH_BYTES])
return tuple(dag_dataset_items)
GETH_MISSING = """geth binary not found.
Some tests require a copy of the go-ethereum client binary to generate required
data.
The tool `scripts/download_geth_linux.py` can fetch the appropriate version, or
you can download geth from:
https://geth.ethereum.org/downloads/
Make sure you add the directory containing `geth` to your PATH, then try
running the tests again.
"""
@pytest.mark.slow
def test_dataset_generation_random_epoch(tmpdir: str) -> None:
"""
Generate a random epoch and obtain the DAG for that epoch from geth.
Then ensure the following 2 test scenarios:
1. The first 100 dataset indices are same when the python
implementation is compared with the DAG dataset.
2. Randomly take 500 indices between
[101, `dataset size in words` - 1] and ensure that the values are
same between python implementation and DAG dataset.
NOTE - For this test case to run, it is mandatory for Geth to be
installed and accessible
"""
geth_path = shutil.which("geth")
if geth_path is None:
raise Exception(GETH_MISSING)
epoch_number = Uint(randint(0, 100))
block_number = epoch_number * EPOCH_SIZE + randint(0, EPOCH_SIZE - 1)
generate_dag_via_geth(geth_path, block_number, f"{tmpdir}/.ethash")
seed = generate_seed(block_number)
dag_dataset = fetch_dag_data(f"{tmpdir}/.ethash", seed)
cache = generate_cache(block_number)
dataset_size_bytes = dataset_size(block_number)
dataset_size_words = dataset_size_bytes // HASH_BYTES
assert len(dag_dataset) == dataset_size_words
assert generate_dataset_item(cache, Uint(0)) == dag_dataset[0]
for i in range(100):
assert generate_dataset_item(cache, Uint(i)) == dag_dataset[i]
# Then for this dataset randomly take 5000 indices and check the
# data obtained from our implementation with geth DAG
for _ in range(500):
index = Uint(randint(101, dataset_size_words - 1))
dataset_item = generate_dataset_item(cache, index)
assert dataset_item == dag_dataset[index], index
# Manually forcing the dataset out of the memory incase the gc
# doesn't kick in immediately
del dag_dataset
| 32.642132 | 105 | 0.665656 |
0377ea243b69ee749031fa3206a18401ccb44a0f | 3,470 | py | Python | scripts_gpio/therm.py | BertrandFreylin/WeatherStation | 4ab6f5af2af02a83c109ecb79498e4c92e5af5d2 | [
"Apache-2.0"
] | null | null | null | scripts_gpio/therm.py | BertrandFreylin/WeatherStation | 4ab6f5af2af02a83c109ecb79498e4c92e5af5d2 | [
"Apache-2.0"
] | null | null | null | scripts_gpio/therm.py | BertrandFreylin/WeatherStation | 4ab6f5af2af02a83c109ecb79498e4c92e5af5d2 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import ADC0834
import time
import math
def setup_files(number_of_lines):
num_lines_temp = sum(1 for line in open('/home/bertrand/workspace/rasp/static/data/therm_inside.csv'))
if num_lines_temp > number_of_lines:
to_delete = int(num_lines_temp - number_of_lines)
with open('/home/bertrand/workspace/rasp/static/data/therm_inside.csv', 'r') as fin:
data = fin.read().splitlines(True)
with open('/home/bertrand/workspace/rasp/static/data/therm_inside.csv', 'w') as fout:
fout.writelines(data[to_delete:])
fin.close()
fout.close()
num_lines_photo = sum(1 for line in open('/home/bertrand/workspace/rasp/static/data/photo.csv'))
if num_lines_photo > number_of_lines:
to_delete = int(num_lines_photo - number_of_lines)
with open('/home/bertrand/workspace/rasp/static/data/photo.csv', 'r') as fin:
data = fin.read().splitlines(True)
with open('/home/bertrand/workspace/rasp/static/data/photo.csv', 'w') as fout:
fout.writelines(data[to_delete:])
fin.close()
fout.close()
return
def main(number_of_lines, date):
temp_val_raw = ADC0834.getResult(0)
Vr = 5 * float(temp_val_raw) / 255
Rt = 10000 * Vr / (5 - Vr)
temp = 1 / (((math.log(Rt / 10000)) / 3950) + (1 / (273.15 + 25)))
temp_val = round(temp - 273.15)
time.sleep(1)
lum_val = round((ADC0834.getResult(2) * -1) + 255)
weather_temp = open("/home/bertrand/workspace/rasp/static/data/therm_inside.csv", "a+")
weather_temp.write("%s,%s\n" % (date, temp_val))
num_lines_temp = sum(1 for line in open('/home/bertrand/workspace/rasp/static/data/therm_inside.csv'))
if num_lines_temp > number_of_lines:
with open('/home/bertrand/workspace/rasp/static/data/therm_inside.csv', 'r') as fin:
data = fin.read().splitlines(True)
with open('/home/bertrand/workspace/rasp/static/data/therm_inside.csv', 'w') as fout:
fout.writelines(data[1:])
weather_temp.close()
weather_temp_total = open("/home/bertrand/workspace/rasp/static/data/therm_inside_total.csv", "a+")
weather_temp_total.write("%s,%s\n" % (date, temp_val))
weather_temp_total.close()
photo = open("/home/bertrand/workspace/rasp/static/data/photo.csv", "a+")
photo.write("%s,%s\n" % (date, lum_val))
num_lines_photo = sum(1 for line in open('/home/bertrand/workspace/rasp/static/data/photo.csv'))
if num_lines_photo > number_of_lines:
with open('/home/bertrand/workspace/rasp/static/data/photo.csv', 'r') as fin:
data = fin.read().splitlines(True)
with open('/home/bertrand/workspace/rasp/static/data/photo.csv', 'w') as fout:
fout.writelines(data[1:])
photo.close()
photo_total = open("/home/bertrand/workspace/rasp/static/data/photo_total.csv", "a+")
photo_total.write("%s,%s\n" % (date, lum_val))
photo_total.close()
return
def destroy():
weather_temp = open("/home/bertrand/workspace/rasp/static/data/therm_inside.csv", "a+")
weather_temp.close()
weather_temp_total = open("/home/bertrand/workspace/rasp/static/data/therm_inside_total.csv", "a+")
weather_temp_total.close()
photo = open("/home/bertrand/workspace/rasp/static/data/photo.csv", "a+")
photo.close()
photo_total = open("/home/bertrand/workspace/rasp/static/data/photo_total.csv", "a+")
photo_total.close()
return
| 44.487179 | 106 | 0.665994 |
ef32a7580ce01496225600ec16ac30b6222e8250 | 153 | py | Python | srilm/__init__.py | tetsuok/py-srilm-interpolator | 063d87be16e6c7ec8f9b3b0e4f97e2616ec46b46 | [
"BSD-3-Clause"
] | 3 | 2016-05-03T19:05:54.000Z | 2017-09-29T03:05:57.000Z | srilm/tests/__init__.py | tetsuok/py-srilm-interpolator | 063d87be16e6c7ec8f9b3b0e4f97e2616ec46b46 | [
"BSD-3-Clause"
] | null | null | null | srilm/tests/__init__.py | tetsuok/py-srilm-interpolator | 063d87be16e6c7ec8f9b3b0e4f97e2616ec46b46 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2012 Tetsuo Kiso. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
| 38.25 | 52 | 0.764706 |
8c1b3acc1d3ba9f1d23e9006c43826246e2290ca | 2,643 | py | Python | models/alexnet.py | ZhecanJamesWang/MPIIGaze_Pytorch | 369f836d8317b57d9d0f67622d220bc1e80a8696 | [
"MIT"
] | 8 | 2019-02-28T18:16:21.000Z | 2020-07-05T10:15:25.000Z | models/alexnet.py | ZhecanJamesWang/MPIIGaze_Pytorch | 369f836d8317b57d9d0f67622d220bc1e80a8696 | [
"MIT"
] | 1 | 2020-03-19T06:26:16.000Z | 2020-03-19T06:26:16.000Z | models/alexnet.py | ZhecanJamesWang/MPIIGaze_Pytorch | 369f836d8317b57d9d0f67622d220bc1e80a8696 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
# __all__ = ['AlexNet', 'alexnet']
model_urls = {
'alexnet': 'https://download.pytorch.org/models/alexnet-owt-4df8aa71.pth',
}
class Model(nn.Module):
def __init__(self, num_classes=1000):
super(Model, self).__init__()
self.relu = nn.ReLU(inplace=True)
self.features = nn.Sequential(
nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=2),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=3, stride=2),
nn.Conv2d(64, 192, kernel_size=5, padding=2),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=3, stride=2),
nn.Conv2d(192, 384, kernel_size=3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(384, 256, kernel_size=3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(256, 256, kernel_size=3, padding=1),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=3, stride=2),
)
self.classifier = nn.Sequential(
nn.Dropout(),
nn.Linear(256 * 6 * 6, 4096),
nn.ReLU(inplace=True),
nn.Dropout(),
nn.Linear(4096, 4096),
nn.ReLU(inplace=True),
nn.Linear(4096, num_classes),
)
self.load_state_dict(model_zoo.load_url(model_urls['alexnet']))
self.fc1 = nn.Linear(num_classes + 2, 2)
# self.fc1 = nn.Linear(num_classes, 2)
# self.fc1 = nn.Linear(num_classes + 2, 502)
# self.fc2 = nn.Linear(502, 2)
# def forward(self, x, y):
# # x = x.float()
# # y = y.float()
#
# x = self.features(x)
# x = x.view(x.size(0), 256 * 6 * 6)
# x = self.classifier(x)
# # x = torch.cat([x, y], dim=1)
#
# x = self.relu(x)
# x = self.fc1(x)
# # x = self.fc2(x)
# return x
def forward(self, x, y):
# x = x.float()
# y = y.float()
x = self.features(x)
x = x.view(x.size(0), 256 * 6 * 6)
x = self.classifier(x)
x = torch.cat([x, y], dim=1)
x = self.relu(x)
x = self.fc1(x)
# x = self.fc2(x)
return x
def alexnet(pretrained=False, **kwargs):
r"""AlexNet model architecture from the
`"One weird trick..." <https://arxiv.org/abs/1404.5997>`_ paper.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
    model = Model(**kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['alexnet']))
return model
| 28.728261 | 78 | 0.538403 |
e73dbd92426c598ff474cf8eb0981c2b73164e1c | 1,654 | py | Python | fingerprint_large_lib.py | ferluht/dejavu | b1d4cc3495b00b28dc332ac257ec5413ecefbb62 | [
"MIT"
] | null | null | null | fingerprint_large_lib.py | ferluht/dejavu | b1d4cc3495b00b28dc332ac257ec5413ecefbb62 | [
"MIT"
] | null | null | null | fingerprint_large_lib.py | ferluht/dejavu | b1d4cc3495b00b28dc332ac257ec5413ecefbb62 | [
"MIT"
] | null | null | null | import json
from dejavu import Dejavu
from dejavu.logic.recognizer.file_recognizer import FileRecognizer
from dejavu.logic.recognizer.microphone_recognizer import MicrophoneRecognizer
import pandas as pd
import sys
import tqdm
# load config from a JSON file (or anything outputting a python dictionary)
config = {
"database": {
"host": "localhost",
"user": "postgres",
"password": "password",
"database": "dejavu"
},
"database_type": "postgres"
}
if __name__ == '__main__':
root = sys.argv[1]
filelist = pd.read_csv(sys.argv[2], sep=',', header=None)
chunk_size = int(sys.argv[3])
filenames = []
pbar = tqdm(total=filelist.shape[0])
for index, row in filelist.iterrows():
filename = row.values[-1].split('/')[-1]
        filename = os.path.join(root, filename)
try:
data = scipy.io.wavfile.read(filename)
filenames.append(filename)
except Exception as e:
pass
        pbar.update(1)
        if len(filenames) >= chunk_size:
            djv = Dejavu(config)
            djv.fingerprint_filelist(filenames)
            filenames = []
    # Fingerprint any files left over after the last full chunk.
    if filenames:
        djv = Dejavu(config)
        djv.fingerprint_filelist(filenames)
# # Recognize audio from a file
# results = djv.recognize(FileRecognizer, "mp3/Josh-Woodward--I-Want-To-Destroy-Something-Beautiful.mp3")
# print(f"From file we recognized: {results}\n")
# # Or use a recognizer without the shortcut, in anyway you would like
# recognizer = FileRecognizer(djv)
# results = recognizer.recognize_file("mp3/Josh-Woodward--I-Want-To-Destroy-Something-Beautiful.mp3")
# print(f"No shortcut, we recognized: {results}\n")
| 29.535714 | 109 | 0.641475 |
66c2e2e7e3639378642a1bf488255130231552d3 | 10,865 | py | Python | autofit/non_linear/mcmc/zeus/zeus.py | rhayes777/PyAutoF | 87f56419348833b285b00da1a524e329588e0b01 | [
"MIT"
] | 39 | 2019-01-24T10:45:23.000Z | 2022-03-18T09:37:59.000Z | autofit/non_linear/mcmc/zeus/zeus.py | rhayes777/PyAutoF | 87f56419348833b285b00da1a524e329588e0b01 | [
"MIT"
] | 260 | 2018-11-27T12:56:33.000Z | 2022-03-31T16:08:59.000Z | autofit/non_linear/mcmc/zeus/zeus.py | rhayes777/PyAutoF | 87f56419348833b285b00da1a524e329588e0b01 | [
"MIT"
] | 13 | 2018-11-30T16:49:05.000Z | 2022-01-21T17:39:29.000Z | from os import path
from typing import Optional
import numpy as np
import zeus
from sqlalchemy.orm import Session
from autoconf import conf
from autofit import exc
from autofit.mapper.model_mapper import ModelMapper
from autofit.mapper.prior_model.abstract import AbstractPriorModel
from autofit.non_linear.mcmc.abstract_mcmc import AbstractMCMC
from autofit.non_linear.mcmc.auto_correlations import AutoCorrelationsSettings
from autofit.non_linear.mcmc.zeus.samples import ZeusSamples
from autofit.non_linear.abstract_search import PriorPasser
from autofit.non_linear.initializer import Initializer
from autofit.non_linear.mcmc.zeus.plotter import ZeusPlotter
from autofit.plot.output import Output
class Zeus(AbstractMCMC):
__identifier_fields__ = (
"nwalkers",
"tune",
"tolerance",
"patience",
"mu",
"light_mode"
)
def __init__(
self,
name: Optional[str] = None,
path_prefix: Optional[str] = None,
unique_tag: Optional[str] = None,
prior_passer: Optional[PriorPasser] = None,
initializer: Optional[Initializer] = None,
auto_correlations_settings=AutoCorrelationsSettings(),
iterations_per_update: int = None,
number_of_cores: int = None,
session: Optional[Session] = None,
**kwargs
):
"""
An Zeus non-linear search.
For a full description of Zeus, checkout its Github and readthedocs webpages:
https://github.com/minaskar/zeus
https://zeus-mcmc.readthedocs.io/en/latest/
If you use `Zeus` as part of a published work, please cite the package following the instructions under the
*Attribution* section of the GitHub page.
Parameters
----------
name
The name of the search, controlling the last folder results are output.
path_prefix
The path of folders prefixing the name folder where results are output.
unique_tag
The name of a unique tag for this model-fit, which will be given a unique entry in the sqlite database
and also acts as the folder after the path prefix and before the search name.
prior_passer
Controls how priors are passed from the results of this `NonLinearSearch` to a subsequent non-linear search.
nwalkers : int
The number of walkers in the ensemble used to sample parameter space.
nsteps : int
The number of steps that must be taken by every walker. The `NonLinearSearch` will thus run for nwalkers *
nsteps iterations.
initializer
            Generates the initial samples of non-linear parameter space (see autofit.non_linear.initializer).
auto_correlations_settings : AutoCorrelationsSettings
Customizes and performs auto correlation calculations performed during and after the search.
number_of_cores : int
The number of cores Zeus sampling is performed using a Python multiprocessing Pool instance. If 1, a
pool instance is not created and the job runs in serial.
session
An SQLalchemy session instance so the results of the model-fit are written to an SQLite database.
"""
super().__init__(
name=name,
path_prefix=path_prefix,
unique_tag=unique_tag,
prior_passer=prior_passer,
initializer=initializer,
auto_correlations_settings=auto_correlations_settings,
iterations_per_update=iterations_per_update,
session=session,
**kwargs
)
self.number_of_cores = number_of_cores or self._config("parallel", "number_of_cores")
self.logger.debug("Creating Zeus Search")
class Fitness(AbstractMCMC.Fitness):
def __call__(self, parameters):
try:
return self.figure_of_merit_from(parameter_list=parameters)
except exc.FitException:
return self.resample_figure_of_merit
def figure_of_merit_from(self, parameter_list):
"""
The figure of merit is the value that the `NonLinearSearch` uses to sample parameter space.
`Zeus` uses the log posterior.
"""
return self.log_posterior_from(parameter_list=parameter_list)
def _fit(self, model: AbstractPriorModel, analysis, log_likelihood_cap=None):
"""
Fit a model using Zeus and the Analysis class which contains the data and returns the log likelihood from
instances of the model, which the `NonLinearSearch` seeks to maximize.
Parameters
----------
model : ModelMapper
The model which generates instances for different points in parameter space.
analysis : Analysis
Contains the data and the log likelihood function which fits an instance of the model to the data, returning
the log likelihood the `NonLinearSearch` maximizes.
Returns
-------
        A result object comprising the Samples object that includes the maximum log likelihood instance and full
chains used by the fit.
"""
pool = self.make_pool()
fitness_function = self.fitness_function_from_model_and_analysis(
model=model, analysis=analysis
)
if self.paths.is_object("zeus"):
zeus_sampler = self.zeus_pickled
zeus_state = zeus_sampler.get_last_sample()
log_posterior_list = zeus_sampler.get_last_log_prob()
samples = self.samples_from(model=model)
total_iterations = zeus_sampler.iteration
if samples.converged:
iterations_remaining = 0
else:
iterations_remaining = self.config_dict_run["nsteps"] - total_iterations
self.logger.info("Existing Zeus samples found, resuming non-linear search.")
else:
zeus_sampler = zeus.EnsembleSampler(
nwalkers=self.config_dict_search["nwalkers"],
ndim=model.prior_count,
logprob_fn=fitness_function.__call__,
pool=pool,
)
zeus_sampler.ncall_total = 0
unit_parameter_lists, parameter_lists, log_posterior_list = self.initializer.samples_from_model(
total_points=zeus_sampler.nwalkers,
model=model,
fitness_function=fitness_function,
)
zeus_state = np.zeros(shape=(zeus_sampler.nwalkers, model.prior_count))
self.logger.info("No Zeus samples found, beginning new non-linear search.")
for index, parameters in enumerate(parameter_lists):
zeus_state[index, :] = np.asarray(parameters)
total_iterations = 0
iterations_remaining = self.config_dict_run["nsteps"]
while iterations_remaining > 0:
if self.iterations_per_update > iterations_remaining:
iterations = iterations_remaining
else:
iterations = self.iterations_per_update
for sample in zeus_sampler.sample(
start=zeus_state,
log_prob0=log_posterior_list,
iterations=iterations,
progress=True,
):
pass
zeus_sampler.ncall_total += zeus_sampler.ncall
self.paths.save_object(
"zeus",
zeus_sampler
)
zeus_state = zeus_sampler.get_last_sample()
log_posterior_list = zeus_sampler.get_last_log_prob()
total_iterations += iterations
iterations_remaining = self.config_dict_run["nsteps"] - total_iterations
samples = self.perform_update(
model=model, analysis=analysis, during_analysis=True
)
if self.auto_correlations_settings.check_for_convergence:
if zeus_sampler.iteration > self.auto_correlations_settings.check_size:
if samples.converged:
iterations_remaining = 0
auto_correlation_time = zeus.AutoCorrTime(samples=zeus_sampler.get_chain())
discard = int(3.0 * np.max(auto_correlation_time))
thin = int(np.max(auto_correlation_time) / 2.0)
chain = zeus_sampler.get_chain(discard=discard, thin=thin, flat=True)
if "maxcall" in self.kwargs:
if zeus_sampler.ncall_total > self.kwargs["maxcall"]:
iterations_remaining = 0
self.logger.info("Zeus sampling complete.")
def fitness_function_from_model_and_analysis(self, model, analysis, log_likelihood_cap=None):
return Zeus.Fitness(
paths=self.paths,
model=model,
analysis=analysis,
samples_from_model=self.samples_from,
log_likelihood_cap=log_likelihood_cap
)
def samples_from(self, model):
"""Create a `Samples` object from this non-linear search's output files on the hard-disk and model.
Parameters
----------
model
The model which generates instances for different points in parameter space. This maps the points from unit
cube values to physical values via the priors.
"""
return ZeusSamples(
model=model,
zeus_sampler=self.zeus_pickled,
auto_correlation_settings=self.auto_correlations_settings,
time=self.timer.time
)
@property
def zeus_pickled(self):
return self.paths.load_object(
"zeus"
)
def plot_results(self, samples):
def should_plot(name):
return conf.instance["visualize"]["plots_search"]["emcee"][name]
plotter = ZeusPlotter(
samples=samples,
output=Output(path=path.join(self.paths.image_path, "search"), format="png")
)
if should_plot("corner"):
plotter.corner()
if should_plot("trajectories"):
plotter.trajectories()
if should_plot("likelihood_series"):
plotter.likelihood_series()
if should_plot("time_series"):
plotter.time_series()
| 36.955782 | 121 | 0.615094 |
2ff60c36cb6b2f6bfdae8bdb63a9ec2906452c2d | 41,048 | py | Python | ansible-container/openshift-deploy/roles/ansible.kubernetes-modules/library/openshift_v1_image_stream_mapping.py | LeHack/Docker-network-research | 62a57a6d723d8701a6d045a07a5abd2bd844a409 | [
"Beerware"
] | 4 | 2017-06-03T20:46:07.000Z | 2017-12-19T02:15:00.000Z | ansible-container/openshift-deploy/roles/ansible.kubernetes-modules/library/openshift_v1_image_stream_mapping.py | LeHack/Docker-network-research | 62a57a6d723d8701a6d045a07a5abd2bd844a409 | [
"Beerware"
] | 1 | 2017-06-03T20:32:37.000Z | 2017-06-03T20:32:37.000Z | ansible-container/openshift-deploy/roles/ansible.kubernetes-modules/library/openshift_v1_image_stream_mapping.py | LeHack/Docker-network-research | 62a57a6d723d8701a6d045a07a5abd2bd844a409 | [
"Beerware"
] | null | null | null | #!/usr/bin/env python
from ansible.module_utils.openshift_common import OpenShiftAnsibleModule, OpenShiftAnsibleException
DOCUMENTATION = '''
module: openshift_v1_image_stream_mapping
short_description: OpenShift ImageStreamMapping
description:
- Manage the lifecycle of a image_stream_mapping object. Supports check mode, and
attempts to to be idempotent.
version_added: 2.3.0
author: OpenShift (@openshift)
options:
annotations:
description:
- Annotations is an unstructured key value map stored with a resource that may
be set by external tools to store and retrieve arbitrary metadata. They are
not queryable and should be preserved when modifying objects.
type: dict
api_key:
description:
- Token used to connect to the API.
cert_file:
description:
- Path to a certificate used to authenticate with the API.
type: path
context:
description:
- The name of a context found in the Kubernetes config file.
debug:
description:
- Enable debug output from the OpenShift helper. Logging info is written to KubeObjHelper.log
default: false
type: bool
force:
description:
    - If set to C(True), and I(state) is C(present), an existing object will be updated,
and lists will be replaced, rather than merged.
default: false
type: bool
host:
description:
    - Provide a URL for accessing the Kubernetes API.
image_api_version:
description:
- APIVersion defines the versioned schema of this representation of an object.
Servers should convert recognized schemas to the latest internal value, and
may reject unrecognized values.
aliases:
- api_version
image_docker_image_config:
description:
- DockerImageConfig is a JSON blob that the runtime uses to set up the container.
This is a part of manifest schema v2.
aliases:
- docker_image_config
image_docker_image_layers:
description:
- DockerImageLayers represents the layers in the image. May not be set if the
image does not define that data.
aliases:
- docker_image_layers
type: list
image_docker_image_manifest:
description:
- DockerImageManifest is the raw JSON of the manifest
aliases:
- docker_image_manifest
image_docker_image_manifest_media_type:
description:
- DockerImageManifestMediaType specifies the mediaType of manifest. This is a
part of manifest schema v2.
aliases:
- docker_image_manifest_media_type
image_docker_image_metadata_raw:
description:
- Raw is the underlying serialization of this object.
aliases:
- image_docker_metadata_raw
image_docker_image_metadata_version:
description:
- DockerImageMetadataVersion conveys the version of the object, which if empty
defaults to "1.0"
aliases:
- docker_image_metadata_version
image_docker_image_reference:
description:
- DockerImageReference is the string that can be used to pull this image.
aliases:
- docker_image_reference
image_docker_image_signatures:
description:
- DockerImageSignatures provides the signatures as opaque blobs. This is a part
of manifest schema v1.
aliases:
- docker_image_signatures
type: list
image_kind:
description:
- Kind is a string value representing the REST resource this object represents.
Servers may infer this from the endpoint the client submits requests to. Cannot
be updated. In CamelCase.
aliases:
- kind
image_metadata_annotations:
description:
- Annotations is an unstructured key value map stored with a resource that may
be set by external tools to store and retrieve arbitrary metadata. They are
not queryable and should be preserved when modifying objects.
type: dict
image_metadata_labels:
description:
- Map of string keys and values that can be used to organize and categorize (scope
and select) objects. May match selectors of replication controllers and services.
type: dict
image_metadata_name:
description:
- Name must be unique within a namespace. Is required when creating resources,
although some resources may allow a client to request the generation of an appropriate
name automatically. Name is primarily intended for creation idempotence and
configuration definition. Cannot be updated.
image_metadata_namespace:
description:
- Namespace defines the space within each name must be unique. An empty namespace
is equivalent to the "default" namespace, but "default" is the canonical representation.
Not all objects are required to be scoped to a namespace - the value of this
field for those objects will be empty. Must be a DNS_LABEL. Cannot be updated.
image_signatures:
description:
- Signatures holds all signatures of the image.
aliases:
- signatures
type: list
key_file:
description:
- Path to a key file used to authenticate with the API.
type: path
kubeconfig:
description:
- Path to an existing Kubernetes config file. If not provided, and no other connection
options are provided, the openshift client will attempt to load the default
configuration file from I(~/.kube/config.json).
type: path
labels:
description:
- Map of string keys and values that can be used to organize and categorize (scope
and select) objects. May match selectors of replication controllers and services.
type: dict
name:
description:
- Name must be unique within a namespace. Is required when creating resources,
although some resources may allow a client to request the generation of an appropriate
name automatically. Name is primarily intended for creation idempotence and
configuration definition. Cannot be updated.
namespace:
description:
- Namespace defines the space within each name must be unique. An empty namespace
is equivalent to the "default" namespace, but "default" is the canonical representation.
Not all objects are required to be scoped to a namespace - the value of this
field for those objects will be empty. Must be a DNS_LABEL. Cannot be updated.
password:
description:
- Provide a password for connecting to the API. Use in conjunction with I(username).
ssl_ca_cert:
description:
- Path to a CA certificate used to authenticate with the API.
type: path
tag:
description:
- Tag is a string value this image can be located with inside the stream.
username:
description:
- Provide a username for connecting to the API.
verify_ssl:
description:
- Whether or not to verify the API server's SSL certificates.
type: bool
requirements:
- openshift == 1.0.0-snapshot
'''
EXAMPLES = '''
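# Illustrative task only: the namespace, stream name, tag and image reference
# below are placeholder values that have not been validated against a live
# cluster; authentication falls back to the default kubeconfig as described
# in the option documentation above.
- name: Map a docker image into an image stream tag
  openshift_v1_image_stream_mapping:
    namespace: my-project
    name: my-app
    tag: latest
    image_docker_image_reference: docker.io/library/nginx@sha256:0000000000000000000000000000000000000000000000000000000000000000
    image_metadata_name: sha256:0000000000000000000000000000000000000000000000000000000000000000
  register: image_stream_mapping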
'''
RETURN = '''
api_version:
type: string
description: Requested API version
image_stream_mapping:
type: complex
returned: on success
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation of an object.
Servers should convert recognized schemas to the latest internal value, and
may reject unrecognized values.
type: str
image:
description:
- Image is a Docker image.
type: complex
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation of an object.
Servers should convert recognized schemas to the latest internal value,
and may reject unrecognized values.
type: str
docker_image_config:
description:
- DockerImageConfig is a JSON blob that the runtime uses to set up the container.
This is a part of manifest schema v2.
type: str
docker_image_layers:
description:
- DockerImageLayers represents the layers in the image. May not be set if
the image does not define that data.
type: list
contains:
media_type:
description:
- MediaType of the referenced object.
type: str
name:
description:
- Name of the layer as defined by the underlying store.
type: str
size:
description:
- Size of the layer in bytes as defined by the underlying store.
type: int
docker_image_manifest:
description:
- DockerImageManifest is the raw JSON of the manifest
type: str
docker_image_manifest_media_type:
description:
- DockerImageManifestMediaType specifies the mediaType of manifest. This
is a part of manifest schema v2.
type: str
docker_image_metadata:
description:
- DockerImageMetadata contains metadata about this image
type: complex
contains:
raw:
description:
- Raw is the underlying serialization of this object.
type: str
docker_image_metadata_version:
description:
- DockerImageMetadataVersion conveys the version of the object, which if
empty defaults to "1.0"
type: str
docker_image_reference:
description:
- DockerImageReference is the string that can be used to pull this image.
type: str
docker_image_signatures:
description:
- DockerImageSignatures provides the signatures as opaque blobs. This is
a part of manifest schema v1.
type: list
contains: str
kind:
description:
- Kind is a string value representing the REST resource this object represents.
Servers may infer this from the endpoint the client submits requests to.
Cannot be updated. In CamelCase.
type: str
metadata:
description:
- Standard object's metadata.
type: complex
contains:
annotations:
description:
- Annotations is an unstructured key value map stored with a resource
that may be set by external tools to store and retrieve arbitrary
metadata. They are not queryable and should be preserved when modifying
objects.
type: complex
contains: str, str
cluster_name:
description:
- The name of the cluster which the object belongs to. This is used
to distinguish resources with same name and namespace in different
clusters. This field is not set anywhere right now and apiserver is
going to ignore it if set in create or update request.
type: str
creation_timestamp:
description:
- CreationTimestamp is a timestamp representing the server time when
this object was created. It is not guaranteed to be set in happens-before
order across separate operations. Clients may not set this value.
It is represented in RFC3339 form and is in UTC. Populated by the
system. Read-only. Null for lists.
type: complex
contains: {}
deletion_grace_period_seconds:
description:
- Number of seconds allowed for this object to gracefully terminate
before it will be removed from the system. Only set when deletionTimestamp
is also set. May only be shortened. Read-only.
type: int
deletion_timestamp:
description:
- DeletionTimestamp is RFC 3339 date and time at which this resource
will be deleted. This field is set by the server when a graceful deletion
is requested by the user, and is not directly settable by a client.
The resource is expected to be deleted (no longer visible from resource
lists, and not reachable by name) after the time in this field. Once
set, this value may not be unset or be set further into the future,
although it may be shortened or the resource may be deleted prior
to this time. For example, a user may request that a pod is deleted
in 30 seconds. The Kubelet will react by sending a graceful termination
signal to the containers in the pod. After that 30 seconds, the Kubelet
will send a hard termination signal (SIGKILL) to the container and
after cleanup, remove the pod from the API. In the presence of network
partitions, this object may still exist after this timestamp, until
an administrator or automated process can determine the resource is
fully terminated. If not set, graceful deletion of the object has
not been requested. Populated by the system when a graceful deletion
is requested. Read-only.
type: complex
contains: {}
finalizers:
description:
- Must be empty before the object is deleted from the registry. Each
entry is an identifier for the responsible component that will remove
the entry from the list. If the deletionTimestamp of the object is
non-nil, entries in this list can only be removed.
type: list
contains: str
generate_name:
description:
- GenerateName is an optional prefix, used by the server, to generate
a unique name ONLY IF the Name field has not been provided. If this
field is used, the name returned to the client will be different than
the name passed. This value will also be combined with a unique suffix.
The provided value has the same validation rules as the Name field,
and may be truncated by the length of the suffix required to make
the value unique on the server. If this field is specified and the
generated name exists, the server will NOT return a 409 - instead,
it will either return 201 Created or 500 with Reason ServerTimeout
indicating a unique name could not be found in the time allotted,
and the client should retry (optionally after the time indicated in
the Retry-After header). Applied only if Name is not specified.
type: str
generation:
description:
- A sequence number representing a specific generation of the desired
state. Populated by the system. Read-only.
type: int
labels:
description:
- Map of string keys and values that can be used to organize and categorize
(scope and select) objects. May match selectors of replication controllers
and services.
type: complex
contains: str, str
name:
description:
- Name must be unique within a namespace. Is required when creating
resources, although some resources may allow a client to request the
generation of an appropriate name automatically. Name is primarily
intended for creation idempotence and configuration definition. Cannot
be updated.
type: str
namespace:
description:
        - Namespace defines the space within which each name must be unique. An empty
namespace is equivalent to the "default" namespace, but "default"
is the canonical representation. Not all objects are required to be
scoped to a namespace - the value of this field for those objects
will be empty. Must be a DNS_LABEL. Cannot be updated.
type: str
owner_references:
description:
- List of objects depended by this object. If ALL objects in the list
have been deleted, this object will be garbage collected. If this
object is managed by a controller, then an entry in this list will
point to this controller, with the controller field set to true. There
cannot be more than one managing controller.
type: list
contains:
api_version:
description:
- API version of the referent.
type: str
controller:
description:
- If true, this reference points to the managing controller.
type: bool
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
uid:
description:
- UID of the referent.
type: str
resource_version:
description:
- An opaque value that represents the internal version of this object
that can be used by clients to determine when objects have changed.
May be used for optimistic concurrency, change detection, and the
watch operation on a resource or set of resources. Clients must treat
          these values as opaque and pass them unmodified back to the server. They
may only be valid for a particular resource or set of resources. Populated
by the system. Read-only. Value must be treated as opaque by clients
          and passed unmodified back to the server.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated by the system.
Read-only.
type: str
uid:
description:
- UID is the unique in time and space value for this object. It is typically
generated by the server on successful creation of a resource and is
not allowed to change on PUT operations. Populated by the system.
Read-only.
type: str
signatures:
description:
- Signatures holds all signatures of the image.
type: list
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation of
an object. Servers should convert recognized schemas to the latest
internal value, and may reject unrecognized values.
type: str
conditions:
description:
- Conditions represent the latest available observations of a signature's
current state.
type: list
contains:
last_probe_time:
description:
- Last time the condition was checked.
type: complex
contains: {}
last_transition_time:
description:
            - Last time the condition transitioned from one status to another.
type: complex
contains: {}
message:
description:
- Human readable message indicating details about last transition.
type: str
reason:
description:
- (brief) reason for the condition's last transition.
type: str
status:
description:
- Status of the condition, one of True, False, Unknown.
type: str
type:
description:
- Type of signature condition, Complete or Failed.
type: str
content:
description:
- "Required: An opaque binary string which is an image's signature."
type: str
created:
description:
- If specified, it is the time of signature's creation.
type: complex
contains: {}
image_identity:
description:
- A human readable string representing image's identity. It could be
a product name and version, or an image pull spec (e.g. "registry.access.redhat.com/rhel7/rhel:7.2").
type: str
issued_by:
description:
- If specified, it holds information about an issuer of signing certificate
or key (a person or entity who signed the signing certificate or key).
type: complex
contains:
common_name:
description:
- Common name (e.g. openshift-signing-service).
type: str
organization:
description:
- Organization name.
type: str
issued_to:
description:
- If specified, it holds information about a subject of signing certificate
or key (a person or entity who signed the image).
type: complex
contains:
common_name:
description:
- Common name (e.g. openshift-signing-service).
type: str
organization:
description:
- Organization name.
type: str
public_key_id:
description:
- If present, it is a human readable key id of public key belonging
to the subject used to verify image signature. It should contain
at least 64 lowest bits of public key's fingerprint (e.g. 0x685ebe62bf278440).
type: str
kind:
description:
- Kind is a string value representing the REST resource this object
represents. Servers may infer this from the endpoint the client submits
requests to. Cannot be updated. In CamelCase.
type: str
metadata:
description:
- Standard object's metadata.
type: complex
contains:
annotations:
description:
- Annotations is an unstructured key value map stored with a resource
that may be set by external tools to store and retrieve arbitrary
metadata. They are not queryable and should be preserved when
modifying objects.
type: complex
contains: str, str
cluster_name:
description:
- The name of the cluster which the object belongs to. This is used
to distinguish resources with same name and namespace in different
clusters. This field is not set anywhere right now and apiserver
is going to ignore it if set in create or update request.
type: str
creation_timestamp:
description:
- CreationTimestamp is a timestamp representing the server time
when this object was created. It is not guaranteed to be set in
happens-before order across separate operations. Clients may not
set this value. It is represented in RFC3339 form and is in UTC.
Populated by the system. Read-only. Null for lists.
type: complex
contains: {}
deletion_grace_period_seconds:
description:
- Number of seconds allowed for this object to gracefully terminate
before it will be removed from the system. Only set when deletionTimestamp
is also set. May only be shortened. Read-only.
type: int
deletion_timestamp:
description:
- DeletionTimestamp is RFC 3339 date and time at which this resource
will be deleted. This field is set by the server when a graceful
deletion is requested by the user, and is not directly settable
by a client. The resource is expected to be deleted (no longer
visible from resource lists, and not reachable by name) after
the time in this field. Once set, this value may not be unset
or be set further into the future, although it may be shortened
or the resource may be deleted prior to this time. For example,
a user may request that a pod is deleted in 30 seconds. The Kubelet
will react by sending a graceful termination signal to the containers
in the pod. After that 30 seconds, the Kubelet will send a hard
termination signal (SIGKILL) to the container and after cleanup,
remove the pod from the API. In the presence of network partitions,
this object may still exist after this timestamp, until an administrator
or automated process can determine the resource is fully terminated.
If not set, graceful deletion of the object has not been requested.
Populated by the system when a graceful deletion is requested.
Read-only.
type: complex
contains: {}
finalizers:
description:
- Must be empty before the object is deleted from the registry.
Each entry is an identifier for the responsible component that
will remove the entry from the list. If the deletionTimestamp
of the object is non-nil, entries in this list can only be removed.
type: list
contains: str
generate_name:
description:
- GenerateName is an optional prefix, used by the server, to generate
a unique name ONLY IF the Name field has not been provided. If
this field is used, the name returned to the client will be different
than the name passed. This value will also be combined with a
unique suffix. The provided value has the same validation rules
as the Name field, and may be truncated by the length of the suffix
required to make the value unique on the server. If this field
is specified and the generated name exists, the server will NOT
return a 409 - instead, it will either return 201 Created or 500
with Reason ServerTimeout indicating a unique name could not be
found in the time allotted, and the client should retry (optionally
after the time indicated in the Retry-After header). Applied only
if Name is not specified.
type: str
generation:
description:
- A sequence number representing a specific generation of the desired
state. Populated by the system. Read-only.
type: int
labels:
description:
- Map of string keys and values that can be used to organize and
categorize (scope and select) objects. May match selectors of
replication controllers and services.
type: complex
contains: str, str
name:
description:
- Name must be unique within a namespace. Is required when creating
resources, although some resources may allow a client to request
the generation of an appropriate name automatically. Name is primarily
intended for creation idempotence and configuration definition.
Cannot be updated.
type: str
namespace:
description:
            - Namespace defines the space within which each name must be unique. An
empty namespace is equivalent to the "default" namespace, but
"default" is the canonical representation. Not all objects are
required to be scoped to a namespace - the value of this field
for those objects will be empty. Must be a DNS_LABEL. Cannot be
updated.
type: str
owner_references:
description:
- List of objects depended by this object. If ALL objects in the
list have been deleted, this object will be garbage collected.
If this object is managed by a controller, then an entry in this
list will point to this controller, with the controller field
set to true. There cannot be more than one managing controller.
type: list
contains:
api_version:
description:
- API version of the referent.
type: str
controller:
description:
- If true, this reference points to the managing controller.
type: bool
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
uid:
description:
- UID of the referent.
type: str
resource_version:
description:
- An opaque value that represents the internal version of this object
that can be used by clients to determine when objects have changed.
May be used for optimistic concurrency, change detection, and
the watch operation on a resource or set of resources. Clients
              must treat these values as opaque and pass them unmodified back to
the server. They may only be valid for a particular resource or
set of resources. Populated by the system. Read-only. Value must
              be treated as opaque by clients and passed unmodified back to the server.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated by the system.
Read-only.
type: str
uid:
description:
- UID is the unique in time and space value for this object. It
is typically generated by the server on successful creation of
a resource and is not allowed to change on PUT operations. Populated
by the system. Read-only.
type: str
signed_claims:
description:
- Contains claims from the signature.
type: complex
contains: str, str
type:
description:
- 'Required: Describes a type of stored blob.'
type: str
kind:
description:
- Kind is a string value representing the REST resource this object represents.
Servers may infer this from the endpoint the client submits requests to. Cannot
be updated. In CamelCase.
type: str
metadata:
description:
- Standard object's metadata.
type: complex
contains:
annotations:
description:
- Annotations is an unstructured key value map stored with a resource that
may be set by external tools to store and retrieve arbitrary metadata.
They are not queryable and should be preserved when modifying objects.
type: complex
contains: str, str
cluster_name:
description:
- The name of the cluster which the object belongs to. This is used to distinguish
resources with same name and namespace in different clusters. This field
is not set anywhere right now and apiserver is going to ignore it if set
in create or update request.
type: str
creation_timestamp:
description:
- CreationTimestamp is a timestamp representing the server time when this
object was created. It is not guaranteed to be set in happens-before order
across separate operations. Clients may not set this value. It is represented
in RFC3339 form and is in UTC. Populated by the system. Read-only. Null
for lists.
type: complex
contains: {}
deletion_grace_period_seconds:
description:
- Number of seconds allowed for this object to gracefully terminate before
it will be removed from the system. Only set when deletionTimestamp is
also set. May only be shortened. Read-only.
type: int
deletion_timestamp:
description:
- DeletionTimestamp is RFC 3339 date and time at which this resource will
be deleted. This field is set by the server when a graceful deletion is
requested by the user, and is not directly settable by a client. The resource
is expected to be deleted (no longer visible from resource lists, and
not reachable by name) after the time in this field. Once set, this value
may not be unset or be set further into the future, although it may be
shortened or the resource may be deleted prior to this time. For example,
a user may request that a pod is deleted in 30 seconds. The Kubelet will
react by sending a graceful termination signal to the containers in the
pod. After that 30 seconds, the Kubelet will send a hard termination signal
(SIGKILL) to the container and after cleanup, remove the pod from the
API. In the presence of network partitions, this object may still exist
after this timestamp, until an administrator or automated process can
determine the resource is fully terminated. If not set, graceful deletion
of the object has not been requested. Populated by the system when a graceful
deletion is requested. Read-only.
type: complex
contains: {}
finalizers:
description:
- Must be empty before the object is deleted from the registry. Each entry
is an identifier for the responsible component that will remove the entry
from the list. If the deletionTimestamp of the object is non-nil, entries
in this list can only be removed.
type: list
contains: str
generate_name:
description:
- GenerateName is an optional prefix, used by the server, to generate a
unique name ONLY IF the Name field has not been provided. If this field
is used, the name returned to the client will be different than the name
passed. This value will also be combined with a unique suffix. The provided
value has the same validation rules as the Name field, and may be truncated
by the length of the suffix required to make the value unique on the server.
If this field is specified and the generated name exists, the server will
NOT return a 409 - instead, it will either return 201 Created or 500 with
Reason ServerTimeout indicating a unique name could not be found in the
time allotted, and the client should retry (optionally after the time
indicated in the Retry-After header). Applied only if Name is not specified.
type: str
generation:
description:
- A sequence number representing a specific generation of the desired state.
Populated by the system. Read-only.
type: int
labels:
description:
- Map of string keys and values that can be used to organize and categorize
(scope and select) objects. May match selectors of replication controllers
and services.
type: complex
contains: str, str
name:
description:
- Name must be unique within a namespace. Is required when creating resources,
although some resources may allow a client to request the generation of
an appropriate name automatically. Name is primarily intended for creation
idempotence and configuration definition. Cannot be updated.
type: str
namespace:
description:
      - Namespace defines the space within which each name must be unique. An empty
namespace is equivalent to the "default" namespace, but "default" is the
canonical representation. Not all objects are required to be scoped to
a namespace - the value of this field for those objects will be empty.
Must be a DNS_LABEL. Cannot be updated.
type: str
owner_references:
description:
- List of objects depended by this object. If ALL objects in the list have
been deleted, this object will be garbage collected. If this object is
managed by a controller, then an entry in this list will point to this
controller, with the controller field set to true. There cannot be more
than one managing controller.
type: list
contains:
api_version:
description:
- API version of the referent.
type: str
controller:
description:
- If true, this reference points to the managing controller.
type: bool
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
uid:
description:
- UID of the referent.
type: str
resource_version:
description:
- An opaque value that represents the internal version of this object that
can be used by clients to determine when objects have changed. May be
used for optimistic concurrency, change detection, and the watch operation
on a resource or set of resources. Clients must treat these values as
        opaque and pass them unmodified back to the server. They may only be valid
for a particular resource or set of resources. Populated by the system.
        Read-only. Value must be treated as opaque by clients and passed unmodified back to the server.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated by the system. Read-only.
type: str
uid:
description:
- UID is the unique in time and space value for this object. It is typically
generated by the server on successful creation of a resource and is not
allowed to change on PUT operations. Populated by the system. Read-only.
type: str
tag:
description:
- Tag is a string value this image can be located with inside the stream.
type: str
'''
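# Illustrative sketch, not part of the module: the RETURN structure documented
# above is nested, so a caller that captures the module result would reach the
# documented fields roughly like this (the exact result envelope produced by
# execute_module is an assumption here):
#
#   result['kind']                # CamelCase REST resource name
#   result['metadata']['name']    # object name, unique within its namespace
#   result['tag']                 # tag the image can be located with in the stream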
def main():
try:
module = OpenShiftAnsibleModule('image_stream_mapping', 'V1')
except OpenShiftAnsibleException as exc:
# The helper failed to init, so there is no module object. All we can do is raise the error.
raise Exception(exc.message)
try:
module.execute_module()
except OpenShiftAnsibleException as exc:
module.fail_json(msg="Module failed!", error=str(exc))
if __name__ == '__main__':
main()
| 46.698521 | 117 | 0.600468 |
b5f62545ded438be138b603c79bdb478d44608f2 | 722 | py | Python | predict.py | lujiannan/Artificial-Intelligence | 6ecb7f0b0ec18e9f2f374edafa097723c7bee375 | [
"MIT"
] | null | null | null | predict.py | lujiannan/Artificial-Intelligence | 6ecb7f0b0ec18e9f2f374edafa097723c7bee375 | [
"MIT"
] | null | null | null | predict.py | lujiannan/Artificial-Intelligence | 6ecb7f0b0ec18e9f2f374edafa097723c7bee375 | [
"MIT"
] | null | null | null | """
The purpose of this file is to display the labels in a specific image at a specific directory.
Variables that need to be altered before running:
when the input window pops up, enter the address of the image which needs to be predicted.
"""
from yolo import YOLO
from PIL import Image
yolo = YOLO()
normal_list = []
overall_bool = ''
b_dis_count = 0
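# Interactive loop: prompt for an image path, run YOLO detection on it, and
# show the annotated result.  The loop repeats until the process is
# interrupted (e.g. with Ctrl+C).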
while True:
img = input('Input image filename:')
try:
image = Image.open(img)
    except Exception:
print('Open Error! Try again!')
continue
else:
r_image, _, normal_list, overall_bool, b_dis_count = yolo.detect_image(image, normal_list, overall_bool, b_dis_count)
# print(coord_set)
r_image.show()
yolo.close_session()
| 24.066667 | 125 | 0.689751 |
83616dd6cf632b0e02bfa46915372936eecd46a0 | 1,310 | py | Python | utils.py | RaviTejaKomma/Automate-Boring-Stuff-Python | e5d8df1b060f20e50691f824ecabc3a30dc845c7 | [
"MIT"
] | null | null | null | utils.py | RaviTejaKomma/Automate-Boring-Stuff-Python | e5d8df1b060f20e50691f824ecabc3a30dc845c7 | [
"MIT"
] | null | null | null | utils.py | RaviTejaKomma/Automate-Boring-Stuff-Python | e5d8df1b060f20e50691f824ecabc3a30dc845c7 | [
"MIT"
] | null | null | null | import os
import shutil
from subprocess import call
def copy_dir(src_path, dest_path):
try:
print("Copying", src_path, "to", dest_path)
call(['cp', '-rp', src_path, dest_path])
except Exception as e:
print("Exception:", e)
return e
def clean_dir(dir_path, exclude=[]):
print("Cleaning the contents of", dir_path)
for folder in os.listdir(dir_path):
if folder in exclude:
continue
folder_path = os.path.join(dir_path, folder)
if os.path.isdir(folder_path):
shutil.rmtree(folder_path)
else:
os.remove(folder_path)
def retrieve_archive(filename, extract_dir, archive_format):
try:
shutil.unpack_archive(filename, extract_dir, archive_format)
except Exception as e:
print("Exception:", e)
return e
def make_archive(source, destination):
base = os.path.basename(destination)
name = base.split('.')[0]
format = base.split('.')[1]
archive_from = os.path.dirname(source)
archive_to = os.path.basename(source.strip(os.sep))
shutil.make_archive(name, format, archive_from, archive_to)
shutil.move('%s.%s'%(name,format), destination)
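# Example module-level call with placeholder paths; adjust the source folder
# and the destination archive path before using this file directly.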
make_archive('/path/to/folder', '/path/to/folder.zip') | 29.111111 | 68 | 0.638168 |
8ec8245eb19ea10c8ae5e5fb65c476b030899e9c | 53,063 | py | Python | venv/lib/python3.9/site-packages/GameCenter/_metadata.py | ipriyam26/RedditTTS | 8528bdc3adcea1197c7159e6eb4c302487e32406 | [
"MIT"
] | null | null | null | venv/lib/python3.9/site-packages/GameCenter/_metadata.py | ipriyam26/RedditTTS | 8528bdc3adcea1197c7159e6eb4c302487e32406 | [
"MIT"
] | null | null | null | venv/lib/python3.9/site-packages/GameCenter/_metadata.py | ipriyam26/RedditTTS | 8528bdc3adcea1197c7159e6eb4c302487e32406 | [
"MIT"
] | null | null | null | # This file is generated by objective.metadata
#
# Last update: Sat Jul 11 10:43:16 2020
#
# flake8: noqa
import objc, sys
if sys.maxsize > 2 ** 32:
def sel32or64(a, b):
return b
else:
def sel32or64(a, b):
return a
misc = {}
constants = """$GKErrorDomain$GKExchangeTimeoutDefault@d$GKExchangeTimeoutNone@d$GKPlayerAuthenticationDidChangeNotificationName$GKPlayerDidChangeNotificationName$GKSessionErrorDomain$GKTurnTimeoutDefault@d$GKTurnTimeoutNone@d$GKVoiceChatServiceErrorDomain$"""
enums = """$GKChallengeStateCompleted@2$GKChallengeStateDeclined@3$GKChallengeStateInvalid@0$GKChallengeStatePending@1$GKErrorAuthenticationInProgress@7$GKErrorCancelled@2$GKErrorChallengeInvalid@19$GKErrorCommunicationsFailure@3$GKErrorGameUnrecognized@15$GKErrorInvalidCredentials@5$GKErrorInvalidParameter@17$GKErrorInvalidPlayer@8$GKErrorInvitationsDisabled@25$GKErrorMatchRequestInvalid@13$GKErrorNotAuthenticated@6$GKErrorNotSupported@16$GKErrorParentalControlsBlocked@10$GKErrorPlayerPhotoFailure@26$GKErrorPlayerStatusExceedsMaximumLength@11$GKErrorPlayerStatusInvalid@12$GKErrorScoreNotSet@9$GKErrorTurnBasedInvalidParticipant@22$GKErrorTurnBasedInvalidState@24$GKErrorTurnBasedInvalidTurn@23$GKErrorTurnBasedMatchDataTooLarge@20$GKErrorTurnBasedTooManySessions@21$GKErrorUbiquityContainerUnavailable@27$GKErrorUnderage@14$GKErrorUnexpectedConnection@18$GKErrorUnknown@1$GKErrorUserDenied@4$GKGameCenterViewControllerStateAchievements@1$GKGameCenterViewControllerStateChallenges@2$GKGameCenterViewControllerStateDefault@-1$GKGameCenterViewControllerStateLeaderboards@0$GKInviteRecipientResponseAccepted@0$GKInviteRecipientResponseDeclined@1$GKInviteRecipientResponseFailed@2$GKInviteRecipientResponseIncompatible@3$GKInviteRecipientResponseNoAnswer@5$GKInviteRecipientResponseUnableToConnect@4$GKInviteeResponseAccepted@0$GKInviteeResponseDeclined@1$GKInviteeResponseFailed@2$GKInviteeResponseIncompatible@3$GKInviteeResponseNoAnswer@5$GKInviteeResponseUnableToConnect@4$GKLeaderboardPlayerScopeFriendsOnly@1$GKLeaderboardPlayerScopeGlobal@0$GKLeaderboardTimeScopeAllTime@2$GKLeaderboardTimeScopeToday@0$GKLeaderboardTimeScopeWeek@1$GKMatchSendDataReliable@0$GKMatchSendDataUnreliable@1$GKMatchTypeHosted@1$GKMatchTypePeerToPeer@0$GKMatchTypeTurnBased@2$GKPeerStateAvailable@0$GKPeerStateConnected@2$GKPeerStateConnecting@4$GKPeerStateDisconnected@3$GKPeerStateUnavailable@1$GKPhotoSizeNormal@1$GKPhotoSizeSmall@0$GKPlayerStateConnected@1$GKPlayerStateDisconnected@2$GKPlayerStateUnknown@0$GKSendDataReliable@0$GKSendDataUnreliable@1$GKSessionCancelledError@30504$GKSessionCannotEnableError@30509$GKSessionConnectionClosedError@30506$GKSessionConnectionFailedError@30505$GKSessionConnectivityError@30201$GKSessionDataTooBigError@30507$GKSessionDeclinedError@30502$GKSessionInProgressError@30510$GKSessionInternalError@30203$GKSessionInvalidParameterError@30500$GKSessionModeClient@1$GKSessionModePeer@2$GKSessionModeServer@0$GKSessionNotConnectedError@30508$GKSessionPeerNotFoundError@30501$GKSessionSystemError@30205$GKSessionTimedOutError@30503$GKSessionTransportError@30202$GKSessionUnknownError@30204$GKTurnBasedExchangeStatusActive@1$GKTurnBasedExchangeStatusCanceled@4$GKTurnBasedExchangeStatusComplete@2$GKTurnBasedExchangeStatusResolved@3$GKTurnBasedExchangeStatusUnknown@0$GKTurnBasedMatchOutcomeCustomRange@16711680$GKTurnBasedMatchOutcomeFirst@6$GKTurnBasedMatchOutcomeFourth@9$GKTurnBasedMatchOutcomeLost@3$GKTurnBasedMatchOutcomeNone@0$GKTurnBasedMatchOutcomeQuit@1$GKTurnBasedMatchOutcomeSecond@7$GKTurnBasedMatchOutcomeThird@8$GKTurnBasedMatchOutcomeTied@4$GKTurnBasedMatchOutcomeTimeExpired@5$GKTurnBasedMatchOutcomeWon@2$GKTurnBasedMatchStatusEnded@2$GKTurnBasedMatchStatusMatching@3$GKTurnBasedMatchStatusOpen@1$GKTurnBasedMatchStatusUnknown@0$GKTurnBasedParticipantStatusActive@4$GKTurnBasedParticipantStatusDeclined@2$GKTurnBasedParticipantStatusDone@5$GKTurnBasedParticipantStatusInvited@1$GKTurnBasedParticipantStatusMatching@3$GKTurnBasedParticipantStatusUnknown@0$GKVoiceChatPlayerConnected@0$GKVoiceChatPlayerConnecting@4
$GKVoiceChatPlayerDisconnected@1$GKVoiceChatPlayerSilent@3$GKVoiceChatPlayerSpeaking@2$GKVoiceChatServiceAudioUnavailableError@32005$GKVoiceChatServiceClientMissingRequiredMethodsError@32007$GKVoiceChatServiceInternalError@32000$GKVoiceChatServiceInvalidCallIDError@32004$GKVoiceChatServiceInvalidParameterError@32016$GKVoiceChatServiceMethodCurrentlyInvalidError@32012$GKVoiceChatServiceNetworkConfigurationError@32013$GKVoiceChatServiceNoRemotePacketsError@32001$GKVoiceChatServiceOutOfMemoryError@32015$GKVoiceChatServiceRemoteParticipantBusyError@32008$GKVoiceChatServiceRemoteParticipantCancelledError@32009$GKVoiceChatServiceRemoteParticipantDeclinedInviteError@32011$GKVoiceChatServiceRemoteParticipantHangupError@32003$GKVoiceChatServiceRemoteParticipantResponseInvalidError@32010$GKVoiceChatServiceUnableToConnectError@32002$GKVoiceChatServiceUninitializedClientError@32006$GKVoiceChatServiceUnsupportedRemoteVersionError@32014$"""
misc.update({})
r = objc.registerMetaDataForSelector
objc._updatingMetadata(True)
try:
r(
b"GKAchievement",
b"challengeComposeControllerWithMessage:players:completionHandler:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"@"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"Z"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(b"GKAchievement", b"isCompleted", {"retval": {"type": "Z"}})
r(b"GKAchievement", b"isHidden", {"retval": {"type": "Z"}})
r(
b"GKAchievement",
b"loadAchievementsWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKAchievement",
b"reportAchievementWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKAchievement",
b"reportAchievements:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKAchievement",
b"reportAchievements:withEligibleChallenges:withCompletionHandler:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKAchievement",
b"resetAchievementsWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKAchievement",
b"selectChallengeablePlayerIDs:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKAchievement",
b"selectChallengeablePlayers:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(b"GKAchievement", b"setShowsCompletionBanner:", {"arguments": {2: {"type": "Z"}}})
r(b"GKAchievement", b"showsCompletionBanner", {"retval": {"type": "Z"}})
r(b"GKAchievementDescription", b"isHidden", {"retval": {"type": "Z"}})
r(b"GKAchievementDescription", b"isReplayable", {"retval": {"type": "Z"}})
r(
b"GKAchievementDescription",
b"loadAchievementDescriptionsWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKAchievementDescription",
b"loadImageWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKChallenge",
b"loadReceivedChallengesWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(b"GKDialogController", b"presentViewController:", {"retval": {"type": "Z"}})
r(b"GKInvite", b"isHosted", {"retval": {"type": "Z"}})
r(b"GKInvite", b"setHosted:", {"arguments": {2: {"type": "Z"}}})
r(b"GKLeaderboard", b"isLoading", {"retval": {"type": "Z"}})
r(
b"GKLeaderboard",
b"loadCategoriesWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
3: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLeaderboard",
b"loadImageWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLeaderboard",
b"loadLeaderboardsWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLeaderboard",
b"loadScoresWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLeaderboard",
b"setDefaultLeaderboard:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKLeaderboardSet",
b"loadImageWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLeaderboardSet",
b"loadLeaderboardSetsWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLeaderboardSet",
b"loadLeaderboardsWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"authenticateHandler",
{
"retval": {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
},
)
r(
b"GKLocalPlayer",
b"authenticateWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"deleteSavedGamesWithName:completionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"fetchSavedGamesWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"generateIdentityVerificationSignatureWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
3: {"type": b"@"},
4: {"type": sel32or64(b"I", b"Q")},
5: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(b"GKLocalPlayer", b"isAuthenticated", {"retval": {"type": "Z"}})
r(
b"GKLocalPlayer",
b"loadDefaultLeaderboardCategoryIDWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"loadDefaultLeaderboardIdentifierWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"loadFriendPlayersWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"loadFriendsWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"loadLeaderboardSetsWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"resolveConflictingSavedGames:withData:completionHandler:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"saveGameData:withName:completionHandler:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"setAuthenticateHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"setDefaultLeaderboardCategoryID:completionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKLocalPlayer",
b"setDefaultLeaderboardIdentifier:completionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKMatch",
b"chooseBestHostPlayerWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKMatch",
b"chooseBestHostingPlayerWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKMatch",
b"rematchWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKMatch",
b"sendData:toPlayers:dataMode:error:",
{"retval": {"type": "Z"}, "arguments": {5: {"type_modifier": b"o"}}},
)
r(
b"GKMatch",
b"sendData:toPlayers:withDataMode:error:",
{"retval": {"type": "Z"}, "arguments": {5: {"type_modifier": b"o"}}},
)
r(
b"GKMatch",
b"sendDataToAllPlayers:withDataMode:error:",
{"retval": {"type": "Z"}, "arguments": {4: {"type_modifier": b"o"}}},
)
r(
b"GKMatchRequest",
b"inviteeResponseHandler",
{
"retval": {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": sel32or64(b"I", b"Q")},
},
},
"type": "@?",
}
},
)
r(
b"GKMatchRequest",
b"recipientResponseHandler",
{
"retval": {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": sel32or64(b"I", b"Q")},
},
},
"type": "@?",
}
},
)
r(
b"GKMatchRequest",
b"setInviteeResponseHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": sel32or64(b"I", b"Q")},
},
},
"type": "@?",
}
}
},
)
r(
b"GKMatchRequest",
b"setRecipientResponseHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": sel32or64(b"I", b"Q")},
},
},
"type": "@?",
}
}
},
)
r(
b"GKMatchmaker",
b"addPlayersToMatch:matchRequest:completionHandler:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKMatchmaker",
b"findMatchForRequest:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKMatchmaker",
b"findPlayersForHostedMatchRequest:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKMatchmaker",
b"findPlayersForHostedRequest:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKMatchmaker",
b"inviteHandler",
{
"retval": {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
},
)
r(
b"GKMatchmaker",
b"matchForInvite:completionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKMatchmaker",
b"queryActivityWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": sel32or64(b"i", b"q")},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKMatchmaker",
b"queryPlayerGroupActivity:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": sel32or64(b"i", b"q")},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKMatchmaker",
b"setInviteHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKMatchmaker",
b"startBrowsingForNearbyPlayersWithHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"Z"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKMatchmaker",
b"startBrowsingForNearbyPlayersWithReachableHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"Z"},
},
},
"type": "@?",
}
}
},
)
r(b"GKMatchmakerViewController", b"isHosted", {"retval": {"type": "Z"}})
r(b"GKMatchmakerViewController", b"setHosted:", {"arguments": {2: {"type": "Z"}}})
r(
b"GKMatchmakerViewController",
b"setHostedPlayer:connected:",
{"arguments": {3: {"type": "Z"}}},
)
r(
b"GKMatchmakerViewController",
b"setHostedPlayer:didConnect:",
{"arguments": {3: {"type": "Z"}}},
)
r(
b"GKMatchmakerViewController",
b"setShowExistingMatches:",
{"arguments": {2: {"type": "Z"}}},
)
r(b"GKMatchmakerViewController", b"showExistingMatches", {"retval": {"type": "Z"}})
r(
b"GKNotificationBanner",
b"showBannerWithTitle:message:completionHandler:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}},
},
"type": "@?",
}
}
},
)
r(
b"GKNotificationBanner",
b"showBannerWithTitle:message:duration:completionHandler:",
{
"arguments": {
5: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}},
},
"type": "@?",
}
}
},
)
r(
b"GKPlayer",
b"loadPhotoForSize:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKPlayer",
b"loadPlayersForIdentifiers:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKSavedGame",
b"loadDataWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKScore",
b"challengeComposeControllerWithMessage:players:completionHandler:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"@"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"Z"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKScore",
b"reportScoreWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKScore",
b"reportScores:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKScore",
b"reportScores:withEligibleChallenges:withCompletionHandler:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(b"GKScore", b"setShouldSetDefaultLeaderboard:", {"arguments": {2: {"type": "Z"}}})
r(b"GKScore", b"shouldSetDefaultLeaderboard", {"retval": {"type": "Z"}})
r(
b"GKSession",
b"acceptConnectionFromPeer:error:",
{"retval": {"type": "Z"}, "arguments": {3: {"type_modifier": b"o"}}},
)
r(b"GKSession", b"isActive", {"retval": {"type": "Z"}})
r(b"GKSession", b"isAvailable", {"retval": {"type": "Z"}})
r(
b"GKSession",
b"sendData:toPeers:withDataMode:error:",
{"retval": {"type": "Z"}, "arguments": {5: {"type_modifier": b"o"}}},
)
r(
b"GKSession",
b"sendDataToAllPeers:withDataMode:error:",
{"retval": {"type": "Z"}, "arguments": {4: {"type_modifier": b"o"}}},
)
r(b"GKSession", b"setAvailable:", {"arguments": {2: {"type": "Z"}}})
r(b"GKSession", b"setIsActive:", {"arguments": {2: {"type": "Z"}}})
r(
b"GKTurnBasedExchange",
b"cancelWithLocalizableMessageKey:arguments:completionHandler:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedExchange",
b"replyWithLocalizableMessageKey:arguments:data:completionHandler:",
{
"arguments": {
5: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedExchange",
b"setShowExistingMatches:",
{"arguments": {2: {"type": "Z"}}},
)
r(b"GKTurnBasedExchange", b"showExistingMatches", {"retval": {"type": "Z"}})
r(
b"GKTurnBasedMatch",
b"acceptInviteWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"declineInviteWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"endMatchInTurnWithMatchData:completionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"endMatchInTurnWithMatchData:scores:achievements:completionHandler:",
{
"arguments": {
5: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"endTurnWithNextParticipant:matchData:completionHandler:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"endTurnWithNextParticipants:turnTimeout:matchData:completionHandler:",
{
"arguments": {
5: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"findMatchForRequest:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"loadMatchDataWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"loadMatchWithID:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"loadMatchesWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"participantQuitInTurnWithOutcome:nextParticipant:matchData:completionHandler:",
{
"arguments": {
5: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"participantQuitInTurnWithOutcome:nextParticipants:turnTimeout:matchData:completionHandler:",
{
"arguments": {
6: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"participantQuitOutOfTurnWithOutcome:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"rematchWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"removeWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"saveCurrentTurnWithMatchData:completionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"saveMergedMatchData:withResolvedExchanges:completionHandler:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"sendExchangeToParticipants:data:localizableMessageKey:arguments:timeout:completionHandler:",
{
"arguments": {
7: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatch",
b"sendReminderToParticipants:localizableMessageKey:arguments:completionHandler:",
{
"arguments": {
5: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"@"}},
},
"type": "@?",
}
}
},
)
r(
b"GKTurnBasedMatchmakerViewController",
b"setShowExistingMatches:",
{"arguments": {2: {"type": "Z"}}},
)
r(
b"GKTurnBasedMatchmakerViewController",
b"showExistingMatches",
{"retval": {"type": "Z"}},
)
r(b"GKVoiceChat", b"isActive", {"retval": {"type": "Z"}})
r(b"GKVoiceChat", b"isVoIPAllowed", {"retval": {"type": "Z"}})
r(
b"GKVoiceChat",
b"playerStateUpdateHandler",
{
"retval": {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": sel32or64(b"I", b"Q")},
},
},
"type": "@?",
}
},
)
r(
b"GKVoiceChat",
b"playerVoiceChatStateDidChangeHandler",
{
"retval": {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
},
)
r(b"GKVoiceChat", b"setActive:", {"arguments": {2: {"type": "Z"}}})
r(b"GKVoiceChat", b"setMute:forPlayer:", {"arguments": {2: {"type": "Z"}}})
r(b"GKVoiceChat", b"setPlayer:muted:", {"arguments": {3: {"type": "Z"}}})
r(
b"GKVoiceChat",
b"setPlayerStateUpdateHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": sel32or64(b"I", b"Q")},
},
},
"type": "@?",
}
}
},
)
r(
b"GKVoiceChat",
b"setPlayerVoiceChatStateDidChangeHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"type": "@?",
}
}
},
)
r(
b"NSObject",
b"handleTurnEventForMatch:didBecomeActive:",
{"arguments": {3: {"type": "Z"}}},
)
r(
b"NSObject",
b"match:player:didChangeConnectionState:",
{"arguments": {4: {"type": sel32or64(b"I", b"Q")}}},
)
r(
b"NSObject",
b"match:player:didChangeState:",
{"arguments": {4: {"type": sel32or64(b"I", b"Q")}}},
)
r(
b"NSObject",
b"match:shouldReinviteDisconnectedPlayer:",
{"retval": {"type": "Z"}},
)
r(b"NSObject", b"match:shouldReinvitePlayer:", {"retval": {"type": "Z"}})
r(
b"NSObject",
b"player:receivedTurnEventForMatch:didBecomeActive:",
{"arguments": {4: {"type": "Z"}}},
)
r(
b"NSObject",
b"session:peer:didChangeState:",
{"arguments": {4: {"type": sel32or64(b"I", b"Q")}}},
)
r(
b"NSObject",
b"shouldShowBannerForLocallyCompletedChallenge:",
{"retval": {"type": "Z"}},
)
r(
b"NSObject",
b"shouldShowBannerForLocallyReceivedChallenge:",
{"retval": {"type": "Z"}},
)
r(
b"NSObject",
b"shouldShowBannerForRemotelyCompletedChallenge:",
{"retval": {"type": "Z"}},
)
r(
b"NSObject",
b"voiceChatService:didReceiveInvitationFromParticipantID:callID:",
{"arguments": {4: {"type": sel32or64(b"I", b"Q")}}},
)
finally:
objc._updatingMetadata(False)
expressions = {}
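# Illustrative sketch, not part of the generated metadata: the block signatures
# registered above are what allow plain Python callables to be passed as
# completion handlers.  For example, given a GKLocalPlayer instance obtained
# elsewhere, the registration for loadFriendPlayersWithCompletionHandler:
# implies a call shaped like:
#
#   def handler(friends, error):
#       if error is None:
#           print("friends:", friends)
#
#   local_player.loadFriendPlayersWithCompletionHandler_(handler)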
# END OF FILE
| 31.755236 | 4,495 | 0.334131 |
724e57410213a3ff45d202a146ca68f3bbcdbeeb | 8,964 | py | Python | plotly/graph_objs/streamtube/_lighting.py | omridanan/plotly.py | a8d26670cba49ce15ce9b7639ae0f55a6088a825 | [
"MIT"
] | null | null | null | plotly/graph_objs/streamtube/_lighting.py | omridanan/plotly.py | a8d26670cba49ce15ce9b7639ae0f55a6088a825 | [
"MIT"
] | null | null | null | plotly/graph_objs/streamtube/_lighting.py | omridanan/plotly.py | a8d26670cba49ce15ce9b7639ae0f55a6088a825 | [
"MIT"
] | 1 | 2019-02-18T04:12:56.000Z | 2019-02-18T04:12:56.000Z | from plotly.basedatatypes import BaseTraceHierarchyType
import copy
class Lighting(BaseTraceHierarchyType):
# ambient
# -------
@property
def ambient(self):
"""
Ambient light increases overall color visibility but can wash
out the image.
The 'ambient' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self['ambient']
@ambient.setter
def ambient(self, val):
self['ambient'] = val
# diffuse
# -------
@property
def diffuse(self):
"""
Represents the extent that incident rays are reflected in a
range of angles.
The 'diffuse' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self['diffuse']
@diffuse.setter
def diffuse(self, val):
self['diffuse'] = val
# facenormalsepsilon
# ------------------
@property
def facenormalsepsilon(self):
"""
Epsilon for face normals calculation avoids math issues arising
from degenerate geometry.
The 'facenormalsepsilon' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self['facenormalsepsilon']
@facenormalsepsilon.setter
def facenormalsepsilon(self, val):
self['facenormalsepsilon'] = val
# fresnel
# -------
@property
def fresnel(self):
"""
Represents the reflectance as a dependency of the viewing
angle; e.g. paper is reflective when viewing it from the edge
of the paper (almost 90 degrees), causing shine.
The 'fresnel' property is a number and may be specified as:
- An int or float in the interval [0, 5]
Returns
-------
int|float
"""
return self['fresnel']
@fresnel.setter
def fresnel(self, val):
self['fresnel'] = val
# roughness
# ---------
@property
def roughness(self):
"""
Alters specular reflection; the rougher the surface, the wider
and less contrasty the shine.
The 'roughness' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self['roughness']
@roughness.setter
def roughness(self, val):
self['roughness'] = val
# specular
# --------
@property
def specular(self):
"""
Represents the level that incident rays are reflected in a
single direction, causing shine.
The 'specular' property is a number and may be specified as:
- An int or float in the interval [0, 2]
Returns
-------
int|float
"""
return self['specular']
@specular.setter
def specular(self, val):
self['specular'] = val
# vertexnormalsepsilon
# --------------------
@property
def vertexnormalsepsilon(self):
"""
Epsilon for vertex normals calculation avoids math issues
arising from degenerate geometry.
The 'vertexnormalsepsilon' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self['vertexnormalsepsilon']
@vertexnormalsepsilon.setter
def vertexnormalsepsilon(self, val):
self['vertexnormalsepsilon'] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return 'streamtube'
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
ambient
Ambient light increases overall color visibility but
can wash out the image.
diffuse
Represents the extent that incident rays are reflected
in a range of angles.
facenormalsepsilon
Epsilon for face normals calculation avoids math issues
arising from degenerate geometry.
fresnel
Represents the reflectance as a dependency of the
viewing angle; e.g. paper is reflective when viewing it
from the edge of the paper (almost 90 degrees), causing
shine.
roughness
Alters specular reflection; the rougher the surface,
the wider and less contrasty the shine.
specular
Represents the level that incident rays are reflected
in a single direction, causing shine.
vertexnormalsepsilon
Epsilon for vertex normals calculation avoids math
issues arising from degenerate geometry.
"""
def __init__(
self,
arg=None,
ambient=None,
diffuse=None,
facenormalsepsilon=None,
fresnel=None,
roughness=None,
specular=None,
vertexnormalsepsilon=None,
**kwargs
):
"""
Construct a new Lighting object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.streamtube.Lighting
ambient
Ambient light increases overall color visibility but
can wash out the image.
diffuse
Represents the extent that incident rays are reflected
in a range of angles.
facenormalsepsilon
Epsilon for face normals calculation avoids math issues
arising from degenerate geometry.
fresnel
Represents the reflectance as a dependency of the
viewing angle; e.g. paper is reflective when viewing it
from the edge of the paper (almost 90 degrees), causing
shine.
roughness
Alters specular reflection; the rougher the surface,
the wider and less contrasty the shine.
specular
Represents the level that incident rays are reflected
in a single direction, causing shine.
vertexnormalsepsilon
Epsilon for vertex normals calculation avoids math
issues arising from degenerate geometry.
Returns
-------
Lighting
"""
super(Lighting, self).__init__('lighting')
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.streamtube.Lighting
constructor must be a dict or
an instance of plotly.graph_objs.streamtube.Lighting"""
)
# Import validators
# -----------------
from plotly.validators.streamtube import (lighting as v_lighting)
# Initialize validators
# ---------------------
self._validators['ambient'] = v_lighting.AmbientValidator()
self._validators['diffuse'] = v_lighting.DiffuseValidator()
self._validators['facenormalsepsilon'
] = v_lighting.FacenormalsepsilonValidator()
self._validators['fresnel'] = v_lighting.FresnelValidator()
self._validators['roughness'] = v_lighting.RoughnessValidator()
self._validators['specular'] = v_lighting.SpecularValidator()
self._validators['vertexnormalsepsilon'
] = v_lighting.VertexnormalsepsilonValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop('ambient', None)
self.ambient = ambient if ambient is not None else _v
_v = arg.pop('diffuse', None)
self.diffuse = diffuse if diffuse is not None else _v
_v = arg.pop('facenormalsepsilon', None)
self.facenormalsepsilon = facenormalsepsilon if facenormalsepsilon is not None else _v
_v = arg.pop('fresnel', None)
self.fresnel = fresnel if fresnel is not None else _v
_v = arg.pop('roughness', None)
self.roughness = roughness if roughness is not None else _v
_v = arg.pop('specular', None)
self.specular = specular if specular is not None else _v
_v = arg.pop('vertexnormalsepsilon', None)
self.vertexnormalsepsilon = vertexnormalsepsilon if vertexnormalsepsilon is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
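# Illustrative usage (an editorial sketch, not part of the generated module):
# lighting settings like the ones defined above are normally attached to a
# plotly.graph_objs.Streamtube trace. The figure data below is made up.
#
# import plotly.graph_objs as go
# lighting = go.streamtube.Lighting(ambient=0.6, diffuse=0.8,
#                                   specular=0.3, roughness=0.4, fresnel=0.2)
# trace = go.Streamtube(x=[0, 0], y=[0, 1], z=[0, 0],
#                       u=[1, 1], v=[0, 0], w=[0, 0],
#                       lighting=lighting)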
| 30.593857 | 100 | 0.578425 |
fc4ecd0c62021a1f4f2ca4b352ce7e3923b2e1aa | 733 | py | Python | bihgpy/bounds.py | pedroramaciotti/BIHGPy | 6b5be54deb89cdbffa4e2bddf5f7c4553742ffa2 | [
"MIT"
] | null | null | null | bihgpy/bounds.py | pedroramaciotti/BIHGPy | 6b5be54deb89cdbffa4e2bddf5f7c4553742ffa2 | [
"MIT"
] | null | null | null | bihgpy/bounds.py | pedroramaciotti/BIHGPy | 6b5be54deb89cdbffa4e2bddf5f7c4553742ffa2 | [
"MIT"
] | null | null | null | import numpy as np
from scipy.special import comb
from scipy.special import beta
from scipy.special import gamma
from .check import initial_checks
from .posterior import K_posterior_distribution
def upper(N,n,k,alpha,a=1,b=1):
# initial check
N,n,_,k = initial_checks(N,n,N,k)
if k==n:
return 1.0;
# Computing posterior distribution
K_dom,K_img = K_posterior_distribution(N,n,k,a,b)
# naive bound
return K_dom[np.argmax(K_img.cumsum() > (1.0-alpha))];
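# Example (illustrative; the argument semantics are inferred from the code, so
# treat the concrete numbers as an assumption): an upper credible bound for a
# population of N=100 items given a sample of n=20 containing k=5 successes,
# at level alpha=0.05 with the default Beta(1, 1) prior.
#
# hi = upper(100, 20, 5, 0.05)
# lo = lower(100, 20, 5, 0.05)  # `lower`, defined below, gives the matching lower bound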
def lower(N,n,k,alpha,a=1,b=1):
# initial check
N,n,_,k = initial_checks(N,n,N,k)
if k==0:
return 0.0;
# Computing posterior distribution
K_dom,K_img = K_posterior_distribution(N,n,k,a,b)
# naive bound
return K_dom[np.argmax(K_img.cumsum() > (alpha))]; | 20.942857 | 55 | 0.720327 |
89e991cb5e9c57453e7520eaca2d62e35aa5eb6e | 1,509 | py | Python | test/countries/test_zimbabwe.py | hugovk/python-holidays | e22c667a159c959d81b512cc354910fc5c6653a9 | [
"MIT"
] | 48 | 2016-11-22T09:18:50.000Z | 2018-01-14T14:06:49.000Z | test/countries/test_zimbabwe.py | hugovk/python-holidays | e22c667a159c959d81b512cc354910fc5c6653a9 | [
"MIT"
] | 59 | 2016-12-03T15:52:36.000Z | 2018-01-16T09:37:15.000Z | test/countries/test_zimbabwe.py | hugovk/python-holidays | e22c667a159c959d81b512cc354910fc5c6653a9 | [
"MIT"
] | 51 | 2016-11-25T14:53:55.000Z | 2018-01-16T09:58:56.000Z | # -*- coding: utf-8 -*-
# python-holidays
# ---------------
# A fast, efficient Python library for generating country, province and state
# specific sets of holidays on the fly. It aims to make determining whether a
# specific date is a holiday as fast and flexible as possible.
#
# Authors: dr-prodigy <maurizio.montel@gmail.com> (c) 2017-2022
# ryanss <ryanssdev@icloud.com> (c) 2014-2017
# Website: https://github.com/dr-prodigy/python-holidays
# License: MIT (see LICENSE file)
import unittest
from datetime import date
import holidays
class TestZimbabwe(unittest.TestCase):
def setUp(self):
self.holidays = holidays.ZW()
def test_new_years(self):
self.assertIn(date(2010, 1, 1), self.holidays)
self.assertIn(date(2020, 1, 1), self.holidays)
self.assertNotIn(date(1986, 1, 2), self.holidays) # sunday
def test_observed(self):
self.assertIn(date(2017, 1, 2), self.holidays) # sunday
def test_easter(self):
self.assertIn(date(2017, 4, 14), self.holidays) # Good friday
self.assertIn(date(2017, 4, 15), self.holidays) # Easter Saturday
self.assertIn(date(2017, 4, 17), self.holidays) # Easter Monday
def test_not_holiday(self):
self.assertNotIn(date(2016, 1, 12), self.holidays)
self.assertNotIn(date(1999, 2, 3), self.holidays)
def test_youth_day(self):
self.assertIn(date(2019, 2, 21), self.holidays)
self.assertNotIn(date(2015, 2, 21), self.holidays)
| 33.533333 | 78 | 0.666004 |
0f88c805ee12efe80e9f249b7d2589f2cd4b6663 | 3,436 | py | Python | xblock_jupyter_viewer/xblock_jupyter_viewer.py | murat-polat/jupyter-edx-viewer-xblock | 6024a3c5b443934654882b0e9b11c50005e2ee44 | [
"BSD-3-Clause"
] | null | null | null | xblock_jupyter_viewer/xblock_jupyter_viewer.py | murat-polat/jupyter-edx-viewer-xblock | 6024a3c5b443934654882b0e9b11c50005e2ee44 | [
"BSD-3-Clause"
] | null | null | null | xblock_jupyter_viewer/xblock_jupyter_viewer.py | murat-polat/jupyter-edx-viewer-xblock | 6024a3c5b443934654882b0e9b11c50005e2ee44 | [
"BSD-3-Clause"
] | null | null | null | """Jupyter Notebook Viewer XBlock"""
import logging
import pkg_resources
import urllib.request
from urllib.parse import urlencode, quote_plus
from django.urls import reverse
from xblock.core import XBlock
from xblock.fields import Scope, String, Integer
from xblock.fragment import Fragment
from xblockutils.studio_editable import StudioEditableXBlockMixin
log = logging.getLogger(__name__)
class JupyterViewerXBlock(XBlock, StudioEditableXBlockMixin):
"""iframe used with endpoint to render full/section of jupyter notebook"""
display_name = String(
display_name="Display Name", default="Jupyter Notebook Viewer",
scope=Scope.settings,
help="Name of this XBlock"
)
jupyter_url = String(
help="URL to the .ipynb File",
scope=Scope.content,
display_name="Notebook URL",
default="http://path/to/file.ipynb"
)
image_url = String(
help="(Optional) Absolute URL to images root (http://.../)",
scope=Scope.content,
display_name="Image Root URL",
default=""
)
start_tag = String(
help="(Optional) Finds first occurrence of this text and renders notebook starting in this cell",
scope=Scope.content,
display_name="Start Tag",
default=""
)
end_tag = String(
help="(Optional) Finds first occurrence of this text and renders notebook up to this cell (not inclusive)",
scope=Scope.content,
display_name="End Tag",
default=""
)
xblock_height = Integer(
help="Height of this XBlock (px)",
scope=Scope.content,
display_name="Height",
default=500
)
editable_fields = ('display_name', 'jupyter_url', 'image_url', 'start_tag', 'end_tag', 'xblock_height')
def resource_string(self, path):
"""Handy helper for getting resources from our kit."""
data = pkg_resources.resource_string(__name__, path)
return data.decode("utf8")
def student_view(self, context=None):
base = reverse('xblock_jupyter_viewer:jupyter_nb_viewer') + "?{}"
# setup start/end tags
if self.start_tag != '':
base += "&{}".format(urlencode({'start': self.start_tag}))
if self.end_tag != '':
base += "&{}".format(urlencode({'end': self.end_tag}))
# Add Image root
base += "&{}".format(urlencode({'images_url': self.image_url}))
# setup full url and inject into template iframe
full_url = base.format(urlencode({'url': self.jupyter_url}))
log.debug("Full URL: {}".format(full_url))
base_html = self.resource_string('static/html/student_view.html')\
.format(self.xblock_height, full_url)
# add html and css
frag = Fragment(base_html)
# frag.add_css(self.resource_string('static/css/style.css'))
return frag
# TO-DO: change this to create the scenarios you'd like to see in the
# workbench while developing your XBlock.
@staticmethod
def workbench_scenarios():
"""A canned scenario for display in the workbench."""
return [
("MyXBlock",
"""<myxblock/>
"""),
("Multiple MyXBlock",
"""<vertical_demo>
<myxblock/>
<myxblock/>
<myxblock/>
</vertical_demo>
"""),
]
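# For illustration only: with the default field values above, student_view()
# builds an iframe URL of roughly this shape (the leading path depends on how
# 'xblock_jupyter_viewer:jupyter_nb_viewer' is routed in the host LMS, so the
# literal prefix here is an assumption):
#
#     /jupyter_nb_viewer/?url=http%3A%2F%2Fpath%2Fto%2Ffile.ipynb&images_url=
#
# The start/end query parameters are only appended when Start Tag / End Tag
# are set on the XBlock.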
| 31.236364 | 115 | 0.614668 |
92281349a1f329ad50d812be00bc3d594ae37624 | 25,687 | py | Python | docs/code/Coverage.py | vrthra/fuzzingbook | 15319dcd7c213559cfe992c2e5936dab52929658 | [
"MIT"
] | null | null | null | docs/code/Coverage.py | vrthra/fuzzingbook | 15319dcd7c213559cfe992c2e5936dab52929658 | [
"MIT"
] | null | null | null | docs/code/Coverage.py | vrthra/fuzzingbook | 15319dcd7c213559cfe992c2e5936dab52929658 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# "Code Coverage" - a chapter of "The Fuzzing Book"
# Web site: https://www.fuzzingbook.org/html/Coverage.html
# Last change: 2022-02-09 08:18:28+01:00
#
# Copyright (c) 2021 CISPA Helmholtz Center for Information Security
# Copyright (c) 2018-2020 Saarland University, authors, and contributors
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
r'''
The Fuzzing Book - Code Coverage
This file can be _executed_ as a script, running all experiments:
$ python Coverage.py
or _imported_ as a package, providing classes, functions, and constants:
>>> from fuzzingbook.Coverage import <identifier>
but before you do so, _read_ it and _interact_ with it at:
https://www.fuzzingbook.org/html/Coverage.html
This chapter introduces a `Coverage` class allowing you to measure coverage for Python programs. Within the context of this book, we use coverage information to guide fuzzing towards uncovered locations.
The typical usage of the `Coverage` class is in conjunction with a `with` clause:
>>> with Coverage() as cov:
>>> cgi_decode("a+b")
Printing out a coverage object shows the covered functions, with covered lines prefixed as `#`:
>>> print(cov)
1 def cgi_decode(s: str) -> str:
2 """Decode the CGI-encoded string `s`:
3 * replace '+' by ' '
4 * replace "%xx" by the character with hex number xx.
5 Return the decoded string. Raise `ValueError` for invalid inputs."""
6
7 # Mapping of hex digits to their integer values
# 8 hex_values = {
# 9 '0': 0, '1': 1, '2': 2, '3': 3, '4': 4,
# 10 '5': 5, '6': 6, '7': 7, '8': 8, '9': 9,
# 11 'a': 10, 'b': 11, 'c': 12, 'd': 13, 'e': 14, 'f': 15,
# 12 'A': 10, 'B': 11, 'C': 12, 'D': 13, 'E': 14, 'F': 15,
13 }
14
# 15 t = ""
# 16 i = 0
# 17 while i < len(s):
# 18 c = s[i]
# 19 if c == '+':
# 20 t += ' '
# 21 elif c == '%':
22 digit_high, digit_low = s[i + 1], s[i + 2]
23 i += 2
24 if digit_high in hex_values and digit_low in hex_values:
25 v = hex_values[digit_high] * 16 + hex_values[digit_low]
26 t += chr(v)
27 else:
28 raise ValueError("Invalid encoding")
29 else:
# 30 t += c
# 31 i += 1
# 32 return t
The `trace()` method returns the _trace_ – that is, the list of locations executed in order. Each location comes as a pair (`function name`, `line`).
>>> cov.trace()
[('cgi_decode', 9),
('cgi_decode', 10),
('cgi_decode', 11),
('cgi_decode', 12),
('cgi_decode', 8),
('cgi_decode', 15),
('cgi_decode', 16),
('cgi_decode', 17),
('cgi_decode', 18),
('cgi_decode', 19),
('cgi_decode', 21),
('cgi_decode', 30),
('cgi_decode', 31),
('cgi_decode', 17),
('cgi_decode', 18),
('cgi_decode', 19),
('cgi_decode', 20),
('cgi_decode', 31),
('cgi_decode', 17),
('cgi_decode', 18),
('cgi_decode', 19),
('cgi_decode', 21),
('cgi_decode', 30),
('cgi_decode', 31),
('cgi_decode', 17),
('cgi_decode', 32)]
The `coverage()` method returns the _coverage_, that is, the set of locations in the trace executed at least once:
>>> cov.coverage()
{('cgi_decode', 8),
('cgi_decode', 9),
('cgi_decode', 10),
('cgi_decode', 11),
('cgi_decode', 12),
('cgi_decode', 15),
('cgi_decode', 16),
('cgi_decode', 17),
('cgi_decode', 18),
('cgi_decode', 19),
('cgi_decode', 20),
('cgi_decode', 21),
('cgi_decode', 30),
('cgi_decode', 31),
('cgi_decode', 32)}
Coverage sets can be subject to set operations, such as _intersection_ (which locations are covered in multiple executions) and _difference_ (which locations are covered in run _a_, but not _b_).
The chapter also discusses how to obtain such coverage from C programs.
For more details, source, and documentation, see
"The Fuzzing Book - Code Coverage"
at https://www.fuzzingbook.org/html/Coverage.html
'''
# Allow to use 'from . import <module>' when run as script (cf. PEP 366)
if __name__ == '__main__' and __package__ is None:
__package__ = 'fuzzingbook'
# Code Coverage
# =============
if __name__ == '__main__':
print('# Code Coverage')
if __name__ == '__main__':
from .bookutils import YouTubeVideo
if __name__ == '__main__':
YouTubeVideo('2lfgI9KdARs')
## Synopsis
## --------
if __name__ == '__main__':
print('\n## Synopsis')
if __name__ == '__main__':
# We use the same fixed seed as the notebook to ensure consistency
import random
random.seed(2001)
from typing import Any, Optional, Callable, List, Type, Set, Tuple
## A CGI Decoder
## -------------
if __name__ == '__main__':
print('\n## A CGI Decoder')
def cgi_decode(s: str) -> str:
"""Decode the CGI-encoded string `s`:
* replace '+' by ' '
* replace "%xx" by the character with hex number xx.
Return the decoded string. Raise `ValueError` for invalid inputs."""
# Mapping of hex digits to their integer values
hex_values = {
'0': 0, '1': 1, '2': 2, '3': 3, '4': 4,
'5': 5, '6': 6, '7': 7, '8': 8, '9': 9,
'a': 10, 'b': 11, 'c': 12, 'd': 13, 'e': 14, 'f': 15,
'A': 10, 'B': 11, 'C': 12, 'D': 13, 'E': 14, 'F': 15,
}
t = ""
i = 0
while i < len(s):
c = s[i]
if c == '+':
t += ' '
elif c == '%':
digit_high, digit_low = s[i + 1], s[i + 2]
i += 2
if digit_high in hex_values and digit_low in hex_values:
v = hex_values[digit_high] * 16 + hex_values[digit_low]
t += chr(v)
else:
raise ValueError("Invalid encoding")
else:
t += c
i += 1
return t
if __name__ == '__main__':
cgi_decode("Hello+world")
## Black-Box Testing
## -----------------
if __name__ == '__main__':
print('\n## Black-Box Testing')
if __name__ == '__main__':
assert cgi_decode('+') == ' '
assert cgi_decode('%20') == ' '
assert cgi_decode('abc') == 'abc'
try:
cgi_decode('%?a')
assert False
except ValueError:
pass
## White-Box Testing
## -----------------
if __name__ == '__main__':
print('\n## White-Box Testing')
## Tracing Executions
## ------------------
if __name__ == '__main__':
print('\n## Tracing Executions')
if __name__ == '__main__':
cgi_decode("a+b")
from types import FrameType, TracebackType
if __name__ == '__main__':
coverage = []
def traceit(frame: FrameType, event: str, arg: Any) -> Optional[Callable]:
"""Trace program execution. To be passed to sys.settrace()."""
if event == 'line':
global coverage
function_name = frame.f_code.co_name
lineno = frame.f_lineno
coverage.append(lineno)
return traceit
import sys
def cgi_decode_traced(s: str) -> None:
global coverage
coverage = []
sys.settrace(traceit) # Turn on
cgi_decode(s)
sys.settrace(None) # Turn off
if __name__ == '__main__':
cgi_decode_traced("a+b")
print(coverage)
import inspect
if __name__ == '__main__':
cgi_decode_code = inspect.getsource(cgi_decode)
from .bookutils import print_content, print_file
if __name__ == '__main__':
print_content(cgi_decode_code[:300] + "...", ".py")
if __name__ == '__main__':
cgi_decode_lines = [""] + cgi_decode_code.splitlines()
if __name__ == '__main__':
cgi_decode_lines[1]
if __name__ == '__main__':
cgi_decode_lines[9:13]
if __name__ == '__main__':
cgi_decode_lines[15]
if __name__ == '__main__':
covered_lines = set(coverage)
print(covered_lines)
if __name__ == '__main__':
for lineno in range(1, len(cgi_decode_lines)):
if lineno not in covered_lines:
print("# ", end="")
else:
print(" ", end="")
print("%2d " % lineno, end="")
print_content(cgi_decode_lines[lineno], '.py')
print()
## A Coverage Class
## ----------------
if __name__ == '__main__':
print('\n## A Coverage Class')
Location = Tuple[str, int]
class Coverage:
"""Track coverage within a `with` block. Use as
```
with Coverage() as cov:
function_to_be_traced()
c = cov.coverage()
```
"""
def __init__(self) -> None:
"""Constructor"""
self._trace: List[Location] = []
# Trace function
def traceit(self, frame: FrameType, event: str, arg: Any) -> Optional[Callable]:
"""Tracing function. To be overloaded in subclasses."""
if self.original_trace_function is not None:
self.original_trace_function(frame, event, arg)
if event == "line":
function_name = frame.f_code.co_name
lineno = frame.f_lineno
if function_name != '__exit__': # avoid tracing ourselves:
self._trace.append((function_name, lineno))
return self.traceit
def __enter__(self) -> Any:
"""Start of `with` block. Turn on tracing."""
self.original_trace_function = sys.gettrace()
sys.settrace(self.traceit)
return self
def __exit__(self, exc_type: Type, exc_value: BaseException,
tb: TracebackType) -> Optional[bool]:
"""End of `with` block. Turn off tracing."""
sys.settrace(self.original_trace_function)
return None # default: pass all exceptions
def trace(self) -> List[Location]:
"""The list of executed lines, as (function_name, line_number) pairs"""
return self._trace
def coverage(self) -> Set[Location]:
"""The set of executed lines, as (function_name, line_number) pairs"""
return set(self.trace())
def function_names(self) -> Set[str]:
"""The set of function names seen"""
return set(function_name for (function_name, line_number) in self.coverage())
def __repr__(self) -> str:
"""Return a string representation of this object.
Show covered (and uncovered) program code"""
t = ""
for function_name in self.function_names():
# Similar code as in the example above
try:
fun = eval(function_name)
except Exception as exc:
t += f"Skipping {function_name}: {exc}"
continue
source_lines, start_line_number = inspect.getsourcelines(fun)
for lineno in range(start_line_number, start_line_number + len(source_lines)):
if (function_name, lineno) in self.trace():
t += "# "
else:
t += " "
t += "%2d " % lineno
t += source_lines[lineno - start_line_number]
return t
if __name__ == '__main__':
with Coverage() as cov:
cgi_decode("a+b")
print(cov.coverage())
if __name__ == '__main__':
print(cov)
## Comparing Coverage
## ------------------
if __name__ == '__main__':
print('\n## Comparing Coverage')
if __name__ == '__main__':
with Coverage() as cov_plus:
cgi_decode("a+b")
with Coverage() as cov_standard:
cgi_decode("abc")
cov_plus.coverage() - cov_standard.coverage()
if __name__ == '__main__':
with Coverage() as cov_max:
cgi_decode('+')
cgi_decode('%20')
cgi_decode('abc')
try:
cgi_decode('%?a')
except Exception:
pass
if __name__ == '__main__':
cov_max.coverage() - cov_plus.coverage()
## Coverage of Basic Fuzzing
## --------------------------
if __name__ == '__main__':
print('\n## Coverage of Basic Fuzzing')
from .Fuzzer import fuzzer
if __name__ == '__main__':
sample = fuzzer()
sample
if __name__ == '__main__':
with Coverage() as cov_fuzz:
try:
cgi_decode(sample)
except:
pass
cov_fuzz.coverage()
if __name__ == '__main__':
cov_max.coverage() - cov_fuzz.coverage()
if __name__ == '__main__':
trials = 100
def population_coverage(population: List[str], function: Callable) \
-> Tuple[Set[Location], List[int]]:
cumulative_coverage: List[int] = []
all_coverage: Set[Location] = set()
for s in population:
with Coverage() as cov:
try:
function(s)
except:
pass
all_coverage |= cov.coverage()
cumulative_coverage.append(len(all_coverage))
return all_coverage, cumulative_coverage
def hundred_inputs() -> List[str]:
population = []
for i in range(trials):
population.append(fuzzer())
return population
if __name__ == '__main__':
all_coverage, cumulative_coverage = \
population_coverage(hundred_inputs(), cgi_decode)
# %matplotlib inline
if __name__ == '__main__':
import matplotlib.pyplot as plt # type: ignore
if __name__ == '__main__':
plt.plot(cumulative_coverage)
plt.title('Coverage of cgi_decode() with random inputs')
plt.xlabel('# of inputs')
plt.ylabel('lines covered')
if __name__ == '__main__':
runs = 100
# Create an array with TRIALS elements, all zero
sum_coverage = [0] * trials
for run in range(runs):
all_coverage, coverage = population_coverage(hundred_inputs(), cgi_decode)
assert len(coverage) == trials
for i in range(trials):
sum_coverage[i] += coverage[i]
average_coverage = []
for i in range(trials):
average_coverage.append(sum_coverage[i] / runs)
if __name__ == '__main__':
plt.plot(average_coverage)
plt.title('Average coverage of cgi_decode() with random inputs')
plt.xlabel('# of inputs')
plt.ylabel('lines covered')
## Getting Coverage from External Programs
## ---------------------------------------
if __name__ == '__main__':
print('\n## Getting Coverage from External Programs')
if __name__ == '__main__':
cgi_c_code = """
/* CGI decoding as C program */
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
"""
if __name__ == '__main__':
cgi_c_code += r"""
int hex_values[256];
void init_hex_values() {
for (int i = 0; i < sizeof(hex_values) / sizeof(int); i++) {
hex_values[i] = -1;
}
hex_values['0'] = 0; hex_values['1'] = 1; hex_values['2'] = 2; hex_values['3'] = 3;
hex_values['4'] = 4; hex_values['5'] = 5; hex_values['6'] = 6; hex_values['7'] = 7;
hex_values['8'] = 8; hex_values['9'] = 9;
hex_values['a'] = 10; hex_values['b'] = 11; hex_values['c'] = 12; hex_values['d'] = 13;
hex_values['e'] = 14; hex_values['f'] = 15;
hex_values['A'] = 10; hex_values['B'] = 11; hex_values['C'] = 12; hex_values['D'] = 13;
hex_values['E'] = 14; hex_values['F'] = 15;
}
"""
if __name__ == '__main__':
cgi_c_code += r"""
int cgi_decode(char *s, char *t) {
while (*s != '\0') {
if (*s == '+')
*t++ = ' ';
else if (*s == '%') {
int digit_high = *++s;
int digit_low = *++s;
if (hex_values[digit_high] >= 0 && hex_values[digit_low] >= 0) {
*t++ = hex_values[digit_high] * 16 + hex_values[digit_low];
}
else
return -1;
}
else
*t++ = *s;
s++;
}
*t = '\0';
return 0;
}
"""
if __name__ == '__main__':
cgi_c_code += r"""
int main(int argc, char *argv[]) {
init_hex_values();
if (argc >= 2) {
char *s = argv[1];
char *t = malloc(strlen(s) + 1); /* output is at most as long as input */
int ret = cgi_decode(s, t);
printf("%s\n", t);
return ret;
}
else
{
printf("cgi_decode: usage: cgi_decode STRING\n");
return 1;
}
}
"""
if __name__ == '__main__':
with open("cgi_decode.c", "w") as f:
f.write(cgi_c_code)
from .bookutils import print_file
if __name__ == '__main__':
print_file("cgi_decode.c")
if __name__ == '__main__':
import os
os.system(f'cc --coverage -o cgi_decode cgi_decode.c')
if __name__ == '__main__':
import os
os.system(f"./cgi_decode 'Send+mail+to+me%40fuzzingbook.org'")
if __name__ == '__main__':
import os
os.system(f'gcov cgi_decode.c')
if __name__ == '__main__':
lines = open('cgi_decode.c.gcov').readlines()
for i in range(30, 50):
print(lines[i], end='')
def read_gcov_coverage(c_file):
gcov_file = c_file + ".gcov"
coverage = set()
with open(gcov_file) as file:
for line in file.readlines():
elems = line.split(':')
covered = elems[0].strip()
line_number = int(elems[1].strip())
if covered.startswith('-') or covered.startswith('#'):
continue
coverage.add((c_file, line_number))
return coverage
if __name__ == '__main__':
coverage = read_gcov_coverage('cgi_decode.c')
if __name__ == '__main__':
list(coverage)[:5]
## Finding Errors with Basic Fuzzing
## ---------------------------------
if __name__ == '__main__':
print('\n## Finding Errors with Basic Fuzzing')
from .ExpectError import ExpectError
if __name__ == '__main__':
with ExpectError():
for i in range(trials):
try:
s = fuzzer()
cgi_decode(s)
except ValueError:
pass
if __name__ == '__main__':
s
## Synopsis
## --------
if __name__ == '__main__':
print('\n## Synopsis')
if __name__ == '__main__':
with Coverage() as cov:
cgi_decode("a+b")
if __name__ == '__main__':
print(cov)
if __name__ == '__main__':
cov.trace()
if __name__ == '__main__':
cov.coverage()
from .ClassDiagram import display_class_hierarchy
if __name__ == '__main__':
display_class_hierarchy(Coverage,
public_methods=[
Coverage.__init__,
Coverage.__enter__,
Coverage.__exit__,
Coverage.coverage,
Coverage.trace,
Coverage.function_names,
Coverage.__repr__,
],
types={'Location': Location},
project='fuzzingbook')
## Lessons Learned
## ---------------
if __name__ == '__main__':
print('\n## Lessons Learned')
import os
import glob
if __name__ == '__main__':
for file in glob.glob("cgi_decode") + glob.glob("cgi_decode.*"):
os.remove(file)
## Next Steps
## ----------
if __name__ == '__main__':
print('\n## Next Steps')
## Background
## ----------
if __name__ == '__main__':
print('\n## Background')
## Exercises
## ---------
if __name__ == '__main__':
print('\n## Exercises')
### Exercise 1: Fixing `cgi_decode()`
if __name__ == '__main__':
print('\n### Exercise 1: Fixing `cgi_decode()`')
if __name__ == '__main__':
with ExpectError():
assert cgi_decode('%') == '%'
if __name__ == '__main__':
with ExpectError():
assert cgi_decode('%4') == '%4'
if __name__ == '__main__':
assert cgi_decode('%40') == '@'
def fixed_cgi_decode(s):
"""Decode the CGI-encoded string `s`:
* replace "+" by " "
* replace "%xx" by the character with hex number xx.
Return the decoded string. Raise `ValueError` for invalid inputs."""
# Mapping of hex digits to their integer values
hex_values = {
'0': 0, '1': 1, '2': 2, '3': 3, '4': 4,
'5': 5, '6': 6, '7': 7, '8': 8, '9': 9,
'a': 10, 'b': 11, 'c': 12, 'd': 13, 'e': 14, 'f': 15,
'A': 10, 'B': 11, 'C': 12, 'D': 13, 'E': 14, 'F': 15,
}
t = ""
i = 0
while i < len(s):
c = s[i]
if c == '+':
t += ' '
elif c == '%' and i + 2 < len(s): # <--- *** FIX ***
digit_high, digit_low = s[i + 1], s[i + 2]
i += 2
if digit_high in hex_values and digit_low in hex_values:
v = hex_values[digit_high] * 16 + hex_values[digit_low]
t += chr(v)
else:
raise ValueError("Invalid encoding")
else:
t += c
i += 1
return t
if __name__ == '__main__':
assert fixed_cgi_decode('%') == '%'
if __name__ == '__main__':
assert fixed_cgi_decode('%4') == '%4'
if __name__ == '__main__':
assert fixed_cgi_decode('%40') == '@'
if __name__ == '__main__':
for i in range(trials):
try:
s = fuzzer()
fixed_cgi_decode(s)
except ValueError:
pass
if __name__ == '__main__':
cgi_c_code = cgi_c_code.replace(
r"if (*s == '%')", # old code
r"if (*s == '%' && s[1] != '\0' && s[2] != '\0')" # new code
)
### Exercise 2: Branch Coverage
if __name__ == '__main__':
print('\n### Exercise 2: Branch Coverage')
if __name__ == '__main__':
with Coverage() as cov:
cgi_decode("a+b")
trace = cov.trace()
trace[:5]
#### Part 1: Compute branch coverage
if __name__ == '__main__':
print('\n#### Part 1: Compute branch coverage')
def branch_coverage(trace):
coverage = set()
past_line = None
for line in trace:
if past_line is not None:
coverage.add((past_line, line))
past_line = line
return coverage
if __name__ == '__main__':
branch_coverage(trace)
class BranchCoverage(Coverage):
def coverage(self):
"""The set of executed line pairs"""
coverage = set()
past_line = None
for line in self.trace():
if past_line is not None:
coverage.add((past_line, line))
past_line = line
return coverage
#### Part 2: Comparing statement coverage and branch coverage
if __name__ == '__main__':
print('\n#### Part 2: Comparing statement coverage and branch coverage')
if __name__ == '__main__':
with BranchCoverage() as cov:
cgi_decode("a+b")
print(cov.coverage())
if __name__ == '__main__':
with BranchCoverage() as cov_plus:
cgi_decode("a+b")
with BranchCoverage() as cov_standard:
cgi_decode("abc")
cov_plus.coverage() - cov_standard.coverage()
if __name__ == '__main__':
with BranchCoverage() as cov_max:
cgi_decode('+')
cgi_decode('%20')
cgi_decode('abc')
try:
cgi_decode('%?a')
except:
pass
if __name__ == '__main__':
cov_max.coverage() - cov_plus.coverage()
if __name__ == '__main__':
sample
if __name__ == '__main__':
with BranchCoverage() as cov_fuzz:
try:
cgi_decode(s)
except:
pass
cov_fuzz.coverage()
if __name__ == '__main__':
cov_max.coverage() - cov_fuzz.coverage()
def population_branch_coverage(population, function):
cumulative_coverage = []
all_coverage = set()
for s in population:
with BranchCoverage() as cov:
try:
function(s)
except Exception:
pass
all_coverage |= cov.coverage()
cumulative_coverage.append(len(all_coverage))
return all_coverage, cumulative_coverage
if __name__ == '__main__':
all_branch_coverage, cumulative_branch_coverage = population_branch_coverage(
hundred_inputs(), cgi_decode)
if __name__ == '__main__':
plt.plot(cumulative_branch_coverage)
plt.title('Branch coverage of cgi_decode() with random inputs')
plt.xlabel('# of inputs')
plt.ylabel('line pairs covered')
if __name__ == '__main__':
len(cov_max.coverage())
if __name__ == '__main__':
all_branch_coverage - cov_max.coverage()
if __name__ == '__main__':
cov_max.coverage() - all_branch_coverage
#### Part 3: Average coverage
if __name__ == '__main__':
print('\n#### Part 3: Average coverage')
if __name__ == '__main__':
runs = 100
# Create an array with TRIALS elements, all zero
sum_coverage = [0] * trials
for run in range(runs):
all_branch_coverage, coverage = population_branch_coverage(
hundred_inputs(), cgi_decode)
assert len(coverage) == trials
for i in range(trials):
sum_coverage[i] += coverage[i]
average_coverage = []
for i in range(trials):
average_coverage.append(sum_coverage[i] / runs)
if __name__ == '__main__':
plt.plot(average_coverage)
plt.title('Average branch coverage of cgi_decode() with random inputs')
plt.xlabel('# of inputs')
plt.ylabel('line pairs covered')
| 25.868077 | 203 | 0.578697 |
36d044de04f38e88ac67805b1c5c7389fabee585 | 4,203 | py | Python | pyrobolearn/worlds/samples/sports/billiard.py | Pandinosaurus/pyrobolearn | 9cd7c060723fda7d2779fa255ac998c2c82b8436 | [
"Apache-2.0"
] | 2 | 2021-01-21T21:08:30.000Z | 2022-03-29T16:45:49.000Z | pyrobolearn/worlds/samples/sports/billiard.py | Pandinosaurus/pyrobolearn | 9cd7c060723fda7d2779fa255ac998c2c82b8436 | [
"Apache-2.0"
] | null | null | null | pyrobolearn/worlds/samples/sports/billiard.py | Pandinosaurus/pyrobolearn | 9cd7c060723fda7d2779fa255ac998c2c82b8436 | [
"Apache-2.0"
] | 1 | 2020-09-29T21:25:39.000Z | 2020-09-29T21:25:39.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
r"""Provide the billiard world.
"""
import os
import numpy as np
from pyrobolearn.worlds import BasicWorld
__author__ = "Brian Delhaisse"
__copyright__ = "Copyright 2019, PyRoboLearn"
__credits__ = ["Brian Delhaisse"]
__license__ = "GNU GPLv3"
__version__ = "1.0.0"
__maintainer__ = "Brian Delhaisse"
__email__ = "briandelhaisse@gmail.com"
__status__ = "Development"
# TODO: finish to implement the world, create corresponding environment (in `envs` folder) with state and reward.
class BilliardWorld(BasicWorld):
r"""Billiard world
"""
def __init__(self, simulator, position=(0., 0., 0.5), scale=(1., 1., 1.)):
"""
Initialize the Billiard world.
Args:
simulator (Simulator): the simulator instance.
position (tuple/list of 3 float, np.array[3]): position of the billiard table.
scale (tuple/list of 3 float): scale of the billiard table.
"""
super(BilliardWorld, self).__init__(simulator)
mesh_path = os.path.dirname(os.path.abspath(__file__)) + '/../../meshes/sports/billiards/'
position = np.asarray(position)
# load table
table = self.load_mesh(mesh_path + 'table_without_cloth.obj', position=position, scale=scale, mass=0,
color=(133/255., 94/255., 66/255., 1), flags=1)
table_cloth = self.load_mesh(mesh_path + 'table_cloth.obj', position=position, scale=scale, mass=0,
color=(0.039, 0.424, 0.012, 1), flags=0) # color=(0.21, 0.35, 0.29, 1)
# table = self.load_mesh(mesh_path + 'table.obj', position=position, scale=(1., 1., 1.), mass=0, flags=1)
# load cue
self.cue1 = self.load_mesh(mesh_path + 'cue.obj', position=position + np.array([-0.5, 0.4, 0.4]), mass=0.595,
scale=(1., 1., 1.), flags=0, return_body=True)
# load balls
# the order is based on: https://www.wikihow.com/Rack-a-Pool-Table
balls = [1, 9, 2, 10, 8, 3, 11, 7, 14, 4, 5, 13, 15, 6, 12]
z = 0.785 # height
r = 0.028575 # radius
d = 2*r # diameter
x, y = 0.6, 0. # x, y positions
depth = 0 # depth in the triangle when racking the balls
b = 0 # use to count the number of ball at a particular level in the triangle
self.balls = []
self.removed_balls = []
# load white ball
ball = self.load_mesh(mesh_path + 'ball_0.obj', position=(-x, 0, z), mass=0.170, flags=0, return_body=True)
self.balls.append(ball)
# load color balls
for ball_id in balls:
pos = (x + depth*d, y - depth * (r + 0.001) + b * (d + 0.001 * 2), z)
ball = self.load_mesh(mesh_path + 'ball_' + str(ball_id) + '.obj', position=pos, mass=0.170, flags=0,
return_body=True)
b += 1
if depth == (b-1):
b = 0
depth += 1
self.balls.append(ball)
def reset(self, world_state=None):
# reset the billiard
super(BilliardWorld, self).reset(world_state)
def step(self, sleep_dt=None):
# TODO: check whether a ball has entered a pocket by checking its position;
# if it is the cue (white) ball, put it back on the table
# call the parent step
super(BilliardWorld, self).step(sleep_dt=sleep_dt)
# Test
if __name__ == '__main__':
from itertools import count
import pyrobolearn as prl
# create simulator
sim = prl.simulators.Bullet()
# create world
world = BilliardWorld(sim)
# create manipulator
robot = world.load_robot('kuka_iiwa', position=[-2., 0.2, 0.])
# attach cue to robot end effector
# Note that you can detach the cue from the robot end effector using `world.detach`
world.attach(body1=robot, body2=world.cue1, link1=robot.end_effectors[0], link2=-1, joint_axis=[0., 0., 0.],
parent_frame_position=[-0., 0., 0.02], child_frame_position=[0., 0., 0.],
parent_frame_orientation=[0, 0., 0., 1.])
# run simulation
for t in count():
world.step(sim.dt)
| 35.618644 | 117 | 0.585534 |
738b73ab921efcf9eacedafaa621d2793eadf9d7 | 11,817 | py | Python | torchdyn/models/galerkin.py | mirams/torchdyn | 32515299e7fa731c28d5384822b4bfca9f81fba7 | [
"Apache-2.0"
] | 1 | 2020-08-20T08:46:38.000Z | 2020-08-20T08:46:38.000Z | torchdyn/models/galerkin.py | mirams/torchdyn | 32515299e7fa731c28d5384822b4bfca9f81fba7 | [
"Apache-2.0"
] | 2 | 2020-10-07T23:21:11.000Z | 2020-10-08T07:10:46.000Z | torchdyn/models/galerkin.py | mirams/torchdyn | 32515299e7fa731c28d5384822b4bfca9f81fba7 | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import torch
import torch.nn as nn
import numpy as np
class GaussianRBF(nn.Module):
"""Eigenbasis expansion using gaussian radial basis functions. $phi(r) = e^{-(\eps r)^2}$ with $r := || x - x0 ||_2$"
:param deg: degree of the eigenbasis expansion
:type deg: int
:param adaptive: whether to adjust `centers` and `eps_scales` during training.
:type adaptive: bool
:param eps_scales: scaling in the rbf formula ($\eps$)
:type eps_scales: int
:param centers: centers of the radial basis functions (one per degree). Same center across all degrees. x0 in the radius formulas
:type centers: int
"""
def __init__(self, deg, adaptive=False, eps_scales=2, centers=0):
super().__init__()
self.deg, self.n_eig = deg, 1
if adaptive:
self.centers = torch.nn.Parameter(centers*torch.ones(deg+1))
self.eps_scales = torch.nn.Parameter(eps_scales*torch.ones((deg+1)))
else:
self.centers = 0; self.eps_scales = 2
def forward(self, n_range, s):
n_range_scaled = (n_range - self.centers) / self.eps_scales
r = torch.norm(s - self.centers, p=2)
basis = [math.e**(-(r*n_range_scaled)**2)]
return basis
class VanillaRBF(nn.Module):
"""Eigenbasis expansion using vanilla radial basis functions."
:param deg: degree of the eigenbasis expansion
:type deg: int
:param adaptive: whether to adjust `centers` and `eps_scales` during training.
:type adaptive: bool
:param eps_scales: scaling in the rbf formula ($\eps$)
:type eps_scales: int
:param centers: centers of the radial basis functions (one per degree). Same center across all degrees. x0 in the radius formulas
:type centers: int
"""
def __init__(self, deg, adaptive=False, eps_scales=2, centers=0):
super().__init__()
self.deg, self.n_eig = deg, 1
if adaptive:
self.centers = torch.nn.Parameter(centers*torch.ones(deg+1))
self.eps_scales = torch.nn.Parameter(eps_scales*torch.ones((deg+1)))
else:
self.centers = 0; self.eps_scales = 2
def forward(self, n_range, s):
n_range_scaled = n_range / self.eps_scales
r = torch.norm(s - self.centers, p=2)
basis = [r*n_range_scaled]
return basis
class MultiquadRBF(nn.Module):
"""Eigenbasis expansion using multiquadratic radial basis functions."
:param deg: degree of the eigenbasis expansion
:type deg: int
:param adaptive: whether to adjust `centers` and `eps_scales` during training.
:type adaptive: bool
:param eps_scales: scaling in the rbf formula ($\eps$)
:type eps_scales: int
:param centers: centers of the radial basis functions (one per degree). Same center across all degrees. x0 in the radius formulas
:type centers: int
"""
def __init__(self, deg, adaptive=False, eps_scales=2, centers=0):
super().__init__()
self.deg, self.n_eig = deg, 1
if adaptive:
self.centers = torch.nn.Parameter(centers*torch.ones(deg+1))
self.eps_scales = torch.nn.Parameter(eps_scales*torch.ones((deg+1)))
else:
self.centers = 0; self.eps_scales = 2
def forward(self, n_range, s):
n_range_scaled = n_range / self.eps_scales
r = torch.norm(s - self.centers, p=2)
basis = [1 + torch.sqrt(1+ (r*n_range_scaled)**2)]
return basis
class Fourier(nn.Module):
"""Eigenbasis expansion using fourier functions."
:param deg: degree of the eigenbasis expansion
:type deg: int
:param adaptive: does nothing (for now)
:type adaptive: bool
"""
def __init__(self, deg, adaptive=False):
super().__init__()
self.deg, self.n_eig = deg, 2
def forward(self, n_range, s):
s_n_range = s*n_range
basis = [torch.cos(s_n_range), torch.sin(s_n_range)]
return basis
class Polynomial(nn.Module):
"""Eigenbasis expansion using polynomials."
:param deg: degree of the eigenbasis expansion
:type deg: int
:param adaptive: does nothing (for now)
:type adaptive: bool
"""
def __init__(self, deg, adaptive=False):
super().__init__()
self.deg, self.n_eig = deg, 1
def forward(self, n_range, s):
basis = [s**n_range]
return basis
class Chebychev(nn.Module):
"""Eigenbasis expansion using chebychev polynomials."
:param deg: degree of the eigenbasis expansion
:type deg: int
:param adaptive: does nothing (for now)
:type adaptive: bool
"""
def __init__(self, deg, adaptive=False):
super().__init__()
self.deg, self.n_eig = deg, 1
def forward(self, n_range, s):
max_order = n_range[-1].int().item()
basis = [1]
# Based on numpy's Cheb code
if max_order > 0:
s2 = 2*s
basis += [s.item()]
for i in range(2, max_order):
basis += [basis[-1]*s2 - basis[-2]]
return [torch.tensor(basis).to(n_range)]
class GalLayer(nn.Module):
"""Galerkin layer template. Introduced in https://arxiv.org/abs/2002.08071"""
def __init__(self, bias=True, basisfunc=Fourier(5), dilation=True, shift=True):
super().__init__()
self.dilation = torch.ones(1) if not dilation else nn.Parameter(data=torch.ones(1), requires_grad=True)
self.shift = torch.zeros(1) if not shift else nn.Parameter(data=torch.zeros(1), requires_grad=True)
self.basisfunc = basisfunc
self.n_eig = n_eig = self.basisfunc.n_eig
self.deg = deg = self.basisfunc.deg
def reset_parameters(self):
torch.nn.init.zeros_(self.coeffs)
def calculate_weights(self, s):
"Expands `s` following the chosen eigenbasis"
n_range = torch.linspace(0, self.deg, self.deg).to(self.coeffs.device)
basis = self.basisfunc(n_range, s*self.dilation.to(self.coeffs.device) + self.shift.to(self.coeffs.device))
B = []
for i in range(self.n_eig):
Bin = torch.eye(self.deg).to(self.coeffs.device)
Bin[range(self.deg), range(self.deg)] = basis[i]
B.append(Bin)
B = torch.cat(B, 1).to(self.coeffs.device)
coeffs = torch.cat([self.coeffs[:,:,i] for i in range(self.n_eig)],1).transpose(0,1).to(self.coeffs.device)
X = torch.matmul(B, coeffs)
return X.sum(0)
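# Reading of the code above (editorial note, not taken from the paper): for a
# depth value s, every scalar parameter theta_j(s) is expanded in the chosen
# eigenbasis as theta_j(s) ~ sum_k sum_e coeffs[j, k, e] * phi_{k, e}(s),
# where the phi_{k, e} are the `n_eig` basis functions returned by
# `basisfunc` (e.g. cos(k s) and sin(k s) for `Fourier`). `calculate_weights`
# evaluates that sum and returns the flattened parameter vector, which the
# concrete layers below reshape into weights and biases.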
class GalLinear(GalLayer):
"""Linear Galerkin layer for depth--variant neural differential equations. Introduced in https://arxiv.org/abs/2002.08071
:param in_features: input dimensions
:type in_features: int
:param out_features: output dimensions
:type out_features: int
:param bias: include bias parameter vector in the layer computation
:type bias: bool
:param basisfunc: eigenfunction expansion to use, passed as an instance, e.g. Fourier(5), Polynomial(5), Chebychev(5), VanillaRBF(5), MultiquadRBF(5) or GaussianRBF(5).
:type basisfunc: nn.Module
:param dilation: whether to optimize for `dilation` parameter. Allows the GalLayer to dilate the eigenfunction period.
:type dilation: bool
:param shift: whether to optimize for `shift` parameter. Allows the GalLayer to shift the eigenfunction period.
:type shift: bool
"""
def __init__(self, in_features, out_features, bias=True, basisfunc=Fourier(5), dilation=True, shift=True):
super().__init__(bias, basisfunc, dilation, shift)
self.in_features, self.out_features = in_features, out_features
self.weight = torch.Tensor(out_features, in_features)
if bias:
self.bias = torch.Tensor(out_features)
else:
self.register_parameter('bias', None)
self.coeffs = torch.nn.Parameter(torch.Tensor((in_features+1)*out_features, self.deg, self.n_eig))
self.reset_parameters()
def forward(self, input):
# For the moment, GalLayers rely on DepthCat to access the `s` variable. A better design would free the user
# of having to introduce DepthCat(1) every time a GalLayer is used
s = input[-1,-1]
input = input[:,:-1]
w = self.calculate_weights(s)
self.weight = w[0:self.in_features*self.out_features].reshape(self.out_features, self.in_features)
self.bias = w[self.in_features*self.out_features:(self.in_features+1)*self.out_features].reshape(self.out_features)
return torch.nn.functional.linear(input, self.weight, self.bias)
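# Minimal usage sketch (assumptions: `DepthCat` is importable from torchdyn and
# concatenates the depth variable s to the state, as the comment in forward()
# expects; the import path below may differ between torchdyn versions):
#
# from torchdyn.nn import DepthCat
# vector_field = nn.Sequential(DepthCat(1),
#                              GalLinear(2, 16, basisfunc=Fourier(5)),
#                              nn.Tanh(),
#                              nn.Linear(16, 2))
# # `vector_field` can then be wrapped in a NeuralDE/NeuralODE model.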
class GalConv2d(GalLayer):
"""2D convolutional Galerkin layer for depth--variant neural differential equations. Introduced in https://arxiv.org/abs/2002.08071
:param in_channels: number of channels in the input image
:type in_channels: int
:param out_channels: number of channels produced by the convolution
:type out_channels: int
:param kernel_size: size of the convolving kernel
:type kernel_size: int
:param stride: stride of the convolution. Default: 1
:type stride: int
:param padding: zero-padding added to both sides of the input. Default: 0
:type padding: int
:param bias: include bias parameter vector in the layer computation
:type bias: bool
:param basisfunc: eigenfunction expansion to use, passed as an instance, e.g. Fourier(5), Polynomial(5), Chebychev(5), VanillaRBF(5), MultiquadRBF(5) or GaussianRBF(5).
:type basisfunc: nn.Module
:param dilation: whether to optimize for `dilation` parameter. Allows the GalLayer to dilate the eigenfunction period.
:type dilation: bool
:param shift: whether to optimize for `shift` parameter. Allows the GalLayer to shift the eigenfunction period.
:type shift: bool
"""
__constants__ = ['bias', 'in_channels', 'out_channels', 'kernel_size', 'stride', 'padding', 'deg']
def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=0, bias=True,
basisfunc=Fourier(5), dilation=True, shift=True):
super().__init__(bias, basisfunc, dilation, shift)
self.ic, self.oc, self.ks = in_channels, out_channels, kernel_size
self.pad, self.stride = padding, stride
self.weight = torch.Tensor(out_channels, in_channels, kernel_size, kernel_size)
if bias:
self.bias = torch.Tensor(out_channels)
else:
self.register_parameter('bias', None)
self.coeffs = torch.nn.Parameter(torch.Tensor(((out_channels)*in_channels*(kernel_size**2)+out_channels), self.deg, 2))
self.reset_parameters()
def forward(self, input):
s = input[-1,-1,0,0]
input = input[:,:-1]
w = self.calculate_weights(s)
n = self.oc*self.ic*self.ks*self.ks
self.weight = w[0:n].reshape(self.oc, self.ic, self.ks, self.ks)
self.bias = w[n:].reshape(self.oc)
return torch.nn.functional.conv2d(input, self.weight, self.bias, stride=self.stride, padding=self.pad)
| 45.625483 | 141 | 0.640941 |
0902082b716f889f0ef4ec9ca9dbfc6f158868f6 | 2,506 | py | Python | plugins/action/dcnm_inventory.py | rost-d/ansible-dcnm | 653b0ce5b89e8615d31bca3b15b60aac96c46e11 | [
"Apache-2.0"
] | 28 | 2020-07-19T02:56:38.000Z | 2022-03-03T01:28:10.000Z | plugins/action/dcnm_inventory.py | rost-d/ansible-dcnm | 653b0ce5b89e8615d31bca3b15b60aac96c46e11 | [
"Apache-2.0"
] | 67 | 2020-07-17T21:49:00.000Z | 2022-03-20T14:59:23.000Z | plugins/action/dcnm_inventory.py | rost-d/ansible-dcnm | 653b0ce5b89e8615d31bca3b15b60aac96c46e11 | [
"Apache-2.0"
] | 18 | 2020-07-07T14:42:22.000Z | 2022-03-09T12:31:13.000Z | # Copyright (c) 2020 Cisco and/or its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.ansible.netcommon.plugins.action.network import (
ActionModule as ActionNetworkModule,
)
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionNetworkModule):
def run(self, tmp=None, task_vars=None):
connection = self._connection
persistent_connect_timeout = connection.get_option("persistent_connect_timeout")
persistent_command_timeout = connection.get_option("persistent_command_timeout")
timeout = 1000
if (persistent_command_timeout < timeout or persistent_connect_timeout < timeout):
display.warning(
"PERSISTENT_COMMAND_TIMEOUT is %s"
% str(persistent_command_timeout),
self._play_context.remote_addr,
)
display.warning(
"PERSISTENT_CONNECT_TIMEOUT is %s"
% str(persistent_connect_timeout),
self._play_context.remote_addr,
)
msg = (
"PERSISTENT_COMMAND_TIMEOUT and PERSISTENT_CONNECT_TIMEOUT"
)
msg += " must be set to {} seconds or higher when using dcnm_inventory module.".format(timeout)
msg += " Current persistent_command_timeout setting:" + str(
persistent_command_timeout
)
msg += " Current persistent_connect_timeout setting:" + str(
persistent_connect_timeout
)
return {"failed": True, "msg": msg}
if self._task.args.get('state') == 'merged' or self._task.args.get('state') == 'overridden':
display.warning("Adding switches to a VXLAN fabric can take a while. Please be patient...")
self.result = super(ActionModule, self).run(task_vars=task_vars)
return self.result
| 39.777778 | 107 | 0.672785 |
ae5f8c94a08a51b74debd674108c8a2ec0df20ba | 1,488 | py | Python | tools/plyaddtexture_bal.py | pureexe/my-simple-sfm-ceres | 12eed6f2ef4be6d2304b4f8b3851c71e39b51cc1 | [
"MIT"
] | null | null | null | tools/plyaddtexture_bal.py | pureexe/my-simple-sfm-ceres | 12eed6f2ef4be6d2304b4f8b3851c71e39b51cc1 | [
"MIT"
] | null | null | null | tools/plyaddtexture_bal.py | pureexe/my-simple-sfm-ceres | 12eed6f2ef4be6d2304b4f8b3851c71e39b51cc1 | [
"MIT"
] | null | null | null | """
plyaddtexture_bal.py - add per-point colour (texture) to a PLY point cloud by looking up image observations in a BAL problem file and a COLMAP database (work in progress).
"""
from database import COLMAPDatabase
import numpy as np
import argparse
def main(args):
point3d = []
image_id_index = []
with open(args.input,'r') as f:
# remove header
while f.readline().strip() != 'end_header':
pass
line = f.readline().strip()
while line != '':
point = line.split(' ')
point3d.append([float(point[0]),float(point[1]),float(point[2])])
line = f.readline().strip()
with open(args.bal, 'r') as f:
point_count = 0
line = f.readline().strip()
print(line)
exit()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='plyaddtexture_bal.py - add per-point colour to a PLY point cloud using a BAL file and a COLMAP database')
parser.add_argument('-i', '--input', type=str, help='input ply', required=True)
parser.add_argument('-d', '--directory', type=str, help='image directory', required=True)
parser.add_argument('-c', '--colmap', type=str, help='colmap database for lookup', required=True)
parser.add_argument('-b', '--bal', type=str, help='bal format file for lookup', required=True)
parser.add_argument('-o', '--output', type=str, help='output ply file', required=True)
main(parser.parse_args())
# python .\plyaddtexture_bal.py -i ../penguin_ceres.ply -o ../penguin_ceres_color.ply -c penguinguy_cam004_matched.db -d 'D:\\Datasets\\penguinguy_cam004' -b .\penguin_feature_matching.txt | 40.216216 | 188 | 0.639113 |
bae6eac8441182c919e3ead0a93797f5e73bc0bb | 249 | py | Python | smallApps/smallApps/timestamp.py | dambo1993/moje_konfigi_itp | 29c294a559f07c1d90c80cf10cf9a5b103f40ff8 | [
"MIT"
] | null | null | null | smallApps/smallApps/timestamp.py | dambo1993/moje_konfigi_itp | 29c294a559f07c1d90c80cf10cf9a5b103f40ff8 | [
"MIT"
] | null | null | null | smallApps/smallApps/timestamp.py | dambo1993/moje_konfigi_itp | 29c294a559f07c1d90c80cf10cf9a5b103f40ff8 | [
"MIT"
] | null | null | null | import sys
from datetime import datetime
if len(sys.argv) == 3:
if sys.argv[1] == "-t":
timestamp = sys.argv[2]
dt_object = datetime.fromtimestamp(int(timestamp))
print(f"Date from timestamp: {timestamp} -> {dt_object}") | 31.125 | 65 | 0.638554 |
a56deec6e0a03d3c1c1923d9cba0bab02bba1aab | 1,372 | py | Python | ex095_v02.py | danilodelucio/Exercicios_Curso_em_Video | d59e1b4efaf27dd0fc828a608201613c69ac333d | [
"MIT"
] | null | null | null | ex095_v02.py | danilodelucio/Exercicios_Curso_em_Video | d59e1b4efaf27dd0fc828a608201613c69ac333d | [
"MIT"
] | null | null | null | ex095_v02.py | danilodelucio/Exercicios_Curso_em_Video | d59e1b4efaf27dd0fc828a608201613c69ac333d | [
"MIT"
] | null | null | null | time = list()
jogador = dict()
partidas = list()
while True:
jogador.clear()
jogador['nome'] = str(input('Nome do jogador: ')).title().strip()
tot = int(input(f'Quantas partidas {jogador["nome"]} jogou? '))
partidas.clear()
for c in range(0, tot):
partidas.append(int(input(f' Quantos gols na partida {c+1}?')))
jogador['gols'] = partidas[:]
jogador['total'] = sum(partidas)
time.append(jogador.copy())
while True:
resp = str(input('Quer continuar?[S/N] ')).upper()[0]
if resp in 'SN':
break
print('ERRO! Responde apenas com "S" ou "N".')
if resp == 'N':
break
print('-' * 30)
print('cod', end='')
for i in jogador.keys():
print(f'{i:<15}', end='')
print()
print('-' * 40)
for k, v in enumerate(time):
print(f'{k:>3}', end='')
for d in v.values():
print(f'{str(d):15}', end='')
print()
print('-' * 40)
while True:
busca = int(input('Mostrar dados de qual jogador? (999 para parar) '))
if busca == 999:
break
if busca >= len(time):
print(f'ERRO! Não existe jogador com o código {busca}.')
else:
print(f' -- LEVANTAMENTO DO JOGADOR {time[busca]["nome"]}.')
for i, g in enumerate(time[busca]['gols']):
print(f' No jogo {i+1} fez {g} gols.')
print('-' * 40)
print('<<< VOLTE SEMPRE >>>')
| 28 | 75 | 0.545918 |
533446afc3b781b1c63c59d71520aa275d7b4dcb | 389 | py | Python | empmgt/asgi.py | Boydlloyd/empmgt | de2af88e5f26f4c998fde991e5379a44333f0121 | [
"MIT"
] | null | null | null | empmgt/asgi.py | Boydlloyd/empmgt | de2af88e5f26f4c998fde991e5379a44333f0121 | [
"MIT"
] | null | null | null | empmgt/asgi.py | Boydlloyd/empmgt | de2af88e5f26f4c998fde991e5379a44333f0121 | [
"MIT"
] | null | null | null | """
ASGI config for empmgt project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'empmgt.settings')
application = get_asgi_application()
| 22.882353 | 78 | 0.784062 |
c628e5ad41fa839032bcc29034a770296d2fae87 | 855 | py | Python | dataset/generate_csv.py | Beta3-Data/FacialLandmark-Live-Training | 10b2b464f1deb015a7f152bb14f120f0dc6f9de2 | [
"MIT"
] | null | null | null | dataset/generate_csv.py | Beta3-Data/FacialLandmark-Live-Training | 10b2b464f1deb015a7f152bb14f120f0dc6f9de2 | [
"MIT"
] | null | null | null | dataset/generate_csv.py | Beta3-Data/FacialLandmark-Live-Training | 10b2b464f1deb015a7f152bb14f120f0dc6f9de2 | [
"MIT"
] | null | null | null | import cv2
import os
import random
anno_root = '/mnt/lvmhdd1/dataset/face_keypoints/annos/'
img_root = '/mnt/lvmhdd1/dataset/face_keypoints/images/'
items = []
for anno_path in os.listdir(anno_root):
if 'pts' in anno_path:
with open(os.path.join(anno_root,anno_path)) as anno_file:
landmarks = anno_file.readline().strip().split(' ')
if(len(landmarks) == 152):
items.append(anno_path.split('.')[0]+'.jpg,'+','.join(landmarks)+'\n')
else:
print(anno_path)
random.shuffle(items)
train_items = items[:30000]
val_items = items[30000:]
with open('face_landmark_train.csv','w') as trainfile:
for item in train_items:
trainfile.write(item)
with open('face_landmark_val.csv','w') as valfile:
for item in val_items:
valfile.write(item)
| 34.2 | 87 | 0.635088 |
35b02e6ee380aaa511748ed5b56c6408fa1e7ea8 | 33,176 | py | Python | sdk/python/pulumi_azure_nextgen/resources/v20190801/outputs.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_nextgen/resources/v20190801/outputs.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_nextgen/resources/v20190801/outputs.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'AliasPathTypeResponse',
'AliasTypeResponse',
'BasicDependencyResponse',
'DebugSettingResponse',
'DependencyResponse',
'DeploymentPropertiesExtendedResponse',
'IdentityResponse',
'IdentityResponseUserAssignedIdentities',
'OnErrorDeploymentExtendedResponse',
'ParametersLinkResponse',
'PlanResponse',
'ProviderResourceTypeResponse',
'ProviderResponse',
'ResourceGroupPropertiesResponse',
'SkuResponse',
'TemplateLinkResponse',
]
@pulumi.output_type
class AliasPathTypeResponse(dict):
"""
The type of the paths for alias.
"""
def __init__(__self__, *,
api_versions: Optional[Sequence[str]] = None,
path: Optional[str] = None):
"""
The type of the paths for alias.
:param Sequence[str] api_versions: The API versions.
:param str path: The path of an alias.
"""
if api_versions is not None:
pulumi.set(__self__, "api_versions", api_versions)
if path is not None:
pulumi.set(__self__, "path", path)
@property
@pulumi.getter(name="apiVersions")
def api_versions(self) -> Optional[Sequence[str]]:
"""
The API versions.
"""
return pulumi.get(self, "api_versions")
@property
@pulumi.getter
def path(self) -> Optional[str]:
"""
The path of an alias.
"""
return pulumi.get(self, "path")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AliasTypeResponse(dict):
"""
The alias type.
"""
def __init__(__self__, *,
name: Optional[str] = None,
paths: Optional[Sequence['outputs.AliasPathTypeResponse']] = None):
"""
The alias type.
:param str name: The alias name.
:param Sequence['AliasPathTypeResponseArgs'] paths: The paths for an alias.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if paths is not None:
pulumi.set(__self__, "paths", paths)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The alias name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def paths(self) -> Optional[Sequence['outputs.AliasPathTypeResponse']]:
"""
The paths for an alias.
"""
return pulumi.get(self, "paths")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BasicDependencyResponse(dict):
"""
Deployment dependency information.
"""
def __init__(__self__, *,
id: Optional[str] = None,
resource_name: Optional[str] = None,
resource_type: Optional[str] = None):
"""
Deployment dependency information.
:param str id: The ID of the dependency.
:param str resource_name: The dependency resource name.
:param str resource_type: The dependency resource type.
"""
if id is not None:
pulumi.set(__self__, "id", id)
if resource_name is not None:
pulumi.set(__self__, "resource_name", resource_name)
if resource_type is not None:
pulumi.set(__self__, "resource_type", resource_type)
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
The ID of the dependency.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resourceName")
def resource_name(self) -> Optional[str]:
"""
The dependency resource name.
"""
return pulumi.get(self, "resource_name")
@property
@pulumi.getter(name="resourceType")
def resource_type(self) -> Optional[str]:
"""
The dependency resource type.
"""
return pulumi.get(self, "resource_type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DebugSettingResponse(dict):
"""
The debug setting.
"""
def __init__(__self__, *,
detail_level: Optional[str] = None):
"""
The debug setting.
:param str detail_level: Specifies the type of information to log for debugging. The permitted values are none, requestContent, responseContent, or both requestContent and responseContent separated by a comma. The default is none. When setting this value, carefully consider the type of information you are passing in during deployment. By logging information about the request or response, you could potentially expose sensitive data that is retrieved through the deployment operations.
"""
if detail_level is not None:
pulumi.set(__self__, "detail_level", detail_level)
@property
@pulumi.getter(name="detailLevel")
def detail_level(self) -> Optional[str]:
"""
Specifies the type of information to log for debugging. The permitted values are none, requestContent, responseContent, or both requestContent and responseContent separated by a comma. The default is none. When setting this value, carefully consider the type of information you are passing in during deployment. By logging information about the request or response, you could potentially expose sensitive data that is retrieved through the deployment operations.
"""
return pulumi.get(self, "detail_level")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DependencyResponse(dict):
"""
Deployment dependency information.
"""
def __init__(__self__, *,
depends_on: Optional[Sequence['outputs.BasicDependencyResponse']] = None,
id: Optional[str] = None,
resource_name: Optional[str] = None,
resource_type: Optional[str] = None):
"""
Deployment dependency information.
:param Sequence['BasicDependencyResponseArgs'] depends_on: The list of dependencies.
:param str id: The ID of the dependency.
:param str resource_name: The dependency resource name.
:param str resource_type: The dependency resource type.
"""
if depends_on is not None:
pulumi.set(__self__, "depends_on", depends_on)
if id is not None:
pulumi.set(__self__, "id", id)
if resource_name is not None:
pulumi.set(__self__, "resource_name", resource_name)
if resource_type is not None:
pulumi.set(__self__, "resource_type", resource_type)
@property
@pulumi.getter(name="dependsOn")
def depends_on(self) -> Optional[Sequence['outputs.BasicDependencyResponse']]:
"""
The list of dependencies.
"""
return pulumi.get(self, "depends_on")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
The ID of the dependency.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resourceName")
def resource_name(self) -> Optional[str]:
"""
The dependency resource name.
"""
return pulumi.get(self, "resource_name")
@property
@pulumi.getter(name="resourceType")
def resource_type(self) -> Optional[str]:
"""
The dependency resource type.
"""
return pulumi.get(self, "resource_type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DeploymentPropertiesExtendedResponse(dict):
"""
Deployment properties with additional details.
"""
def __init__(__self__, *,
correlation_id: str,
duration: str,
provisioning_state: str,
timestamp: str,
debug_setting: Optional['outputs.DebugSettingResponse'] = None,
dependencies: Optional[Sequence['outputs.DependencyResponse']] = None,
mode: Optional[str] = None,
on_error_deployment: Optional['outputs.OnErrorDeploymentExtendedResponse'] = None,
outputs: Optional[Any] = None,
parameters: Optional[Any] = None,
parameters_link: Optional['outputs.ParametersLinkResponse'] = None,
providers: Optional[Sequence['outputs.ProviderResponse']] = None,
template: Optional[Any] = None,
template_link: Optional['outputs.TemplateLinkResponse'] = None):
"""
Deployment properties with additional details.
:param str correlation_id: The correlation ID of the deployment.
:param str duration: The duration of the template deployment.
:param str provisioning_state: The state of the provisioning.
:param str timestamp: The timestamp of the template deployment.
:param 'DebugSettingResponseArgs' debug_setting: The debug setting of the deployment.
:param Sequence['DependencyResponseArgs'] dependencies: The list of deployment dependencies.
:param str mode: The deployment mode. Possible values are Incremental and Complete.
:param 'OnErrorDeploymentExtendedResponseArgs' on_error_deployment: The deployment on error behavior.
:param Any outputs: Key/value pairs that represent deployment output.
:param Any parameters: Deployment parameters. Use only one of Parameters or ParametersLink.
:param 'ParametersLinkResponseArgs' parameters_link: The URI referencing the parameters. Use only one of Parameters or ParametersLink.
:param Sequence['ProviderResponseArgs'] providers: The list of resource providers needed for the deployment.
:param Any template: The template content. Use only one of Template or TemplateLink.
:param 'TemplateLinkResponseArgs' template_link: The URI referencing the template. Use only one of Template or TemplateLink.
"""
pulumi.set(__self__, "correlation_id", correlation_id)
pulumi.set(__self__, "duration", duration)
pulumi.set(__self__, "provisioning_state", provisioning_state)
pulumi.set(__self__, "timestamp", timestamp)
if debug_setting is not None:
pulumi.set(__self__, "debug_setting", debug_setting)
if dependencies is not None:
pulumi.set(__self__, "dependencies", dependencies)
if mode is not None:
pulumi.set(__self__, "mode", mode)
if on_error_deployment is not None:
pulumi.set(__self__, "on_error_deployment", on_error_deployment)
if outputs is not None:
pulumi.set(__self__, "outputs", outputs)
if parameters is not None:
pulumi.set(__self__, "parameters", parameters)
if parameters_link is not None:
pulumi.set(__self__, "parameters_link", parameters_link)
if providers is not None:
pulumi.set(__self__, "providers", providers)
if template is not None:
pulumi.set(__self__, "template", template)
if template_link is not None:
pulumi.set(__self__, "template_link", template_link)
@property
@pulumi.getter(name="correlationId")
def correlation_id(self) -> str:
"""
The correlation ID of the deployment.
"""
return pulumi.get(self, "correlation_id")
@property
@pulumi.getter
def duration(self) -> str:
"""
The duration of the template deployment.
"""
return pulumi.get(self, "duration")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The state of the provisioning.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def timestamp(self) -> str:
"""
The timestamp of the template deployment.
"""
return pulumi.get(self, "timestamp")
@property
@pulumi.getter(name="debugSetting")
def debug_setting(self) -> Optional['outputs.DebugSettingResponse']:
"""
The debug setting of the deployment.
"""
return pulumi.get(self, "debug_setting")
@property
@pulumi.getter
def dependencies(self) -> Optional[Sequence['outputs.DependencyResponse']]:
"""
The list of deployment dependencies.
"""
return pulumi.get(self, "dependencies")
@property
@pulumi.getter
def mode(self) -> Optional[str]:
"""
The deployment mode. Possible values are Incremental and Complete.
"""
return pulumi.get(self, "mode")
@property
@pulumi.getter(name="onErrorDeployment")
def on_error_deployment(self) -> Optional['outputs.OnErrorDeploymentExtendedResponse']:
"""
The deployment on error behavior.
"""
return pulumi.get(self, "on_error_deployment")
@property
@pulumi.getter
def outputs(self) -> Optional[Any]:
"""
Key/value pairs that represent deployment output.
"""
return pulumi.get(self, "outputs")
@property
@pulumi.getter
def parameters(self) -> Optional[Any]:
"""
Deployment parameters. Use only one of Parameters or ParametersLink.
"""
return pulumi.get(self, "parameters")
@property
@pulumi.getter(name="parametersLink")
def parameters_link(self) -> Optional['outputs.ParametersLinkResponse']:
"""
The URI referencing the parameters. Use only one of Parameters or ParametersLink.
"""
return pulumi.get(self, "parameters_link")
@property
@pulumi.getter
def providers(self) -> Optional[Sequence['outputs.ProviderResponse']]:
"""
The list of resource providers needed for the deployment.
"""
return pulumi.get(self, "providers")
@property
@pulumi.getter
def template(self) -> Optional[Any]:
"""
The template content. Use only one of Template or TemplateLink.
"""
return pulumi.get(self, "template")
@property
@pulumi.getter(name="templateLink")
def template_link(self) -> Optional['outputs.TemplateLinkResponse']:
"""
The URI referencing the template. Use only one of Template or TemplateLink.
"""
return pulumi.get(self, "template_link")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class IdentityResponse(dict):
"""
Identity for the resource.
"""
def __init__(__self__, *,
principal_id: str,
tenant_id: str,
type: Optional[str] = None,
user_assigned_identities: Optional[Mapping[str, 'outputs.IdentityResponseUserAssignedIdentities']] = None):
"""
Identity for the resource.
:param str principal_id: The principal ID of resource identity.
:param str tenant_id: The tenant ID of resource.
:param str type: The identity type.
:param Mapping[str, 'IdentityResponseUserAssignedIdentitiesArgs'] user_assigned_identities: The list of user identities associated with the resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
"""
pulumi.set(__self__, "principal_id", principal_id)
pulumi.set(__self__, "tenant_id", tenant_id)
if type is not None:
pulumi.set(__self__, "type", type)
if user_assigned_identities is not None:
pulumi.set(__self__, "user_assigned_identities", user_assigned_identities)
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> str:
"""
The principal ID of resource identity.
"""
return pulumi.get(self, "principal_id")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> str:
"""
The tenant ID of resource.
"""
return pulumi.get(self, "tenant_id")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
The identity type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="userAssignedIdentities")
def user_assigned_identities(self) -> Optional[Mapping[str, 'outputs.IdentityResponseUserAssignedIdentities']]:
"""
The list of user identities associated with the resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
"""
return pulumi.get(self, "user_assigned_identities")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class IdentityResponseUserAssignedIdentities(dict):
def __init__(__self__, *,
client_id: str,
principal_id: str):
"""
:param str client_id: The client id of user assigned identity.
:param str principal_id: The principal id of user assigned identity.
"""
pulumi.set(__self__, "client_id", client_id)
pulumi.set(__self__, "principal_id", principal_id)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> str:
"""
The client id of user assigned identity.
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> str:
"""
The principal id of user assigned identity.
"""
return pulumi.get(self, "principal_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class OnErrorDeploymentExtendedResponse(dict):
"""
Deployment on error behavior with additional details.
"""
def __init__(__self__, *,
provisioning_state: str,
deployment_name: Optional[str] = None,
type: Optional[str] = None):
"""
Deployment on error behavior with additional details.
:param str provisioning_state: The state of the provisioning for the on error deployment.
:param str deployment_name: The deployment to be used on error case.
:param str type: The deployment on error behavior type. Possible values are LastSuccessful and SpecificDeployment.
"""
pulumi.set(__self__, "provisioning_state", provisioning_state)
if deployment_name is not None:
pulumi.set(__self__, "deployment_name", deployment_name)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The state of the provisioning for the on error deployment.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="deploymentName")
def deployment_name(self) -> Optional[str]:
"""
The deployment to be used on error case.
"""
return pulumi.get(self, "deployment_name")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
The deployment on error behavior type. Possible values are LastSuccessful and SpecificDeployment.
"""
return pulumi.get(self, "type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ParametersLinkResponse(dict):
"""
Entity representing the reference to the deployment parameters.
"""
def __init__(__self__, *,
uri: str,
content_version: Optional[str] = None):
"""
Entity representing the reference to the deployment parameters.
:param str uri: The URI of the parameters file.
:param str content_version: If included, must match the ContentVersion in the template.
"""
pulumi.set(__self__, "uri", uri)
if content_version is not None:
pulumi.set(__self__, "content_version", content_version)
@property
@pulumi.getter
def uri(self) -> str:
"""
The URI of the parameters file.
"""
return pulumi.get(self, "uri")
@property
@pulumi.getter(name="contentVersion")
def content_version(self) -> Optional[str]:
"""
If included, must match the ContentVersion in the template.
"""
return pulumi.get(self, "content_version")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PlanResponse(dict):
"""
Plan for the resource.
"""
def __init__(__self__, *,
name: Optional[str] = None,
product: Optional[str] = None,
promotion_code: Optional[str] = None,
publisher: Optional[str] = None,
version: Optional[str] = None):
"""
Plan for the resource.
:param str name: The plan ID.
:param str product: The offer ID.
:param str promotion_code: The promotion code.
:param str publisher: The publisher ID.
:param str version: The plan's version.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if product is not None:
pulumi.set(__self__, "product", product)
if promotion_code is not None:
pulumi.set(__self__, "promotion_code", promotion_code)
if publisher is not None:
pulumi.set(__self__, "publisher", publisher)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The plan ID.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def product(self) -> Optional[str]:
"""
The offer ID.
"""
return pulumi.get(self, "product")
@property
@pulumi.getter(name="promotionCode")
def promotion_code(self) -> Optional[str]:
"""
The promotion code.
"""
return pulumi.get(self, "promotion_code")
@property
@pulumi.getter
def publisher(self) -> Optional[str]:
"""
The publisher ID.
"""
return pulumi.get(self, "publisher")
@property
@pulumi.getter
def version(self) -> Optional[str]:
"""
The plan's version.
"""
return pulumi.get(self, "version")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ProviderResourceTypeResponse(dict):
"""
Resource type managed by the resource provider.
"""
def __init__(__self__, *,
aliases: Optional[Sequence['outputs.AliasTypeResponse']] = None,
api_versions: Optional[Sequence[str]] = None,
capabilities: Optional[str] = None,
locations: Optional[Sequence[str]] = None,
properties: Optional[Mapping[str, str]] = None,
resource_type: Optional[str] = None):
"""
Resource type managed by the resource provider.
:param Sequence['AliasTypeResponseArgs'] aliases: The aliases that are supported by this resource type.
:param Sequence[str] api_versions: The API version.
:param str capabilities: The additional capabilities offered by this resource type.
:param Sequence[str] locations: The collection of locations where this resource type can be created.
:param Mapping[str, str] properties: The properties.
:param str resource_type: The resource type.
"""
if aliases is not None:
pulumi.set(__self__, "aliases", aliases)
if api_versions is not None:
pulumi.set(__self__, "api_versions", api_versions)
if capabilities is not None:
pulumi.set(__self__, "capabilities", capabilities)
if locations is not None:
pulumi.set(__self__, "locations", locations)
if properties is not None:
pulumi.set(__self__, "properties", properties)
if resource_type is not None:
pulumi.set(__self__, "resource_type", resource_type)
@property
@pulumi.getter
def aliases(self) -> Optional[Sequence['outputs.AliasTypeResponse']]:
"""
The aliases that are supported by this resource type.
"""
return pulumi.get(self, "aliases")
@property
@pulumi.getter(name="apiVersions")
def api_versions(self) -> Optional[Sequence[str]]:
"""
The API version.
"""
return pulumi.get(self, "api_versions")
@property
@pulumi.getter
def capabilities(self) -> Optional[str]:
"""
The additional capabilities offered by this resource type.
"""
return pulumi.get(self, "capabilities")
@property
@pulumi.getter
def locations(self) -> Optional[Sequence[str]]:
"""
The collection of locations where this resource type can be created.
"""
return pulumi.get(self, "locations")
@property
@pulumi.getter
def properties(self) -> Optional[Mapping[str, str]]:
"""
The properties.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter(name="resourceType")
def resource_type(self) -> Optional[str]:
"""
The resource type.
"""
return pulumi.get(self, "resource_type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ProviderResponse(dict):
"""
Resource provider information.
"""
def __init__(__self__, *,
id: str,
registration_policy: str,
registration_state: str,
resource_types: Sequence['outputs.ProviderResourceTypeResponse'],
namespace: Optional[str] = None):
"""
Resource provider information.
:param str id: The provider ID.
:param str registration_policy: The registration policy of the resource provider.
:param str registration_state: The registration state of the resource provider.
:param Sequence['ProviderResourceTypeResponseArgs'] resource_types: The collection of provider resource types.
:param str namespace: The namespace of the resource provider.
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "registration_policy", registration_policy)
pulumi.set(__self__, "registration_state", registration_state)
pulumi.set(__self__, "resource_types", resource_types)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
@property
@pulumi.getter
def id(self) -> str:
"""
The provider ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="registrationPolicy")
def registration_policy(self) -> str:
"""
The registration policy of the resource provider.
"""
return pulumi.get(self, "registration_policy")
@property
@pulumi.getter(name="registrationState")
def registration_state(self) -> str:
"""
The registration state of the resource provider.
"""
return pulumi.get(self, "registration_state")
@property
@pulumi.getter(name="resourceTypes")
def resource_types(self) -> Sequence['outputs.ProviderResourceTypeResponse']:
"""
The collection of provider resource types.
"""
return pulumi.get(self, "resource_types")
@property
@pulumi.getter
def namespace(self) -> Optional[str]:
"""
The namespace of the resource provider.
"""
return pulumi.get(self, "namespace")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ResourceGroupPropertiesResponse(dict):
"""
The resource group properties.
"""
def __init__(__self__, *,
provisioning_state: str):
"""
The resource group properties.
:param str provisioning_state: The provisioning state.
"""
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state.
"""
return pulumi.get(self, "provisioning_state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SkuResponse(dict):
"""
SKU for the resource.
"""
def __init__(__self__, *,
capacity: Optional[int] = None,
family: Optional[str] = None,
model: Optional[str] = None,
name: Optional[str] = None,
size: Optional[str] = None,
tier: Optional[str] = None):
"""
SKU for the resource.
:param int capacity: The SKU capacity.
:param str family: The SKU family.
:param str model: The SKU model.
:param str name: The SKU name.
:param str size: The SKU size.
:param str tier: The SKU tier.
"""
if capacity is not None:
pulumi.set(__self__, "capacity", capacity)
if family is not None:
pulumi.set(__self__, "family", family)
if model is not None:
pulumi.set(__self__, "model", model)
if name is not None:
pulumi.set(__self__, "name", name)
if size is not None:
pulumi.set(__self__, "size", size)
if tier is not None:
pulumi.set(__self__, "tier", tier)
@property
@pulumi.getter
def capacity(self) -> Optional[int]:
"""
The SKU capacity.
"""
return pulumi.get(self, "capacity")
@property
@pulumi.getter
def family(self) -> Optional[str]:
"""
The SKU family.
"""
return pulumi.get(self, "family")
@property
@pulumi.getter
def model(self) -> Optional[str]:
"""
The SKU model.
"""
return pulumi.get(self, "model")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The SKU name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def size(self) -> Optional[str]:
"""
The SKU size.
"""
return pulumi.get(self, "size")
@property
@pulumi.getter
def tier(self) -> Optional[str]:
"""
The SKU tier.
"""
return pulumi.get(self, "tier")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class TemplateLinkResponse(dict):
"""
Entity representing the reference to the template.
"""
def __init__(__self__, *,
uri: str,
content_version: Optional[str] = None):
"""
Entity representing the reference to the template.
:param str uri: The URI of the template to deploy.
:param str content_version: If included, must match the ContentVersion in the template.
"""
pulumi.set(__self__, "uri", uri)
if content_version is not None:
pulumi.set(__self__, "content_version", content_version)
@property
@pulumi.getter
def uri(self) -> str:
"""
The URI of the template to deploy.
"""
return pulumi.get(self, "uri")
@property
@pulumi.getter(name="contentVersion")
def content_version(self) -> Optional[str]:
"""
If included, must match the ContentVersion in the template.
"""
return pulumi.get(self, "content_version")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
| 33.88764 | 495 | 0.624156 |
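The generated wrappers above only store and expose values: __init__ records each field with pulumi.set and the decorated properties read it back with pulumi.get, translating the camelCase wire names to snake_case. An illustrative helper (not part of the generated SDK) that consumes a PlanResponse produced elsewhere:
def describe_plan(plan):
    # Each camelCase wire field (e.g. promotionCode) is exposed as the snake_case
    # property (promotion_code) on the wrapper defined above.
    return "%s/%s plan=%s v=%s" % (plan.publisher, plan.product, plan.name, plan.version)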
4536bdaf515094d49908bfca65ff9b8746ec2885 | 6,550 | py | Python | frappe/app.py | cstkyrilos/frappe | 27d9306bc5924c11c2749503454cc6d11a8cc654 | [
"MIT"
] | null | null | null | frappe/app.py | cstkyrilos/frappe | 27d9306bc5924c11c2749503454cc6d11a8cc654 | [
"MIT"
] | null | null | null | frappe/app.py | cstkyrilos/frappe | 27d9306bc5924c11c2749503454cc6d11a8cc654 | [
"MIT"
] | 1 | 2018-03-21T16:13:12.000Z | 2018-03-21T16:13:12.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import os
import MySQLdb
from werkzeug.wrappers import Request
from werkzeug.local import LocalManager
from werkzeug.exceptions import HTTPException, NotFound
from werkzeug.contrib.profiler import ProfilerMiddleware
from werkzeug.wsgi import SharedDataMiddleware
import frappe
import frappe.handler
import frappe.auth
import frappe.api
import frappe.async
import frappe.utils.response
import frappe.website.render
from frappe.utils import get_site_name
from frappe.middlewares import StaticDataMiddleware
from frappe.utils.error import make_error_snapshot
from frappe.core.doctype.communication.comment import update_comments_in_parent_after_request
from frappe import _
local_manager = LocalManager([frappe.local])
_site = None
_sites_path = os.environ.get("SITES_PATH", ".")
class RequestContext(object):
def __init__(self, environ):
self.request = Request(environ)
def __enter__(self):
init_request(self.request)
def __exit__(self, type, value, traceback):
frappe.destroy()
@Request.application
def application(request):
response = None
try:
rollback = True
init_request(request)
if frappe.local.form_dict.cmd:
response = frappe.handler.handle()
elif frappe.request.path.startswith("/api/"):
if frappe.local.form_dict.data is None:
frappe.local.form_dict.data = request.get_data()
response = frappe.api.handle()
elif frappe.request.path.startswith('/backups'):
response = frappe.utils.response.download_backup(request.path)
elif frappe.request.path.startswith('/private/files/'):
response = frappe.utils.response.download_private_file(request.path)
elif frappe.local.request.method in ('GET', 'HEAD'):
response = frappe.website.render.render()
else:
raise NotFound
except HTTPException, e:
return e
except frappe.SessionStopped, e:
response = frappe.utils.response.handle_session_stopped()
except Exception, e:
response = handle_exception(e)
else:
rollback = after_request(rollback)
finally:
if frappe.local.request.method in ("POST", "PUT") and frappe.db and rollback:
frappe.db.rollback()
# set cookies
if response and hasattr(frappe.local, 'cookie_manager'):
frappe.local.cookie_manager.flush_cookies(response=response)
frappe.destroy()
return response
def init_request(request):
frappe.local.request = request
frappe.local.is_ajax = frappe.get_request_header("X-Requested-With")=="XMLHttpRequest"
site = _site or request.headers.get('X-Frappe-Site-Name') or get_site_name(request.host)
frappe.init(site=site, sites_path=_sites_path)
if not (frappe.local.conf and frappe.local.conf.db_name):
# site does not exist
raise NotFound
if frappe.local.conf.get('maintenance_mode'):
raise frappe.SessionStopped
make_form_dict(request)
frappe.local.http_request = frappe.auth.HTTPRequest()
def make_form_dict(request):
frappe.local.form_dict = frappe._dict({ k:v[0] if isinstance(v, (list, tuple)) else v \
for k, v in (request.form or request.args).iteritems() })
if "_" in frappe.local.form_dict:
# _ is passed by $.ajax so that the request is not cached by the browser. So, remove _ from form_dict
frappe.local.form_dict.pop("_")
def handle_exception(e):
http_status_code = getattr(e, "http_status_code", 500)
return_as_message = False
if (http_status_code==500
and isinstance(e, MySQLdb.OperationalError)
and e.args[0] in (1205, 1213)):
# 1205 = lock wait timeout
# 1213 = deadlock
# code 409 represents conflict
http_status_code = 508
if http_status_code==401:
frappe.respond_as_web_page(_("Session Expired"),
_("Your session has expired, please login again to continue."),
http_status_code=http_status_code, indicator_color='red')
return_as_message = True
if http_status_code==403:
frappe.respond_as_web_page(_("Not Permitted"),
_("You do not have enough permissions to complete the action"),
http_status_code=http_status_code, indicator_color='red')
return_as_message = True
elif http_status_code==404:
frappe.respond_as_web_page(_("Not Found"),
_("The resource you are looking for is not available"),
http_status_code=http_status_code, indicator_color='red')
return_as_message = True
elif frappe.local.is_ajax or 'application/json' in frappe.local.request.headers.get('Accept', ''):
response = frappe.utils.response.report_error(http_status_code)
else:
traceback = "<pre>"+frappe.get_traceback()+"</pre>"
if frappe.local.flags.disable_traceback:
traceback = ""
frappe.respond_as_web_page("Server Error",
traceback, http_status_code=http_status_code,
indicator_color='red')
return_as_message = True
if e.__class__ == frappe.AuthenticationError:
if hasattr(frappe.local, "login_manager"):
frappe.local.login_manager.clear_cookies()
if http_status_code >= 500:
frappe.logger().error('Request Error', exc_info=True)
make_error_snapshot(e)
if return_as_message:
response = frappe.website.render.render("message", http_status_code=http_status_code)
return response
def after_request(rollback):
if (frappe.local.request.method in ("POST", "PUT") or frappe.local.flags.commit) and frappe.db:
if frappe.db.transaction_writes:
frappe.db.commit()
rollback = False
# update session
if getattr(frappe.local, "session_obj", None):
updated_in_db = frappe.local.session_obj.update()
if updated_in_db:
frappe.db.commit()
rollback = False
update_comments_in_parent_after_request()
return rollback
application = local_manager.make_middleware(application)
def serve(port=8000, profile=False, site=None, sites_path='.'):
global application, _site, _sites_path
_site = site
_sites_path = sites_path
from werkzeug.serving import run_simple
if profile:
application = ProfilerMiddleware(application, sort_by=('cumtime', 'calls'))
if not os.environ.get('NO_STATICS'):
application = SharedDataMiddleware(application, {
b'/assets': os.path.join(sites_path, 'assets').encode("utf-8"),
})
application = StaticDataMiddleware(application, {
b'/files': os.path.abspath(sites_path).encode("utf-8")
})
application.debug = True
application.config = {
'SERVER_NAME': 'localhost:8000'
}
in_test_env = os.environ.get('CI')
run_simple('0.0.0.0', int(port), application,
use_reloader=not in_test_env,
use_debugger=not in_test_env,
use_evalex=not in_test_env,
threaded=True)
| 28.478261 | 103 | 0.754351 |
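For local development the module above can be driven through its serve() helper, which wraps werkzeug's run_simple around the application callable. A minimal sketch; the site name and sites path are placeholders, not values taken from the file:
# Hypothetical local invocation of the app defined above:
from frappe.app import serve
serve(port=8000, profile=False,
      site='mysite.local',                                # placeholder site name
      sites_path='/home/frappe/frappe-bench/sites')       # placeholder bench path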
f0496cb72286a2cc570d469270240ed418b2408b | 21,261 | py | Python | Tests/modules/network_related/test__socket.py | aisk/ironpython3 | d492fd811a0cee4d0a07cd46f02a29a3c90d964b | [
"Apache-2.0"
] | 1,872 | 2015-01-02T18:56:47.000Z | 2022-03-31T07:34:39.000Z | Tests/modules/network_related/test__socket.py | aisk/ironpython3 | d492fd811a0cee4d0a07cd46f02a29a3c90d964b | [
"Apache-2.0"
] | 675 | 2015-02-27T09:01:01.000Z | 2022-03-31T14:03:25.000Z | Tests/modules/network_related/test__socket.py | aisk/ironpython3 | d492fd811a0cee4d0a07cd46f02a29a3c90d964b | [
"Apache-2.0"
] | 278 | 2015-01-02T03:48:20.000Z | 2022-03-29T20:40:44.000Z | # Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
#
# test _socket
#
import os
import _socket
import sys
import _thread
import time
import unittest
from iptest import IronPythonTestCase, is_cli, run_test, skipUnlessIronPython
AF_DICT = {"AF_APPLETALK" : 5,
"AF_DECnet" : 12,
"AF_INET" : 2,
"AF_INET6" : 10,
"AF_IPX" : 4,
"AF_IRDA" : 23,
"AF_SNA" : 22,
"AF_UNSPEC" : 0,
}
ST_DICT = {"SOCK_DGRAM" : 2,
"SOCK_RAW" : 3,
"SOCK_RDM" : 4,
"SOCK_SEQPACKET" : 5,
"SOCK_STREAM" : 1,
}
IPPROTO_DICT = { "IPPROTO_AH" : 51,
"IPPROTO_DSTOPTS" : 60,
"IPPROTO_ESP" : 50,
"IPPROTO_FRAGMENT" : 44,
"IPPROTO_HOPOPTS" : 0,
"IPPROTO_ICMP" : 1,
"IPPROTO_ICMPV6" : 58,
"IPPROTO_IDP" : 22,
"IPPROTO_IGMP" : 2,
"IPPROTO_IP" : 0,
"IPPROTO_IPV6" : 41,
"IPPROTO_NONE" : 59,
"IPPROTO_PUP" : 12,
"IPPROTO_RAW" : 255,
"IPPROTO_ROUTING" : 43,
"IPPROTO_TCP" : 6,
"IPPROTO_UDP" : 17,
}
OTHER_GLOBALS = {"AI_ADDRCONFIG" : 32,
"AI_ALL" : 16,
"AI_CANONNAME" : 2,
"AI_NUMERICHOST" : 4,
"AI_PASSIVE" : 1,
"AI_V4MAPPED" : 8,
"EAI_ADDRFAMILY" : -9,
"EAI_AGAIN" : -3,
"EAI_BADFLAGS" : -1,
"EAI_FAIL" : -4,
"EAI_FAMILY" : -6,
"EAI_MEMORY" : -10,
"EAI_NODATA" : -5,
"EAI_NONAME" : -2,
"EAI_SERVICE" : -8,
"EAI_SOCKTYPE" : -7,
"EAI_SYSTEM" : -11,
"INADDR_ALLHOSTS_GROUP" : -536870911,
"INADDR_ANY" : 0,
"INADDR_BROADCAST" : -1,
"INADDR_LOOPBACK" : 2130706433,
"INADDR_MAX_LOCAL_GROUP" : -536870657,
"INADDR_NONE" : -1,
"INADDR_UNSPEC_GROUP" : -536870912,
"IPPORT_RESERVED" : 1024,
"IPPORT_USERRESERVED" : 5000,
"IPV6_CHECKSUM" : 7,
"IPV6_DSTOPTS" : 4,
"IPV6_HOPLIMIT" : 8,
"IPV6_HOPOPTS" : 3,
"IPV6_JOIN_GROUP" : 20,
"IPV6_LEAVE_GROUP" : 21,
"IPV6_MULTICAST_HOPS" : 18,
"IPV6_MULTICAST_IF" : 17,
"IPV6_MULTICAST_LOOP" : 19,
"IPV6_NEXTHOP" : 9,
"IPV6_PKTINFO" : 2,
"IPV6_RTHDR" : 5,
"IPV6_RTHDR_TYPE_0" : 0,
"IPV6_UNICAST_HOPS" : 16,
"IPV6_V6ONLY" : 26,
"IP_ADD_MEMBERSHIP" : 35,
"IP_DEFAULT_MULTICAST_LOOP" : 1,
"IP_DEFAULT_MULTICAST_TTL" : 1,
"IP_DROP_MEMBERSHIP" : 36,
"IP_HDRINCL" : 3,
"IP_MAX_MEMBERSHIPS" : 20,
"IP_MULTICAST_IF" : 32,
"IP_MULTICAST_LOOP" : 34,
"IP_MULTICAST_TTL" : 33,
"IP_OPTIONS" : 4,
"IP_RECVOPTS" : 6,
"IP_RECVRETOPTS" : 7,
"IP_RETOPTS" : 7,
"IP_TOS" : 1,
"IP_TTL" : 2,
"MSG_CTRUNC" : 8,
"MSG_DONTROUTE" : 4,
"MSG_DONTWAIT" : 64,
"MSG_EOR" : 128,
"MSG_OOB" : 1,
"MSG_PEEK" : 2,
"MSG_TRUNC" : 32,
"MSG_WAITALL" : 256,
"NI_DGRAM" : 16,
"NI_MAXHOST" : 1025,
"NI_MAXSERV" : 32,
"NI_NAMEREQD" : 8,
"NI_NOFQDN" : 4,
"NI_NUMERICHOST" : 1,
"NI_NUMERICSERV" : 2,
"PACKET_BROADCAST" : 1,
"PACKET_FASTROUTE" : 6,
"PACKET_HOST" : 0,
"PACKET_LOOPBACK" : 5,
"PACKET_MULTICAST" : 2,
"PACKET_OTHERHOST" : 3,
"PACKET_OUTGOING" : 4,
"PF_PACKET" : 17,
"SHUT_RD" : 0,
"SHUT_RDWR" : 2,
"SHUT_WR" : 1,
"SOL_IP" : 0,
"SOL_SOCKET" : 1,
"SOL_TCP" : 6,
"SOL_UDP" : 17,
"SOMAXCONN" : 128,
"SO_ACCEPTCONN" : 30,
"SO_BROADCAST" : 6,
"SO_DEBUG" : 1,
"SO_DONTROUTE" : 5,
"SO_ERROR" : 4,
"SO_KEEPALIVE" : 9,
"SO_LINGER" : 13,
"SO_OOBINLINE" : 10,
"SO_RCVBUF" : 8,
"SO_RCVLOWAT" : 18,
"SO_RCVTIMEO" : 20,
"SO_REUSEADDR" : 2,
"SO_SNDBUF" : 7,
"SO_SNDLOWAT" : 19,
"SO_SNDTIMEO" : 21,
"SO_TYPE" : 3,
"SSL_ERROR_EOF" : 8,
"SSL_ERROR_INVALID_ERROR_CODE" : 9,
"SSL_ERROR_SSL" : 1,
"SSL_ERROR_SYSCALL" : 5,
"SSL_ERROR_WANT_CONNECT" : 7,
"SSL_ERROR_WANT_READ" : 2,
"SSL_ERROR_WANT_WRITE" : 3,
"SSL_ERROR_WANT_X509_LOOKUP" : 4,
"SSL_ERROR_ZERO_RETURN" : 6,
"TCP_CORK" : 3,
"TCP_DEFER_ACCEPT" : 9,
"TCP_INFO" : 11,
"TCP_KEEPCNT" : 6,
"TCP_KEEPIDLE" : 4,
"TCP_KEEPINTVL" : 5,
"TCP_LINGER2" : 8,
"TCP_MAXSEG" : 2,
"TCP_NODELAY" : 1,
"TCP_QUICKACK" : 12,
"TCP_SYNCNT" : 7,
"TCP_WINDOW_CLAMP" : 10}
class SocketTest(IronPythonTestCase):
def test_getprotobyname(self):
'''Tests _socket.getprotobyname'''
#IP and CPython
proto_map = {
"icmp": _socket.IPPROTO_ICMP,
"ip": _socket.IPPROTO_IP,
"tcp": _socket.IPPROTO_TCP,
"udp": _socket.IPPROTO_UDP,
}
#supported only by IP
if is_cli:
proto_map.update(
{"dstopts": _socket.IPPROTO_DSTOPTS,
"none": _socket.IPPROTO_NONE,
"raw": _socket.IPPROTO_RAW,
"ipv4": _socket.IPPROTO_IPV4,
"ipv6": _socket.IPPROTO_IPV6,
"esp": _socket.IPPROTO_ESP,
"fragment": _socket.IPPROTO_FRAGMENT,
"nd": _socket.IPPROTO_ND,
"icmpv6": _socket.IPPROTO_ICMPV6,
"routing": _socket.IPPROTO_ROUTING,
"pup": _socket.IPPROTO_PUP, #http://ironpython.codeplex.com/WorkItem/View.aspx?WorkItemId=21918
"ggp": _socket.IPPROTO_GGP, #http://ironpython.codeplex.com/WorkItem/View.aspx?WorkItemId=21918
})
for proto_name, good_val in proto_map.items():
temp_val = _socket.getprotobyname(proto_name)
self.assertEqual(temp_val, good_val)
#negative cases
bad_list = ["", "blah", "i"]
for name in bad_list:
self.assertRaises(_socket.error, _socket.getprotobyname, name)
def test_getaddrinfo(self):
'''Tests _socket.getaddrinfo'''
joe = { ("127.0.0.1", 0) : "[(2, 0, 0, '', ('127.0.0.1', 0))]",
("127.0.0.1", 1) : "[(2, 0, 0, '', ('127.0.0.1', 1))]",
("127.0.0.1", 0, 0) : "[(2, 0, 0, '', ('127.0.0.1', 0))]",
("127.0.0.1", 0, 0, 0) : "[(2, 0, 0, '', ('127.0.0.1', 0))]",
("127.0.0.1", 0, 0, 0, 0) : "[(2, 0, 0, '', ('127.0.0.1', 0))]",
("127.0.0.1", 0, 0, 0, 0, 0) : "[(2, 0, 0, '', ('127.0.0.1', 0))]",
("127.0.0.1", 0, 0, 0, 0, 0) : "[(2, 0, 0, '', ('127.0.0.1', 0))]",
("127.0.0.1", 0, 0, 0, 0, 1) : "[(2, 0, 0, '', ('127.0.0.1', 0))]",
}
tmp = _socket.getaddrinfo("127.0.0.1", 0, 0, 0, -100000, 0)
tmp = _socket.getaddrinfo("127.0.0.1", 0, 0, 0, 100000, 0)
tmp = _socket.getaddrinfo("127.0.0.1", 0, 0, 0, 0, 0)
#just try them as-is
for params,value in joe.items():
addrinfo = _socket.getaddrinfo(*params)
self.assertEqual(repr(addrinfo), value)
#change the address family
for addr_fam in ["AF_INET", "AF_UNSPEC"]:
addrinfo = _socket.getaddrinfo("127.0.0.1",
0,
eval("_socket." + addr_fam),
0,
0,
0)
self.assertEqual(repr(addrinfo), "[(2, 0, 0, '', ('127.0.0.1', 0))]")
#change the _socket type
for socktype in ["SOCK_DGRAM", "SOCK_RAW", "SOCK_STREAM"]:
socktype = eval("_socket." + socktype)
addrinfo = _socket.getaddrinfo("127.0.0.1",
0,
0,
socktype,
0,
0)
self.assertEqual(repr(addrinfo), "[(2, " + str(socktype) + ", 0, '', ('127.0.0.1', 0))]")
#change the protocol
for proto in IPPROTO_DICT.keys():#["SOCK_DGRAM", "SOCK_RAW", "SOCK_STREAM"]:
try:
proto = eval("_socket." + proto)
except:
print(proto)
continue
addrinfo = _socket.getaddrinfo("127.0.0.1",
0,
0,
0,
proto,
0)
self.assertEqual(repr(addrinfo), "[(2, 0, " + str(proto) + ", '', ('127.0.0.1', 0))]")
#negative cases
#TODO - this actually passes on a Windows 7 machine...
#self.assertRaises(_socket.gaierror, _socket.getaddrinfo, "should never work.dfkdfjkkjdfkkdfjkdjf", 0)
self.assertRaises(_socket.gaierror, _socket.getaddrinfo, "1", 0)
if is_cli:
self.assertRaises(_socket.gaierror, _socket.getaddrinfo, ".", 0)
else:
self.assertRaises(UnicodeError, _socket.getaddrinfo, ".", 0)
self.assertRaises(_socket.error, _socket.getaddrinfo, "127.0.0.1", 3.14, 0, 0, 0, 0)
self.assertRaises(_socket.error, _socket.getaddrinfo, "127.0.0.1", 0, -1, 0, 0, 0)
self.assertRaises(_socket.error, _socket.getaddrinfo, "127.0.0.1", 0, 0, -1, 0, 0)
_socket.getaddrinfo("127.0.0.1", 0, 0, 0, 1000000, 0)
_socket.getaddrinfo("127.0.0.1", 0, 0, 0, -1000000, 0)
_socket.getaddrinfo("127.0.0.1", 0, 0, 0, 0, 0)
def test_getnameinfo(self):
'''Tests _socket.getnameinfo()'''
#sanity
_socket.getnameinfo(("127.0.0.1", 80), 8)
_socket.getnameinfo(("127.0.0.1", 80), 9)
host, service = _socket.getnameinfo( ("127.0.0.1", 80), 8)
self.assertEqual(service, '80')
host, service = _socket.getnameinfo( ("127.0.0.1", 80), 0)
self.assertEqual(service, "http")
#IP gives a TypeError
#self.assertRaises(SystemError, _socket.getnameinfo, ("127.0.0.1"), 8)
#self.assertRaises(SystemError, _socket.getnameinfo, (321), 8)
self.assertRaises(TypeError, _socket.getnameinfo, ("127.0.0.1"), '0')
self.assertRaises(TypeError, _socket.getnameinfo, ("127.0.0.1", 80, 0, 0, 0), 8)
self.assertRaises(_socket.gaierror, _socket.getnameinfo, ('no such host will ever exist', 80), 8)
def test_gethostbyaddr(self):
'''Tests _socket.gethostbyaddr'''
_socket.gethostbyaddr("localhost")
_socket.gethostbyaddr("127.0.0.1")
def test_gethostbyname(self):
'''Tests _socket.gethostbyname'''
#sanity
self.assertEqual(_socket.gethostbyname("localhost"), "127.0.0.1")
self.assertEqual(_socket.gethostbyname("127.0.0.1"), "127.0.0.1")
self.assertEqual(_socket.gethostbyname("<broadcast>"), "255.255.255.255")
#negative
self.assertRaises(_socket.gaierror, _socket.gethostbyname, "should never work")
def test_gethostbyname_ex(self):
'''Tests _socket.gethostbyname_ex'''
#sanity
joe = _socket.gethostbyname_ex("localhost")[2]
self.assertIn("127.0.0.1" , joe)
joe = _socket.gethostbyname_ex("127.0.0.1")[2]
self.assertIn("127.0.0.1", joe)
#negative
self.assertRaises(_socket.gaierror, _socket.gethostbyname_ex, "should never work")
def test_getservbyport(self):
self.assertEqual(_socket.getservbyport(80), "http")
def test_getservbyname(self):
self.assertEqual(_socket.getservbyname("http"), 80)
def test_inet_ntop(self):
'''Tests _socket.inet_ntop'''
#negative
self.assertRaises(ValueError, _socket.inet_ntop, _socket.AF_INET, b"garbage dkfjdkfjdkfj")
def test_inet_pton(self):
'''Tests _socket.inet_pton'''
#sanity
_socket.inet_pton(_socket.AF_INET, "127.0.0.1")
#negative
self.assertRaises(_socket.error, _socket.inet_pton, _socket.AF_INET, "garbage dkfjdkfjdkfj")
def test_getfqdn(self):
'''Tests _socket.getfqdn'''
#TODO
pass
def test_cp5814(self):
global EXIT_CODE
global HAS_EXITED
EXIT_CODE = -1
HAS_EXITED = False
portFile = os.path.join(self.temporary_dir, "cp5814port_%d" % os.getpid())
#Server code
server = """
from time import sleep
import _socket
import os
HOST = 'localhost'
PORT = 0
s = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM)
s.setsockopt(_socket.SOL_SOCKET, _socket.SO_REUSEADDR, 1) # prevents an "Address already in use" error when the socket is in a TIME_WAIT state
s.settimeout(20) # prevents the server from staying open if the client never connects
s.bind((HOST, PORT))
s.listen(1)
try:
with open(r"{PORTFILE}", "w") as f:
print(s.getsockname()[1], file=f)
fd, addr = s._accept()
conn = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM, fileno=fd)
#Whatever we get from the client, send it back.
data = conn.recv(1024)
conn.send(data)
#Verifications
if not addr[0] in [HOST, '127.0.0.1']:
raise Exception('The address, %s, was unexpected' % str(addr))
if data!=b'stuff':
raise Exception('%s!=stuff' % str(data))
sleep(10)
finally:
conn.close()
try:
os.remove(r"{PORTFILE}")
except:
pass
""".format(PORTFILE=portFile)
#Spawn off a thread to startup the server
def server_thread():
global EXIT_CODE
global HAS_EXITED
serverFile = os.path.join(self.temporary_dir, "cp5814server_%d.py" % os.getpid())
self.write_to_file(serverFile, server)
EXIT_CODE = os.system('"%s" %s' %
(sys.executable, serverFile))
HAS_EXITED = True
try:
os.remove(serverFile)
except:
pass
_thread.start_new_thread(server_thread, ())
#Give the server a chance to startup
portex = None
startTime = time.perf_counter()
for _ in range(20):
time.sleep(1)
if EXIT_CODE > 0:
self.fail("Server died with exit code %d" % EXIT_CODE)
try:
with open(portFile) as f:
PORT = int(f.read())
break
except Exception as ex:
portex = ex
else:
duration = time.perf_counter() - startTime
self.fail("Server not detected after trying for %g s, last detection attempt resulted in %r" % (duration, portex))
#Client
HOST = 'localhost'
s = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM)
s.connect((HOST, PORT))
s.send(b"stuff")
data, addr = s.recvfrom(1024)
s.close()
#Ensure the server didn't die
for i in range(100):
if not HAS_EXITED:
print("*", end="")
time.sleep(1)
else:
self.assertEqual(EXIT_CODE, 0)
break
self.assertTrue(HAS_EXITED)
#Verification
self.assertEqual(data, b"stuff")
if is_cli:
self.assertEqual(addr[0], "0.0.0.0")
else:
self.assertEqual(addr[0], 0)
import socket
class SocketMakefileTest(IronPythonTestCase):
def test_misc(self):
f = socket.socket().makefile()
f.bufsize = 4096
self.assertEqual(4096, f.bufsize)
def test_makefile_refcount(self):
"Ensures that the _socket stays open while there's still a file associated"
global PORT
def echoer():
global PORT
s = socket.socket()
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # prevents an "Address already in use" error when the socket is in a TIME_WAIT state
s.settimeout(15) # prevents the server from staying open if the client never connects
s.bind(('localhost', 0))
PORT = s.getsockname()[1]
s.listen(5)
(s2, ignore) = s.accept()
s2.send(s2.recv(10))
_thread.start_new_thread(echoer, ())
time.sleep(1)
s = socket.socket()
s.connect(('localhost', PORT))
f1 = s.makefile('r')
f2 = s.makefile('w')
s.close()
test_msg = 'abc\n'
f2.write(test_msg)
f2.flush()
str = f1.readline()
self.assertEqual(str, test_msg)
def test_cp7451(self):
global EXIT_CODE
global HAS_EXITED
EXIT_CODE = -1
HAS_EXITED = False
portFile = os.path.join(self.temporary_dir, "cp7451port_%d" % os.getpid())
#Server code
server = """
from time import sleep
import socket as _socket
import os
HOST = 'localhost'
PORT = 0
s = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM)
s.setsockopt(_socket.SOL_SOCKET, _socket.SO_REUSEADDR, 1) # prevents an "Address already in use" error when the socket is in a TIME_WAIT state
s.settimeout(20) # prevents the server from staying open if the client never connects
s.bind((HOST, PORT))
s.listen(1)
try:
with open(r"{PORTFILE}", "w") as f:
print(s.getsockname()[1], file=f)
conn, addr = s.accept()
#Whatever we get from the client, send it back.
data = conn.recv(1024)
conn.send(data)
#Verifications
if not addr[0] in [HOST, '127.0.0.1']:
raise Exception('The address, %s, was unexpected' % str(addr))
if data!=b'stuff2':
raise Exception('%s!=stuff2' % str(data))
sleep(10)
finally:
conn.close()
try:
os.remove(r"{PORTFILE}")
except:
pass
""".format(PORTFILE=portFile)
#Spawn off a thread to startup the server
def server_thread():
global EXIT_CODE
global HAS_EXITED
serverFile = os.path.join(self.temporary_dir, "cp7451server_%d.py" % os.getpid())
self.write_to_file(serverFile, server)
EXIT_CODE = os.system('"%s" %s' %
(sys.executable, serverFile))
HAS_EXITED = True
try:
os.remove(serverFile)
except:
pass
_thread.start_new_thread(server_thread, ())
#Give the server a chance to startup
portex = None
startTime = time.perf_counter()
for _ in range(20):
time.sleep(1)
if EXIT_CODE > 0:
self.fail("Server died with exit code %d" % EXIT_CODE)
try:
with open(portFile) as f:
PORT = int(f.read())
break
except Exception as ex:
portex = ex
else:
duration = time.perf_counter() - startTime
self.fail("Server not detected after trying for %g s, last detection attempt resulted in %r" % (duration, portex))
#Client
HOST = 'localhost'
s = socket.socket()
s.connect((HOST, PORT))
s.send(b"stuff2")
f = s.makefile()
s.close()
#Ensure the server didn't die
for i in range(100):
if not HAS_EXITED:
print("*", end="")
time.sleep(1)
else:
self.assertEqual(EXIT_CODE, 0)
break
self.assertTrue(HAS_EXITED)
#Verification
self.assertEqual(f.read(6), "stuff2")
run_test(__name__)
| 35.084158 | 152 | 0.499976 |
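The client/server pattern the tests above exercise is a short echo round trip. A stripped-down sketch using only the standard socket module, simplified from the test code (no subprocess, no port file):
import socket
import threading
def echo_once(server_sock):
    conn, _ = server_sock.accept()
    conn.send(conn.recv(1024))   # echo whatever arrives
    conn.close()
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind(('localhost', 0))    # port 0: let the OS pick a free port
server.listen(1)
threading.Thread(target=echo_once, args=(server,)).start()
client = socket.socket()
client.connect(('localhost', server.getsockname()[1]))
client.send(b'stuff')
assert client.recv(1024) == b'stuff'
client.close()
server.close()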
56d390248ef54c3700b1186650bb902d847c34e0 | 2,108 | py | Python | src/fts3/rest/client/easy/ban.py | Jar-win/fts-rest | 4db0880cf328037b8587b4d16741c40959b47ad2 | [
"Apache-2.0"
] | 1 | 2018-08-28T11:28:09.000Z | 2018-08-28T11:28:09.000Z | src/fts3/rest/client/easy/ban.py | Jar-win/fts-rest | 4db0880cf328037b8587b4d16741c40959b47ad2 | [
"Apache-2.0"
] | 13 | 2019-06-17T13:24:21.000Z | 2022-02-03T16:28:10.000Z | src/fts3/rest/client/easy/ban.py | Jar-win/fts-rest | 4db0880cf328037b8587b4d16741c40959b47ad2 | [
"Apache-2.0"
] | 3 | 2018-11-29T12:16:29.000Z | 2021-02-25T09:16:47.000Z | # Copyright notice:
# Copyright CERN, 2014.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fts3.rest.client import Ban
def ban_se(context, storage, status='cancel', timeout=0, allow_submit=False):
"""
Ban a storage element
Args:
context: fts3.rest.client.context.Context instance
storage: The storage to ban
status: The status of the banning: cancel or wait (leave queued jobs for some time)
timeout: The wait timeout (0 means leave the queued jobs until they are done)
allow_submit: If True, submissions will be accepted. Only meaningful if status=active
Returns:
List of job ids affected by the banning
"""
ban = Ban(context)
return ban.ban_se(storage, status, timeout, allow_submit)
def ban_dn(context, dn):
"""
Ban a user
Args:
context: fts3.rest.client.context.Context instance
dn: The dn of the user to be banned
Returns:
List of job ids affected by the banning
"""
ban = Ban(context)
return ban.ban_dn(dn)
def unban_se(context, storage):
"""
Unban a storage element
Args:
context: fts3.rest.client.context.Context instance
storage: The storage to unban
Returns:
Nothing
"""
ban = Ban(context)
return ban.unban_se(storage)
def unban_dn(context, dn):
"""
Unban a user
Args:
context: fts3.rest.client.context.Context instance
dn: The dn of the user to be unbanned
Returns:
Nothing
"""
ban = Ban(context)
ban.unban_dn(dn)
| 27.376623 | 93 | 0.663188 |
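A hedged usage sketch of the helpers above. The endpoint, storage URL and credentials handling are placeholders; the Context import path follows the docstrings, and the ban module path follows this file's location:
# Placeholder values only; not a real FTS3 endpoint or storage element.
from fts3.rest.client.context import Context
from fts3.rest.client.easy.ban import ban_se, unban_se
context = Context('https://fts3.example.org:8446')
affected_jobs = ban_se(context, 'gsiftp://storage.example.org',
                       status='wait', timeout=3600)
print(affected_jobs)
unban_se(context, 'gsiftp://storage.example.org')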
4f86cde4f58465d0a6e5c0a0642a092cf9bc9978 | 2,162 | py | Python | hoopa/commands/cmdline.py | fishtn/hoopa | 1742097c76b4ad4880bd22b87ee89be8490e2b24 | [
"Apache-2.0"
] | 9 | 2021-04-12T03:21:11.000Z | 2022-01-06T07:51:11.000Z | hoopa/commands/cmdline.py | fishtn/hoopa | 1742097c76b4ad4880bd22b87ee89be8490e2b24 | [
"Apache-2.0"
] | 3 | 2021-04-14T06:58:00.000Z | 2021-06-17T03:25:34.000Z | hoopa/commands/cmdline.py | fishtn/hoopa | 1742097c76b4ad4880bd22b87ee89be8490e2b24 | [
"Apache-2.0"
] | 3 | 2021-04-20T09:03:51.000Z | 2022-01-06T07:51:19.000Z | import sys
import cProfile
from hoopa.exceptions import UsageError
from hoopa.commands.create import CreateCommand
def _pop_command_name(argv):
i = 0
for arg in argv[1:]:
if not arg.startswith('-'):
del argv[i]
return arg
i += 1
def _print_unknown_command(cmd_name):
print("Unknown command: %s\n" % cmd_name)
print('Use "hoopa" to see available commands')
def _run_print_help(parser, func, *a, **kw):
try:
func(*a, **kw)
except UsageError as e:
if str(e):
parser.error(str(e))
if e.print_help:
parser.print_help()
sys.exit(2)
def _run_command(cmd, args, opts):
if opts.profile:
_run_command_profiled(cmd, args, opts)
else:
cmd.run(args, opts)
def _run_command_profiled(cmd, args, opts):
if opts.profile:
        sys.stderr.write("hoopa: writing cProfile stats to %r\n" % opts.profile)
loc = locals()
p = cProfile.Profile()
p.runctx('cmd.run(args, opts)', globals(), loc)
if opts.profile:
p.dump_stats(opts.profile)
def _print_commands():
# with open(join(dirname(dirname(__file__)), "VERSION"), "rb") as f:
# version = f.read().decode("ascii").strip()
#
# print("hoopa {}".format(version))
print("Usage:")
print(" hoopa <command> [options] [args]\n")
print("Available commands:")
    cmd_list = {"create": "create project, spider, item and so on"}
for cmd_name, cmd_class in sorted(cmd_list.items()):
print(" %-13s %s" % (cmd_name, cmd_class))
print('Use "hoopa <command> -h" to see more info about a command')
def execute(argv=None):
if argv is None:
argv = sys.argv
if len(argv) < 2:
_print_commands()
return
cmd_name = argv.pop(1)
cmd_list = {
"create": CreateCommand
}
if not cmd_name:
_print_commands()
sys.exit(0)
elif cmd_name not in cmd_list:
_print_unknown_command(cmd_name)
sys.exit(2)
cmd = cmd_list[cmd_name]()
cmd.add_arguments()
cmd.run_cmd()
sys.exit()
if __name__ == '__main__':
execute()
| 22.757895 | 81 | 0.600833 |
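execute() above dispatches on the first non-option argument; the behaviour for a few representative command lines, taken from the code and the help text it prints, is:
# $ hoopa                # no command -> _print_commands() lists "create"
# $ hoopa create -h      # "create" -> CreateCommand().add_arguments() / run_cmd()
# $ hoopa unknown        # anything else -> _print_unknown_command() and exit code 2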
95147e4ab11dff09859168faa0a31a6f42f473b3 | 5,292 | py | Python | Proyecto/1_red_neuronal/curva_aprendizaje.py | Rasan98/AA | 0d755f3564483649dc1cfa9e127f4f66dcb533f5 | [
"MIT"
] | null | null | null | Proyecto/1_red_neuronal/curva_aprendizaje.py | Rasan98/AA | 0d755f3564483649dc1cfa9e127f4f66dcb533f5 | [
"MIT"
] | null | null | null | Proyecto/1_red_neuronal/curva_aprendizaje.py | Rasan98/AA | 0d755f3564483649dc1cfa9e127f4f66dcb533f5 | [
"MIT"
] | null | null | null | #0 normal, 1 neumonía
import numpy as np
import matplotlib.pyplot as plt
from scipy.io import loadmat
from scipy.io import savemat
import scipy.optimize as opt
def sigmoide(X):
#print(np.ravel(X)[np.argmax(X)])
return 1/(1+np.exp(-X))
def pesosAleatorios(L_in, L_out):
    # Random weight initialization in the Glorot/Xavier uniform range, bias column included
    eini = np.sqrt(6)/np.sqrt(L_in + L_out)
    aux = np.random.uniform(-eini,eini,(L_in+1)*L_out)
    return np.reshape(aux, (L_out,L_in + 1))
def forwprop(theta1, theta2, X):
a1 = X
z2 = np.dot(theta1, np.transpose(a1))
a2 = sigmoide(z2)
a2 = np.vstack((np.ones(np.shape(a2)[1]), a2))
z3 = np.dot(theta2, a2)
a3 = sigmoide(z3)
return a2.transpose(), a3.transpose()
def coste(theta1, theta2, m, y, lda, H):
aux = (-y*np.log((H + 1e-10))) - ((1-y)*np.log((1-H + 1e-10)))
aux = (1 / m) * np.sum(aux)
aux2 = np.sum(theta1[:,1:] ** 2) + np.sum(theta2[:,1:] ** 2)
aux2 = (aux2*lda)/(2*m)
c = aux + aux2
print(c)
return c
def backprop_rec(params_rn, num_entradas, num_ocultas, num_etiquetas, X, y, reg):
theta1 = np.reshape(params_rn[: (num_ocultas * (num_entradas + 1))], (num_ocultas, (num_entradas+1)))
theta2 = np.reshape(params_rn[num_ocultas * (num_entradas + 1):], (num_etiquetas, (num_ocultas + 1)))
m = X.shape[0]
a1 = np.hstack([np.ones([m, 1]), X])
a2, h = forwprop(theta1, theta2, a1)
cost = coste(theta1, theta2, m, y, reg, h)
delta3 = h - y
delta2 = np.dot(theta2.transpose(), delta3.transpose()).transpose() * (a2 * (1-a2))
delta2 = delta2[:,1:]
inc1 = np.dot(delta2.transpose(), a1)
inc2 = np.dot(delta3.transpose(), a2)
D1 = inc1/m
D1[:,1:] = D1[:,1:] + (reg/m)*theta1[:,1:]
D2 = inc2/m
D2[:,1:] = D2[:,1:] + (reg/m)*theta2[:,1:]
#print(cost)
return cost, np.concatenate((np.ravel(D1), np.ravel(D2)))
def fun(h, etiq):
return np.argmax(h) == etiq
def calculate_precision(theta1, theta2, X, Y):
a1 = np.hstack([np.ones([len(X), 1]), X])
_ , h = forwprop(theta1, theta2, a1)
aux = [fun(h[i], Y[i][0]) for i in range(len(X))]
return np.sum(aux)/len(X)
def codificaY(Y, num_etiquetas):
    # One-hot encode the label vector into an (m, num_labels) matrix
    Yp = np.zeros((Y.shape[0], num_etiquetas))
    Yp[[np.arange(Y.shape[0])], Y[:,0]] = 1
    return Yp
def calc_norm(X):
    # Z-score normalization; also returns the means/stds so they can be reused
    medias = np.mean(X,0)
    desv = np.std(X,0)
    Xnorm = (X-medias)/desv
    return Xnorm, medias, desv
def aplica_norm(X, medias, desv):
    # Apply a previously computed normalization (e.g. to the validation split)
    Xnorm = (X-medias)/desv
    return Xnorm
def divide_data(X, Y, fact):
    # Keep roughly 1/fact of each class; examples are assumed ordered with class 0 first
    sep = np.where(Y == 1)[1][0]
    newX = X[0:int(sep//fact), :]
    newY = Y[0, 0:int(sep//fact)]
    newX = np.vstack([newX,X[sep:sep + int((X.shape[0]-sep)//fact)]])
    newY = np.hstack([newY, np.ones(int((X.shape[0]-sep)//fact))])
    return newX, np.array([newY])
print("Loading data")
data = loadmat("..\\60_20_20_data300.mat")
print("Data loaded")
Xtrain = data['xtrain']
Ytrain = data['ytrain']
Xtrain, Ytrain = divide_data(Xtrain, Ytrain, 3)  # Min: 1.7 with data256; keep 1/3 of each class
Ytrain = Ytrain.transpose()
Ytrain = Ytrain.astype(int)
print("Normalizing xtrain")
Xtrain, medias, desv = calc_norm(Xtrain)
print("xtrain normalized")
num_etiquetas = 2
Ytrain = codificaY(Ytrain,num_etiquetas)
num_entradas = Xtrain.shape[1]
num_ocultas = 150
params_1 = pesosAleatorios(num_entradas, num_ocultas)
params_2 = pesosAleatorios(num_ocultas, num_etiquetas)
params_rn = np.concatenate((np.ravel(params_1), np.ravel(params_2)))
reg = 0.1
Xval = data["xval"]
Yval = data["yval"]
Yval = Yval.transpose()
Yval = Yval.astype(int)
Yval = codificaY(Yval, num_etiquetas)
print("Normalizing xval")
Xval = aplica_norm(Xval, medias, desv)
print("xval normalized")
Hs = np.array([])
ErrTrains = np.array([])
ErrVals = np.array([])
nms = np.arange(1, np.minimum(len(Xtrain), len(Xval)), np.ceil(np.minimum(len(Xtrain), len(Xval))/10))
nms = nms.astype("int")
for i in nms:
auxXtrain = Xtrain[:i,:]
auxYtrain = Ytrain[:i,:]
auxXval = Xval[:i,:]
auxYval = Yval[:i,:]
print(" Training with " + str(i))
res = opt.minimize(fun=backprop_rec, x0=params_rn, args=(num_entradas, num_ocultas, num_etiquetas, auxXtrain, auxYtrain, reg),
method="TNC", jac = True, options={"maxiter":70})
print(" Training end")
thetas = res.x
theta1 = np.reshape(thetas[:(num_ocultas * (num_entradas + 1))], (num_ocultas, (num_entradas+1)))
theta2 = np.reshape(thetas[num_ocultas * (num_entradas + 1):], (num_etiquetas, (num_ocultas + 1)))
_ , H = forwprop(theta1, theta2, np.hstack([np.ones([len(auxXval), 1]), auxXval]))
_ , Htrain = forwprop(theta1, theta2, np.hstack([np.ones([len(auxXtrain), 1]), auxXtrain]))
valErr = np.sum((H - auxYval)**2)*(1/(2*auxYval.shape[0])) #pylint: disable=unsubscriptable-object
trainErr = np.sum((Htrain - auxYtrain)**2)*(1/(2*auxYtrain.shape[0])) #pylint: disable=unsubscriptable-object
ErrTrains = np.concatenate((ErrTrains,np.array([trainErr])))
ErrVals = np.concatenate((ErrVals,np.array([valErr])))
plt.figure()
plt.plot(nms, ErrTrains, c="blue", label="Train", linestyle='-')
plt.plot(nms, ErrVals, c="orange", label="Cross validation", linestyle='-')
plt.legend()
plt.xlabel("Number of training examples")
plt.ylabel("Error")
plt.title("reg = 0.1")
plt.savefig("Curva.png")
print("Fin" * 5) | 32.869565 | 130 | 0.630197 |
37819c86d256d7a8f12eb393cfbc2c25c39dc676 | 13,270 | py | Python | brainstorm/layers/clockwork_lstm_layer.py | PyCN/brainstorm | 8f1fc886faf268b25085fa5c95bf106b1805d766 | [
"MIT"
] | 1,473 | 2015-10-25T19:12:45.000Z | 2022-03-13T01:00:51.000Z | brainstorm/layers/clockwork_lstm_layer.py | PyCN/brainstorm | 8f1fc886faf268b25085fa5c95bf106b1805d766 | [
"MIT"
] | 50 | 2015-10-25T19:14:17.000Z | 2018-10-03T07:48:25.000Z | brainstorm/layers/clockwork_lstm_layer.py | PyCN/brainstorm | 8f1fc886faf268b25085fa5c95bf106b1805d766 | [
"MIT"
] | 209 | 2015-10-25T20:22:06.000Z | 2021-07-23T00:00:39.000Z | #!/usr/bin/env python
# coding=utf-8
from __future__ import division, print_function, unicode_literals
from collections import OrderedDict
from brainstorm.structure.construction import ConstructionWrapper
from brainstorm.utils import LayerValidationError, flatten_time, \
flatten_time_and_features
from brainstorm.layers.base_layer import Layer
from brainstorm.structure.buffer_structure import BufferStructure, \
StructureTemplate
def ClockworkLstm(size, activation='tanh', name=None):
return ConstructionWrapper.create(ClockworkLstmLayerImpl,
size=size,
name=name,
activation=activation)
class ClockworkLstmLayerImpl(Layer):
expected_kwargs = {'size', 'activation'}
expected_inputs = {'default': StructureTemplate('T', 'B', '...')}
computes_no_gradients_for = ['timing']
def setup(self, kwargs, in_shapes):
self.activation = kwargs.get('activation', 'tanh')
self.size = kwargs.get('size', in_shapes['default'].feature_size)
if not isinstance(self.size, int):
raise LayerValidationError('size must be int but was {}'.
format(self.size))
in_size = in_shapes['default'].feature_size
outputs = OrderedDict()
outputs['default'] = BufferStructure('T', 'B', self.size,
context_size=1)
parameters = OrderedDict()
parameters['Wz'] = BufferStructure(self.size, in_size)
parameters['Wi'] = BufferStructure(self.size, in_size)
parameters['Wf'] = BufferStructure(self.size, in_size)
parameters['Wo'] = BufferStructure(self.size, in_size)
parameters['pi'] = BufferStructure(1, self.size)
parameters['pf'] = BufferStructure(1, self.size)
parameters['po'] = BufferStructure(1, self.size)
parameters['Rz'] = BufferStructure(self.size, self.size)
parameters['Ri'] = BufferStructure(self.size, self.size)
parameters['Rf'] = BufferStructure(self.size, self.size)
parameters['Ro'] = BufferStructure(self.size, self.size)
parameters['bz'] = BufferStructure(self.size)
parameters['bi'] = BufferStructure(self.size)
parameters['bf'] = BufferStructure(self.size)
parameters['bo'] = BufferStructure(self.size)
parameters['timing'] = BufferStructure(self.size)
internals = OrderedDict()
internals['Za'] = BufferStructure('T', 'B', self.size, context_size=1)
internals['Zb'] = BufferStructure('T', 'B', self.size, context_size=1)
internals['Ia'] = BufferStructure('T', 'B', self.size, context_size=1)
internals['Ib'] = BufferStructure('T', 'B', self.size, context_size=1)
internals['Fa'] = BufferStructure('T', 'B', self.size, context_size=1)
internals['Fb'] = BufferStructure('T', 'B', self.size, context_size=1)
internals['Oa'] = BufferStructure('T', 'B', self.size, context_size=1)
internals['Ob'] = BufferStructure('T', 'B', self.size, context_size=1)
internals['Ca'] = BufferStructure('T', 'B', self.size, context_size=1)
internals['Cb'] = BufferStructure('T', 'B', self.size, context_size=1)
internals['dZa'] = BufferStructure('T', 'B', self.size, context_size=1,
is_backward_only=True)
internals['dZb'] = BufferStructure('T', 'B', self.size, context_size=1,
is_backward_only=True)
internals['dIa'] = BufferStructure('T', 'B', self.size, context_size=1,
is_backward_only=True)
internals['dIb'] = BufferStructure('T', 'B', self.size, context_size=1,
is_backward_only=True)
internals['dFa'] = BufferStructure('T', 'B', self.size, context_size=1,
is_backward_only=True)
internals['dFb'] = BufferStructure('T', 'B', self.size, context_size=1,
is_backward_only=True)
internals['dOa'] = BufferStructure('T', 'B', self.size, context_size=1,
is_backward_only=True)
internals['dOb'] = BufferStructure('T', 'B', self.size, context_size=1,
is_backward_only=True)
internals['dCa'] = BufferStructure('T', 'B', self.size, context_size=1,
is_backward_only=True)
internals['dCb'] = BufferStructure('T', 'B', self.size, context_size=1,
is_backward_only=True)
return outputs, parameters, internals
def forward_pass(self, buffers, training_pass=True):
# prepare
_h = self.handler
(Wz, Wi, Wf, Wo,
pi, pf, po,
Rz, Ri, Rf, Ro,
bz, bi, bf, bo,
timing) = buffers.parameters
(Za, Zb, Ia, Ib, Fa, Fb, Oa, Ob, Ca, Cb,
dZa, dZb, dIa, dIb, dFa, dFb, dOa, dOb, dCa, dCb) = buffers.internals
x = buffers.inputs.default
y = buffers.outputs.default
time_size, batch_size = x.shape[0], x.shape[1]
# Temporary variable to be filled with the current value of time t
tmp = _h.zeros(timing.shape)
cond = _h.zeros(y[0].shape)
flat_x = flatten_time_and_features(x)
flat_Za = flatten_time(Za[:-1])
flat_Ia = flatten_time(Ia[:-1])
flat_Fa = flatten_time(Fa[:-1])
flat_Oa = flatten_time(Oa[:-1])
_h.dot_mm(flat_x, Wz, flat_Za, transb=True)
_h.dot_mm(flat_x, Wi, flat_Ia, transb=True)
_h.dot_mm(flat_x, Wf, flat_Fa, transb=True)
_h.dot_mm(flat_x, Wo, flat_Oa, transb=True)
for t in range(time_size):
# Block input
_h.dot_add_mm(y[t - 1], Rz, Za[t], transb=True)
_h.add_mv(Za[t], bz.reshape((1, self.size)), Za[t])
_h.act_func[self.activation](Za[t], Zb[t])
# Input Gate
_h.dot_add_mm(y[t - 1], Ri, Ia[t], transb=True)
_h.mult_add_mv(Ca[t - 1], pi, Ia[t]) # ADDED PEEPHOLE CONNECTION
_h.add_mv(Ia[t], bi.reshape((1, self.size)), Ia[t])
_h.sigmoid(Ia[t], Ib[t])
# Forget Gate
_h.dot_add_mm(y[t - 1], Rf, Fa[t], transb=True)
_h.mult_add_mv(Ca[t - 1], pf, Fa[t]) # ADDED PEEPHOLE CONNECTION
_h.add_mv(Fa[t], bf.reshape((1, self.size)), Fa[t])
_h.sigmoid(Fa[t], Fb[t])
# Cell
_h.mult_tt(Ib[t], Zb[t], Ca[t])
_h.mult_add_tt(Fb[t], Ca[t - 1], Ca[t])
# Output Gate
_h.dot_add_mm(y[t - 1], Ro, Oa[t], transb=True)
_h.mult_add_mv(Ca[t], po, Oa[t]) # ADDED PEEPHOLE CONNECTION
_h.add_mv(Oa[t], bo.reshape((1, self.size)), Oa[t])
_h.sigmoid(Oa[t], Ob[t])
# Block output
_h.act_func[self.activation](Ca[t], Cb[t])
_h.mult_tt(Ob[t], Cb[t], y[t])
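# Clockwork gating: units whose clock period ("timing") does not evenly divide t are inactive at
# this step and simply carry over their previous cell state and block output.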
if t > 0:
_h.fill(tmp, t)
_h.modulo_tt(tmp, timing, tmp)
_h.broadcast_t(tmp.reshape((1, tmp.shape[0])), 0, cond)
# Reset Cell
_h.copy_to_if(Ca[t-1], Ca[t], cond)
# Reset Block output
_h.copy_to_if(y[t-1], y[t], cond)
def backward_pass(self, buffers):
# prepare
_h = self.handler
(dWz, dWi, dWf, dWo,
dpi, dpf, dpo,
dRz, dRi, dRf, dRo,
dbz, dbi, dbf, dbo,
dtiming) = buffers.gradients
(Wz, Wi, Wf, Wo,
pi, pf, po,
Rz, Ri, Rf, Ro,
bz, bi, bf, bo,
timing) = buffers.parameters
(Za, Zb, Ia, Ib, Fa, Fb, Oa, Ob, Ca, Cb,
dZa, dZb, dIa, dIb, dFa, dFb, dOa, dOb, dCa, dCb) = buffers.internals
x = buffers.inputs.default
dx = buffers.input_deltas.default
y = buffers.outputs.default
deltas = buffers.output_deltas.default
dy = _h.allocate(y.shape)
time_size, batch_size = x.shape[0], x.shape[1]
# Temporary variable to be filled with the current value of time t
tmp = _h.zeros(timing.shape)
_h.fill(dCa, 0.0)
cond = _h.zeros(y[0].shape)
for t in range(time_size - 1, -1, - 1):
# Accumulate recurrent deltas
_h.add_tt(dy[t], deltas[t], dy[t])
_h.fill(tmp, t)
_h.modulo_tt(tmp, timing, tmp)
_h.broadcast_t(tmp.reshape((1, tmp.shape[0])), 0, cond)
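# cond marks the units that were inactive at step t; their output/cell gradients are passed back to
# the previous time step and their gate gradients are zeroed further below.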
_h.dot_add_mm(dIa[t + 1], Ri, dy[t])
_h.dot_add_mm(dFa[t + 1], Rf, dy[t])
_h.dot_add_mm(dOa[t + 1], Ro, dy[t])
_h.dot_add_mm(dZa[t + 1], Rz, dy[t])
_h.mult_add_mv(dIa[t + 1], pi, dCa[t])
_h.mult_add_mv(dFa[t + 1], pf, dCa[t])
# Output Gate
_h.mult_tt(dy[t], Cb[t], dOb[t])
_h.fill_if(dOb[t], 0, cond) # Set inactive to 0
_h.sigmoid_deriv(Oa[t], Ob[t], dOb[t], dOa[t])
# Output influence on peephole:
_h.mult_add_mv(dOa[t], po, dCa[t])
# Cell
_h.mult_tt(dy[t], Ob[t], dCb[t])
_h.act_func_deriv[self.activation](Ca[t], Cb[t], dCb[t], dCb[t])
_h.fill_if(dCb[t], 0, cond)
_h.add_tt(dCa[t], dCb[t], dCa[t])
_h.mult_add_tt(dCa[t + 1], Fb[t + 1], dCa[t])
# Forget Gate
_h.mult_tt(dCa[t], Ca[t - 1], dFb[t])
_h.sigmoid_deriv(Fa[t], Fb[t], dFb[t], dFa[t])
# Input Gate
_h.mult_tt(dCa[t], Zb[t], dIb[t])
_h.sigmoid_deriv(Ia[t], Ib[t], dIb[t], dIa[t])
# Block Input
_h.mult_tt(dCa[t], Ib[t], dZb[t])
_h.act_func_deriv[self.activation](Za[t], Zb[t], dZb[t], dZa[t])
# Copy over the error from previous inactive nodes
_h.add_into_if(dy[t], dy[t-1], cond)
_h.add_into_if(dCa[t], dCa[t-1], cond)
# Undo updates to inactive nodes:
_h.fill_if(dIa[t], 0, cond)
_h.fill_if(dFa[t], 0, cond)
_h.fill_if(dZa[t], 0, cond)
_h.fill_if(Fb[t], 0, cond)
# Same as for standard RNN:
flat_inputs = flatten_time_and_features(x)
flat_dinputs = flatten_time_and_features(dx)
flat_dIa = flatten_time(dIa[:-1])
flat_dFa = flatten_time(dFa[:-1])
flat_dOa = flatten_time(dOa[:-1])
flat_dZa = flatten_time(dZa[:-1])
# calculate in_deltas and gradients
_h.dot_add_mm(flat_dIa, Wi, flat_dinputs)
_h.dot_add_mm(flat_dFa, Wf, flat_dinputs)
_h.dot_add_mm(flat_dOa, Wo, flat_dinputs)
_h.dot_add_mm(flat_dZa, Wz, flat_dinputs)
_h.dot_add_mm(flat_dIa, flat_inputs, dWi, transa=True)
_h.dot_add_mm(flat_dFa, flat_inputs, dWf, transa=True)
_h.dot_add_mm(flat_dOa, flat_inputs, dWo, transa=True)
_h.dot_add_mm(flat_dZa, flat_inputs, dWz, transa=True)
dbias_tmp = _h.allocate(dbz.shape)
_h.sum_t(flat_dIa, axis=0, out=dbias_tmp)
_h.add_tt(dbi, dbias_tmp, dbi)
_h.sum_t(flat_dFa, axis=0, out=dbias_tmp)
_h.add_tt(dbf, dbias_tmp, dbf)
_h.sum_t(flat_dOa, axis=0, out=dbias_tmp)
_h.add_tt(dbo, dbias_tmp, dbo)
_h.sum_t(flat_dZa, axis=0, out=dbias_tmp)
_h.add_tt(dbz, dbias_tmp, dbz)
flat_outputs = flatten_time(y[:-2])
flat_cell = flatten_time(Ca[:-2])
flat_cell2 = flatten_time(Ca[:-1])
dWco_tmp = _h.allocate(flat_cell2.shape)
dWc_tmp = _h.allocate(dpo.shape)
# Peephole connection output weight:
_h.mult_tt(flat_cell2, flat_dOa, dWco_tmp)
_h.sum_t(dWco_tmp, axis=0, out=dWc_tmp)
_h.add_tt(dpo, dWc_tmp, dpo)
flat_dIa = flatten_time(dIa[1:-1])
flat_dFa = flatten_time(dFa[1:-1])
flat_dOa = flatten_time(dOa[1:-1])
flat_dZa = flatten_time(dZa[1:-1])
_h.dot_add_mm(flat_dIa, flat_outputs, dRi, transa=True)
_h.dot_add_mm(flat_dFa, flat_outputs, dRf, transa=True)
_h.dot_add_mm(flat_dOa, flat_outputs, dRo, transa=True)
_h.dot_add_mm(flat_dZa, flat_outputs, dRz, transa=True)
_h.dot_add_mm(dIa[0], dy[-1], dRi, transa=True)
_h.dot_add_mm(dFa[0], dy[-1], dRf, transa=True)
_h.dot_add_mm(dOa[0], dy[-1], dRo, transa=True)
_h.dot_add_mm(dZa[0], dy[-1], dRz, transa=True)
# Other Peephole connections
dWcif_tmp = _h.allocate(flat_cell.shape)
_h.mult_tt(flat_cell, flat_dIa, dWcif_tmp)
_h.sum_t(dWcif_tmp, axis=0, out=dWc_tmp)
_h.add_tt(dpi, dWc_tmp, dpi)
_h.mult_tt(flat_cell, flat_dFa, dWcif_tmp)
_h.sum_t(dWcif_tmp, axis=0, out=dWc_tmp)
_h.add_tt(dpf, dWc_tmp, dpf)
dWcif_tmp = _h.allocate(dIa[0].shape)
_h.mult_tt(dCa[-1], dIa[0], dWcif_tmp)
_h.sum_t(dWcif_tmp, axis=0, out=dWc_tmp)
_h.add_tt(dpi, dWc_tmp, dpi)
_h.mult_tt(dCa[-1], dIa[0], dWcif_tmp)
_h.sum_t(dWcif_tmp, axis=0, out=dWc_tmp)
_h.add_tt(dpf, dWc_tmp, dpf)
| 40.58104 | 79 | 0.562622 |
388609561d7c4f1d914adb33c56e63fb8108b237 | 13,660 | py | Python | src/tests/base/test_cancelevent.py | snadal/pretix | 430ccece9a3af6fd93c51626a9551ef79cee8002 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/tests/base/test_cancelevent.py | snadal/pretix | 430ccece9a3af6fd93c51626a9551ef79cee8002 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/tests/base/test_cancelevent.py | snadal/pretix | 430ccece9a3af6fd93c51626a9551ef79cee8002 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | from datetime import timedelta
from decimal import Decimal
from django.core import mail as djmail
from django.test import TestCase
from django.utils.timezone import now
from django_scopes import scope
from pretix.base.models import Event, Item, Order, OrderPosition, Organizer
from pretix.base.models.orders import OrderFee, OrderPayment, OrderRefund
from pretix.base.services.cancelevent import cancel_event
from pretix.base.services.invoices import generate_invoice
from pretix.testutils.scope import classscope
class EventCancelTests(TestCase):
def setUp(self):
super().setUp()
self.o = Organizer.objects.create(name='Dummy', slug='dummy')
with scope(organizer=self.o):
self.event = Event.objects.create(organizer=self.o, name='Dummy', slug='dummy', date_from=now(),
plugins='tests.testdummy')
self.order = Order.objects.create(
code='FOO', event=self.event, email='dummy@dummy.test',
status=Order.STATUS_PENDING, locale='en',
datetime=now(), expires=now() + timedelta(days=10),
total=Decimal('46.00'),
)
self.ticket = Item.objects.create(event=self.event, name='Early-bird ticket',
default_price=Decimal('23.00'), admission=True)
self.op1 = OrderPosition.objects.create(
order=self.order, item=self.ticket, variation=None,
price=Decimal("23.00"), attendee_name_parts={'full_name': "Peter"}, positionid=1
)
self.op2 = OrderPosition.objects.create(
order=self.order, item=self.ticket, variation=None,
price=Decimal("23.00"), attendee_name_parts={'full_name': "Dieter"}, positionid=2
)
generate_invoice(self.order)
djmail.outbox = []
@classscope(attr='o')
def test_cancel_send_mail(self):
cancel_event(
self.event.pk, subevent=None,
auto_refund=True, keep_fee_fixed="0.00", keep_fee_percentage="0.00",
keep_fees=True, send=True, send_subject="Event canceled", send_message="Event canceled :-(",
user=None
)
assert len(djmail.outbox) == 1
self.order.refresh_from_db()
assert self.order.status == Order.STATUS_CANCELED
@classscope(attr='o')
def test_cancel_send_mail_attendees(self):
self.op1.attendee_email = 'foo@example.com'
self.op1.save()
cancel_event(
self.event.pk, subevent=None,
auto_refund=True, keep_fee_fixed="0.00", keep_fee_percentage="0.00",
keep_fees=True, send=True, send_subject="Event canceled", send_message="Event canceled :-(",
user=None
)
assert len(djmail.outbox) == 2
self.order.refresh_from_db()
assert self.order.status == Order.STATUS_CANCELED
@classscope(attr='o')
def test_cancel_auto_refund(self):
gc = self.o.issued_gift_cards.create(currency="EUR")
p1 = self.order.payments.create(
amount=Decimal('46.00'),
state=OrderPayment.PAYMENT_STATE_CONFIRMED,
provider='giftcard',
info='{"gift_card": %d}' % gc.pk
)
self.order.status = Order.STATUS_PAID
self.order.save()
cancel_event(
self.event.pk, subevent=None,
auto_refund=True, keep_fee_fixed="0.00", keep_fee_percentage="0.00",
keep_fees=True, send=True, send_subject="Event canceled", send_message="Event canceled :-(",
user=None
)
r = self.order.refunds.get()
assert r.state == OrderRefund.REFUND_STATE_DONE
assert r.amount == Decimal('46.00')
assert r.source == OrderRefund.REFUND_SOURCE_BUYER
assert r.payment == p1
assert self.order.all_logentries().filter(action_type='pretix.event.order.refund.created').exists()
assert not self.order.all_logentries().filter(action_type='pretix.event.order.refund.requested').exists()
assert gc.value == Decimal('46.00')
@classscope(attr='o')
def test_cancel_do_not_refund(self):
gc = self.o.issued_gift_cards.create(currency="EUR")
self.order.payments.create(
amount=Decimal('46.00'),
state=OrderPayment.PAYMENT_STATE_CONFIRMED,
provider='giftcard',
info='{"gift_card": %d}' % gc.pk
)
self.order.status = Order.STATUS_PAID
self.order.save()
cancel_event(
self.event.pk, subevent=None,
auto_refund=False, keep_fee_fixed="0.00", keep_fee_percentage="0.00",
keep_fees=True, send=True, send_subject="Event canceled", send_message="Event canceled :-(",
user=None
)
self.order.refresh_from_db()
assert self.order.status == Order.STATUS_CANCELED
assert not self.order.refunds.exists()
@classscope(attr='o')
def test_cancel_refund_paid_with_fees(self):
gc = self.o.issued_gift_cards.create(currency="EUR")
p1 = self.order.payments.create(
amount=Decimal('46.00'),
state=OrderPayment.PAYMENT_STATE_CONFIRMED,
provider='giftcard',
info='{"gift_card": %d}' % gc.pk
)
self.order.status = Order.STATUS_PAID
self.order.save()
cancel_event(
self.event.pk, subevent=None,
auto_refund=True, keep_fee_fixed="10.00", keep_fee_percentage="10.00",
keep_fees=True, send=False, send_subject="Event canceled", send_message="Event canceled :-(",
user=None
)
r = self.order.refunds.get()
assert r.state == OrderRefund.REFUND_STATE_DONE
assert r.amount == Decimal('31.40')
assert r.source == OrderRefund.REFUND_SOURCE_BUYER
assert r.payment == p1
assert self.order.all_logentries().filter(action_type='pretix.event.order.refund.created').exists()
assert not self.order.all_logentries().filter(action_type='pretix.event.order.refund.requested').exists()
assert gc.value == Decimal('31.40')
@classscope(attr='o')
def test_cancel_refund_partially_paid_with_fees(self):
gc = self.o.issued_gift_cards.create(currency="EUR")
self.order.payments.create(
amount=Decimal('12.00'),
state=OrderPayment.PAYMENT_STATE_CONFIRMED,
provider='giftcard',
info='{"gift_card": %d}' % gc.pk
)
self.order.status = Order.STATUS_PENDING
self.order.save()
cancel_event(
self.event.pk, subevent=None,
auto_refund=True, keep_fee_fixed="10.00", keep_fee_percentage="10.00",
keep_fees=True, send=False, send_subject="Event canceled", send_message="Event canceled :-(",
user=None
)
assert not self.order.refunds.exists()
self.order.refresh_from_db()
assert self.order.total == Decimal('12.00')
assert self.order.status == Order.STATUS_PAID
assert self.order.positions.count() == 0
@classscope(attr='o')
def test_cancel_keep_fees(self):
gc = self.o.issued_gift_cards.create(currency="EUR")
p1 = self.order.payments.create(
amount=Decimal('46.00'),
state=OrderPayment.PAYMENT_STATE_CONFIRMED,
provider='giftcard',
info='{"gift_card": %d}' % gc.pk
)
self.op1.price -= Decimal('5.00')
self.op1.save()
self.order.fees.create(
fee_type=OrderFee.FEE_TYPE_PAYMENT,
value=Decimal('5.00'),
)
self.order.status = Order.STATUS_PAID
self.order.save()
cancel_event(
self.event.pk, subevent=None,
auto_refund=True, keep_fee_fixed="0.00", keep_fee_percentage="10.00",
keep_fees=True, send=False, send_subject="Event canceled", send_message="Event canceled :-(",
user=None
)
r = self.order.refunds.get()
assert r.state == OrderRefund.REFUND_STATE_DONE
assert r.amount == Decimal('36.90')
assert r.source == OrderRefund.REFUND_SOURCE_BUYER
assert r.payment == p1
assert self.order.all_logentries().filter(action_type='pretix.event.order.refund.created').exists()
assert not self.order.all_logentries().filter(action_type='pretix.event.order.refund.requested').exists()
assert gc.value == Decimal('36.90')
class SubEventCancelTests(TestCase):
def setUp(self):
super().setUp()
self.o = Organizer.objects.create(name='Dummy', slug='dummy')
with scope(organizer=self.o):
self.event = Event.objects.create(organizer=self.o, name='Dummy', slug='dummy', date_from=now(),
plugins='tests.testdummy', has_subevents=True)
self.se1 = self.event.subevents.create(name='One', date_from=now())
self.se2 = self.event.subevents.create(name='Two', date_from=now())
self.order = Order.objects.create(
code='FOO', event=self.event, email='dummy@dummy.test',
status=Order.STATUS_PENDING, locale='en',
datetime=now(), expires=now() + timedelta(days=10),
total=Decimal('46.00'),
)
self.ticket = Item.objects.create(event=self.event, name='Early-bird ticket',
default_price=Decimal('23.00'), admission=True)
self.op1 = OrderPosition.objects.create(
order=self.order, item=self.ticket, variation=None, subevent=self.se1,
price=Decimal("23.00"), attendee_name_parts={'full_name': "Peter"}, positionid=1
)
self.op2 = OrderPosition.objects.create(
order=self.order, item=self.ticket, variation=None, subevent=self.se2,
price=Decimal("23.00"), attendee_name_parts={'full_name': "Dieter"}, positionid=2
)
generate_invoice(self.order)
djmail.outbox = []
@classscope(attr='o')
def test_cancel_partially_send_mail_attendees(self):
self.op1.attendee_email = 'foo@example.com'
self.op1.save()
self.op2.attendee_email = 'foo@example.org'
self.op2.save()
cancel_event(
self.event.pk, subevent=self.se1.pk,
auto_refund=True, keep_fee_fixed="0.00", keep_fee_percentage="0.00",
keep_fees=True, send=True, send_subject="Event canceled", send_message="Event canceled :-(",
user=None
)
assert len(djmail.outbox) == 2
self.order.refresh_from_db()
assert self.order.status == Order.STATUS_PENDING
assert self.order.positions.count() == 1
@classscope(attr='o')
def test_cancel_simple_order(self):
self.op2.subevent = self.se1
self.op2.save()
cancel_event(
self.event.pk, subevent=self.se1.pk,
auto_refund=True, keep_fee_fixed="0.00", keep_fee_percentage="0.00",
keep_fees=True, send=True, send_subject="Event canceled", send_message="Event canceled :-(",
user=None
)
self.order.refresh_from_db()
assert self.order.status == Order.STATUS_CANCELED
@classscope(attr='o')
def test_cancel_mixed_order(self):
cancel_event(
self.event.pk, subevent=self.se1.pk,
auto_refund=True, keep_fee_fixed="0.00", keep_fee_percentage="0.00",
keep_fees=True, send=True, send_subject="Event canceled", send_message="Event canceled :-(",
user=None
)
self.order.refresh_from_db()
assert self.order.status == Order.STATUS_PENDING
assert self.order.positions.filter(subevent=self.se2).count() == 1
assert self.order.positions.filter(subevent=self.se1).count() == 0
@classscope(attr='o')
def test_cancel_partially_keep_fees(self):
gc = self.o.issued_gift_cards.create(currency="EUR")
p1 = self.order.payments.create(
amount=Decimal('46.00'),
state=OrderPayment.PAYMENT_STATE_CONFIRMED,
provider='giftcard',
info='{"gift_card": %d}' % gc.pk
)
self.op1.price -= Decimal('5.00')
self.op1.save()
self.order.fees.create(
fee_type=OrderFee.FEE_TYPE_PAYMENT,
value=Decimal('5.00'),
)
self.order.status = Order.STATUS_PAID
self.order.save()
cancel_event(
self.event.pk, subevent=self.se1.pk,
auto_refund=True, keep_fee_fixed="0.00", keep_fee_percentage="10.00",
keep_fees=True, send=False, send_subject="Event canceled", send_message="Event canceled :-(",
user=None
)
r = self.order.refunds.get()
assert r.state == OrderRefund.REFUND_STATE_DONE
assert r.amount == Decimal('16.20')
assert r.source == OrderRefund.REFUND_SOURCE_BUYER
assert r.payment == p1
assert self.order.all_logentries().filter(action_type='pretix.event.order.refund.created').exists()
assert not self.order.all_logentries().filter(action_type='pretix.event.order.refund.requested').exists()
assert gc.value == Decimal('16.20')
assert self.order.positions.filter(subevent=self.se2).count() == 1
assert self.order.positions.filter(subevent=self.se1).count() == 0
f = self.order.fees.get(fee_type=OrderFee.FEE_TYPE_CANCELLATION)
assert f.value == Decimal('1.80')
| 43.642173 | 113 | 0.615886 |
d0a4d5be5af4376e1d2458ed5b4af3fd6c563ff3 | 1,873 | py | Python | src/utils/runWhenBotStart/getMembersInVoiceStatesWhoAreActive.py | ZhangPluto/Funny-Nation | abd38f10d5cc9c026cbab5ae2995cb17a2902b8f | [
"MIT"
] | 126 | 2022-01-15T02:29:07.000Z | 2022-03-30T09:57:40.000Z | src/utils/runWhenBotStart/getMembersInVoiceStatesWhoAreActive.py | ZhangPluto/Funny-Nation | abd38f10d5cc9c026cbab5ae2995cb17a2902b8f | [
"MIT"
] | 18 | 2022-01-11T22:24:35.000Z | 2022-03-16T00:13:01.000Z | src/utils/runWhenBotStart/getMembersInVoiceStatesWhoAreActive.py | ZhangPluto/Funny-Nation | abd38f10d5cc9c026cbab5ae2995cb17a2902b8f | [
"MIT"
] | 25 | 2022-01-22T15:06:27.000Z | 2022-03-01T04:34:19.000Z | from discord import VoiceState, Member, Guild
from typing import List, Dict
from pymysql import Connection
from src.Storage import Storage
from src.model.activityStatManagement import getActivityStatByUser, newActivityStatForUser
from src.model.userManagement import getUser, addNewUser
from loguru import logger
def getMembersInVoiceStatesWhoAreActive(voiceStates: Dict[int, VoiceState], db: Connection) -> (List[Member], List[Member]):
"""
:param voiceStates:
:param db:
:return:
a tuple (members who in voice but not steaming, members who streaming)
"""
storage: Storage = Storage()
myGuild: Guild = storage.myGuild
membersInVoice: List[Member] = []
membersInStreaming: List[Member] = []
for userID in voiceStates:
# get user information
userInfo: tuple = getUser(db, userID)
# Check if user existed
if userInfo is None:
if not addNewUser(db, userID):
logger.error(f"Cannot create new account to {userID} when sending message. ")
else:
logger.info(f"New account created for user {userID}")
if not getActivityStatByUser(db, userID):
if not newActivityStatForUser(db, userID):
logger.error(f"Cannot create new activity stat for user {userID}")
continue
voiceState = voiceStates[userID]
# Check if member is online
thisMember: Member = myGuild.get_member(userID)
if (thisMember.premium_since is None) and (str(thisMember.desktop_status) != 'online'):
continue
# Check if user mute
if voiceState.self_mute:
continue
if voiceState.self_stream:
membersInStreaming.append(thisMember)
else:
membersInVoice.append(thisMember)
return membersInVoice, membersInStreaming | 33.446429 | 124 | 0.665243 |
4146cf9186b413c798c50ce439528a9372aa4584 | 4,264 | py | Python | sdk/tables/azure-data-tables/samples/async_samples/sample_authentication_async.py | mtin/azure-sdk-for-python | 08d7f8f76d1c9eca230cbcecb3c42eb92817bcb8 | [
"MIT"
] | null | null | null | sdk/tables/azure-data-tables/samples/async_samples/sample_authentication_async.py | mtin/azure-sdk-for-python | 08d7f8f76d1c9eca230cbcecb3c42eb92817bcb8 | [
"MIT"
] | null | null | null | sdk/tables/azure-data-tables/samples/async_samples/sample_authentication_async.py | mtin/azure-sdk-for-python | 08d7f8f76d1c9eca230cbcecb3c42eb92817bcb8 | [
"MIT"
] | null | null | null | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_authentication_async.py
DESCRIPTION:
These samples demonstrate authenticating a client via:
* connection string
* shared access key
* generating a sas token with which the returned signature can be used with
the credential parameter of any TableServiceClient or TableClient
USAGE:
python sample_authentication_async.py
Set the environment variables with your own values before running the sample:
1) TABLES_STORAGE_ACCOUNT_NAME - the name of the storage account
2) TABLES_PRIMARY_STORAGE_ACCOUNT_KEY - the storage account access key
3) TABLES_STORAGE_ENDPOINT_SUFFIX - the endpoint suffix of the Tables service (e.g. core.windows.net)
"""
from datetime import datetime, timedelta
import os
import asyncio
from dotenv import find_dotenv, load_dotenv
class TableAuthSamples(object):
def __init__(self):
load_dotenv(find_dotenv())
self.access_key = os.getenv("TABLES_PRIMARY_STORAGE_ACCOUNT_KEY")
self.endpoint = os.getenv("TABLES_STORAGE_ENDPOINT_SUFFIX")
self.account_name = os.getenv("TABLES_STORAGE_ACCOUNT_NAME")
self.account_url = "{}.table.{}".format(self.account_name, self.endpoint)
self.connection_string = "DefaultEndpointsProtocol=https;AccountName={};AccountKey={};EndpointSuffix={}".format(
self.account_name,
self.access_key,
self.endpoint
)
async def authentication_by_connection_string(self):
# Instantiate a TableServiceClient using a connection string
# [START auth_from_connection_string]
from azure.data.tables.aio import TableServiceClient
async with TableServiceClient.from_connection_string(conn_str=self.connection_string) as table_service:
properties = await table_service.get_service_properties()
print("Connection String: {}".format(properties))
# [END auth_from_connection_string]
async def authentication_by_shared_key(self):
# Instantiate a TableServiceClient using a shared access key
# [START auth_from_shared_key]
from azure.data.tables.aio import TableServiceClient
async with TableServiceClient.from_connection_string(conn_str=self.connection_string) as table_service:
properties = await table_service.get_service_properties()
print("Shared Key: {}".format(properties))
# [END auth_from_shared_key]
async def authentication_by_shared_access_signature(self):
# Instantiate a TableServiceClient using a connection string
# [START auth_by_sas]
from azure.data.tables.aio import TableServiceClient
from azure.core.credentials import AzureNamedKeyCredential
# Create a SAS token to use for authentication of a client
from azure.data.tables import generate_account_sas, ResourceTypes, AccountSasPermissions
print("Account name: {}".format(self.account_name))
credential = AzureNamedKeyCredential(self.account_name, self.access_key)
sas_token = generate_account_sas(
credential,
resource_types=ResourceTypes(service=True),
permission=AccountSasPermissions(read=True),
expiry=datetime.utcnow() + timedelta(hours=1)
)
async with TableServiceClient(account_url=self.account_url, credential=sas_token) as token_auth_table_service:
properties = await token_auth_table_service.get_service_properties()
print("Shared Access Signature: {}".format(properties))
# [END auth_by_sas]
async def main():
sample = TableAuthSamples()
await sample.authentication_by_connection_string()
await sample.authentication_by_shared_key()
await sample.authentication_by_shared_access_signature()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
| 42.217822 | 120 | 0.704503 |
eb55da74d8b8304711f6d8d8f1928c990fda18a6 | 4,200 | py | Python | configs/reppoints_moment_parallel_r50_fpn_1x-deepfashion2.py | ShenhanQian/KGDet | 730bc8254440a7e75f56f28f06982c1879f70403 | [
"MIT"
] | 24 | 2021-01-08T01:40:02.000Z | 2022-03-09T07:31:10.000Z | configs/reppoints_moment_parallel_r50_fpn_1x-deepfashion2.py | ShenhanQian/KGDet | 730bc8254440a7e75f56f28f06982c1879f70403 | [
"MIT"
] | 3 | 2021-07-29T04:21:19.000Z | 2022-02-09T07:42:25.000Z | configs/reppoints_moment_parallel_r50_fpn_1x-deepfashion2.py | ShenhanQian/KGDet | 730bc8254440a7e75f56f28f06982c1879f70403 | [
"MIT"
] | 3 | 2021-08-24T12:16:49.000Z | 2021-12-25T09:49:54.000Z | # model settings
norm_cfg = dict(type='GN', num_groups=32, requires_grad=True)
model = dict(
type='RepPointsDetectorKp',
pretrained='modelzoo://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
start_level=1,
add_extra_convs=True,
num_outs=5,
norm_cfg=norm_cfg),
bbox_head=dict(
type='RepPointsHeadKpParallel',
num_classes=14,
in_channels=256,
feat_channels=256,
point_feat_channels=256,
stacked_convs=3,
num_reppts=9,
num_keypts=294,
gradient_mul=0.1,
point_strides=[8, 16, 32, 64, 128],
point_base_scale=4,
norm_cfg=norm_cfg,
loss_cls=dict(
type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
loss_bbox_init=dict(type='SmoothL1Loss', beta=0.11, loss_weight=0.5),
loss_bbox_refine=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.),
loss_kpt_init=dict(type='SmoothL1Loss', beta=0.11, loss_weight=2.),
loss_kpt_refine=dict(type='SmoothL1Loss', beta=0.11, loss_weight=4.),
transform_method='moment'))
# training and testing settings
train_cfg = dict(
init=dict(
assigner=dict(type='PointAssigner', scale=4, pos_num=1),
allowed_border=-1,
pos_weight=-1,
debug=False),
refine=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.4,
min_pos_iou=0,
ignore_iof_thr=-1),
allowed_border=-1,
pos_weight=-1,
debug=False))
test_cfg = dict(
nms_pre=1000,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_thr=0.5),
max_per_img=100)
# dataset settings
dataset_type = 'DeepFashion2Dataset'
data_root = 'data/deepfashion2/'
img_norm_cfg = dict(
mean=[154.992, 146.197, 140.744], std=[62.757, 64.507, 62.076], to_rgb=True)
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'train/train-coco_style.json',
img_prefix=data_root + 'train/image/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0.5,
with_keypoint=True,
with_mask=False,
with_crowd=False,
with_label=True,
group_mode=False),
val=dict(
type=dataset_type,
ann_file=data_root + 'validation/val-coco_style.json',
img_prefix=data_root + 'validation/image/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0,
with_keypoint=True,
with_mask=False,
with_crowd=False,
with_label=True),
test=dict(
type=dataset_type,
ann_file=data_root + 'validation/val-coco_style.json',
img_prefix=data_root + 'validation/image/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0,
with_keypoint=True,
with_mask=False,
with_crowd=False,
with_label=False,
test_mode=True))
# optimizer
optimizer = dict(type='SGD', lr=5e-3, momentum=0.9, weight_decay=1e-4)
# reference setting: LR 1e-2, WD 1e-4 (LR lowered to 5e-3 here)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[8, 11])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=100,
hooks=[
dict(type='TextLoggerHook'),
dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 12
device_ids = range(8)
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/reppoints_moment_r50_fpn_1x-deepfashion2'
load_from = None
resume_from = None
auto_resume = True
workflow = [('train', 1)]
| 28.965517 | 80 | 0.615 |
f72ae0f6c794d479f6cdc796193f0e7a465e9821 | 15,152 | py | Python | Perception-Project/project_template.py | renowator/Udacity_Robotics_Projects | 3dc1f1ebff3c33d6bbb031653398ace5beb7f809 | [
"MIT"
] | null | null | null | Perception-Project/project_template.py | renowator/Udacity_Robotics_Projects | 3dc1f1ebff3c33d6bbb031653398ace5beb7f809 | [
"MIT"
] | null | null | null | Perception-Project/project_template.py | renowator/Udacity_Robotics_Projects | 3dc1f1ebff3c33d6bbb031653398ace5beb7f809 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Import modules
import matplotlib.colors
import matplotlib.pyplot as plt
import numpy as np
import sklearn
from sklearn.preprocessing import LabelEncoder
import pickle
from sensor_stick.srv import GetNormals
from sensor_stick.features import compute_color_histograms
from sensor_stick.features import compute_normal_histograms
from visualization_msgs.msg import Marker
from sensor_stick.marker_tools import *
from sensor_stick.msg import DetectedObjectsArray
from sensor_stick.msg import DetectedObject
from sensor_stick.pcl_helper import *
import rospy
import tf
from geometry_msgs.msg import Pose
from std_msgs.msg import Float64
from std_msgs.msg import Int32
from std_msgs.msg import String
from pr2_robot.srv import *
from rospy_message_converter import message_converter
import yaml
# Helper function to get surface normals
def get_normals(cloud):
get_normals_prox = rospy.ServiceProxy('/feature_extractor/get_normals', GetNormals)
return get_normals_prox(cloud).cluster
#Helper function to convert RGB to HSV
def rgb_to_hsv(rgb_list):
rgb_normalized = [1.0*rgb_list[0]/255, 1.0*rgb_list[1]/255, 1.0*rgb_list[2]/255]
hsv_normalized = matplotlib.colors.rgb_to_hsv([[rgb_normalized]])[0][0]
return hsv_normalized
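# Shared histogram parameters for the color and surface-normal feature extractors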
bins_range=(0, 256)
nbins = 32
#Helper function to compute color histograms
def compute_color_histograms(cloud, using_hsv=False):
# Compute histograms for the clusters
point_colors_list = []
# Step through each point in the point cloud
for point in pc2.read_points(cloud, skip_nans=True):
rgb_list = float_to_rgb(point[3])
if using_hsv:
point_colors_list.append(rgb_to_hsv(rgb_list) * 255)
else:
point_colors_list.append(rgb_list)
# Populate lists with color values
channel_1_vals = []
channel_2_vals = []
channel_3_vals = []
for color in point_colors_list:
channel_1_vals.append(color[0])
channel_2_vals.append(color[1])
channel_3_vals.append(color[2])
# Compute histograms
# Compute the histogram of the HSV channels separately
h_hist = np.histogram(channel_1_vals, bins=nbins, range=bins_range)
s_hist = np.histogram(channel_2_vals, bins=nbins, range=bins_range)
v_hist = np.histogram(channel_3_vals, bins=nbins, range=bins_range)
# Concatenate the histograms into a single feature vector
hist_features = np.concatenate((h_hist[0], s_hist[0], v_hist[0])).astype(np.float64)
# Normalize the result
normed_features = hist_features / np.sum(hist_features)
return normed_features
#Helper function to compute normal histograms
def compute_normal_histograms(normal_cloud):
norm_x_vals = []
norm_y_vals = []
norm_z_vals = []
for norm_component in pc2.read_points(normal_cloud,
field_names = ('normal_x', 'normal_y', 'normal_z'),
skip_nans=True):
norm_x_vals.append(norm_component[0])
norm_y_vals.append(norm_component[1])
norm_z_vals.append(norm_component[2])
# TODO: Compute histograms of normal values (just like with color)
x_hist = np.histogram(norm_x_vals, bins=nbins, range =bins_range)
y_hist = np.histogram(norm_y_vals, bins=nbins, range =bins_range)
z_hist = np.histogram(norm_z_vals, bins=nbins, range =bins_range)
# TODO: Concatenate and normalize the histograms
hist_features = np.concatenate((x_hist[0], y_hist[0], z_hist[0])).astype(np.float64)
normed_features = hist_features/ np.sum(hist_features)
return normed_features
# Helper function to create a yaml friendly dictionary from ROS messages
def make_yaml_dict(test_scene_num, arm_name, object_name, pick_pose, place_pose):
yaml_dict = {}
yaml_dict["test_scene_num"] = test_scene_num.data
yaml_dict["arm_name"] = arm_name.data
yaml_dict["object_name"] = object_name.data
yaml_dict["pick_pose"] = message_converter.convert_ros_message_to_dictionary(pick_pose)
yaml_dict["place_pose"] = message_converter.convert_ros_message_to_dictionary(place_pose)
print type(yaml_dict["arm_name"]), type(yaml_dict["pick_pose"])
return yaml_dict
# Helper function to output to yaml file
def send_to_yaml(yaml_filename, dict_list):
data_dict = {"object_list": dict_list}
with open(yaml_filename, 'w+') as outfile:
yaml.dump(data_dict, outfile, default_flow_style=False)
print "done yaml"
# Callback function for your Point Cloud Subscriber
def pcl_callback(pcl_msg):
# Convert ROS msg to PCL data
pcl_data=ros_to_pcl(pcl_msg)
# Voxel Grid filter
# Create a VoxelGrid filter object for our input point cloud
vox = pcl_data.make_voxel_grid_filter()
# Choose a voxel (also known as leaf) size
# Note: this (1) is a poor choice of leaf size
# Experiment and find the appropriate size!
LEAF_SIZE = 0.008
# Set the voxel (or leaf) size
vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
# Call the filter function to obtain the resultant downsampled point cloud
cloud_filtered = vox.filter()
# Much like the previous filters, we start by creating a filter object:
cloud_filter = cloud_filtered.make_statistical_outlier_filter()
# Set the number of neighboring points to analyze for any given point
cloud_filter.set_mean_k(50)
# Set threshold scale factor
x = 1.0
# Any point with a mean distance larger than global (mean distance+x*std_dev) will be considered outlier
cloud_filter.set_std_dev_mul_thresh(x)
# Finally call the filter function for magic
cloud_filtered = cloud_filter.filter()
# PassThrough filter
# Create a PassThrough filter object.
passthrough1 = cloud_filtered.make_passthrough_filter()
# Assign axis and range to the passthrough filter object.
filter_axis1 = 'z'
passthrough1.set_filter_field_name(filter_axis1)
axis_min1 = 0.6
axis_max1 = 1.1
passthrough1.set_filter_limits(axis_min1, axis_max1)
# Finally use the filter function to obtain the resultant point cloud.
cloud_p1_filtered = passthrough1.filter()
# Create a PassThrough filter object.
passthrough2 = cloud_p1_filtered.make_passthrough_filter()
# Assign axis and range to the passthrough filter object.
filter_axis2 = 'y'
passthrough2.set_filter_field_name(filter_axis2)
axis_min2 = -0.55
axis_max2 = 0.55
passthrough2.set_filter_limits(axis_min2, axis_max2)
cloud_p_filtered = passthrough2.filter()
# RANSAC plane segmentation
# Create the segmentation object
seg = cloud_p_filtered.make_segmenter()
# Set the model you wish to fit
seg.set_model_type(pcl.SACMODEL_PLANE)
seg.set_method_type(pcl.SAC_RANSAC)
# Max distance for a point to be considered fitting the model
# Experiment with different values for max_distance
# for segmenting the table
max_distance = 0.03
seg.set_distance_threshold(max_distance)
# Call the segment function to obtain set of inlier indices and model coefficients
inliers, coefficients = seg.segment()
# Extract inliers
extracted_inliers = cloud_p_filtered.extract(inliers, negative=False)
# Extract outliers
extracted_outliers = cloud_p_filtered.extract(inliers, negative=True)
# Euclidean Clustering
white_cloud = XYZRGB_to_XYZ(extracted_outliers) # Apply function to convert XYZRGB to XYZ
tree = white_cloud.make_kdtree()
# Create a cluster extraction object
ec = white_cloud.make_EuclideanClusterExtraction()
# Set tolerances for distance threshold
# as well as minimum and maximum cluster size (in points)
# NOTE: These are poor choices of clustering parameters
# Your task is to experiment and find values that work for segmenting objects.
ec.set_ClusterTolerance(0.01)
ec.set_MinClusterSize(50)
ec.set_MaxClusterSize(3000)
# Search the k-d tree for clusters
ec.set_SearchMethod(tree)
# Extract indices for each of the discovered clusters
cluster_indices = ec.Extract()
# Create Cluster-Mask Point Cloud to visualize each cluster separately
#Assign a color corresponding to each segmented object in scene
cluster_color = get_color_list(len(cluster_indices))
color_cluster_point_list = []
for j, indices in enumerate(cluster_indices):
for i, indice in enumerate(indices):
color_cluster_point_list.append([white_cloud[indice][0],
white_cloud[indice][1],
white_cloud[indice][2],
rgb_to_float(cluster_color[j])])
#Create new cloud containing all clusters, each with unique color
cluster_cloud = pcl.PointCloud_PointXYZRGB()
cluster_cloud.from_list(color_cluster_point_list)
# Convert PCL data to ROS messages
ros_cluster_cloud = pcl_to_ros(cluster_cloud)
ros_cloud_objects = pcl_to_ros(extracted_outliers)
ros_cloud_table = pcl_to_ros(extracted_inliers)
# Publish ROS messages
pcl_cluster_cloud_pub.publish(ros_cluster_cloud)
pcl_objects_pub.publish(ros_cloud_objects)
pcl_table_pub.publish(ros_cloud_table)
# Classify the clusters! (loop through each detected cluster one at a time)
detected_objects_labels = []
detected_objects = []
labeled_features =[]
for index, pts_list in enumerate(cluster_indices):
# Grab the points for the cluster
pcl_cluster = extracted_outliers.extract(pts_list)
ros_cluster = pcl_to_ros(pcl_cluster)
# Compute the associated feature vector
# Extract histogram features
chists = compute_color_histograms(ros_cluster, using_hsv=True)
normals = get_normals(ros_cluster)
nhists = compute_normal_histograms(normals)
feature = np.concatenate((chists, nhists)).astype(np.float64)
#detected_objects.append([feature])
# Make the prediction
prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
label = encoder.inverse_transform(prediction)[0]
detected_objects_labels.append(label)
# Publish a label into RViz
label_pos = list(white_cloud[pts_list[0]])
label_pos[2] += .4
object_markers_pub.publish(make_label(label,label_pos, index))
# Add the detected object to the list of detected objects.
do = DetectedObject()
do.label = label
do.cloud = ros_cluster
detected_objects.append(do)
# Publish the list of detected objects
rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
detected_objects_pub.publish(detected_objects)
# Suggested location for where to invoke your pr2_mover() function within pcl_callback()
# Could add some logic to determine whether or not your object detections are robust
# before calling pr2_mover()
try:
pr2_mover(detected_objects)
except rospy.ROSInterruptException:
pass
# function to load parameters and request PickPlace service
def pr2_mover(detected):
# TODO: Initialize variables
test_scene_num = Int32()
object_name = String()
arm_name = String()
pick_pose = Pose()
place_pose = Pose()
dict_list = []
yaml_filename = 'output_3.yaml' #Change for different worlds
test_scene_num.data = 3 #Change for different worlds
labels = []
centroids = []
# TODO: Get/Read parameters
object_list_param = rospy.get_param('/object_list')
dropbox_param = rospy.get_param('/dropbox')
# TODO: Parse parameters into individual variables
for obj in detected:
#print obj.label
labels.append(obj.label)
points_arr = ros_to_pcl(obj.cloud).to_array()
centroids.append(np.mean(points_arr, axis=0)[:3])
# TODO: Rotate PR2 in place to capture side tables for the collision map
# TODO: Loop through the pick list
for i in range(0, len(object_list_param)):
object_name.data = object_list_param[i]['name']
object_group = object_list_param[i]['group']
for j in range(0,len(labels)):
if object_name.data == labels[j]:
pick_pose.position.x = np.asscalar(centroids[j][0])
pick_pose.position.y = np.asscalar(centroids[j][1])
pick_pose.position.z = np.asscalar(centroids[j][2])
#print pick_pose
# TODO: Get the PointCloud for a given object and obtain it's centroid
# TODO: Create 'place_pose' for the object
for j in range(0, len(dropbox_param)):
if object_group == dropbox_param[j]['group']:
place_pose.position.x = dropbox_param[j]['position'][0]
place_pose.position.y = dropbox_param[j]['position'][1]
place_pose.position.z = dropbox_param[j]['position'][2]
# TODO: Assign the arm to be used for pick_place
if object_group =='green':
arm_name.data = 'right'
elif object_group == 'red':
arm_name.data = 'left'
# TODO: Create a list of dictionaries (made with make_yaml_dict()) for later output to yaml format
print "Test_num:",type(test_scene_num),"Arm_name:", type(arm_name),"Ob_name:", type(object_name),"Pick_pose:", type(pick_pose),"Place_pose:", type(place_pose)
yaml_dict = make_yaml_dict(test_scene_num, arm_name, object_name, pick_pose, place_pose)
dict_list.append(yaml_dict)
# Wait for 'pick_place_routine' service to come up
rospy.wait_for_service('pick_place_routine')
#try:
#pick_place_routine = rospy.ServiceProxy('pick_place_routine', PickPlace)
# TODO: Insert your message variables to be sent as a service request
#resp = pick_place_routine(test_scene_num, object_name, arm_name, pick_pose, place_pose)
#print ("Response: ",resp.success)
#except rospy.ServiceException, e:
#print "Service call failed: %s"%e
# TODO: Output your request parameters into output yaml file
send_to_yaml(yaml_filename, dict_list)
if __name__ == '__main__':
# TODO: ROS node initialization
rospy.init_node('clustering', anonymous=True)
# TODO: Create Subscribers
pcl_sub = rospy.Subscriber("/pr2/world/points", pc2.PointCloud2, pcl_callback, queue_size=1)
# TODO: Create Publishers
detected_objects_pub = rospy.Publisher("/detected_objects", DetectedObjectsArray, queue_size=1)
object_markers_pub = rospy.Publisher("/object_markers", Marker, queue_size=1)
pcl_objects_pub = rospy.Publisher("/pcl_objects", PointCloud2, queue_size=1)
pcl_table_pub = rospy.Publisher("/pcl_table", PointCloud2, queue_size=1)
pcl_cluster_cloud_pub = rospy.Publisher("/pcl_clusters", PointCloud2, queue_size=1)
# Initialize color_list
get_color_list.color_list = []
# Load Model From disk
model = pickle.load(open('model.sav', 'rb'))
clf = model['classifier']
encoder = LabelEncoder()
encoder.classes_ = model['classes']
scaler = model['scaler']
# TODO: Spin while node is not shutdown
while not rospy.is_shutdown():
rospy.spin()
| 38.262626 | 159 | 0.714625 |