Dataset column schema (⌀ marks nullable columns):

| Column | Type | Length / value range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 972 |
| max_stars_repo_name | string | length 6 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | sequence | length 1 to 10 |
| max_stars_count | int64 ⌀ | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | string ⌀ | length 24 |
| max_stars_repo_stars_event_max_datetime | string ⌀ | length 24 |
| max_issues_repo_path | string | length 3 to 972 |
| max_issues_repo_name | string | length 6 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | sequence | length 1 to 10 |
| max_issues_count | int64 ⌀ | 1 to 116k |
| max_issues_repo_issues_event_min_datetime | string ⌀ | length 24 |
| max_issues_repo_issues_event_max_datetime | string ⌀ | length 24 |
| max_forks_repo_path | string | length 3 to 972 |
| max_forks_repo_name | string | length 6 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | sequence | length 1 to 10 |
| max_forks_count | int64 ⌀ | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | string ⌀ | length 24 |
| max_forks_repo_forks_event_max_datetime | string ⌀ | length 24 |
| content | string | length 3 to 1.03M |
| avg_line_length | float64 | 1.13 to 941k |
| max_line_length | int64 | 2 to 941k |
| alphanum_fraction | float64 | 0 to 1 |

python/Intro/files/csv/csv3.py | Joaxin/GitComments | MIT | 249 bytes

import json
import csv2
json_str = '{"name": "骆昊", "age": 38, "title": "叫兽"}'
result = json.loads(json_str)
# Pass the dict obtained from the conversion to the Teacher constructor as keyword arguments
teacher = csv2.Teacher(**result)
print(teacher)
print(teacher.name)
print(teacher.age)
print(teacher.title)
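# Note: the Teacher class is provided by the csv2 module imported above and is not
# shown in this file. A minimal sketch of what this script assumes it looks like
# (hypothetical, for illustration only):
#
#     class Teacher:
#         def __init__(self, name, age, title):
#             self.name = name
#             self.age = age
#             self.title = title
#
# json.loads produces a plain dict, and **result unpacks that dict so each JSON key
# is passed to Teacher.__init__ as a keyword argument.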

stylize_2.py | Lunatic-Works/neural-tools | MIT | 1,107 bytes

#!/usr/bin/env python3
import numpy as np
import skimage.transform
from utils import do_imgs, read_img, write_img
model_filename = './models/stylize_2/cezanne.onnx'
in_filenames = [
'./in.png',
]
out_suffix = '_cezanne'
size = 1024
swap_rb = True
noise = 0
output_8_bit = False
def convert_img(sess, in_filename, out_filename):
img = read_img(in_filename, swap_rb=swap_rb, signed=False, noise=noise)
original_size = img.shape[:2]
img = skimage.transform.resize(img, (size, size))
img = img.transpose(2, 0, 1)
img = np.expand_dims(img, axis=0)
out_img = sess.run(None, {'Input': img})[0]
out_img = out_img.squeeze(axis=0)
out_img = out_img.transpose(1, 2, 0)
out_img = skimage.transform.resize(out_img, original_size)
write_img(out_filename,
out_img,
swap_rb=swap_rb,
signed=False,
output_8_bit=output_8_bit)
if __name__ == '__main__':
do_imgs(convert_img,
model_filename,
in_filenames,
out_suffix,
out_extname=None if output_8_bit else '.png')

ejercicios/granos.py | carlosviveros/Soluciones | MIT | 370 bytes

"""AyudaEnPython: https://www.facebook.com/groups/ayudapython
Calculate how many grains of rice we would have in the last square if, for each
square of a 3x4 matrix, we put one grain in the first square and triple the
amount in each following square.
"""
CASILLAS = 12
total = sum(3 ** n for n in range(CASILLAS))
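# Sanity check: the total is a geometric series, 3**0 + 3**1 + ... + 3**11,
# whose closed form is (3**CASILLAS - 1) // 2 = 265720.
assert total == (3 ** CASILLAS - 1) // 2 == 265720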
print(f"En la última casilla tendríamos {total} granos de arroz.")

fofaPlug/file/my_save.py | Yingsame/mysearch | Unlicense | 2,798 bytes

import os
import time
from fofaPlug.file import save_html, save_db
def save(ret_list, file_type, file_type_name, default_file, name_file):
str2 = str()
for i in ret_list:
for j in i:
str2 += str(j) + " "
str2 += '\n'
pwd = os.getcwd()
if default_file:
if file_type:
if file_type_name == 'html':
str3 = time.strftime("%Y-%m-%d-%H-%M") + ".html"
file_type_name = pwd + '\outfile\\' + str3
save_html.deal_file(file_type_name)
save_html.save(ret_list,file_type_name)
return str3
elif file_type_name == 'db':
str3 = time.strftime("%Y%m%d%H") + ".db"
file_type_name = pwd + '\outfile\\' + str3
save_db.createbd(str3)
save_db.save(ret_list,file_type_name)
return str3
else:
                print('Unknown file type, saving in the default format')
str3 = time.strftime("%Y-%m-%d-%H-%M") + ".txt"
file_type_name = pwd + '\outfile\\' + str3
with open(file_type_name, 'a', encoding='utf-8') as f:
f.write(str2)
print('write done!')
return str3
else:
str3 = time.strftime("%Y-%m-%d-%H-%M") + ".txt"
file_type_name = pwd + '\outfile\\' + str3
            # Default type: txt
with open(file_type_name, 'a', encoding='utf-8') as f:
f.write(str2)
print('write done!')
return str3
else:
if file_type:
if file_type_name == 'html':
str3 = name_file + '.html'
file_type_name = pwd + '\outfile\\' + str3
save_html.deal_file(file_type_name)
save_html.save(ret_list,file_type_name)
return str3
elif file_type_name == 'db':
str3 = name_file + '.db'
file_type_name = pwd + '\outfile\\' + str3
save_db.createbd(str3)
save_db.save(ret_list,file_type_name)
return str3
else:
                print('Unknown file type, saving in the default format')
str3 = name_file + '.txt'
file_type_name = pwd + '\outfile\\' + str3
with open(file_type_name, 'a', encoding='utf-8') as f:
f.write(str2)
print('write done!')
return str3
else:
str3 = name_file + '.txt'
file_type_name = pwd + '\outfile\\' + str3
with open(file_type_name, 'a', encoding='utf-8') as f:
f.write(str2)
print('write done!')
return str3
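# Usage sketch for save() (hypothetical call sites; ret_list is whatever list of
# result rows the caller has assembled):
#     save(rows, file_type=True, file_type_name='html', default_file=True, name_file='')
#         -> writes a timestamped .html report under .\outfile and returns its filename
#     save(rows, file_type=False, file_type_name='', default_file=False, name_file='scan1')
#         -> writes scan1.txt under .\outfile and returns 'scan1.txt'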

finrl/model/WorldModel/utils/misc.py | Adwaver4157/WorldModel_for_FinRL | MIT | 6,987 bytes

""" Various auxiliary utilities """
import math
from os.path import join, exists
import torch
from torchvision import transforms
import numpy as np
import gym
import gym.envs.box2d
from typing import Dict, Generator, Optional, Union
# A bit dirty: manually change size of car racing env
#gym.envs.box2d.car_racing.STATE_W, gym.envs.box2d.car_racing.STATE_H = 64, 64
# Hardcoded for now
ASIZE, LSIZE, RSIZE, RED_SIZE, SIZE =\
3, 32, 256, 64, 64
# Same
transform = transforms.Compose([
transforms.ToTensor()
])
def sample_continuous_policy(action_space, seq_len, dt):
""" Sample a continuous policy.
Atm, action_space is supposed to be a box environment. The policy is
sampled as a brownian motion a_{t+1} = a_t + sqrt(dt) N(0, 1).
:args action_space: gym action space
:args seq_len: number of actions returned
:args dt: temporal discretization
:returns: sequence of seq_len actions
"""
actions = [action_space.sample()]
for _ in range(seq_len):
daction_dt = np.random.randn(*actions[-1].shape)
actions.append(
np.clip(actions[-1] + math.sqrt(dt) * daction_dt,
action_space.low, action_space.high))
return actions
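# Usage sketch (hypothetical parameters; any gym Box action space works):
#     space = gym.spaces.Box(low=-1.0, high=1.0, shape=(3,), dtype=np.float32)
#     actions = sample_continuous_policy(space, seq_len=1000, dt=1. / 50)
# This yields 1001 actions following the Brownian motion described in the docstring,
# each clipped to the bounds of the action space.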
def save_checkpoint(state, is_best, filename, best_filename):
# Save state in filename. Also save in best_filename if is_best.
torch.save(state, filename)
if is_best:
torch.save(state, best_filename)
def flatten_parameters(params):
"""Flattening parameters.
:args params: generator of parameters (as returned by module.parameters())
:returns: flattened parameters (i.e. one tensor of dimension 1 with all
parameters concatenated)
"""
return torch.cat([p.detach().view(-1) for p in params], dim=0).cpu().numpy()
def unflatten_parameters(params, example, device):
""" Unflatten parameters.
:args params: parameters as a single 1D np array
:args example: generator of parameters (as returned by module.parameters()),
used to reshape params
:args device: where to store unflattened parameters
:returns: unflattened parameters
"""
params = torch.Tensor(params).to(device)
idx = 0
unflattened = []
for e_p in example:
unflattened += [params[idx:idx + e_p.numel()].view(e_p.size())]
idx += e_p.numel()
return unflattened
def load_parameters(params, controller):
""" Load flattened parameters into controller.
:args params: parameters as a single 1D np array
:args controller: module in which params is loaded
"""
proto = next(controller.parameters())
params = unflatten_parameters(
params, controller.parameters(), proto.device)
for p, p_0 in zip(controller.parameters(), params):
p.data.copy_(p_0)
class RolloutGenerator(object):
""" Utility to generate rollouts.
Encapsulate everything that is needed to generate rollouts in the TRUE ENV
using a controller with previously trained VAE and mdnrnn.
:attr vae: VAE model loaded from mdir/vae
:attr mdnrnn: MDNRNN model loaded from mdir/mdnrnn
:attr controller: Controller, either loaded from mdir/ctrl or randomly
initialized
:attr env: instance of the CarRacing-v0 gym environment
:attr device: device used to run VAE, MDNRNN and Controller
:attr time_limit: rollouts have a maximum of time_limit timesteps
"""
def __init__(self, vae, mdnrnn, mdnrnncell, controller, env, device, time_limit):
" Build vae, rnn, controller and environment. "
self.vae = vae
state = mdnrnn.state_dict()
self.mdnrnn = mdnrnncell
self.mdnrnn.load_state_dict(
{k.strip('_l0'): v for k, v in state.items()})
self.controller = controller
self.env = env
self.device = device
self.time_limit = time_limit
def get_action_and_transition(self, obs, hidden):
""" Get action and transition.
Encode obs to latent using the VAE, then obtain estimation for next
latent and next hidden state using the MDNRNN and compute the controller
corresponding action.
:args obs: current observation (1 x 3 x 64 x 64) torch tensor
:args hidden: current hidden state (1 x 256) torch tensor
:returns: (action, next_hidden)
- action: 1D np array
- next_hidden (1 x 256) torch tensor
"""
_, latent_mu, _ = self.vae(obs)
action = self.controller(latent_mu, hidden[0])
_, _, _, _, _, next_hidden = self.mdnrnn(action, latent_mu, hidden)
return action.squeeze().cpu().numpy(), next_hidden
def _normalize_obs(self,
obs: Union[np.ndarray, Dict[str, np.ndarray]], env = None
) -> Union[np.ndarray, Dict[str, np.ndarray]]:
if env is not None:
return env.normalize_obs(obs)
return obs
def to_torch(self, array: np.ndarray, copy: bool = True) -> torch.Tensor:
"""
Convert a numpy array to a PyTorch tensor.
Note: it copies the data by default
:param array:
:param copy: Whether to copy or not the data
(may be useful to avoid changing things be reference)
:return:
"""
if copy:
return torch.tensor(array).to(self.device)
return torch.as_tensor(array).to(self.device)
def rollout(self, params, env, render=False):
""" Execute a rollout and returns minus cumulative reward.
Load :params: into the controller and execute a single rollout. This
is the main API of this class.
:args params: parameters as a single 1D np array
:returns: minus cumulative reward
"""
# copy params into the controller
if params is not None:
load_parameters(params, self.controller)
obs = self.env.reset()
# This first render is required !
self.env.render()
hidden = [
torch.zeros(1, RSIZE).to(self.device)
for _ in range(2)]
cumulative = 0
i = 0
while True:
"""
if self.optimize_memory_usage:
next_obs = self._normalize_obs(self.observations[(batch_inds + 1) % self.buffer_size, 0, :], env)
else:
"""
if not isinstance(obs, np.ndarray):
obs = np.array(obs)
obs = self._normalize_obs(obs, env)
obs = self.to_torch(obs)
# obs = transform(obs).unsqueeze(0).to(self.device)
action, hidden = self.get_action_and_transition(obs, hidden)
action = np.array([action])
action = self._normalize_obs(action, env)
action = np.array([action])
obs, reward, done, _ = self.env.step(action)
if render:
self.env.render()
cumulative += reward
if done or i > self.time_limit:
return - cumulative
            i += 1

ansible/my_env/lib/python2.7/site-packages/ansible/modules/cloud/amazon/elb_classic_lb.py | otus-devops-2019-02/yyashkin_infra | MIT | 53,493 bytes

#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = """
---
module: elb_classic_lb
description:
- Returns information about the load balancer.
- Will be marked changed when called only if state is changed.
short_description: Creates or destroys Amazon ELB.
version_added: "1.5"
author:
- "Jim Dalton (@jsdalton)"
options:
state:
description:
- Create or destroy the ELB
choices: ["present", "absent"]
required: true
name:
description:
- The name of the ELB
required: true
listeners:
description:
- List of ports/protocols for this ELB to listen on (see example)
purge_listeners:
description:
- Purge existing listeners on ELB that are not found in listeners
type: bool
default: 'yes'
instance_ids:
description:
- List of instance ids to attach to this ELB
version_added: "2.1"
purge_instance_ids:
description:
- Purge existing instance ids on ELB that are not found in instance_ids
type: bool
default: 'no'
version_added: "2.1"
zones:
description:
- List of availability zones to enable on this ELB
purge_zones:
description:
- Purge existing availability zones on ELB that are not found in zones
type: bool
default: 'no'
security_group_ids:
description:
- A list of security groups to apply to the elb
version_added: "1.6"
security_group_names:
description:
- A list of security group names to apply to the elb
version_added: "2.0"
health_check:
description:
- An associative array of health check configuration settings (see example)
access_logs:
description:
- An associative array of access logs configuration settings (see example)
version_added: "2.0"
subnets:
description:
- A list of VPC subnets to use when creating ELB. Zones should be empty if using this.
version_added: "1.7"
purge_subnets:
description:
- Purge existing subnet on ELB that are not found in subnets
type: bool
default: 'no'
version_added: "1.7"
scheme:
description:
- The scheme to use when creating the ELB. For a private VPC-visible ELB use 'internal'.
If you choose to update your scheme with a different value the ELB will be destroyed and
recreated. To update scheme you must use the option wait.
choices: ["internal", "internet-facing"]
default: 'internet-facing'
version_added: "1.7"
validate_certs:
description:
- When set to C(no), SSL certificates will not be validated for boto versions >= 2.6.0.
type: bool
default: 'yes'
version_added: "1.5"
connection_draining_timeout:
description:
- Wait a specified timeout allowing connections to drain before terminating an instance
version_added: "1.8"
idle_timeout:
description:
- ELB connections from clients and to servers are timed out after this amount of time
version_added: "2.0"
cross_az_load_balancing:
description:
- Distribute load across all configured Availability Zones
type: bool
default: 'no'
version_added: "1.8"
stickiness:
description:
- An associative array of stickiness policy settings. Policy will be applied to all listeners ( see example )
version_added: "2.0"
wait:
description:
- When specified, Ansible will check the status of the load balancer to ensure it has been successfully
removed from AWS.
type: bool
default: 'no'
version_added: "2.1"
wait_timeout:
description:
- Used in conjunction with wait. Number of seconds to wait for the elb to be terminated.
A maximum of 600 seconds (10 minutes) is allowed.
default: 60
version_added: "2.1"
tags:
description:
- An associative array of tags. To delete all tags, supply an empty dict.
version_added: "2.1"
extends_documentation_fragment:
- aws
- ec2
"""
EXAMPLES = """
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.
# Basic provisioning example (non-VPC)
- elb_classic_lb:
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http # options are http, https, ssl, tcp
load_balancer_port: 80
instance_port: 80
proxy_protocol: True
- protocol: https
load_balancer_port: 443
instance_protocol: http # optional, defaults to value of protocol setting
instance_port: 80
# ssl certificate required for https or ssl
ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/company/servercerts/ProdServerCert"
delegate_to: localhost
# Internal ELB example
- elb_classic_lb:
name: "test-vpc"
scheme: internal
state: present
instance_ids:
- i-abcd1234
purge_instance_ids: true
subnets:
- subnet-abcd1234
- subnet-1a2b3c4d
listeners:
- protocol: http # options are http, https, ssl, tcp
load_balancer_port: 80
instance_port: 80
delegate_to: localhost
# Configure a health check and the access logs
- elb_classic_lb:
name: "test-please-delete"
state: present
zones:
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
health_check:
ping_protocol: http # options are http, https, ssl, tcp
ping_port: 80
ping_path: "/index.html" # not required for tcp or ssl
response_timeout: 5 # seconds
interval: 30 # seconds
unhealthy_threshold: 2
healthy_threshold: 10
access_logs:
interval: 5 # minutes (defaults to 60)
s3_location: "my-bucket" # This value is required if access_logs is set
s3_prefix: "logs"
delegate_to: localhost
# Ensure ELB is gone
- elb_classic_lb:
name: "test-please-delete"
state: absent
delegate_to: localhost
# Ensure ELB is gone and wait for check (for default timeout)
- elb_classic_lb:
name: "test-please-delete"
state: absent
wait: yes
delegate_to: localhost
# Ensure ELB is gone and wait for check with timeout value
- elb_classic_lb:
name: "test-please-delete"
state: absent
wait: yes
wait_timeout: 600
delegate_to: localhost
# Normally, this module will purge any listeners that exist on the ELB
# but aren't specified in the listeners parameter. If purge_listeners is
# false it leaves them alone
- elb_classic_lb:
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
purge_listeners: no
delegate_to: localhost
# Normally, this module will leave availability zones that are enabled
# on the ELB alone. If purge_zones is true, then any extraneous zones
# will be removed
- elb_classic_lb:
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
purge_zones: yes
delegate_to: localhost
# Creates a ELB and assigns a list of subnets to it.
- elb_classic_lb:
state: present
name: 'New ELB'
security_group_ids: 'sg-123456, sg-67890'
region: us-west-2
subnets: 'subnet-123456,subnet-67890'
purge_subnets: yes
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
delegate_to: localhost
# Create an ELB with connection draining, increased idle timeout and cross availability
# zone load balancing
- elb_classic_lb:
name: "New ELB"
state: present
connection_draining_timeout: 60
idle_timeout: 300
cross_az_load_balancing: "yes"
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
delegate_to: localhost
# Create an ELB with load balancer stickiness enabled
- elb_classic_lb:
name: "New ELB"
state: present
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
stickiness:
type: loadbalancer
enabled: yes
expiration: 300
delegate_to: localhost
# Create an ELB with application stickiness enabled
- elb_classic_lb:
name: "New ELB"
state: present
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
stickiness:
type: application
enabled: yes
cookie: SESSIONID
delegate_to: localhost
# Create an ELB and add tags
- elb_classic_lb:
name: "New ELB"
state: present
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
tags:
Name: "New ELB"
stack: "production"
client: "Bob"
delegate_to: localhost
# Delete all tags from an ELB
- elb_classic_lb:
name: "New ELB"
state: present
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
tags: {}
delegate_to: localhost
"""
import random
import time
import traceback
try:
import boto
import boto.ec2.elb
import boto.ec2.elb.attributes
import boto.vpc
from boto.ec2.elb.healthcheck import HealthCheck
from boto.ec2.tag import Tag
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import ec2_argument_spec, connect_to_aws, AnsibleAWSError, get_aws_connection_info
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native
def _throttleable_operation(max_retries):
def _operation_wrapper(op):
def _do_op(*args, **kwargs):
retry = 0
while True:
try:
return op(*args, **kwargs)
except boto.exception.BotoServerError as e:
if retry < max_retries and e.code in \
("Throttling", "RequestLimitExceeded"):
retry = retry + 1
time.sleep(min(random.random() * (2 ** retry), 300))
continue
else:
raise
return _do_op
return _operation_wrapper
def _get_vpc_connection(module, region, aws_connect_params):
try:
return connect_to_aws(boto.vpc, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
_THROTTLING_RETRIES = 5
class ElbManager(object):
"""Handles ELB creation and destruction"""
def __init__(self, module, name, listeners=None, purge_listeners=None,
zones=None, purge_zones=None, security_group_ids=None,
health_check=None, subnets=None, purge_subnets=None,
scheme="internet-facing", connection_draining_timeout=None,
idle_timeout=None,
cross_az_load_balancing=None, access_logs=None,
stickiness=None, wait=None, wait_timeout=None, tags=None,
region=None,
instance_ids=None, purge_instance_ids=None, **aws_connect_params):
self.module = module
self.name = name
self.listeners = listeners
self.purge_listeners = purge_listeners
self.instance_ids = instance_ids
self.purge_instance_ids = purge_instance_ids
self.zones = zones
self.purge_zones = purge_zones
self.security_group_ids = security_group_ids
self.health_check = health_check
self.subnets = subnets
self.purge_subnets = purge_subnets
self.scheme = scheme
self.connection_draining_timeout = connection_draining_timeout
self.idle_timeout = idle_timeout
self.cross_az_load_balancing = cross_az_load_balancing
self.access_logs = access_logs
self.stickiness = stickiness
self.wait = wait
self.wait_timeout = wait_timeout
self.tags = tags
self.aws_connect_params = aws_connect_params
self.region = region
self.changed = False
self.status = 'gone'
self.elb_conn = self._get_elb_connection()
try:
self.elb = self._get_elb()
except boto.exception.BotoServerError as e:
module.fail_json(msg='unable to get all load balancers: %s' % e.message, exception=traceback.format_exc())
self.ec2_conn = self._get_ec2_connection()
@_throttleable_operation(_THROTTLING_RETRIES)
def ensure_ok(self):
"""Create the ELB"""
if not self.elb:
# Zones and listeners will be added at creation
self._create_elb()
else:
if self._get_scheme():
# the only way to change the scheme is by recreating the resource
self.ensure_gone()
self._create_elb()
else:
self._set_zones()
self._set_security_groups()
self._set_elb_listeners()
self._set_subnets()
self._set_health_check()
# boto has introduced support for some ELB attributes in
# different versions, so we check first before trying to
# set them to avoid errors
if self._check_attribute_support('connection_draining'):
self._set_connection_draining_timeout()
if self._check_attribute_support('connecting_settings'):
self._set_idle_timeout()
if self._check_attribute_support('cross_zone_load_balancing'):
self._set_cross_az_load_balancing()
if self._check_attribute_support('access_log'):
self._set_access_log()
# add sitcky options
self.select_stickiness_policy()
# ensure backend server policies are correct
self._set_backend_policies()
# set/remove instance ids
self._set_instance_ids()
self._set_tags()
def ensure_gone(self):
"""Destroy the ELB"""
if self.elb:
self._delete_elb()
if self.wait:
elb_removed = self._wait_for_elb_removed()
# Unfortunately even though the ELB itself is removed quickly
# the interfaces take longer so reliant security groups cannot
# be deleted until the interface has registered as removed.
elb_interface_removed = self._wait_for_elb_interface_removed()
if not (elb_removed and elb_interface_removed):
self.module.fail_json(msg='Timed out waiting for removal of load balancer.')
def get_info(self):
try:
check_elb = self.elb_conn.get_all_load_balancers(self.name)[0]
except:
check_elb = None
if not check_elb:
info = {
'name': self.name,
'status': self.status,
'region': self.region
}
else:
try:
lb_cookie_policy = check_elb.policies.lb_cookie_stickiness_policies[0].__dict__['policy_name']
except:
lb_cookie_policy = None
try:
app_cookie_policy = check_elb.policies.app_cookie_stickiness_policies[0].__dict__['policy_name']
except:
app_cookie_policy = None
info = {
'name': check_elb.name,
'dns_name': check_elb.dns_name,
'zones': check_elb.availability_zones,
'security_group_ids': check_elb.security_groups,
'status': self.status,
'subnets': self.subnets,
'scheme': check_elb.scheme,
'hosted_zone_name': check_elb.canonical_hosted_zone_name,
'hosted_zone_id': check_elb.canonical_hosted_zone_name_id,
'lb_cookie_policy': lb_cookie_policy,
'app_cookie_policy': app_cookie_policy,
'proxy_policy': self._get_proxy_protocol_policy(),
'backends': self._get_backend_policies(),
'instances': [instance.id for instance in check_elb.instances],
'out_of_service_count': 0,
'in_service_count': 0,
'unknown_instance_state_count': 0,
'region': self.region
}
# status of instances behind the ELB
if info['instances']:
info['instance_health'] = [dict(
instance_id=instance_state.instance_id,
reason_code=instance_state.reason_code,
state=instance_state.state
) for instance_state in self.elb_conn.describe_instance_health(self.name)]
else:
info['instance_health'] = []
# instance state counts: InService or OutOfService
if info['instance_health']:
for instance_state in info['instance_health']:
if instance_state['state'] == "InService":
info['in_service_count'] += 1
elif instance_state['state'] == "OutOfService":
info['out_of_service_count'] += 1
else:
info['unknown_instance_state_count'] += 1
if check_elb.health_check:
info['health_check'] = {
'target': check_elb.health_check.target,
'interval': check_elb.health_check.interval,
'timeout': check_elb.health_check.timeout,
'healthy_threshold': check_elb.health_check.healthy_threshold,
'unhealthy_threshold': check_elb.health_check.unhealthy_threshold,
}
if check_elb.listeners:
info['listeners'] = [self._api_listener_as_tuple(l)
for l in check_elb.listeners]
elif self.status == 'created':
# When creating a new ELB, listeners don't show in the
# immediately returned result, so just include the
# ones that were added
info['listeners'] = [self._listener_as_tuple(l)
for l in self.listeners]
else:
info['listeners'] = []
if self._check_attribute_support('connection_draining'):
info['connection_draining_timeout'] = int(self.elb_conn.get_lb_attribute(self.name, 'ConnectionDraining').timeout)
if self._check_attribute_support('connecting_settings'):
info['idle_timeout'] = self.elb_conn.get_lb_attribute(self.name, 'ConnectingSettings').idle_timeout
if self._check_attribute_support('cross_zone_load_balancing'):
is_cross_az_lb_enabled = self.elb_conn.get_lb_attribute(self.name, 'CrossZoneLoadBalancing')
if is_cross_az_lb_enabled:
info['cross_az_load_balancing'] = 'yes'
else:
info['cross_az_load_balancing'] = 'no'
# return stickiness info?
info['tags'] = self.tags
return info
@_throttleable_operation(_THROTTLING_RETRIES)
def _wait_for_elb_removed(self):
polling_increment_secs = 15
max_retries = (self.wait_timeout // polling_increment_secs)
status_achieved = False
for x in range(0, max_retries):
try:
self.elb_conn.get_all_lb_attributes(self.name)
except (boto.exception.BotoServerError, Exception) as e:
if "LoadBalancerNotFound" in e.code:
status_achieved = True
break
else:
time.sleep(polling_increment_secs)
return status_achieved
@_throttleable_operation(_THROTTLING_RETRIES)
def _wait_for_elb_interface_removed(self):
polling_increment_secs = 15
max_retries = (self.wait_timeout // polling_increment_secs)
status_achieved = False
elb_interfaces = self.ec2_conn.get_all_network_interfaces(
filters={'attachment.instance-owner-id': 'amazon-elb',
'description': 'ELB {0}'.format(self.name)})
for x in range(0, max_retries):
for interface in elb_interfaces:
try:
result = self.ec2_conn.get_all_network_interfaces(interface.id)
if result == []:
status_achieved = True
break
else:
time.sleep(polling_increment_secs)
except (boto.exception.BotoServerError, Exception) as e:
if 'InvalidNetworkInterfaceID' in e.code:
status_achieved = True
break
else:
self.module.fail_json(msg=to_native(e), exception=traceback.format_exc())
return status_achieved
@_throttleable_operation(_THROTTLING_RETRIES)
def _get_elb(self):
elbs = self.elb_conn.get_all_load_balancers()
for elb in elbs:
if self.name == elb.name:
self.status = 'ok'
return elb
def _get_elb_connection(self):
try:
return connect_to_aws(boto.ec2.elb, self.region,
**self.aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
self.module.fail_json(msg=str(e))
def _get_ec2_connection(self):
try:
return connect_to_aws(boto.ec2, self.region,
**self.aws_connect_params)
except (boto.exception.NoAuthHandlerFound, Exception) as e:
self.module.fail_json(msg=to_native(e), exception=traceback.format_exc())
@_throttleable_operation(_THROTTLING_RETRIES)
def _delete_elb(self):
# True if succeeds, exception raised if not
result = self.elb_conn.delete_load_balancer(name=self.name)
if result:
self.changed = True
self.status = 'deleted'
def _create_elb(self):
listeners = [self._listener_as_tuple(l) for l in self.listeners]
self.elb = self.elb_conn.create_load_balancer(name=self.name,
zones=self.zones,
security_groups=self.security_group_ids,
complex_listeners=listeners,
subnets=self.subnets,
scheme=self.scheme)
if self.elb:
# HACK: Work around a boto bug in which the listeners attribute is
# always set to the listeners argument to create_load_balancer, and
# not the complex_listeners
# We're not doing a self.elb = self._get_elb here because there
# might be eventual consistency issues and it doesn't necessarily
# make sense to wait until the ELB gets returned from the EC2 API.
# This is necessary in the event we hit the throttling errors and
# need to retry ensure_ok
# See https://github.com/boto/boto/issues/3526
self.elb.listeners = self.listeners
self.changed = True
self.status = 'created'
def _create_elb_listeners(self, listeners):
"""Takes a list of listener tuples and creates them"""
# True if succeeds, exception raised if not
self.changed = self.elb_conn.create_load_balancer_listeners(self.name,
complex_listeners=listeners)
def _delete_elb_listeners(self, listeners):
"""Takes a list of listener tuples and deletes them from the elb"""
ports = [l[0] for l in listeners]
# True if succeeds, exception raised if not
self.changed = self.elb_conn.delete_load_balancer_listeners(self.name,
ports)
def _set_elb_listeners(self):
"""
Creates listeners specified by self.listeners; overwrites existing
listeners on these ports; removes extraneous listeners
"""
listeners_to_add = []
listeners_to_remove = []
listeners_to_keep = []
# Check for any listeners we need to create or overwrite
for listener in self.listeners:
listener_as_tuple = self._listener_as_tuple(listener)
# First we loop through existing listeners to see if one is
# already specified for this port
existing_listener_found = None
for existing_listener in self.elb.listeners:
# Since ELB allows only one listener on each incoming port, a
# single match on the incoming port is all we're looking for
if existing_listener[0] == int(listener['load_balancer_port']):
existing_listener_found = self._api_listener_as_tuple(existing_listener)
break
if existing_listener_found:
# Does it match exactly?
if listener_as_tuple != existing_listener_found:
# The ports are the same but something else is different,
# so we'll remove the existing one and add the new one
listeners_to_remove.append(existing_listener_found)
listeners_to_add.append(listener_as_tuple)
else:
# We already have this listener, so we're going to keep it
listeners_to_keep.append(existing_listener_found)
else:
# We didn't find an existing listener, so just add the new one
listeners_to_add.append(listener_as_tuple)
# Check for any extraneous listeners we need to remove, if desired
if self.purge_listeners:
for existing_listener in self.elb.listeners:
existing_listener_tuple = self._api_listener_as_tuple(existing_listener)
if existing_listener_tuple in listeners_to_remove:
# Already queued for removal
continue
if existing_listener_tuple in listeners_to_keep:
# Keep this one around
continue
# Since we're not already removing it and we don't need to keep
# it, let's get rid of it
listeners_to_remove.append(existing_listener_tuple)
if listeners_to_remove:
self._delete_elb_listeners(listeners_to_remove)
if listeners_to_add:
self._create_elb_listeners(listeners_to_add)
def _api_listener_as_tuple(self, listener):
"""Adds ssl_certificate_id to ELB API tuple if present"""
base_tuple = listener.get_complex_tuple()
if listener.ssl_certificate_id and len(base_tuple) < 5:
return base_tuple + (listener.ssl_certificate_id,)
return base_tuple
def _listener_as_tuple(self, listener):
"""Formats listener as a 4- or 5-tuples, in the order specified by the
ELB API"""
# N.B. string manipulations on protocols below (str(), upper()) is to
# ensure format matches output from ELB API
listener_list = [
int(listener['load_balancer_port']),
int(listener['instance_port']),
str(listener['protocol'].upper()),
]
# Instance protocol is not required by ELB API; it defaults to match
# load balancer protocol. We'll mimic that behavior here
if 'instance_protocol' in listener:
listener_list.append(str(listener['instance_protocol'].upper()))
else:
listener_list.append(str(listener['protocol'].upper()))
if 'ssl_certificate_id' in listener:
listener_list.append(str(listener['ssl_certificate_id']))
return tuple(listener_list)
def _enable_zones(self, zones):
try:
self.elb.enable_zones(zones)
except boto.exception.BotoServerError as e:
self.module.fail_json(msg='unable to enable zones: %s' % e.message, exception=traceback.format_exc())
self.changed = True
def _disable_zones(self, zones):
try:
self.elb.disable_zones(zones)
except boto.exception.BotoServerError as e:
self.module.fail_json(msg='unable to disable zones: %s' % e.message, exception=traceback.format_exc())
self.changed = True
def _attach_subnets(self, subnets):
self.elb_conn.attach_lb_to_subnets(self.name, subnets)
self.changed = True
def _detach_subnets(self, subnets):
self.elb_conn.detach_lb_from_subnets(self.name, subnets)
self.changed = True
def _set_subnets(self):
"""Determine which subnets need to be attached or detached on the ELB"""
if self.subnets:
if self.purge_subnets:
subnets_to_detach = list(set(self.elb.subnets) - set(self.subnets))
subnets_to_attach = list(set(self.subnets) - set(self.elb.subnets))
else:
subnets_to_detach = None
subnets_to_attach = list(set(self.subnets) - set(self.elb.subnets))
if subnets_to_attach:
self._attach_subnets(subnets_to_attach)
if subnets_to_detach:
self._detach_subnets(subnets_to_detach)
def _get_scheme(self):
"""Determine if the current scheme is different than the scheme of the ELB"""
if self.scheme:
if self.elb.scheme != self.scheme:
if not self.wait:
self.module.fail_json(msg="Unable to modify scheme without using the wait option")
return True
return False
def _set_zones(self):
"""Determine which zones need to be enabled or disabled on the ELB"""
if self.zones:
if self.purge_zones:
zones_to_disable = list(set(self.elb.availability_zones) -
set(self.zones))
zones_to_enable = list(set(self.zones) -
set(self.elb.availability_zones))
else:
zones_to_disable = None
zones_to_enable = list(set(self.zones) -
set(self.elb.availability_zones))
if zones_to_enable:
self._enable_zones(zones_to_enable)
# N.B. This must come second, in case it would have removed all zones
if zones_to_disable:
self._disable_zones(zones_to_disable)
def _set_security_groups(self):
if self.security_group_ids is not None and set(self.elb.security_groups) != set(self.security_group_ids):
self.elb_conn.apply_security_groups_to_lb(self.name, self.security_group_ids)
self.changed = True
def _set_health_check(self):
"""Set health check values on ELB as needed"""
if self.health_check:
# This just makes it easier to compare each of the attributes
# and look for changes. Keys are attributes of the current
# health_check; values are desired values of new health_check
health_check_config = {
"target": self._get_health_check_target(),
"timeout": self.health_check['response_timeout'],
"interval": self.health_check['interval'],
"unhealthy_threshold": self.health_check['unhealthy_threshold'],
"healthy_threshold": self.health_check['healthy_threshold'],
}
update_health_check = False
# The health_check attribute is *not* set on newly created
# ELBs! So we have to create our own.
if not self.elb.health_check:
self.elb.health_check = HealthCheck()
for attr, desired_value in health_check_config.items():
if getattr(self.elb.health_check, attr) != desired_value:
setattr(self.elb.health_check, attr, desired_value)
update_health_check = True
if update_health_check:
self.elb.configure_health_check(self.elb.health_check)
self.changed = True
def _check_attribute_support(self, attr):
return hasattr(boto.ec2.elb.attributes.LbAttributes(), attr)
def _set_cross_az_load_balancing(self):
attributes = self.elb.get_attributes()
if self.cross_az_load_balancing:
if not attributes.cross_zone_load_balancing.enabled:
self.changed = True
attributes.cross_zone_load_balancing.enabled = True
else:
if attributes.cross_zone_load_balancing.enabled:
self.changed = True
attributes.cross_zone_load_balancing.enabled = False
self.elb_conn.modify_lb_attribute(self.name, 'CrossZoneLoadBalancing',
attributes.cross_zone_load_balancing.enabled)
def _set_access_log(self):
attributes = self.elb.get_attributes()
if self.access_logs:
if 's3_location' not in self.access_logs:
self.module.fail_json(msg='s3_location information required')
access_logs_config = {
"enabled": True,
"s3_bucket_name": self.access_logs['s3_location'],
"s3_bucket_prefix": self.access_logs.get('s3_prefix', ''),
"emit_interval": self.access_logs.get('interval', 60),
}
update_access_logs_config = False
for attr, desired_value in access_logs_config.items():
if getattr(attributes.access_log, attr) != desired_value:
setattr(attributes.access_log, attr, desired_value)
update_access_logs_config = True
if update_access_logs_config:
self.elb_conn.modify_lb_attribute(self.name, 'AccessLog', attributes.access_log)
self.changed = True
elif attributes.access_log.enabled:
attributes.access_log.enabled = False
self.changed = True
self.elb_conn.modify_lb_attribute(self.name, 'AccessLog', attributes.access_log)
def _set_connection_draining_timeout(self):
attributes = self.elb.get_attributes()
if self.connection_draining_timeout is not None:
if not attributes.connection_draining.enabled or \
attributes.connection_draining.timeout != self.connection_draining_timeout:
self.changed = True
attributes.connection_draining.enabled = True
attributes.connection_draining.timeout = self.connection_draining_timeout
self.elb_conn.modify_lb_attribute(self.name, 'ConnectionDraining', attributes.connection_draining)
else:
if attributes.connection_draining.enabled:
self.changed = True
attributes.connection_draining.enabled = False
self.elb_conn.modify_lb_attribute(self.name, 'ConnectionDraining', attributes.connection_draining)
def _set_idle_timeout(self):
attributes = self.elb.get_attributes()
if self.idle_timeout is not None:
if attributes.connecting_settings.idle_timeout != self.idle_timeout:
self.changed = True
attributes.connecting_settings.idle_timeout = self.idle_timeout
self.elb_conn.modify_lb_attribute(self.name, 'ConnectingSettings', attributes.connecting_settings)
def _policy_name(self, policy_type):
return 'elb-classic-lb-{0}'.format(to_native(policy_type, errors='surrogate_or_strict'))
def _create_policy(self, policy_param, policy_meth, policy):
getattr(self.elb_conn, policy_meth)(policy_param, self.elb.name, policy)
def _delete_policy(self, elb_name, policy):
self.elb_conn.delete_lb_policy(elb_name, policy)
def _update_policy(self, policy_param, policy_meth, policy_attr, policy):
self._delete_policy(self.elb.name, policy)
self._create_policy(policy_param, policy_meth, policy)
def _set_listener_policy(self, listeners_dict, policy=None):
policy = [] if policy is None else policy
for listener_port in listeners_dict:
if listeners_dict[listener_port].startswith('HTTP'):
self.elb_conn.set_lb_policies_of_listener(self.elb.name, listener_port, policy)
def _set_stickiness_policy(self, elb_info, listeners_dict, policy, **policy_attrs):
for p in getattr(elb_info.policies, policy_attrs['attr']):
if str(p.__dict__['policy_name']) == str(policy[0]):
if str(p.__dict__[policy_attrs['dict_key']]) != str(policy_attrs['param_value'] or 0):
self._set_listener_policy(listeners_dict)
self._update_policy(policy_attrs['param_value'], policy_attrs['method'], policy_attrs['attr'], policy[0])
self.changed = True
break
else:
self._create_policy(policy_attrs['param_value'], policy_attrs['method'], policy[0])
self.changed = True
self._set_listener_policy(listeners_dict, policy)
def select_stickiness_policy(self):
if self.stickiness:
if 'cookie' in self.stickiness and 'expiration' in self.stickiness:
self.module.fail_json(msg='\'cookie\' and \'expiration\' can not be set at the same time')
elb_info = self.elb_conn.get_all_load_balancers(self.elb.name)[0]
d = {}
for listener in elb_info.listeners:
d[listener[0]] = listener[2]
listeners_dict = d
if self.stickiness['type'] == 'loadbalancer':
policy = []
policy_type = 'LBCookieStickinessPolicyType'
if self.module.boolean(self.stickiness['enabled']):
if 'expiration' not in self.stickiness:
self.module.fail_json(msg='expiration must be set when type is loadbalancer')
try:
expiration = self.stickiness['expiration'] if int(self.stickiness['expiration']) else None
except ValueError:
self.module.fail_json(msg='expiration must be set to an integer')
policy_attrs = {
'type': policy_type,
'attr': 'lb_cookie_stickiness_policies',
'method': 'create_lb_cookie_stickiness_policy',
'dict_key': 'cookie_expiration_period',
'param_value': expiration
}
policy.append(self._policy_name(policy_attrs['type']))
self._set_stickiness_policy(elb_info, listeners_dict, policy, **policy_attrs)
elif not self.module.boolean(self.stickiness['enabled']):
if len(elb_info.policies.lb_cookie_stickiness_policies):
if elb_info.policies.lb_cookie_stickiness_policies[0].policy_name == self._policy_name(policy_type):
self.changed = True
else:
self.changed = False
self._set_listener_policy(listeners_dict)
self._delete_policy(self.elb.name, self._policy_name(policy_type))
elif self.stickiness['type'] == 'application':
policy = []
policy_type = 'AppCookieStickinessPolicyType'
if self.module.boolean(self.stickiness['enabled']):
if 'cookie' not in self.stickiness:
self.module.fail_json(msg='cookie must be set when type is application')
policy_attrs = {
'type': policy_type,
'attr': 'app_cookie_stickiness_policies',
'method': 'create_app_cookie_stickiness_policy',
'dict_key': 'cookie_name',
'param_value': self.stickiness['cookie']
}
policy.append(self._policy_name(policy_attrs['type']))
self._set_stickiness_policy(elb_info, listeners_dict, policy, **policy_attrs)
elif not self.module.boolean(self.stickiness['enabled']):
if len(elb_info.policies.app_cookie_stickiness_policies):
if elb_info.policies.app_cookie_stickiness_policies[0].policy_name == self._policy_name(policy_type):
self.changed = True
self._set_listener_policy(listeners_dict)
self._delete_policy(self.elb.name, self._policy_name(policy_type))
else:
self._set_listener_policy(listeners_dict)
def _get_backend_policies(self):
"""Get a list of backend policies"""
policies = []
if self.elb.backends is not None:
for backend in self.elb.backends:
if backend.policies is not None:
for policy in backend.policies:
policies.append(str(backend.instance_port) + ':' + policy.policy_name)
return policies
def _set_backend_policies(self):
"""Sets policies for all backends"""
ensure_proxy_protocol = False
replace = []
backend_policies = self._get_backend_policies()
# Find out what needs to be changed
for listener in self.listeners:
want = False
if 'proxy_protocol' in listener and listener['proxy_protocol']:
ensure_proxy_protocol = True
want = True
if str(listener['instance_port']) + ':ProxyProtocol-policy' in backend_policies:
if not want:
replace.append({'port': listener['instance_port'], 'policies': []})
elif want:
replace.append({'port': listener['instance_port'], 'policies': ['ProxyProtocol-policy']})
# enable or disable proxy protocol
if ensure_proxy_protocol:
self._set_proxy_protocol_policy()
# Make the backend policies so
for item in replace:
self.elb_conn.set_lb_policies_of_backend_server(self.elb.name, item['port'], item['policies'])
self.changed = True
def _get_proxy_protocol_policy(self):
"""Find out if the elb has a proxy protocol enabled"""
if self.elb.policies is not None and self.elb.policies.other_policies is not None:
for policy in self.elb.policies.other_policies:
if policy.policy_name == 'ProxyProtocol-policy':
return policy.policy_name
return None
def _set_proxy_protocol_policy(self):
"""Install a proxy protocol policy if needed"""
proxy_policy = self._get_proxy_protocol_policy()
if proxy_policy is None:
self.elb_conn.create_lb_policy(
self.elb.name, 'ProxyProtocol-policy', 'ProxyProtocolPolicyType', {'ProxyProtocol': True}
)
self.changed = True
# TODO: remove proxy protocol policy if not needed anymore? There is no side effect to leaving it there
def _diff_list(self, a, b):
"""Find the entries in list a that are not in list b"""
b = set(b)
return [aa for aa in a if aa not in b]
def _get_instance_ids(self):
"""Get the current list of instance ids installed in the elb"""
instances = []
if self.elb.instances is not None:
for instance in self.elb.instances:
instances.append(instance.id)
return instances
def _set_instance_ids(self):
"""Register or deregister instances from an lb instance"""
assert_instances = self.instance_ids or []
has_instances = self._get_instance_ids()
add_instances = self._diff_list(assert_instances, has_instances)
if add_instances:
self.elb_conn.register_instances(self.elb.name, add_instances)
self.changed = True
if self.purge_instance_ids:
remove_instances = self._diff_list(has_instances, assert_instances)
if remove_instances:
self.elb_conn.deregister_instances(self.elb.name, remove_instances)
self.changed = True
def _set_tags(self):
"""Add/Delete tags"""
if self.tags is None:
return
params = {'LoadBalancerNames.member.1': self.name}
tagdict = dict()
# get the current list of tags from the ELB, if ELB exists
if self.elb:
current_tags = self.elb_conn.get_list('DescribeTags', params,
[('member', Tag)])
tagdict = dict((tag.Key, tag.Value) for tag in current_tags
if hasattr(tag, 'Key'))
# Add missing tags
dictact = dict(set(self.tags.items()) - set(tagdict.items()))
if dictact:
for i, key in enumerate(dictact):
params['Tags.member.%d.Key' % (i + 1)] = key
params['Tags.member.%d.Value' % (i + 1)] = dictact[key]
self.elb_conn.make_request('AddTags', params)
self.changed = True
# Remove extra tags
dictact = dict(set(tagdict.items()) - set(self.tags.items()))
if dictact:
for i, key in enumerate(dictact):
params['Tags.member.%d.Key' % (i + 1)] = key
self.elb_conn.make_request('RemoveTags', params)
self.changed = True
def _get_health_check_target(self):
"""Compose target string from healthcheck parameters"""
protocol = self.health_check['ping_protocol'].upper()
path = ""
if protocol in ['HTTP', 'HTTPS'] and 'ping_path' in self.health_check:
path = self.health_check['ping_path']
return "%s:%s%s" % (protocol, self.health_check['ping_port'], path)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state={'required': True, 'choices': ['present', 'absent']},
name={'required': True},
listeners={'default': None, 'required': False, 'type': 'list'},
purge_listeners={'default': True, 'required': False, 'type': 'bool'},
instance_ids={'default': None, 'required': False, 'type': 'list'},
purge_instance_ids={'default': False, 'required': False, 'type': 'bool'},
zones={'default': None, 'required': False, 'type': 'list'},
purge_zones={'default': False, 'required': False, 'type': 'bool'},
security_group_ids={'default': None, 'required': False, 'type': 'list'},
security_group_names={'default': None, 'required': False, 'type': 'list'},
health_check={'default': None, 'required': False, 'type': 'dict'},
subnets={'default': None, 'required': False, 'type': 'list'},
purge_subnets={'default': False, 'required': False, 'type': 'bool'},
scheme={'default': 'internet-facing', 'required': False, 'choices': ['internal', 'internet-facing']},
connection_draining_timeout={'default': None, 'required': False, 'type': 'int'},
idle_timeout={'default': None, 'type': 'int', 'required': False},
cross_az_load_balancing={'default': None, 'type': 'bool', 'required': False},
stickiness={'default': None, 'required': False, 'type': 'dict'},
access_logs={'default': None, 'required': False, 'type': 'dict'},
wait={'default': False, 'type': 'bool', 'required': False},
wait_timeout={'default': 60, 'type': 'int', 'required': False},
tags={'default': None, 'required': False, 'type': 'dict'}
)
)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive=[['security_group_ids', 'security_group_names']]
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if not region:
module.fail_json(msg="Region must be specified as a parameter, in EC2_REGION or AWS_REGION environment variables or in boto configuration file")
name = module.params['name']
state = module.params['state']
listeners = module.params['listeners']
purge_listeners = module.params['purge_listeners']
instance_ids = module.params['instance_ids']
purge_instance_ids = module.params['purge_instance_ids']
zones = module.params['zones']
purge_zones = module.params['purge_zones']
security_group_ids = module.params['security_group_ids']
security_group_names = module.params['security_group_names']
health_check = module.params['health_check']
access_logs = module.params['access_logs']
subnets = module.params['subnets']
purge_subnets = module.params['purge_subnets']
scheme = module.params['scheme']
connection_draining_timeout = module.params['connection_draining_timeout']
idle_timeout = module.params['idle_timeout']
cross_az_load_balancing = module.params['cross_az_load_balancing']
stickiness = module.params['stickiness']
wait = module.params['wait']
wait_timeout = module.params['wait_timeout']
tags = module.params['tags']
if state == 'present' and not listeners:
module.fail_json(msg="At least one listener is required for ELB creation")
if state == 'present' and not (zones or subnets):
module.fail_json(msg="At least one availability zone or subnet is required for ELB creation")
if wait_timeout > 600:
module.fail_json(msg='wait_timeout maximum is 600 seconds')
if security_group_names:
security_group_ids = []
try:
ec2 = connect_to_aws(boto.ec2, region, **aws_connect_params)
if subnets: # We have at least one subnet, ergo this is a VPC
vpc_conn = _get_vpc_connection(module=module, region=region, aws_connect_params=aws_connect_params)
vpc_id = vpc_conn.get_all_subnets([subnets[0]])[0].vpc_id
filters = {'vpc_id': vpc_id}
else:
filters = None
grp_details = ec2.get_all_security_groups(filters=filters)
for group_name in security_group_names:
if isinstance(group_name, string_types):
group_name = [group_name]
group_id = [str(grp.id) for grp in grp_details if str(grp.name) in group_name]
security_group_ids.extend(group_id)
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg=str(e))
elb_man = ElbManager(module, name, listeners, purge_listeners, zones,
purge_zones, security_group_ids, health_check,
subnets, purge_subnets, scheme,
connection_draining_timeout, idle_timeout,
cross_az_load_balancing,
access_logs, stickiness, wait, wait_timeout, tags,
region=region, instance_ids=instance_ids, purge_instance_ids=purge_instance_ids,
**aws_connect_params)
# check for unsupported attributes for this version of boto
if cross_az_load_balancing and not elb_man._check_attribute_support('cross_zone_load_balancing'):
module.fail_json(msg="You must install boto >= 2.18.0 to use the cross_az_load_balancing attribute")
if connection_draining_timeout and not elb_man._check_attribute_support('connection_draining'):
module.fail_json(msg="You must install boto >= 2.28.0 to use the connection_draining_timeout attribute")
if idle_timeout and not elb_man._check_attribute_support('connecting_settings'):
module.fail_json(msg="You must install boto >= 2.33.0 to use the idle_timeout attribute")
if state == 'present':
elb_man.ensure_ok()
elif state == 'absent':
elb_man.ensure_gone()
ansible_facts = {'ec2_elb': 'info'}
ec2_facts_result = dict(changed=elb_man.changed,
elb=elb_man.get_info(),
ansible_facts=ansible_facts)
module.exit_json(**ec2_facts_result)
if __name__ == '__main__':
main()

__main__.py | moth1995/AFS_PES_PSP_Converter | MIT | 447 bytes

from AFS import AFS
class Main:
def __init__(self):
        # Example: convert a PSP AFS to a generic AFS
my_afs = AFS("over.afs",True)
my_afs.convert_to_dkz()
my_afs.save_file("new_over.afs")
        # Example: convert a generic AFS back to a PSP AFS
my_afs = AFS("new_over.afs",False)
my_afs.convert_to_psp()
my_afs.save_file("new_over2.afs")
if __name__ == "__main__":
    Main()

wavefront_api_client/models/dynatrace_configuration.py | httpsgithu/python-client | Apache-2.0 | 6,359 bytes

# coding: utf-8
"""
Wavefront REST API Documentation
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from wavefront_api_client.configuration import Configuration
class DynatraceConfiguration(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'dynatrace_api_token': 'str',
'environment_id': 'str',
'metric_filter_regex': 'str'
}
attribute_map = {
'dynatrace_api_token': 'dynatraceAPIToken',
'environment_id': 'environmentID',
'metric_filter_regex': 'metricFilterRegex'
}
def __init__(self, dynatrace_api_token=None, environment_id=None, metric_filter_regex=None, _configuration=None): # noqa: E501
"""DynatraceConfiguration - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._dynatrace_api_token = None
self._environment_id = None
self._metric_filter_regex = None
self.discriminator = None
self.dynatrace_api_token = dynatrace_api_token
if environment_id is not None:
self.environment_id = environment_id
if metric_filter_regex is not None:
self.metric_filter_regex = metric_filter_regex
@property
def dynatrace_api_token(self):
"""Gets the dynatrace_api_token of this DynatraceConfiguration. # noqa: E501
The Dynatrace API Token # noqa: E501
:return: The dynatrace_api_token of this DynatraceConfiguration. # noqa: E501
:rtype: str
"""
return self._dynatrace_api_token
@dynatrace_api_token.setter
def dynatrace_api_token(self, dynatrace_api_token):
"""Sets the dynatrace_api_token of this DynatraceConfiguration.
The Dynatrace API Token # noqa: E501
:param dynatrace_api_token: The dynatrace_api_token of this DynatraceConfiguration. # noqa: E501
:type: str
"""
if self._configuration.client_side_validation and dynatrace_api_token is None:
raise ValueError("Invalid value for `dynatrace_api_token`, must not be `None`") # noqa: E501
self._dynatrace_api_token = dynatrace_api_token
@property
def environment_id(self):
"""Gets the environment_id of this DynatraceConfiguration. # noqa: E501
The ID of Dynatrace Environment # noqa: E501
:return: The environment_id of this DynatraceConfiguration. # noqa: E501
:rtype: str
"""
return self._environment_id
@environment_id.setter
def environment_id(self, environment_id):
"""Sets the environment_id of this DynatraceConfiguration.
The ID of Dynatrace Environment # noqa: E501
:param environment_id: The environment_id of this DynatraceConfiguration. # noqa: E501
:type: str
"""
self._environment_id = environment_id
@property
def metric_filter_regex(self):
"""Gets the metric_filter_regex of this DynatraceConfiguration. # noqa: E501
A regular expression that a metric name must match (case-insensitively) in order to be ingested # noqa: E501
:return: The metric_filter_regex of this DynatraceConfiguration. # noqa: E501
:rtype: str
"""
return self._metric_filter_regex
@metric_filter_regex.setter
def metric_filter_regex(self, metric_filter_regex):
"""Sets the metric_filter_regex of this DynatraceConfiguration.
A regular expression that a metric name must match (case-insensitively) in order to be ingested # noqa: E501
:param metric_filter_regex: The metric_filter_regex of this DynatraceConfiguration. # noqa: E501
:type: str
"""
self._metric_filter_regex = metric_filter_regex
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(DynatraceConfiguration, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DynatraceConfiguration):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, DynatraceConfiguration):
return True
return self.to_dict() != other.to_dict()
| 34.748634 | 409 | 0.643183 |
e43136be57ecb32bd1932e65a6b6e7c40e02f712 | 2,373 | py | Python | glance/db/sqlalchemy/migrate_repo/versions/007_add_owner.py | daespinel/glance | c8a00ea1f45679ce937146f9d7dadbbb056f1a48 | [
"Apache-2.0"
] | null | null | null | glance/db/sqlalchemy/migrate_repo/versions/007_add_owner.py | daespinel/glance | c8a00ea1f45679ce937146f9d7dadbbb056f1a48 | [
"Apache-2.0"
] | 5 | 2018-09-18T15:51:40.000Z | 2019-01-08T16:38:44.000Z | glance/db/sqlalchemy/migrate_repo/versions/007_add_owner.py | daespinel/glance | c8a00ea1f45679ce937146f9d7dadbbb056f1a48 | [
"Apache-2.0"
] | 6 | 2018-09-06T14:50:23.000Z | 2018-11-27T21:32:51.000Z | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import * # noqa
from glance.db.sqlalchemy.migrate_repo.schema import (
Boolean, DateTime, BigInteger, Integer, String,
Text) # noqa
def get_images_table(meta):
"""
Returns the Table object for the images table that corresponds to
the images table definition of this version.
"""
images = Table('images',
meta,
Column('id', Integer(), primary_key=True, nullable=False),
Column('name', String(255)),
Column('disk_format', String(20)),
Column('container_format', String(20)),
Column('size', BigInteger()),
Column('status', String(30), nullable=False),
Column('is_public',
Boolean(),
nullable=False,
default=False,
index=True),
Column('location', Text()),
Column('created_at', DateTime(), nullable=False),
Column('updated_at', DateTime()),
Column('deleted_at', DateTime()),
Column('deleted',
Boolean(),
nullable=False,
default=False,
index=True),
Column('checksum', String(32)),
Column('owner', String(255)),
mysql_engine='InnoDB',
extend_existing=True)
return images
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
images = get_images_table(meta)
owner = Column('owner', String(255))
owner.create(images)
| 35.954545 | 78 | 0.554994 |
8083ffff2a5e024bda35a1a512ae9b8a757bf091 | 7,485 | py | Python | magenta/models/image_stylization/image_stylization_evaluate.py | ebhtra/magenta | f1029448cf0924551c421d7d742490cc678b4e3f | [
"Apache-2.0"
] | 16 | 2016-09-02T04:59:30.000Z | 2022-01-11T10:38:29.000Z | magenta/models/image_stylization/image_stylization_evaluate.py | ebhtra/magenta | f1029448cf0924551c421d7d742490cc678b4e3f | [
"Apache-2.0"
] | 2 | 2016-09-25T16:39:59.000Z | 2016-11-18T17:43:41.000Z | magenta/models/image_stylization/image_stylization_evaluate.py | ebhtra/magenta | f1029448cf0924551c421d7d742490cc678b4e3f | [
"Apache-2.0"
] | 10 | 2016-09-02T04:59:32.000Z | 2021-09-29T06:57:24.000Z | # Copyright 2019 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Evaluates the N-styles style transfer model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ast
import os
from magenta.models.image_stylization import image_utils
from magenta.models.image_stylization import learning
from magenta.models.image_stylization import model
import tensorflow as tf
slim = tf.contrib.slim
DEFAULT_CONTENT_WEIGHTS = '{"vgg_16/conv3": 1.0}'
DEFAULT_STYLE_WEIGHTS = ('{"vgg_16/conv1": 1e-4, "vgg_16/conv2": 1e-4,'
' "vgg_16/conv3": 1e-4, "vgg_16/conv4": 1e-4}')
flags = tf.app.flags
flags.DEFINE_boolean('style_grid', False,
'Whether to generate the style grid.')
flags.DEFINE_boolean('style_crossover', False,
'Whether to do a style crossover in the style grid.')
flags.DEFINE_boolean('learning_curves', True,
'Whether to evaluate learning curves for all styles.')
flags.DEFINE_integer('batch_size', 16, 'Batch size')
flags.DEFINE_integer('image_size', 256, 'Image size.')
flags.DEFINE_integer('eval_interval_secs', 60,
'Frequency, in seconds, at which evaluation is run.')
flags.DEFINE_integer('num_evals', 32, 'Number of evaluations of the losses.')
flags.DEFINE_integer('num_styles', None, 'Number of styles.')
flags.DEFINE_string('content_weights', DEFAULT_CONTENT_WEIGHTS,
'Content weights')
flags.DEFINE_string('eval_dir', None,
'Directory where the results are saved to.')
flags.DEFINE_string('train_dir', None,
'Directory for checkpoints and summaries')
flags.DEFINE_string('master', '',
'Name of the TensorFlow master to use.')
flags.DEFINE_string('style_coefficients', None,
'Scales the style weights conditioned on the style image.')
flags.DEFINE_string('style_dataset_file', None, 'Style dataset file.')
flags.DEFINE_string('style_weights', DEFAULT_STYLE_WEIGHTS,
'Style weights')
FLAGS = flags.FLAGS
def main(_):
with tf.Graph().as_default():
# Create inputs in [0, 1], as expected by vgg_16.
inputs, _ = image_utils.imagenet_inputs(
FLAGS.batch_size, FLAGS.image_size)
evaluation_images = image_utils.load_evaluation_images(FLAGS.image_size)
# Process style and weight flags
if FLAGS.style_coefficients is None:
style_coefficients = [1.0 for _ in range(FLAGS.num_styles)]
else:
style_coefficients = ast.literal_eval(FLAGS.style_coefficients)
if len(style_coefficients) != FLAGS.num_styles:
raise ValueError(
'number of style coefficients differs from number of styles')
content_weights = ast.literal_eval(FLAGS.content_weights)
style_weights = ast.literal_eval(FLAGS.style_weights)
# Load style images.
style_images, labels, style_gram_matrices = image_utils.style_image_inputs(
os.path.expanduser(FLAGS.style_dataset_file),
batch_size=FLAGS.num_styles, image_size=FLAGS.image_size,
square_crop=True, shuffle=False)
labels = tf.unstack(labels)
def _create_normalizer_params(style_label):
"""Creates normalizer parameters from a style label."""
return {'labels': tf.expand_dims(style_label, 0),
'num_categories': FLAGS.num_styles,
'center': True,
'scale': True}
# Dummy call to simplify the reuse logic
model.transform(inputs, reuse=False,
normalizer_params=_create_normalizer_params(labels[0]))
def _style_sweep(inputs):
"""Transfers all styles onto the input one at a time."""
inputs = tf.expand_dims(inputs, 0)
stylized_inputs = [
model.transform(
inputs,
reuse=True,
normalizer_params=_create_normalizer_params(style_label))
for _, style_label in enumerate(labels)]
return tf.concat([inputs] + stylized_inputs, 0)
if FLAGS.style_grid:
style_row = tf.concat(
[tf.ones([1, FLAGS.image_size, FLAGS.image_size, 3]), style_images],
0)
stylized_training_example = _style_sweep(inputs[0])
stylized_evaluation_images = [
_style_sweep(image) for image in tf.unstack(evaluation_images)]
stylized_noise = _style_sweep(
tf.random_uniform([FLAGS.image_size, FLAGS.image_size, 3]))
stylized_style_images = [
_style_sweep(image) for image in tf.unstack(style_images)]
if FLAGS.style_crossover:
grid = tf.concat(
[style_row, stylized_training_example, stylized_noise] +
stylized_evaluation_images + stylized_style_images,
0)
else:
grid = tf.concat(
[style_row, stylized_training_example, stylized_noise] +
stylized_evaluation_images,
0)
if FLAGS.style_crossover:
grid_shape = [
3 + evaluation_images.get_shape().as_list()[0] + FLAGS.num_styles,
1 + FLAGS.num_styles]
else:
grid_shape = [
3 + evaluation_images.get_shape().as_list()[0],
1 + FLAGS.num_styles]
tf.summary.image(
'Style Grid',
tf.cast(
image_utils.form_image_grid(
grid,
grid_shape,
[FLAGS.image_size, FLAGS.image_size],
3) * 255.0,
tf.uint8))
if FLAGS.learning_curves:
metrics = {}
for i, label in enumerate(labels):
gram_matrices = dict(
(key, value[i: i + 1])
for key, value in style_gram_matrices.items())
stylized_inputs = model.transform(
inputs,
reuse=True,
normalizer_params=_create_normalizer_params(label))
_, loss_dict = learning.total_loss(
inputs, stylized_inputs, gram_matrices, content_weights,
style_weights, reuse=i > 0)
for key, value in loss_dict.items():
metrics['{}_style_{}'.format(key, i)] = slim.metrics.streaming_mean(
value)
names_values, names_updates = slim.metrics.aggregate_metric_map(metrics)
for name, value in names_values.items():
summary_op = tf.summary.scalar(name, value, [])
print_op = tf.Print(summary_op, [value], name)
tf.add_to_collection(tf.GraphKeys.SUMMARIES, print_op)
eval_op = names_updates.values()
num_evals = FLAGS.num_evals
else:
eval_op = None
num_evals = 1
slim.evaluation.evaluation_loop(
master=FLAGS.master,
checkpoint_dir=os.path.expanduser(FLAGS.train_dir),
logdir=os.path.expanduser(FLAGS.eval_dir),
eval_op=eval_op,
num_evals=num_evals,
eval_interval_secs=FLAGS.eval_interval_secs)
def console_entry_point():
tf.app.run(main)
if __name__ == '__main__':
console_entry_point()
| 38.188776 | 79 | 0.662391 |
d47b069e6d4b70873fbc56bb57e6ba28455457ca | 1,297 | py | Python | config/urls.py | puertoricanDev/una-hora | 36ab4c5124f04f13bbb178374412c8bf92d58eb6 | [
"MIT"
] | null | null | null | config/urls.py | puertoricanDev/una-hora | 36ab4c5124f04f13bbb178374412c8bf92d58eb6 | [
"MIT"
] | null | null | null | config/urls.py | puertoricanDev/una-hora | 36ab4c5124f04f13bbb178374412c8bf92d58eb6 | [
"MIT"
] | null | null | null | """una_hora URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path
from django.views.generic import TemplateView
urlpatterns = [
path("", TemplateView.as_view(template_name="base.html"), name="home-example"),
path("admin/", admin.site.urls),
path("users/", include("una_hora.users.urls")),
path("search/", include("una_hora.search.urls")),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns = (
[path("__debug__/", include(debug_toolbar.urls))]
+ urlpatterns
+ static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
)
| 35.054054 | 83 | 0.708558 |
4730e8642a280b0bda532b738018401630ed1419 | 6,440 | py | Python | testscripts/RDKB/component/WIFIHAL/TS_WIFIHAL_2.4GHzSet4DigitApWpsDevicePIN.py | cablelabs/tools-tdkb | 1fd5af0f6b23ce6614a4cfcbbaec4dde430fad69 | [
"Apache-2.0"
] | null | null | null | testscripts/RDKB/component/WIFIHAL/TS_WIFIHAL_2.4GHzSet4DigitApWpsDevicePIN.py | cablelabs/tools-tdkb | 1fd5af0f6b23ce6614a4cfcbbaec4dde430fad69 | [
"Apache-2.0"
] | null | null | null | testscripts/RDKB/component/WIFIHAL/TS_WIFIHAL_2.4GHzSet4DigitApWpsDevicePIN.py | cablelabs/tools-tdkb | 1fd5af0f6b23ce6614a4cfcbbaec4dde430fad69 | [
"Apache-2.0"
] | null | null | null | ##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2018 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
'''
<?xml version="1.0" encoding="UTF-8"?><xml>
<id/>
<version>1</version>
<name>TS_WIFIHAL_2.4GHzSet4DigitApWpsDevicePIN</name>
<primitive_test_id/>
<primitive_test_name>WIFIHAL_GetOrSetParamULongValue</primitive_test_name>
<primitive_test_version>2</primitive_test_version>
<status>FREE</status>
  <synopsis>To set the ApWpsDevicePIN to a 4 digit value and check whether it is allowed to be set for 2.4GHz</synopsis>
<groups_id/>
<execution_time>1</execution_time>
<long_duration>false</long_duration>
<advanced_script>false</advanced_script>
<remarks/>
<skip>false</skip>
<box_types>
<box_type>Broadband</box_type>
</box_types>
<rdk_versions>
<rdk_version>RDKB</rdk_version>
</rdk_versions>
<test_cases>
<test_case_id>TC_WIFIHAL_219</test_case_id>
<test_objective>To set the WpsDevicePIN to a 4 digit value and check whether it is set for 2.4GHz</test_objective>
<test_type>Negative</test_type>
<test_setup>Broadband</test_setup>
    <pre_requisite>1. CCSP components should be in a running state; otherwise invoke cosa_start.sh manually, which starts all the CCSP components and the TDK component
2. TDK Agent should be in a running state, or invoke it through the StartTdk.sh script</pre_requisite>
<api_or_interface_used>wifi_getApWpsDevicePIN()
wifi_setApWpsDevicePIN()</api_or_interface_used>
<input_parameters>methodName : getApWpsDevicePIN
methodName : setApWpsDevicePIN
radioIndex : 0</input_parameters>
<automation_approch>1. Load wifihal module
2. Using WIFIHAL_GetOrSetParamULongValue invoke wifi_getApWpsDevicePIN() and save the initial value
3. Using WIFIHAL_GetOrSetParamULongValue invoke wifi_setApWpsDevicePIN() and set a value with 4 digit.
4. Using WIFIHAL_GetOrSetParamULongValue invoke wifi_getApWpsDevicePIN() and get the previously set value
5. If the set and get values are the same, return FAILURE, else return SUCCESS
6. Revert to the initial value if the set operation succeeds
7. Unload wifihal module</automation_approch>
    <except_output>Set operation should not happen since it is an invalid number of digits</except_output>
<priority>High</priority>
<test_stub_interface>WIFIHAL</test_stub_interface>
<test_script>TS_WIFIHAL_2.4GHzSet4DigitApWpsDevicePIN</test_script>
<skipped>No</skipped>
<release_version/>
<remarks/>
</test_cases>
</xml>
'''
# use tdklib library,which provides a wrapper for tdk testcase script
import tdklib;
from wifiUtility import *;
#Test component to be tested
obj = tdklib.TDKScriptingLibrary("wifihal","1");
#IP and Port of box, No need to change,
#This will be replaced with correspoing Box Ip and port while executing script
ip = <ipaddress>
port = <port>
obj.configureTestCase(ip,port,'TS_WIFIHAL_2.4GHzSet4DigitApWpsDevicePIN');
loadmodulestatus =obj.getLoadModuleResult();
print "[LIB LOAD STATUS] : %s" %loadmodulestatus
if "SUCCESS" in loadmodulestatus.upper():
obj.setLoadModuleStatus("SUCCESS");
#get the ApWpsDevicePIN
expectedresult="SUCCESS";
radioIndex = 0
getMethod = "getApWpsDevicePIN"
primitive = 'WIFIHAL_GetOrSetParamULongValue'
tdkTestObj, actualresult, details = ExecuteWIFIHalCallMethod(obj, primitive, radioIndex, 0, getMethod)
if expectedresult in actualresult :
initPIN = int(details.split(":")[1].strip())
#set the ApWpsDevicePIN
expectedresult="FAILURE";
radioIndex = 0
setMethod = "setApWpsDevicePIN"
setPIN = 1234
primitive = 'WIFIHAL_GetOrSetParamULongValue'
tdkTestObj, actualresult, details = ExecuteWIFIHalCallMethod(obj, primitive, radioIndex, setPIN, setMethod)
if expectedresult in actualresult :
print "TEST STEP 1: Set the DevicePIN to a 4 digit Value"
print "SET VALUE IS :",setPIN
print "EXPECTED RESULT : DevicePIN set operation should not happen"
print "ACTUAL RESULT :Set operation returns FAILURE"
print "TEST EXECUTION RESULT: SUCCESS"
tdkTestObj.setResultStatus("SUCCESS");
else:
print "TEST STEP 2: Set the DevicePIN to a 4 Digit value"
print "SET VALUE IS :",setPIN
print "EXPECTED RESULT : DevicePIN set operation should not happen"
print "ACTUAL RESULT :Set operation returns SUCCESS"
print "TEST EXECUTION RESULT: FAILURE"
tdkTestObj.setResultStatus("FAILURE");
        # check whether "setApWpsDevicePIN" does not return a false success
tdkTestObj, actualresult, details = ExecuteWIFIHalCallMethod(obj, primitive, radioIndex, 0, getMethod)
getPIN = int(details.split(":")[1].strip())
if getPIN == setPIN:
#Revert the pin to initial value
expectedresult = "SUCCESS";
tdkTestObj, actualresult, details = ExecuteWIFIHalCallMethod(obj, primitive, radioIndex, initPIN, setMethod)
if expectedresult in actualresult :
print "Successfully reverted to initial pin"
tdkTestObj.setResultStatus("SUCCESS");
else:
print "Unable to revert to initial value"
tdkTestObj.setResultStatus("FAILURE");
else:
print "HAL API setApWpsDevicePIN() returns false success"
tdkTestObj.setResultStatus("FAILURE");
else:
print "getApWpsDevicePIN() call failed"
tdkTestObj.setResultStatus("FAILURE");
obj.unloadModule("wifihal");
else:
print "Failed to load wifi module";
obj.setLoadModuleStatus("FAILURE");
| 43.221477 | 157 | 0.700932 |
e4cbcfd12f14c21cb61d73a5a18fcc4ef6fc6329 | 2,537 | py | Python | heatclient/v1/events.py | jasondunsmore/python-heatclient | 13b4f82835fcf5dd0f206745ae13378796b4a029 | [
"Apache-2.0"
] | null | null | null | heatclient/v1/events.py | jasondunsmore/python-heatclient | 13b4f82835fcf5dd0f206745ae13378796b4a029 | [
"Apache-2.0"
] | null | null | null | heatclient/v1/events.py | jasondunsmore/python-heatclient | 13b4f82835fcf5dd0f206745ae13378796b4a029 | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six.moves.urllib import parse
from heatclient.openstack.common.apiclient import base
from heatclient.openstack.common import strutils
from heatclient.v1 import stacks
DEFAULT_PAGE_SIZE = 20
class Event(base.Resource):
def __repr__(self):
return "<Event %s>" % self._info
def update(self, **fields):
self.manager.update(self, **fields)
def delete(self):
return self.manager.delete(self)
def data(self, **kwargs):
return self.manager.data(self, **kwargs)
class EventManager(stacks.StackChildManager):
resource_class = Event
def list(self, stack_id, resource_name=None):
"""Get a list of events.
:param stack_id: ID of stack the events belong to
:param resource_name: Optional name of resources to filter events by
:rtype: list of :class:`Event`
"""
if resource_name is None:
url = '/stacks/%s/events' % stack_id
else:
stack_id = self._resolve_stack_id(stack_id)
url = '/stacks/%s/resources/%s/events' % (
parse.quote(stack_id, ''),
parse.quote(strutils.safe_encode(resource_name), ''))
return self._list(url, "events")
def get(self, stack_id, resource_name, event_id):
"""Get the details for a specific event.
:param stack_id: ID of stack containing the event
:param resource_name: ID of resource the event belongs to
:param event_id: ID of event to get the details for
"""
stack_id = self._resolve_stack_id(stack_id)
url_str = '/stacks/%s/resources/%s/events/%s' % (
parse.quote(stack_id, ''),
parse.quote(strutils.safe_encode(resource_name), ''),
parse.quote(event_id, ''))
resp, body = self.client.json_request('GET', url_str)
return Event(self, body['event'])
| 35.732394 | 78 | 0.651951 |
06c48e22caf5803a6c0a8a3b788af66c4fe68253 | 288 | py | Python | projects/Countdown_timer/main.py | lfz97/python-mini-projects | e0cfd4b0fe5e0bb4d443daba594e83332d5fb720 | [
"MIT"
] | 2 | 2022-01-08T16:59:55.000Z | 2022-01-08T17:34:28.000Z | projects/Countdown_timer/main.py | Lifesucks-666/python-mini-projects | 79294db294dd495f3ceb55f4b6719bd980be476c | [
"MIT"
] | 14 | 2022-02-13T10:28:48.000Z | 2022-03-15T21:11:46.000Z | projects/Countdown_timer/main.py | Lifesucks-666/python-mini-projects | 79294db294dd495f3ceb55f4b6719bd980be476c | [
"MIT"
] | 2 | 2022-03-09T11:11:57.000Z | 2022-03-09T16:23:32.000Z | import time
def countdown(t):
while t:
mins, secs = divmod(t, 60)
timer = '{:02d}:{:02d}'.format(mins,secs)
print(timer, end="\r")
time.sleep(1)
t -= 1
print('Timer completed!')
t = input('Enter the time in seconds: ')
countdown(int(t)) | 19.2 | 49 | 0.541667 |
63112b0a4a1817fe611bce4388f9860008dca8d7 | 1,091 | py | Python | qanda/migrations/0002_comment.py | Emmanuel-code/Questions-Answers | 494cb1c4081c96b1a85230600dcd4d8248ad4ecf | [
"bzip2-1.0.6"
] | null | null | null | qanda/migrations/0002_comment.py | Emmanuel-code/Questions-Answers | 494cb1c4081c96b1a85230600dcd4d8248ad4ecf | [
"bzip2-1.0.6"
] | 10 | 2020-03-24T18:19:15.000Z | 2022-03-12T00:16:58.000Z | qanda/migrations/0002_comment.py | Emmanuel-code/Questions-Answers | 494cb1c4081c96b1a85230600dcd4d8248ad4ecf | [
"bzip2-1.0.6"
] | null | null | null | # Generated by Django 3.0.2 on 2020-02-03 12:23
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('qanda', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment', models.TextField()),
('created', models.DateTimeField(auto_now_add=True)),
('active', models.BooleanField(default=True)),
('answer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='qanda.Answer')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('created',),
},
),
]
| 35.193548 | 135 | 0.60495 |
770ef2df80fd144e92fa9b9d79e527544dd441f1 | 3,005 | py | Python | test/functional/mempool_resurrect_test.py | adnetcoin/adnetcoin | bf95a2432f82d7d1f07842d1a4cba96736e96abd | [
"MIT"
] | null | null | null | test/functional/mempool_resurrect_test.py | adnetcoin/adnetcoin | bf95a2432f82d7d1f07842d1a4cba96736e96abd | [
"MIT"
] | null | null | null | test/functional/mempool_resurrect_test.py | adnetcoin/adnetcoin | bf95a2432f82d7d1f07842d1a4cba96736e96abd | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test resurrection of mined transactions when the blockchain is re-organized."""
from test_framework.test_framework import AdnetcoinTestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class MempoolCoinbaseTest(AdnetcoinTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 1
self.setup_clean_chain = False
# Just need one node for this test
self.extra_args = [["-checkmempool"]]
def run_test(self):
node0_address = self.nodes[0].getnewaddress()
# Spend block 1/2/3's coinbase transactions
# Mine a block.
# Create three more transactions, spending the spends
# Mine another block.
# ... make sure all the transactions are confirmed
# Invalidate both blocks
# ... make sure all the transactions are put back in the mempool
# Mine a new block
# ... make sure all the transactions are confirmed again.
b = [ self.nodes[0].getblockhash(n) for n in range(1, 4) ]
coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
spends1_raw = [ create_tx(self.nodes[0], txid, node0_address, 49.99) for txid in coinbase_txids ]
spends1_id = [ self.nodes[0].sendrawtransaction(tx) for tx in spends1_raw ]
blocks = []
blocks.extend(self.nodes[0].generate(1))
spends2_raw = [ create_tx(self.nodes[0], txid, node0_address, 49.98) for txid in spends1_id ]
spends2_id = [ self.nodes[0].sendrawtransaction(tx) for tx in spends2_raw ]
blocks.extend(self.nodes[0].generate(1))
# mempool should be empty, all txns confirmed
assert_equal(set(self.nodes[0].getrawmempool()), set())
for txid in spends1_id+spends2_id:
tx = self.nodes[0].gettransaction(txid)
assert(tx["confirmations"] > 0)
# Use invalidateblock to re-org back; all transactions should
# end up unconfirmed and back in the mempool
for node in self.nodes:
node.invalidateblock(blocks[0])
# mempool should be empty, all txns confirmed
assert_equal(set(self.nodes[0].getrawmempool()), set(spends1_id+spends2_id))
for txid in spends1_id+spends2_id:
tx = self.nodes[0].gettransaction(txid)
assert(tx["confirmations"] == 0)
# Generate another block, they should all get mined
self.nodes[0].generate(1)
# mempool should be empty, all txns confirmed
assert_equal(set(self.nodes[0].getrawmempool()), set())
for txid in spends1_id+spends2_id:
tx = self.nodes[0].gettransaction(txid)
assert(tx["confirmations"] > 0)
if __name__ == '__main__':
MempoolCoinbaseTest().main()
| 41.164384 | 105 | 0.656905 |
2bdfd97142b379a05ffcb6bb77df3a9004fb383b | 5,915 | py | Python | tests/python/pants_test/backend/graph_info/tasks/test_list_targets.py | silverguo/pants | 141510d03fbf2b7e1a0b54f66b54088697f6fa51 | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/backend/graph_info/tasks/test_list_targets.py | silverguo/pants | 141510d03fbf2b7e1a0b54f66b54088697f6fa51 | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/backend/graph_info/tasks/test_list_targets.py | silverguo/pants | 141510d03fbf2b7e1a0b54f66b54088697f6fa51 | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
from textwrap import dedent
import pytest
from pants.backend.jvm.artifact import Artifact
from pants.backend.jvm.repository import Repository
from pants.backend.jvm.scala_artifact import ScalaArtifact
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.python.targets.python_library import PythonLibrary
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.build_graph.target import Target
from pants.core.project_info import list_targets_old
from pants.testutil.goal_rule_test_base import GoalRuleTestBase
class ListTargetsTest(GoalRuleTestBase):
goal_cls = list_targets_old.List
@classmethod
def alias_groups(cls):
return BuildFileAliases(
targets={
"target": Target,
"java_library": JavaLibrary,
"python_library": PythonLibrary,
},
objects={
"pants": lambda x: x,
"artifact": Artifact,
"scala_artifact": ScalaArtifact,
"public": Repository(
name="public", url="http://maven.example.com", push_db_basedir="/tmp"
),
},
)
@classmethod
def rules(cls):
return super().rules() + list_targets_old.rules()
def setUp(self) -> None:
super().setUp()
# Setup a BUILD tree for various list tests
class Lib:
def __init__(self, name: str, provides: bool = False) -> None:
self.name = name
self.provides = (
dedent(
f"""
artifact(
org='com.example',
name='{name}',
repo=public
)
"""
).strip()
if provides
else "None"
)
def create_library(path: str, *libs: Lib) -> None:
libs = libs or (Lib(os.path.basename(os.path.dirname(self.build_path(path)))),)
for lib in libs:
target = f"java_library(name='{lib.name}', provides={lib.provides}, sources=[])\n"
self.add_to_build_file(path, target)
create_library("a")
create_library("a/b", Lib("b", provides=True))
create_library("a/b/c", Lib("c"), Lib("c2", provides=True), Lib("c3"))
create_library("a/b/d")
create_library("a/b/e", Lib("e1"))
self.add_to_build_file(
"f",
dedent(
'''
target(
name='alias',
dependencies=[
'a/b/c:c3',
'a/b/d:d',
],
description = """
Exercises alias resolution.
Further description.
""",
)
'''
),
)
def test_list_all_empty(self):
# NB: Also renders a warning to stderr, which is challenging to detect here but confirmed in:
# tests/python/pants_test/integration/list_integration_test.py
self.assert_console_output(args=[])
def test_list_path(self):
self.assert_console_output("a/b:b", args=["a/b"])
def test_list_siblings(self):
self.assert_console_output("a/b:b", args=["a/b:"])
self.assert_console_output("a/b/c:c", "a/b/c:c2", "a/b/c:c3", args=["a/b/c/:"])
def test_list_descendants(self):
self.assert_console_output("a/b/c:c", "a/b/c:c2", "a/b/c:c3", args=["a/b/c/::"])
self.assert_console_output(
"a/b:b", "a/b/c:c", "a/b/c:c2", "a/b/c:c3", "a/b/d:d", "a/b/e:e1", args=["a/b::"]
)
@pytest.mark.skip(reason="flaky: https://github.com/pantsbuild/pants/issues/8678")
def test_list_all(self):
self.assert_entries(
"\n",
"a:a",
"a/b:b",
"a/b/c:c",
"a/b/c:c2",
"a/b/c:c3",
"a/b/d:d",
"a/b/e:e1",
"f:alias",
args=["::"],
)
self.assert_entries(
", ",
"a:a",
"a/b:b",
"a/b/c:c",
"a/b/c:c2",
"a/b/c:c3",
"a/b/d:d",
"a/b/e:e1",
"f:alias",
args=["--sep=, ", "::"],
)
self.assert_console_output(
"a:a",
"a/b:b",
"a/b/c:c",
"a/b/c:c2",
"a/b/c:c3",
"a/b/d:d",
"a/b/e:e1",
"f:alias",
args=["::"],
)
def test_list_provides(self):
self.assert_console_output(
"a/b:b com.example#b", "a/b/c:c2 com.example#c2", args=["--provides", "::"]
)
def test_list_provides_customcols(self):
self.assert_console_output(
"/tmp a/b:b http://maven.example.com public com.example#b",
"/tmp a/b/c:c2 http://maven.example.com public com.example#c2",
args=[
"--provides",
"--provides-columns=push_db_basedir,address,repo_url,repo_name,artifact_id",
"::",
],
)
def test_list_dedups(self):
self.assert_console_output("a/b/c:c3", "a/b/d:d", args=["a/b/d/::", "a/b/c:c3", "a/b/d:d"])
def test_list_documented(self):
self.assert_console_output(
# Confirm empty listing
args=["--documented", "a/b"],
)
self.assert_console_output_ordered(
"f:alias",
" Exercises alias resolution.",
" Further description.",
args=["--documented", "::"],
)
| 31.801075 | 101 | 0.495182 |
c294657ecf99c2d50f0e54ba0caf0405cd5fef24 | 2,349 | py | Python | DIZED_APPS/INCANTATION/modules/exploits/routers/huawei/hg630a_default_creds.py | tanc7/ArmsCommander-TestBed | e00bb166084735d8b0de058b54d6d98a057cd7d8 | [
"FSFUL"
] | 1 | 2018-10-17T04:49:42.000Z | 2018-10-17T04:49:42.000Z | DIZED_APPS/INCANTATION/routersploit/modules/exploits/routers/huawei/hg630a_default_creds.py | tanc7/ArmsCommander-TestBed | e00bb166084735d8b0de058b54d6d98a057cd7d8 | [
"FSFUL"
] | null | null | null | DIZED_APPS/INCANTATION/routersploit/modules/exploits/routers/huawei/hg630a_default_creds.py | tanc7/ArmsCommander-TestBed | e00bb166084735d8b0de058b54d6d98a057cd7d8 | [
"FSFUL"
] | null | null | null | import socket
import paramiko
from routersploit import (
exploits,
print_error,
print_success,
mute,
ssh_interactive,
validators
)
class Exploit(exploits.Exploit):
"""
    Exploit implementation for Huawei HG630a and HG630a-50 devices. If the target is vulnerable, it is possible to authenticate through the SSH service.
"""
__info__ = {
'name': 'Huawei HG630a Default Credentials',
        'description': 'Module exploits default SSH credentials on Huawei HG630a and HG630a-50 devices. '
                       'If the target is vulnerable, it is possible to authenticate through the SSH service.',
'authors': [
'Murat Sahin (@murtshn)', # vulnerability discovery
'Marcin Bury <marcin.bury[at]reverse-shell.com>', # routersploit module
],
'references': [
'https://www.exploit-db.com/exploits/38663/',
],
'devices': [
'Huawei HG630a',
'Huawei HG630a-50',
],
}
target = exploits.Option('', 'Target IP address', validators=validators.address) # target address
ssh_port = exploits.Option(22, 'Target SSH Port', validators=validators.integer) # target port
user = exploits.Option('admin', 'Default username to log in with')
password = exploits.Option('admin', 'Default password to log in with')
def run(self):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(self.target, self.ssh_port, timeout=5, username=self.user, password=self.password)
except (paramiko.ssh_exception.SSHException, socket.error):
print_error("Exploit failed - cannot log in with credentials {} / {}".format(self.user, self.password))
return
else:
print_success("SSH - Successful authentication")
ssh_interactive(ssh)
@mute
def check(self):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(self.target, self.ssh_port, timeout=5, username=self.user, password=self.password)
except (paramiko.ssh_exception.SSHException, socket.error):
return False # target is not vulnerable
else:
return True # target is vulnerable
| 36.138462 | 147 | 0.639421 |
cf68c6036cd6465c57a59d48c7189df022afc921 | 22,057 | py | Python | venv/lib/python3.9/site-packages/s3transfer/crt.py | rmit-cyber-ready-cic/The-Leaders | 9d2fc03de4979c5b265f82405c23aa99d0f627a3 | [
"MIT"
] | 158 | 2016-01-13T20:54:18.000Z | 2022-03-10T17:52:47.000Z | s3transfer/crt.py | Saiprasad16/s3transfer | 59e968d05288092948284001710c416677102266 | [
"Apache-2.0"
] | 164 | 2016-01-13T00:54:14.000Z | 2022-03-14T09:01:54.000Z | s3transfer/crt.py | Saiprasad16/s3transfer | 59e968d05288092948284001710c416677102266 | [
"Apache-2.0"
] | 108 | 2016-01-12T23:50:28.000Z | 2022-02-25T06:31:51.000Z | # Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging
from io import BytesIO
import threading
import botocore.awsrequest
import botocore.session
from botocore import UNSIGNED
from botocore.config import Config
from botocore.compat import urlsplit
from botocore.exceptions import NoCredentialsError
import awscrt.http
from awscrt.s3 import S3Client, S3RequestType, S3RequestTlsMode
from awscrt.io import ClientBootstrap, DefaultHostResolver, EventLoopGroup
from awscrt.io import ClientTlsContext, TlsContextOptions
from awscrt.auth import AwsCredentialsProvider, AwsCredentials
from s3transfer.exceptions import TransferNotDoneError
from s3transfer.futures import BaseTransferFuture, BaseTransferMeta
from s3transfer.utils import CallArgs, OSUtils, get_callbacks
from s3transfer.constants import GB, MB
logger = logging.getLogger(__name__)
class CRTCredentialProviderAdapter:
def __init__(self, botocore_credential_provider):
self._botocore_credential_provider = botocore_credential_provider
self._loaded_credentials = None
self._lock = threading.Lock()
def __call__(self):
credentials = self._get_credentials().get_frozen_credentials()
return AwsCredentials(credentials.access_key,
credentials.secret_key, credentials.token)
def _get_credentials(self):
with self._lock:
if self._loaded_credentials is None:
loaded_creds = self._botocore_credential_provider\
.load_credentials()
if loaded_creds is None:
raise NoCredentialsError()
self._loaded_credentials = loaded_creds
return self._loaded_credentials
def create_s3_crt_client(region,
botocore_credential_provider=None,
num_threads=None,
target_throughput=5 * GB / 8,
part_size=8 * MB,
use_ssl=True,
verify=None):
"""
:type region: str
:param region: The region used for signing
:type botocore_credential_provider:
Optional[botocore.credentials.CredentialResolver]
:param botocore_credential_provider: Provide credentials for CRT
to sign the request if not set, the request will not be signed
:type num_threads: Optional[int]
:param num_threads: Number of worker threads generated. Default
is the number of processors in the machine.
:type target_throughput: Optional[int]
:param target_throughput: Throughput target in Bytes.
Default is 0.625 GB/s (which translates to 5 Gb/s).
:type part_size: Optional[int]
:param part_size: Size, in Bytes, of parts that files will be downloaded
or uploaded in.
:type use_ssl: boolean
:param use_ssl: Whether or not to use SSL. By default, SSL is used.
Note that not all services support non-ssl connections.
:type verify: Optional[boolean/string]
:param verify: Whether or not to verify SSL certificates.
By default SSL certificates are verified. You can provide the
following values:
* False - do not validate SSL certificates. SSL will still be
used (unless use_ssl is False), but SSL certificates
will not be verified.
* path/to/cert/bundle.pem - A filename of the CA cert bundle to
use. Specify this argument if you want to use a custom CA cert
bundle instead of the default one on your system.
"""
event_loop_group = EventLoopGroup(num_threads)
host_resolver = DefaultHostResolver(event_loop_group)
bootstrap = ClientBootstrap(event_loop_group, host_resolver)
provider = None
tls_connection_options = None
tls_mode = S3RequestTlsMode.ENABLED if use_ssl \
else S3RequestTlsMode.DISABLED
if verify is not None:
tls_ctx_options = TlsContextOptions()
if verify:
tls_ctx_options.override_default_trust_store_from_path(
ca_filepath=verify)
else:
tls_ctx_options.verify_peer = False
client_tls_option = ClientTlsContext(tls_ctx_options)
tls_connection_options = client_tls_option.new_connection_options()
if botocore_credential_provider:
        credentials_provider_adapter = CRTCredentialProviderAdapter(
            botocore_credential_provider)
        provider = AwsCredentialsProvider.new_delegate(
            credentials_provider_adapter)
target_gbps = target_throughput * 8 / GB
return S3Client(
bootstrap=bootstrap,
region=region,
credential_provider=provider,
part_size=part_size,
tls_mode=tls_mode,
tls_connection_options=tls_connection_options,
throughput_target_gbps=target_gbps)
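# A minimal usage sketch, assuming valid AWS credentials are configured;
# the region, bucket, key and local path below are placeholder assumptions:
#
#     session = botocore.session.Session()
#     crt_client = create_s3_crt_client(
#         region='us-west-2',
#         botocore_credential_provider=session.get_component(
#             'credential_provider'))
#     serializer = BotocoreCRTRequestSerializer(session)
#     with CRTTransferManager(crt_client, serializer) as manager:
#         future = manager.download('my-bucket', 'my-key', '/tmp/my-key')
#         future.result()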
class CRTTransferManager(object):
def __init__(self, crt_s3_client, crt_request_serializer, osutil=None):
"""A transfer manager interface for Amazon S3 on CRT s3 client.
:type crt_s3_client: awscrt.s3.S3Client
        :param crt_s3_client: The CRT s3 client, handling all the
            HTTP requests and functions under the hood
:type crt_request_serializer: s3transfer.crt.BaseCRTRequestSerializer
:param crt_request_serializer: Serializer, generates unsigned crt HTTP
request.
:type osutil: s3transfer.utils.OSUtils
:param osutil: OSUtils object to use for os-related behavior when
using with transfer manager.
"""
if osutil is None:
self._osutil = OSUtils()
self._crt_s3_client = crt_s3_client
self._s3_args_creator = S3ClientArgsCreator(
crt_request_serializer, self._osutil)
self._future_coordinators = []
self._semaphore = threading.Semaphore(128) # not configurable
# A counter to create unique id's for each transfer submitted.
self._id_counter = 0
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, *args):
cancel = False
if exc_type:
cancel = True
self._shutdown(cancel)
def download(self, bucket, key, fileobj, extra_args=None,
subscribers=None):
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = {}
callargs = CallArgs(bucket=bucket, key=key, fileobj=fileobj,
extra_args=extra_args, subscribers=subscribers)
return self._submit_transfer("get_object", callargs)
def upload(self, fileobj, bucket, key, extra_args=None,
subscribers=None):
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = {}
callargs = CallArgs(
bucket=bucket, key=key, fileobj=fileobj,
extra_args=extra_args, subscribers=subscribers)
return self._submit_transfer("put_object", callargs)
def delete(self, bucket, key, extra_args=None,
subscribers=None):
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = {}
callargs = CallArgs(
bucket=bucket, key=key, extra_args=extra_args,
subscribers=subscribers)
return self._submit_transfer("delete_object", callargs)
def shutdown(self, cancel=False):
self._shutdown(cancel)
def _cancel_transfers(self):
for coordinator in self._future_coordinators:
if not coordinator.done():
coordinator.cancel()
def _finish_transfers(self):
for coordinator in self._future_coordinators:
coordinator.result()
def _wait_transfers_done(self):
for coordinator in self._future_coordinators:
coordinator.wait_until_on_done_callbacks_complete()
def _shutdown(self, cancel=False):
if cancel:
self._cancel_transfers()
try:
self._finish_transfers()
except KeyboardInterrupt:
self._cancel_transfers()
except Exception:
pass
finally:
self._wait_transfers_done()
def _release_semaphore(self, **kwargs):
self._semaphore.release()
def _submit_transfer(self, request_type, call_args):
on_done_after_calls = [self._release_semaphore]
coordinator = CRTTransferCoordinator(transfer_id=self._id_counter)
components = {
'meta': CRTTransferMeta(self._id_counter, call_args),
'coordinator': coordinator
}
future = CRTTransferFuture(**components)
afterdone = AfterDoneHandler(coordinator)
on_done_after_calls.append(afterdone)
try:
self._semaphore.acquire()
on_queued = self._s3_args_creator.get_crt_callback(
future, 'queued')
on_queued()
crt_callargs = self._s3_args_creator.get_make_request_args(
request_type, call_args, coordinator,
future, on_done_after_calls)
crt_s3_request = self._crt_s3_client.make_request(**crt_callargs)
except Exception as e:
coordinator.set_exception(e, True)
on_done = self._s3_args_creator.get_crt_callback(
future, 'done', after_subscribers=on_done_after_calls)
on_done(error=e)
else:
coordinator.set_s3_request(crt_s3_request)
self._future_coordinators.append(coordinator)
self._id_counter += 1
return future
class CRTTransferMeta(BaseTransferMeta):
"""Holds metadata about the CRTTransferFuture"""
def __init__(self, transfer_id=None, call_args=None):
self._transfer_id = transfer_id
self._call_args = call_args
self._user_context = {}
@property
def call_args(self):
return self._call_args
@property
def transfer_id(self):
return self._transfer_id
@property
def user_context(self):
return self._user_context
class CRTTransferFuture(BaseTransferFuture):
def __init__(self, meta=None, coordinator=None):
"""The future associated to a submitted transfer request via CRT S3 client
:type meta: s3transfer.crt.CRTTransferMeta
:param meta: The metadata associated to the transfer future.
:type coordinator: s3transfer.crt.CRTTransferCoordinator
:param coordinator: The coordinator associated to the transfer future.
"""
self._meta = meta
if meta is None:
self._meta = CRTTransferMeta()
self._coordinator = coordinator
@property
def meta(self):
return self._meta
def done(self):
return self._coordinator.done()
def result(self, timeout=None):
self._coordinator.result(timeout)
def cancel(self):
self._coordinator.cancel()
def set_exception(self, exception):
"""Sets the exception on the future."""
if not self.done():
raise TransferNotDoneError(
'set_exception can only be called once the transfer is '
'complete.')
self._coordinator.set_exception(exception, override=True)
class BaseCRTRequestSerializer:
def serialize_http_request(self, transfer_type, future):
"""Serialize CRT HTTP requests.
:type transfer_type: string
:param transfer_type: the type of transfer made,
e.g 'put_object', 'get_object', 'delete_object'
:type future: s3transfer.crt.CRTTransferFuture
:rtype: awscrt.http.HttpRequest
:returns: An unsigned HTTP request to be used for the CRT S3 client
"""
raise NotImplementedError('serialize_http_request()')
class BotocoreCRTRequestSerializer(BaseCRTRequestSerializer):
def __init__(self, session, client_kwargs=None):
"""Serialize CRT HTTP request using botocore logic
It also takes into account configuration from both the session
and any keyword arguments that could be passed to
`Session.create_client()` when serializing the request.
:type session: botocore.session.Session
:type client_kwargs: Optional[Dict[str, str]])
:param client_kwargs: The kwargs for the botocore
s3 client initialization.
"""
self._session = session
if client_kwargs is None:
client_kwargs = {}
self._resolve_client_config(session, client_kwargs)
self._client = session.create_client(**client_kwargs)
self._client.meta.events.register(
'request-created.s3.*', self._capture_http_request)
self._client.meta.events.register(
'after-call.s3.*',
self._change_response_to_serialized_http_request)
self._client.meta.events.register(
'before-send.s3.*', self._make_fake_http_response)
def _resolve_client_config(self, session, client_kwargs):
user_provided_config = None
if session.get_default_client_config():
user_provided_config = session.get_default_client_config()
if 'config' in client_kwargs:
user_provided_config = client_kwargs['config']
client_config = Config(signature_version=UNSIGNED)
if user_provided_config:
client_config = user_provided_config.merge(client_config)
client_kwargs['config'] = client_config
client_kwargs["service_name"] = "s3"
def _crt_request_from_aws_request(self, aws_request):
url_parts = urlsplit(aws_request.url)
crt_path = url_parts.path
if url_parts.query:
crt_path = '%s?%s' % (crt_path, url_parts.query)
headers_list = []
for name, value in aws_request.headers.items():
if isinstance(value, str):
headers_list.append((name, value))
else:
headers_list.append((name, str(value, 'utf-8')))
crt_headers = awscrt.http.HttpHeaders(headers_list)
# CRT requires body (if it exists) to be an I/O stream.
crt_body_stream = None
if aws_request.body:
if hasattr(aws_request.body, 'seek'):
crt_body_stream = aws_request.body
else:
crt_body_stream = BytesIO(aws_request.body)
crt_request = awscrt.http.HttpRequest(
method=aws_request.method,
path=crt_path,
headers=crt_headers,
body_stream=crt_body_stream)
return crt_request
def _convert_to_crt_http_request(self, botocore_http_request):
# Logic that does CRTUtils.crt_request_from_aws_request
crt_request = self._crt_request_from_aws_request(
botocore_http_request)
if crt_request.headers.get("host") is None:
# If host is not set, set it for the request before using CRT s3
url_parts = urlsplit(botocore_http_request.url)
crt_request.headers.set("host", url_parts.netloc)
if crt_request.headers.get('Content-MD5') is not None:
crt_request.headers.remove("Content-MD5")
return crt_request
def _capture_http_request(self, request, **kwargs):
request.context['http_request'] = request
def _change_response_to_serialized_http_request(
self, context, parsed, **kwargs):
request = context['http_request']
parsed['HTTPRequest'] = request.prepare()
def _make_fake_http_response(self, request, **kwargs):
return botocore.awsrequest.AWSResponse(
None,
200,
{},
FakeRawResponse(b""),
)
def _get_botocore_http_request(self, client_method, call_args):
return getattr(self._client, client_method)(
Bucket=call_args.bucket, Key=call_args.key,
**call_args.extra_args)['HTTPRequest']
def serialize_http_request(self, transfer_type, future):
botocore_http_request = self._get_botocore_http_request(
transfer_type, future.meta.call_args)
crt_request = self._convert_to_crt_http_request(botocore_http_request)
return crt_request
class FakeRawResponse(BytesIO):
def stream(self, amt=1024, decode_content=None):
while True:
chunk = self.read(amt)
if not chunk:
break
yield chunk
class CRTTransferCoordinator:
"""A helper class for managing CRTTransferFuture"""
def __init__(self, transfer_id=None, s3_request=None):
self.transfer_id = transfer_id
self._s3_request = s3_request
self._lock = threading.Lock()
self._exception = None
self._crt_future = None
self._done_event = threading.Event()
@property
def s3_request(self):
return self._s3_request
def set_done_callbacks_complete(self):
self._done_event.set()
def wait_until_on_done_callbacks_complete(self, timeout=None):
self._done_event.wait(timeout)
def set_exception(self, exception, override=False):
with self._lock:
if not self.done() or override:
self._exception = exception
def cancel(self):
if self._s3_request:
self._s3_request.cancel()
def result(self, timeout=None):
if self._exception:
raise self._exception
try:
self._crt_future.result(timeout)
except KeyboardInterrupt:
self.cancel()
raise
finally:
if self._s3_request:
self._s3_request = None
self._crt_future.result(timeout)
def done(self):
if self._crt_future is None:
return False
return self._crt_future.done()
def set_s3_request(self, s3_request):
self._s3_request = s3_request
self._crt_future = self._s3_request.finished_future
class S3ClientArgsCreator:
def __init__(self, crt_request_serializer, os_utils):
self._request_serializer = crt_request_serializer
self._os_utils = os_utils
def get_make_request_args(
self, request_type, call_args, coordinator, future,
on_done_after_calls):
recv_filepath = None
send_filepath = None
s3_meta_request_type = getattr(
S3RequestType,
request_type.upper(),
S3RequestType.DEFAULT)
on_done_before_calls = []
if s3_meta_request_type == S3RequestType.GET_OBJECT:
final_filepath = call_args.fileobj
recv_filepath = self._os_utils.get_temp_filename(final_filepath)
file_ondone_call = RenameTempFileHandler(
coordinator, final_filepath, recv_filepath, self._os_utils)
on_done_before_calls.append(file_ondone_call)
elif s3_meta_request_type == S3RequestType.PUT_OBJECT:
send_filepath = call_args.fileobj
data_len = self._os_utils.get_file_size(send_filepath)
call_args.extra_args["ContentLength"] = data_len
crt_request = self._request_serializer.serialize_http_request(
request_type, future)
return {
'request': crt_request,
'type': s3_meta_request_type,
'recv_filepath': recv_filepath,
'send_filepath': send_filepath,
'on_done': self.get_crt_callback(future, 'done',
on_done_before_calls,
on_done_after_calls),
'on_progress': self.get_crt_callback(future, 'progress')
}
def get_crt_callback(self, future, callback_type,
before_subscribers=None, after_subscribers=None):
def invoke_all_callbacks(*args, **kwargs):
callbacks_list = []
if before_subscribers is not None:
callbacks_list += before_subscribers
callbacks_list += get_callbacks(future, callback_type)
if after_subscribers is not None:
callbacks_list += after_subscribers
for callback in callbacks_list:
                # The get_callbacks helper will set the first argument
                # by keyword; the other arguments need to be set by keyword
                # as well
if callback_type == "progress":
callback(bytes_transferred=args[0])
else:
callback(*args, **kwargs)
return invoke_all_callbacks
class RenameTempFileHandler:
def __init__(self, coordinator, final_filename, temp_filename, osutil):
self._coordinator = coordinator
self._final_filename = final_filename
self._temp_filename = temp_filename
self._osutil = osutil
def __call__(self, **kwargs):
error = kwargs['error']
if error:
self._osutil.remove_file(self._temp_filename)
else:
try:
self._osutil.rename_file(
self._temp_filename, self._final_filename)
except Exception as e:
self._osutil.remove_file(self._temp_filename)
# the CRT future has done already at this point
self._coordinator.set_exception(e)
class AfterDoneHandler:
def __init__(self, coordinator):
self._coordinator = coordinator
def __call__(self, **kwargs):
self._coordinator.set_done_callbacks_complete()
| 36.518212 | 82 | 0.653761 |
38614ce403eb6e57ea73868f75d9fcc78ab7889b | 2,332 | py | Python | tasks/__init__.py | ximliu/backend | 9fc49506355f227c32e980a1eb6d0f86244feb3d | [
"MIT"
] | 15 | 2020-11-27T04:03:34.000Z | 2022-03-04T11:00:07.000Z | tasks/__init__.py | ximliu/backend | 9fc49506355f227c32e980a1eb6d0f86244feb3d | [
"MIT"
] | 18 | 2021-06-03T06:03:02.000Z | 2022-02-21T08:58:09.000Z | tasks/__init__.py | ximliu/backend | 9fc49506355f227c32e980a1eb6d0f86244feb3d | [
"MIT"
] | 26 | 2020-11-26T09:00:03.000Z | 2022-02-16T04:20:53.000Z | from datetime import timedelta
from celery import Celery
from celery.schedules import crontab, schedule
from celery.signals import celeryd_init
import sentry_sdk
from sentry_sdk.integrations.celery import CeleryIntegration
from app.utils.ip import get_external_ip
from app.core import config
if config.ENABLE_SENTRY:
sentry_sdk.init(
release=f"{config.BACKEND_VERSION}",
environment=f"{config.ENVIRONMENT}",
dsn="https://74ad2dcda2794afa9a207be8e9c17ea5@sentry.leishi.io/4",
traces_sample_rate=1.0,
integrations=[CeleryIntegration()],
)
sentry_sdk.set_tag("panel.ip", get_external_ip())
celery_app = Celery("worker", broker="redis://redis:6379/0")
celery_app.conf.task_routes = {
"tasks.ansible.*": "high-queue",
"tasks.app.*": "high-queue",
"tasks.iptables.*": "high-queue",
"tasks.tc.*": "high-queue",
"tasks.*": "low-queue",
}
celery_app.conf.task_acks_late = True
celery_app.conf.worker_prefetch_multiplier = 1
celery_app.autodiscover_tasks(
[
"tasks.ansible",
"tasks.artifacts",
"tasks.app",
"tasks.ehco",
"tasks.brook",
"tasks.connect",
"tasks.clean",
"tasks.traffic",
"tasks.iptables",
"tasks.server",
"tasks.gost",
"tasks.tc",
"tasks.v2ray",
"tasks.socat",
"tasks.wstunnel",
"tasks.shadowsocks",
"tasks.node_exporter",
"tasks.tiny_port_mapper",
]
)
celery_app.conf.beat_schedule = {
"run-get-traffic": {
"task": "tasks.traffic.traffic_runner",
"schedule": schedule(timedelta(seconds=int(config.TRAFFIC_INTERVAL_SECONDS))),
},
"run-ddns": {
"task": "tasks.iptables.ddns_runner",
"schedule": schedule(timedelta(seconds=int(config.DDNS_INTERVAL_SECONDS))),
},
"run-clean-artifacts": {
"task": "tasks.artifacts.clean_artifacts_runner",
"schedule": crontab(minute=0, hour=0),
}
}
@celeryd_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
celery_app.send_task("tasks.ansible.ansible_hosts_runner")
celery_app.send_task(
"tasks.server.servers_runner",
kwargs={
"prepare_services": True,
"sync_scripts": True,
"init_iptables": True,
})
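# A usage sketch (not part of this module): with the task_routes above, workers
# would typically be started per queue with the standard Celery CLI, and tasks
# can also be enqueued ad hoc from Python. The queue names come from the
# configuration above; everything else in this sketch is an assumption.
#
#     celery -A tasks worker -Q high-queue --loglevel=info
#     celery -A tasks worker -Q low-queue --loglevel=info
#     celery -A tasks beat
#
#     celery_app.send_task("tasks.iptables.ddns_runner")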
| 27.435294 | 86 | 0.641081 |
0b3b8688d958b736165ae865b92bf4abd4106a01 | 6,487 | py | Python | src/add_segment.py | quqixun/ADDetection | 7458e09f185104c1e3b25fe1f9798429fdabfee0 | [
"MIT"
] | 9 | 2018-11-21T05:44:47.000Z | 2020-11-21T09:57:31.000Z | src/add_segment.py | quqixun/ADDetection | 7458e09f185104c1e3b25fe1f9798429fdabfee0 | [
"MIT"
] | 5 | 2018-05-11T19:06:20.000Z | 2021-01-05T03:28:48.000Z | src/add_segment.py | quqixun/ADDetection | 7458e09f185104c1e3b25fe1f9798429fdabfee0 | [
"MIT"
] | 4 | 2018-11-22T03:16:16.000Z | 2019-11-24T12:24:16.000Z | # Alzheimer's Disease Detection
# Segment brain into GM, WM and CSF.
# Author: Qixun QU
# Copyleft: MIT Licience
# To run this script, FSL should be installed.
# See https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FslInstallation.
# ,,, ,,,
# ;" '; ;' ",
# ; @.ss$$$$$$s.@ ;
# `s$$$$$$$$$$$$$$$'
# $$$$$$$$$$$$$$$$$$
# $$$$P""Y$$$Y""W$$$$$
# $$$$ p"$$$"q $$$$$
# $$$$ .$$$$$. $$$$'
# $$$DaU$$O$$DaU$$$'
# '$$$$'.^.'$$$$'
# '&$$$$$&'
from __future__ import print_function
import os
import shutil
import subprocess
import numpy as np
import nibabel as nib
from multiprocessing import Pool, cpu_count
# Helper function to run in multiple processes
def unwarp_segment(arg, **kwarg):
return ADDSegment._segment(*arg, **kwarg)
class ADDSegment(object):
def __init__(self, ad_dir, nc_dir):
        '''__INIT__
        Initialization. Set directories for input data.
        The arrangement of the input images:
- Project Root Directory
--- data
----- adni_subj
------- AD (or NC) --> ad_dir (or nc_dir)
--------- subject_id
----------- scan_no
------------- whole.nii.gz
Inputs:
-------
- ad_dir: string, directory path of AD subjects.
- nc_dir: string, directory path of NC subjects.
'''
self.input_dirs = [ad_dir, nc_dir]
return
def run(self, processes=-1):
'''RUN
Do segmentation of brains in multiple processes.
        For each brain image in "src_dir", segmentations
        are first saved in the temporary folder "temp" ("dst_dir").
- Project Root Directory
--- data
----- adni_subj
------- AD (or NC) --> ad_dir (or nc_dir)
--------- subject_id
----------- scan_no --> src_dir
------------- whole.nii.gz
------------- temp --> dst_dir
Input:
------
- processes: int, number of processes,
if it is -1, all processors are available.
'''
# src_dirs contains directory of each scan
# dst_dirs contains directory of temporary folders
src_dirs, dst_dirs = [], []
for input_dir in self.input_dirs:
for subject in os.listdir(input_dir):
subj_dir = os.path.join(input_dir, subject)
for scan in os.listdir(subj_dir):
src_dir = os.path.join(subj_dir, scan)
dst_dir = os.path.join(src_dir, "temp")
src_dirs.append(src_dir)
dst_dirs.append(dst_dir)
# Map a couple of parameters to self._segment
paras = zip([self] * len(src_dirs), src_dirs, dst_dirs)
if processes == -1:
processes = cpu_count()
pool = Pool(processes=processes)
pool.map(unwarp_segment, paras)
return
def _segment(self, src_dir, dst_dir):
'''_SEGMENT
Call function for segmentation on input data.
Inputs:
-------
- src_dir: string, path of scan's directory.
- dst_dir: string, path of temporary folder.
'''
print("Segment on: ", src_dir)
try:
self.fast(src_dir, dst_dir)
except RuntimeError:
print("\tFalid on: ", src_dir)
return
@staticmethod
def fast(src_dir, dst_dir):
'''FAST
Call FSL FAST to do segmentation, and move outputs
from temporary folder to scan's folder.
Inputs:
-------
- src_dir: string, path of scan's directory.
- dst_dir: string, path of temporary folder.
'''
# Helper function to create temporary folder
def create_dir(path):
if not os.path.isdir(path):
os.makedirs(path)
return
# Load file in .nii.gz
def load_nii(path):
nii = nib.load(path)
return nii.get_data(), nii.get_affine()
# Save numpy array to .nii.gz file
def save_nii(data, path, affine):
nib.save(nib.Nifti1Image(data, affine), path)
return
# file_name is "whole.nii.gz"
file_name = os.listdir(src_dir)[0]
src_path = os.path.join(src_dir, file_name)
# Generate prefix of outputs
create_dir(dst_dir)
dst_prefix = os.path.join(dst_dir, file_name.split(".")[0])
        # Run the command; parameters are set as explained in
# https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FAST
command = ["fast", "-t", "1", "-n", "3", "-H", "0.1",
"-I", "1", "-l", "20.0", "-o", dst_prefix, src_path]
subprocess.call(command, stdout=open(os.devnull),
stderr=subprocess.STDOUT)
# Load whole brain
volume, affine = load_nii(src_path)
for scan in os.listdir(dst_dir):
mask_path = os.path.join(dst_dir, scan)
            # Probabilistic segmentation of each tissue has been obtained,
# to extract tissue, multiply whole brain with segmentation
if "pve_0" in scan:
# Segmentation of CSF
mask, _ = load_nii(mask_path)
csf = np.multiply(volume, mask)
dst_path = os.path.join(src_dir, "csf.nii.gz")
save_nii(csf, dst_path, affine)
elif "pve_1" in scan:
# Segmentation of GM
mask, _ = load_nii(mask_path)
gm = np.multiply(volume, mask)
dst_path = os.path.join(src_dir, "gm.nii.gz")
save_nii(gm, dst_path, affine)
elif "pve_2" in scan:
# Segmentation of WM
mask, _ = load_nii(mask_path)
wm = np.multiply(volume, mask)
dst_path = os.path.join(src_dir, "wm.nii.gz")
save_nii(wm, dst_path, affine)
# Remove temporary folder
shutil.rmtree(dst_dir)
return
if __name__ == "__main__":
# Set directories for input data
parent_dir = os.path.dirname(os.getcwd())
data_dir = os.path.join(parent_dir, "data", "adni_subj")
ad_dir = os.path.join(data_dir, "AD")
nc_dir = os.path.join(data_dir, "NC")
    # Use all processors for segmentation
seg = ADDSegment(ad_dir, nc_dir)
seg.run(processes=-1)
| 29.894009 | 75 | 0.523971 |
521037f8a8647d3da9cd1fadb3225b52fac0ac2c | 4,200 | py | Python | tests/test_lock.py | YanEricCossette/conda | 7651023ca56c4bcdf200090e75ae99f62124524c | [
"BSD-3-Clause"
] | null | null | null | tests/test_lock.py | YanEricCossette/conda | 7651023ca56c4bcdf200090e75ae99f62124524c | [
"BSD-3-Clause"
] | 3 | 2022-03-03T02:36:53.000Z | 2022-03-03T02:42:50.000Z | tests/test_lock.py | YanEricCossette/conda | 7651023ca56c4bcdf200090e75ae99f62124524c | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (C) 2012 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
import pytest
from conda.lock import DirectoryLock, FileLock, LockError
from os.path import basename, exists, isfile, join
def test_filelock_passes(tmpdir):
"""
Normal test on file lock
"""
package_name = "conda_file1"
tmpfile = join(tmpdir.strpath, package_name)
with FileLock(tmpfile) as lock:
path = basename(lock.lock_file_path)
assert tmpdir.join(path).exists() and tmpdir.join(path).isfile()
# lock should clean up after itself
assert not tmpdir.join(path).exists()
def test_filelock_locks(tmpdir):
"""
    Test on file lock: multiple locks on the same file.
    A LockError should be raised.
"""
package_name = "conda_file_2"
tmpfile = join(tmpdir.strpath, package_name)
with FileLock(tmpfile) as lock1:
path = basename(lock1.lock_file_path)
assert tmpdir.join(path).exists()
with pytest.raises(LockError) as execinfo:
with FileLock(tmpfile, retries=1) as lock2:
assert False # this should never happen
assert lock2.path_to_lock == lock1.path_to_lock
assert tmpdir.join(path).exists() and tmpdir.join(path).isfile()
# lock should clean up after itself
assert not tmpdir.join(path).exists()
def test_folder_locks(tmpdir):
"""
Test on Directory lock
"""
package_name = "dir_1"
tmpfile = join(tmpdir.strpath, package_name)
with DirectoryLock(tmpfile) as lock1:
assert exists(lock1.lock_file_path) and isfile(lock1.lock_file_path)
with pytest.raises(LockError) as execinfo:
with DirectoryLock(tmpfile, retries=1) as lock2:
assert False # this should never happen
assert exists(lock1.lock_file_path) and isfile(lock1.lock_file_path)
# lock should clean up after itself
assert not exists(lock1.lock_file_path)
def test_lock_thread(tmpdir):
"""
    Two threads try to lock the same file.
    One thread will have a LockError raised.
"""
def lock_thread(tmpdir, file_path):
with FileLock(file_path) as lock1:
path = basename(lock1.lock_file_path)
assert tmpdir.join(path).exists() and tmpdir.join(path).isfile()
assert not tmpdir.join(path).exists()
from threading import Thread
package_name = "conda_file_3"
tmpfile = join(tmpdir.strpath, package_name)
t = Thread(target=lock_thread, args=(tmpdir, tmpfile))
with FileLock(tmpfile) as lock1:
t.start()
path = basename(lock1.lock_file_path)
assert tmpdir.join(path).exists() and tmpdir.join(path).isfile()
t.join()
# lock should clean up after itself
assert not tmpdir.join(path).exists()
def test_lock_retries(tmpdir):
"""
    Two threads try to lock the same file.
    The lock has zero retries.
    One thread will have a LockError raised.
"""
def lock_thread_retries(tmpdir, file_path):
with pytest.raises(LockError) as execinfo:
with FileLock(file_path, retries=0):
assert False # should never enter here, since max_tries is 0
assert "LOCKERROR" in str(execinfo.value)
from threading import Thread
package_name = "conda_file_3"
tmpfile = join(tmpdir.strpath, package_name)
t = Thread(target=lock_thread_retries, args=(tmpdir, tmpfile))
with FileLock(tmpfile) as lock1:
t.start()
path = basename(lock1.lock_file_path)
assert tmpdir.join(path).exists() and tmpdir.join(path).isfile()
t.join()
# lock should clean up after itself
assert not tmpdir.join(path).exists()
def test_permission_file():
"""
    Test when the lock cannot be created due to permissions.
    Make sure no exception is raised.
"""
from conda.auxlib.compat import Utf8NamedTemporaryFile
from conda.common.compat import text_type
with Utf8NamedTemporaryFile(mode='r') as f:
if not isinstance(f.name, text_type):
return
with FileLock(f.name) as lock:
path = basename(lock.lock_file_path)
assert not exists(join(f.name, path))
| 31.578947 | 77 | 0.66381 |
88e0137fb81d577d4949cf0371f5962453385704 | 969 | py | Python | tests/unit/pipeline/test_pipeline_factory.py | Breaka84/spooq | d44eb5ad98612ff59826e7caa6d3b255f3a351c2 | [
"MIT"
] | 3 | 2021-06-15T13:48:23.000Z | 2021-11-16T12:04:55.000Z | tests/unit/pipeline/test_pipeline_factory.py | Breaka84/spooq | d44eb5ad98612ff59826e7caa6d3b255f3a351c2 | [
"MIT"
] | 11 | 2020-05-22T13:46:22.000Z | 2021-11-16T09:34:09.000Z | tests/unit/pipeline/test_pipeline_factory.py | Breaka84/spooq | d44eb5ad98612ff59826e7caa6d3b255f3a351c2 | [
"MIT"
] | 1 | 2021-03-17T16:32:45.000Z | 2021-03-17T16:32:45.000Z | from __future__ import absolute_import
from builtins import object
import pytest
import json
from spooq.pipeline import PipelineFactory, Pipeline
from . import etl_pipeline_user_params, elt_pipeline_business_params
@pytest.mark.parametrize(
argnames="pipeline_type",
argvalues=[etl_pipeline_user_params, elt_pipeline_business_params],
ids=["ETL Batch Pipeline", "ELT Ad Hoc Pipeline"],
)
class TestETLBatchPipeline(object):
@pytest.fixture()
def pipeline_factory(self, pipeline_type, mocker):
metadata = pipeline_type.get_metadata()
pipeline_factory = PipelineFactory()
mocker.patch.object(pipeline_factory, "get_metadata")
pipeline_factory.get_metadata.return_value = metadata
return pipeline_factory
def test_get_pipeline(self, pipeline_factory, pipeline_type):
context_vars = pipeline_type.get_context_vars()
assert isinstance(pipeline_factory.get_pipeline(context_vars), Pipeline)
| 35.888889 | 80 | 0.773994 |
1e019f6e27cf1bbb287c9f9f13e81609a815d766 | 1,518 | py | Python | torch_crystals/simulations/rotations.py | StarostinV/CrystalsInPytorch | 97999fb97e47febe32c864aee301cc2f44be25e8 | [
"MIT"
] | null | null | null | torch_crystals/simulations/rotations.py | StarostinV/CrystalsInPytorch | 97999fb97e47febe32c864aee301cc2f44be25e8 | [
"MIT"
] | null | null | null | torch_crystals/simulations/rotations.py | StarostinV/CrystalsInPytorch | 97999fb97e47febe32c864aee301cc2f44be25e8 | [
"MIT"
] | null | null | null | import torch
from torch import Tensor
from ..utils import norm, to_t
def orientation_rotation_matrix(orientation: Tensor) -> Tensor:
return get_rotation_matrix_from_vectors_t(
to_t([0, 0, 1], device=orientation.device, dtype=orientation.dtype),
orientation
)
@torch.jit.script
def quaternion_to_matrix(quaternions: Tensor):
"""
Convert rotations given as quaternions to rotation matrices.
Args:
quaternions: quaternions with real part first,
as tensor of shape (..., 4).
Returns:
Rotation matrices as tensor of shape (..., 3, 3).
"""
r, i, j, k = torch.unbind(quaternions, -1)
two_s = 2.0 / (quaternions * quaternions).sum(-1)
o = torch.stack(
(
1 - two_s * (j * j + k * k),
two_s * (i * j - k * r),
two_s * (i * k + j * r),
two_s * (i * j + k * r),
1 - two_s * (i * i + k * k),
two_s * (j * k - i * r),
two_s * (i * k - j * r),
two_s * (j * k + i * r),
1 - two_s * (i * i + j * j),
),
-1,
)
return o.reshape(quaternions.shape[:-1] + (3, 3))
@torch.jit.script
def get_rotation_matrix_from_vectors_t(v1: Tensor, v2: Tensor):
v1, v2 = norm(v1), norm(v2)
axis = norm(torch.cross(v1, v2))
angle = torch.arccos(torch.dot(v1, v2)) / 2
quaternion = torch.cat([
torch.cos(angle)[None],
torch.sin(angle) * axis,
])
return quaternion_to_matrix(quaternion)
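# A minimal usage sketch (assuming torch is available and this module is
# imported as part of its package): rotate the z-axis onto an arbitrary
# direction and check the result.
#
#     v1 = torch.tensor([0.0, 0.0, 1.0])
#     v2 = torch.tensor([1.0, 1.0, 1.0]) / 3 ** 0.5
#     rot = get_rotation_matrix_from_vectors_t(v1, v2)
#     assert torch.allclose(rot @ v1, v2, atol=1e-6)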
| 28.111111 | 76 | 0.544137 |
1713dbbbdc030f1317da2bf92a8c25faa52a3d64 | 5,931 | py | Python | airflow/providers/google/cloud/transfers/sheets_to_gcs.py | Sonins/airflow | f008e9c98e95f56b98220639024e583e66f030cd | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | airflow/providers/google/cloud/transfers/sheets_to_gcs.py | Sonins/airflow | f008e9c98e95f56b98220639024e583e66f030cd | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2021-03-27T03:46:45.000Z | 2021-04-03T09:12:48.000Z | airflow/providers/google/cloud/transfers/sheets_to_gcs.py | dungdm93/airflow | 6d25d63679085279ca1672c2eee2c45d6704efaa | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import csv
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Any, List, Optional, Sequence, Union
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.suite.hooks.sheets import GSheetsHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class GoogleSheetsToGCSOperator(BaseOperator):
"""
Writes Google Sheet data into Google Cloud Storage.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:GoogleSheetsToGCSOperator`
:param spreadsheet_id: The Google Sheet ID to interact with.
:type spreadsheet_id: str
:param sheet_filter: Default to None, if provided, Should be an array of the sheet
titles to pull from.
:type sheet_filter: List[str]
:param destination_bucket: The destination Google cloud storage bucket where the
report should be written to. (templated)
:type destination_bucket: str
:param destination_path: The Google cloud storage URI array for the object created by the operator.
For example: ``path/to/my/files``.
:type destination_path: str
:param gcp_conn_id: The connection ID to use when fetching connection info.
:type gcp_conn_id: str
:param delegate_to: The account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled.
:type delegate_to: str
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type impersonation_chain: Union[str, Sequence[str]]
"""
template_fields = [
"spreadsheet_id",
"destination_bucket",
"destination_path",
"sheet_filter",
"impersonation_chain",
]
def __init__(
self,
*,
spreadsheet_id: str,
destination_bucket: str,
sheet_filter: Optional[List[str]] = None,
destination_path: Optional[str] = None,
gcp_conn_id: str = "google_cloud_default",
delegate_to: Optional[str] = None,
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.gcp_conn_id = gcp_conn_id
self.spreadsheet_id = spreadsheet_id
self.sheet_filter = sheet_filter
self.destination_bucket = destination_bucket
self.destination_path = destination_path
self.delegate_to = delegate_to
self.impersonation_chain = impersonation_chain
def _upload_data(
self,
gcs_hook: GCSHook,
hook: GSheetsHook,
sheet_range: str,
sheet_values: List[Any],
) -> str:
# Construct destination file path
sheet = hook.get_spreadsheet(self.spreadsheet_id)
file_name = f"{sheet['properties']['title']}_{sheet_range}.csv".replace(" ", "_")
dest_file_name = (
f"{self.destination_path.strip('/')}/{file_name}" if self.destination_path else file_name
)
with NamedTemporaryFile("w+") as temp_file:
# Write data
writer = csv.writer(temp_file)
writer.writerows(sheet_values)
temp_file.flush()
# Upload to GCS
gcs_hook.upload(
bucket_name=self.destination_bucket,
object_name=dest_file_name,
filename=temp_file.name,
)
return dest_file_name
def execute(self, context: 'Context'):
sheet_hook = GSheetsHook(
gcp_conn_id=self.gcp_conn_id,
delegate_to=self.delegate_to,
impersonation_chain=self.impersonation_chain,
)
gcs_hook = GCSHook(
gcp_conn_id=self.gcp_conn_id,
delegate_to=self.delegate_to,
impersonation_chain=self.impersonation_chain,
)
# Pull data and upload
destination_array: List[str] = []
sheet_titles = sheet_hook.get_sheet_titles(
spreadsheet_id=self.spreadsheet_id, sheet_filter=self.sheet_filter
)
for sheet_range in sheet_titles:
data = sheet_hook.get_values(spreadsheet_id=self.spreadsheet_id, range_=sheet_range)
gcs_path_to_file = self._upload_data(gcs_hook, sheet_hook, sheet_range, data)
destination_array.append(gcs_path_to_file)
self.xcom_push(context, "destination_objects", destination_array)
return destination_array
| 40.346939 | 103 | 0.687237 |
c6ae8be89dc80c8aba2da32fff58a4e4646ba23f | 2,851 | py | Python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_03_01/models/virtual_machine_scale_set_identity.py | pjquirk/azure-sdk-for-python | cbf02ec4f177b96eae1dbbba87c34c2c93880150 | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_03_01/models/virtual_machine_scale_set_identity.py | pjquirk/azure-sdk-for-python | cbf02ec4f177b96eae1dbbba87c34c2c93880150 | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | azure-mgmt-compute/azure/mgmt/compute/v2019_03_01/models/virtual_machine_scale_set_identity.py | xiafu-msft/azure-sdk-for-python | 4d9560cfd519ee60667f3cc2f5295a58c18625db | [
"MIT"
] | 1 | 2019-06-17T22:18:23.000Z | 2019-06-17T22:18:23.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VirtualMachineScaleSetIdentity(Model):
"""Identity for the virtual machine scale set.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar principal_id: The principal id of virtual machine scale set
identity. This property will only be provided for a system assigned
identity.
:vartype principal_id: str
:ivar tenant_id: The tenant id associated with the virtual machine scale
set. This property will only be provided for a system assigned identity.
:vartype tenant_id: str
:param type: The type of identity used for the virtual machine scale set.
The type 'SystemAssigned, UserAssigned' includes both an implicitly
created identity and a set of user assigned identities. The type 'None'
will remove any identities from the virtual machine scale set. Possible
values include: 'SystemAssigned', 'UserAssigned', 'SystemAssigned,
UserAssigned', 'None'
:type type: str or
~azure.mgmt.compute.v2019_03_01.models.ResourceIdentityType
:param user_assigned_identities: The list of user identities associated
with the virtual machine scale set. The user identity dictionary key
references will be ARM resource ids in the form:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
:type user_assigned_identities: dict[str,
~azure.mgmt.compute.v2019_03_01.models.VirtualMachineScaleSetIdentityUserAssignedIdentitiesValue]
"""
_validation = {
'principal_id': {'readonly': True},
'tenant_id': {'readonly': True},
}
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'type': {'key': 'type', 'type': 'ResourceIdentityType'},
'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{VirtualMachineScaleSetIdentityUserAssignedIdentitiesValue}'},
}
def __init__(self, **kwargs):
super(VirtualMachineScaleSetIdentity, self).__init__(**kwargs)
self.principal_id = None
self.tenant_id = None
self.type = kwargs.get('type', None)
self.user_assigned_identities = kwargs.get('user_assigned_identities', None)
| 45.983871 | 148 | 0.686075 |
e8da9c0a2ee74899212871f70557f10b86909ce3 | 691 | py | Python | pathfinders/longest_path.py | Kartones/mazes-for-programmers-python-src | ae9849c2e7a5141de85556790f046d65fd5eb1a7 | [
"Unlicense"
] | 42 | 2017-08-15T18:49:16.000Z | 2022-02-13T20:13:05.000Z | pathfinders/longest_path.py | Kartones/mazes-for-programmers-python-src | ae9849c2e7a5141de85556790f046d65fd5eb1a7 | [
"Unlicense"
] | 4 | 2017-12-26T01:23:10.000Z | 2018-05-12T19:06:52.000Z | pathfinders/longest_path.py | Kartones/mazes-for-programmers-python-src | ae9849c2e7a5141de85556790f046d65fd5eb1a7 | [
"Unlicense"
] | 8 | 2017-08-15T19:09:20.000Z | 2022-01-06T21:11:03.000Z | from typing import Tuple
from base.distance_grid import DistanceGrid
Point = Tuple[int, int]
def calculate(grid: DistanceGrid) -> Tuple[Point, Point]:
"""
    Calculates a longest path inside a maze, by calculating the longest path from the northwest corner,
then using that point as the actual start and calculating its most distant cell.
"""
start_cell = grid[0, 0]
if start_cell is None:
raise IndexError("Invalid start cell row {} column {}".format(0, 0))
new_start_cell, distance = start_cell.distances.max
goal_cell, distance = new_start_cell.distances.max
return (new_start_cell.row, new_start_cell.column), (goal_cell.row, goal_cell.column)
| 31.409091 | 98 | 0.726483 |
2993fc488bc441b5350f633b9423ed3c1c0ec36b | 701 | py | Python | src/airfly/_vendor/airflow/providers/qubole/operators/qubole_check.py | ryanchao2012/airfly | 230ddd88885defc67485fa0c51f66c4a67ae98a9 | [
"MIT"
] | 7 | 2021-09-27T11:38:48.000Z | 2022-02-01T06:06:24.000Z | src/airfly/_vendor/airflow/providers/qubole/operators/qubole_check.py | ryanchao2012/airfly | 230ddd88885defc67485fa0c51f66c4a67ae98a9 | [
"MIT"
] | null | null | null | src/airfly/_vendor/airflow/providers/qubole/operators/qubole_check.py | ryanchao2012/airfly | 230ddd88885defc67485fa0c51f66c4a67ae98a9 | [
"MIT"
] | null | null | null | # Auto generated by 'inv collect-airflow'
from airfly._vendor.airflow.operators.sql import SQLCheckOperator, SQLValueCheckOperator
from airfly._vendor.airflow.providers.qubole.operators.qubole import QuboleOperator
class _QuboleCheckOperatorMixin:
pass
class QuboleCheckOperator(_QuboleCheckOperatorMixin, SQLCheckOperator, QuboleOperator):
qubole_conn_id: "str"
results_parser_callable: "typing.Callable"
class QuboleValueCheckOperator(
_QuboleCheckOperatorMixin, SQLValueCheckOperator, QuboleOperator
):
pass_value: "typing.Union[str, int, float]"
tolerance: "typing.Union[int, float, NoneType]"
results_parser_callable: "typing.Callable"
qubole_conn_id: "str"
| 31.863636 | 88 | 0.803138 |
5d9843a2af145c5ba84c399bf4a0805496656d57 | 6,790 | py | Python | nbviewer/utils.py | m-ueno/nbviewer | aa567da928dd022ce6a75b6c131a5ef3ff5c211a | [
"BSD-3-Clause-Clear"
] | 1 | 2020-04-01T07:17:27.000Z | 2020-04-01T07:17:27.000Z | nbviewer/utils.py | m-ueno/nbviewer | aa567da928dd022ce6a75b6c131a5ef3ff5c211a | [
"BSD-3-Clause-Clear"
] | null | null | null | nbviewer/utils.py | m-ueno/nbviewer | aa567da928dd022ce6a75b6c131a5ef3ff5c211a | [
"BSD-3-Clause-Clear"
] | 1 | 2021-09-10T17:18:44.000Z | 2021-09-10T17:18:44.000Z | #-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
# https://docs.python.org/3.1/library/base64.html#base64.decodestring
try:
from base64 import encodebytes
from base64 import decodebytes
except ImportError:
from base64 import encodestring as encodebytes
from base64 import decodestring as decodebytes
import cgi
from contextlib import contextmanager
import re
from subprocess import check_output
import time
try:
from urllib.parse import (
parse_qs,
quote as stdlib_quote,
urlencode,
urlparse,
urlunparse,
)
except ImportError:
from urllib import urlencode
from urllib2 import quote as stdlib_quote
from urlparse import (
parse_qs,
urlparse,
urlunparse,
)
from tornado.log import app_log
STRIP_PARAMS = [
'client_id',
'client_secret',
'access_token',
]
class EmptyClass(object):
"""
Simple empty class that returns itself for all functions called on it.
This allows us to call any method of any name on this, and it'll return another
instance of itself that'll allow any method to be called on it.
Primarily used to mock out the statsd client when statsd is not being used
"""
def empty_function(self, *args, **kwargs):
return self
def __getattr__(self, attr):
return self.empty_function
def quote(s):
"""unicode-safe quote
- accepts str+unicode (not bytes on py3)
- Python 2 requires str, not unicode
- always return unicode
"""
if not isinstance(s, str):
s = s.encode('utf8')
quoted = stdlib_quote(s)
if isinstance(quoted, bytes):
quoted = quoted.decode('utf8')
return quoted
def clean_filename(fn):
""" Github url sanitizes gist filenames to produce their permalink. This is
not provided over API, so we recreate it here. """
return re.sub('[^0-9a-zA-Z]+', '-', fn)
def url_path_join(*pieces):
"""Join components of url into a relative url
Use to prevent double slash when joining subpath. This will leave the
initial and final / in place
"""
initial = pieces[0].startswith('/')
final = pieces[-1].endswith('/')
stripped = [s.strip('/') for s in pieces]
result = '/'.join(s for s in stripped if s)
if initial:
result = '/' + result
if final:
result += '/'
if result == '//':
result = '/'
return result
def transform_ipynb_uri(uri, uri_rewrite_list):
"""Transform a given uri (an ipynb 'URI') into an app URL
State-free part of transforming URIs to nbviewer URLs.
:param uri: uri to transform
:param uri_rewrite_list: list of (URI regexes, URL templates) tuples
"""
for reg, rewrite in uri_rewrite_list:
matches = re.match(reg, uri)
if matches:
return rewrite.format(*matches.groups())
# encode query parameters as last url part
if '?' in uri:
uri, query = uri.split('?', 1)
uri = '%s/%s' % (uri, quote('?' + query))
return uri
# get_encoding_from_headers from requests.utils (1.2.3)
# (c) 2013 Kenneth Reitz
# used under Apache 2.0
def get_encoding_from_headers(headers):
"""Returns encodings from given HTTP Header Dict.
:param headers: dictionary to extract encoding from.
"""
content_type = headers.get('content-type')
if not content_type:
return None
content_type, params = cgi.parse_header(content_type)
if 'charset' in params:
return params['charset'].strip("'\"")
# per #507, at least some hosts are providing UTF-8 without declaring it
# while the former choice of ISO-8859-1 wasn't known to be causing problems
# in the wild
if 'text' in content_type:
return 'utf-8'
def response_text(response, encoding=None):
"""mimic requests.text property, but for plain HTTPResponse"""
encoding = (
encoding or
get_encoding_from_headers(response.headers) or
'utf-8'
)
return response.body.decode(encoding, 'replace')
# parse_header_links from requests.util
# modified to actually return a dict, like the docstring says.
def parse_header_links(value):
"""Return a dict of parsed link headers proxies.
i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
"""
links = {}
replace_chars = " '\""
for val in value.split(","):
try:
url, params = val.split(";", 1)
except ValueError:
url, params = val, ''
link = {}
parts = list(urlparse(url.strip("<> '\"")))
get_params = parse_qs(parts[4])
get_params = {
key: value[0]
for key, value in get_params.items()
if key not in STRIP_PARAMS
}
parts[4] = urlencode(get_params)
link["url"] = urlunparse(parts)
for param in params.split(";"):
try:
key, value = param.split("=")
except ValueError:
break
link[key.strip(replace_chars)] = value.strip(replace_chars)
if 'rel' in link:
links[link['rel']] = link
return links
def git_info(path):
"""Return some git info"""
command = ['git', 'log', '-1', '--format=%H\n%s\n%cD']
sha, msg, date = check_output(command, cwd=path).decode('utf8').splitlines()
return dict(
sha=sha,
date=date,
msg=msg,
)
def jupyter_info():
"""Get Jupyter info dict"""
import nbconvert
return dict(
nbconvert_version=nbconvert.__version__
)
def base64_decode(s):
"""unicode-safe base64
base64 API only talks bytes
"""
if not isinstance(s, bytes):
s = s.encode('ascii', 'replace')
decoded = decodebytes(s)
return decoded
def base64_encode(s):
"""unicode-safe base64
base64 API only talks bytes
"""
if not isinstance(s, bytes):
s = s.encode('ascii', 'replace')
encoded = encodebytes(s)
return encoded.decode('ascii')
@contextmanager
def time_block(message, debug_limit=1):
"""context manager for timing a block
logs millisecond timings of the block
If the time is longer than debug_limit,
then log level will be INFO,
otherwise it will be DEBUG.
"""
tic = time.time()
yield
dt = time.time() - tic
log = app_log.info if dt > debug_limit else app_log.debug
log("%s in %.2f ms", message, 1e3 * dt)
| 25.916031 | 118 | 0.613697 |
a93fac6bc358fa9b81d822f72bc966b400c0fcae | 3,483 | py | Python | poi_email_addresses.py | chrismartinis/enron | eab9433b29b67e042a3b7404f56a77b83fb1f116 | [
"MIT"
] | 92 | 2018-09-19T15:58:30.000Z | 2022-03-05T05:19:40.000Z | poi_email_addresses.py | chrismartinis/enron | eab9433b29b67e042a3b7404f56a77b83fb1f116 | [
"MIT"
] | 4 | 2018-09-10T15:08:24.000Z | 2021-02-02T21:51:03.000Z | poi_email_addresses.py | chrismartinis/enron | eab9433b29b67e042a3b7404f56a77b83fb1f116 | [
"MIT"
] | 59 | 2018-11-29T20:07:15.000Z | 2022-03-31T12:34:57.000Z | def poiEmails():
email_list = ["kenneth_lay@enron.net",
"kenneth_lay@enron.com",
"klay.enron@enron.com",
"kenneth.lay@enron.com",
"klay@enron.com",
"layk@enron.com",
"chairman.ken@enron.com",
"jeffreyskilling@yahoo.com",
"jeff_skilling@enron.com",
"jskilling@enron.com",
"effrey.skilling@enron.com",
"skilling@enron.com",
"jeffrey.k.skilling@enron.com",
"jeff.skilling@enron.com",
"kevin_a_howard.enronxgate.enron@enron.net",
"kevin.howard@enron.com",
"kevin.howard@enron.net",
"kevin.howard@gcm.com",
"michael.krautz@enron.com"
"scott.yeager@enron.com",
"syeager@fyi-net.com",
"scott_yeager@enron.net",
"syeager@flash.net",
"joe'.'hirko@enron.com",
"joe.hirko@enron.com",
"rex.shelby@enron.com",
"rex.shelby@enron.nt",
"rex_shelby@enron.net",
"jbrown@enron.com",
"james.brown@enron.com",
"rick.causey@enron.com",
"richard.causey@enron.com",
"rcausey@enron.com",
"calger@enron.com",
"chris.calger@enron.com",
"christopher.calger@enron.com",
"ccalger@enron.com",
"tim_despain.enronxgate.enron@enron.net",
"tim.despain@enron.com",
"kevin_hannon@enron.com",
"kevin'.'hannon@enron.com",
"kevin_hannon@enron.net",
"kevin.hannon@enron.com",
"mkoenig@enron.com",
"mark.koenig@enron.com",
"m..forney@enron.com",
"ken'.'rice@enron.com",
"ken.rice@enron.com",
"ken_rice@enron.com",
"ken_rice@enron.net",
"paula.rieker@enron.com",
"prieker@enron.com",
"andrew.fastow@enron.com",
"lfastow@pdq.net",
"andrew.s.fastow@enron.com",
"lfastow@pop.pdq.net",
"andy.fastow@enron.com",
"david.w.delainey@enron.com",
"delainey.dave@enron.com",
"'delainey@enron.com",
"david.delainey@enron.com",
"'david.delainey'@enron.com",
"dave.delainey@enron.com",
"delainey'.'david@enron.com",
"ben.glisan@enron.com",
"bglisan@enron.com",
"ben_f_glisan@enron.com",
"ben'.'glisan@enron.com",
"jeff.richter@enron.com",
"jrichter@nwlink.com",
"lawrencelawyer@aol.com",
"lawyer'.'larry@enron.com",
"larry_lawyer@enron.com",
"llawyer@enron.com",
"larry.lawyer@enron.com",
"lawrence.lawyer@enron.com",
"tbelden@enron.com",
"tim.belden@enron.com",
"tim_belden@pgn.com",
"tbelden@ect.enron.com",
"michael.kopper@enron.com",
"dave.duncan@enron.com",
"dave.duncan@cipco.org",
"duncan.dave@enron.com",
"ray.bowen@enron.com",
"raymond.bowen@enron.com",
"'bowen@enron.com",
"wes.colwell@enron.com",
"dan.boyle@enron.com",
"cloehr@enron.com",
"chris.loehr@enron.com"
]
return email_list
| 36.663158 | 56 | 0.488372 |
1647cc32ead01d127b37e27bbc9348cae77b3a45 | 189 | py | Python | tests/globals.py | meatballs/python_utils | 7e7ab9856c6dee95bdede03ab1784bda179e3b1c | [
"MIT"
] | null | null | null | tests/globals.py | meatballs/python_utils | 7e7ab9856c6dee95bdede03ab1784bda179e3b1c | [
"MIT"
] | null | null | null | tests/globals.py | meatballs/python_utils | 7e7ab9856c6dee95bdede03ab1784bda179e3b1c | [
"MIT"
] | 1 | 2016-02-05T13:43:03.000Z | 2016-02-05T13:43:03.000Z | project = 'matador-test'
environments = {
'test': {'dbms': 'oracle', 'connection': 'user@instance'}
}
credentials = {
'test': {'user': 'test_user', 'password': 'test_password'}
}
| 18.9 | 62 | 0.608466 |
5561591b0d00e3f04dd3cc98bb294e8e5d17604f | 473 | py | Python | gpytorch/models/__init__.py | bdecost/gpytorch | a5f1ad3e47daf3f8db04b605fb13ff3f9f871e3a | [
"MIT"
] | null | null | null | gpytorch/models/__init__.py | bdecost/gpytorch | a5f1ad3e47daf3f8db04b605fb13ff3f9f871e3a | [
"MIT"
] | null | null | null | gpytorch/models/__init__.py | bdecost/gpytorch | a5f1ad3e47daf3f8db04b605fb13ff3f9f871e3a | [
"MIT"
] | 1 | 2018-11-15T10:03:40.000Z | 2018-11-15T10:03:40.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .exact_gp import ExactGP
from .variational_gp import VariationalGP
from .grid_inducing_variational_gp import GridInducingVariationalGP
from .additive_grid_inducing_variational_gp import AdditiveGridInducingVariationalGP
__all__ = [ExactGP, VariationalGP, GridInducingVariationalGP, AdditiveGridInducingVariationalGP]
| 39.416667 | 96 | 0.894292 |
b096228fd516831ee33f513c00883fc680e9b964 | 3,158 | py | Python | isi_sdk_8_0_1/isi_sdk_8_0_1/models/mapping_identities.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 24 | 2018-06-22T14:13:23.000Z | 2022-03-23T01:21:26.000Z | isi_sdk_8_0_1/isi_sdk_8_0_1/models/mapping_identities.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 46 | 2018-04-30T13:28:22.000Z | 2022-03-21T21:11:07.000Z | isi_sdk_8_0_1/isi_sdk_8_0_1/models/mapping_identities.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 29 | 2018-06-19T00:14:04.000Z | 2022-02-08T17:51:19.000Z | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 4
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_8_0_1.models.mapping_identity import MappingIdentity # noqa: F401,E501
class MappingIdentities(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'identities': 'list[MappingIdentity]'
}
attribute_map = {
'identities': 'identities'
}
def __init__(self, identities=None): # noqa: E501
"""MappingIdentities - a model defined in Swagger""" # noqa: E501
self._identities = None
self.discriminator = None
if identities is not None:
self.identities = identities
@property
def identities(self):
"""Gets the identities of this MappingIdentities. # noqa: E501
:return: The identities of this MappingIdentities. # noqa: E501
:rtype: list[MappingIdentity]
"""
return self._identities
@identities.setter
def identities(self, identities):
"""Sets the identities of this MappingIdentities.
:param identities: The identities of this MappingIdentities. # noqa: E501
:type: list[MappingIdentity]
"""
self._identities = identities
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, MappingIdentities):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 27.46087 | 84 | 0.578847 |
4232b7867e72f31a55e3041bf315749b276a1b45 | 7,870 | py | Python | gen/argo/events/client/models/v1alpha1_event_source.py | argoproj-labs/argo-events-client-python | 3d6e3dffca4a12a490c2963f4ac90c8894948bb5 | [
"Apache-2.0"
] | null | null | null | gen/argo/events/client/models/v1alpha1_event_source.py | argoproj-labs/argo-events-client-python | 3d6e3dffca4a12a490c2963f4ac90c8894948bb5 | [
"Apache-2.0"
] | null | null | null | gen/argo/events/client/models/v1alpha1_event_source.py | argoproj-labs/argo-events-client-python | 3d6e3dffca4a12a490c2963f4ac90c8894948bb5 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Argo Events
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from argo.events.client.configuration import Configuration
class V1alpha1EventSource(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1alpha1EventSourceSpec',
'status': 'V1alpha1EventSourceStatus'
}
attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec',
'status': 'status'
}
def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None, local_vars_configuration=None): # noqa: E501
"""V1alpha1EventSource - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._kind = None
self._metadata = None
self._spec = None
self._status = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if kind is not None:
self.kind = kind
self.metadata = metadata
self.spec = spec
self.status = status
@property
def api_version(self):
"""Gets the api_version of this V1alpha1EventSource. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1alpha1EventSource. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1alpha1EventSource.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1alpha1EventSource. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""Gets the kind of this V1alpha1EventSource. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1alpha1EventSource. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1alpha1EventSource.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1alpha1EventSource. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1alpha1EventSource. # noqa: E501
:return: The metadata of this V1alpha1EventSource. # noqa: E501
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1alpha1EventSource.
:param metadata: The metadata of this V1alpha1EventSource. # noqa: E501
:type: V1ObjectMeta
"""
if self.local_vars_configuration.client_side_validation and metadata is None: # noqa: E501
raise ValueError("Invalid value for `metadata`, must not be `None`") # noqa: E501
self._metadata = metadata
@property
def spec(self):
"""Gets the spec of this V1alpha1EventSource. # noqa: E501
:return: The spec of this V1alpha1EventSource. # noqa: E501
:rtype: V1alpha1EventSourceSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""Sets the spec of this V1alpha1EventSource.
:param spec: The spec of this V1alpha1EventSource. # noqa: E501
:type: V1alpha1EventSourceSpec
"""
if self.local_vars_configuration.client_side_validation and spec is None: # noqa: E501
raise ValueError("Invalid value for `spec`, must not be `None`") # noqa: E501
self._spec = spec
@property
def status(self):
"""Gets the status of this V1alpha1EventSource. # noqa: E501
:return: The status of this V1alpha1EventSource. # noqa: E501
:rtype: V1alpha1EventSourceStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this V1alpha1EventSource.
:param status: The status of this V1alpha1EventSource. # noqa: E501
:type: V1alpha1EventSourceStatus
"""
if self.local_vars_configuration.client_side_validation and status is None: # noqa: E501
raise ValueError("Invalid value for `status`, must not be `None`") # noqa: E501
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1alpha1EventSource):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1alpha1EventSource):
return True
return self.to_dict() != other.to_dict()
| 33.922414 | 312 | 0.630241 |
e8cd6f8c1e1fbf829d5a54bb40bd5f9ed75f153b | 5,800 | py | Python | vit/command_bar.py | MarkWh1te/vit | 811c480ce9fa491cf419e04310bf49cbe7093e2d | [
"MIT"
] | 1 | 2020-01-20T10:38:17.000Z | 2020-01-20T10:38:17.000Z | vit/command_bar.py | MarkWh1te/vit | 811c480ce9fa491cf419e04310bf49cbe7093e2d | [
"MIT"
] | null | null | null | vit/command_bar.py | MarkWh1te/vit | 811c480ce9fa491cf419e04310bf49cbe7093e2d | [
"MIT"
] | null | null | null | import urwid
class CommandBar(urwid.Edit):
"""Custom urwid.Edit class for the command bar.
"""
def __init__(self, **kwargs):
self.event = kwargs['event']
self.autocomplete = kwargs['autocomplete']
self.metadata = None
self.history = CommandBarHistory()
kwargs.pop('event')
kwargs.pop('autocomplete')
return super().__init__(**kwargs)
def keypress(self, size, key):
"""Overrides Edit.keypress method.
"""
# TODO: Readline edit shortcuts.
if key not in ('tab', 'shift tab'):
self.autocomplete.deactivate()
if 'choices' in self.metadata:
op = self.metadata['op']
data = {
'choice': None,
'metadata': self.get_metadata(),
}
if key in self.metadata['choices']:
data['choice'] = self.metadata['choices'][key]
self.cleanup(op)
self.event.emit('command-bar:keypress', data)
return None
elif key in ('ctrl a',):
self.set_edit_pos(0)
return None
elif key in ('ctrl e',):
self.set_edit_pos(len(self.get_edit_text()))
return None
elif key in ('up', 'ctrl p'):
text = self.history.previous(self.metadata['history'])
if text != False:
self.set_edit_text(text)
return None
elif key in ('down', 'ctrl n'):
text = self.history.next(self.metadata['history'])
if text != False:
self.set_edit_text(text)
return None
elif key in ('enter', 'esc'):
text = self.get_edit_text().strip()
metadata = self.get_metadata()
data = {
'key': key,
'text': text,
'metadata': metadata,
}
self.cleanup(metadata['op'])
if text and key in ('enter'):
self.history.add(metadata['history'], text)
self.event.emit('command-bar:keypress', data)
return None
elif key in ('tab', 'shift tab'):
if self.is_autocomplete_op():
text = self.get_edit_text()
kwargs = {}
if key in ('shift tab',):
kwargs['reverse'] = True
self.autocomplete.activate(text, self.edit_pos, **kwargs)
return None
return super().keypress(size, key)
def is_autocomplete_op(self):
return self.metadata['op'] not in ['search-forward', 'search-reverse']
def set_edit_text(self, text, edit_pos=None):
ret = super().set_edit_text(text)
if not edit_pos:
edit_pos = len(text)
self.set_edit_pos(edit_pos)
return ret
def set_command_prompt(self, caption, edit_text=None):
self.set_caption(caption)
if edit_text:
self.set_edit_text(edit_text)
def activate(self, caption, metadata, edit_text=None):
self.set_metadata(metadata)
self.set_command_prompt(caption, edit_text)
def cleanup(self, command):
self.set_caption('')
self.set_edit_text('')
self.history.cleanup(command)
self.set_metadata(None)
def get_metadata(self):
return self.metadata.copy() if self.metadata else None
def prepare_metadata(self, metadata):
if metadata:
if 'history' not in metadata:
metadata['history'] = metadata['op']
return metadata
def set_metadata(self, metadata):
self.metadata = self.prepare_metadata(metadata)
def set_edit_text_callback(self):
return self.set_edit_text
class CommandBarHistory(object):
"""Holds command-specific history for the command bar.
"""
def __init__(self):
self.commands = {}
self.scrolling = False
def add(self, command, text):
if not self.exists(command):
self.commands[command] = {'items': ['']}
self.commands[command]['items'].insert(len(self.get_items(command)) - 1, text)
self.set_scrolling(command, False)
def previous(self, command):
if self.exists(command):
if not self.scrolling:
self.set_scrolling(command, True)
return self.current(command)
elif self.get_idx(command) > 0:
self.move_idx(command, -1)
return self.current(command)
return False
def next(self, command):
if self.exists(command) and self.scrolling and self.get_idx(command) < self.last_idx(command):
self.move_idx(command, 1)
return self.current(command)
return False
def cleanup(self, command):
self.set_scrolling(command, False)
def get_items(self, command):
return self.commands[command]['items']
def get_idx(self, command):
return self.commands[command]['idx']
def set_idx(self, command, idx):
self.commands[command]['idx'] = idx
def set_scrolling(self, command, scroll):
if self.exists(command):
# Don't count the ending empty string when setting the initial
# index for scrolling to start.
self.set_idx(command, self.last_idx(command) - 1)
self.scrolling = scroll
def move_idx(self, command, increment):
self.set_idx(command, self.get_idx(command) + increment)
def exists(self, command):
return command in self.commands
def current(self, command):
return self.get_items(command)[self.get_idx(command)] if self.exists(command) else False
def last_idx(self, command):
return len(self.get_items(command)) - 1 if self.exists(command) else None
| 33.918129 | 102 | 0.576207 |
27480333c2728c0a6eef7b38b05b7c0fdd5e369a | 496 | py | Python | connector/retrieve_connector_details.py | fivetran-connorbrereton/python-fivetran | d5e669c2a052a2922d1613bdbd429afd70fda03f | [
"MIT"
] | 3 | 2021-09-07T22:45:00.000Z | 2021-09-21T23:03:55.000Z | connector/retrieve_connector_details.py | fivetran-connorbrereton/python-fivetran | d5e669c2a052a2922d1613bdbd429afd70fda03f | [
"MIT"
] | null | null | null | connector/retrieve_connector_details.py | fivetran-connorbrereton/python-fivetran | d5e669c2a052a2922d1613bdbd429afd70fda03f | [
"MIT"
] | 1 | 2021-09-10T18:55:14.000Z | 2021-09-10T18:55:14.000Z | import json
import requests
import environ
from requests.auth import HTTPBasicAuth
def retrieve_connector_details():
env = environ.Env()
environ.Env.read_env()
# your connector ID
connector_id = ''
API_KEY = env("API_KEY")
API_SECRET = env("API_SECRET")
base64 = HTTPBasicAuth(API_KEY, API_SECRET)
endpoint = 'https://api.fivetran.com/v1/connectors/{}'.format(connector_id)
request = requests.get(url=endpoint, auth=base64).json()
return request | 22.545455 | 79 | 0.701613 |
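# Usage sketch (assumes API_KEY and API_SECRET in a .env file and the
# connector_id above filled in):
#
#     if __name__ == "__main__":
#         print(retrieve_connector_details())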
728f1fe6b9dfe2d5d241a5a50b3642bd781f576a | 1,937 | py | Python | Python Tkinter Word Jumble Game/getHeightWidth.py | BrianMarquez3/Python-Course | 2622b4ddfd687505becfd246e82a2ed0cb9b76f3 | [
"MIT"
] | 20 | 2020-08-19T23:27:01.000Z | 2022-02-03T12:02:17.000Z | Python Tkinter Word Jumble Game/getHeightWidth.py | BrianMarquez3/Python-Course | 2622b4ddfd687505becfd246e82a2ed0cb9b76f3 | [
"MIT"
] | 1 | 2021-04-10T18:06:05.000Z | 2021-04-10T18:06:05.000Z | Python Tkinter Word Jumble Game/getHeightWidth.py | BrianMarquez3/Python-Course | 2622b4ddfd687505becfd246e82a2ed0cb9b76f3 | [
"MIT"
] | 2 | 2020-12-03T19:35:36.000Z | 2021-11-10T14:58:39.000Z | # Python Tkinter Word Jumble Game II, show hint on word jumble Game
# Pyhton Tkinter Obtener altura y ancho
from tkinter import *
from random import choice
from random import shuffle
root = Tk()
root.title('Python Tkinter Word Jumble Game')
root.iconbitmap('Python Tkinter Word Jumble Game/liam.ico')
root.geometry("600x400+-50+50")
my_label = Label(root, text="", font=("Helvetica", 48))
my_label.pack(pady=20)
def shuffler():
entry_anwer.delete(0, END)
answer_Label.config(text='')
# List os departamets words
states = ['ancash', 'apurimac', 'arequipa', 'ayacucho', 'cajamarca', 'cerro de pasco', 'cuzco', 'huancavelica', 'huanuco', 'ica', 'junin', 'la libertad', 'lambayeque', 'lima', 'madre de dios', 'moquegua', 'piura', 'puno', 'san martin', 'tacna', 'tumbes', 'ucayali', 'amazonas', 'loreto']
# Pick random state from list
global word
word = choice(states)
#my_label.config(text=word)
    # Break apart our chosen word
break_apart_word = list(word)
shuffle(break_apart_word)
#print(break_apart_word)
    # Turn the shuffled list into a word
global shuffle_word
shuffle_word = ''
for letter in break_apart_word:
shuffle_word += letter
    # Print the shuffled word to the screen
my_label.config(text=shuffle_word)
# Create answer function
def answer():
if word == entry_anwer.get():
answer_Label.config(text="Correct")
else:
answer_Label.config(text="Incorrect")
entry_anwer = Entry(root, font=("Helvetica", 24))
entry_anwer.pack(pady=20)
button_frame = Frame(root)
button_frame.pack(pady=20)
my_button = Button(button_frame, text="Pick Another word", command=shuffler)
my_button.grid(row=0, column=0, padx=10)
answer_button = Button(button_frame, text="Answer", command=answer)
answer_button.grid(row=0, column=1, padx=10)
answer_Label = Label(root, text="", font=("Helvetica", 18))
answer_Label.pack(pady=20)
root.mainloop() | 29.348485 | 291 | 0.698503 |
9dc36793397cdd50b97c8e73b48769555a8f38a1 | 730 | py | Python | jorldy/config/c51/cartpole.py | ramanuzan/JORLDY | be371ad0607e5dba5d5082101c38c6a9f2c96767 | [
"Apache-2.0"
] | null | null | null | jorldy/config/c51/cartpole.py | ramanuzan/JORLDY | be371ad0607e5dba5d5082101c38c6a9f2c96767 | [
"Apache-2.0"
] | null | null | null | jorldy/config/c51/cartpole.py | ramanuzan/JORLDY | be371ad0607e5dba5d5082101c38c6a9f2c96767 | [
"Apache-2.0"
] | null | null | null | ### C51 CartPole Config ###
env = {
"name": "cartpole",
"action_type": "discrete",
"render": False,
}
agent = {
"name": "c51",
"network": "discrete_q_network",
"gamma": 0.99,
"epsilon_init": 1.0,
"epsilon_min": 0.01,
"explore_ratio": 0.2,
"buffer_size": 50000,
"batch_size": 32,
"start_train_step": 2000,
"target_update_period": 500,
"v_min": -1,
"v_max": 10,
"num_support": 51,
}
optim = {
"name": "adam",
"lr": 0.0001,
}
train = {
"training": True,
"load_path": None,
"run_step": 100000,
"print_period": 1000,
"save_period": 10000,
"eval_iteration": 5,
# distributed setting
"update_period": 32,
"num_workers": 8,
}
| 17.804878 | 36 | 0.552055 |
344416449351916de2bf8ac7e9cdd1bd31d92530 | 16,238 | py | Python | os_ken/services/protocols/bgp/bgp_sample_conf.py | rolaya/os-ken | 10009e41539c737c7c423f13e4f5bc5f46d219ff | [
"Apache-2.0"
] | 1 | 2019-04-24T04:01:07.000Z | 2019-04-24T04:01:07.000Z | os_ken/services/protocols/bgp/bgp_sample_conf.py | anlaneg/os-ken | 379a7694c3129cc0156343af71f4fca8830d9de5 | [
"Apache-2.0"
] | null | null | null | os_ken/services/protocols/bgp/bgp_sample_conf.py | anlaneg/os-ken | 379a7694c3129cc0156343af71f4fca8830d9de5 | [
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
import os
from os_ken.services.protocols.bgp.bgpspeaker import RF_VPN_V4
from os_ken.services.protocols.bgp.bgpspeaker import RF_VPN_V6
from os_ken.services.protocols.bgp.bgpspeaker import RF_L2_EVPN
from os_ken.services.protocols.bgp.bgpspeaker import RF_VPNV4_FLOWSPEC
from os_ken.services.protocols.bgp.bgpspeaker import RF_VPNV6_FLOWSPEC
from os_ken.services.protocols.bgp.bgpspeaker import RF_L2VPN_FLOWSPEC
from os_ken.services.protocols.bgp.bgpspeaker import EVPN_MAX_ET
from os_ken.services.protocols.bgp.bgpspeaker import ESI_TYPE_LACP
from os_ken.services.protocols.bgp.bgpspeaker import ESI_TYPE_MAC_BASED
from os_ken.services.protocols.bgp.bgpspeaker import EVPN_ETH_AUTO_DISCOVERY
from os_ken.services.protocols.bgp.bgpspeaker import EVPN_MAC_IP_ADV_ROUTE
from os_ken.services.protocols.bgp.bgpspeaker import TUNNEL_TYPE_VXLAN
from os_ken.services.protocols.bgp.bgpspeaker import EVPN_MULTICAST_ETAG_ROUTE
from os_ken.services.protocols.bgp.bgpspeaker import EVPN_ETH_SEGMENT
from os_ken.services.protocols.bgp.bgpspeaker import EVPN_IP_PREFIX_ROUTE
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_FAMILY_IPV4
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_FAMILY_IPV6
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_FAMILY_VPNV4
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_FAMILY_VPNV6
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_FAMILY_L2VPN
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_TA_SAMPLE
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_TA_TERMINAL
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_VLAN_POP
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_VLAN_PUSH
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_VLAN_SWAP
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_VLAN_RW_INNER
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_VLAN_RW_OUTER
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_TPID_TI
from os_ken.services.protocols.bgp.bgpspeaker import FLOWSPEC_TPID_TO
from os_ken.services.protocols.bgp.bgpspeaker import REDUNDANCY_MODE_SINGLE_ACTIVE
# =============================================================================
# BGP configuration.
# =============================================================================
BGP = {
# AS number for this BGP instance.
'local_as': 65001,
# BGP Router ID.
'router_id': '172.17.0.1',
# Default local preference
'local_pref': 100,
# List of TCP listen host addresses.
'bgp_server_hosts': ['0.0.0.0', '::'],
# List of BGP neighbors.
# The parameters for each neighbor are the same as the arguments of
# BGPSpeaker.neighbor_add() method.
'neighbors': [
{
'address': '172.17.0.2',
'remote_as': 65002,
'enable_ipv4': True,
'enable_ipv6': True,
'enable_vpnv4': True,
'enable_vpnv6': True,
},
{
'address': '172.17.0.3',
'remote_as': 65001,
'enable_evpn': True,
},
{
'address': '172.17.0.4',
'remote_as': 65001,
'enable_ipv4fs': True,
'enable_ipv6fs': True,
'enable_vpnv4fs': True,
'enable_vpnv6fs': True,
'enable_l2vpnfs': True,
},
],
# List of BGP VRF tables.
# The parameters for each VRF table are the same as the arguments of
# BGPSpeaker.vrf_add() method.
'vrfs': [
# Example of VRF for IPv4
{
'route_dist': '65001:100',
'import_rts': ['65001:100'],
'export_rts': ['65001:100'],
'route_family': RF_VPN_V4,
},
# Example of VRF for IPv6
{
'route_dist': '65001:150',
'import_rts': ['65001:150'],
'export_rts': ['65001:150'],
'route_family': RF_VPN_V6,
},
# Example of VRF for EVPN
{
'route_dist': '65001:200',
'import_rts': ['65001:200'],
'export_rts': ['65001:200'],
'route_family': RF_L2_EVPN,
},
# Example of VRF for IPv4 FlowSpec
{
'route_dist': '65001:250',
'import_rts': ['65001:250'],
'export_rts': ['65001:250'],
'route_family': RF_VPNV4_FLOWSPEC,
},
# Example of VRF for IPv6 FlowSpec
{
'route_dist': '65001:300',
'import_rts': ['65001:300'],
'export_rts': ['65001:300'],
'route_family': RF_VPNV6_FLOWSPEC,
},
# Example of VRF for L2VPN FlowSpec
{
'route_dist': '65001:350',
'import_rts': ['65001:350'],
'export_rts': ['65001:350'],
'route_family': RF_L2VPN_FLOWSPEC,
},
],
# List of BGP routes.
# The parameters for each route are the same as the arguments of
# the following methods:
# - BGPSpeaker.prefix_add()
# - BGPSpeaker.evpn_prefix_add()
# - BGPSpeaker.flowspec_prefix_add()
'routes': [
# Example of IPv4 prefix
{
'prefix': '10.10.1.0/24',
},
# Example of VPNv4 prefix
{
'prefix': '10.20.1.0/24',
'next_hop': '172.17.0.1',
'route_dist': '65001:100',
},
# Example of IPv6 prefix
{
'prefix': '2001:db8:1::/64',
},
# Example of VPNv6 prefix
{
'prefix': '2001:db8:2::/64',
'next_hop': '172.17.0.1',
'route_dist': '65001:150',
},
# Example of EVPN prefix
{
'route_type': EVPN_ETH_AUTO_DISCOVERY,
'route_dist': '65001:200',
'esi': {
'type': ESI_TYPE_LACP,
'mac_addr': 'aa:bb:cc:dd:ee:ff',
'port_key': 100,
},
'ethernet_tag_id': EVPN_MAX_ET,
'redundancy_mode': REDUNDANCY_MODE_SINGLE_ACTIVE,
},
{
'route_type': EVPN_MAC_IP_ADV_ROUTE,
'route_dist': '65001:200',
'esi': 0,
'ethernet_tag_id': 0,
'tunnel_type': TUNNEL_TYPE_VXLAN,
'vni': 200,
'mac_addr': 'aa:bb:cc:dd:ee:ff',
'ip_addr': '10.30.1.1',
'next_hop': '172.17.0.1',
},
{
'route_type': EVPN_MULTICAST_ETAG_ROUTE,
'route_dist': '65001:200',
'esi': 0,
'ethernet_tag_id': 0,
'ip_addr': '10.40.1.1',
},
{
'route_type': EVPN_ETH_SEGMENT,
'route_dist': '65001:200',
'esi': {
'type': ESI_TYPE_MAC_BASED,
'mac_addr': 'aa:bb:cc:dd:ee:ff',
'local_disc': 100,
},
'ip_addr': '172.17.0.1',
},
{
'route_type': EVPN_IP_PREFIX_ROUTE,
'route_dist': '65001:200',
'esi': 0,
'ethernet_tag_id': 0,
'ip_prefix': '10.50.1.0/24',
'gw_ip_addr': '172.16.0.1',
},
# Example of Flow Specification IPv4 prefix
{
'flowspec_family': FLOWSPEC_FAMILY_IPV4,
'rules': {
'dst_prefix': '10.60.1.0/24',
'src_prefix': '172.17.0.0/24',
'ip_proto': 6,
'port': '80 | 8000',
'dst_port': '>9000 & <9050',
'src_port': '>=8500 & <=9000',
'icmp_type': 0,
'icmp_code': 6,
'tcp_flags': 'SYN+ACK & !=URGENT',
'packet_len': 1000,
'dscp': '22 | 24',
'fragment': 'LF | ==FF',
},
'actions': {
'traffic_rate': {
'as_number': 0,
'rate_info': 100.0,
},
'traffic_action': {
'action': FLOWSPEC_TA_SAMPLE | FLOWSPEC_TA_TERMINAL,
},
'redirect': {
'as_number': 10,
'local_administrator': 100,
},
'traffic_marking': {
'dscp': 24,
}
},
},
# Example of Flow Specification VPNv4 prefix
{
'flowspec_family': FLOWSPEC_FAMILY_VPNV4,
'route_dist': '65001:250',
'rules': {
'dst_prefix': '10.70.1.0/24',
'src_prefix': '172.18.0.0/24',
'ip_proto': 6,
'port': '80 | 8000',
'dst_port': '>9000 & <9050',
'src_port': '>=8500 & <=9000',
'icmp_type': 0,
'icmp_code': 6,
'tcp_flags': 'SYN+ACK & !=URGENT',
'packet_len': 1000,
'dscp': '22 | 24',
'fragment': 'LF | ==FF',
},
'actions': {
'traffic_rate': {
'as_number': 0,
'rate_info': 100.0,
},
'traffic_action': {
'action': FLOWSPEC_TA_SAMPLE | FLOWSPEC_TA_TERMINAL,
},
'redirect': {
'as_number': 10,
'local_administrator': 100,
},
'traffic_marking': {
'dscp': 24,
}
},
},
# Example of Flow Specification IPv6 prefix
{
'flowspec_family': FLOWSPEC_FAMILY_IPV6,
'rules': {
'dst_prefix': '2001::1/128/32',
'src_prefix': '3001::2/128',
'next_header': 6,
'port': '80 | 8000',
'dst_port': '>9000 & <9050',
'src_port': '>=8500 & <=9000',
'icmp_type': 0,
'icmp_code': 6,
'tcp_flags': 'SYN+ACK & !=URGENT',
'packet_len': 1000,
'dscp': '22 | 24',
'fragment': 'LF | ==FF',
'flow_label': 100,
},
'actions': {
'traffic_rate': {
'as_number': 0,
'rate_info': 100.0,
},
'traffic_action': {
'action': FLOWSPEC_TA_SAMPLE | FLOWSPEC_TA_TERMINAL,
},
'redirect': {
'as_number': 10,
'local_administrator': 100,
},
'traffic_marking': {
'dscp': 24,
}
},
},
# Example of Flow Specification VPNv6 prefix
{
'flowspec_family': FLOWSPEC_FAMILY_VPNV6,
'route_dist': '65001:300',
'rules': {
'dst_prefix': '2001::1/128/32',
'src_prefix': '3001::2/128',
'next_header': 6,
'port': '80 | 8000',
'dst_port': '>9000 & <9050',
'src_port': '>=8500 & <=9000',
'icmp_type': 0,
'icmp_code': 6,
'tcp_flags': 'SYN+ACK & !=URGENT',
'packet_len': 1000,
'dscp': '22 | 24',
'fragment': 'LF | ==FF',
'flow_label': 100,
},
'actions': {
'traffic_rate': {
'as_number': 0,
'rate_info': 100.0,
},
'traffic_action': {
'action': FLOWSPEC_TA_SAMPLE | FLOWSPEC_TA_TERMINAL,
},
'redirect': {
'as_number': 10,
'local_administrator': 100,
},
'traffic_marking': {
'dscp': 24,
}
},
},
# Example of Flow Specification L2VPN prefix
{
'flowspec_family': FLOWSPEC_FAMILY_L2VPN,
'route_dist': '65001:350',
'rules': {
'ether_type': 0x0800,
'src_mac': '12:34:56:78:90:AB',
'dst_mac': 'BE:EF:C0:FF:EE:DD',
'llc_dsap': 0x42,
'llc_ssap': 0x42,
'llc_control': 100,
'snap': 0x12345,
'vlan_id': '>4000',
'vlan_cos': '>=3',
'inner_vlan_id': '<3000',
'inner_vlan_cos': '<=5',
},
'actions': {
'traffic_rate': {
'as_number': 0,
'rate_info': 100.0,
},
'traffic_action': {
'action': FLOWSPEC_TA_SAMPLE | FLOWSPEC_TA_TERMINAL,
},
'redirect': {
'as_number': 10,
'local_administrator': 100,
},
'traffic_marking': {
'dscp': 24,
},
'vlan_action': {
'actions_1': FLOWSPEC_VLAN_POP | FLOWSPEC_VLAN_PUSH,
'vlan_1': 3000,
'cos_1': 3,
'actions_2': FLOWSPEC_VLAN_SWAP,
'vlan_2': 4000,
'cos_2': 2,
},
'tpid_action': {
'actions': FLOWSPEC_TPID_TI | FLOWSPEC_TPID_TO,
'tpid_1': 200,
'tpid_2': 300,
}
},
}
],
}
# =============================================================================
# SSH server configuration.
# =============================================================================
SSH = {
'ssh_port': 4990,
'ssh_host': 'localhost',
# 'ssh_host_key': '/etc/ssh_host_rsa_key',
# 'ssh_username': 'os_ken',
# 'ssh_password': 'os_ken',
}
# =============================================================================
# Logging configuration.
# =============================================================================
LOGGING = {
# We use python logging package for logging.
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s ' +
'[%(process)d %(thread)d] %(message)s'
},
'simple': {
'format': '%(levelname)s %(asctime)s %(module)s %(lineno)s ' +
'%(message)s'
},
'stats': {
'format': '%(message)s'
},
},
'handlers': {
# Outputs log to console.
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'console_stats': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'stats'
},
# Rotates log file when its size reaches 10MB.
'log_file': {
'level': 'ERROR',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join('.', 'bgpspeaker.log'),
'maxBytes': '10000000',
'formatter': 'verbose'
},
'stats_file': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join('.', 'statistics_bgps.log'),
'maxBytes': '10000000',
'formatter': 'stats'
},
},
# Fine-grained control of logging per instance.
'loggers': {
'bgpspeaker': {
'handlers': ['console', 'log_file'],
'level': 'DEBUG',
'propagate': False,
},
'stats': {
'handlers': ['stats_file', 'console_stats'],
'level': 'INFO',
'propagate': False,
'formatter': 'stats',
},
},
# Root loggers.
'root': {
'handlers': ['console', 'log_file'],
'level': 'DEBUG',
'propagate': True,
},
}
| 33.688797 | 82 | 0.462926 |
76a6be4bd19a74ab6cd81d2c914100a226b9328a | 1,226 | py | Python | setup.py | mzurzolo/clonedCodeChecker | a1a16d8024833982629e16a229b509a1d15c4fea | [
"BSD-3-Clause"
] | 2 | 2019-03-07T02:08:35.000Z | 2019-03-14T20:09:46.000Z | setup.py | mzurzolo/clonedCodeChecker | a1a16d8024833982629e16a229b509a1d15c4fea | [
"BSD-3-Clause"
] | 3 | 2020-03-24T16:50:10.000Z | 2021-02-02T21:58:16.000Z | setup.py | mzurzolo/clonedCodeChecker | a1a16d8024833982629e16a229b509a1d15c4fea | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
from datetime import datetime
import setuptools
now = datetime.now()
now_midnight = int(
datetime(year=now.year, month=now.month, day=now.day).timestamp()
)
VERSION = "1.0.0.{}".format(now_midnight)
with open("clonedcodechecker/_version.py", "w") as file:
print("__version__ = \'{}\'".format(VERSION), file=file)
def main():
"""Package."""
setuptools.setup(
name="clonedcodechecker",
version=VERSION,
description="Cloned C++ Code Checker",
url="https://sourceforge.net/p/clonedcodechecker/mercurial/ci/default/tree/",
author="Michael Zurzolo <mikezurzolo@gmail.com>,\
Michael Turner <mt9u14@gmail.com>,\
Tajhay Felder <felder62@students.rowan.edu>,\
Kevin Eivich <eivichk6@students.rowan.edu>,\
Dylan Anderson <dylanjanderson4@gmail.com>,\
Emily Fliegel <fliege39@students.rowan.edu>",
author_email="listed in author",
license="BSD-3",
install_requires=["ruamel.yaml"],
entry_points={
"console_scripts": ["ccc = clonedcodechecker.codechecker:main"]
},
)
if __name__ == "__main__":
main()
| 29.190476 | 85 | 0.615824 |
29390ceeb3e26ba5b36b31cb6860a19d3cd86914 | 1,939 | py | Python | Week12/src/button.py | Kids-Hack-Labs/Winter2021 | 4c66d5cf05045d2724db2393a0c2c581f314f903 | [
"MIT"
] | null | null | null | Week12/src/button.py | Kids-Hack-Labs/Winter2021 | 4c66d5cf05045d2724db2393a0c2c581f314f903 | [
"MIT"
] | null | null | null | Week12/src/button.py | Kids-Hack-Labs/Winter2021 | 4c66d5cf05045d2724db2393a0c2c581f314f903 | [
"MIT"
] | null | null | null | from pygame import Color, Rect, Surface
import pygame.mouse as pm
from src.text_generator import TextGenerator
class Button():
STATES = ("NONE","OUT","HOVER","DOWN","UP")
def __init__(self, button_text, text_info, button_info, func):
self.colours = {Button.STATES[1]:button_info["out"],
Button.STATES[2]:button_info["hover"],
Button.STATES[3]:button_info["down"],
Button.STATES[4]:button_info["up"]}
self.rect = Rect(button_info["rect"])
self.surf = Surface(self.rect.size)
self.text_surf = TextGenerator.generate_text(button_text, text_info, None)
self.text_rect = self.text_surf.get_rect()
self.text_rect.center = (self.rect.width/2, self.rect.height/2)
self.on_click = func
self.current_state = Button.STATES[1]
self.previous_state = Button.STATES[1]
self.active = True
def update(self, delta):
if self.active:
self.current_state = self.check_states()
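# A click is registered on the DOWN -> HOVER transition below, i.e. when the
# left mouse button is released while the cursor is still over the button.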
if self.previous_state == Button.STATES[3] and\
self.current_state == Button.STATES[2]:
self.on_click()
self.previous_state = self.current_state
def render(self,target):
self.surf.fill(self.colours[self.current_state])
self.surf.blit(self.text_surf, self.text_rect)
target.blit(self.surf, self.rect)
def check_states(self):
mouse_pos = pm.get_pos()
mouse_buttons = pm.get_pressed()
if not self.rect.collidepoint(mouse_pos):
return Button.STATES[1]
else:
if not mouse_buttons[0]:
return Button.STATES[2]
else:
return Button.STATES[3]
def deactivate(self):
self.active = False
self.current_state = Button.STATES[4]
def set_correct(self):
self.current_state = Button.STATES[1]
| 36.584906 | 82 | 0.608045 |
cd1c1b55766d43eaf1f1a006ec51ad283007fa3e | 2,085 | py | Python | files/iana-config/generate.py | endlessm/freedesktop-sdk | 3bfc8a401096a4a0a660a7276a8ceae734aac79e | [
"MIT"
] | null | null | null | files/iana-config/generate.py | endlessm/freedesktop-sdk | 3bfc8a401096a4a0a660a7276a8ceae734aac79e | [
"MIT"
] | null | null | null | files/iana-config/generate.py | endlessm/freedesktop-sdk | 3bfc8a401096a4a0a660a7276a8ceae734aac79e | [
"MIT"
] | null | null | null | import csv
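# Pad a field with tab characters so the next column starts near character 24
# (always at least one tab); used to line up the columns of the generated
# services/protocols files.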
def add_delim(value):
numdelims = int((24 - len(value) - 1) / 8)
if numdelims < 1:
numdelims = 1
delims = '\t' * numdelims
return value+delims
with open('service-names-port-numbers.csv', 'r') as f:
reader = csv.reader(f)
next(reader)
ports = {}
for row in reader:
name, port, protocol, description = tuple(row[:4])
if not name or not port or not protocol:
continue
prot = '{}/{}'.format(port, protocol)
if prot not in ports:
ports[prot] = (name, [], description)
else:
ports[prot][1].append(name)
with open('services', 'w') as out:
for prot, values in ports.items():
name, aliases, description = values
description = description.splitlines()
if description:
description = '# {}'.format(description[0])
else:
description = ''
aliases = ' '.join(aliases)
out.write('{}{}{}{}\n'.format(add_delim(name),
add_delim(prot),
add_delim(aliases),
description))
with open('protocols', 'w') as out:
with open('protocol-numbers-1.csv', 'r') as f:
reader = csv.reader(f)
next(reader)
# TODO check if PORTS is same as above
ports = {}
for row in reader:
number, keyword, description = tuple(row[:3])
if not keyword:
name = '#'
else:
name = keyword.lower()
if keyword.endswith(' (deprecated)'):
name = '#'
description = description.splitlines()
if description:
description = '# {}'.format(description[0])
else:
description = ''
out.write('{}{}{}{}\n'.format(add_delim(name),
add_delim(number),
add_delim(keyword),
description))
| 31.119403 | 61 | 0.472902 |
0ce01e00527cbda423999ec4f5a1843ccca84177 | 453 | py | Python | get.py | zb3/cert-list | 443ca01009557a2774864c008dba8e900a27a50a | [
"MIT",
"BSD-3-Clause"
] | null | null | null | get.py | zb3/cert-list | 443ca01009557a2774864c008dba8e900a27a50a | [
"MIT",
"BSD-3-Clause"
] | null | null | null | get.py | zb3/cert-list | 443ca01009557a2774864c008dba8e900a27a50a | [
"MIT",
"BSD-3-Clause"
] | null | null | null | import sys
from common import *
srv = sys.argv[3] if len(sys.argv) > 3 else 'https://ct.googleapis.com/rocketeer'
srv2 = stripname(srv)
start = int(sys.argv[1])
delta = int(sys.argv[2]) if len(sys.argv) > 2 else 1000
end = start+delta-1
tsize = fetch_treesize(srv)
print('Tree size: '+str(tsize))
counter = start
fetched_certs = fetch_certs(srv, start, end)
for c in fetched_certs['entries']:
print_cert_info(c, counter)
counter += 1
| 20.590909 | 81 | 0.688742 |
7d6fcd655e08a6ebf7710b2dd65832608a8e1b8a | 5,075 | py | Python | comvex/aft/config.py | blakechi/Transformer-based_Model_Implementations | b9090d377f37c00e0b8e5f09782b797beaef58a2 | [
"Apache-2.0"
] | 29 | 2021-06-14T08:27:43.000Z | 2022-02-07T13:40:27.000Z | comvex/aft/config.py | blakechi/Transformer-based_Model_Implementations | b9090d377f37c00e0b8e5f09782b797beaef58a2 | [
"Apache-2.0"
] | 3 | 2021-11-23T16:11:51.000Z | 2021-12-21T17:24:36.000Z | comvex/aft/config.py | blakechi/Transformer-based_Model_Implementations | b9090d377f37c00e0b8e5f09782b797beaef58a2 | [
"Apache-2.0"
] | 3 | 2021-06-27T08:18:57.000Z | 2021-12-17T07:29:59.000Z | from typing import Optional, Literal
from comvex.utils import ConfigBase
class AFTConfig(ConfigBase):
def __init__(
self,
image_size: int,
image_channel: int,
patch_size: int,
num_layers: int,
dim: int,
num_classes: int,
local_window_size: Optional[int] = 0,
hidden_dim: Optional[int] = None,
aft_mode: Literal["full", "simple", "local", "conv", "general"] = "full",
pool_mode: Literal["mean", "class"] = "mean",
query_act_fnc_name: str = "Sigmoid",
use_bias: bool = False,
ff_expand_scale: int = 4,
ff_dropout: float = 0.,
attention_dropout: float = 0.,
path_dropout: float = 0.,
# AFT - General, Full, Simple, Local
position_bias_dim: int = 128,
use_position_bias: bool = True,
# AFT - Conv
heads: int = 32,
epsilon: float = 1e-8,
# Possible Class Attention Layer
alpha: float = 1e-5,
cls_attn_heads: int = 16,
# Projection Head
pred_act_fnc_name: str = "ReLU"
) -> None:
super().__init__()
self.image_size=image_size
self.image_channel=image_channel
self.patch_size=patch_size
self.num_layers=num_layers
self.dim=dim
self.local_window_size=local_window_size
self.num_classes=num_classes
self.hidden_dim=hidden_dim
self.aft_mode=aft_mode
self.pool_mode=pool_mode
self.query_act_fnc_name=query_act_fnc_name
self.use_bias=use_bias
self.ff_expand_scale=ff_expand_scale
self.ff_dropout=ff_dropout
self.attention_dropout=attention_dropout
self.path_dropout=path_dropout
# AFT - General, Full, Simple, Local
self.position_bias_dim=position_bias_dim
self.use_position_bias=use_position_bias
# AFT - Conv
self.heads=heads
self.epsilon=epsilon
# Possible Class Attention Layer
self.alpha=alpha
self.cls_attn_heads=cls_attn_heads
self.pred_act_fnc_name=pred_act_fnc_name
@classmethod
def AFT_Full_tiny(cls, num_classes: int, **kwargs) -> "AFTConfig":
return cls(
224,
3,
16,
num_layers=12,
dim=192,
aft_mode="full",
pool_mode="mean",
position_bias_dim=128,
local_window_size=None,
num_classes=num_classes,
**kwargs
)
@classmethod
def AFT_Full_small(cls, num_classes: int, **kwargs) -> "AFTConfig":
return cls(
224,
3,
16,
num_layers=12,
dim=384,
aft_mode="full",
pool_mode="mean",
position_bias_dim=128,
local_window_size=None,
num_classes=num_classes,
**kwargs
)
@classmethod
def AFT_Conv_tiny_32_11(cls, num_classes: int, **kwargs) -> "AFTConfig":
return cls(
224,
3,
16,
num_layers=12,
dim=192,
heads=32,
local_window_size=11,
aft_mode="conv",
pool_mode="mean",
position_bias_dim=128,
num_classes=num_classes,
**kwargs
)
@classmethod
def AFT_Conv_tiny_192_11(cls, num_classes: int, **kwargs) -> "AFTConfig":
return cls(
224,
3,
16,
num_layers=12,
dim=192,
heads=192,
local_window_size=11,
aft_mode="conv",
pool_mode="mean",
position_bias_dim=128,
num_classes=num_classes,
**kwargs
)
@classmethod
def AFT_Conv_small_16_11(cls, num_classes: int, **kwargs) -> "AFTConfig":
return cls(
224,
3,
16,
num_layers=12,
dim=384,
heads=16,
local_window_size=11,
aft_mode="conv",
pool_mode="mean",
position_bias_dim=128,
num_classes=num_classes,
**kwargs
)
@classmethod
def AFT_Conv_small_384_11(cls, num_classes: int, **kwargs) -> "AFTConfig":
return cls(
224,
3,
16,
num_layers=12,
dim=384,
heads=384,
local_window_size=11,
aft_mode="conv",
pool_mode="mean",
position_bias_dim=128,
num_classes=num_classes,
**kwargs
)
@classmethod
def AFT_Conv_small_384_15(cls, num_classes: int, **kwargs) -> "AFTConfig":
return cls(
224,
3,
16,
num_layers=12,
dim=384,
heads=384,
local_window_size=15,
aft_mode="conv",
pool_mode="mean",
position_bias_dim=128,
num_classes=num_classes,
**kwargs
) | 28.038674 | 81 | 0.529655 |
722ddf9e72e8e495fd15b2a1237cf5b14c2d32ad | 3,359 | py | Python | senlin_tempest_plugin/tests/api/profiles/test_profile_validate_negative.py | openstack/senlin-tempest-plugin | f71166b2c6619746ac24614ed151e4befdb1f495 | [
"Apache-2.0"
] | 7 | 2017-10-31T13:31:20.000Z | 2020-01-08T02:36:37.000Z | senlin_tempest_plugin/tests/api/profiles/test_profile_validate_negative.py | openstack/senlin-tempest-plugin | f71166b2c6619746ac24614ed151e4befdb1f495 | [
"Apache-2.0"
] | null | null | null | senlin_tempest_plugin/tests/api/profiles/test_profile_validate_negative.py | openstack/senlin-tempest-plugin | f71166b2c6619746ac24614ed151e4befdb1f495 | [
"Apache-2.0"
] | 1 | 2018-01-10T20:36:55.000Z | 2018-01-10T20:36:55.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from tempest.lib import decorators
from tempest.lib import exceptions
from senlin_tempest_plugin.common import constants
from senlin_tempest_plugin.tests.api import base
class TestProfileValidateNegativeBadRequest(base.BaseSenlinAPITest):
@decorators.attr(type=['negative'])
@decorators.idempotent_id('d128781c-808f-4dee-b8b6-abe4def40eb1')
def test_profile_validate_empty_body(self):
params = {}
# Verify badrequest exception(400) is raised.
ex = self.assertRaises(exceptions.BadRequest,
self.client.validate_obj,
'profiles', params)
message = ex.resp_body['error']['message']
self.assertEqual("Request body missing 'profile' key.", str(message))
@decorators.attr(type=['negative'])
@decorators.idempotent_id('7c66eaa1-a78c-4b60-9b0f-c6fa91f28778')
def test_profile_validate_no_spec(self):
params = {
'profile': {}
}
# Verify badrequest exception(400) is raised.
ex = self.assertRaises(exceptions.BadRequest,
self.client.validate_obj,
'profiles', params)
message = ex.resp_body['error']['message']
self.assertEqual("'spec' is a required property", str(message))
@decorators.attr(type=['negative'])
@decorators.idempotent_id('d661c452-3752-4196-9649-4b44ac9c55a6')
def test_profile_validate_profile_type_incorrect(self):
spec = copy.deepcopy(constants.spec_nova_server)
spec['type'] = 'senlin.profile.bogus'
params = {
'profile': {
'spec': spec
}
}
# Verify notfound exception(404) is raised.
ex = self.assertRaises(exceptions.NotFound,
self.client.validate_obj,
'profiles', params)
message = ex.resp_body['error']['message']
self.assertEqual(
"The profile_type 'senlin.profile.bogus-1.0' could "
"not be found.", str(message))
@decorators.attr(type=['negative'])
@decorators.idempotent_id('c0fe55cf-608c-4e89-bf85-4561805fc867')
def test_profile_validate_spec_validation_failed(self):
spec = copy.deepcopy(constants.spec_nova_server)
spec['properties']['bogus'] = 'foo'
params = {
'profile': {
'spec': spec
}
}
# Verify badrequest exception(400) is raised.
ex = self.assertRaises(exceptions.BadRequest,
self.client.validate_obj,
'profiles', params)
message = ex.resp_body['error']['message']
self.assertEqual("Unrecognizable spec item 'bogus'", str(message))
| 39.05814 | 77 | 0.629949 |
dbedd1e8af65c608f62d0a742367ba08c25ced4e | 3,641 | py | Python | techcrm/services/webv2/project/forms.py | techakademi/DockerEgitimProjeler | 9ab6b5005fe53a2da6b5c0df2537d3d6d9238d80 | [
"MIT"
] | null | null | null | techcrm/services/webv2/project/forms.py | techakademi/DockerEgitimProjeler | 9ab6b5005fe53a2da6b5c0df2537d3d6d9238d80 | [
"MIT"
] | null | null | null | techcrm/services/webv2/project/forms.py | techakademi/DockerEgitimProjeler | 9ab6b5005fe53a2da6b5c0df2537d3d6d9238d80 | [
"MIT"
] | null | null | null | from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed
from flask_login import current_user
from wtforms import StringField, PasswordField, SubmitField, BooleanField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError
from project.models import kullanicilar, sirketler
# User registration form
class K_KayitFormu(FlaskForm):
k_adi = StringField('Kullanıcı Adı', validators=[DataRequired(), Length(min=2, max=10)]) # Username: required, between 2 and 10 characters.
k_eposta = StringField('Eposta', validators=[DataRequired(), Email()]) # User's e-mail address field
k_parola = PasswordField('Parola', validators=[DataRequired()]) # User's password field
confirm_password = PasswordField('Parola\'yı Tekrar girin lütfen', validators=[DataRequired(), EqualTo('k_parola')]) # Must match the password entered above
submit = SubmitField('Kullanıcı Oluştur')
def validate_k_adi(self, k_adi):
kullanici = kullanicilar.query.filter_by(k_adi=k_adi.data).first()
if kullanici:
raise ValidationError('Bu isimde bir kullanıcı mevcut, lütfen başka bir kullanıcı adı giriniz.')
def validate_k_eposta(self, K_EPosta):
kullanici = kullanicilar.query.filter_by(K_EPosta=K_EPosta.data).first()
if kullanici:
raise ValidationError('Bu isimde bir eposta mevcut, lütfen başka bir eposta adresi giriniz.')
# User login form
class K_GirisFormu(FlaskForm):
K_EPosta = StringField('Eposta', validators=[DataRequired(), Email()]) # User's e-mail address field
K_Parola = PasswordField('Parola', validators=[DataRequired()]) # User's password field
remember = BooleanField('Beni Hatırla')
submit = SubmitField('Sisteme Gir')
# Profile update form
class K_KayitGuncelle(FlaskForm):
k_adi = StringField('Kullanıcı Adı', validators=[DataRequired(), Length(min=2, max=10)]) # Username: required, between 2 and 10 characters.
k_eposta = StringField('Eposta', validators=[DataRequired(), Email()]) # User's e-mail address field
picture = FileField('Profil Resmini Güncelle', validators=[FileAllowed(['jpg', 'png'])])
submit = SubmitField('Profilimi Güncelle')
def validate_k_adi(self, k_adi):
if k_adi.data != current_user.k_adi:
kullanici = kullanicilar.query.filter_by(k_adi=k_adi.data).first()
if kullanici:
raise ValidationError('Bu isimde bir kullanıcı mevcut, lütfen başka bir kullanıcı adı giriniz.')
def validate_k_eposta(self, K_EPosta):
if K_EPosta.data != current_user.K_EPosta:
kullanici = kullanicilar.query.filter_by(K_EPosta=K_EPosta.data).first()
if kullanici:
raise ValidationError('Bu isimde bir eposta mevcut, lütfen başka bir eposta adresi giriniz.')
# Create a new company
class YeniSirket(FlaskForm):
Company = StringField('Şirket Adı', validators=[DataRequired()])
Temsilci = StringField('Temsilci', validators=[DataRequired()])
Tms_Eposta = StringField('Temsilci Eposta', validators=[DataRequired()])
City = StringField('Şehir', validators=[DataRequired()])
Country = StringField('Ülke', validators=[DataRequired()])
Domain = StringField('Domain', validators=[DataRequired()])
Hostname = StringField('Hostname', validators=[DataRequired()])
PubIP = StringField('IP adresi', validators=[DataRequired()])
RevPuan = StringField('Puan', validators=[DataRequired()])
submit = SubmitField('Kaydet')
| 54.343284 | 178 | 0.728646 |
85eedd0bc5332516537cff76ab0e605307acb90a | 5,284 | py | Python | test/functional/p2p_leak.py | shamimiceewu025/glee | aa0dc8240f2552e4c64a0b722d4e5f25dd981e66 | [
"MIT"
] | null | null | null | test/functional/p2p_leak.py | shamimiceewu025/glee | aa0dc8240f2552e4c64a0b722d4e5f25dd981e66 | [
"MIT"
] | null | null | null | test/functional/p2p_leak.py | shamimiceewu025/glee | aa0dc8240f2552e4c64a0b722d4e5f25dd981e66 | [
"MIT"
] | 1 | 2020-11-04T07:04:44.000Z | 2020-11-04T07:04:44.000Z | #!/usr/bin/env python3
# Copyright (c) 2017-2019 The GleecBTC Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test message sending before handshake completion.
A node should never send anything other than VERSION/VERACK/REJECT until it's
received a VERACK.
This test connects to a node and sends it a few messages, trying to entice it
into sending us something it shouldn't."""
import time
from test_framework.messages import msg_getaddr, msg_ping, msg_verack
from test_framework.mininode import mininode_lock, P2PInterface
from test_framework.test_framework import GleecBTCTestFramework
from test_framework.util import wait_until
banscore = 10
class CLazyNode(P2PInterface):
def __init__(self):
super().__init__()
self.unexpected_msg = False
self.ever_connected = False
def bad_message(self, message):
self.unexpected_msg = True
self.log.info("should not have received message: %s" % message.command)
def on_open(self):
self.ever_connected = True
def on_version(self, message): self.bad_message(message)
def on_verack(self, message): self.bad_message(message)
def on_reject(self, message): self.bad_message(message)
def on_inv(self, message): self.bad_message(message)
def on_addr(self, message): self.bad_message(message)
def on_getdata(self, message): self.bad_message(message)
def on_getblocks(self, message): self.bad_message(message)
def on_tx(self, message): self.bad_message(message)
def on_block(self, message): self.bad_message(message)
def on_getaddr(self, message): self.bad_message(message)
def on_headers(self, message): self.bad_message(message)
def on_getheaders(self, message): self.bad_message(message)
def on_ping(self, message): self.bad_message(message)
def on_mempool(self, message): self.bad_message(message)
def on_pong(self, message): self.bad_message(message)
def on_feefilter(self, message): self.bad_message(message)
def on_sendheaders(self, message): self.bad_message(message)
def on_sendcmpct(self, message): self.bad_message(message)
def on_cmpctblock(self, message): self.bad_message(message)
def on_getblocktxn(self, message): self.bad_message(message)
def on_blocktxn(self, message): self.bad_message(message)
# Node that never sends a version. We'll use this to send a bunch of messages
# anyway, and eventually get disconnected.
class CNodeNoVersionBan(CLazyNode):
# send a bunch of veracks without sending a message. This should get us disconnected.
# NOTE: implementation-specific check here. Remove if gleecbtcd ban behavior changes
def on_open(self):
super().on_open()
for i in range(banscore):
self.send_message(msg_verack())
def on_reject(self, message): pass
# Node that never sends a version. This one just sits idle and hopes to receive
# any message (it shouldn't!)
class CNodeNoVersionIdle(CLazyNode):
def __init__(self):
super().__init__()
# Node that sends a version but not a verack.
class CNodeNoVerackIdle(CLazyNode):
def __init__(self):
self.version_received = False
super().__init__()
def on_reject(self, message): pass
def on_verack(self, message): pass
# When version is received, don't reply with a verack. Instead, see if the
# node will give us a message that it shouldn't. This is not an exhaustive
# list!
def on_version(self, message):
self.version_received = True
self.send_message(msg_ping())
self.send_message(msg_getaddr())
class P2PLeakTest(GleecBTCTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-banscore=' + str(banscore)]]
def run_test(self):
no_version_bannode = self.nodes[0].add_p2p_connection(CNodeNoVersionBan(), send_version=False, wait_for_verack=False)
no_version_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVersionIdle(), send_version=False, wait_for_verack=False)
no_verack_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVerackIdle())
wait_until(lambda: no_version_bannode.ever_connected, timeout=10, lock=mininode_lock)
wait_until(lambda: no_version_idlenode.ever_connected, timeout=10, lock=mininode_lock)
wait_until(lambda: no_verack_idlenode.version_received, timeout=10, lock=mininode_lock)
# Mine a block and make sure that it's not sent to the connected nodes
self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)
#Give the node enough time to possibly leak out a message
time.sleep(5)
#This node should have been banned
assert not no_version_bannode.is_connected
self.nodes[0].disconnect_p2ps()
# Wait until all connections are closed
wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0)
# Make sure no unexpected messages came in
assert no_version_bannode.unexpected_msg == False
assert no_version_idlenode.unexpected_msg == False
assert no_verack_idlenode.unexpected_msg == False
if __name__ == '__main__':
P2PLeakTest().main()
| 41.606299 | 127 | 0.731264 |
fea48a553ca4adec1bc832e1816b3cf42c367938 | 13,559 | py | Python | quota_checker.py | forgeservicelab/forge.insightly-sync | bc3687ee820f03a9de556aadeba499d110f57500 | [
"MIT"
] | null | null | null | quota_checker.py | forgeservicelab/forge.insightly-sync | bc3687ee820f03a9de556aadeba499d110f57500 | [
"MIT"
] | null | null | null | quota_checker.py | forgeservicelab/forge.insightly-sync | bc3687ee820f03a9de556aadeba499d110f57500 | [
"MIT"
] | null | null | null | """Check OpenStack tenants' quotas."""
from __init__ import sanitize
from time import sleep
from ldap import SCOPE_SUBORDINATE
from swiftclient import service as swiftService
from cinderclient.v2 import client as cinderClient
from keystoneclient.exceptions import NotFound
from keystoneclient.v3 import client as keystoneClient
from keystoneclient.v3.roles import RoleManager
from keystoneclient.v3.groups import GroupManager
from keystoneclient.v3.domains import DomainManager
from keystoneclient.v3.projects import ProjectManager
from keystoneclient.v3.role_assignments import RoleAssignmentManager
from neutronclient.v2_0 import client as neutronClient
from novaclient.v1_1 import client as novaClient
from novaclient.exceptions import Conflict
from novaclient.exceptions import NotFound
from novaclient.exceptions import BadRequest
from novaclient.exceptions import Unauthorized
from ldap_updater import LDAPUpdater
class QuotaChecker:
"""Check and enforce OpenStack tenant quota.
Verifies that a given tenant does have its correct allocated quota.
Attributes:
DEFAULT_QUOTA (dict): The default quota for a service developer.
PARTNER_QUOTA (dict): The default quota for a partner with CRA.
BIGDATA_QUOTA (dict): The quota for big data enabled projects.
"""
_DEFAULT_QUOTA_NAME = 'Default CRA quota'
_BIGDATA_QUOTA_NAME = 'Bigdata CRA quota'
DEFAULT_QUOTA = {
'instances': 16,
'cores': 16,
'ram': 32 * 1024,
'floating_ips': 5,
'cinder_GB': 1024,
'swift_bytes': 1024 * 1024 * 1024 * 1024,
'flavors': ['m1.tiny', 'm1.small', 'm1.medium', 'm1.large', 'm1.x-large']
}
PARTNER_QUOTA = {
'instances': 1,
'cores': 1,
'ram': 1024,
'floating_ips': 1,
'cinder_GB': 40,
'swift_bytes': 40 * 1024 * 1024 * 1024,
'flavors': ['m1.tiny']
}
BIGDATA_QUOTA = {
'instances': 16,
'cores': 46,
'ram': 400 * 1024,
'floating_ips': 15,
'cinder_GB': 1024,
'swift_bytes': 1024 * 1024 * 1024 * 1024,
'flavors': ['m1.tiny', 'm1.small', 'hadoop.small', 'hadoop.medium', 'hadoop.large']
}
def __init__(self, username=None, password=None, tenantid=None, baseurl=None):
"""Set instance authentication constants.
Args:
username (str): OpenStack administrator username.
password (str): OpenStack administrator password.
tenantid (str): OpenStack tenant for the administrator account.
baseurl (str): OpenStack environment URI.
"""
self._AUTH_USERNAME = username
self._AUTH_PASSWORD = password
self._AUTH_TENANTID = tenantid
self._BASE_URL = baseurl
keystone = keystoneClient.Client(username=self._AUTH_USERNAME,
password=self._AUTH_PASSWORD,
project_name=self._AUTH_TENANTID,
auth_url='%s:5001/v3' % self._BASE_URL)
self._roleManager = RoleManager(keystone)
self._groupManager = GroupManager(keystone)
self._domainManager = DomainManager(keystone)
self._projectManager = ProjectManager(keystone)
self._roleAssignmentManager = RoleAssignmentManager(keystone)
def _getOpenstackGroup(self, group):
try:
os_group = self._groupManager.find(name=group)
except:
return None
return os_group
def _getTenantId(self, tenant):
projectMap = dict(map(lambda assignment: (assignment.group['id'], assignment.scope['project']['id']),
filter(lambda a: 'group' in a._info.keys(), self._roleAssignmentManager.list())))
return projectMap[tenant].strip() if tenant in projectMap.keys() else None
def _ensureTenantNetwork(self, tenant):
neutron = neutronClient.Client(username=self._AUTH_USERNAME,
password=self._AUTH_PASSWORD,
tenant_id=self._AUTH_TENANTID,
auth_url='%s:5001/v2.0' % self._BASE_URL)
if not filter(lambda network: network['tenant_id'] == tenant, neutron.list_networks()['networks']):
network = neutron.create_network({'network': {'name': 'default', 'tenant_id': tenant}})['network']
while not neutron.list_networks(id=network['id'])['networks']:
sleep(1)
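# Pick the next free /27 inside 192.168.0.0/16 for the tenant's default subnet:
# scan the last two octets of every existing /27, continue 32 addresses after the
# highest one (rolling over to the next third octet past .224), skip the
# 192.168.192.0/27 block, and fall back to 192.168.0.0/27 when nothing exists yet.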
allocated_cidrs = map(lambda chunk: (int(chunk[0]), int(chunk[1])),
map(lambda cidr: cidr['cidr'].split('/')[0].split('.')[-2:],
filter(lambda subnet: subnet['cidr'].endswith('/27'),
neutron.list_subnets()['subnets'])))
if (192, 0) in allocated_cidrs:
allocated_cidrs.remove((192, 0))
if allocated_cidrs:
max_bigchunk = max(map(lambda chunk: chunk[0], allocated_cidrs))
max_smlchunk = max(map(lambda chunk: chunk[1], filter(lambda c: c[0] == max_bigchunk, allocated_cidrs)))
if max_bigchunk == 191 and max_smlchunk == 224:
max_bigchunk = 192
max_smlchunk = 0
if max_smlchunk == 224:
cidr = '.'.join([str(chunk) for chunk in [192, 168, max_bigchunk + 1, 0]]) + '/27'
else:
cidr = '.'.join([str(chunk) for chunk in [192, 168, max_bigchunk, max_smlchunk + 32]]) + '/27'
else:
cidr = '192.168.0.0/27'
subnet = neutron.create_subnet({'subnet': {'name': 'default-subnet',
'cidr': cidr,
'dns_nameservers': ['193.166.4.24', '193.166.4.25'],
'tenant_id': tenant,
'network_id': network['id'],
'ip_version': '4'}})['subnet']
while not neutron.list_subnets(id=subnet['id'])['subnets']:
sleep(1)
router = neutron.create_router({'router': {'tenant_id': tenant,
'name': 'default-router'}})['router']
while not neutron.list_routers(id=router['id'])['routers']:
sleep(1)
public_net_id = filter(lambda n: n['router:external'],
neutron.list_networks(name='public')['networks'])[0]['id']
neutron.add_gateway_router(router['id'], {'network_id': public_net_id})
neutron.add_interface_router(router['id'], {'subnet_id': subnet['id']})
def _getTenantQuota(self, tenant, tenantType):
quota = None
statedQuota = map(lambda q: q['FIELD_VALUE'], filter(lambda f: f['CUSTOM_FIELD_ID'] == 'PROJECT_FIELD_1',
tenant['CUSTOMFIELDS']))[0]
if statedQuota == self._BIGDATA_QUOTA_NAME:
quota = self.BIGDATA_QUOTA
else:
if statedQuota == self._DEFAULT_QUOTA_NAME:
if tenantType == LDAPUpdater.FPA_CRA:
quota = self.PARTNER_QUOTA
if tenantType == LDAPUpdater.SDA:
quota = self.DEFAULT_QUOTA
return quota
def _grantAccess(self, client, flavor, tenant):
try:
client.flavor_access.add_tenant_access(flavor, tenant)
except Conflict:
pass
def _revokeAccess(self, client, flavor, tenant):
try:
client.flavor_access.remove_tenant_access(flavor, tenant)
except NotFound:
pass
def _enforceQuota(self, ldap_tenant, quotaDefinition, ldap_conn=None):
openstackGroup = self._getOpenstackGroup(ldap_tenant)
if openstackGroup:
tenant = self._getTenantId(ldap_tenant)
if not tenant:
# Create or map tenant in openstack
project = self._projectManager.list(name=ldap_tenant)
if not project:
project = self._projectManager.create(ldap_tenant, self._domainManager.find(id='default'))
self._roleManager.grant(self._roleManager.find(name='member').id,
group=openstackGroup.id,
project=project.id)
tenant = project.id
if ldap_conn and ldap_tenant in map(lambda t: t[0].split(',')[0].split('=')[1],
ldap_conn.ldap_search('cn=digile.platform,ou=projects,\
dc=forgeservicelab,dc=fi',
SCOPE_SUBORDINATE, attrsonly=1)):
with novaClient.Client(username=self._AUTH_USERNAME,
api_key=self._AUTH_PASSWORD,
tenant_id=tenant,
auth_url='%s:5001/v2.0' % self._BASE_URL) as nova:
try:
nova.security_group_rules.create(nova.security_groups.find(name='default').id,
ip_protocol='tcp',
from_port=22,
to_port=22,
cidr='86.50.27.230/32')
except Unauthorized:
# butler.service not yet part of the tenant, wait for next round.
pass
except BadRequest:
# Rule already exists, that's OK.
pass
self._ensureTenantNetwork(tenant)
if quotaDefinition:
service_opts = {
'meta': ['quota-bytes:%s' % quotaDefinition['swift_bytes']],
'os_username': self._AUTH_USERNAME,
'os_password': self._AUTH_PASSWORD,
'os_auth_url': '%s:5001/v2.0' % self._BASE_URL,
'os_storage_url': '%s:8081/v1/AUTH_%s' % (self._BASE_URL, self._projectManager.get(tenant).name),
'os_tenant_name': self._AUTH_TENANTID
}
swift = swiftService.SwiftService(options=service_opts)
swift.post()
del swift
cinder = cinderClient.Client(username=self._AUTH_USERNAME,
api_key=self._AUTH_PASSWORD,
tenant_id=self._AUTH_TENANTID,
auth_url=service_opts['os_auth_url'])
cinder.quotas.update(tenant, gigabytes=quotaDefinition['cinder_GB'])
del cinder
with novaClient.Client(username=self._AUTH_USERNAME,
api_key=self._AUTH_PASSWORD,
tenant_id=self._AUTH_TENANTID,
auth_url=service_opts['os_auth_url']) as nova:
nova.quotas.update(tenant,
instances=quotaDefinition['instances'],
cores=quotaDefinition['cores'],
ram=quotaDefinition['ram'],
floating_ips=quotaDefinition['floating_ips'])
allFlavors = nova.flavors.findall(is_public=None)
map(lambda f: self._grantAccess(nova, f, tenant),
filter(lambda f: f.name.encode() in quotaDefinition['flavors'], allFlavors))
map(lambda f: self._revokeAccess(nova, f, tenant),
filter(lambda f: f.name.encode() not in quotaDefinition['flavors'], allFlavors))
neutron = neutronClient.Client(username=self._AUTH_USERNAME,
password=self._AUTH_PASSWORD,
tenant_id=self._AUTH_TENANTID,
auth_url=service_opts['os_auth_url'])
neutron.update_quota(tenant, {'quota': {'floatingip': quotaDefinition['floating_ips']}})
del neutron
with novaClient.Client(username=self._AUTH_USERNAME,
api_key=self._AUTH_PASSWORD,
tenant_id=self._AUTH_TENANTID,
auth_url='%s:5001/v2.0' % self._BASE_URL) as nova:
self._grantAccess(nova, nova.flavors.find(name='m1.tiny', is_public=None), tenant)
def enforceQuotas(self, tenantList, tenantsType, ldap_conn=None):
"""Enforce the quota for each tenant on the list.
Args:
tenantList (List): A list of tenants as JSON from Insightly.
tenantsType (str): A description of the type of tenant, one of 'SDA', 'FPA' or 'FPA (CRA)'.
"""
map(lambda t: self._enforceQuota(sanitize(t['PROJECT_NAME']), self._getTenantQuota(t, tenantsType),
ldap_conn), tenantList)
| 48.949458 | 120 | 0.533963 |
4e2aae0a5de973b39ef103397c879e6cd2af511e | 821 | py | Python | twitter-consumer/app.py | VirtualSatai/humble-bot | 2f83642a319d933da46427b5b8090149576e2c76 | [
"MIT"
] | null | null | null | twitter-consumer/app.py | VirtualSatai/humble-bot | 2f83642a319d933da46427b5b8090149576e2c76 | [
"MIT"
] | 10 | 2018-05-15T08:07:32.000Z | 2018-05-21T08:04:04.000Z | twitter-consumer/app.py | VirtualSatai/humble-bot | 2f83642a319d933da46427b5b8090149576e2c76 | [
"MIT"
] | 1 | 2018-05-17T13:26:55.000Z | 2018-05-17T13:26:55.000Z | import json
import os
import time
import zmq
import twitter
from zmq import Context
api = twitter.Api(
consumer_key=os.environ["TWITTER_CONSUMER_KEY"],
consumer_secret=os.environ["TWITTER_CONSUMER_SECRET"],
access_token_key=os.environ["TWITTER_ACCESS_KEY"],
access_token_secret=os.environ["TWITTER_ACCESS_SECRET"],
)
ctx = Context()
TOPICFILTER = "1"
print("Connecting to the producer")
socket = ctx.socket(zmq.SUB)
socket.connect("tcp://product-consumer:5558")
socket.setsockopt_string(zmq.SUBSCRIBE, TOPICFILTER)
while True:
multipart = socket.recv_multipart()
topic = multipart[0]
data = multipart[1]
item = json.loads(data)
message = f"{item['name']} costs {item['price'][0]} {item['price'][1]} at {item['url']}"
print(f"Posting: {message}")
api.PostUpdate(message)
| 24.147059 | 92 | 0.717418 |
b423399553401f8b4e740fd2faf193c34c46f0f3 | 1,236 | py | Python | easy/merge-two-sorted-lists.py | therealabdi2/LeetcodeQuestions | 4c45ee836482a2c7b59906f7a7a99b5b3fa17317 | [
"MIT"
] | null | null | null | easy/merge-two-sorted-lists.py | therealabdi2/LeetcodeQuestions | 4c45ee836482a2c7b59906f7a7a99b5b3fa17317 | [
"MIT"
] | null | null | null | easy/merge-two-sorted-lists.py | therealabdi2/LeetcodeQuestions | 4c45ee836482a2c7b59906f7a7a99b5b3fa17317 | [
"MIT"
] | null | null | null | '''Merge two sorted linked lists and return it as a sorted list. The list should be made by splicing together the nodes of the first two lists.
Example 1:
Input: l1 = [1,2,4], l2 = [1,3,4]
Output: [1,1,2,3,4,4]
Example 2:
Input: l1 = [], l2 = []
Output: []
Example 3:
Input: l1 = [], l2 = [0]
Output: [0]'''
# Definition for singly-linked list.
class ListNode:
def __init__(self, val=0, next=None):
self.val = val
self.next = next
class Solution:
def mergeTwoLists(self, l1: ListNode, l2: ListNode) -> ListNode:
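# Splice nodes from l1 and l2 onto a dummy head in ascending order, then
# append whichever list still has nodes left and return dummy.next.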
cur = ListNode(0)
ans = cur
while l1 and l2:
if l1.val > l2.val:
cur.next = l2
l2 = l2.next
else:
cur.next = l1
l1 = l1.next
cur = cur.next
if l1:
cur.next = l1
else:
cur.next = l2
return ans.next
s = Solution()
# li: 1->2->4
l1_1 = ListNode(1)
l1_2 = ListNode(2)
l1_4 = ListNode(4)
l1_1.next = l1_2
l1_2.next = l1_4
# li: 1->3->4
l2_1 = ListNode(1)
l2_3 = ListNode(3)
l2_4 = ListNode(4)
l2_1.next = l2_3
l2_3.next = l2_4
ans = s.mergeTwoLists(l1_1, l2_1)
while ans:
print(ans.val)
ans = ans.next
| 16.931507 | 143 | 0.546117 |
c7730ff13598edb5915adc4b531d80eb6bdb8bfe | 2,996 | py | Python | spotty/providers/aws/instance_manager.py | greglira/spotty | 0b5073621ba8e19be75b6f9701e6c9971b6d17fb | [
"MIT"
] | null | null | null | spotty/providers/aws/instance_manager.py | greglira/spotty | 0b5073621ba8e19be75b6f9701e6c9971b6d17fb | [
"MIT"
] | null | null | null | spotty/providers/aws/instance_manager.py | greglira/spotty | 0b5073621ba8e19be75b6f9701e6c9971b6d17fb | [
"MIT"
] | null | null | null | from spotty.errors.instance_not_running import InstanceNotRunningError
from spotty.deployment.abstract_cloud_instance.abstract_cloud_instance_manager import AbstractCloudInstanceManager
from spotty.providers.aws.resource_managers.bucket_manager import BucketManager
from spotty.providers.aws.config.instance_config import InstanceConfig
from spotty.providers.aws.data_transfer import DataTransfer
from spotty.providers.aws.instance_deployment import InstanceDeployment
from spotty.utils import render_table
class InstanceManager(AbstractCloudInstanceManager):
instance_config: InstanceConfig
bucket_manager: BucketManager
data_transfer: DataTransfer
instance_deployment: InstanceDeployment
def _get_instance_config(self, instance_config: dict) -> InstanceConfig:
"""Validates the instance config and returns an InstanceConfig object."""
return InstanceConfig(instance_config, self.project_config)
def _get_bucket_manager(self) -> BucketManager:
"""Returns an bucket manager."""
return BucketManager(self.instance_config.project_config.project_name, self.instance_config.region)
def _get_data_transfer(self) -> DataTransfer:
"""Returns a data transfer object."""
return DataTransfer(
local_project_dir=self.project_config.project_dir,
host_project_dir=self.instance_config.host_project_dir,
sync_filters=self.project_config.sync_filters,
instance_name=self.instance_config.name,
region=self.instance_config.region,
)
def _get_instance_deployment(self) -> InstanceDeployment:
"""Returns an instance deployment manager."""
return InstanceDeployment(self.instance_config)
def get_status_text(self):
instance = self.instance_deployment.get_instance()
if not instance:
raise InstanceNotRunningError(self.instance_config.name)
table = [
('Instance State', instance.state),
('Instance Type', instance.instance_type),
('Availability Zone', instance.availability_zone),
]
if instance.public_ip_address:
table.append(('Public IP Address', instance.public_ip_address))
if instance.private_ip_address:
table.append(('Private IP Address', instance.private_ip_address))
if instance.lifecycle == 'spot':
spot_price = instance.get_spot_price()
table.append(('Purchasing Option', 'Spot Instance'))
table.append(('Spot Instance Price', '$%.04f' % spot_price))
else:
on_demand_price = instance.get_on_demand_price()
table.append(('Purchasing Option', 'On-Demand Instance'))
table.append(('Instance Price', ('$%.04f (us-east-1)' % on_demand_price) if on_demand_price else 'Unknown'))
return render_table(table)
@property
def ssh_key_path(self):
return self.instance_deployment.key_pair_manager.key_path
| 42.8 | 120 | 0.719626 |
97a7d79be0b1d338bd8f114ca2b5030a8844f0bb | 3,263 | py | Python | beanstalk_worker/services.py | ekhalilbsq/iaso | e6400c52aeb4f67ce1ca83b03efa3cb11ef235ee | [
"MIT"
] | 29 | 2020-12-26T07:22:19.000Z | 2022-03-07T13:40:09.000Z | beanstalk_worker/services.py | ekhalilbsq/iaso | e6400c52aeb4f67ce1ca83b03efa3cb11ef235ee | [
"MIT"
] | 150 | 2020-11-09T15:03:27.000Z | 2022-03-07T15:36:07.000Z | beanstalk_worker/services.py | ekhalilbsq/iaso | e6400c52aeb4f67ce1ca83b03efa3cb11ef235ee | [
"MIT"
] | 4 | 2020-11-09T10:38:13.000Z | 2021-10-04T09:42:47.000Z | import decimal
import importlib
import json
from datetime import datetime
from unittest import mock
from iaso.models.base import Task, RUNNING, QUEUED
import boto3
import dateparser
from django.conf import settings
from django.db import connection
from django.utils import timezone
from logging import getLogger
logger = getLogger(__name__)
def json_dump(obj):
if isinstance(obj, datetime):
return {"__type__": "datetime", "value": obj.isoformat()}
elif isinstance(obj, decimal.Decimal):
return {"__type__": "decimal", "value": str(obj)}
else:
assert False, type(obj)
def json_load(obj):
if "__type__" in obj:
if obj["__type__"] == "datetime":
return dateparser.parse(obj["value"])
elif obj["__type__"] == "decimal":
return decimal.Decimal(obj["value"])
else:
assert False
else:
return obj
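# Illustrative round trip (not part of the original module): json_dump and
# json_load are meant to be passed to the stdlib json module as `default` and
# `object_hook` so datetime and Decimal values survive serialization, e.g.:
#
#   payload = json.dumps({"when": datetime.utcnow(), "amount": decimal.Decimal("1.5")},
#                        default=json_dump)
#   restored = json.loads(payload, object_hook=json_load)  # values come back typed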
class _TaskServiceBase:
def run_task(self, body):
data = json.loads(body, object_hook=json_load)
self.run(data["module"], data["method"], data["task_id"], data["args"], data["kwargs"])
def run(self, module_name, method_name, task_id, args, kwargs):
"""run a task, called by the view that receives them from the queue"""
kwargs["_immediate"] = True
task = Task.objects.get(id=task_id)
if task.status == QUEUED: # ensure a task is only run once
task.status = RUNNING
task.started_at = timezone.now()
task.save()
module = importlib.import_module(module_name)
method = getattr(module, method_name)
assert method._is_task
method(*args, task=task, **kwargs)
task.refresh_from_db()
if task.status == RUNNING:
logger.warning(f"Task {task} still in status RUNNING after execution")
def enqueue(self, module_name, method_name, args, kwargs, task_id):
body = json.dumps(
{"module": module_name, "method": method_name, "task_id": task_id, "args": args, "kwargs": kwargs},
default=json_dump,
)
return self._enqueue(body)
class FakeTaskService(_TaskServiceBase):
def __init__(self):
self.clear()
def _enqueue(self, body):
self.queue.append(body)
return {"result": "recorded into fake queue service"}
def clear(self):
"""wipe the test queue"""
self.queue = []
def run_all(self):
"""run everything in the test queue"""
# clear on_commit stuff
if connection.in_atomic_block:
while connection.run_on_commit:
sids, func = connection.run_on_commit.pop(0)
func()
count = len(self.queue)
while self.queue:
b = self.queue.pop(0)
self.run_task(b)
return count
def run_task(self, body):
with mock.patch("django.conf.settings.BEANSTALK_WORKER", True):
return super().run_task(body)
class TaskService(_TaskServiceBase):
def _enqueue(self, body):
sqs = boto3.client("sqs", region_name=settings.BEANSTALK_SQS_REGION)
return sqs.send_message(QueueUrl=settings.BEANSTALK_SQS_URL, MessageAttributes={}, MessageBody=body)
| 30.495327 | 111 | 0.626111 |
153985703bdaa9501ebb05ca70e70ad71f2dc644 | 1,454 | py | Python | src/AstroNASA.py | cloud0x9/AsteroidTwitter | d135b88fc0f04765610b0c8bf47f131884a38d53 | [
"MIT"
] | null | null | null | src/AstroNASA.py | cloud0x9/AsteroidTwitter | d135b88fc0f04765610b0c8bf47f131884a38d53 | [
"MIT"
] | null | null | null | src/AstroNASA.py | cloud0x9/AsteroidTwitter | d135b88fc0f04765610b0c8bf47f131884a38d53 | [
"MIT"
] | null | null | null | """
make calls to the NASA API to retrieve asteroid data and make sense of it
last update by Ilyass on 5/14/21
"""
import requests
import json
from datetime import date
# Get today's date and make an API call to NASA with it to get today's asteroid data
todays_date = str(date.today())
URL_NeoFeed = "https://api.nasa.gov/neo/rest/v1/feed"
params = {
'api_key': 'DEMO_KEY',
'start_date': todays_date,
'end_date': todays_date
}
#read in the JSON response from NASA
response = requests.get(URL_NeoFeed, params=params).json()
rdump = json.dumps(response)
rload = json.loads(rdump)
#extract the total number of asteroids from the NASA JSON
def getNumAstro():
# Get the total number of asteroids
element_count = rload['element_count']
return element_count
#extract the total number of asteroids that are deemed dangerous from the NASA JSON
def getNumHazAstro():
# Get the number of hazardous asteroids
counter = 0
for x in range(getNumAstro()):
if rload['near_earth_objects'][todays_date][x]['is_potentially_hazardous_asteroid'] == True:
counter += 1
return counter
#take the data extracted from NASA and make sense of it by putting it into a sentence so it can be posted to twitter
def getAstroSentence():
AstroSum = "Today there are a total of "+ str(getNumAstro()) + " asteroids near earth according to @NASA , of those " + str(getNumAstro()) + " asteroids , "+ str(getNumHazAstro()) + " are deemed hazardous"
return AstroSum
| 33.813953 | 208 | 0.737964 |
dbbae504643c0404d1ba27f09f78484a746a14b3 | 8,685 | py | Python | src/bin/shipyard_client/tests/unit/cli/create/test_create_actions.py | nishant9620/airship-shipyard | 08957916056f08ce99acd1837511c106ce74dd2f | [
"Apache-2.0"
] | 12 | 2018-05-18T18:59:23.000Z | 2019-05-10T12:31:44.000Z | src/bin/shipyard_client/tests/unit/cli/create/test_create_actions.py | nishant9620/airship-shipyard | 08957916056f08ce99acd1837511c106ce74dd2f | [
"Apache-2.0"
] | 4 | 2021-07-28T14:36:57.000Z | 2022-03-22T16:39:23.000Z | src/bin/shipyard_client/tests/unit/cli/create/test_create_actions.py | nishant9620/airship-shipyard | 08957916056f08ce99acd1837511c106ce74dd2f | [
"Apache-2.0"
] | 9 | 2018-05-18T16:42:41.000Z | 2019-04-18T20:12:14.000Z | # Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
import responses
import yaml
from shipyard_client.api_client.base_client import BaseClient
from shipyard_client.cli.create.actions import CreateAction
from shipyard_client.cli.create.actions import CreateConfigdocs
from tests.unit.cli import stubs
resp_body = """
{
"dag_status": "SCHEDULED",
"parameters": {},
"dag_execution_date": "2017-09-24T19:05:49",
"id": "01BTTMFVDKZFRJM80FGD7J1AKN",
"dag_id": "deploy_site",
"name": "deploy_site",
"user": "shipyard",
"context_marker": "629f2ea2-c59d-46b9-8641-7367a91a7016",
"timestamp": "2017-09-24 19:05:43.603591"
}
"""
@responses.activate
@mock.patch.object(BaseClient, 'get_endpoint', lambda x: 'http://shiptest')
@mock.patch.object(BaseClient, 'get_token', lambda x: 'abc')
def test_create_action(*args):
responses.add(responses.POST,
'http://shiptest/actions',
body=resp_body,
status=201)
response = CreateAction(
stubs.StubCliContext(),
action_name='deploy_site',
param=None,
allow_intermediate_commits=False).invoke_and_return_resp()
assert 'Name' in response
assert 'Action' in response
assert 'Lifecycle' in response
assert 'action/01BTTMFVDKZFRJM80FGD7J1AKN' in response
assert 'Error:' not in response
assert '0/0/0' in response
@responses.activate
@mock.patch.object(BaseClient, 'get_endpoint', lambda x: 'http://shiptest')
@mock.patch.object(BaseClient, 'get_token', lambda x: 'abc')
def test_create_action_400(*args):
responses.add(responses.POST,
'http://shiptest/actions',
body=stubs.gen_err_resp(message='Error_400',
reason='bad action'),
status=400)
response = CreateAction(
stubs.StubCliContext(),
action_name='deploy_dogs',
param=None,
allow_intermediate_commits=False).invoke_and_return_resp()
assert 'Error_400' in response
assert 'bad action' in response
assert 'action/01BTTMFVDKZFRJM80FGD7J1AKN' not in response
@responses.activate
@mock.patch.object(BaseClient, 'get_endpoint', lambda x: 'http://shiptest')
@mock.patch.object(BaseClient, 'get_token', lambda x: 'abc')
def test_create_action_409(*args):
responses.add(responses.POST,
'http://shiptest/actions',
body=stubs.gen_err_resp(message='Error_409',
reason='bad validations'),
status=409)
response = CreateAction(
stubs.StubCliContext(),
action_name='deploy_site',
param=None,
allow_intermediate_commits=False).invoke_and_return_resp()
assert 'Error_409' in response
assert 'bad validations' in response
assert 'action/01BTTMFVDKZFRJM80FGD7J1AKN' not in response
@responses.activate
@mock.patch.object(BaseClient, 'get_endpoint', lambda x: 'http://shiptest')
@mock.patch.object(BaseClient, 'get_token', lambda x: 'abc')
def test_create_configdocs(*args):
succ_resp = stubs.gen_err_resp(message='Validations succeeded',
sub_error_count=0,
sub_info_count=0,
reason='Validation',
code=200)
responses.add(responses.POST,
'http://shiptest/configdocs/design',
body=succ_resp,
status=201)
filename = 'tests/unit/cli/create/sample_yaml/sample.yaml'
document_data = yaml.dump_all(filename)
file_list = (filename,)
response = CreateConfigdocs(stubs.StubCliContext(),
'design',
'append',
False,
document_data,
file_list).invoke_and_return_resp()
    assert 'Configuration documents added.' in response
assert 'Status: Validations succeeded' in response
assert 'Reason: Validation' in response
@responses.activate
@mock.patch.object(BaseClient, 'get_endpoint', lambda x: 'http://shiptest')
@mock.patch.object(BaseClient, 'get_token', lambda x: 'abc')
def test_create_configdocs_201_with_val_fails(*args):
succ_resp = stubs.gen_err_resp(message='Validations failed',
sub_message='Some reason',
sub_error_count=2,
sub_info_count=1,
reason='Validation',
code=400)
responses.add(responses.POST,
'http://shiptest/configdocs/design',
body=succ_resp,
status=201)
filename = 'tests/unit/cli/create/sample_yaml/sample.yaml'
document_data = yaml.dump_all(filename)
file_list = (filename,)
response = CreateConfigdocs(stubs.StubCliContext(),
'design',
'append',
False,
document_data,
file_list).invoke_and_return_resp()
assert 'Configuration documents added.' in response
assert 'Status: Validations failed' in response
assert 'Reason: Validation' in response
assert 'Some reason-1' in response
@responses.activate
@mock.patch.object(BaseClient, 'get_endpoint', lambda x: 'http://shiptest')
@mock.patch.object(BaseClient, 'get_token', lambda x: 'abc')
def test_create_configdocs_409(*args):
err_resp = stubs.gen_err_resp(message='Invalid collection',
sub_message='Buffer is either not...',
sub_error_count=1,
sub_info_count=0,
reason='Buffermode : append',
code=409)
responses.add(responses.POST,
'http://shiptest/configdocs/design',
body=err_resp,
status=409)
filename = 'tests/unit/cli/create/sample_yaml/sample.yaml'
document_data = yaml.dump_all(filename)
file_list = (filename,)
response = CreateConfigdocs(stubs.StubCliContext(),
'design',
'append',
False,
document_data,
file_list).invoke_and_return_resp()
assert 'Error: Invalid collection' in response
assert 'Reason: Buffermode : append' in response
assert 'Buffer is either not...' in response
@responses.activate
@mock.patch.object(BaseClient, 'get_endpoint', lambda x: 'http://shiptest')
@mock.patch.object(BaseClient, 'get_token', lambda x: 'abc')
def test_create_configdocs_empty(*args):
def validating_callback(request):
# a request that has empty_collection should have no body.
assert request.body is None
resp_body = stubs.gen_err_resp(
message='Validations succeeded',
sub_error_count=0,
sub_info_count=0,
reason='Validation',
code=200)
return (201, {}, resp_body)
responses.add_callback(
responses.POST,
'http://shiptest/configdocs/design',
callback=validating_callback,
content_type='application/json')
filename = 'tests/unit/cli/create/sample_yaml/sample.yaml'
document_data = yaml.dump_all(filename)
file_list = (filename, )
# pass data and empty_collection = True - should init with data, but
# not send the data on invoke
action = CreateConfigdocs(
stubs.StubCliContext(),
collection='design',
buffer_mode='append',
empty_collection=True,
data=document_data,
filenames=file_list)
assert action.data == document_data
assert action.empty_collection == True
response = action.invoke_and_return_resp()
assert response.startswith("Configuration documents added.")
| 38.092105 | 75 | 0.614277 |
e2e6f950de2f6f284055cc7e409814e146060ac2 | 9,601 | py | Python | convlab2/policy/larl/multiwoz/utils/delexicalize.py | ljw23/ConvLab-2 | 13d48ea0e441701bd66100689b6c25b561f15525 | [
"Apache-2.0"
] | 339 | 2020-03-04T09:43:22.000Z | 2022-03-26T17:27:38.000Z | convlab2/policy/larl/multiwoz/utils/delexicalize.py | ljw23/ConvLab-2 | 13d48ea0e441701bd66100689b6c25b561f15525 | [
"Apache-2.0"
] | 122 | 2020-04-12T04:19:06.000Z | 2022-03-23T14:20:57.000Z | convlab2/policy/larl/multiwoz/utils/delexicalize.py | ljw23/ConvLab-2 | 13d48ea0e441701bd66100689b6c25b561f15525 | [
"Apache-2.0"
] | 138 | 2020-02-18T16:48:04.000Z | 2022-03-26T17:27:43.000Z | import pickle
import re
import simplejson as json
def normalize(text):
# lower case every word
text = text.lower()
# replace white spaces in front and end
text = re.sub(r'^\s*|\s*$', '', text)
# hotel domain pfb30
text = re.sub(r"b&b", "bed and breakfast", text)
text = re.sub(r"b and b", "bed and breakfast", text)
# normalize phone number
ms = re.findall('\(?(\d{3})\)?[-.\s]?(\d{3})[-.\s]?(\d{4,5})', text)
if ms:
sidx = 0
for m in ms:
sidx = text.find(m[0], sidx)
if text[sidx - 1] == '(':
sidx -= 1
eidx = text.find(m[-1], sidx) + len(m[-1])
text = text.replace(text[sidx:eidx], ''.join(m))
# normalize postcode
ms = re.findall('([a-z]{1}[\. ]?[a-z]{1}[\. ]?\d{1,2}[, ]+\d{1}[\. ]?[a-z]{1}[\. ]?[a-z]{1}|[a-z]{2}\d{2}[a-z]{2})',
text)
if ms:
sidx = 0
for m in ms:
sidx = text.find(m, sidx)
eidx = sidx + len(m)
text = text[:sidx] + re.sub('[,\. ]', '', m) + text[eidx:]
# weird unicode bug
text = re.sub(u"(\u2018|\u2019)", "'", text)
# replace time and and price
text = re.sub(timepat, ' [value_time] ', text)
text = re.sub(pricepat, ' [value_price] ', text)
#text = re.sub(pricepat2, '[value_price]', text)
# replace st.
text = text.replace(';', ',')
text = re.sub('$\/', '', text)
text = text.replace('/', ' and ')
# replace other special characters
text = text.replace('-', ' ')
text = re.sub('[\":\<>@\(\)]', '', text)
# insert white space before and after tokens:
for token in ['?', '.', ',', '!']:
text = insertSpace(token, text)
# insert white space for 's
text = insertSpace('\'s', text)
# replace it's, does't, you'd ... etc
text = re.sub('^\'', '', text)
text = re.sub('\'$', '', text)
text = re.sub('\'\s', ' ', text)
text = re.sub('\s\'', ' ', text)
for fromx, tox in replacements:
text = ' ' + text + ' '
text = text.replace(fromx, tox)[1:-1]
# remove multiple spaces
text = re.sub(' +', ' ', text)
# concatenate numbers
tmp = text
tokens = text.split()
i = 1
while i < len(tokens):
if re.match(u'^\d+$', tokens[i]) and \
re.match(u'\d+$', tokens[i - 1]):
tokens[i - 1] += tokens[i]
del tokens[i]
else:
i += 1
text = ' '.join(tokens)
return text
digitpat = re.compile('\d+')
timepat = re.compile("\d{1,2}[:]\d{1,2}")
pricepat2 = re.compile("\d{1,3}[.]\d{1,2}")
# FORMAT
# domain_value
# restaurant_postcode
# restaurant_address
# taxi_car8
# taxi_number
# train_id etc..
def prepareSlotValuesIndependent():
domains = ['restaurant', 'hotel', 'attraction',
'train', 'taxi', 'hospital', 'police']
requestables = ['phone', 'address', 'postcode', 'reference', 'id']
dic = []
dic_area = []
dic_food = []
dic_price = []
# read databases
for domain in domains:
try:
fin = open('db/' + domain + '_db.json')
db_json = json.load(fin)
fin.close()
for ent in db_json:
for key, val in ent.items():
if val == '?' or val == 'free':
pass
elif key == 'address':
dic.append(
(normalize(val), '[' + domain + '_' + 'address' + ']'))
if "road" in val:
val = val.replace("road", "rd")
dic.append(
(normalize(val), '[' + domain + '_' + 'address' + ']'))
elif "rd" in val:
val = val.replace("rd", "road")
dic.append(
(normalize(val), '[' + domain + '_' + 'address' + ']'))
elif "st" in val:
val = val.replace("st", "street")
dic.append(
(normalize(val), '[' + domain + '_' + 'address' + ']'))
elif "street" in val:
val = val.replace("street", "st")
dic.append(
(normalize(val), '[' + domain + '_' + 'address' + ']'))
elif key == 'name':
dic.append(
(normalize(val), '[' + domain + '_' + 'name' + ']'))
if "b & b" in val:
val = val.replace("b & b", "bed and breakfast")
dic.append(
(normalize(val), '[' + domain + '_' + 'name' + ']'))
elif "bed and breakfast" in val:
val = val.replace("bed and breakfast", "b & b")
dic.append(
(normalize(val), '[' + domain + '_' + 'name' + ']'))
elif "hotel" in val and 'gonville' not in val:
val = val.replace("hotel", "")
dic.append(
(normalize(val), '[' + domain + '_' + 'name' + ']'))
elif "restaurant" in val:
val = val.replace("restaurant", "")
dic.append(
(normalize(val), '[' + domain + '_' + 'name' + ']'))
elif key == 'postcode':
dic.append(
(normalize(val), '[' + domain + '_' + 'postcode' + ']'))
elif key == 'phone':
dic.append((val, '[' + domain + '_' + 'phone' + ']'))
elif key == 'trainID':
dic.append(
(normalize(val), '[' + domain + '_' + 'id' + ']'))
elif key == 'department':
dic.append(
(normalize(val), '[' + domain + '_' + 'department' + ']'))
# NORMAL DELEX
elif key == 'area':
dic_area.append(
(normalize(val), '[' + 'value' + '_' + 'area' + ']'))
elif key == 'food':
dic_food.append(
(normalize(val), '[' + 'value' + '_' + 'food' + ']'))
elif key == 'pricerange':
dic_price.append(
(normalize(val), '[' + 'value' + '_' + 'pricerange' + ']'))
else:
pass
# TODO car type?
except:
pass
if domain == 'hospital':
dic.append(
(normalize('Hills Rd'), '[' + domain + '_' + 'address' + ']'))
dic.append((normalize('Hills Road'),
'[' + domain + '_' + 'address' + ']'))
dic.append(
(normalize('CB20QQ'), '[' + domain + '_' + 'postcode' + ']'))
dic.append(('01223245151', '[' + domain + '_' + 'phone' + ']'))
dic.append(('1223245151', '[' + domain + '_' + 'phone' + ']'))
dic.append(('0122324515', '[' + domain + '_' + 'phone' + ']'))
dic.append((normalize('Addenbrookes Hospital'),
'[' + domain + '_' + 'name' + ']'))
elif domain == 'police':
dic.append(
(normalize('Parkside'), '[' + domain + '_' + 'address' + ']'))
dic.append(
(normalize('CB11JG'), '[' + domain + '_' + 'postcode' + ']'))
dic.append(('01223358966', '[' + domain + '_' + 'phone' + ']'))
dic.append(('1223358966', '[' + domain + '_' + 'phone' + ']'))
dic.append((normalize('Parkside Police Station'),
'[' + domain + '_' + 'name' + ']'))
# add at the end places from trains
# fin = file('db/' + 'train' + '_db.json')
fin = open('db/' + 'train' + '_db.json')
db_json = json.load(fin)
fin.close()
for ent in db_json:
for key, val in ent.items():
if key == 'departure' or key == 'destination':
dic.append(
(normalize(val), '[' + 'value' + '_' + 'place' + ']'))
# add specific values:
for key in ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']:
dic.append((normalize(key), '[' + 'value' + '_' + 'day' + ']'))
# more general values add at the end
dic.extend(dic_area)
dic.extend(dic_food)
dic.extend(dic_price)
return dic
def delexicalise(utt, dictionary):
for key, val in dictionary:
utt = (' ' + utt + ' ').replace(' ' + key + ' ', ' ' + val + ' ')
utt = utt[1:-1] # why this?
return utt
def delexicaliseDomain(utt, dictionary, domain):
for key, val in dictionary:
if key == domain or key == 'value':
utt = (' ' + utt + ' ').replace(' ' + key + ' ', ' ' + val + ' ')
utt = utt[1:-1] # why this?
# go through rest of domain in case we are missing something out?
for key, val in dictionary:
utt = (' ' + utt + ' ').replace(' ' + key + ' ', ' ' + val + ' ')
utt = utt[1:-1] # why this?
return utt
if __name__ == '__main__':
dic = prepareSlotValuesIndependent()
pickle.dump(dic, open('data/svdic.pkl', 'wb'))
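# --- Added usage sketch (editor's example) ---
# Shows how the pickled dictionary written above might be reloaded to
# delexicalise a single utterance; the pickle path mirrors the __main__ block,
# and the utterance is assumed to be already lower-cased/normalised.
def _example_delexicalise_utterance(utterance='i want a cheap restaurant in the centre'):
    with open('data/svdic.pkl', 'rb') as f:
        dic = pickle.load(f)
    return delexicalise(utterance, dic)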
| 36.785441 | 120 | 0.412457 |
f709bb26c60915265cade2b384fdde8847a123c3 | 1,070 | py | Python | messenger/migrations/0001_initial.py | lucida-no/hdo-quiz-service | 32e03165e8d495f1290edd2b96cc1cba415f9799 | [
"BSD-3-Clause"
] | null | null | null | messenger/migrations/0001_initial.py | lucida-no/hdo-quiz-service | 32e03165e8d495f1290edd2b96cc1cba415f9799 | [
"BSD-3-Clause"
] | 13 | 2017-01-01T23:23:29.000Z | 2017-05-27T11:15:38.000Z | messenger/migrations/0001_initial.py | lucida-no/hdo-messenger-backend | 32e03165e8d495f1290edd2b96cc1cba415f9799 | [
"BSD-3-Clause"
] | 1 | 2017-01-01T16:32:30.000Z | 2017-01-01T16:32:30.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-03-05 13:47
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='ChatSession',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('state', models.CharField(choices=[('in_progress', 'In progress'), ('complete', 'Complete')], default='in_progress', max_length=100)),
('uuid', models.UUIDField(default=uuid.uuid4, unique=True)),
('user_id', models.CharField(db_index=True, max_length=100)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True, db_index=True)),
('meta', jsonfield.fields.JSONField(blank=True, default=dict)),
],
),
]
| 34.516129 | 151 | 0.614019 |
8ae83f5a89033cedc3e1cdd996f4657b1f2e20f1 | 6,184 | py | Python | meiduo1/apps/goods/models.py | caoyongpeng/CYP_meiduo | 378cc05a8621b36dc15714a10258606860bb5ad2 | [
"MIT"
] | null | null | null | meiduo1/apps/goods/models.py | caoyongpeng/CYP_meiduo | 378cc05a8621b36dc15714a10258606860bb5ad2 | [
"MIT"
] | null | null | null | meiduo1/apps/goods/models.py | caoyongpeng/CYP_meiduo | 378cc05a8621b36dc15714a10258606860bb5ad2 | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
from utils.models import BaseModel
class GoodsCategory(BaseModel):
    """Goods category"""
name = models.CharField(max_length=10, verbose_name='名称')
parent = models.ForeignKey('self', related_name='subs', null=True, blank=True, on_delete=models.CASCADE, verbose_name='父类别')
class Meta:
db_table = 'tb_goods_category'
verbose_name = '商品类别'
verbose_name_plural = verbose_name
def __str__(self):
return self.name
class GoodsChannelGroup(BaseModel):
    """Goods channel group"""
name = models.CharField(max_length=20, verbose_name='频道组名')
class Meta:
db_table = 'tb_channel_group'
verbose_name = '商品频道组'
verbose_name_plural = verbose_name
def __str__(self):
return self.name
class GoodsChannel(BaseModel):
    """Goods channel"""
group = models.ForeignKey(GoodsChannelGroup, verbose_name='频道组名')
category = models.ForeignKey(GoodsCategory, on_delete=models.CASCADE, verbose_name='顶级商品类别')
url = models.CharField(max_length=50, verbose_name='频道页面链接')
sequence = models.IntegerField(verbose_name='组内顺序')
class Meta:
db_table = 'tb_goods_channel'
verbose_name = '商品频道'
verbose_name_plural = verbose_name
def __str__(self):
return self.category.name
class Brand(BaseModel):
    """Brand"""
name = models.CharField(max_length=20, verbose_name='名称')
logo = models.ImageField(verbose_name='Logo图片')
first_letter = models.CharField(max_length=1, verbose_name='品牌首字母')
class Meta:
db_table = 'tb_brand'
verbose_name = '品牌'
verbose_name_plural = verbose_name
def __str__(self):
return self.name
class SPU(BaseModel):
    """Goods SPU (standard product unit)"""
name = models.CharField(max_length=50, verbose_name='名称')
brand = models.ForeignKey(Brand, on_delete=models.PROTECT, verbose_name='品牌')
category1 = models.ForeignKey(GoodsCategory, on_delete=models.PROTECT, related_name='cat1_spu', verbose_name='一级类别')
category2 = models.ForeignKey(GoodsCategory, on_delete=models.PROTECT, related_name='cat2_spu', verbose_name='二级类别')
category3 = models.ForeignKey(GoodsCategory, on_delete=models.PROTECT, related_name='cat3_spu', verbose_name='三级类别')
sales = models.IntegerField(default=0, verbose_name='销量')
comments = models.IntegerField(default=0, verbose_name='评价数')
desc_detail = models.TextField(default='', verbose_name='详细介绍')
desc_pack = models.TextField(default='', verbose_name='包装信息')
desc_service = models.TextField(default='', verbose_name='售后服务')
class Meta:
db_table = 'tb_spu'
verbose_name = '商品SPU'
verbose_name_plural = verbose_name
def __str__(self):
return self.name
class SKU(BaseModel):
    """Goods SKU (stock keeping unit)"""
name = models.CharField(max_length=50, verbose_name='名称')
caption = models.CharField(max_length=100, verbose_name='副标题')
spu = models.ForeignKey(SPU, on_delete=models.CASCADE, verbose_name='商品')
category = models.ForeignKey(GoodsCategory, on_delete=models.PROTECT, verbose_name='从属类别')
price = models.DecimalField(max_digits=10, decimal_places=2, verbose_name='单价')
cost_price = models.DecimalField(max_digits=10, decimal_places=2, verbose_name='进价')
market_price = models.DecimalField(max_digits=10, decimal_places=2, verbose_name='市场价')
stock = models.IntegerField(default=0, verbose_name='库存')
sales = models.IntegerField(default=0, verbose_name='销量')
comments = models.IntegerField(default=0, verbose_name='评价数')
is_launched = models.BooleanField(default=True, verbose_name='是否上架销售')
default_image = models.ImageField(max_length=200, default='', null=True, blank=True, verbose_name='默认图片')
class Meta:
db_table = 'tb_sku'
verbose_name = '商品SKU'
verbose_name_plural = verbose_name
def __str__(self):
return '%s: %s' % (self.id, self.name)
class SKUImage(BaseModel):
    """SKU image"""
sku = models.ForeignKey(SKU, on_delete=models.CASCADE, verbose_name='sku')
image = models.ImageField(verbose_name='图片')
class Meta:
db_table = 'tb_sku_image'
verbose_name = 'SKU图片'
verbose_name_plural = verbose_name
def __str__(self):
return '%s %s' % (self.sku.name, self.id)
class SPUSpecification(BaseModel):
    """Goods SPU specification"""
spu = models.ForeignKey(SPU, on_delete=models.CASCADE, related_name='specs', verbose_name='商品SPU')
name = models.CharField(max_length=20, verbose_name='规格名称')
class Meta:
db_table = 'tb_spu_specification'
verbose_name = '商品SPU规格'
verbose_name_plural = verbose_name
def __str__(self):
return '%s: %s' % (self.spu.name, self.name)
class SpecificationOption(BaseModel):
    """Specification option"""
spec = models.ForeignKey(SPUSpecification, related_name='options', on_delete=models.CASCADE, verbose_name='规格')
value = models.CharField(max_length=20, verbose_name='选项值')
class Meta:
db_table = 'tb_specification_option'
verbose_name = '规格选项'
verbose_name_plural = verbose_name
def __str__(self):
return '%s - %s' % (self.spec, self.value)
class SKUSpecification(BaseModel):
    """Concrete SKU specification"""
sku = models.ForeignKey(SKU, related_name='specs', on_delete=models.CASCADE, verbose_name='sku')
spec = models.ForeignKey(SPUSpecification, on_delete=models.PROTECT, verbose_name='规格名称')
option = models.ForeignKey(SpecificationOption, on_delete=models.PROTECT, verbose_name='规格值')
class Meta:
db_table = 'tb_sku_specification'
verbose_name = 'SKU规格'
verbose_name_plural = verbose_name
def __str__(self):
return '%s: %s - %s' % (self.sku, self.spec.name, self.option.value)
class GoodsVisitCount(BaseModel):
    """Model for counting daily visits to goods, grouped by category"""
category = models.ForeignKey(GoodsCategory, on_delete=models.CASCADE, verbose_name='商品分类')
count = models.IntegerField(verbose_name='访问量', default=0)
date = models.DateField(auto_now_add=True, verbose_name='统计日期')
class Meta:
db_table = 'tb_goods_visit'
verbose_name = '统计分类商品访问量'
verbose_name_plural = verbose_name
| 35.337143 | 128 | 0.695181 |
e8f27ca6b347590c456608ff9c9e0323ec513a43 | 9,900 | py | Python | tests/pytorch/test_frame.py | astonzhang/dgl | 2664ed2dff9d0e9e45d2349c531b460b31cec215 | [
"Apache-2.0"
] | null | null | null | tests/pytorch/test_frame.py | astonzhang/dgl | 2664ed2dff9d0e9e45d2349c531b460b31cec215 | [
"Apache-2.0"
] | null | null | null | tests/pytorch/test_frame.py | astonzhang/dgl | 2664ed2dff9d0e9e45d2349c531b460b31cec215 | [
"Apache-2.0"
] | 1 | 2019-07-05T17:10:08.000Z | 2019-07-05T17:10:08.000Z | import torch as th
from torch.autograd import Variable
import numpy as np
from dgl.frame import Frame, FrameRef
from dgl.utils import Index, toindex
import utils as U
N = 10
D = 5
def check_fail(fn):
try:
fn()
return False
except:
return True
def create_test_data(grad=False):
c1 = Variable(th.randn(N, D), requires_grad=grad)
c2 = Variable(th.randn(N, D), requires_grad=grad)
c3 = Variable(th.randn(N, D), requires_grad=grad)
return {'a1' : c1, 'a2' : c2, 'a3' : c3}
def test_create():
data = create_test_data()
f1 = Frame(num_rows=N)
for k, v in data.items():
f1.update_column(k, v)
print(f1.schemes)
assert f1.keys() == set(data.keys())
assert f1.num_columns == 3
assert f1.num_rows == N
f2 = Frame(data)
assert f2.keys() == set(data.keys())
assert f2.num_columns == 3
assert f2.num_rows == N
f1.clear()
assert len(f1.schemes) == 0
assert f1.num_rows == 0
def test_column1():
# Test frame column getter/setter
data = create_test_data()
f = Frame(data)
assert f.num_rows == N
assert len(f) == 3
assert U.allclose(f['a1'].data, data['a1'].data)
f['a1'] = data['a2']
assert U.allclose(f['a2'].data, data['a2'].data)
# add a different length column should fail
def failed_add_col():
f['a4'] = th.zeros([N+1, D])
assert check_fail(failed_add_col)
# delete all the columns
del f['a1']
del f['a2']
assert len(f) == 1
del f['a3']
assert len(f) == 0
def test_column2():
# Test frameref column getter/setter
data = Frame(create_test_data())
f = FrameRef(data, [3, 4, 5, 6, 7])
assert f.num_rows == 5
assert len(f) == 3
assert U.allclose(f['a1'], data['a1'].data[3:8])
# set column should reflect on the referenced data
f['a1'] = th.zeros([5, D])
assert U.allclose(data['a1'].data[3:8], th.zeros([5, D]))
# add new partial column should fail with error initializer
f.set_initializer(lambda shape, dtype : assert_(False))
def failed_add_col():
f['a4'] = th.ones([5, D])
assert check_fail(failed_add_col)
def test_append1():
# test append API on Frame
data = create_test_data()
f1 = Frame()
f2 = Frame(data)
f1.append(data)
assert f1.num_rows == N
f1.append(f2)
assert f1.num_rows == 2 * N
c1 = f1['a1']
assert c1.data.shape == (2 * N, D)
truth = th.cat([data['a1'], data['a1']])
assert U.allclose(truth, c1.data)
# append dict of different length columns should fail
f3 = {'a1' : th.zeros((3, D)), 'a2' : th.zeros((3, D)), 'a3' : th.zeros((2, D))}
def failed_append():
f1.append(f3)
assert check_fail(failed_append)
def test_append2():
# test append on FrameRef
data = Frame(create_test_data())
f = FrameRef(data)
assert f.is_contiguous()
assert f.is_span_whole_column()
assert f.num_rows == N
# append on the underlying frame should not reflect on the ref
data.append(data)
assert f.is_contiguous()
assert not f.is_span_whole_column()
assert f.num_rows == N
# append on the FrameRef should work
f.append(data)
assert not f.is_contiguous()
assert not f.is_span_whole_column()
assert f.num_rows == 3 * N
new_idx = list(range(N)) + list(range(2*N, 4*N))
assert th.all(f.index().tousertensor() == th.tensor(new_idx, dtype=th.int64))
assert data.num_rows == 4 * N
def test_append3():
# test append on empty frame
f = Frame(num_rows=5)
data = {'h' : th.ones((3, 2))}
f.append(data)
assert f.num_rows == 8
ans = th.cat([th.zeros((5, 2)), th.ones((3, 2))], dim=0)
assert U.allclose(f['h'].data, ans)
# test append with new column
data = {'h' : 2 * th.ones((3, 2)), 'w' : 2 * th.ones((3, 2))}
f.append(data)
assert f.num_rows == 11
ans1 = th.cat([ans, 2 * th.ones((3, 2))], 0)
ans2 = th.cat([th.zeros((8, 2)), 2 * th.ones((3, 2))], 0)
assert U.allclose(f['h'].data, ans1)
assert U.allclose(f['w'].data, ans2)
def test_row1():
# test row getter/setter
data = create_test_data()
f = FrameRef(Frame(data))
# getter
# test non-duplicate keys
rowid = Index(th.tensor([0, 2]))
rows = f[rowid]
for k, v in rows.items():
assert v.shape == (len(rowid), D)
assert U.allclose(v, data[k][rowid])
# test duplicate keys
rowid = Index(th.tensor([8, 2, 2, 1]))
rows = f[rowid]
for k, v in rows.items():
assert v.shape == (len(rowid), D)
assert U.allclose(v, data[k][rowid])
# setter
rowid = Index(th.tensor([0, 2, 4]))
vals = {'a1' : th.zeros((len(rowid), D)),
'a2' : th.zeros((len(rowid), D)),
'a3' : th.zeros((len(rowid), D)),
}
f[rowid] = vals
for k, v in f[rowid].items():
assert U.allclose(v, th.zeros((len(rowid), D)))
# setting rows with new column should raise error with error initializer
f.set_initializer(lambda shape, dtype : assert_(False))
def failed_update_rows():
vals['a4'] = th.ones((len(rowid), D))
f[rowid] = vals
assert check_fail(failed_update_rows)
def test_row2():
# test row getter/setter autograd compatibility
data = create_test_data(grad=True)
f = FrameRef(Frame(data))
# getter
c1 = f['a1']
# test non-duplicate keys
rowid = Index(th.tensor([0, 2]))
rows = f[rowid]
rows['a1'].backward(th.ones((len(rowid), D)))
assert U.allclose(c1.grad[:,0], th.tensor([1., 0., 1., 0., 0., 0., 0., 0., 0., 0.]))
c1.grad.data.zero_()
# test duplicate keys
rowid = Index(th.tensor([8, 2, 2, 1]))
rows = f[rowid]
rows['a1'].backward(th.ones((len(rowid), D)))
assert U.allclose(c1.grad[:,0], th.tensor([0., 1., 2., 0., 0., 0., 0., 0., 1., 0.]))
c1.grad.data.zero_()
# setter
c1 = f['a1']
rowid = Index(th.tensor([0, 2, 4]))
vals = {'a1' : Variable(th.zeros((len(rowid), D)), requires_grad=True),
'a2' : Variable(th.zeros((len(rowid), D)), requires_grad=True),
'a3' : Variable(th.zeros((len(rowid), D)), requires_grad=True),
}
f[rowid] = vals
c11 = f['a1']
c11.backward(th.ones((N, D)))
assert U.allclose(c1.grad[:,0], th.tensor([0., 1., 0., 1., 0., 1., 1., 1., 1., 1.]))
assert U.allclose(vals['a1'].grad, th.ones((len(rowid), D)))
assert vals['a2'].grad is None
def test_row3():
# test row delete
data = Frame(create_test_data())
f = FrameRef(data)
assert f.is_contiguous()
assert f.is_span_whole_column()
assert f.num_rows == N
del f[toindex(th.tensor([2, 3]))]
assert not f.is_contiguous()
assert not f.is_span_whole_column()
# delete is lazy: only reflect on the ref while the
# underlying storage should not be touched
assert f.num_rows == N - 2
assert data.num_rows == N
newidx = list(range(N))
newidx.pop(2)
newidx.pop(2)
newidx = toindex(newidx)
for k, v in f.items():
assert U.allclose(v, data[k][newidx])
def test_row4():
# test updating row with empty frame but has preset num_rows
f = FrameRef(Frame(num_rows=5))
rowid = Index(th.tensor([0, 2, 4]))
f[rowid] = {'h' : th.ones((3, 2))}
ans = th.zeros((5, 2))
ans[th.tensor([0, 2, 4])] = th.ones((3, 2))
assert U.allclose(f['h'], ans)
def test_sharing():
data = Frame(create_test_data())
f1 = FrameRef(data, index=[0, 1, 2, 3])
f2 = FrameRef(data, index=[2, 3, 4, 5, 6])
# test read
for k, v in f1.items():
assert U.allclose(data[k].data[0:4], v)
for k, v in f2.items():
assert U.allclose(data[k].data[2:7], v)
f2_a1 = f2['a1'].data
# test write
# update own ref should not been seen by the other.
f1[Index(th.tensor([0, 1]))] = {
'a1' : th.zeros([2, D]),
'a2' : th.zeros([2, D]),
'a3' : th.zeros([2, D]),
}
assert U.allclose(f2['a1'], f2_a1)
# update shared space should been seen by the other.
f1[Index(th.tensor([2, 3]))] = {
'a1' : th.ones([2, D]),
'a2' : th.ones([2, D]),
'a3' : th.ones([2, D]),
}
f2_a1[0:2] = th.ones([2, D])
assert U.allclose(f2['a1'], f2_a1)
def test_slicing():
data = Frame(create_test_data(grad=True))
f1 = FrameRef(data, index=slice(1, 5))
f2 = FrameRef(data, index=slice(3, 8))
# test read
for k, v in f1.items():
assert U.allclose(data[k].data[1:5], v)
f2_a1 = f2['a1'].data
# test write
f1[Index(th.tensor([0, 1]))] = {
'a1': th.zeros([2, D]),
'a2': th.zeros([2, D]),
'a3': th.zeros([2, D]),
}
assert U.allclose(f2['a1'], f2_a1)
f1[Index(th.tensor([2, 3]))] = {
'a1': th.ones([2, D]),
'a2': th.ones([2, D]),
'a3': th.ones([2, D]),
}
f2_a1[0:2] = 1
assert U.allclose(f2['a1'], f2_a1)
f1[2:4] = {
'a1': th.zeros([2, D]),
'a2': th.zeros([2, D]),
'a3': th.zeros([2, D]),
}
f2_a1[0:2] = 0
assert U.allclose(f2['a1'], f2_a1)
def test_add_rows():
data = Frame()
f1 = FrameRef(data)
f1.add_rows(4)
x = th.randn(1, 4)
f1[Index(th.tensor([0]))] = {'x': x}
ans = th.cat([x, th.zeros(3, 4)])
assert U.allclose(f1['x'], ans)
f1.add_rows(4)
f1[4:8] = {'x': th.ones(4, 4), 'y': th.ones(4, 5)}
ans = th.cat([ans, th.ones(4, 4)])
assert U.allclose(f1['x'], ans)
ans = th.cat([th.zeros(4, 5), th.ones(4, 5)])
assert U.allclose(f1['y'], ans)
if __name__ == '__main__':
test_create()
test_column1()
test_column2()
test_append1()
test_append2()
test_append3()
test_row1()
test_row2()
test_row3()
test_row4()
test_sharing()
test_slicing()
test_add_rows()
| 30.745342 | 88 | 0.564242 |
f7cd7ac4c9d924cf77ef92160f6f9112b6c12405 | 1,496 | py | Python | webserver/webapp/python/logout.py | sebrink/SILVEROCELOT | c47dcd1f031007471d635d995b679ee85c1e5857 | [
"MIT"
] | 2 | 2019-01-30T20:51:03.000Z | 2019-12-14T04:57:23.000Z | webserver/webapp/python/logout.py | sebrink/SILVEROCELOT | c47dcd1f031007471d635d995b679ee85c1e5857 | [
"MIT"
] | 27 | 2019-02-06T02:32:51.000Z | 2019-04-22T04:08:56.000Z | webserver/webapp/python/logout.py | sebrink/SILVEROCELOT | c47dcd1f031007471d635d995b679ee85c1e5857 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import cgi, os
import cgitb; cgitb.enable()
import MySQLdb
import jwt
import Cookie
import urllib
from os import environ
from datetime import datetime, timedelta
import json
print('Content-type: text/html\r\n')
conn = MySQLdb.connect(host="database", user="root", passwd="MEEP1234", db="webdata")
cursor = conn.cursor()
form = cgi.FieldStorage()
token = None
session = None
if 'HTTP_COOKIE' in os.environ:
cookie_string=os.environ.get('HTTP_COOKIE')
c=Cookie.SimpleCookie()
c.load(cookie_string)
try:
session=c['session'].value
except KeyError:
pass
if session is None:
print('\r\n')
print('invalid token')
else:
try:
print('\r\n')
decoded = jwt.decode(session, 'secret', algorithms=['HS256'])
cursor.execute('select * from `User Store` where `User Store`.`UID` = "{}"'.format(decoded.get('username')))
ret = cursor.fetchall()
if len(ret) == 0:
print('invalid token')
exit()
if datetime.now() > datetime.strptime(decoded.get('expireDate'), '%Y-%m-%dT%H:%M:%S.%f'):
print('invalid token')
exit()
expires = (datetime.strptime('1970-1-1T12:34:56.100000','%Y-%m-%dT%H:%M:%S.%f').isoformat())
encoded_jwt = jwt.encode({'username': decoded.get('username'), 'expireDate': expires}, 'secret', algorithm='HS256')
print('\r\n')
print('session=' + encoded_jwt + '; path=/;')
except KeyError:
print('\r\n')
print('invalid token')
| 24.933333 | 121 | 0.631016 |
7a5b4f3c133effbe5eadd5a1a46e56c3ecf6a5d2 | 1,894 | py | Python | cellphonedb/utils/tests/test_dataframe_same_data.py | chapuzzo/cellphonedb | 27edf276024d7ab761cad30e3310dfbe9658052a | [
"MIT"
] | 278 | 2018-10-03T22:12:09.000Z | 2022-03-28T15:33:17.000Z | cellphonedb/utils/tests/test_dataframe_same_data.py | chapuzzo/cellphonedb | 27edf276024d7ab761cad30e3310dfbe9658052a | [
"MIT"
] | 263 | 2018-11-16T14:41:31.000Z | 2022-03-30T08:38:26.000Z | cellphonedb/utils/tests/test_dataframe_same_data.py | chapuzzo/cellphonedb | 27edf276024d7ab761cad30e3310dfbe9658052a | [
"MIT"
] | 106 | 2018-10-18T15:11:57.000Z | 2022-03-14T19:50:27.000Z | from unittest import TestCase
import pandas as pd
from cellphonedb.utils import dataframe_functions
class TestDataframeSameData(TestCase):
def test_compare_empty(self):
self.assertTrue(dataframe_functions.dataframes_has_same_data(pd.DataFrame(), pd.DataFrame()))
def test_equal(self):
dataframe1 = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
dataframe2 = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
self.assertTrue(dataframe_functions.dataframes_has_same_data(dataframe1, dataframe2))
def test_different(self):
dataframe1 = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
dataframe2 = pd.DataFrame({'col1': [5, 2], 'col2': [3, 4]})
self.assertFalse(dataframe_functions.dataframes_has_same_data(dataframe1, dataframe2))
def test_equal_unsorted(self):
dataframe1 = pd.DataFrame({'col1': [2, 1], 'col2': [4, 3]})
dataframe2 = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
self.assertTrue(dataframe_functions.dataframes_has_same_data(dataframe1, dataframe2))
def test_different_unsorted(self):
dataframe1 = pd.DataFrame({'col1': [2, 1], 'col2': [4, 5]})
dataframe2 = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
self.assertFalse(dataframe_functions.dataframes_has_same_data(dataframe1, dataframe2))
def test_equal_unsorted_columns(self):
dataframe1 = pd.DataFrame({'col2': [3, 4], 'col1': [1, 2]})
dataframe2 = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
self.assertTrue(dataframe_functions.dataframes_has_same_data(dataframe1, dataframe2))
def test_different_unsorted_columns(self):
dataframe1 = pd.DataFrame({'col2': [3, 4], 'col1': [1, 2]})
dataframe2 = pd.DataFrame({'col1': [1, 2], 'col2': [3, 5]})
self.assertFalse(dataframe_functions.dataframes_has_same_data(dataframe1, dataframe2))
| 40.297872 | 101 | 0.661035 |
a1f5e76f6ed4247b21eaa459e1b3e2990e8ded23 | 2,505 | py | Python | aquascope/webserver/api/items.py | MicroscopeIT/aquascope_backend | 6b8c13ca3d6bd0a96f750fae809b6cf5a0062f24 | [
"MIT"
] | null | null | null | aquascope/webserver/api/items.py | MicroscopeIT/aquascope_backend | 6b8c13ca3d6bd0a96f750fae809b6cf5a0062f24 | [
"MIT"
] | 3 | 2021-06-08T19:50:36.000Z | 2021-09-08T01:15:33.000Z | aquascope/webserver/api/items.py | MicroscopeIT/aquascope_backend | 6b8c13ca3d6bd0a96f750fae809b6cf5a0062f24 | [
"MIT"
] | 2 | 2019-05-15T13:30:42.000Z | 2020-06-12T02:42:49.000Z | from flask import current_app as app
from flask import request
from flask_jwt_extended import jwt_required
from flask_restful import Resource
from aquascope.webserver.data_access.db import Item, bulk_replace, find_items
from aquascope.webserver.data_access.db.items import paged_find_items
from aquascope.webserver.data_access.storage.blob import get_urls_for_items_dicts
from aquascope.webserver.schema.items import PostItemsUpdateSchema, GetItemsSchema, GetPagedItemsSchema
from aquascope.webserver.schema.custom_schema import FormattedValidationError
class PagedItems(Resource):
@jwt_required
def get(self):
schema = GetPagedItemsSchema()
try:
args = schema.load(request.args)
except FormattedValidationError as e:
return e.formatted_messages, 400
db = app.config['db']
page_size = app.config['page_size']
page_number = args.pop('continuation_token', 1)
items = list(paged_find_items(db, page_number, page_size, with_default_projection=True,
serializable=True, **args))
urls = get_urls_for_items_dicts(items)
response = {
'items': items,
'urls': urls
}
if len(items) == page_size and page_size > 0:
response['continuation_token'] = page_number + 1
return response
class Items(Resource):
@jwt_required
def get(self):
schema = GetItemsSchema()
try:
args = schema.load(request.args)
except FormattedValidationError as e:
return e.formatted_messages, 400
db = app.config['db']
items = list(find_items(db, with_default_projection=True, serializable=True, **args))
urls = get_urls_for_items_dicts(items)
return {
'items': items,
'urls': urls
}
@jwt_required
def post(self):
json_data = request.get_json(force=True)
schema = PostItemsUpdateSchema(many=True)
try:
json_data = schema.load(json_data)
except FormattedValidationError as e:
return e.formatted_messages, 400
update_pairs = [
(Item.from_request(elem['current']), Item.from_request(elem['update'])) for elem in json_data
]
db = app.config['db']
result = bulk_replace(db, update_pairs)
return {
"matched": result.matched_count,
"modified": result.modified_count
}
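# --- Added wiring sketch (editor's example, not from the original project) ---
# One way these resources could be registered with flask_restful; the URL
# rules below are assumptions, and app.config must already provide the 'db'
# and 'page_size' entries that the handlers above read.
def _example_register_resources(api):
    api.add_resource(Items, '/items')
    api.add_resource(PagedItems, '/items/paged')
    return api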
| 31.3125 | 105 | 0.647505 |
d9cb2c33d96d414d437fbdfbd4b580f4c67c599b | 1,600 | py | Python | visualization.py | Puraneshi/PaletteCluster | 7d73cc6de1d5e7b1c1c912233ef140bbfe33ff2a | [
"MIT"
] | null | null | null | visualization.py | Puraneshi/PaletteCluster | 7d73cc6de1d5e7b1c1c912233ef140bbfe33ff2a | [
"MIT"
] | null | null | null | visualization.py | Puraneshi/PaletteCluster | 7d73cc6de1d5e7b1c1c912233ef140bbfe33ff2a | [
"MIT"
] | 1 | 2019-06-18T18:20:29.000Z | 2019-06-18T18:20:29.000Z | from PIL import Image
import numpy as np
from sklearn.cluster import KMeans
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
# configuration variables and loading the image
filename = "totoro.jpg"
outputfile = filename.split('.')[0] + "palette.png"
im = Image.open(filename)
width, height = im.size
pixel_samples = 415
step = int(width*height//pixel_samples)
number_of_clusters = 5
# append only a sample of pixels according to variable 'pixel_samples'
pixels = np.array(im.getdata())
x = []
r = []
g = []
b = []
for pixel in pixels[::step]:
if np.linalg.norm(pixel) > 50:
x.append(pixel)
r.append(pixel[0])
g.append(pixel[1])
b.append(pixel[2])
x = np.array(x)
# find the pixel clusters in the image
# if the number of clusters is not close to the real number
# KMeans will average distant pixels, distorting the color
kmean = KMeans(n_clusters=number_of_clusters)
kmean.fit(x)
centers = kmean.cluster_centers_
labels = kmean.labels_
# get coordinates of clusters
rk = []
gk = []
bk = []
for center in kmean.cluster_centers_:
rk.append(center[0])
gk.append(center[1])
bk.append(center[2])
# plotting code
pixel_size = 10
pixel_normalized_color = x/255
cluster_size = 500
cluster_color = "red" # or (255, 0, 0)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(r, g, b, s=pixel_size, c=pixel_normalized_color)
ax.scatter(rk, gk, bk, c=cluster_color, s=cluster_size)
ax.set_xlabel('R')
ax.set_ylabel('G')
ax.set_zlabel('B')
plt.show()
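# --- Added palette export (editor's sketch) ---
# The cluster centres found above can also be written out as a simple palette
# strip; `outputfile` is defined near the top of the script but otherwise unused.
swatch = Image.new('RGB', (50 * number_of_clusters, 50))
for i, center in enumerate(centers):
    block = Image.new('RGB', (50, 50), tuple(int(c) for c in center))
    swatch.paste(block, (50 * i, 0))
swatch.save(outputfile)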
| 25 | 71 | 0.6825 |
e90a32ed2b38b2e449e9be8f6eb95bf937f957a6 | 4,735 | py | Python | model/code2seq.py | bzz/code2seq-pyTorch | bcae5d44354aa55cad11fda851cfa5c95b45614f | [
"MIT"
] | null | null | null | model/code2seq.py | bzz/code2seq-pyTorch | bcae5d44354aa55cad11fda851cfa5c95b45614f | [
"MIT"
] | null | null | null | model/code2seq.py | bzz/code2seq-pyTorch | bcae5d44354aa55cad11fda851cfa5c95b45614f | [
"MIT"
] | null | null | null | from typing import Dict, List
import torch
import torch.nn.functional as F
from configs import Code2SeqConfig
from dataset import Vocabulary, PathContextBatch
from model.modules import PathEncoder, PathDecoder
from utils.common import PAD, SOS, UNK
from utils.metrics import SubtokenStatistic
from .base_code_model import BaseCodeModel
class Code2Seq(BaseCodeModel):
def __init__(self, config: Code2SeqConfig, vocab: Vocabulary, num_workers: int):
super().__init__(config.hyperparams, vocab, num_workers)
self.save_hyperparameters()
if SOS not in vocab.label_to_id:
vocab.label_to_id[SOS] = len(vocab.label_to_id)
encoder_config = config.encoder_config
decoder_config = config.decoder_config
self.encoder = PathEncoder(
encoder_config,
decoder_config.decoder_size,
len(vocab.token_to_id),
vocab.token_to_id[PAD],
len(vocab.type_to_id),
vocab.type_to_id[PAD],
)
self.decoder = PathDecoder(
decoder_config, len(vocab.label_to_id), vocab.label_to_id[SOS], vocab.label_to_id[PAD]
)
def forward(
self,
samples: Dict[str, torch.Tensor],
paths_for_label: List[int],
output_length: int,
target_sequence: torch.Tensor = None,
) -> torch.Tensor:
return self.decoder(self.encoder(samples), paths_for_label, output_length, target_sequence)
def _calculate_loss(self, logits: torch.Tensor, labels: torch.Tensor) -> torch.Tensor:
"""Calculate cross entropy with ignoring PAD index
:param logits: [seq length; batch size; vocab size]
:param labels: [seq length; batch size]
:return: [1]
"""
batch_size = labels.shape[-1]
# [batch size; vocab size; seq length]
_logits = logits.permute(1, 2, 0)
# [batch size; seq length]
_labels = labels.permute(1, 0)
# [batch size; seq length]
loss = F.cross_entropy(_logits, _labels, reduction="none")
# [batch size; seq length]
mask = _labels != self.vocab.label_to_id[PAD]
# [batch size; seq length]
loss = loss * mask
# [1]
loss = loss.sum() / batch_size
return loss
def _general_epoch_end(self, outputs: List[Dict], loss_key: str, group: str) -> Dict:
with torch.no_grad():
logs = {f"{group}/loss": torch.stack([out[loss_key] for out in outputs]).mean()}
logs.update(
SubtokenStatistic.union_statistics([out["statistic"] for out in outputs]).calculate_metrics(group)
)
progress_bar = {k: v for k, v in logs.items() if k in [f"{group}/loss", f"{group}/f1"]}
return {f"{group}_loss": logs[f"{group}/loss"], "log": logs, "progress_bar": progress_bar}
def _calculate_metric(self, logits: torch.Tensor, labels: torch.Tensor) -> SubtokenStatistic:
with torch.no_grad():
# [seq length; batch size]
prediction = logits.argmax(-1)
mask_max_value, mask_max_indices = torch.max(prediction == self.vocab.label_to_id[PAD], dim=0)
mask_max_indices[~mask_max_value] = prediction.shape[0]
mask = torch.arange(prediction.shape[0], device=self.device).view(-1, 1) >= mask_max_indices
prediction[mask] = self.vocab.label_to_id[PAD]
statistic = SubtokenStatistic().calculate_statistic(
labels, prediction, [self.vocab.label_to_id[t] for t in [PAD, UNK]],
)
return statistic
def training_step(self, batch: PathContextBatch, batch_idx: int) -> Dict:
logits = self(batch.context, batch.contexts_per_label, batch.labels.shape[0], batch.labels)
loss = self._calculate_loss(logits, batch.labels)
log = {"train/loss": loss}
statistic = self._calculate_metric(logits, batch.labels)
log.update(statistic.calculate_metrics(group="train"))
progress_bar = {"train/f1": log["train/f1"]}
return {"loss": loss, "log": log, "progress_bar": progress_bar, "statistic": statistic}
def validation_step(self, batch: PathContextBatch, batch_idx: int) -> Dict:
# [seq length; batch size; vocab size]
logits = self(batch.context, batch.contexts_per_label, batch.labels.shape[0])
loss = self._calculate_loss(logits, batch.labels)
statistic = self._calculate_metric(logits, batch.labels)
return {"val_loss": loss, "statistic": statistic}
def test_step(self, batch: PathContextBatch, batch_idx: int) -> Dict:
result = self.validation_step(batch, batch_idx)
result["test_loss"] = result.pop("val_loss")
return result
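# --- Added standalone illustration (editor's sketch) ---
# A tiny, self-contained version of the PAD-masked cross entropy used in
# `_calculate_loss` above; the pad index and tensor sizes are arbitrary.
def _example_masked_cross_entropy(pad_idx: int = 0) -> torch.Tensor:
    logits = torch.randn(7, 2, 10)           # [seq length; batch size; vocab size]
    labels = torch.randint(1, 10, (7, 2))    # [seq length; batch size]
    labels[5:, 0] = pad_idx                  # pad the tail of the first sequence
    loss = F.cross_entropy(logits.permute(1, 2, 0), labels.permute(1, 0), reduction="none")
    mask = labels.permute(1, 0) != pad_idx
    return (loss * mask).sum() / labels.shape[1]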
| 43.440367 | 114 | 0.645829 |
932a998bc92866e4ea0322c665d2ce90e87fa69b | 8,267 | py | Python | r3det/models/dense_heads/rotate_retina_head.py | SJTU-Thinklab-Det/r3det-pytorch | aed1c26ecfad7ac518d24f0f4d537e1926a7e8bd | [
"Apache-2.0"
] | 42 | 2021-12-09T10:02:35.000Z | 2022-03-30T08:40:20.000Z | r3det/models/dense_heads/rotate_retina_head.py | SJTU-Thinklab-Det/r3det-pytorch | aed1c26ecfad7ac518d24f0f4d537e1926a7e8bd | [
"Apache-2.0"
] | 13 | 2021-12-14T01:47:32.000Z | 2022-03-30T08:01:17.000Z | r3det/models/dense_heads/rotate_retina_head.py | SJTU-Thinklab-Det/r3det-pytorch | aed1c26ecfad7ac518d24f0f4d537e1926a7e8bd | [
"Apache-2.0"
] | 5 | 2021-12-14T09:57:29.000Z | 2022-03-03T12:25:54.000Z | import torch.nn as nn
from mmcv.cnn import ConvModule, bias_init_with_prob, normal_init
from mmcv.runner import force_fp32
from mmdet.models.builder import HEADS
from .rotate_anchor_head import RAnchorHead
@HEADS.register_module()
class RRetinaHead(RAnchorHead):
r"""An anchor-based head used in `RetinaNet
<https://arxiv.org/pdf/1708.02002.pdf>`_.
The head contains two subnetworks. The first classifies anchor boxes and
the second regresses deltas for the anchors.
Example:
>>> import torch
        >>> self = RRetinaHead(11, 7)
>>> x = torch.rand(1, 7, 32, 32)
>>> cls_score, bbox_pred = self.forward_single(x)
>>> # Each anchor predicts a score for each class except background
>>> cls_per_anchor = cls_score.shape[1] / self.num_anchors
>>> box_per_anchor = bbox_pred.shape[1] / self.num_anchors
>>> assert cls_per_anchor == (self.num_classes)
        >>> assert box_per_anchor == 5
"""
def __init__(self,
num_classes,
in_channels,
stacked_convs=4,
conv_cfg=None,
norm_cfg=None,
anchor_generator=dict(
type='AnchorGenerator',
octave_base_scale=4,
scales_per_octave=3,
ratios=[0.5, 1.0, 2.0],
strides=[8, 16, 32, 64, 128]),
**kwargs):
self.stacked_convs = stacked_convs
self.conv_cfg = conv_cfg
self.norm_cfg = norm_cfg
super(RRetinaHead, self).__init__(
num_classes,
in_channels,
anchor_generator=anchor_generator,
**kwargs)
def _init_layers(self):
"""Initialize layers of the head."""
self.relu = nn.ReLU(inplace=True)
self.cls_convs = nn.ModuleList()
self.reg_convs = nn.ModuleList()
for i in range(self.stacked_convs):
chn = self.in_channels if i == 0 else self.feat_channels
self.cls_convs.append(
ConvModule(
chn,
self.feat_channels,
3,
stride=1,
padding=1,
conv_cfg=self.conv_cfg,
norm_cfg=self.norm_cfg))
self.reg_convs.append(
ConvModule(
chn,
self.feat_channels,
3,
stride=1,
padding=1,
conv_cfg=self.conv_cfg,
norm_cfg=self.norm_cfg))
self.retina_cls = nn.Conv2d(
self.feat_channels,
self.num_anchors * self.cls_out_channels,
3,
padding=1)
self.retina_reg = nn.Conv2d(
self.feat_channels, self.num_anchors * 5, 3, padding=1)
def init_weights(self):
"""Initialize weights of the head."""
for m in self.cls_convs:
normal_init(m.conv, std=0.01)
for m in self.reg_convs:
normal_init(m.conv, std=0.01)
bias_cls = bias_init_with_prob(0.01)
normal_init(self.retina_cls, std=0.01, bias=bias_cls)
normal_init(self.retina_reg, std=0.01)
def forward_single(self, x):
"""Forward feature of a single scale level.
Args:
x (torch.Tensor): Features of a single scale level.
Returns:
tuple:
cls_score (torch.Tensor): Cls scores for a single scale level
the channels number is num_anchors * num_classes.
bbox_pred (torch.Tensor): Box energies / deltas for a single
                    scale level, the channels number is num_anchors * 5.
"""
cls_feat = x
reg_feat = x
for cls_conv in self.cls_convs:
cls_feat = cls_conv(cls_feat)
for reg_conv in self.reg_convs:
reg_feat = reg_conv(reg_feat)
cls_score = self.retina_cls(cls_feat)
bbox_pred = self.retina_reg(reg_feat)
return cls_score, bbox_pred
@force_fp32(apply_to=('cls_scores', 'bbox_preds'))
def filter_bboxes(self, cls_scores, bbox_preds):
"""Filter predicted bounding boxes at each position of the feature
maps. Only one bounding boxes with highest score will be left at each
position. This filter will be used in R3Det prior to the first feature
refinement stage.
Args:
cls_scores (list[Tensor]): Box scores for each scale level
Has shape (N, num_anchors * num_classes, H, W)
bbox_preds (list[Tensor]): Box energies / deltas for each scale
level with shape (N, num_anchors * 5, H, W)
Returns:
list[list[Tensor]]: best or refined rbboxes of each level
of each image.
"""
num_levels = len(cls_scores)
assert num_levels == len(bbox_preds)
num_imgs = cls_scores[0].size(0)
for i in range(num_levels):
assert num_imgs == cls_scores[i].size(0) == bbox_preds[i].size(0)
device = cls_scores[0].device
featmap_sizes = [cls_scores[i].shape[-2:] for i in range(num_levels)]
mlvl_anchors = self.anchor_generator.grid_priors(
featmap_sizes, device=device)
bboxes_list = [[] for _ in range(num_imgs)]
for lvl in range(num_levels):
cls_score = cls_scores[lvl]
bbox_pred = bbox_preds[lvl]
anchors = mlvl_anchors[lvl]
cls_score = cls_score.permute(0, 2, 3, 1)
cls_score = cls_score.reshape(num_imgs, -1, self.num_anchors,
self.cls_out_channels)
cls_score, _ = cls_score.max(dim=-1, keepdim=True)
best_ind = cls_score.argmax(dim=-2, keepdim=True)
best_ind = best_ind.expand(-1, -1, -1, 5)
bbox_pred = bbox_pred.permute(0, 2, 3, 1)
bbox_pred = bbox_pred.reshape(num_imgs, -1, self.num_anchors, 5)
best_pred = bbox_pred.gather(
dim=-2, index=best_ind).squeeze(dim=-2)
anchors = anchors.reshape(-1, self.num_anchors, 5)
for img_id in range(num_imgs):
best_ind_i = best_ind[img_id]
best_pred_i = best_pred[img_id]
best_anchor_i = anchors.gather(
dim=-2, index=best_ind_i).squeeze(dim=-2)
best_bbox_i = self.bbox_coder.decode(best_anchor_i,
best_pred_i)
bboxes_list[img_id].append(best_bbox_i.detach())
return bboxes_list
@force_fp32(apply_to=('cls_scores', 'bbox_preds'))
def refine_bboxes(self, cls_scores, bbox_preds):
"""This function will be used in S2ANet, whose num_anchors=1.
Args:
cls_scores (list[Tensor]): Box scores for each scale level
Has shape (N, num_classes, H, W)
bbox_preds (list[Tensor]): Box energies / deltas for each scale
level with shape (N, 5, H, W)
Returns:
list[list[Tensor]]: refined rbboxes of each level of each image.
"""
num_levels = len(cls_scores)
assert num_levels == len(bbox_preds)
num_imgs = cls_scores[0].size(0)
for i in range(num_levels):
assert num_imgs == cls_scores[i].size(0) == bbox_preds[i].size(0)
device = cls_scores[0].device
featmap_sizes = [cls_scores[i].shape[-2:] for i in range(num_levels)]
mlvl_anchors = self.anchor_generator.grid_priors(
featmap_sizes, device=device)
bboxes_list = [[] for _ in range(num_imgs)]
for lvl in range(num_levels):
bbox_pred = bbox_preds[lvl]
bbox_pred = bbox_pred.permute(0, 2, 3, 1)
bbox_pred = bbox_pred.reshape(num_imgs, -1, 5)
anchors = mlvl_anchors[lvl]
for img_id in range(num_imgs):
bbox_pred_i = bbox_pred[img_id]
decode_bbox_i = self.bbox_coder.decode(anchors, bbox_pred_i)
bboxes_list[img_id].append(decode_bbox_i.detach())
return bboxes_list
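# --- Added forward-pass sketch (editor's example) ---
# Mirrors the class docstring: build a small head and run one level through
# forward_single. Constructing the head like this assumes the defaults that
# RAnchorHead provides for the bbox coder and losses are sufficient.
def _example_forward_single():
    import torch
    head = RRetinaHead(num_classes=15, in_channels=7, feat_channels=7, stacked_convs=1)
    cls_score, bbox_pred = head.forward_single(torch.rand(1, 7, 32, 32))
    # cls_score: (1, num_anchors * 15, 32, 32); bbox_pred: (1, num_anchors * 5, 32, 32)
    return cls_score.shape, bbox_pred.shape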
| 37.748858 | 78 | 0.56913 |
17aa18811ed57fc375a7cf375f1fde99d6392988 | 1,671 | py | Python | chameleon/legacy/skfeature/function/wrapper/svm_forward.py | diviyat/chameleon | 12c212ef3ecaab73e6b2a4ae378b25495926ad75 | [
"MIT"
] | 2 | 2021-10-21T23:09:00.000Z | 2021-12-14T07:55:43.000Z | chameleon/legacy/skfeature/function/wrapper/svm_forward.py | diviyat/chameleon | 12c212ef3ecaab73e6b2a4ae378b25495926ad75 | [
"MIT"
] | null | null | null | chameleon/legacy/skfeature/function/wrapper/svm_forward.py | diviyat/chameleon | 12c212ef3ecaab73e6b2a4ae378b25495926ad75 | [
"MIT"
] | null | null | null | import numpy as np
from sklearn.svm import SVC
from sklearn.model_selection import KFold
from sklearn.metrics import accuracy_score
def svm_forward(X, y, n_selected_features):
"""
This function implements the forward feature selection algorithm based on SVM
Input
-----
X: {numpy array}, shape (n_samples, n_features)
input data
y: {numpy array}, shape (n_samples,)
input class labels
n_selected_features: {int}
number of selected features
Output
------
F: {numpy array}, shape (n_features, )
index of selected features
"""
n_samples, n_features = X.shape
# using 10 fold cross validation
    cv = KFold(n_splits=10, shuffle=True)
# choose SVM as the classifier
clf = SVC()
# selected feature set, initialized to be empty
F = []
count = 0
while count < n_selected_features:
max_acc = 0
for i in range(n_features):
if i not in F:
F.append(i)
X_tmp = X[:, F]
acc = 0
                for train, test in cv.split(X_tmp):
clf.fit(X_tmp[train], y[train])
y_predict = clf.predict(X_tmp[test])
acc_tmp = accuracy_score(y[test], y_predict)
acc += acc_tmp
acc = float(acc)/10
F.pop()
# record the feature which results in the largest accuracy
if acc > max_acc:
max_acc = acc
idx = i
# add the feature which results in the largest accuracy
F.append(idx)
count += 1
return np.array(F) | 29.839286 | 81 | 0.557151 |
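# --- Added usage sketch (editor's example) ---
# Minimal run of svm_forward on synthetic data; this is slow for large inputs
# because every candidate feature triggers a 10-fold SVM evaluation.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    X_demo = rng.rand(60, 8)
    y_demo = (X_demo[:, 0] + X_demo[:, 3] > 1).astype(int)
    print(svm_forward(X_demo, y_demo, n_selected_features=3))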
e33bff7d2687761d21cb79fce496e2ca268b3aa5 | 54,414 | py | Python | tensorflow/python/saved_model/load_test.py | drewszurko/tensorflow | f2a9a2cdd87673182e94b3c25fcfc210315d014b | [
"Apache-2.0"
] | null | null | null | tensorflow/python/saved_model/load_test.py | drewszurko/tensorflow | f2a9a2cdd87673182e94b3c25fcfc210315d014b | [
"Apache-2.0"
] | 59 | 2019-06-17T09:37:49.000Z | 2022-01-19T01:21:34.000Z | tensorflow/python/saved_model/load_test.py | CostasVoliotisXO/tensorflow | e6785ff6cc9c0dfe688c3ab7c22d27134de75368 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for trackable object SavedModel loading."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import os
import tempfile
import weakref
from absl.testing import parameterized
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import backprop
from tensorflow.python.eager import def_function
from tensorflow.python.eager import test
from tensorflow.python.feature_column import feature_column_v2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import test_util
from tensorflow.python.keras.engine import input_layer
from tensorflow.python.keras.engine import sequential
from tensorflow.python.keras.engine import training as training_lib
from tensorflow.python.keras.layers import convolutional
from tensorflow.python.keras.layers import core
from tensorflow.python.keras.optimizer_v2 import adam
from tensorflow.python.lib.io import file_io
from tensorflow.python.module import module
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.saved_model import load
from tensorflow.python.saved_model import save
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.training import monitored_session
from tensorflow.python.training.tracking import tracking
from tensorflow.python.training.tracking import util
from tensorflow.python.util import tf_inspect
@parameterized.named_parameters(
dict(testcase_name="ReloadOnce", cycles=1),
dict(testcase_name="ReloadTwice", cycles=2),
dict(testcase_name="ReloadThrice", cycles=3))
class LoadTest(test.TestCase, parameterized.TestCase):
def cycle(self, obj, cycles, signatures=None):
to_save = obj
# TODO(vbardiovsky): It would be nice if exported protos reached a fixed
# point w.r.t. saving/restoring, ideally after 2nd saving.
for _ in range(cycles):
path = tempfile.mkdtemp(prefix=self.get_temp_dir())
save.save(to_save, path, signatures)
loaded = load.load(path)
to_save = loaded
return loaded
def test_structure_import(self, cycles):
root = tracking.AutoTrackable()
root.dep_one = tracking.AutoTrackable()
root.dep_two = tracking.AutoTrackable()
root.dep_two.dep = tracking.AutoTrackable()
root.dep_three = root.dep_two.dep
imported = self.cycle(root, cycles)
self.assertIs(imported.dep_three, imported.dep_two.dep)
self.assertIsNot(imported.dep_one, imported.dep_two)
def test_variables(self, cycles):
root = tracking.AutoTrackable()
root.v1 = variables.Variable(1., trainable=True)
root.v2 = variables.Variable(2., trainable=False)
imported = self.cycle(root, cycles)
self.assertEqual(imported.v1.numpy(), 1.0)
self.assertTrue(imported.v1.trainable)
self.assertEqual(imported.v2.numpy(), 2.0)
self.assertFalse(imported.v2.trainable)
def test_variables_name(self, cycles):
root = tracking.AutoTrackable()
# Test 2 variables with same name: should work as the checkpoint
# is based on object name and not on variable name.
root.v1 = variables.Variable(1., trainable=True, name="v1")
root.v2 = variables.Variable(2., trainable=False, name="v1")
imported = self.cycle(root, cycles)
self.assertEqual(imported.v1.numpy(), 1.0)
self.assertEqual(imported.v2.numpy(), 2.0)
self.assertEqual(imported.v1.name, root.v1.name)
self.assertEqual(imported.v2.name, root.v2.name)
with variable_scope.variable_scope("foo"):
imported = self.cycle(root, cycles)
self.assertTrue(imported.v1.name.startswith("foo/"))
self.assertTrue(imported.v2.name.startswith("foo/"))
@test_util.run_in_graph_and_eager_modes
def test_capture_variables(self, cycles):
root = tracking.AutoTrackable()
root.weights = variables.Variable(2.)
self.evaluate(root.weights.initializer)
root.f = def_function.function(
lambda x: root.weights * x,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
for _ in range(cycles):
imported = self.cycle(root, 1)
self.evaluate(imported.weights.initializer)
self.assertEqual(4., self.evaluate(imported.f(constant_op.constant(2.))))
self.evaluate(imported.weights.assign(4.0))
self.assertEqual(8., self.evaluate(imported.f(constant_op.constant(2.))))
@test_util.run_in_graph_and_eager_modes
def test_capture_constant(self, cycles):
root = tracking.AutoTrackable()
captured_constant = constant_op.constant(2.)
root.f = def_function.function(
lambda x: captured_constant * x,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
imported = self.cycle(root, cycles)
self.assertEqual(4., self.evaluate(imported.f(constant_op.constant(2.))))
def test_control_outputs(self, cycles):
exported = tracking.AutoTrackable()
exported.v = variables.Variable(1.)
exported.f = def_function.function(
lambda: exported.v.assign(2., name="should_be_control_output"))
exported_graph = exported.f.get_concrete_function().graph
self.assertIn(
exported_graph.get_operation_by_name("should_be_control_output"),
exported_graph.control_outputs)
imported = self.cycle(exported, cycles)
# Calling get_concrete_function wraps in a second call operation; we want to
# inspect the original function body for the control output; digging into
# graph.as_graph_def() and its FunctionDefLibrary is another option.
imported_concrete, = imported.f._concrete_functions
imported_graph = imported_concrete.graph
self.assertIn(
imported_graph.get_operation_by_name("should_be_control_output"),
imported_graph.control_outputs)
def _make_asset(self, contents):
filename = tempfile.mktemp(prefix=self.get_temp_dir())
with open(filename, "w") as f:
f.write(contents)
return filename
@test_util.run_in_graph_and_eager_modes
def test_assets(self, cycles):
file1 = self._make_asset("contents 1")
file2 = self._make_asset("contents 2")
root = tracking.AutoTrackable()
root.asset1 = tracking.TrackableAsset(file1)
root.asset2 = tracking.TrackableAsset(file2)
save_dir = os.path.join(self.get_temp_dir(), "save_dir")
save.save(root, save_dir)
file_io.delete_file(file1)
file_io.delete_file(file2)
load_dir = os.path.join(self.get_temp_dir(), "load_dir")
file_io.rename(save_dir, load_dir)
imported = load.load(load_dir)
with open(self.evaluate(imported.asset1.asset_path), "r") as f:
self.assertEqual("contents 1", f.read())
with open(self.evaluate(imported.asset2.asset_path), "r") as f:
self.assertEqual("contents 2", f.read())
def test_capture_assets(self, cycles):
root = tracking.AutoTrackable()
root.vocab = tracking.TrackableAsset(self._make_asset("contents"))
root.f = def_function.function(
lambda: root.vocab.asset_path,
input_signature=[])
imported = self.cycle(root, cycles)
original_output = root.f().numpy()
imported_output = imported.f().numpy()
self.assertNotEqual(original_output, imported_output)
with open(imported_output, "r") as f:
self.assertEqual("contents", f.read())
def test_capture_assets_in_graph(self, cycles):
root = tracking.AutoTrackable()
root.vocab = tracking.TrackableAsset(self._make_asset("contents"))
root.f = def_function.function(
lambda: root.vocab.asset_path,
input_signature=[])
original_output = root.f().numpy()
if cycles > 1:
root = self.cycle(root, cycles - 1)
path = tempfile.mkdtemp(prefix=self.get_temp_dir())
save.save(root, path)
with ops.Graph().as_default():
imported = load.load(path)
imported_tensor = imported.f()
with monitored_session.MonitoredSession() as sess:
imported_output = sess.run(imported_tensor)
self.assertNotEqual(original_output, imported_output)
with open(imported_output, "r") as f:
self.assertEqual("contents", f.read())
def test_dedup_assets(self, cycles):
vocab = self._make_asset("contents")
root = tracking.AutoTrackable()
root.asset1 = tracking.TrackableAsset(vocab)
root.asset2 = tracking.TrackableAsset(vocab)
imported = self.cycle(root, cycles)
self.assertEqual(imported.asset1.asset_path.numpy(),
imported.asset2.asset_path.numpy())
def test_implicit_input_signature(self, cycles):
@def_function.function
def func(x):
return 2 * x
root = tracking.AutoTrackable()
root.f = func
# Add two traces.
root.f(constant_op.constant(1.))
root.f(constant_op.constant(1))
imported = self.cycle(root, cycles)
self.assertEqual(4., imported.f(constant_op.constant(2.)).numpy())
self.assertEqual(14, imported.f(constant_op.constant(7)).numpy())
def test_explicit_input_signature(self, cycles):
@def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
def func(x):
return 2 * x
root = tracking.AutoTrackable()
root.f = func
imported = self.cycle(root, cycles)
self.assertEqual(4., imported.f(constant_op.constant(2.0)).numpy())
def test_explicit_save_signature(self, cycles):
@def_function.function
def func(x):
return 2 * x
root = tracking.AutoTrackable()
root.f = func
imported = self.cycle(
root, cycles, {
"f":
root.f.get_concrete_function(
tensor_spec.TensorSpec(None, dtypes.float32))
})
self.assertEqual(4., imported.f(constant_op.constant(2.0)).numpy())
def test_nested_functions(self, cycles):
f = def_function.function(
lambda x: x*2.0,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
g = def_function.function(
lambda x: f(x) + 1.0,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
root = tracking.AutoTrackable()
root.g = g
imported = self.cycle(root, cycles)
imported.g(constant_op.constant([1.0]))
def test_function_with_default_bool_input(self, cycles):
def func(x, training=False):
if training:
return 2 * x
else:
return 7
root = tracking.AutoTrackable()
root.f = def_function.function(func)
self.assertEqual(20, root.f(constant_op.constant(10), True).numpy())
self.assertEqual(7, root.f(constant_op.constant(1)).numpy())
self.assertEqual(2, root.f(constant_op.constant(1), True).numpy())
imported = self.cycle(root, cycles)
self.assertEqual(4, imported.f(constant_op.constant(2), True).numpy())
self.assertEqual(7, imported.f(constant_op.constant(2)).numpy())
def test_function_with_default_none_input(self, cycles):
def func(x, dtype=None):
if dtype:
return array_ops.zeros(shape=x.shape, dtype=dtype)
else:
return array_ops.zeros(shape=x.shape, dtype=dtypes.float32)
root = tracking.AutoTrackable()
root.f = def_function.function(func)
self.assertAllEqual([0.0, 0.0, 0.0],
root.f(constant_op.constant([1, 2, 3])).numpy())
self.assertAllEqual([0.0, 0.0, 0.0],
root.f(constant_op.constant([1.0, 2.0, 3.0])).numpy())
self.assertAllEqual([0.0, 0.0, 0.0, 0.0],
root.f(constant_op.constant([1, 2, 3, 4])).numpy())
self.assertAllEqual([0, 0, 0],
root.f(
constant_op.constant([1.0, 2.0, 3.0]),
dtype=dtypes.int32).numpy())
concrete_functions = root.f._list_all_concrete_functions_for_serialization() # pylint: disable=protected-access
self.assertEqual(4, len(concrete_functions))
imported = self.cycle(root, cycles)
self.assertAllEqual([0.0, 0.0, 0.0],
imported.f(constant_op.constant([1, 2, 3]),
None).numpy())
self.assertAllEqual([0.0, 0.0, 0.0],
imported.f(constant_op.constant([1.0, 2.0,
3.0])).numpy())
self.assertAllEqual([0.0, 0.0, 0.0, 0.0],
imported.f(constant_op.constant([1, 2, 3, 4])).numpy())
self.assertAllEqual([0, 0, 0],
imported.f(
constant_op.constant([1.0, 2.0, 3.0]),
dtype=dtypes.int32).numpy())
def test_function_no_return(self, cycles):
class TrackableWithOneVariable(tracking.AutoTrackable):
def __init__(self, initial_value=0.0):
super(TrackableWithOneVariable, self).__init__()
self.variable = variables.Variable(initial_value)
@def_function.function
def increase(self, by=1.0):
self.variable.assign_add(by)
obj = TrackableWithOneVariable(5.0)
obj.increase(constant_op.constant(10.0))
self.assertEqual(15.0, obj.variable.numpy())
obj.increase()
self.assertEqual(16.0, obj.variable.numpy())
imported = self.cycle(obj, cycles)
imported.increase(constant_op.constant(10.0))
self.assertEqual(26.0, imported.variable.numpy())
imported.increase(constant_op.constant(1.0))
self.assertEqual(27.0, imported.variable.numpy())
def test_structured_inputs(self, cycles):
def func(x, training=True):
# x is a nested structure, we care about one particular tensor.
_, (a, b) = x
if training:
return 2 * a["a"] + b
else:
return 7
root = tracking.AutoTrackable()
root.f = def_function.function(func)
x = constant_op.constant(10)
y = constant_op.constant(11)
input1 = [6, ({"a": x}, y)]
input2 = [7, ({"a": x}, y)] # Not compatible with input1 signature.
input3 = [6, ({"a": y}, x)] # Compatible with input1 signature.
# Note: by only calling f(input1) before serialization, only inputs with
# matching signature will be valid on the loaded model.
self.assertEqual(31, root.f(input1).numpy())
imported = self.cycle(root, cycles)
with self.assertRaisesRegexp(ValueError,
"Could not find matching function to call"):
imported.f(input2)
self.assertEqual(31, imported.f(input1).numpy())
self.assertEqual(32, imported.f(input3).numpy())
def test_structured_output(self, cycles):
# Use fields with non-alphabetical order
named_tuple_type = collections.namedtuple("NamedTupleHello", ["b", "a"])
def func(input1, input2):
named_tuple = named_tuple_type(a=input1 + input2, b=input1 * input2)
return [named_tuple, input2, {"x": 0.5}]
root = tracking.AutoTrackable()
root.f = def_function.function(func)
result = root.f(constant_op.constant(2), constant_op.constant(3))
self.assertEqual(5, result[0].a.numpy())
self.assertEqual(6, result[0].b.numpy())
self.assertEqual(["b", "a"], list(result[0]._asdict().keys()))
self.assertEqual(3, result[1].numpy())
self.assertEqual(0.5, result[2]["x"].numpy())
imported = self.cycle(root, cycles)
result = imported.f(constant_op.constant(2), constant_op.constant(5))
self.assertEqual(7, result[0].a.numpy())
self.assertEqual(10, result[0].b.numpy())
self.assertEqual(["b", "a"], list(result[0]._asdict().keys()))
self.assertEqual(5, result[1].numpy())
self.assertEqual(0.5, result[2]["x"].numpy())
def test_optimizer(self, cycles):
class _HasOptimizer(module.Module):
def __init__(self):
super(_HasOptimizer, self).__init__()
self.layer = core.Dense(1)
self.optimizer = adam.Adam(0.01)
@def_function.function
def __call__(self, x):
return self.layer(x)
@def_function.function
def train(self, x, y):
with backprop.GradientTape() as tape:
predicted = self(x)
loss = math_ops.reduce_sum(math_ops.abs(y - predicted))
train_vars = self.layer.trainable_variables
grads = tape.gradient(loss, train_vars)
self.optimizer.apply_gradients(zip(grads, train_vars))
root = _HasOptimizer()
train_input = dict(x=constant_op.constant([[1.]]),
y=constant_op.constant([[2.]]))
root.train(**train_input)
imported = self.cycle(root, cycles)
self.assertAllClose(root.optimizer.learning_rate.numpy(),
imported.optimizer.learning_rate.numpy())
self.assertAllClose(root(constant_op.constant([[-0.5]])),
imported(constant_op.constant([[-0.5]])))
root.train(**train_input)
imported.train(**train_input)
self.assertAllClose(root(constant_op.constant([[-0.5]])),
imported(constant_op.constant([[-0.5]])))
def test_positional_arguments(self, cycles):
def func(x, training=False, abc=7.1, defg=7.7):
del abc
if training:
return 2 * x
if defg == 7:
return 6
else:
return 7
root = tracking.AutoTrackable()
root.f = def_function.function(func)
self.assertEqual(20, root.f(constant_op.constant(10), True).numpy())
self.assertEqual(7, root.f(constant_op.constant(1)).numpy())
self.assertEqual(2, root.f(constant_op.constant(1), True).numpy())
self.assertEqual(6, root.f(constant_op.constant(1), defg=7.0).numpy())
imported = self.cycle(root, cycles)
self.assertEqual(4, imported.f(constant_op.constant(2), True).numpy())
self.assertEqual(7, imported.f(constant_op.constant(2)).numpy())
self.assertEqual(6, imported.f(constant_op.constant(1), defg=7.0).numpy())
def test_additional_kwargs(self, cycles):
def func(x, training=False, **options):
del options
if training:
return 2 * x
else:
return 7
root = tracking.AutoTrackable()
root.f = def_function.function(func)
x = constant_op.constant(10)
self.assertEqual(7, root.f(x, learning_rate=0.5, epochs=3).numpy())
imported = self.cycle(root, cycles)
with self.assertRaisesRegexp(ValueError,
"Could not find matching function to call.*"):
imported.f(x, learning_rate=0.5, epochs=4)
self.assertEqual(7, imported.f(x, learning_rate=0.5, epochs=3).numpy())
def test_member_function(self, cycles):
class TrackableWithMember(tracking.AutoTrackable):
def __init__(self):
super(TrackableWithMember, self).__init__()
self._some_value = 20
@def_function.function
def f(self, x, training=False):
if training:
return 2 * x
else:
return 7 + self._some_value
root = TrackableWithMember()
self.assertEqual(20, root.f(constant_op.constant(10), True).numpy())
self.assertEqual(27, root.f(constant_op.constant(1)).numpy())
self.assertEqual(2, root.f(constant_op.constant(1), True).numpy())
imported = self.cycle(root, cycles)
self.assertEqual(4, imported.f(constant_op.constant(2), True).numpy())
self.assertEqual(27, imported.f(constant_op.constant(2)).numpy())
def test_side_effect_listing(self, cycles):
class M(tracking.AutoTrackable):
def __init__(self):
super(M, self).__init__()
self.var = None
@def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
def f(self, x):
if self.var is None:
self.var = variables.Variable(2.)
return x * self.var
m = M()
self.cycle(m, cycles)
self.assertEqual(4.0, m.f(constant_op.constant(2.0)).numpy())
def test_basic_backprop(self, cycles):
weight = variables.Variable(1., trainable=True)
bias = variables.Variable(0., trainable=True)
g = def_function.function(
lambda x: x*weight + bias,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
root = tracking.AutoTrackable()
root.weight = weight
root.bias = bias
root.g = g
imported = self.cycle(root, cycles)
with backprop.GradientTape() as t:
x = constant_op.constant([3.5])
loss = imported.g(x)
grad = t.gradient(loss, [imported.weight, imported.bias])
self.assertAllClose(grad, [3.5, 1.0])
def test_nested_backprop(self, cycles):
weight = variables.Variable(1., trainable=True)
bias = variables.Variable(0., trainable=True)
# Note: this function gets called from other function defs via a
# "PartitionedCall" op node.
@def_function.function(input_signature=[
tensor_spec.TensorSpec(None, dtypes.float32),
tensor_spec.TensorSpec(None, dtypes.float32)])
def mul(x, y):
return x * y
# Note: this function gets called from other function defs via a
# "StatefulPartitionedCall" op node.
@def_function.function(input_signature=[
tensor_spec.TensorSpec(None, dtypes.float32)])
def f(x):
return mul(weight.read_value(), x)
@def_function.function(input_signature=[
tensor_spec.TensorSpec(None, dtypes.float32)])
def g(x):
return f(x) + bias,
@def_function.function(input_signature=[
tensor_spec.TensorSpec(None, dtypes.float32)])
def h(x):
return g(x) + bias,
root = tracking.AutoTrackable()
root.weight = weight
root.bias = bias
root.g = h
imported = self.cycle(root, cycles)
with backprop.GradientTape() as t:
x = constant_op.constant([3.5])
loss = imported.g(x)
grad = t.gradient(loss, [imported.weight, imported.bias])
self.assertAllClose(grad, [3.5, 2.0])
def test_callable(self, cycles):
class M1(tracking.AutoTrackable):
@def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
def __call__(self, x):
return x
root = tracking.AutoTrackable()
root.m1 = M1()
root.m2 = tracking.AutoTrackable()
root.m2.__call__ = def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])(
lambda x: x*3.0)
imported = self.cycle(root, cycles)
x = constant_op.constant(1.0)
self.assertTrue(callable(imported.m1))
self.assertAllEqual(root.m1(x), imported.m1(x))
# Note: `root.m2` was not callable since `__call__` attribute was set
# into the instance and not on the class. But after a serialization cycle
# that starts to work.
self.assertTrue(callable(imported.m2))
self.assertAllEqual(root.m2.__call__(x), imported.m2(x))
# Verify that user objects without `__call__` attribute are not callable.
self.assertFalse(callable(imported))
def test_chain_callable(self, cycles):
func = def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])(
lambda x: x*3.0)
root = tracking.AutoTrackable()
root.__call__ = tracking.AutoTrackable()
root.__call__.__call__ = tracking.AutoTrackable()
root.__call__.__call__.__call__ = func
imported = self.cycle(root, cycles)
self.assertTrue(callable(imported))
x = constant_op.constant(1.0)
self.assertAllEqual(imported(x).numpy(), 3.0)
def test_load_in_graph_mode(self, cycles):
root = tracking.AutoTrackable()
root.v1 = variables.Variable(1.)
root.v2 = variables.Variable(2.)
root.f = def_function.function(
lambda x: root.v2 * x,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
if cycles > 1:
root = self.cycle(root, cycles - 1)
path = tempfile.mkdtemp(prefix=self.get_temp_dir())
save.save(root, path)
with ops.Graph().as_default():
imported = load.load(path)
var_v1 = imported.v1
output = imported.f(constant_op.constant(2.))
with monitored_session.MonitoredSession() as sess:
self.assertEqual(1.0, sess.run(var_v1))
self.assertEqual(4.0, sess.run(output))
def test_load_in_func_graph(self, cycles):
root = tracking.AutoTrackable()
root.v1 = variables.Variable(1.)
root.v2 = variables.Variable(2.)
root.f = def_function.function(
lambda x: root.v2 * x,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
if cycles > 1:
root = self.cycle(root, cycles - 1)
path = tempfile.mkdtemp(prefix=self.get_temp_dir())
save.save(root, path)
closure = tracking.AutoTrackable()
@def_function.function
def func(x):
if not hasattr(closure, "model"):
closure.model = load.load(path)
return closure.model.f(x)
inputs = constant_op.constant(2.)
self.assertEqual(4.0, func(inputs).numpy())
def test_soft_matching(self, cycles):
@def_function.function(
input_signature=[tensor_spec.TensorSpec([None], dtypes.int32)])
def func(x):
return 2 * x
root = tracking.AutoTrackable()
root.f = func
self.assertAllEqual([2], root.f(constant_op.constant([1])).numpy())
self.assertAllEqual([2, 4], root.f(constant_op.constant([1, 2])).numpy())
concrete_functions = root.f._list_all_concrete_functions_for_serialization() # pylint: disable=protected-access
self.assertEqual(1, len(concrete_functions))
imported = self.cycle(root, cycles)
with self.assertRaisesRegexp(ValueError, "Python inputs incompatible"):
# We cannot call the function with a constant of shape ().
imported.f(constant_op.constant(2)).numpy()
# TODO(vbardiovsky): When classes are revived with input_signatures, we
# should also check that the calls below are not generating any more
# concrete functions.
self.assertAllEqual([2, 4, 6, 8],
imported.f(constant_op.constant([1, 2, 3, 4])).numpy())
self.assertAllEqual([2, 4, 6],
imported.f(constant_op.constant([1, 2, 3])).numpy())
def test_get_concrete_function(self, cycles):
@def_function.function
def func(x, training=False):
if training:
return 2 * x
else:
return 3 * x
func.get_concrete_function(
tensor_spec.TensorSpec([None], dtypes.int32), True)
func.get_concrete_function(tensor_spec.TensorSpec([None], dtypes.float32))
root = tracking.AutoTrackable()
root.f = func
imported = self.cycle(root, cycles)
concrete = imported.f.get_concrete_function(
training=True, x=tensor_spec.TensorSpec([None], dtypes.int32))
self.assertAllEqual([2, 4, 6, 8],
concrete(x=constant_op.constant([1, 2, 3, 4])).numpy())
with self.assertRaisesRegexp(ValueError,
"Could not find matching function to call"):
imported.f.get_concrete_function(
tensor_spec.TensorSpec([None], dtypes.int32))
imported.f.get_concrete_function(
tensor_spec.TensorSpec([None], dtypes.int32), True)
def test_concrete_function(self, cycles):
@def_function.function(
input_signature=[tensor_spec.TensorSpec([None], dtypes.int32)])
def func(x):
return 2 * x
root = tracking.AutoTrackable()
root.f = func.get_concrete_function()
self.assertAllEqual([2], root.f(constant_op.constant([1])).numpy())
self.assertAllEqual([2, 4], root.f(constant_op.constant([1, 2])).numpy())
# TODO(andresp): Fix exporting of loaded concrete functions as signatures.
imported = self.cycle(root, cycles, signatures={})
self.assertAllEqual([2, 4, 6, 8],
imported.f(constant_op.constant([1, 2, 3, 4])).numpy())
self.assertAllEqual([2, 4, 6],
imported.f(constant_op.constant([1, 2, 3])).numpy())
def test_concrete_function_arg_names(self, cycles):
@def_function.function(
input_signature=[tensor_spec.TensorSpec([None], dtypes.int32)])
def func(x):
return 2 * x
root = tracking.AutoTrackable()
root.f = func.get_concrete_function()
self.assertAllEqual([2], root.f(constant_op.constant([1])).numpy())
# TODO(andresp): Fix exporting of loaded concrete functions as signatures.
imported = self.cycle(root, cycles, signatures={})
self.assertAllEqual([2, 4, 6],
imported.f(x=constant_op.constant([1, 2, 3])).numpy())
def test_concrete_function_no_signature(self, cycles):
@def_function.function
def func(x):
return 2 * x
root = tracking.AutoTrackable()
root.f = func.get_concrete_function(constant_op.constant([1]))
self.assertAllEqual([4], root.f(constant_op.constant([2])).numpy())
# TODO(andresp): Fix exporting of loaded concrete functions as signatures.
imported = self.cycle(root, cycles, signatures={})
self.assertAllEqual([6],
imported.f(constant_op.constant([3])).numpy())
def test_concrete_function_backprop(self, cycles):
@def_function.function(
input_signature=[tensor_spec.TensorSpec([None], dtypes.float32)])
def func(x):
return x ** 2.
root = tracking.AutoTrackable()
root.f = func.get_concrete_function()
def _compute_gradient(function):
with backprop.GradientTape() as tape:
inp = constant_op.constant(1.)
tape.watch(inp)
output = function(inp)
return tape.gradient(output, inp)
self.assertEqual(2., _compute_gradient(root.f).numpy())
# TODO(andresp): Fix exporting of loaded concrete functions as signatures.
imported = self.cycle(root, cycles, signatures={})
self.assertEqual(2., _compute_gradient(imported.f).numpy())
def test_revived_concrete_function_kwargs(self, cycles):
@def_function.function
def func(x, y):
return x * (y + 1.)
root = tracking.AutoTrackable()
root.f = func.get_concrete_function(
tensor_spec.TensorSpec([], dtypes.float32),
tensor_spec.TensorSpec([], dtypes.float32))
self.assertEqual(8., root.f(y=constant_op.constant(3.),
x=constant_op.constant(2.)).numpy())
# TODO(andresp): Fix exporting of loaded concrete functions as signatures.
imported = self.cycle(root, cycles, signatures={})
self.assertEqual(8., imported.f(y=constant_op.constant(3.),
x=constant_op.constant(2.)).numpy())
def test_revived_concrete_function_tensorspec_kwargs(self, cycles):
@def_function.function
def func(*args):
x, y = args
return x * (y + 1.)
root = tracking.AutoTrackable()
root.f = func.get_concrete_function(
tensor_spec.TensorSpec([], dtypes.float32, name="x"),
tensor_spec.TensorSpec([], dtypes.float32, name="y"))
self.assertEqual(8., root.f(y=constant_op.constant(3.),
x=constant_op.constant(2.)).numpy())
imported = self.cycle(root, cycles, signatures={})
self.assertEqual(8., imported.f(y=constant_op.constant(3.),
x=constant_op.constant(2.)).numpy())
def test_concrete_function_variable_argument(self, cycles):
# TODO(allenl): Fix variables in input signatures.
self.skipTest("Need to fix encoding of variables in inputs signatures")
capture = variables.Variable(0)
@def_function.function
def func(v):
v.assign_add(1)
capture.assign_sub(1)
vsave = variables.Variable(1)
root = tracking.AutoTrackable()
root.f = func.get_concrete_function(vsave)
root.capture = capture
self.assertEqual(1, vsave.numpy())
root.f(vsave)
self.assertEqual(2, vsave.numpy())
self.assertEqual(-1, capture.numpy())
imported = self.cycle(root, cycles)
vload = variables.Variable(1)
imported.f(vload)
self.assertEqual(2, vload.numpy())
imported.f(v=vload)
self.assertEqual(3, vload.numpy())
self.assertEqual(-3, imported.capture.numpy())
self.assertEqual(-1, capture.numpy())
def test_function_and_component(self, cycles):
@def_function.function
def func(v):
return v + 1
root = tracking.AutoTrackable()
root.func = func
root.concrete_func = func.get_concrete_function(
tensor_spec.TensorSpec(None, dtypes.int32))
one = constant_op.constant(1)
self.assertEqual(2, root.func(one).numpy())
self.assertEqual(2, root.concrete_func(one).numpy())
imported = self.cycle(root, cycles)
self.assertEqual(2, imported.func(one).numpy())
self.assertEqual(2, imported.concrete_func(one).numpy())
def test_dict(self, cycles):
root = tracking.AutoTrackable()
root.variables = dict(a=variables.Variable(1.))
root.variables["b"] = variables.Variable(2.)
root.variables["c"] = 1
root.funcs = dict(
a=def_function.function(lambda: constant_op.constant(100.)))
root.funcs["conc"] = root.funcs["a"].get_concrete_function()
imported = self.cycle(root, cycles)
self.assertEqual(1., imported.variables["a"].numpy())
self.assertEqual(2., imported.variables["b"].numpy())
self.assertEqual(set(["a", "b"]), set(imported.variables.keys()))
self.assertEqual(100., imported.funcs["a"]().numpy())
self.assertEqual(100., imported.funcs["conc"]().numpy())
def test_list(self, cycles):
root = tracking.AutoTrackable()
root.variables = [variables.Variable(1.)]
root.variables.append(1)
root.variables.append(variables.Variable(3.))
imported = self.cycle(root, cycles)
self.assertEqual(1., imported.variables[0].numpy())
self.assertEqual(3., imported.variables[2].numpy())
self.assertIs(None, imported.variables[1])
self.assertEqual(3, len(imported.variables))
def test_functions_list(self, cycles):
root = tracking.AutoTrackable()
v1 = variables.Variable(1.)
root.losses = [def_function.function(lambda: math_ops.reduce_sum(v1 ** 2))]
root.variables = [v1]
@def_function.function
def _v2_loss():
if len(root.variables) == 1:
v2 = variables.Variable(2.)
root.variables.append(v2)
return math_ops.reduce_sum(root.variables[1] ** 2)
root.losses.append(_v2_loss)
self.assertAllClose([1., 4.], [loss() for loss in root.losses])
imported = self.cycle(root, cycles)
self.assertAllClose([1., 4.], [loss() for loss in imported.losses])
imported.variables[0].assign(3.)
imported.variables[1].assign(4.)
self.assertAllClose([9., 16.], [loss() for loss in imported.losses])
def test_captured_constant(self, cycles):
const = array_ops.zeros([100])
root = tracking.AutoTrackable()
root.f = def_function.function(lambda: const + 1.)
root.g = def_function.function(lambda: const + 2.)
self.assertAllClose(array_ops.ones([100]), root.f())
self.assertAllClose(2. * array_ops.ones([100]), root.g())
imported = self.cycle(root, cycles)
self.assertAllClose(array_ops.ones([100]), imported.f())
self.assertAllClose(2. * array_ops.ones([100]), imported.g())
# TODO(b/123408994): Use the public get_concrete_function.
f_concrete = imported.f._list_all_concrete_functions_for_serialization()[0]
g_concrete = imported.g._list_all_concrete_functions_for_serialization()[0]
self.assertLen(f_concrete.captured_inputs, 1)
self.assertLen(g_concrete.captured_inputs, 1)
# We should be using the same captured EagerTensor in both functions, not
# duplicating the constant.
self.assertIs(f_concrete.captured_inputs[0],
g_concrete.captured_inputs[0])
def test_functions_accessed_once(self, cycles):
class Exported(tracking.AutoTrackable):
def __init__(self):
self._counter = 0
@property
def make_func(self):
@def_function.function
def f():
return constant_op.constant(self._counter)
f.get_concrete_function() # force a trace
self._counter += 1
return f
exported = Exported()
imported = self.cycle(exported, cycles)
self.assertEqual(0, imported.make_func().numpy())
self.assertEqual(1, exported.make_func().numpy())
def test_overwritten_signatures_error(self, cycles):
exported = tracking.AutoTrackable()
exported.f = def_function.function(lambda: constant_op.constant(1.))
imported = self.cycle(
exported, cycles,
signatures={"key": exported.f.get_concrete_function()})
self.assertEqual(1., imported.signatures["key"]()["output_0"].numpy())
imported.signatures = {"key1": imported.signatures["key"]}
with self.assertRaisesRegexp(ValueError, "signatures"):
save.save(imported, tempfile.mkdtemp(prefix=self.get_temp_dir()))
def test_signature_loading(self, cycles):
class Exported(tracking.AutoTrackable):
def __init__(self):
self.v = variables.Variable(3.)
@def_function.function
def do(self, x):
return self.v * x
exported = Exported()
imported = self.cycle(
exported,
cycles=1,
signatures=exported.do.get_concrete_function(
tensor_spec.TensorSpec(None, dtypes.float32)))
for _ in range(cycles - 1):
imported = self.cycle(imported, cycles=1, signatures=imported.signatures)
self.assertEqual(["serving_default"], list(imported.signatures.keys()))
imported_function = imported.signatures["serving_default"]
two = constant_op.constant(2.)
self.assertEqual(6., imported_function(x=two)["output_0"].numpy())
imported.v.assign(4.)
self.assertEqual(8., imported_function(x=two)["output_0"].numpy())
self.assertEqual(8., imported_function(two)["output_0"].numpy())
with self.assertRaises(TypeError):
# The signatures mapping is immutable
imported.signatures["random_key"] = 3
def test_multiple_argument_signatures_no_positional(self, cycles):
class Exported(tracking.AutoTrackable):
@def_function.function
def do(self, x, y):
return x + y
exported = Exported()
imported = self.cycle(
exported, cycles=1, signatures=exported.do.get_concrete_function(
tensor_spec.TensorSpec(None, dtypes.float32),
tensor_spec.TensorSpec(None, dtypes.float32)))
for _ in range(cycles - 1):
imported = self.cycle(imported, cycles=1, signatures=imported.signatures)
with self.assertRaises(TypeError):
imported.signatures["serving_default"](
constant_op.constant(1.),
y=constant_op.constant(2.))
self.assertEqual(
{"output_0": 3.},
self.evaluate(imported.signatures["serving_default"](
x=constant_op.constant(1.),
y=constant_op.constant(2.))))
def _make_model_with_tables(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table1_initializer = lookup_ops.KeyValueTensorInitializer(keys, values)
table1 = lookup_ops.HashTable(table1_initializer, default_val)
table2_file = self._make_asset("test\nfoo\nbrain\n")
table2_initializer = lookup_ops.TextFileIdTableInitializer(table2_file)
table2 = lookup_ops.HashTable(table2_initializer, default_val)
def _make_lookup_function(table):
signature = [tensor_spec.TensorSpec(None, dtypes.string)]
return def_function.function(input_signature=signature)(
lambda x: table.lookup(x)) # pylint: disable=unnecessary-lambda
root = tracking.AutoTrackable()
root.table1 = table1
root.lookup1 = _make_lookup_function(table1)
root.table2 = table2
root.lookup2 = _make_lookup_function(table2)
return root
def test_table(self, cycles):
root = self._make_model_with_tables()
imported = self.cycle(root, cycles, signatures={})
keys = constant_op.constant(["brain", "test", "foo", "surgery"])
self.assertAllEqual([0, -1, -1, 2], imported.lookup1(keys).numpy())
self.assertAllEqual([2, 0, 1, -1], imported.lookup2(keys).numpy())
def test_table_collections_untouched_eager(self, cycles):
def _gather_nonempty_collections():
graph = ops.get_default_graph()
gathered = {}
for collection in graph.collections:
collection_contents = graph.get_collection(collection)
if collection_contents:
gathered[collection] = collection_contents
return gathered
root = self._make_model_with_tables()
# Warm up collections to ignore those that don't expand every iteration,
# e.g. the __varscope collection.
self.cycle(root, 1)
original_collections = _gather_nonempty_collections()
self.cycle(root, cycles)
self.assertEqual(original_collections, _gather_nonempty_collections())
def test_table_in_graph(self, cycles):
root = self._make_model_with_tables()
if cycles > 1:
root = self.cycle(root, cycles - 1)
path = tempfile.mkdtemp(prefix=self.get_temp_dir())
save.save(root, path)
imported = self.cycle(root, 1)
with ops.Graph().as_default():
imported = load.load(path)
keys = constant_op.constant(["brain", "test", "foo", "surgery"])
output1 = imported.lookup1(keys)
output2 = imported.lookup2(keys)
with monitored_session.MonitoredSession() as sess:
self.assertAllEqual([0, -1, -1, 2], sess.run(output1))
self.assertAllEqual([2, 0, 1, -1], sess.run(output2))
def test_perserve_argspec(self, cycles):
def f(a, b, c): # pylint: disable=unused-argument
return None
original_fullargspec = tf_inspect.getfullargspec(f)
root = tracking.AutoTrackable()
root.f = def_function.function(f)
imported = self.cycle(root, cycles)
restored_fullargspec = tf_inspect.getfullargspec(imported.f)
self.assertEqual(original_fullargspec, restored_fullargspec)
def test_canonicalize_inputs(self, cycles):
@def_function.function(autograph=False)
def func(a=1, b=2, c=3, training=True):
if training:
return [a, b, c, training]
else:
return [c, b, a, training]
# TODO(b/123501567): Work-around to trigger generic traces of a function
# with extra non tensor args.
signature = 3*[tensor_spec.TensorSpec(None, dtypes.float32)]
@def_function.function(input_signature=signature)
def trigger(a, b, c):
func(a, b, c, True)
func(a, b, c, False)
trigger.get_concrete_function()
root = tracking.AutoTrackable()
root.f = func
root = self.cycle(root, cycles)
self.assertAllEqual(root.f(), [1.0, 2.0, 3.0, True])
self.assertAllEqual(root.f(-1.0, training=False), [3.0, 2.0, -1.0, False])
with self.assertRaisesRegexp(ValueError,
"Could not find matching function"):
root.f(["hello", 1.0])
def test_prefer_specific_trace(self, cycles):
@def_function.function(autograph=False)
def func(a):
if isinstance(a, int):
return a
else:
return a + 1
self.assertAllEqual(2, func(2).numpy())
self.assertAllEqual(3, func(constant_op.constant(2)).numpy())
root = tracking.AutoTrackable()
root.f = func
root = self.cycle(root, cycles)
self.assertAllEqual(2, root.f(2).numpy())
self.assertAllEqual(4, root.f(3).numpy())
self.assertAllEqual(3, root.f(constant_op.constant(2)).numpy())
self.assertAllEqual(4, root.f(constant_op.constant(3)).numpy())
def test_partial(self, cycles):
def f(x, y):
return x + y
func = def_function.function(
functools.partial(f, x=array_ops.zeros([1]), y=array_ops.ones([1])))
root = tracking.AutoTrackable()
root.f = func
self.assertAllEqual(root.f(), [1.0])
root = self.cycle(root, cycles)
self.assertAllEqual(root.f(), [1.0])
def test_partial_with_non_tensor_defaults(self, cycles):
def f(x, y=3):
return x + y
func = def_function.function(functools.partial(f, y=5))
root = tracking.AutoTrackable()
root.f = func
self.assertAllEqual(root.f(1), 6)
root = self.cycle(root, cycles)
self.assertAllEqual(root.f(1), 6)
def test_partial_with_positional(self, cycles):
def f(x, y):
return x + y
func = def_function.function(functools.partial(f, constant_op.constant(5)))
root = tracking.AutoTrackable()
root.f = func
self.assertAllEqual(root.f(1), 6)
root = self.cycle(root, cycles)
self.assertAllEqual(root.f(1), 6)
def test_partial_with_positional_captured_tensors(self, cycles):
def f(x, y):
return x + y
tensor = constant_op.constant(5) + constant_op.constant(7)
func = def_function.function(functools.partial(f, tensor))
root = tracking.AutoTrackable()
root.f = func
self.assertAllEqual(root.f(1), 13)
root = self.cycle(root, cycles)
self.assertAllEqual(root.f(1), 13)
def test_partial_keyword_hiding_default(self, cycles):
def f(x=3, training=True, y=7):
if training:
return x + y
else:
return x + y + 2
func = def_function.function(functools.partial(f, y=6))
root = tracking.AutoTrackable()
root.f = func
self.assertEqual(root.f().numpy(), 9)
self.assertEqual(root.f(training=False).numpy(), 11)
root = self.cycle(root, cycles)
self.assertEqual(root.f().numpy(), 9)
self.assertEqual(root.f(training=False).numpy(), 11)
def test_partial_with_kwargs(self, cycles):
def f(a, b, *args, **kwargs):
args_sum = sum(args)
return a + b + kwargs["some_tensor"] * kwargs["learning_rate"] + args_sum
constant_tensor = constant_op.constant(10)
func = def_function.function(
functools.partial(
f, 7, 1, 2, learning_rate=3, some_tensor=constant_tensor))
root = tracking.AutoTrackable()
root.f = func
self.assertEqual(root.f(constant_op.constant(4)).numpy(), 44)
root = self.cycle(root, cycles)
self.assertEqual(root.f(constant_op.constant(5)).numpy(), 45)
def test_partial_with_passed_fn_as_default(self, cycles):
def f(x, y):
return x(3) + y
def my_func(a):
return 2 * a
func = def_function.function(functools.partial(f, my_func))
root = tracking.AutoTrackable()
root.f = func
self.assertEqual(root.f(constant_op.constant(3)).numpy(), 9)
root = self.cycle(root, cycles)
self.assertEqual(root.f(constant_op.constant(3)).numpy(), 9)
def test_convert_to_input_signature(self, cycles):
@def_function.function(
input_signature=[tensor_spec.TensorSpec([None], dtypes.int32)])
def func(x):
return x
root = tracking.AutoTrackable()
root.f = func
root = self.cycle(root, cycles)
self.assertEqual([2], root.f([2]).numpy())
def test_named_tuple(self, cycles):
class NamedTupleType(collections.namedtuple("NamedTupleType", ["a", "b"])):
pass
@def_function.function
def f(x):
return x.a + x.b
f.get_concrete_function(
NamedTupleType(
a=tensor_spec.TensorSpec(None, dtypes.float32, name="a"),
b=tensor_spec.TensorSpec(None, dtypes.float32, name="b")))
obj = tracking.AutoTrackable()
obj.__call__ = f
imported = self.cycle(obj, cycles)
self.assertAllClose(3.,
imported(NamedTupleType(a=constant_op.constant(1.),
b=constant_op.constant(2.))))
def test_extra_args(self, cycles):
@def_function.function
def f(x):
return math_ops.add(x["a"], 1.)
# Trigger a trace.
f({"a": constant_op.constant(2.0)})
obj = tracking.AutoTrackable()
obj.__call__ = f
imported = self.cycle(obj, cycles)
self.assertEqual(4.0, imported({"a": 3.0}).numpy())
with self.assertRaisesRegexp(ValueError,
"Could not find matching function to call"):
imported({"a": 2.0, "b": 3.0})
def test_shapes_available(self, cycles):
@def_function.function(input_signature=[
tensor_spec.TensorSpec([None, 3], dtypes.int32),
tensor_spec.TensorSpec([None, 2], dtypes.int32)
])
def func(x, y):
return array_ops.concat([x, y], axis=1)
root = tracking.AutoTrackable()
root.f = func
root = self.cycle(root, cycles)
imported_graph = root.f.get_concrete_function().graph
input_x, input_y = imported_graph.inputs
self.assertEqual([None, 3], input_x.shape.as_list())
self.assertEqual([None, 2], input_y.shape.as_list())
output, = imported_graph.outputs
self.assertEqual([None, 5], output.shape.as_list())
signature = root.signatures["serving_default"]
self.assertEqual(
[None, 3], signature.inputs[0].shape.as_list())
self.assertEqual(
[None, 2], signature.inputs[1].shape.as_list())
self.assertEqual(
[None, 5], signature.outputs[0].shape.as_list())
def test_variables_destroyed(self, cycles):
v1 = variables.Variable(1.)
weak_v1 = weakref.ref(v1)
root = util.Checkpoint(v=v1)
root = self.cycle(root, cycles)
del v1
self.assertIsNone(weak_v1())
weak_v2 = weakref.ref(root.v)
del root
self.assertIsNone(weak_v2())
def test_variable_attributes_preserved(self, cycles):
v = variables.Variable(
1.,
trainable=False,
synchronization=variables.VariableSynchronization.NONE,
aggregation=variables.VariableAggregation.ONLY_FIRST_REPLICA)
self.assertEqual(variables.VariableSynchronization.NONE,
v.synchronization)
self.assertEqual(variables.VariableAggregation.ONLY_FIRST_REPLICA,
v.aggregation)
root = util.Checkpoint(v=v)
root = self.cycle(root, cycles)
self.assertEqual(False, root.v.trainable)
self.assertEqual(variables.VariableSynchronization.NONE,
root.v.synchronization)
self.assertEqual(variables.VariableAggregation.ONLY_FIRST_REPLICA,
root.v.aggregation)
def test_captured_dataset(self, cycles):
class HasDataset(module.Module):
def __init__(self):
super(HasDataset, self).__init__()
self.dataset = (
dataset_ops.Dataset.range(5)
.map(lambda x: x ** 2))
@def_function.function
def __call__(self, x):
current_sum = array_ops.zeros([], dtype=dtypes.int64)
for element in self.dataset:
current_sum += x * element
return current_sum
root = HasDataset()
self.assertEqual(3 * (1 + 4 + 9 + 16),
root(constant_op.constant(3, dtype=dtypes.int64)).numpy())
root = self.cycle(root, cycles)
self.assertEqual(3 * (1 + 4 + 9 + 16),
root(constant_op.constant(3, dtype=dtypes.int64)).numpy())
def test_dense_features_layer(self, cycles):
columns = [feature_column_v2.numeric_column("x"),
feature_column_v2.numeric_column("y")]
layer = feature_column_v2.DenseFeatures(columns)
model = sequential.Sequential([layer])
model_input = {"x": constant_op.constant([[1.]]),
"y": constant_op.constant([[2.]])}
self.assertAllClose([[1., 2.]], model.predict(model_input))
loaded = self.cycle(model, cycles)
output, = loaded._default_save_signature(model_input).values()
self.assertAllClose([[1., 2.]], output)
signature_output, = loaded.signatures["serving_default"](
**model_input).values()
self.assertAllClose([[1., 2.]], signature_output)
def test_dense_features_layer_fit(self, cycles):
columns = [feature_column_v2.numeric_column("x")]
model = sequential.Sequential(
[feature_column_v2.DenseFeatures(columns),
core.Dense(1)])
model_input = {"x": constant_op.constant([[1.]])}
model.compile(optimizer="adam", loss="mse")
model.fit(model_input, constant_op.constant([[3.]]))
loaded = self.cycle(model, cycles)
loaded._default_save_signature(model_input)
loaded.signatures["serving_default"](**model_input)
def test_functional_model_with_conv(self, cycles):
x = input_layer.Input(name="x", shape=(None, None, 3), dtype=dtypes.float32)
conved = convolutional.Conv2D(filters=3, kernel_size=3, dilation_rate=2)(x)
model = training_lib.Model([x], conved)
model_input = array_ops.ones((1, 10, 10, 3))
initial_output = model.predict([model_input])
model = self.cycle(model, cycles)
self.assertAllClose(
[initial_output],
list(model.signatures["serving_default"](model_input).values()))
class SingleCycleTests(test.TestCase, parameterized.TestCase):
def test_load_with_tags(self):
root = tracking.AutoTrackable()
path = tempfile.mkdtemp(prefix=self.get_temp_dir())
save.save(root, path)
with self.assertRaises(ValueError):
load.load(path, tags=[tag_constants.EVAL])
load.load(path, tags=[tag_constants.SERVING])
load.load(path, tags=tag_constants.SERVING)
load.load(path, tags=set([tag_constants.SERVING]))
def test_docstring_examples(self):
path = tempfile.mkdtemp(prefix=self.get_temp_dir())
exported = util.Checkpoint(v=variables.Variable(3.))
exported.f = def_function.function(
lambda x: exported.v * x,
input_signature=[
tensor_spec.TensorSpec(shape=None, dtype=dtypes.float32)])
save.save(exported, path)
imported = load.load(path)
self.assertEqual(3., imported.v.numpy())
self.assertEqual(6., imported.f(x=constant_op.constant(2.)).numpy())
save.save(exported, path, exported.f.get_concrete_function())
imported = load.load(path)
f = imported.signatures["serving_default"]
self.assertAllEqual(
[[-3.]],
f(x=constant_op.constant([[-1.]]))["output_0"].numpy())
if __name__ == "__main__":
test.main()
| 35.89314 | 116 | 0.670379 |
5a40d8fb42990117951e68cef034d506d20ff144 | 9,296 | py | Python | sdl2/test/sdl2ext_ebs_test.py | zmarvel/py-sdl2 | fa91007e6eebcbf5838f08cfe8d0b9d5cdf3ab83 | ["CC0-1.0"] | null | null | null | sdl2/test/sdl2ext_ebs_test.py | zmarvel/py-sdl2 | fa91007e6eebcbf5838f08cfe8d0b9d5cdf3ab83 | ["CC0-1.0"] | null | null | null | sdl2/test/sdl2ext_ebs_test.py | zmarvel/py-sdl2 | fa91007e6eebcbf5838f08cfe8d0b9d5cdf3ab83 | ["CC0-1.0"] | null | null | null | import sys
import unittest
from sdl2.ext.ebs import Entity, System, Applicator, World
class Position(object):
def __init__(self, x=0, y=0):
self.x = x
self.y = y
def __eq__(self, other):
return self.x == other.x and self.y == other.y
class Movement(object):
def __init__(self, vx=0, vy=0):
self.vx = vx
self.vy = vy
class PositionEntity(Entity):
def __init__(self, world, x=0, y=0):
self.position = Position(x, y)
class MovingEntity(Entity):
def __init__(self, world, x=0, y=0, vx=0, vy=0):
self.position = Position(x, y)
self.movement = Movement(vx, vy)
class PosEntity(Entity):
def __init__(self, world, x=0, y=0):
self.pos = Position(x, y)
class PositionSystem(System):
def __init__(self):
super(PositionSystem, self).__init__()
self.componenttypes = (Position,)
def process(self, world, components):
for c in components:
c.x += 1
c.y += 1
class MovementApplicator(Applicator):
def __init__(self):
super(MovementApplicator, self).__init__()
self.componenttypes = (Position, Movement)
def process(self, world, componentsets):
for p, m in componentsets:
p.x += m.vx
p.y += m.vy
class SDL2ExtEBSTest(unittest.TestCase):
__tags__ = ["ebs", "sdl2ext"]
def test_Entity(self):
world = World()
world.add_system(PositionSystem())
e = Entity(world)
e2 = Entity(world)
self.assertIsInstance(e, Entity)
self.assertIsInstance(e2, Entity)
self.assertNotEqual(e, e2)
p = PositionEntity(world)
self.assertIsInstance(p, PositionEntity)
self.assertIsInstance(p, Entity)
def test_Entity_id(self):
world = World()
ent1 = Entity(world)
ent2 = Entity(world)
self.assertNotEqual(ent1.id, ent2.id)
def test_Entity_world(self):
world = World()
world2 = World()
ent1 = Entity(world)
ent2 = Entity(world2)
self.assertEqual(ent1.world, world)
self.assertNotEqual(ent1.world, world2)
self.assertEqual(ent2.world, world2)
self.assertNotEqual(ent2.world, world)
self.assertNotEqual(ent1.world, ent2.world)
def test_Entity_delete(self):
w = World()
e1 = Entity(w)
e2 = Entity(w)
self.assertEqual(len(w.entities), 2)
e1.delete()
self.assertEqual(len(w.entities), 1)
e2.delete()
self.assertEqual(len(w.entities), 0)
# The next two should have no effect
e1.delete()
e2.delete()
def test_Entity__inheritance(self):
world = World()
pos1 = PositionEntity(world)
pos2 = PositionEntity(world, 10, 10)
for p in (pos1, pos2):
self.assertIsInstance(p, PositionEntity)
self.assertIsInstance(p, Entity)
self.assertIsInstance(p.position, Position)
def test_Entity__access(self):
world = World()
pos1 = PositionEntity(world)
pos2 = PosEntity(world)
pos1.position.x = 10
# components are _always_ identified by a lower-case class name.
def sx(p, v):
p.pos.x = v
self.assertRaises(AttributeError, sx, pos2, 10)
def test_World(self):
w = World()
self.assertIsInstance(w, World)
def test_World_add_remove_system(self):
world = World()
self.assertIsInstance(world, World)
class SimpleSystem(object):
def __init__(self):
self.componenttypes = (Position,)
def process(self, world, components):
pass
for method in (world.add_system, world.remove_system):
for val in (None, "Test", Position, Entity(world)):
self.assertRaises(ValueError, method, val)
psystem = SimpleSystem()
world.add_system(psystem)
self.assertTrue(len(world.systems) != 0)
self.assertTrue(psystem in world.systems)
world.remove_system(psystem)
self.assertTrue(len(world.systems) == 0)
self.assertTrue(psystem not in world.systems)
psystem = PositionSystem()
world.add_system(psystem)
self.assertTrue(len(world.systems) != 0)
self.assertTrue(psystem in world.systems)
entity = PositionEntity(world)
self.assertIsInstance(entity.position, Position)
world.remove_system(psystem)
self.assertTrue(len(world.systems) == 0)
self.assertTrue(psystem not in world.systems)
# The data must stay intact in the world, even if the processing
# system has been removed.
self.assertIsInstance(entity.position, Position)
def test_World_entities(self):
w = World()
self.assertEqual(len(w.entities), 0)
for x in range(100):
Entity(w)
self.assertEqual(len(w.entities), 100)
def test_World_delete(self):
w = World()
e1 = Entity(w)
e2 = Entity(w)
self.assertEqual(len(w.entities), 2)
w.delete(e1)
self.assertEqual(len(w.entities), 1)
w.delete(e2)
self.assertEqual(len(w.entities), 0)
# The next two should have no effect
w.delete(e1)
w.delete(e2)
def test_World_delete_entities(self):
w = World()
e1 = Entity(w)
e2 = Entity(w)
self.assertEqual(len(w.entities), 2)
w.delete_entities((e1, e2))
self.assertEqual(len(w.entities), 0)
# The next should have no effect
w.delete_entities((e1, e2))
def test_World_get_entities(self):
w = World()
e1 = PositionEntity(w, 1, 1)
e2 = PositionEntity(w, 1, 2)
self.assertEqual(len(w.get_entities(e1.position)), 1)
e2.position.y = 1
self.assertEqual(len(w.get_entities(e1.position)), 2)
def test_System(self):
world = World()
self.assertRaises(ValueError, world.add_system, None)
self.assertRaises(ValueError, world.add_system, 1234)
self.assertRaises(ValueError, world.add_system, "Test")
class ErrornousSystem(System):
def __init__(self):
super(ErrornousSystem, self).__init__()
esystem = ErrornousSystem()
# No component types defined.
self.assertRaises(ValueError, world.add_system, esystem)
self.assertEqual(len(world.systems), 0)
psystem = PositionSystem()
world.add_system(psystem)
self.assertTrue(psystem in world.systems)
def test_System_process(self):
world = World()
class ErrornousSystem(System):
def __init__(self):
super(ErrornousSystem, self).__init__()
self.componenttypes = (Position,)
esystem = ErrornousSystem()
world.add_system(esystem)
for x in range(10):
PositionEntity(world)
self.assertTrue(esystem in world.systems)
self.assertRaises(NotImplementedError, world.process)
world2 = World()
psystem = PositionSystem()
world2.add_system(psystem)
for x in range(10):
PositionEntity(world2)
self.assertTrue(psystem in world2.systems)
world2.process()
for c in world2.components[Position].values():
self.assertEqual(c.x, 1)
self.assertEqual(c.y, 1)
world2.process()
for c in world2.components[Position].values():
self.assertEqual(c.x, 2)
self.assertEqual(c.y, 2)
def test_Applicator(self):
world = World()
class ErrornousApplicator(Applicator):
def __init__(self):
super(ErrornousApplicator, self).__init__()
eapplicator = ErrornousApplicator()
# No component types defined.
self.assertRaises(ValueError, world.add_system, eapplicator)
self.assertEqual(len(world.systems), 0)
mapplicator = MovementApplicator()
world.add_system(mapplicator)
self.assertTrue(mapplicator in world.systems)
def test_Applicator_process(self):
world = World()
class ErrornousApplicator(Applicator):
def __init__(self):
super(ErrornousApplicator, self).__init__()
self.componenttypes = (Position, Movement)
eapplicator = ErrornousApplicator()
world.add_system(eapplicator)
for x in range(10):
MovingEntity(world)
self.assertTrue(eapplicator in world.systems)
self.assertRaises(NotImplementedError, world.process)
world2 = World()
mapplicator = MovementApplicator()
world2.add_system(mapplicator)
for x in range(10):
MovingEntity(world2, vx=1, vy=1)
self.assertTrue(mapplicator in world2.systems)
world2.process()
for c in world2.components[Position].values():
self.assertEqual(c.x, 1)
self.assertEqual(c.y, 1)
world2.process()
for c in world2.components[Position].values():
self.assertEqual(c.x, 2)
self.assertEqual(c.y, 2)
if __name__ == '__main__':
sys.exit(unittest.main())
| 29.699681 | 72 | 0.604346 |
204120700ceefa1eb4a2cbb2f48575717bde5003 | 3,390 | py | Python | rest_api/settings.py | shafaqshaikh/TODO-API-USING-DRF | 186db02a851e745c7f37f9b4b5e26173b5dd7fa2 | ["MIT"] | null | null | null | rest_api/settings.py | shafaqshaikh/TODO-API-USING-DRF | 186db02a851e745c7f37f9b4b5e26173b5dd7fa2 | ["MIT"] | null | null | null | rest_api/settings.py | shafaqshaikh/TODO-API-USING-DRF | 186db02a851e745c7f37f9b4b5e26173b5dd7fa2 | ["MIT"] | null | null | null | """
Django settings for rest_api project.
Generated by 'django-admin startproject' using Django 3.0.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^6#bjr#dmf15^mtdic75sxd_b*q4hc_4-rew(%5m%nly8^rrr5'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'api',
'frontend',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'rest_api.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, "templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'rest_api.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
'/var/www/static/',
]
| 26.484375 | 92 | 0.666962 |
337d70c29304292792ccb62fc962b1329f1e796c | 674 | py | Python | socialdistribution/manage.py | deasisrj1/CMPUT404-Project-BetterSocial | f5197a757b69f10d0b911a32159f7fc5641fe7bd | ["Apache-2.0"] | 1 | 2022-01-14T04:37:54.000Z | 2022-01-14T04:37:54.000Z | socialdistribution/manage.py | deasisrj1/CMPUT404-Project-BetterSocial | f5197a757b69f10d0b911a32159f7fc5641fe7bd | ["Apache-2.0"] | 88 | 2022-02-19T00:16:44.000Z | 2022-03-29T03:05:08.000Z | socialdistribution/manage.py | CMPUT404-F21T0/CMPUT404-Project-BetterSocial | 04a621915108a434d50e900165cefdb0d4cca45c | ["Apache-2.0"] | 4 | 2021-02-14T15:13:15.000Z | 2021-04-17T06:21:11.000Z | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'socialdistribution.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 29.304348 | 82 | 0.683976 |
7e65afe1adaaa808e28fb8b8ea6ee63919fd1b2e | 519 | py | Python | src/mod_stats_by_aircraft/migrations/0009_fix_captures.py | FGlazov/IL2Stats_GlobalAircraftStatsMod | 9eb60a233e775316d01ad4ba2dcf9db22b58438a | [
"MIT"
] | null | null | null | src/mod_stats_by_aircraft/migrations/0009_fix_captures.py | FGlazov/IL2Stats_GlobalAircraftStatsMod | 9eb60a233e775316d01ad4ba2dcf9db22b58438a | [
"MIT"
] | null | null | null | src/mod_stats_by_aircraft/migrations/0009_fix_captures.py | FGlazov/IL2Stats_GlobalAircraftStatsMod | 9eb60a233e775316d01ad4ba2dcf9db22b58438a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2022-02-13 08:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mod_stats_by_aircraft', '0008_recompute_defensive_ammo_breakdowns'),
]
operations = [
migrations.AddField(
model_name='sortieaugmentation',
name='fixed_captures',
field=models.BooleanField(db_index=True, default=False),
),
]
| 24.714286 | 78 | 0.660886 |
cad22d578a753bdffea30d03aeec7b1bdad27004 | 6,274 | py | Python | Transfer/Simplebaseline/train.py | chakkritte/EEEA-Net | 260c2a5c673a806315fc5b529b9c9112c48ca8ae | [
"Apache-2.0"
] | 3 | 2021-08-30T01:36:52.000Z | 2021-11-05T07:36:28.000Z | Transfer/Simplebaseline/train.py | chakkritte/EEEA-Net | 260c2a5c673a806315fc5b529b9c9112c48ca8ae | [
"Apache-2.0"
] | 1 | 2021-11-29T12:00:56.000Z | 2021-11-30T04:07:28.000Z | Transfer/Simplebaseline/train.py | chakkritte/EEEA-Net | 260c2a5c673a806315fc5b529b9c9112c48ca8ae | [
"Apache-2.0"
] | 2 | 2021-08-17T10:06:59.000Z | 2021-08-30T01:36:57.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import pprint
import shutil
import torch
import torch.nn.parallel
import torch.nn as nn
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torch.utils.data.distributed
import torchvision.transforms as transforms
from lib.core.config import config, update_config, update_dir, get_model_name
from lib.core.loss import JointsMSELoss
from lib.core.function import train, validate
from lib.utils.utils import get_optimizer, save_checkpoint, create_logger
from lib.utils.multadds_params_count import comp_multadds, count_parameters_in_MB
from lib.models import posenet
from lib import dataset
from lib import models
pretrained_paths = {
'pairnas': "PairNAS_CIFAR10_ImageNet_weight.pth",
'darts': 'DARTS_CIFAR10_ImageNet_weight.pth',
'nasnet': 'nasnetamobile-7e03cead.pth',
'mnasnet': 'mnasnet1.0_top1_73.512-f206786ef8.pth',
'mobilenetv2': 'mobilenet_v2-b0353104.pth',
'shufflenetv2': 'shufflenetv2_x1-5666bf0f80.pth',
'moea': 'MOEA_c8.pt',
'moeasota': 'MOEA_SOTA_c8.pt',
'moea16': 'MOEA_c16.pt',
'moeasota12': 'MOEA_SOTA_c12.pt',
'nsgac1': 'net-flops@137',
'nsgac2': 'net-flops@217',
'mobilenetv3': 'mobilenetv3-large-1cd25616.pth',
}
def parse_args():
parser = argparse.ArgumentParser(description='Train keypoints network')
# It can be mobilenetv2, shufflenetv2, mnasnet, darts, pairnas, nasnet
parser.add_argument('--net', default='pairnas', type=str)
parser.add_argument('--dataset_path', default='/raid/huangsh/datasets/MSCOCO2017/')
parser.add_argument('--cfg', default='experiments/coco_256x192_d256x3_adam_lr1e-3.ymal')
parser.add_argument('--model', default='posenet')
parser.add_argument('--gpu', type=str, default='0')
args, rest = parser.parse_known_args()
# update config
update_config(args.cfg)
# training
parser.add_argument('--frequent', help='frequency of logging', default=config.PRINT_FREQ, type=int)
parser.add_argument('--workers', help='num of dataloader workers', type=int, default=8)
args = parser.parse_args()
return args
def reset_config(config, args):
config.DATASET.ROOT = args.dataset_path
config.MODEL.NAME = '{}_{}'.format(args.model, args.net)
if 'coco' in args.cfg:
config.TEST.COCO_BBOX_FILE = '{}/{}'.format(args.dataset_path, config.TEST.COCO_BBOX_FILE)
config.MODEL.PRETRAINED = 'pretrained_models/{}'.format(pretrained_paths[args.net])
config.GPUS = args.gpu
config.WORKERS = args.workers
def main():
args = parse_args()
#os.environ['CUDA_VISIBLE_DEVICES'] = '{}'.format(args.gpu)
reset_config(config, args)
logger, final_output_dir, tb_log_dir = create_logger(config, args.cfg, 'train')
logger.info(pprint.pformat(args))
logger.info(pprint.pformat(config))
# cudnn related setting
cudnn.benchmark = config.CUDNN.BENCHMARK
torch.backends.cudnn.deterministic = config.CUDNN.DETERMINISTIC
torch.backends.cudnn.enabled = config.CUDNN.ENABLED
model = eval('{}.get_pose_net'.format(args.model))(config, is_train=True)
model.eval()
params = count_parameters_in_MB(model)
logger.info("Params = %.2fMB" % params)
mult_adds = comp_multadds(model, input_size=(3, config.MODEL.IMAGE_SIZE[1], config.MODEL.IMAGE_SIZE[0]))
logger.info("Mult-Adds = %.2fMB" % mult_adds)
model = nn.DataParallel(model)
model.train()
model = model.cuda()
# copy model file
# define loss function (criterion) and optimizer
criterion = JointsMSELoss(use_target_weight=config.LOSS.USE_TARGET_WEIGHT).cuda()
optimizer = get_optimizer(config, model)
lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, config.TRAIN.LR_STEP, config.TRAIN.LR_FACTOR)
# Data loading code
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
train_dataset = eval('dataset.'+config.DATASET.DATASET)(config, config.DATASET.ROOT, config.DATASET.TRAIN_SET, True,
transforms.Compose([transforms.ToTensor(), normalize,])
)
valid_dataset = eval('dataset.'+config.DATASET.DATASET)(config, config.DATASET.ROOT, config.DATASET.TEST_SET, False,
transforms.Compose([transforms.ToTensor(), normalize,])
)
train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=config.TRAIN.BATCH_SIZE,
shuffle=config.TRAIN.SHUFFLE, num_workers=config.WORKERS, pin_memory=True
)
valid_loader = torch.utils.data.DataLoader(valid_dataset, batch_size=config.TEST.BATCH_SIZE,
shuffle=False, num_workers=config.WORKERS, pin_memory=True
)
best_perf = 0.0
for epoch in range(config.TRAIN.BEGIN_EPOCH, config.TRAIN.END_EPOCH):
# train for one epoch
train(config, train_loader, model, criterion, optimizer, epoch, final_output_dir, tb_log_dir)
# evaluate on validation set
perf_indicator = validate(config, valid_loader, valid_dataset, model, criterion, final_output_dir, tb_log_dir)
if perf_indicator > best_perf:
best_perf = perf_indicator
best_model = True
else:
best_model = False
logger.info('=> saving checkpoint to {}'.format(final_output_dir))
save_checkpoint({'epoch': epoch + 1, 'model': get_model_name(config), 'state_dict': model.state_dict(),
'perf': perf_indicator, 'optimizer': optimizer.state_dict(),}, best_model, final_output_dir
)
final_model_state_file = os.path.join(final_output_dir, 'final_state.pth.tar')
logger.info('saving final model state to {}'.format(final_model_state_file))
torch.save(model.state_dict(), final_model_state_file)
lr_scheduler.step()
if __name__ == '__main__':
main()
| 42.107383 | 120 | 0.676124 |
9f7bd99eac2cdf574ac320946f136d3bb4f4a15b | 13,336 | py | Python | main/tools/certificates/management/commands/issue_certificate_multi.py | csev/class2go | f9419ae16448d20fc882170f95cfd1c4dc3331ca | [
"Apache-2.0"
] | 2 | 2015-10-31T23:12:52.000Z | 2021-01-19T11:03:00.000Z | main/tools/certificates/management/commands/issue_certificate_multi.py | Andymic/class2go | 45a457e89790cb83942d24ada816357dc91b8fe4 | [
"Apache-2.0"
] | null | null | null | main/tools/certificates/management/commands/issue_certificate_multi.py | Andymic/class2go | 45a457e89790cb83942d24ada816357dc91b8fe4 | [
"Apache-2.0"
] | null | null | null | from collections import defaultdict, namedtuple
import json
import logging
from optparse import make_option
import os
try:
import pdfkit
except ImportError, msg:
pdfkit = False
import pprint
import sys
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from c2g.models import Course, CourseCertificate, CourseStudentScore, UserProfile
import settings
from tools.certificates import tasks as cert_tasks
logger = logging.getLogger(__name__)
GLOBAL_DEBUG = False
def debug_out(s):
sys.stdout.write(s + '\n')
class TemplateCache(object):
"""In-memory cache of certificate template strings loaded off disk."""
def __init__(self):
self.__templates = {}
def get(self, asset_prefix, asset_path, cert_type):
"""Return the string containing the unrendered template"""
# probably not necessary if we use a defaultdict?
if (asset_prefix, asset_path, cert_type) not in self.__templates:
infile_name = 'certificate-' + cert_type + '.html'
if not asset_prefix: asset_prefix = getattr(settings, 'MEDIA_ROOT', '/')
infile_path = os.path.join(asset_prefix, asset_path, infile_name)
template_file = open(infile_path, 'rb')
unrendered_template = template_file.read()
template_file.close()
self.__templates[(asset_prefix, asset_path, cert_type)] = unrendered_template
if GLOBAL_DEBUG: debug_out("Caching template for %s, %s, %s" % (asset_prefix, asset_path, cert_type))
return unrendered_template
else:
return self.__templates[(asset_prefix, asset_path, cert_type)]
class CertificateCache(object):
"""In-memory cache of certificate metadata loaded from the database.
Also, creates certificate entries in the database if they do not exist."""
def __init__(self):
self.__certs = {}
def get(self, course, type_tag):
"""Return the CourseCertificate database entry for this course and type."""
# unnecessary if we use a defaultdict?
# except we'd still need a bit of structure to instantiate missing certs
if (course, type_tag) not in self.__certs:
assets_path = os.path.join(course.prefix, course.suffix, 'certificates', 'assets')
storage_path = os.path.join(course.prefix, course.suffix, 'certificates', 'storage')
(certificate_info, create_status) = CourseCertificate.objects.get_or_create(course=course, assets=assets_path, storage=storage_path, type=type_tag)
self.__certs[(course, type_tag)] = certificate_info
if GLOBAL_DEBUG: debug_out("Caching cert for %s, %s" % (course.handle, type_tag))
return certificate_info
else:
return self.__certs[(course, type_tag)]
class Command(BaseCommand):
args = "<course_handle> <no_cert_file> <cert_conditions_file>"
help = """Statement the students for a specified course handle
Parameter course_handle is, unsurprisingly, the course_handle for the course to
be processed, i.e., 'db--winter2013'.
File no_cert_file is a newline-delimited list of usernames who should never be
given statements (for example, admin users and cheaters). For example, it may
look like:
---begin sample no_cert_file.txt
admin
another-admin
cheater@example.com
BadAlicePoorBob
---end sample no_cert_file.txt
File cert_conditions_file is a json-formatted list of dictionaries, processed
in order for each student in a course. The first dictionary that matches the
student will be used and no subsequent ones. Each dictionary is a set of
certifications (keys) and the tests that determine whether they apply (values).
They are applied with equal weighting, so if several certifications within one
dictionary apply to a user, the user will get all of those certifications. To
make different certifications mutually exclusive, put them in different
dictionaries. The tests portion of a dictionary entry (values) consists of a
list containing lists of exactly 2 items. These two-item sublists are ANDed
together. That is a given certification (dictionary key) will be given to a
student only if every test in the test list (dictionary value) passes. The
format of the 2-item sublists consists of a score tag as used by the Aggregator
and a fractional value indicating what proportion of the available points must
be earned.
To clarify, here are some examples:
This example file has three mutually-distinct conditions:
'no cert', which is implicit
'distinction', which happens when the student has a 75% on exercises
tagged by the aggregator as 'accomplishment' AND has 50%
on exercises tagged by the aggregator as
'challenge-exercises'
---begin first cert_conditions_file example
[{'distinction':[['accomplishment', 0.75], ['challenge-exercises', 0.50]]},{'accomplishment': [['accomplishment', 0.50]]}]
---end first cert_conditions_file example
In this example, 'distinction' and 'accomplishment' can be achieved without
mutual exclusion:
---begin second cert_conditions_file example
[{'distinction':[['accomplishment', 0.75], ['challenge-exercises', 0.50]], 'accomplishment': [['accomplishment', 0.50]]}]
---end second cert_conditions_file example
The PDFKti library has a number of system dependencies which cannot be
installed from pip. Please check the tools/certificates/README_WKHTML.md
and README_SETUP.md for additional notes.
"""
option_list = (
make_option('-s', '--single', dest='single_student', default="", help="Force run on only <single_student>"),
make_option('-P', '--skip-pdf', dest='skip_pdf', action="store_true", default=False, help="Skip PDF generation and attachment"),
make_option('-D', '--debug', dest='DEBUG', action="store_true", default=False, help="Describe everything as it happens"),
) + BaseCommand.option_list
def handle(self, *args, **options):
# Option processing
if len(args) != 3:
raise CommandError("Wrong number of arguments, %d instead of 3" % len(args))
if not pdfkit:
raise CommandError("Can't issue certificates without python library pdfkit installed")
course_handle = args[0].strip()
no_cert_file = args[1].strip()
cert_conditions_file = args[2].strip()
single_student = None
single_student_username = options.get('single_student', '')
if single_student_username:
single_student = User.objects.get(username=single_student_username)
global GLOBAL_DEBUG
if options['DEBUG']:
GLOBAL_DEBUG = True
if GLOBAL_DEBUG: debug_out("Option processing complete, memoizing working objects")
# Working object memoization
if len(course_handle) == 0:
raise CommandError("Bad course handle: '%s'" % course_handle)
if len(no_cert_file) == 0:
raise CommandError("Bad no_cert_file: '%s'" % no_cert_file)
if len(cert_conditions_file) == 0:
raise CommandError("Bad cert_conditions_file: '%s'" % cert_conditions_file)
try:
course = Course.objects.get(handle=course_handle, mode='ready')
except:
raise CommandError("Bad course handle or could not retrieve course '%s'" % course_handle)
if GLOBAL_DEBUG: debug_out("Loaded course metadata for %s" % course.handle)
donotcertify = set()
with open(no_cert_file) as nocertfile:
# See also documented no_cert_file format at EOF
donotcertify = set((username.strip() for username in nocertfile.readlines()))
if GLOBAL_DEBUG: debug_out("Loaded 'do not certify' list %s" % no_cert_file)
with open(cert_conditions_file) as binning_desc:
tmp_str = binning_desc.read()
binning = json.loads(tmp_str)
if GLOBAL_DEBUG: debug_out("Loaded 'certification conditions' file %s" % cert_conditions_file)
def __all_students(course):
debug_counter = 0
for student in course.get_all_students():
debug_counter += 1
if debug_counter % 100 == 0:
print debug_counter
#if GLOBAL_DEBUG and debug_counter % 100 == 0: debug_out(str(debug_counter))
yield student
def __one_student(course):
if GLOBAL_DEBUG: debug_out("Processing single student %s" % single_student.username)
yield single_student
student_generator = __all_students if not single_student_username else __one_student
def __apply_test(test, subtotals_d):
"""A 'test' is a pair like ['scoring tag', percentage_that_passes]"""
# See also documented cert_conditions_file format at EOF
#testscore = subtotals_d[test[0]]
# forces failure on missing key
testscore = subtotals_d.get(test[0], (0, 100))
# score total test multiplier
return testscore[0] >= (testscore[1] * test[1])
templates = TemplateCache()
certificates = CertificateCache()
if GLOBAL_DEBUG: debug_out("Memoization of working objects complete, processing students")
# assign certificates and generate assets
got_certs = defaultdict(int)
for student in student_generator(course):
if student.username in donotcertify:
# log a message and move to the next student
logger.info("class2go statement generation: %s skipped for entry in no_cert_file %s" % (student.username, no_cert_file))
continue
subtotals_d = {}
subtotals = CourseStudentScore.objects.filter(course=course, student=student).values_list('tag', 'score', 'total')
for sub in subtotals:
subtotals_d[sub[0]] = (sub[1], sub[2])
# ok now do the binning for real
earned_certs = set()
for cert_set in binning:
for certificate_type, tests in cert_set.iteritems():
if reduce(lambda x,y: x and y, (__apply_test(test, subtotals_d) for test in tests)):
earned_certs.add(certificate_type)
got_certs[certificate_type] += 1
if earned_certs:
break
if not earned_certs:
got_certs['none'] += 1
# ok now actually assign the cert object and run pdf generation
profile = UserProfile.objects.get(user=student)
for cert in earned_certs:
cert_info = certificates.get(course, cert)
# Attach "platonic" certification to user's profile
profile.certificates.add(cert_info)
profile.save()
if not options['skip_pdf']:
# Fire off worker task to build the pdf and upload it to s3
cert_prefix = ''
templatestr = templates.get(cert_prefix, cert_info.assets, cert_info.type)
context_d = {}
for k,v in subtotals_d.iteritems():
context_d[k.replace('-','_')] = v
celery_job = cert_tasks.makePDF.delay(templatestr, cert_prefix, course, cert_info, student, context_in=context_d)
if GLOBAL_DEBUG: debug_out("Attached PDF for %s at %s" % (student.username, celery_job))
print "Certification process complete. Stats:"
pprint.pprint(got_certs)
#########################################################
# This is an example no_certs_file: it consists of a newline-delimeted list of usernames
# of students who should never receive certificates for this course
#########################################################
#BAD_PERSON_USERNAME
#cheater@example.com
#admin
#########################################################
# This is an example cert_conditions_file with inline comments to help you understand
# how it works
#########################################################
#
#/* This is an ordered list. The first one that has any dictionary value match
# * is the one that will be used. */
#[
# /* This is a dictionary of a set of certification conditions which may be
# * obtained simultaneously. If your various certification conditions are
# * mutually exclusive, then there will be several dictionaries with only one
# * key/value pair each. */
# /* this is a list of clauses which get ANDed together */
# /* aggregator tag name */
# /* % of max */
# {'distinction': [['accomplishment', 0.75], ['challenge-exercises', 0.50]],},
# {'accomplishment': [['accomplishment', 0.50],],},
#]
#
# Except of course you can't have trailing commas or comments or excess
# whitespace in your json, so to be valid you'd have to write it like:
#[{'distinction':[['accomplishment', 0.75], ['challenge-exercises', 0.50]]},{'accomplishment': [['accomplishment', 0.50]]}]
| 48.494545 | 159 | 0.645096 |
cabf3c589066f3600e914199f966db2d8eb5cf10 | 213 | py | Python | day-of-the-week.py | 11aparna91/LeetCodesPython | 317ddd963122e082ced8a6510bd04255d59b6c35 | [
"MIT"
] | 1 | 2021-10-06T00:07:30.000Z | 2021-10-06T00:07:30.000Z | day-of-the-week.py | 11aparna91/LeetCodesPython | 317ddd963122e082ced8a6510bd04255d59b6c35 | [
"MIT"
] | null | null | null | day-of-the-week.py | 11aparna91/LeetCodesPython | 317ddd963122e082ced8a6510bd04255d59b6c35 | [
"MIT"
] | null | null | null | ########################Problem 1185################################
class Solution:
def dayOfTheWeek(self, day: int, month: int, year: int) -> str:
return date(year,month,day).strftime("%A")
| 35.5 | 68 | 0.460094 |
79442e3bf682b78dcf039fdc9f2801e603921ff8 | 872 | py | Python | alipay/aop/api/response/AlipayOpenDesCreateResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/response/AlipayOpenDesCreateResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/response/AlipayOpenDesCreateResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.GavintestNewLeveaOne import GavintestNewLeveaOne
class AlipayOpenDesCreateResponse(AlipayResponse):
def __init__(self):
super(AlipayOpenDesCreateResponse, self).__init__()
self._ces = None
@property
def ces(self):
return self._ces
@ces.setter
def ces(self, value):
if isinstance(value, GavintestNewLeveaOne):
self._ces = value
else:
self._ces = GavintestNewLeveaOne.from_alipay_dict(value)
def parse_response_content(self, response_content):
response = super(AlipayOpenDesCreateResponse, self).parse_response_content(response_content)
if 'ces' in response:
self.ces = response['ces']
| 29.066667 | 100 | 0.698394 |
d3761eab1c5422f9f54a3398f477078906312fe1 | 3,084 | py | Python | pycon2013_socketio/chat/models.py | lukesneeringer/pycon2013-socketio | 6361a5df843aafa514991eb25d75bd4c2514dd68 | [
"BSD-3-Clause"
] | 4 | 2015-01-06T16:55:14.000Z | 2016-09-03T00:18:22.000Z | pycon2013_socketio/chat/models.py | lukesneeringer/pycon2013-socketio | 6361a5df843aafa514991eb25d75bd4c2514dd68 | [
"BSD-3-Clause"
] | null | null | null | pycon2013_socketio/chat/models.py | lukesneeringer/pycon2013-socketio | 6361a5df843aafa514991eb25d75bd4c2514dd68 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import unicode_literals
from django.conf import settings
from django.db import models
from redis import Redis
import json
class Room(models.Model):
"""Model representing a chat room. Chat rooms have a short, human-readable
name, a slug that is their URI, and a topic."""
id = models.SlugField(primary_key=True)
topic = models.CharField(max_length=250)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Meta:
ordering = ('id',)
def __iter__(self):
return {
'slug': self.id,
'topic': self.topic,
}.iteritems()
@property
def redis_key(self):
return 'room_%s' % self.id
class Event(models.Model):
"""Model representing a single event occurring within a chat room."""
room = models.ForeignKey(Room)
user_name = models.CharField(max_length=30, db_index=True)
event_type = models.CharField(max_length=20, choices=(
('statement', 'Statement'),
('user_joined', 'User Joined'),
('user_left', 'User Left'),
('topic_set', 'Topic Set'),
), db_index=True)
message = models.TextField()
created = models.DateTimeField(auto_now_add=True, db_index=True)
modified = models.DateTimeField(auto_now=True)
class Meta:
ordering = ('-created',)
def __iter__(self):
# Most of the time, just send down a rough dictionary representation
# of the object.
answer = {
'room': self.room.id,
'type': self.event_type,
'user': self.user_name,
'message': self.message,
'timestamp': self.created.strftime('%Y-%m-%d %H:%M:%S'),
}
# Edge Case: On topics, I would prefer send down a more descriptive
# message, as well as an extra `topic` key with just the new topic.
if self.event_type == 'topic_set':
answer['topic'] = self.message
answer['message'] = '{user} set the topic to "{topic}".'.format(
topic=self.message,
user=self.user_name,
)
# Okay, done.
return answer.iteritems()
def save(self, *args, **kwargs):
"""Save the event, and publish the event in Redis."""
# If this is a user_joined or user_left event,
# set a consistent message.
if self.event_type == 'user_joined':
self.message = '%s has joined the room.' % self.user_name
if self.event_type == 'user_left':
self.message = '%s has left the room.' % self.user_name
# Perform a standard save.
return_value = super(Event, self).save(*args, **kwargs)
# Create a Redis object.
redis = Redis(
host=settings.REDIS_HOST,
port=int(settings.REDIS_PORT),
db=int(settings.REDIS_DB),
password=settings.REDIS_PASSWORD,
)
# Publish the event in Redis.
redis.publish(self.room.redis_key, json.dumps(dict(self)))
return return_value | 32.463158 | 78 | 0.603437 |
c2b838454719f6dfd730f344dad8efbc72ed04aa | 655 | py | Python | chronologer/__init__.py | dandavison/chronologer | bb97b0b0393beaaf22769a258f1d96b6e07af130 | [
"MIT"
] | 165 | 2019-10-15T07:27:18.000Z | 2022-03-25T16:11:59.000Z | chronologer/__init__.py | dandavison/chronologer | bb97b0b0393beaaf22769a258f1d96b6e07af130 | [
"MIT"
] | 7 | 2019-12-11T16:00:33.000Z | 2021-08-03T09:19:47.000Z | chronologer/__init__.py | dandavison/chronologer | bb97b0b0393beaaf22769a258f1d96b6e07af130 | [
"MIT"
] | 7 | 2020-01-09T11:18:48.000Z | 2022-03-02T16:18:31.000Z | """Usage: chronologer CONFIG_FILE [--dry-run]
Options:
-h --help Show this screen.
--dry-run Just print commands; don't do anything.
"""
from docopt import docopt
from chronologer.benchmark import benchmark
from chronologer.benchmark import has_benchmark
from chronologer.config import config
from chronologer.hyperfine import combine_benchmark_files
from chronologer.vega import write_html
def main():
opt = docopt(__doc__)
config.populate_from_file(opt["CONFIG_FILE"], opt)
for commit in config.get_commits():
if not has_benchmark(commit):
benchmark(commit)
combine_benchmark_files()
write_html()
| 27.291667 | 57 | 0.740458 |
58d276afa64409160b5a0f72083be3d89541dd73 | 22,057 | py | Python | Ars_Magica_5th/arm5_py_integration/__init__.py | Luviants/roll20-character-sheets | 44dcd50f54af14ae0c3dd3d85b1634ee56fe353e | [
"MIT"
] | 1,104 | 2015-01-02T17:02:48.000Z | 2022-03-31T19:32:41.000Z | Ars_Magica_5th/arm5_py_integration/__init__.py | Luviants/roll20-character-sheets | 44dcd50f54af14ae0c3dd3d85b1634ee56fe353e | [
"MIT"
] | 4,560 | 2015-01-04T22:17:13.000Z | 2022-03-31T21:29:56.000Z | Ars_Magica_5th/arm5_py_integration/__init__.py | Luviants/roll20-character-sheets | 44dcd50f54af14ae0c3dd3d85b1634ee56fe353e | [
"MIT"
] | 5,009 | 2015-01-01T16:21:24.000Z | 2022-03-30T12:50:54.000Z | """Module for providing the parts in the template.html file"""
import csv
from pathlib import Path
import markdown
from bs4 import BeautifulSoup as soup
from .helpers import (
CHARACTERISTICS,
FORMS,
TECHNIQUES,
enumerate_helper,
repeat_template,
)
from .translations import translation_attrs, translation_attrs_setup
# Xp helper
def xp(
name: str, *, suffix="_exp", adv_suffix="_advancementExp", tot_suffix="_totalExp"
) -> str:
"""
Generate the HTML for the Xp parts of arts & abilities
"""
return f"""[<input type="text" class="number_3" name="attr_{name}{suffix}" value="0"/>/<input type="text" class="number_3 advance" name="attr_{name}{adv_suffix}" value="0" readonly/>/<input type="text" class="number_3 total" name="attr_{name}{tot_suffix}" value="0" readonly/>]"""
def alert(title: str, text: str, *, level: str = "warning", ID: str = None):
"""
Generate the HTML to display a banner that can be permanently hidden
This is used to inform player of important changes in updates.
Arguments:
text: Main text of the banner
title: Title of the banner
type: On of "warning", "info". The aspect of the banner
ID: optional string ID of this banner, if you need to check if it is
open/closed somewhere. Do NOT use numbers
"""
if not level in ("info", "warning"):
raise ValueError("Level must be among 'info', 'warning'")
if ID is None:
alert_id = alert.numid
alert.numid += 1
else:
alert_id = str(ID)
alert.strid.append(alert_id)
return f"""<input type="hidden" class="alert-hidder" name="attr_alert-{alert_id}" value="0"/>
<div class="alert alert-{level}">
<div>
<h3> {level.title()} - {title}</h3>
{text}
</div>
<label class="fakebutton">
<input type="checkbox" name="attr_alert-{alert_id}" value="1" /> ×
</label>
</div>"""
# python supports attributes on function
# we use that to store the internal global variable used by the function
alert.numid = 0
alert.strid = []
def disable_old_alerts(marker: str):
lines = ",\n ".join(
f'"alert-{i}": 1' for i in list(range(alert.numid)) + alert.strid
)
return f"""setAttrs({{
"{marker}": 1,
{lines}
}}); """
# Add new parts to this dictionary
# parts can be defined in other modules and imported if the generating
# code is long
GLOBALS = {
# makes the module available
"markdown": markdown,
# makes those function available in the HTML
"xp": xp,
"alert": alert,
"disable_old_alerts": disable_old_alerts,
# Makes those values available in the HTML
"translation_attrs": translation_attrs,
"translation_attrs_setup": translation_attrs_setup,
"html_header": "<!-- DO NOT MODIFY !\nThis file is automatically generated from a template. Any change will be overwritten\n-->",
"css_header": "/* DO NOT MODIFY !\nThis file is automatically generated from a tempalte. Any change will be overwritten\n*/",
}
# Personality traits
GLOBALS["personality_trait_rows"] = repeat_template(
"""<tr>
<td><input type="text" class="heading_2" style="width:245px" name="attr_Personality_Trait%%"/></td>
<td><input type="text" class="number_1" style="width:70px;" name="attr_Personality_Trait%%_score"/></td>
<td><div class="flex-container">
<button type="roll" class="button simple-roll" name="roll_personality%%_simple" value="&{template:generic} {{Banner=^{personality} ^{roll}}} {{Label=@{Personality_Trait%%}}} {{Result=[[@{simple-die} + [[@{Personality_Trait%%_Score}]] [@{Personality_Trait%%}] + (?{@{circumstantial_i18n}|0}) [@{circumstances_i18n}] ]]}} "></button>
<button type="roll" class="button stress-roll" name="roll_personality%%_stress" value="&{template:generic} {{Banner=^{personality} ^{roll}}} {{Label=@{Personality_Trait%%}}} {{Result=[[@{stress-die} + [[@{Personality_Trait%%_Score}]] [@{Personality_Trait%%}] + (?{@{circumstantial_i18n}|0}) [@{circumstances_i18n}] ]]}} {{stress=1}} {{botch-button=[@{botch_i18n}!](~@{character_name}|botch)}} {{crit-button=[@{critical_i18n}!](~@{character_name}|critical)}}"></button>
</div></td>
</tr>""",
range(1, 7),
)
# Reputations
GLOBALS["reputation_rows"] = repeat_template(
"""<tr>
<td><input type="text" class="heading_2" name="attr_Reputations%%"/></td>
<td><input type="text" class="heading_2a" name="attr_Reputations%%_type"/></td>
<td><input type="text" class="number_1" style="width:50px;" name="attr_Reputations%%_score"/></td>
<td><div class="flex-container">
<button type="roll" class="button simple-roll" name="roll_reputation%%_simple" value="&{template:generic} {{Banner=^{reputation} ^{roll}}} {{Label=@{Reputations%%}}} {{Result=[[@{simple-die} + [[@{Reputations%%_Score}]] [@{Reputations%%}] + (?{@{circumstantial_i18n}|0}) [@{circumstances_i18n}] ]] }}"></button>
<button type="roll" class="button stress-roll" name="roll_reputation%%_stress" value="&{template:generic} {{Banner=^{reputation} ^{roll}}} {{Label=@{Reputations%%}}} {{Result=[[@{stress-die} + [[@{Reputations%%_Score}]] [@{Reputations%%}] + (?{@{circumstantial_i18n}|0}) [@{circumstances_i18n}]]] }} {{stress=1}} {{botch-button=[@{botch_i18n}!](~@{character_name}|botch)}} {{crit-button=[@{critical_i18n}!](~@{character_name}|critical)}}"></button>
</div></td>
</tr>""",
range(1, 7),
)
# Characteristics definitions
characteristic_roll = "(@{%(Char)s_Score}) [@{%(char)s_i18n}] + (@{wound_total}) [@{wounds_i18n}] + ([[floor(@{Fatigue})]]) [@{fatigue_i18n}] + (?{@{circumstantial_i18n}|0}) [@{circumstances_i18n}]"
GLOBALS["characteristic_rows"] = repeat_template(
"""<tr>
<th data-i18n="%(char)s" >%(Char)s</th>
<td><input type="text" class="heading_2" name="attr_%(Char)s_Description"/></td>
<td><input type="text" class="number_1" name="attr_%(Char)s_Score" value="0"/></td>
<td><input type="text" class="number_1" name="attr_%(Char)s_Aging" value="0"/></td>
<td><div class="flex-container">
<button type="roll" class="button simple-roll" name="roll_%(Char)s_simple" value="&{template:ability} {{name= @{character_name}}} {{label0=^{%(char)s}}} {{banner=@{%(Char)s_Description}}} {{label1=^{score}}} {{result1=@{%(Char)s_Score}}} {{label2=^{characteristic-m}}} {{label2=^{weakness-m}}} {{result2=[[[[floor(@{Fatigue})]][@{fatigue_i18n}] + @{wound_total}[@{wounds_i18n}]]]}} {{label3=^{circumstances-m}}} {{result3=[[(?{@{circumstantial_i18n}|0})]]}} {{result0=[[ @{simple-die} + $characteristic_roll$ ]]}}"></button>
<button type="roll" class="button stress-roll" name="roll_%(Char)s_stress" value="&{template:ability} {{name= @{character_name}}} {{label0=^{%(char)s}}} {{banner=@{%(Char)s_Description}}} {{label1=^{score}}} {{result1=@{%(Char)s_Score}}} {{label2=^{characteristic-m}}} {{label2=^{weakness-m}}} {{result2=[[[[floor(@{Fatigue})]][@{fatigue_i18n}] + @{wound_total}[@{wounds_i18n}]]]}} {{label3=^{circumstances-m}}} {{result3=[[(?{@{circumstantial_i18n}|0})]]}} {{result0=[[ @{stress-die} + $characteristic_roll$ ]]}} {{stress=1}} {{botch-button=[@{botch_i18n}!](~@{character_name}|botch)}} {{crit-button=[@{critical_i18n}!](~@{character_name}|critical)}}"></button>
</div></td>
</tr>""".replace(
"$characteristic_roll$", characteristic_roll
),
CHARACTERISTICS,
str_key="char",
)
# Characteristic options
GLOBALS["characteristic_score_options"] = repeat_template(
"""<option value="@{%(Char)s_Score}" data-i18n="%(char)s" >%(Char)s</option>""",
CHARACTERISTICS,
str_key="char",
)
GLOBALS["characteristic_score_ask"] = (
"?{@{characteristic_i18n}|"
+ "| ".join(
"@{%(char)s_i18n}, @{%(Char)s_Score} [@{%(char)s_i18n}]"
% {"char": char, "Char": char.capitalize()}
for char in CHARACTERISTICS
)
+ "}"
)
GLOBALS["characteristic_name_options"] = repeat_template(
"""<option value="%(Char)s" data-i18n="%(char)s" >%(Char)s</option>""",
CHARACTERISTICS,
str_key="char",
)
GLOBALS["characteristic_name_ask_attr"] = (
"?{@{characteristic_i18n}|"
+ "| ".join(
"@{%(char)s_i18n},@{%(char)s_Score} [@{%(char)s_i18n}]" % {"char": char}
for char in CHARACTERISTICS
)
+ "}"
)
# Abilities
ability_roll_template = "&{template:ability} {{name=@{character_name}}} {{label0=@{Ability_name}}} {{banner=@{Ability_Speciality}}} {{label1=^{rank}}} {{result1= [[ @{Ability_Score} + @{Ability_Puissant} ]]}} {{label2=@{Ability_CharacName}}} {{result2=[[@{sys_at}@{character_name}@{sys_pipe}@{Ability_CharacName}_Score@{sys_rbk}]]}} {{label3=^{weakness-m}}} {{result3=[[ ([[floor(@{Fatigue})]]) [@{fatigue_i18n}] + (@{wound_total}) [@{wounds_i18n}] ]]}} {{label4=^{circumstances-m}}} {{result4=[[(?{@{circumstantial_i18n}|0})]]}} {{result0=%(roll)s}} {{botch-button=[@{botch_i18n}!](~@{character_name}|botch)}} {{crit-button=[@{critical_i18n}!](~@{character_name}|critical)}}"
ability_roll = "[[ %(die)s + (@{Ability_Score} + @{Ability_Puissant}) [@{Ability_name}] + (@{sys_at}@{character_name}@{sys_pipe}@{Ability_CharacName}_Score@{sys_rbk}) [@{sys_at}@{character_name}@{sys_pipe}@{Ability_CharacName}_i18n@{sys_rbk}] + (@{wound_total}) [@{wounds_i18n}] + ([[floor(@{Fatigue})]]) [@{fatigue_i18n}] + (?{@{circumstantial_i18n}|0}) [@{circumstances_i18n}] ]]"
GLOBALS["ability_roll_simple"] = ability_roll_template % {
"roll": ability_roll % {"die": "@{simple-die}"}
}
GLOBALS["ability_roll_stress"] = (
ability_roll_template % {"roll": ability_roll % {"die": "@{stress-die}"}}
) + " {{stress=1}}"
# Technique definitions
GLOBALS["technique_definitions"] = repeat_template(
"""<tr>
<td><input type="text" class="number_3" name="attr_%(Tech)s_Score" value="0"/></td>
<td data-i18n="%(tech)s" >%(Tech)s</td>
<td>"""
+ xp("%(Tech)s")
+ """</td>
<td style="text-align: center"><input type="text" class="number_3 minor" name="attr_%(Tech)s_Puissant" value="0"/></td>
</tr>""",
TECHNIQUES,
str_key="tech",
)
# Technique options
GLOBALS["technique_score_options"] = repeat_template(
"""<option value="(@{%(Tech)s_Score} + @{%(Tech)s_Puissant}) [@{%(tech)s_i18n}]" data-i18n="%(tech)s" >%(Tech)s</option>""",
TECHNIQUES,
str_key="tech",
)
GLOBALS["technique_score_options_unlabeled"] = repeat_template(
"""<option value="@{%(Tech)s_Score} + @{%(Tech)s_Puissant}" data-i18n="%(tech)s" >%(Tech)s</option>""",
TECHNIQUES,
str_key="tech",
)
GLOBALS["technique_name_options"] = repeat_template(
"""<option value="%(Tech)s" data-i18n="%(tech)s" >%(Tech)s</option>""",
TECHNIQUES,
str_key="tech",
)
GLOBALS["technique_enumerated_options"] = repeat_template(
"""<option value="%(index)s" data-i18n="%(tech)s" >%(Tech)s</option>""",
enumerate_helper(TECHNIQUES, [str.capitalize], start=1),
tuple_keys=("index", "tech", "Tech"),
)
# Form definitions
form_template = (
"""<tr>
<td><input type="text" class="number_3" name="attr_%(Form)s_Score" value="0"/></td>
<td data-i18n="%(form)s" >%(Form)s</td>
<td>"""
+ xp("%(Form)s")
+ """</td>
<td style="text-align: center"><input type="text" class="number_3 minor" name="attr_%(Form)s_Puissant" value="0"/></td>
</tr>"""
)
GLOBALS["form_definitions_1"] = repeat_template(
form_template, FORMS[:5], str_key="form"
)
GLOBALS["form_definitions_2"] = repeat_template(
form_template, FORMS[5:], str_key="form"
)
# Form options
GLOBALS["form_score_options"] = repeat_template(
"""<option value="(@{%(Form)s_Score} + @{%(Form)s_Puissant}) [@{%(form)s_i18n}]" data-i18n="%(form)s" >%(Form)s</option>""",
FORMS,
str_key="form",
)
GLOBALS["form_score_options_unlabeled"] = repeat_template(
"""<option value="@{%(Form)s_Score} + @{%(Form)s_Puissant}" data-i18n="%(form)s" >%(Form)s</option>""",
FORMS,
str_key="form",
)
GLOBALS["form_name_options"] = repeat_template(
"""<option value="%(Form)s" data-i18n="%(form)s" >%(Form)s</option>""",
FORMS,
str_key="form",
)
GLOBALS["form_enumerated_options"] = repeat_template(
"""<option value="%(index)s" data-i18n="%(form)s" >%(Form)s</option>""",
enumerate_helper(FORMS, [str.capitalize], start=1),
tuple_keys=("index", "form", "Form"),
)
# Casting rolls
## Magic tab
spontaneous_roll_template = "&{template:arcane} {{label0=^{spontaneous} ^{casting}}} {{result0=%(roll)s}} {{label1=^{aura}}} {{result1=@{aura}}} {{label2=^{weakness-m}}} {{result2=[[ @{wound_total}[@{wounds_i18n}] + [[floor(@{fatigue})]][@{fatigue_i18n}] ]]}} {{label3=^{circumstances-m}}} {{result3=?{@{modifiers_i18n}|0}}} {{botch-button=[@{botch_i18n}!](~@{character_name}|botch)}} {{crit-button=[@{critical_i18n}!](~@{character_name}|critical-spontaneous)}}"
spontaneous_roll = "[[(%(die)s + @{Spontaneous1_Technique} + @{Spontaneous1_Form} + ([[@{Spontaneous1_Focus}]]) [@{focus_i18n}] + (@{gestures}) + (@{words}) + (@{Stamina_Score}) [@{stamina_i18n}] + (@{aura}) [@{aura_i18n}] + ([[floor(@{Fatigue})]]) [@{fatigue_i18n}] + (@{wound_total}) [@{wounds_i18n}] + (?{@{modifiers_i18n}|0}) [@{modifiers_i18n}] )/2 ]]"
GLOBALS["spontaneous_roll_simple"] = spontaneous_roll_template % {
"roll": spontaneous_roll % {"die": "@{simple-die}"}
}
GLOBALS["spontaneous_roll_stress"] = (
spontaneous_roll_template % {"roll": spontaneous_roll % {"die": "@{stress-die}"}}
) + " {{stress=1}}"
ceremonial_roll_template = "&{template:arcane} {{label0=^{ceremonial} ^{casting}}} {{result0= %(roll)s }} {{label1=^{aura}}} {{result1=@{aura}}} {{label2=^{weakness-m}}} {{result2=[[@{wound_total}[@{wounds_i18n}] + [[floor(@{fatigue})]][@{fatigue_i18n}] ]]}} {{label3=^{circumstances-m}}} {{result3=?{@{modifiers_i18n}|0}}} {{botch-button=[@{botch_i18n}!](~@{character_name}|botch)}} {{crit-button=[@{critical_i18n}!](~@{character_name}|critical-spontaneous)}}"
ceremonial_roll = "[[(%(die)s+ @{Ceremonial_Technique} + @{Ceremonial_Form} + ([[@{Ceremonial_Focus}]]) [@{focus_i18n}] + (@{gestures}) + (@{words}) + (@{Stamina_Score}) [@{stamina_i18n}] + (@{aura}) [@{aura_i18n}] + ([[floor(@{Fatigue})]]) [@{fatigue_i18n}] + (@{wound_total}) [@{wounds_i18n}] + (@{Ceremonial_Artes_Lib}) [@{artes_i18n}] + (@{Ceremonial_Philos}) [@{philos_i18n}] + (?{@{modifiers_i18n}|0}) [@{modifiers_i18n}] )/2 ]]"
GLOBALS["ceremonial_roll_simple"] = ceremonial_roll_template % {
"roll": ceremonial_roll % {"die": "@{simple-die}"}
}
GLOBALS["ceremonial_roll_stress"] = (
ceremonial_roll_template % {"roll": ceremonial_roll % {"die": "@{stress-die}"}}
) + " {{stress=1}}"
formulaic_roll_template = "&{template:arcane} {{label0=^{formulaic} ^{casting}}} {{result0= %(roll)s }} {{label1=^{aura}}} {{result1=@{aura}}} {{label2=^{weakness-m}}} {{result2=[[@{wound_total}[@{wounds_i18n}] + [[floor(@{fatigue})]][@{fatigue_i18n}] ]]}} {{label3=^{circumstances-m}}} {{result3=?{@{modifiers_i18n}|0}}} {{botch-button=[@{botch_i18n}!](~@{character_name}|botch)}} {{crit-button=[@{critical_i18n}!](~@{character_name}|critical)}}"
formulaic_roll = "[[%(die)s + @{Formulaic_Technique} + @{Formulaic_Form} + ([[@{Formulaic_Focus}]]) [@{focus_i18n}] + (@{gestures}) + (@{words}) + (@{Stamina_Score}) [@{stamina_i18n}] + (@{aura}) [@{aura_i18n}] + ([[floor(@{Fatigue})]]) [@{fatigue_i18n}] + (@{wound_total}) [@{wounds_i18n}] + (?{@{modifiers_i18n}|0}) [@{modifiers_i18n}] ]]"
GLOBALS["formulaic_roll_simple"] = formulaic_roll_template % {
"roll": formulaic_roll % {"die": "@{simple-die}"}
}
GLOBALS["formulaic_roll_stress"] = (
formulaic_roll_template % {"roll": formulaic_roll % {"die": "@{stress-die}"}}
) + " {{stress=1}}"
ritual_roll_template = "&{template:arcane} {{label0=^{ritual} ^{casting}}} {{result0= %(roll)s }} {{label1=^{aura}}} {{result1=@{aura}}} {{label2=^{weakness-m}}} {{result2=[[ @{wound_total}[@{wounds_i18n}] + [[floor(@{fatigue})]][@{fatigue_i18n}] ]]}} {{label3=^{circumstances-m}}} {{result3=?{@{modifiers_i18n}|0}}} {{botch-button=[@{botch_i18n}!](~@{character_name}|botch)}} {{crit-button=[@{critical_i18n}!](~@{character_name}|critical)}}"
ritual_roll = "[[%(die)s + @{Ritual_Technique} + @{Ritual_Form} + ([[@{Ritual_Focus}]]) [@{focus_i18n}] + (@{Stamina_Score}) [@{stamina_i18n}] + (@{aura}) [@{aura_i18n}] + (@{Ritual_Artes_Lib}) [@{artes_i18n}] + (@{Ritual_Philos}) [@{philos_i18n}] + (@{wound_total}) [@{wounds_i18n}] + ([[floor(@{fatigue})]]) [@{fatigue_i18n}] + (?{@{modifiers_i18n}|0}) [@{modifiers_i18n}] ]]"
GLOBALS["ritual_roll_simple"] = ritual_roll_template % {
"roll": ritual_roll % {"die": "@{simple-die}"}
}
GLOBALS["ritual_roll_stress"] = (
ritual_roll_template % {"roll": ritual_roll % {"die": "@{stress-die}"}}
) + " {{stress=1}}"
## Spells
# Deferred attribute access to get the spell's technique value
spell_tech_value = "(@{sys_at}@{character_name}@{sys_pipe}@{spell_tech_name}_Score@{sys_rbk} + @{sys_at}@{character_name}@{sys_pipe}@{spell_tech_name}_Puissant@{sys_rbk}) [@{sys_at}@{character_name}@{sys_pipe}@{spell_tech_name}_i18n@{sys_rbk}]"
spell_form_value = "(@{sys_at}@{character_name}@{sys_pipe}@{spell_form_name}_Score@{sys_rbk} + @{sys_at}@{character_name}@{sys_pipe}@{spell_form_name}_Puissant@{sys_rbk}) [@{sys_at}@{character_name}@{sys_pipe}@{spell_form_name}_i18n@{sys_rbk}]"
# Export the deferred attribute access for use in the HTML since the focus depends on them
GLOBALS["spell_tech_value"] = spell_tech_value
GLOBALS["spell_form_value"] = spell_form_value
spell_roll_template = "&{template:spell} {{spell= @{spell_name}}} {{character= @{character_name} }} {{sigil=@{sigil}}} {{roll= %(roll)s }} {{range= @{spell_range} }} {{duration= @{spell_duration} }} {{target= @{spell_target} }} {{effect= @{spell_note} }} {{mastery= @{spell_note-2} }} {{Technique= @{sys_at}@{character_name}@{sys_pipe}@{spell_tech_name}_i18n@{sys_rbk} }} {{Form= @{sys_at}@{character_name}@{sys_pipe}@{spell_form_name}_i18n@{sys_rbk} }} {{Level= @{spell_level} }} {{botch-button=[@{botch_i18n}!](~@{character_name}|botch)}} {{crit-button=[@{critical_i18n}!](~@{character_name}|critical)}}"
spell_roll = (
"[[%(die)s + (@{Stamina_Score}) [@{stamina_i18n}] + "
+ spell_tech_value
+ " + "
+ spell_form_value
+ "+ ([[@{spell_Focus}]]) [@{focus_i18n}] + (@{spell_bonus}) [@{bonus_i18n}] + (@{gestures}) + (@{words}) + (@{aura}) [@{aura_i18n}] + ([[floor(@{Fatigue})]]) [@{fatigue_i18n}] + (@{wound_total}) [@{wounds_i18n}] + (?{@{modifiers_i18n}|0}) [@{modifiers_i18n}] ]]"
)
GLOBALS["spell_roll_simple"] = spell_roll_template % {
"roll": spell_roll % {"die": "@{simple-die}"}
}
GLOBALS["spell_roll_stress"] = (
spell_roll_template % {"roll": spell_roll % {"die": "@{stress-die}"}}
) + " {{stress=1}}"
# Botch formula
GLOBALS["botch_separate"] = (
"&{template:botch} {{roll= "
+ (
"?{@{botch_num_i18n} | "
+ "|".join(
f"{n} {'Die' if n==1 else 'Dice'}," + " ".join(["[[1d10cf10cs0]]"] * n)
for n in range(1, 9)
)
+ "}"
)
+ " }} {{type=Grouped}}"
)
# Fatigue
add_fatigue_lvl_num = 10
GLOBALS["fatigue_levels_options"] = repeat_template(
"""<option value="%%">%%</option>""", range(0, add_fatigue_lvl_num + 1)
)
GLOBALS["additional_fatigue_levels"] = repeat_template(
"""<tr class="addfatigue-%(num)s">
<td><input type="radio" class="radio_1" name="attr_Fatigue" value="%(value)s"><span></span></td>
<td style="text-align:center;">0</td>
<td>2 min.</td>
<td data-i18n="winded" >Winded</td>
</tr>""",
[(str(i), str(i / 1000)) for i in range(1, add_fatigue_lvl_num + 1)],
tuple_keys=("num", "value"),
)
GLOBALS["fatigue_level_css"] = "\n".join(
(
# IF the fatigue selector is not on a value for which the level is visible
"".join(
':not(.sheet-fatigue-proxy[value="%s"])' % val
for val in range(lvl, add_fatigue_lvl_num + 1)
)
# THEN hide the level
+ (" + table tr.sheet-addfatigue-%s" % lvl)
+ " {\n display: none;\n}"
)
for lvl in range(1, add_fatigue_lvl_num + 1)
)
# Documentation
with open(Path(__file__).parents[1] / "documentation.md") as f:
html = markdown.markdown("".join(f))
html = soup(html, "html.parser")
for i in range(1, 10):
for tag in html.find_all(f"h{i}"):
tag.attrs["class"] = tag.get("class", "") + " heading_label"
GLOBALS["documentation"] = html.prettify()
# Rolltemplate
## Custom rolltemplate colors
with open(Path(__file__).parent / "css_colors.csv", newline="") as f:
reader = csv.DictReader(f)
css_rules = []
for color_def in reader:
# Base CSS rules
lines_header = [
f".sheet-rolltemplate-custom .sheet-crt-container.sheet-crt-color-{color_def['color']} {{",
f" --header-bg-color: {color_def['hex']};",
]
lines_rolls = [
f".sheet-rolltemplate-custom .sheet-crt-container.sheet-crt-rlcolor-{color_def['color']} .inlinerollresult {{",
f" --roll-bg-color: {color_def['hex']};",
]
lines_buttons = [
f".sheet-rolltemplate-custom .sheet-crt-container.sheet-crt-btcolor-{color_def['color']} a {{",
f" --button-bg-color: {color_def['hex']};",
]
# Adapt text color to background color
hex = color_def["hex"].lstrip("#")
r, g, b = tuple(int(hex[2 * i : 2 * i + 2], 16) / 255 for i in range(3))
# Assuming sRGB -> Luma
# may need fixing, color spaces are confusing
luma = 0.2126 * r + 0.7152 * g + 0.0722 * b
if luma > 0.5: # arbitrary threshold
# switch to black text if luma is high enough
lines_header.append(" --header-text-color: #000;")
lines_buttons.append(" --button-text-color: #000;")
if luma < 0.5:
lines_rolls.append(" --roll-text-color: #FFF;")
# Build the rules
for lines in (lines_header, lines_rolls, lines_buttons):
lines.append("}")
css_rules.append("\n".join(lines))
GLOBALS["custom_rt_color_css"] = "\n".join(css_rules)
| 52.767943 | 676 | 0.628689 |
7b8ddfbee5bf86617d753c10ff1d70b3075a3ade | 6,338 | py | Python | legate/core/partition.py | magnatelee/legate.core | 3975601f1cba08a4618e412146715c1027bba0dc | [
"Apache-2.0"
] | 116 | 2021-04-12T18:06:51.000Z | 2022-03-28T07:24:46.000Z | legate/core/partition.py | magnatelee/legate.core | 3975601f1cba08a4618e412146715c1027bba0dc | [
"Apache-2.0"
] | 55 | 2021-04-21T10:04:55.000Z | 2022-03-31T01:38:51.000Z | legate/core/partition.py | magnatelee/legate.core | 3975601f1cba08a4618e412146715c1027bba0dc | [
"Apache-2.0"
] | 20 | 2021-04-17T08:06:15.000Z | 2022-02-22T20:10:36.000Z | # Copyright 2021 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from enum import IntEnum, unique
from .launcher import Broadcast, Partition
from .legion import (
IndexPartition,
PartitionByRestriction,
Rect,
Transform,
legion,
)
from .shape import Shape
@unique
class Restriction(IntEnum):
RESTRICTED = -2
AVOIDED = -1
UNRESTRICTED = 1
class PartitionBase(object):
pass
class Replicate(PartitionBase):
@property
def color_shape(self):
return None
@property
def requirement(self):
return Broadcast
def is_complete_for(self, extents, offsets):
return True
def is_disjoint_for(self, launch_domain):
return launch_domain is None
def __hash__(self):
return hash(self.__class__)
def __eq__(self, other):
return isinstance(other, Replicate)
def __str__(self):
return "Replicate"
def __repr__(self):
return str(self)
def needs_delinearization(self, launch_ndim):
return False
def satisfies_restriction(self, restrictions):
return True
def translate(self, offset):
return self
def construct(self, region, complete=False):
return None
REPLICATE = Replicate()
class Interval(object):
def __init__(self, lo, extent):
self._lo = lo
self._hi = lo + extent
def overlaps(self, other):
return not (other._hi <= self._lo or self._hi <= other._lo)
class Tiling(PartitionBase):
def __init__(self, runtime, tile_shape, color_shape, offset=None):
assert len(tile_shape) == len(color_shape)
if offset is None:
offset = Shape((0,) * len(tile_shape))
self._runtime = runtime
self._tile_shape = tile_shape
self._color_shape = color_shape
self._offset = offset
self._hash = None
def __eq__(self, other):
return (
isinstance(other, Tiling)
and self._tile_shape == other._tile_shape
and self._color_shape == other._color_shape
and self._offset == other._offset
)
@property
def runtime(self):
return self._runtime
@property
def tile_shape(self):
return self._tile_shape
@property
def color_shape(self):
return self._color_shape
@property
def requirement(self):
return Partition
@property
def offset(self):
return self._offset
def __hash__(self):
if self._hash is None:
self._hash = hash(
(
self.__class__,
self._tile_shape,
self._color_shape,
self._offset,
)
)
return self._hash
def __str__(self):
return (
f"Tiling(tile:{self._tile_shape}, "
f"color:{self._color_shape}, "
f"offset:{self._offset})"
)
def __repr__(self):
return str(self)
def needs_delinearization(self, launch_ndim):
return launch_ndim != self._color_shape.ndim
def satisfies_restriction(self, restrictions):
for dim, restriction in enumerate(restrictions):
if (
restriction == Restriction.RESTRICTED
and self.color_shape[dim] > 1
):
return False
return True
def is_complete_for(self, extents, offsets):
my_lo = self._offset
my_hi = self._offset + self.tile_shape * self.color_shape
return my_lo <= offsets and offsets + extents <= my_hi
def is_disjoint_for(self, launch_domain):
return launch_domain.get_volume() <= self.color_shape.volume()
def has_color(self, color):
return color >= 0 and color < self._color_shape
def get_subregion_size(self, extents, color):
lo = self._tile_shape * color + self._offset
hi = self._tile_shape * (color + 1) + self._offset
lo = Shape(max(0, coord) for coord in lo)
hi = Shape(min(max, coord) for (max, coord) in zip(extents, hi))
return Shape(hi - lo)
def get_subregion_offsets(self, color):
return self._tile_shape * color + self._offset
def translate(self, offset):
return Tiling(
self._runtime,
self._tile_shape,
self._color_shape,
self._offset + offset,
)
def construct(self, region, complete=False):
index_space = region.index_space
index_partition = self._runtime.find_partition(index_space, self)
if index_partition is None:
tile_shape = self._tile_shape
transform = Transform(tile_shape.ndim, tile_shape.ndim)
for idx, size in enumerate(tile_shape):
transform.trans[idx, idx] = size
lo = Shape((0,) * tile_shape.ndim) + self._offset
hi = self._tile_shape - 1 + self._offset
extent = Rect(hi, lo, exclusive=False)
color_space = self._runtime.find_or_create_index_space(
self.color_shape
)
functor = PartitionByRestriction(transform, extent)
if complete:
kind = legion.LEGION_DISJOINT_COMPLETE_KIND
else:
kind = legion.LEGION_DISJOINT_INCOMPLETE_KIND
index_partition = IndexPartition(
self._runtime.legion_context,
self._runtime.legion_runtime,
index_space,
color_space,
functor,
kind=kind,
keep=True, # export this partition functor to other libraries
)
self._runtime.record_partition(index_space, self, index_partition)
return region.get_child(index_partition)
| 27.798246 | 78 | 0.612969 |
1dce11b66aadd3845d28f37f4ec9ca0a66535699 | 988 | py | Python | pyclinic/models/swagger2.py | ElSnoMan/pyclinic | 4ced06490a5369eb8806f2dbfa297ea17ca9dad2 | [
"MIT"
] | 3 | 2021-08-05T20:51:22.000Z | 2021-08-12T22:56:12.000Z | pyclinic/models/swagger2.py | ElSnoMan/pyclinic | 4ced06490a5369eb8806f2dbfa297ea17ca9dad2 | [
"MIT"
] | 29 | 2021-08-03T20:58:03.000Z | 2021-09-18T03:02:41.000Z | pyclinic/models/swagger2.py | ElSnoMan/pyclinic | 4ced06490a5369eb8806f2dbfa297ea17ca9dad2 | [
"MIT"
] | 1 | 2021-08-13T22:54:45.000Z | 2021-08-13T22:54:45.000Z | from typing import List, Dict, Optional
from pydantic import BaseModel, Field
class SwaggerInfo(BaseModel):
title: str
description: Optional[str]
version: str
class SwaggerMethod(BaseModel):
responses: Dict[str, Dict[str, str]]
summary: str
description: Optional[str]
parameters: Optional[List[Dict]]
class SwaggerPath(BaseModel):
path: str
method: SwaggerMethod
class SwaggerDefinition(BaseModel):
type: str
properties: Dict
additional_properties: bool = Field(..., alias="additionalProperties")
required: List[str]
description: Optional[str]
class Swagger2Model(BaseModel):
swagger: str = "2.0"
info: SwaggerInfo
host: str # aka BASE_URL like https://api.example.com
base_path: str = Field(..., alias="basePath") # like /api/v3
schemes: List[str] = ["https"]
produces: List[str]
consumes: List[str]
paths: Dict[str, Dict[str, SwaggerPath]]
definitions: Dict[str, SwaggerDefinition]
| 24.097561 | 74 | 0.690283 |
bf2999e1b028d0925a9bb6e428c857829dd2b5db | 2,235 | py | Python | lib/surface/endpoints/services/list.py | bopopescu/Google-Cloud-SDK-1 | c4683bacb2f6192d8a816932e438a0493085469b | [
"Apache-2.0"
] | null | null | null | lib/surface/endpoints/services/list.py | bopopescu/Google-Cloud-SDK-1 | c4683bacb2f6192d8a816932e438a0493085469b | [
"Apache-2.0"
] | null | null | null | lib/surface/endpoints/services/list.py | bopopescu/Google-Cloud-SDK-1 | c4683bacb2f6192d8a816932e438a0493085469b | [
"Apache-2.0"
] | 1 | 2020-07-24T20:13:29.000Z | 2020-07-24T20:13:29.000Z | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""service-management list command."""
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.endpoints import services_util
from googlecloudsdk.calliope import base
class List(base.ListCommand):
"""List services for a project.
This command lists the services that are produced by a project.
## EXAMPLES
To list the services the current project produces, run:
$ {command}
"""
_DEFAULT_PAGE_SIZE = 2000
@staticmethod
def Args(parser):
"""Args is called by calliope to gather arguments for this command.
Args:
parser: An argparse parser that you can use to add arguments that go
on the command line after this command. Positional arguments are
allowed.
"""
# Remove unneeded list-related flags from parser
base.URI_FLAG.RemoveFromParser(parser)
parser.display_info.AddFormat("""
table(
serviceName:label=NAME,
serviceConfig.title
)
""")
def Run(self, args):
"""Run 'endpoints list'.
Args:
args: argparse.Namespace, The arguments that this command was invoked
with.
Returns:
The list of managed services for this project.
"""
client = services_util.GetClientInstance()
validated_project = services_util.GetValidatedProject(args.project)
request = services_util.GetProducedListRequest(validated_project)
return list_pager.YieldFromList(
client.services,
request,
limit=args.limit,
batch_size_attribute='pageSize',
batch_size=args.page_size or self._DEFAULT_PAGE_SIZE,
field='services')
| 28.291139 | 75 | 0.705593 |
1ff72ef6ba72e89466cbad73076208509d389347 | 13,901 | py | Python | python/mead/downloader.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | [
"Apache-2.0"
] | null | null | null | python/mead/downloader.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | [
"Apache-2.0"
] | null | null | null | python/mead/downloader.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | [
"Apache-2.0"
] | 3 | 2019-05-27T04:52:21.000Z | 2022-02-15T00:22:53.000Z | from six.moves.urllib.request import urlretrieve
import os
import re
import gzip
import logging
import tarfile
import zipfile
import hashlib
import shutil
from baseline.mime_type import mime_type
from baseline.progress import create_progress_bar
from baseline.utils import export, read_json, write_json
__all__ = []
exporter = export(__all__)
logger = logging.getLogger('mead')
DATA_CACHE_CONF = "data-cache.json"
@exporter
def delete_old_copy(file_name):
if os.path.exists(file_name):
if os.path.isfile(file_name):
os.remove(file_name)
else:
shutil.rmtree(file_name)
return file_name
@exporter
def extract_gzip(file_loc):
temp_file = delete_old_copy("{}.1".format(file_loc))
with gzip.open(file_loc, 'rb') as f_in:
with open(temp_file, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
if mime_type(temp_file) == "application/x-tar":
return extract_tar(temp_file)
else:
shutil.move(temp_file, file_loc)
return file_loc
@exporter
def extract_tar(file_loc):
temp_file = delete_old_copy("{}.1".format(file_loc))
with tarfile.open(file_loc, "r") as tar_ref:
tar_ref.extractall(temp_file)
if len(os.listdir(temp_file)) != 1:
raise RuntimeError("tar extraction unsuccessful")
return os.path.join(temp_file, os.listdir(temp_file)[0])
@exporter
def extract_zip(file_loc):
temp_file = delete_old_copy("{}.1".format(file_loc))
with zipfile.ZipFile(file_loc, "r") as zip_ref:
zip_ref.extractall(temp_file)
return temp_file
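# extractor() fingerprints the downloaded file with sha1, runs the matching extraction helper
# (if any), and stores the result under <cache_dir>/<sha1>; callers record that path in
# data-cache.json so later runs can reuse it.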
@exporter
def extractor(filepath, cache_dir, extractor_func):
with open(filepath, 'rb') as f:
sha1 = hashlib.sha1(f.read()).hexdigest()
logger.info("extracting file..")
path_to_save = filepath if extractor_func is None else extractor_func(filepath)
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
path_to_save_sha1 = os.path.join(cache_dir, sha1)
delete_old_copy(path_to_save_sha1)
shutil.move(path_to_save, path_to_save_sha1)
logger.info("downloaded data saved in {}".format(path_to_save_sha1))
return path_to_save_sha1
@exporter
def web_downloader(url):
# Use a class to simulate the nonlocal keyword in 2.7
class Context: pg = None
def _report_hook(count, block_size, total_size):
if Context.pg is None:
length = int((total_size + block_size - 1) / float(block_size)) if total_size != -1 else 1
Context.pg = create_progress_bar(length)
Context.pg.update()
path_to_save = "/tmp/data.dload-{}".format(os.getpid())
try:
path_to_save, _ = urlretrieve(url, path_to_save, reporthook=_report_hook)
Context.pg.done()
except: # this is too broad but there are too many exceptions to handle separately
raise RuntimeError("failed to download data from [url]: {} [to]: {}".format(url, path_to_save))
return path_to_save
@exporter
def validate_url(url):
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
return re.match(regex, url) is not None
@exporter
def update_cache(key, data_download_cache):
dcache = read_json(os.path.join(data_download_cache, DATA_CACHE_CONF))
if key not in dcache:
return
del dcache[key]
write_json(dcache, os.path.join(data_download_cache, DATA_CACHE_CONF))
def _verify_file(file_loc):
    # dropbox doesn't return a 404 when a file does not exist; it serves an HTML page instead. The actual files are never HTML.
if not os.path.exists(file_loc):
return False
if os.path.isfile(file_loc) and mime_type(file_loc) == "text/html":
return False
return True
@exporter
def is_file_correct(file_loc, data_dcache=None, key=None):
"""check if the file location mentioned in the json file is correct, i.e.,
    exists and not corrupted. This is needed when the direct download link/path for a file
changes and the user is unaware. This is not tracked by sha1 either. If it returns False, delete the corrupted file.
Additionally, if the file location is a URL, i.e. exists in the cache, delete it so that it can be re-downloaded.
Keyword arguments:
file_loc -- location of the file
data_dcache -- data download cache location (default None, for local system file paths)
key -- URL for download (default None, for local system file paths)
"""
if _verify_file(file_loc):
return True
# Some files are prefixes (the datasset.json has `train` and the data has `train.fr` and `train.en`)
dir_name = os.path.dirname(file_loc)
# When we are using this for checking embeddings file_loc is a url so we need this check.
if os.path.exists(dir_name):
files = [os.path.join(dir_name, f) for f in os.listdir(dir_name) if os.path.join(dir_name, f).startswith(file_loc)]
if files and all(_verify_file(f) for f in files):
return True
delete_old_copy(file_loc)
if key is not None: # cache file validation
update_cache(key, data_dcache)
return False
@exporter
def is_dir_correct(dir_loc, dataset_desc, data_dcache, key, ignore_file_check=False):
"""check if the directory extracted from the zip location mentioned in the datasets json file is correct, i.e.,
all files inside exist and are not corrupted. If not, we will update the cache try to re-download them.
Keyword arguments:
dir_loc -- location of the directory
dataset_desc -- to know the individual file locations inside the directory
data_dcache -- data download cache location
key -- URL for download
ignore_file_check --to handle enc_dec datasets, see later.
"""
if not os.path.exists(dir_loc) or not os.path.isdir(dir_loc):
update_cache(key, data_dcache)
return False
if ignore_file_check: # for enc_dec tasks there's no direct downloads
return True
files = [os.path.join(dir_loc, dataset_desc[k]) for k in dataset_desc if k.endswith("_file")]
for f in files:
        if not is_file_correct(f, data_dcache, key):  # arguments follow the (file_loc, data_dcache, key) signature
return False
return True
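# Downloader.ZIPD maps the MIME type reported for a downloaded file to the matching extraction
# helper; file types that are not gzip/zip archives are left untouched.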
@exporter
class Downloader(object):
ZIPD = {'application/gzip': extract_gzip, 'application/zip': extract_zip}
def __init__(self, data_download_cache, cache_ignore):
super(Downloader, self).__init__()
self.cache_ignore = cache_ignore
self.data_download_cache = data_download_cache
def download(self):
pass
@exporter
class SingleFileDownloader(Downloader):
def __init__(self, dataset_file, data_download_cache, cache_ignore=False):
super(SingleFileDownloader, self).__init__(data_download_cache, cache_ignore)
self.dataset_file = dataset_file
self.data_download_cache = data_download_cache
def download(self):
file_loc = self.dataset_file
if is_file_correct(file_loc):
return file_loc
elif validate_url(file_loc): # is it a web URL? check if exists in cache
url = file_loc
dcache_path = os.path.join(self.data_download_cache, DATA_CACHE_CONF)
dcache = read_json(dcache_path)
if url in dcache and is_file_correct(dcache[url], self.data_download_cache, url) and not self.cache_ignore:
logger.info("file for {} found in cache, not downloading".format(url))
return dcache[url]
else: # download the file in the cache, update the json
cache_dir = self.data_download_cache
logger.info("using {} as data/embeddings cache".format(cache_dir))
temp_file = web_downloader(url)
dload_file = extractor(filepath=temp_file, cache_dir=cache_dir,
extractor_func=Downloader.ZIPD.get(mime_type(temp_file), None))
dcache.update({url: dload_file})
write_json(dcache, os.path.join(self.data_download_cache, DATA_CACHE_CONF))
return dload_file
raise RuntimeError("the file [{}] is not in cache and can not be downloaded".format(file_loc))
@exporter
class DataDownloader(Downloader):
def __init__(self, dataset_desc, data_download_cache, enc_dec=False, cache_ignore=False):
super(DataDownloader, self).__init__(data_download_cache, cache_ignore)
self.dataset_desc = dataset_desc
self.data_download_cache = data_download_cache
self.enc_dec = enc_dec
def download(self):
dload_bundle = self.dataset_desc.get("download", None)
if dload_bundle is not None: # download a zip/tar/tar.gz directory, look for train, dev test files inside that.
dcache_path = os.path.join(self.data_download_cache, DATA_CACHE_CONF)
dcache = read_json(dcache_path)
if dload_bundle in dcache and \
is_dir_correct(dcache[dload_bundle], self.dataset_desc, self.data_download_cache, dload_bundle,
self.enc_dec) and not self.cache_ignore:
download_dir = dcache[dload_bundle]
logger.info("files for {} found in cache, not downloading".format(dload_bundle))
return {k: os.path.join(download_dir, self.dataset_desc[k]) for k in self.dataset_desc
if k.endswith("_file")}
else: # try to download the bundle and unzip
if not validate_url(dload_bundle):
raise RuntimeError("can not download from the given url")
else:
cache_dir = self.data_download_cache
temp_file = web_downloader(dload_bundle)
download_dir = extractor(filepath=temp_file, cache_dir=cache_dir,
extractor_func=Downloader.ZIPD.get(mime_type(temp_file), None))
if "sha1" in self.dataset_desc:
if os.path.split(download_dir)[-1] != self.dataset_desc["sha1"]:
raise RuntimeError("The sha1 of the downloaded file does not match with the provided one")
dcache.update({dload_bundle: download_dir})
write_json(dcache, os.path.join(self.data_download_cache, DATA_CACHE_CONF))
return {k: os.path.join(download_dir, self.dataset_desc[k]) for k in self.dataset_desc
if k.endswith("_file")}
else: # we have download links to every file or they exist
if not self.enc_dec:
return {k: SingleFileDownloader(self.dataset_desc[k], self.data_download_cache).download()
for k in self.dataset_desc if k.endswith("_file") and self.dataset_desc[k]}
else:
return {k: self.dataset_desc[k] for k in self.dataset_desc if k.endswith("_file")}
# these files can not be downloaded because there's a post processing on them.
@exporter
class EmbeddingDownloader(Downloader):
def __init__(self, embedding_file, embedding_dsz, embedding_sha1, data_download_cache, cache_ignore=False):
super(EmbeddingDownloader, self).__init__(data_download_cache, cache_ignore)
self.embedding_file = embedding_file
self.embedding_key = embedding_dsz
self.data_download_cache = data_download_cache
self.sha1 = embedding_sha1
@staticmethod
def _get_embedding_file(loc, key):
if os.path.isfile(loc):
logger.info("embedding file location: {}".format(loc))
return loc
else: # This is a directory, return the actual file
files = [x for x in os.listdir(loc) if str(key) in x]
if len(files) == 0:
raise RuntimeError("No embedding file found for the given key [{}]".format(key))
elif len(files) > 1:
logger.info("multiple embedding files found for the given key [{}], choosing {}".format(key, files[0]))
embed_file_loc = os.path.join(loc, files[0])
return embed_file_loc
def download(self):
if is_file_correct(self.embedding_file):
logger.info("embedding file location: {}".format(self.embedding_file))
return self.embedding_file
dcache_path = os.path.join(self.data_download_cache, DATA_CACHE_CONF)
dcache = read_json(dcache_path)
if self.embedding_file in dcache and not self.cache_ignore:
download_loc = dcache[self.embedding_file]
logger.info("files for {} found in cache".format(self.embedding_file))
return self._get_embedding_file(download_loc, self.embedding_key)
else: # try to download the bundle and unzip
url = self.embedding_file
if not validate_url(url):
raise RuntimeError("can not download from the given url")
else:
cache_dir = self.data_download_cache
temp_file = web_downloader(url)
download_loc = extractor(filepath=temp_file, cache_dir=cache_dir,
extractor_func=Downloader.ZIPD.get(mime_type(temp_file), None))
if self.sha1 is not None:
if os.path.split(download_loc)[-1] != self.sha1:
raise RuntimeError("The sha1 of the downloaded file does not match with the provided one")
dcache.update({url: download_loc})
write_json(dcache, os.path.join(self.data_download_cache, DATA_CACHE_CONF))
return self._get_embedding_file(download_loc, self.embedding_key)
| 43.990506 | 123 | 0.657219 |
6756f8f3cfa1127da42c7bb823305802d3233c33 | 2,782 | py | Python | main/utils.py | sirodoht/chaitinschool.org | ecebfc897855ac9752780ce69fbf6207714f9bfe | [
"MIT"
] | 2 | 2022-02-03T14:51:46.000Z | 2022-03-18T18:34:42.000Z | main/utils.py | sirodoht/chaitinschool.org | ecebfc897855ac9752780ce69fbf6207714f9bfe | [
"MIT"
] | null | null | null | main/utils.py | sirodoht/chaitinschool.org | ecebfc897855ac9752780ce69fbf6207714f9bfe | [
"MIT"
] | null | null | null | from datetime import datetime, timedelta
from django.conf import settings
from main import models
def get_protocol():
if settings.DEBUG:
return "http:"
else:
return "https:"
def get_ics(workshop):
begin_timestamp = datetime.strftime(workshop.scheduled_at, "%Y%m%dT%H%M%S")
finish_date = workshop.scheduled_at + timedelta(hours=2)
finish_timestamp = datetime.strftime(finish_date, "%Y%m%dT%H%M%S")
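    # commas are special characters in iCalendar text values (RFC 5545), so escape them in the address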
location_address_escaped = workshop.location_address.replace(",", "\\,")
return f"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:{settings.PROJECT_NAME_SLUG}/ics
CALSCALE:GREGORIAN
BEGIN:VTIMEZONE
TZID:Europe/London
BEGIN:DAYLIGHT
TZNAME:GMT+1
TZOFFSETFROM:+0000
TZOFFSETTO:+0100
DTSTART:19810329T010000
RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=-1SU
END:DAYLIGHT
BEGIN:STANDARD
TZNAME:GMT
TZOFFSETFROM:+0100
TZOFFSETTO:+0000
DTSTART:19961027T020000
RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=-1SU
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
TRANSP:OPAQUE
DTSTAMP:{begin_timestamp}Z
UID:{begin_timestamp}@{settings.PROJECT_URL}
DTSTART;TZID=Europe/London:{begin_timestamp}
DTEND;TZID=Europe/London:{finish_timestamp}
SUMMARY:{settings.PROJECT_NAME}: {workshop.title}
DESCRIPTION:{get_protocol()}//{settings.CANONICAL_HOST}/workshops/{workshop.slug}/
LOCATION:{workshop.location_name}\\, {location_address_escaped}
URL;VALUE=URI:{workshop.location_url}
LAST-MODIFIED:{begin_timestamp}Z
CREATED:{begin_timestamp}Z
END:VEVENT
END:VCALENDAR
"""
def get_email_body_footer(unsubscribe_url):
body_footer = "\n\n"
body_footer += "---\n"
body_footer += "Unsubscribe:\n"
body_footer += unsubscribe_url + "\n"
return body_footer
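# The literal slug "no-ics" acts as a sentinel meaning "send without a calendar invite"; any other
# slug is looked up and the generated .ics event is attached to the email.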
def get_email_attachments(workshop_slug):
"""Return attachments array with ICS event."""
attachments = []
if workshop_slug != "no-ics":
workshop = models.Workshop.objects.get(slug=workshop_slug)
ics_content = get_ics(workshop)
attachments.append(
(
f"{settings.PROJECT_NAME_SLUG}-{workshop.slug}.ics",
ics_content,
"application/octet-stream",
),
)
return attachments
def get_workshop_for_email(workshop):
"""Get string of body of email for a workshop."""
date_str = workshop.scheduled_at.strftime("%a, %B %-d, %Y at %H:%M")
workshop_url = get_protocol() + workshop.get_absolute_url()
return (
"You are attending:\n\n"
+ f"**{workshop.title}**\n"
+ f"{workshop_url}\n\n"
+ f"Location:\n{workshop.location_name}\n"
+ f"{workshop.location_address}\n"
+ f"{workshop.location_url}\n\n"
+ f"On {date_str}\n\n"
+ "See you there!\n\n"
+ "Chatin School\n"
+ f"{get_protocol()}//{settings.CANONICAL_HOST}\n"
)
| 28.10101 | 82 | 0.692308 |
cae200768a686438f17cf2648e6c9b9557c7fc9f | 660 | py | Python | app/manage.py | mohit4/Saga | fa2b9e2b557e8222b2b72028a448a3bec6a85e80 | [
"MIT"
] | null | null | null | app/manage.py | mohit4/Saga | fa2b9e2b557e8222b2b72028a448a3bec6a85e80 | [
"MIT"
] | null | null | null | app/manage.py | mohit4/Saga | fa2b9e2b557e8222b2b72028a448a3bec6a85e80 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'saga.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.695652 | 73 | 0.677273 |
053fdbd6461e04cf5a703162977a4f0746cb7630 | 22,847 | py | Python | twitter_to_sqlite/utils.py | rixx/twitter-to-sqlite | f09d611782a8372cfb002792dfa727325afb4db6 | [
"Apache-2.0"
] | 318 | 2019-09-04T01:54:05.000Z | 2022-03-05T16:45:09.000Z | twitter_to_sqlite/utils.py | rixx/twitter-to-sqlite | f09d611782a8372cfb002792dfa727325afb4db6 | [
"Apache-2.0"
] | 62 | 2019-09-03T21:27:37.000Z | 2022-02-24T18:10:19.000Z | twitter_to_sqlite/utils.py | rixx/twitter-to-sqlite | f09d611782a8372cfb002792dfa727325afb4db6 | [
"Apache-2.0"
] | 17 | 2019-12-28T19:34:12.000Z | 2022-03-05T11:33:14.000Z | import click
import datetime
import html
import json
import pathlib
import re
import sqlite3
import time
import urllib.parse
import zipfile
from dateutil import parser
from requests_oauthlib import OAuth1Session
import sqlite_utils
# Twitter API error codes
RATE_LIMIT_ERROR_CODE = 88
SINCE_ID_TYPES = {
"user": 1,
"home": 2,
"mentions": 3,
"search": 4,
}
COUNT_HISTORY_TYPES = {
"followers": 1,
"friends": 2,
"listed": 3,
# Don't track these - they're uninteresting and really noisy in terms
# of writing new rows to the count_history table:
# "favourites": 4,
# "statuses": 5,
}
source_re = re.compile('<a href="(?P<url>.*?)".*?>(?P<name>.*?)</a>')
class UserDoesNotExist(click.ClickException):
def __init__(self, identifier):
super().__init__("User '{}' does not exist".format(identifier))
def open_database(db_path):
db = sqlite_utils.Database(db_path)
# Only run migrations if this is an existing DB (has tables)
if db.tables:
migrate(db)
return db
def migrate(db):
from twitter_to_sqlite.migrations import MIGRATIONS
if "migrations" not in db.table_names():
db["migrations"].create({"name": str, "applied": str}, pk="name")
applied_migrations = {
m[0] for m in db.conn.execute("select name from migrations").fetchall()
}
for migration in MIGRATIONS:
name = migration.__name__
if name in applied_migrations:
continue
migration(db)
db["migrations"].insert(
{"name": name, "applied": datetime.datetime.utcnow().isoformat()}
)
def session_for_auth(auth):
return OAuth1Session(
client_key=auth["api_key"],
client_secret=auth["api_secret_key"],
resource_owner_key=auth["access_token"],
resource_owner_secret=auth["access_token_secret"],
)
def fetch_user_list_chunks(
session, user_id=None, screen_name=None, sleep=61, noun="followers"
):
cursor = -1
users = []
while cursor:
headers, body = fetch_user_list(session, cursor, user_id, screen_name, noun)
yield body["users"]
cursor = body["next_cursor"]
if not cursor:
break
time.sleep(sleep) # Rate limit = 15 per 15 minutes!
def fetch_user_list(session, cursor, user_id=None, screen_name=None, noun="followers"):
args = user_args(user_id, screen_name)
args.update({"count": 200, "cursor": cursor})
r = session.get(
"https://api.twitter.com/1.1/{}/list.json?".format(noun)
+ urllib.parse.urlencode(args)
)
return r.headers, r.json()
def fetch_lists(db, session, user_id=None, screen_name=None):
lists_url = "https://api.twitter.com/1.1/lists/ownerships.json"
args = user_args(user_id, screen_name)
args["count"] = 1000
fetched_lists = []
# For the moment we don't paginate
for list_row in session.get(lists_url, params=args).json()["lists"]:
del list_row["id_str"]
user = list_row.pop("user")
save_users(db, [user])
list_row["user"] = user["id"]
list_row["created_at"] = parser.parse(list_row["created_at"])
fetched_lists.append(list_row)
db["lists"].insert_all(fetched_lists, pk="id", foreign_keys=("user",), replace=True)
return fetched_lists
def get_profile(db, session, user_id=None, screen_name=None):
if not (user_id or screen_name):
profile = session.get(
"https://api.twitter.com/1.1/account/verify_credentials.json"
).json()
else:
args = user_args(user_id, screen_name)
url = "https://api.twitter.com/1.1/users/show.json"
if args:
url += "?" + urllib.parse.urlencode(args)
response = session.get(url)
if response.status_code == 404:
raise UserDoesNotExist(screen_name or user_id)
profile = response.json()
save_users(db, [profile])
return profile
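# fetch_timeline() pages backwards through a timeline endpoint by moving max_id below the smallest
# id seen so far, sleeps between requests, retries on Twitter's rate-limit error (code 88), and
# records the newest id seen in the since_ids table so later runs invoked with --since can resume.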
def fetch_timeline(
session,
url,
db,
args=None,
sleep=1,
stop_after=None,
key=None,
since_id=None,
since=False,
since_type=None,
since_key=None,
):
# See https://developer.twitter.com/en/docs/tweets/timelines/guides/working-with-timelines
if since and since_id:
raise click.ClickException("Use either --since or --since_id, not both")
since_type_id = None
last_since_id = None
if since_type is not None:
assert since_key is not None
since_type_id = SINCE_ID_TYPES[since_type]
# Figure out the last since_id in case we need it
try:
last_since_id = db.conn.execute(
"""
select since_id from since_ids
where type = ? and key = ?
""",
[since_type_id, since_key],
).fetchall()[0][0]
except (IndexError, sqlite3.OperationalError):
pass
if since:
# Load since_id from database
since_id = last_since_id
args = dict(args or {})
args["count"] = 200
if stop_after is not None:
args["count"] = stop_after
if since_id:
args["since_id"] = since_id
args["tweet_mode"] = "extended"
min_seen_id = None
num_rate_limit_errors = 0
while True:
if min_seen_id is not None:
args["max_id"] = min_seen_id - 1
response = session.get(url, params=args)
tweets = response.json()
if "errors" in tweets:
# Was it a rate limit error? If so sleep and try again
if RATE_LIMIT_ERROR_CODE == tweets["errors"][0]["code"]:
num_rate_limit_errors += 1
assert num_rate_limit_errors < 5, "More than 5 rate limit errors"
print(
"Rate limit exceeded - will sleep 15s and try again {}".format(
repr(response.headers)
)
)
time.sleep(15)
continue
else:
raise Exception(str(tweets["errors"]))
if key is not None:
tweets = tweets[key]
if not tweets:
break
for tweet in tweets:
yield tweet
min_seen_id = min(t["id"] for t in tweets)
max_seen_id = max(t["id"] for t in tweets)
if last_since_id is not None:
max_seen_id = max((last_since_id, max_seen_id))
last_since_id = max_seen_id
if since_type_id is not None and since_key is not None:
db["since_ids"].insert(
{
"type": since_type_id,
"key": since_key,
"since_id": max_seen_id,
},
replace=True,
)
if stop_after is not None:
break
time.sleep(sleep)
def fetch_user_timeline(
session,
db,
user_id=None,
screen_name=None,
stop_after=None,
since_id=None,
since=False,
):
args = user_args(user_id, screen_name)
yield from fetch_timeline(
session,
"https://api.twitter.com/1.1/statuses/user_timeline.json",
db,
args,
sleep=1,
stop_after=stop_after,
since_id=since_id,
since_type="user",
since_key="id:{}".format(user_id) if user_id else screen_name,
since=since,
)
def fetch_favorites(session, db, user_id=None, screen_name=None, stop_after=None):
args = user_args(user_id, screen_name)
# Rate limit 75/15 mins = 5/minute = every 12 seconds
sleep = 12
yield from fetch_timeline(
session,
"https://api.twitter.com/1.1/favorites/list.json",
db,
args,
sleep=sleep,
stop_after=stop_after,
)
def user_args(user_id, screen_name):
args = {}
if user_id:
args["user_id"] = user_id
if screen_name:
args["screen_name"] = screen_name
return args
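# expand_entities() swaps the t.co short links in tweet or profile text for the expanded URLs
# supplied in the API's "entities" payload.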
def expand_entities(s, entities):
for _, ents in entities.items():
for ent in ents:
if "url" in ent:
replacement = ent["expanded_url"] or ent["url"]
s = s.replace(ent["url"], replacement)
return s
def transform_user(user):
user["created_at"] = parser.parse(user["created_at"])
if user["description"] and "description" in user.get("entities", {}):
user["description"] = expand_entities(
user["description"], user["entities"]["description"]
)
if user["url"] and "url" in user.get("entities", {}):
user["url"] = expand_entities(user["url"], user["entities"]["url"])
user.pop("entities", None)
user.pop("status", None)
to_remove = [k for k in user if k.endswith("_str")]
for key in to_remove:
del user[key]
def transform_tweet(tweet):
tweet["full_text"] = html.unescape(
expand_entities(tweet["full_text"], tweet.pop("entities"))
)
to_remove = [k for k in tweet if k.endswith("_str")] + [
"quoted_status_id",
"quoted_status_permalink",
]
for key in to_remove:
if key in tweet:
del tweet[key]
tweet["created_at"] = parser.parse(tweet["created_at"]).isoformat()
def ensure_tables(db):
table_names = set(db.table_names())
if "places" not in table_names:
db["places"].create({"id": str}, pk="id")
if "sources" not in table_names:
db["sources"].create({"id": str, "name": str, "url": str}, pk="id")
if "users" not in table_names:
db["users"].create(
{
"id": int,
"screen_name": str,
"name": str,
"description": str,
"location": str,
},
pk="id",
)
db["users"].enable_fts(
["name", "screen_name", "description", "location"], create_triggers=True
)
if "tweets" not in table_names:
db["tweets"].create(
{
"id": int,
"user": int,
"created_at": str,
"full_text": str,
"retweeted_status": int,
"quoted_status": int,
"place": str,
"source": str,
},
pk="id",
foreign_keys=(
("user", "users", "id"),
("place", "places", "id"),
("source", "sources", "id"),
),
)
db["tweets"].enable_fts(["full_text"], create_triggers=True)
db["tweets"].add_foreign_key("retweeted_status", "tweets")
db["tweets"].add_foreign_key("quoted_status", "tweets")
if "following" not in table_names:
db["following"].create(
{"followed_id": int, "follower_id": int, "first_seen": str},
pk=("followed_id", "follower_id"),
foreign_keys=(
("followed_id", "users", "id"),
("follower_id", "users", "id"),
),
)
# Ensure following has indexes
following_indexes = {tuple(i.columns) for i in db["following"].indexes}
if ("followed_id",) not in following_indexes:
db["following"].create_index(["followed_id"])
if ("follower_id",) not in following_indexes:
db["following"].create_index(["follower_id"])
# Tables for tracking --since
if "since_ids" not in table_names:
db["since_id_types"].create(
{
"id": int,
"name": str,
},
pk="id",
)
db["since_id_types"].insert_all(
[{"id": id, "name": name} for name, id in SINCE_ID_TYPES.items()]
)
db["since_ids"].create(
{"type": int, "key": str, "since_id": int},
pk=("type", "key"),
foreign_keys=(("type", "since_id_types", "id"),),
)
# Tables for recording history of user follower counts etc
if "count_history" not in table_names:
db["count_history_types"].create(
{
"id": int,
"name": str,
},
pk="id",
)
db["count_history_types"].insert_all(
[{"id": id, "name": name} for name, id in COUNT_HISTORY_TYPES.items()]
)
db["count_history"].create(
{"type": int, "user": int, "datetime": str, "count": int},
pk=("type", "user", "datetime"),
foreign_keys=(
("type", "count_history_types", "id"),
("user", "users", "id"),
),
)
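# save_tweets() normalises each tweet, recursively saves any quoted/retweeted tweet first (keeping
# only its id on the parent row), upserts the author into users, links media via an m2m table, and
# optionally records which user favorited the tweet.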
def save_tweets(db, tweets, favorited_by=None):
ensure_tables(db)
for tweet in tweets:
transform_tweet(tweet)
user = tweet.pop("user")
transform_user(user)
tweet["user"] = user["id"]
tweet["source"] = extract_and_save_source(db, tweet["source"])
if tweet.get("place"):
db["places"].insert(tweet["place"], pk="id", alter=True, replace=True)
tweet["place"] = tweet["place"]["id"]
# extended_entities contains media
extended_entities = tweet.pop("extended_entities", None)
# Deal with nested retweeted_status / quoted_status
nested = []
for tweet_key in ("quoted_status", "retweeted_status"):
if tweet.get(tweet_key):
nested.append(tweet[tweet_key])
tweet[tweet_key] = tweet[tweet_key]["id"]
if nested:
save_tweets(db, nested)
db["users"].insert(user, pk="id", alter=True, replace=True)
save_user_counts(db, user)
table = db["tweets"].insert(tweet, pk="id", alter=True, replace=True)
if favorited_by is not None:
db["favorited_by"].insert(
{"tweet": tweet["id"], "user": favorited_by},
pk=("user", "tweet"),
foreign_keys=("tweet", "user"),
replace=True,
)
if extended_entities and extended_entities.get("media"):
for media in extended_entities["media"]:
# TODO: Remove this line when .m2m() grows alter=True
db["media"].insert(media, pk="id", alter=True, replace=True)
table.m2m("media", media, pk="id")
def save_users(db, users, followed_id=None, follower_id=None):
assert not (followed_id and follower_id)
ensure_tables(db)
for user in users:
transform_user(user)
db["users"].insert_all(users, pk="id", alter=True, replace=True)
for user in users:
save_user_counts(db, user)
if followed_id or follower_id:
first_seen = datetime.datetime.utcnow().isoformat()
db["following"].insert_all(
(
{
"followed_id": followed_id or user["id"],
"follower_id": follower_id or user["id"],
"first_seen": first_seen,
}
for user in users
),
ignore=True,
)
def fetch_user_batches(session, ids_or_screen_names, use_ids=False, sleep=1):
# Yields lists of up to 70 users (tried 100 but got this error:
# # {'code': 18, 'message': 'Too many terms specified in query.'} )
batches = []
batch = []
for id in ids_or_screen_names:
batch.append(id)
if len(batch) == 70:
batches.append(batch)
batch = []
if batch:
batches.append(batch)
url = "https://api.twitter.com/1.1/users/lookup.json"
for batch in batches:
if use_ids:
args = {"user_id": ",".join(map(str, batch))}
else:
args = {"screen_name": ",".join(batch)}
users = session.get(url, params=args).json()
yield users
time.sleep(sleep)
def fetch_status_batches(session, tweet_ids, sleep=1):
# Yields lists of up to 100 tweets
batches = []
batch = []
for id in tweet_ids:
batch.append(id)
if len(batch) == 100:
batches.append(batch)
batch = []
if batch:
batches.append(batch)
url = "https://api.twitter.com/1.1/statuses/lookup.json"
for batch in batches:
args = {"id": ",".join(map(str, batch)), "tweet_mode": "extended"}
tweets = session.get(url, params=args).json()
yield tweets
time.sleep(sleep)
def resolve_identifiers(db, identifiers, attach, sql):
if sql:
if attach:
for filepath in attach:
if ":" in filepath:
alias, filepath = filepath.split(":", 1)
else:
alias = filepath.split("/")[-1].split(".")[0]
attach_sql = """
ATTACH DATABASE '{}' AS [{}];
""".format(
str(pathlib.Path(filepath).resolve()), alias
)
db.conn.execute(attach_sql)
sql_identifiers = [r[0] for r in db.conn.execute(sql).fetchall()]
else:
sql_identifiers = []
return list(identifiers) + sql_identifiers
def fetch_and_save_list(db, session, identifier, identifier_is_id=False):
show_url = "https://api.twitter.com/1.1/lists/show.json"
args = {}
if identifier_is_id:
args["list_id"] = identifier
else:
screen_name, slug = identifier.split("/")
args.update({"owner_screen_name": screen_name, "slug": slug})
# First fetch the list details
data = session.get(show_url, params=args).json()
list_id = data["id"]
del data["id_str"]
user = data.pop("user")
save_users(db, [user])
data["user"] = user["id"]
data["created_at"] = parser.parse(data["created_at"])
db["lists"].insert(data, pk="id", foreign_keys=("user",), replace=True)
# Now fetch the members
url = "https://api.twitter.com/1.1/lists/members.json"
cursor = -1
while cursor:
args.update({"count": 5000, "cursor": cursor})
body = session.get(url, params=args).json()
users = body["users"]
save_users(db, users)
db["list_members"].insert_all(
({"list": list_id, "user": user["id"]} for user in users),
pk=("list", "user"),
foreign_keys=("list", "user"),
replace=True,
)
cursor = body["next_cursor"]
if not cursor:
break
time.sleep(1) # Rate limit = 900 per 15 minutes
def cursor_paginate(session, url, args, key, page_size=200, sleep=None):
"Execute cursor pagination, yelding 'key' for each page"
args = dict(args)
args["page_size"] = page_size
cursor = -1
while cursor:
args["cursor"] = cursor
r = session.get(url, params=args)
raise_if_error(r)
body = r.json()
yield body[key]
cursor = body["next_cursor"]
if not cursor:
break
if sleep is not None:
time.sleep(sleep)
class TwitterApiError(Exception):
def __init__(self, headers, body):
self.headers = headers
self.body = body
def __repr__(self):
return "{}: {}".format(self.body, self.headers)
def raise_if_error(r):
if "errors" in r.json():
raise TwitterApiError(r.headers, r.json()["errors"])
def stream_filter(session, track=None, follow=None, locations=None, language=None):
session.stream = True
args = {"tweet_mode": "extended"}
for key, value in (
("track", track),
("follow", follow),
("locations", locations),
("language", language),
):
if value is None:
continue
if not isinstance(value, str):
value = ",".join(map(str, value))
args[key] = value
while True:
response = session.post(
"https://stream.twitter.com/1.1/statuses/filter.json", params=args
)
for line in response.iter_lines(chunk_size=10000):
if line.strip().startswith(b"{"):
tweet = json.loads(line)
# Only yield tweet if it has an 'id' and 'created_at'
# - otherwise it's probably a maintenance message, see
# https://developer.twitter.com/en/docs/tweets/filter-realtime/overview/statuses-filter
if "id" in tweet and "created_at" in tweet:
# 'Fix' weird tweets from streaming API
fix_streaming_tweet(tweet)
yield tweet
else:
print(tweet)
time.sleep(1)
def fix_streaming_tweet(tweet):
if "extended_tweet" in tweet:
tweet.update(tweet.pop("extended_tweet"))
if "full_text" not in tweet:
tweet["full_text"] = tweet["text"]
if "retweeted_status" in tweet:
fix_streaming_tweet(tweet["retweeted_status"])
if "quoted_status" in tweet:
fix_streaming_tweet(tweet["quoted_status"])
def user_ids_for_screen_names(db, screen_names):
sql = "select id from users where lower(screen_name) in ({})".format(
", ".join(["?"] * len(screen_names))
)
return [
r[0] for r in db.conn.execute(sql, [s.lower() for s in screen_names]).fetchall()
]
def read_archive_js(filepath):
"Open zip file, return (filename, content) for all .js"
zf = zipfile.ZipFile(filepath)
for zi in zf.filelist:
# Ignore files in a assets dir -- these are for Twitter's archive
# browser thingie -- and only use final filenames since some archives
# appear to put data in a data/ subdir, which can screw up the filename
# -> importer mapping.
if zi.filename.endswith(".js") and not zi.filename.startswith("assets/"):
yield pathlib.Path(zi.filename).name, zf.open(zi.filename).read()
def extract_and_save_source(db, source):
if not source:
return None
m = source_re.match(source)
details = m.groupdict()
return db["sources"].insert(details, hash_id="id", replace=True).last_pk
def save_user_counts(db, user):
for type_name, type_id in COUNT_HISTORY_TYPES.items():
previous_count = None
try:
previous_count = db.conn.execute(
"""
select count from count_history
where type = ? and user = ?
order by datetime desc limit 1
""",
[type_id, user["id"]],
).fetchall()[0][0]
except IndexError:
pass
current_count = user["{}_count".format(type_name)]
if current_count != previous_count:
db["count_history"].insert(
{
"type": type_id,
"user": user["id"],
"datetime": datetime.datetime.utcnow().isoformat().split(".")[0]
+ "+00:00",
"count": current_count,
},
replace=True,
)
| 32.545584 | 103 | 0.562831 |
f223e02a9e8d6e1994a6747f5ffb8ed85f0bf98a | 1,164 | py | Python | docs/ref/urusan.py | aagusti/sp2d | 51122cdbb9f85bee91d08c3dd29fb1f7d1ae3d90 | [
"MIT"
] | null | null | null | docs/ref/urusan.py | aagusti/sp2d | 51122cdbb9f85bee91d08c3dd29fb1f7d1ae3d90 | [
"MIT"
] | null | null | null | docs/ref/urusan.py | aagusti/sp2d | 51122cdbb9f85bee91d08c3dd29fb1f7d1ae3d90 | [
"MIT"
] | null | null | null | from base import *
class Urusan(Base, base):
    __tablename__ = 'urusans'
    __table_args__ = {'extend_existing': True,
                      'schema': 'apbd', 'autoload': True}
@classmethod
def get_by_kode(cls,kode):
return DBSession.query(cls).filter_by(kode=kode).first()
@classmethod
def import_data(cls):
        filenm = 'urusan.csv'
with open(filenm, 'rb') as csvfile:
reader = csv.DictReader(csvfile, delimiter=';', quotechar='"')
for row in reader:
print row
data = cls.get_by_kode(row['kode'])
if not data:
data=cls()
data.kode = row['kode'].strip()
data.created = datetime.now()
data.create_uid = 1
#data.tahun = data.created.year
#data.level_id = data.kode.count('.')+1
#data.parent_id = DBSession.query(Rekening.id).filter(Rekening.kode==data.kode[:data.kode.rfind('.')]).scalar()
data.disabled = 0
#data.defsign = 1
data.nama = row['nama'].strip()
DBSession.add(data)
DBSession.flush()
DBSession.commit()
if __name__ == '__main__':
Urusan.import_data()
| 32.333333 | 122 | 0.579038 |
6cd2eca20fa994985b669ae9b55774c6893c5239 | 877 | py | Python | users/management/commands/seed_users.py | alstn2468/Django_Airbnb_Clone | eeb61e4a36320a0b269d96f47cc6755dbc4c40f8 | [
"MIT"
] | 5 | 2019-11-26T00:34:24.000Z | 2021-01-04T06:04:48.000Z | users/management/commands/seed_users.py | alstn2468/Django_Airbnb_Clone | eeb61e4a36320a0b269d96f47cc6755dbc4c40f8 | [
"MIT"
] | 3 | 2021-06-09T19:05:40.000Z | 2021-09-08T01:49:01.000Z | users/management/commands/seed_users.py | alstn2468/Django_Airbnb_Clone | eeb61e4a36320a0b269d96f47cc6755dbc4c40f8 | [
"MIT"
] | 6 | 2019-11-24T11:47:09.000Z | 2021-08-16T20:21:35.000Z | from django.core.management.base import BaseCommand
from django_seed import Seed
from users.models import User
class Command(BaseCommand):
help = "Automatically create users"
def add_arguments(self, parser):
parser.add_argument("--number", default=1, help="Number of users to create")
def handle(self, *args, **options):
try:
number = int(options.get("number"))
self.stdout.write(self.style.SUCCESS("■ START CREATE USERS"))
seeder = Seed.seeder()
seeder.add_entity(User, number, {"is_staff": False, "is_superuser": False})
seeder.execute()
self.stdout.write(self.style.SUCCESS("■ SUCCESS CREATE ALL USERS!"))
except Exception as e:
self.stdout.write(self.style.ERROR(f"■ {e}"))
self.stdout.write(self.style.ERROR("■ FAIL CREATE USERS"))
| 32.481481 | 87 | 0.63626 |
80c5f928be5d64417a039117ce5bb9bd9eb633c1 | 6,928 | py | Python | very_deep_models/deep_ILM_GN.py | z0zz0/MSc_thesis | 935d0b9aa6781970fad96a58a191134d38702b10 | [
"MIT"
] | null | null | null | very_deep_models/deep_ILM_GN.py | z0zz0/MSc_thesis | 935d0b9aa6781970fad96a58a191134d38702b10 | [
"MIT"
] | null | null | null | very_deep_models/deep_ILM_GN.py | z0zz0/MSc_thesis | 935d0b9aa6781970fad96a58a191134d38702b10 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torch.nn.functional as F
from ilm import ilm_GN
class ConvNet(nn.Module):
def __init__(self):
super(ConvNet, self).__init__()
self.pool_stride_2 = nn.MaxPool3d(kernel_size=(2,2,2), stride=2) # (size, stride)
self.pool_stride_1 = nn.MaxPool3d(kernel_size=(2,2,2), stride=1) # (size, stride)
self.droput_3d_05 = nn.Dropout3d(p=0.5)
self.droput_1d_02 = nn.Dropout(p=0.25)
self.conv1 = nn.Conv3d(1, 5, 5, padding=(2, 2, 2)) #(input channels, ouput channels (no. of filters), kernel size)
self.relu1 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn1 = ilm_GN(channels=5, num_groups=5)
self.conv2 = nn.Conv3d(5, 5, 5, padding=(2, 2, 2))
self.relu2 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn2 = ilm_GN(channels=5, num_groups=5)
self.conv3 = nn.Conv3d(5, 5, 3, padding=(1, 1, 1))
self.relu3 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn3 = ilm_GN(channels=5, num_groups=5)
self.conv4 = nn.Conv3d(5, 5, 3, padding=(1, 1, 1))
self.relu4 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn4 = ilm_GN(channels=5, num_groups=5)
self.conv5 = nn.Conv3d(5, 5, 3, padding=(1, 1, 1))
self.relu5 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn5 = ilm_GN(channels=5, num_groups=5)
self.conv6 = nn.Conv3d(5, 10, 3, padding=(1, 1, 1))
self.relu6 = nn.ReLU()
#pooling 2 stride, 2x2x2. ###
self.ilm_gn6 = ilm_GN(channels=10, num_groups=5)
self.conv7 = nn.Conv3d(10, 10, 3, padding=(1, 1, 1))
self.relu7 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn7 = ilm_GN(channels=10, num_groups=5)
self.conv8 = nn.Conv3d(10, 10, 3, padding=(1, 1, 1))
self.relu8 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn8 = ilm_GN(channels=10, num_groups=5)
self.conv9 = nn.Conv3d(10, 10, 3, padding=(1, 1, 1))
self.relu9 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn9 = ilm_GN(channels=10, num_groups=5)
self.conv10 = nn.Conv3d(10, 10, 3, padding=(1, 1, 1))
self.relu10 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn10 = ilm_GN(channels=10, num_groups=5)
self.conv11 = nn.Conv3d(10, 10, 3, padding=(1, 1, 1))
self.relu11 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn11 = ilm_GN(channels=10, num_groups=5)
self.conv12 = nn.Conv3d(10, 20, 3, padding=(1, 1, 1))
self.relu12 = nn.ReLU()
#pooling 2 stride, 2x2x2. ###
self.ilm_gn12 = ilm_GN(channels=20, num_groups=5)
self.conv13 = nn.Conv3d(20, 20, 3, padding=(1, 1, 1))
self.relu13 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn13 = ilm_GN(channels=20, num_groups=5)
self.conv14 = nn.Conv3d(20, 20, 3, padding=(1, 1, 1))
self.relu14 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn14 = ilm_GN(channels=20, num_groups=5)
self.conv15 = nn.Conv3d(20, 20, 3, padding=(1, 1, 1))
self.relu15 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn15 = ilm_GN(channels=20, num_groups=5)
self.conv16 = nn.Conv3d(20, 20, 3, padding=(1, 1, 1))
self.relu16 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn16 = ilm_GN(channels=20, num_groups=5)
self.conv17 = nn.Conv3d(20, 20, 3, padding=(1, 1, 1))
self.relu17 = nn.ReLU()
#pooling 1 stride, 2x2x2.
self.ilm_gn17 = ilm_GN(channels=20, num_groups=5)
self.conv18 = nn.Conv3d(20, 40, 3, padding=(1, 1, 1))
self.relu18 = nn.ReLU()
#pooling 2 stride, 2x2x2. ###
self.ilm_gn18 = ilm_GN(channels=40, num_groups=5)
self.conv19 = nn.Conv3d(40, 80, 3, padding=(1, 1, 1))
self.relu19 = nn.ReLU()
#pooling 2 stride, 2x2x2. ###
self.ilm_gn19 = ilm_GN(channels=80, num_groups=5)
#flatten
#Dropout3d
self.fc1 = nn.Linear(80*7*7*3, 65)
self.relu20 = nn.ReLU()
self.ln20 = nn.LayerNorm(65)
self.fc2 = nn.Linear(65, 40)
self.relu21 = nn.ReLU()
self.ln21 = nn.LayerNorm(40)
self.fc3 = nn.Linear(40, 20)
self.relu22 = nn.ReLU()
self.ln22 = nn.LayerNorm(20)
#Dropout
self.fc4 = nn.Linear(20, 3)
def forward(self, x):
x = self.pool_stride_1(self.relu1(self.conv1(x)))
x = self.ilm_gn1(x)
x = self.pool_stride_1(self.relu2(self.conv2(x)))
x = self.ilm_gn2(x)
x = self.pool_stride_1(self.relu3(self.conv3(x)))
x = self.ilm_gn3(x)
x = self.pool_stride_1(self.relu4(self.conv4(x)))
x = self.ilm_gn4(x)
x = self.pool_stride_1(self.relu5(self.conv5(x)))
x = self.ilm_gn5(x)
x = self.pool_stride_2(self.relu6(self.conv6(x)))
x = self.ilm_gn6(x)
x = self.pool_stride_1(self.relu7(self.conv7(x)))
x = self.ilm_gn7(x)
x = self.pool_stride_1(self.relu8(self.conv8(x)))
x = self.ilm_gn8(x)
x = self.pool_stride_1(self.relu9(self.conv9(x)))
x = self.ilm_gn9(x)
x = self.pool_stride_1(self.relu10(self.conv10(x)))
x = self.ilm_gn10(x)
x = self.pool_stride_1(self.relu11(self.conv11(x)))
x = self.ilm_gn11(x)
x = self.pool_stride_2(self.relu12(self.conv12(x)))
x = self.ilm_gn12(x)
x = self.pool_stride_1(self.relu13(self.conv13(x)))
x = self.ilm_gn13(x)
x = self.pool_stride_1(self.relu14(self.conv14(x)))
x = self.ilm_gn14(x)
x = self.pool_stride_1(self.relu15(self.conv15(x)))
x = self.ilm_gn15(x)
x = self.pool_stride_1(self.relu16(self.conv16(x)))
x = self.ilm_gn16(x)
x = self.pool_stride_1(self.relu17(self.conv17(x)))
x = self.ilm_gn17(x)
x = self.pool_stride_2(self.relu18(self.conv18(x)))
x = self.ilm_gn18(x)
x = self.pool_stride_2(self.relu19(self.conv19(x)))
x = self.ilm_gn19(x)
x = self.droput_3d_05(x)
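        # flatten: 80 channels x 7 x 7 x 3 spatial positions = 11760 features per sample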
x = x.view(-1, 11760)
x = self.relu20(self.fc1(x))
x = self.ln20(x)
x = self.relu21(self.fc2(x))
x = self.ln21(x)
x = self.relu22(self.fc3(x))
x = self.ln22(x)
x = self.droput_1d_02(x)
x = self.fc4(x)
return x | 34.128079 | 122 | 0.534642 |
bbc0da642b2ccfe4fa493eab31575ae0a4f3f0ce | 60,553 | py | Python | tests/system/test_gbq.py | tonyabraham116/python-bigquery-pandas | 8b177afacff6814570a790ecc32339d9c37e4791 | [
"BSD-3-Clause"
] | null | null | null | tests/system/test_gbq.py | tonyabraham116/python-bigquery-pandas | 8b177afacff6814570a790ecc32339d9c37e4791 | [
"BSD-3-Clause"
] | null | null | null | tests/system/test_gbq.py | tonyabraham116/python-bigquery-pandas | 8b177afacff6814570a790ecc32339d9c37e4791 | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2017 pandas-gbq Authors All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# -*- coding: utf-8 -*-
import datetime
import sys
import numpy as np
import pandas
import pandas.api.types
import pandas.util.testing as tm
from pandas import DataFrame, NaT
try:
import pkg_resources # noqa
except ImportError:
raise ImportError("Could not import pkg_resources (setuptools).")
import pytest
import pytz
from pandas_gbq import gbq
import pandas_gbq.schema
TABLE_ID = "new_test"
PANDAS_VERSION = pkg_resources.parse_version(pandas.__version__)
NULLABLE_INT_PANDAS_VERSION = pkg_resources.parse_version("0.24.0")
NULLABLE_INT_MESSAGE = (
"Require pandas 0.24+ in order to use nullable integer type."
)
def test_imports():
gbq._test_google_api_imports()
def make_mixed_dataframe_v2(test_size):
# create df to test for all BQ datatypes except RECORD
bools = np.random.randint(2, size=(1, test_size)).astype(bool)
flts = np.random.randn(1, test_size)
ints = np.random.randint(1, 10, size=(1, test_size))
strs = np.random.randint(1, 10, size=(1, test_size)).astype(str)
times = [
datetime.datetime.now(pytz.timezone("US/Arizona"))
for t in range(test_size)
]
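    # bools/flts/ints/strs are 1 x test_size arrays, so [0] selects the full row of values;
    # times[0] is a single timestamp that pandas broadcasts across the index.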
return DataFrame(
{
"bools": bools[0],
"flts": flts[0],
"ints": ints[0],
"strs": strs[0],
"times": times[0],
},
index=range(test_size),
)
def get_schema(
gbq_connector: gbq.GbqConnector, dataset_id: str, table_id: str
):
"""Retrieve the schema of the table
Obtain from BigQuery the field names and field types
for the table defined by the parameters
Parameters
----------
dataset_id : str
Name of the BigQuery dataset for the table
table_id : str
Name of the BigQuery table
Returns
-------
list of dicts
Fields representing the schema
"""
from google.cloud import bigquery
bqclient = gbq_connector.client
table_ref = bigquery.TableReference(
bigquery.DatasetReference(bqclient.project, dataset_id),
table_id,
)
try:
table = bqclient.get_table(table_ref)
remote_schema = table.schema
remote_fields = [
field_remote.to_api_repr() for field_remote in remote_schema
]
for field in remote_fields:
field["type"] = field["type"].upper()
field["mode"] = field["mode"].upper()
return remote_fields
except gbq_connector.http_error as ex:
gbq_connector.process_http_error(ex)
def verify_schema(gbq_connector, dataset_id, table_id, schema):
"""Indicate whether schemas match exactly
Compare the BigQuery table identified in the parameters with
the schema passed in and indicate whether all fields in the former
are present in the latter. Order is not considered.
Parameters
----------
dataset_id :str
Name of the BigQuery dataset for the table
table_id : str
Name of the BigQuery table
schema : list(dict)
Schema for comparison. Each item should have
a 'name' and a 'type'
Returns
-------
bool
Whether the schemas match
"""
fields_remote = pandas_gbq.schema._clean_schema_fields(
get_schema(gbq_connector, dataset_id, table_id)
)
fields_local = pandas_gbq.schema._clean_schema_fields(schema["fields"])
return fields_remote == fields_local
class TestGBQConnectorIntegration(object):
def test_should_be_able_to_make_a_connector(self, gbq_connector):
assert gbq_connector is not None, "Could not create a GbqConnector"
def test_should_be_able_to_get_a_bigquery_client(self, gbq_connector):
bigquery_client = gbq_connector.get_client()
assert bigquery_client is not None
class TestReadGBQIntegration(object):
@pytest.fixture(autouse=True)
def setup(self, project, credentials):
# - PER-TEST FIXTURES -
# put here any instruction you want to be run *BEFORE* *EVERY* test is
# executed.
self.gbq_connector = gbq.GbqConnector(project, credentials=credentials)
self.credentials = credentials
def test_should_properly_handle_empty_strings(self, project_id):
query = 'SELECT "" AS empty_string'
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
tm.assert_frame_equal(df, DataFrame({"empty_string": [""]}))
def test_should_properly_handle_null_strings(self, project_id):
query = "SELECT STRING(NULL) AS null_string"
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
tm.assert_frame_equal(df, DataFrame({"null_string": [None]}))
def test_should_properly_handle_valid_integers(self, project_id):
query = "SELECT CAST(3 AS INT64) AS valid_integer"
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
)
tm.assert_frame_equal(df, DataFrame({"valid_integer": [3]}))
def test_should_properly_handle_nullable_integers(self, project_id):
if PANDAS_VERSION < NULLABLE_INT_PANDAS_VERSION:
pytest.skip(msg=NULLABLE_INT_MESSAGE)
query = """SELECT * FROM
UNNEST([1, NULL]) AS nullable_integer
"""
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
dtypes={"nullable_integer": "Int64"},
)
tm.assert_frame_equal(
df,
DataFrame(
{
"nullable_integer": pandas.Series(
[1, pandas.NA], dtype="Int64"
)
}
),
)
def test_should_properly_handle_valid_longs(self, project_id):
query = "SELECT 1 << 62 AS valid_long"
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
)
tm.assert_frame_equal(df, DataFrame({"valid_long": [1 << 62]}))
def test_should_properly_handle_nullable_longs(self, project_id):
if PANDAS_VERSION < NULLABLE_INT_PANDAS_VERSION:
pytest.skip(msg=NULLABLE_INT_MESSAGE)
query = """SELECT * FROM
UNNEST([1 << 62, NULL]) AS nullable_long
"""
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
dtypes={"nullable_long": "Int64"},
)
tm.assert_frame_equal(
df,
DataFrame(
{
"nullable_long": pandas.Series(
[1 << 62, pandas.NA], dtype="Int64"
)
}
),
)
def test_should_properly_handle_null_integers(self, project_id):
if PANDAS_VERSION < NULLABLE_INT_PANDAS_VERSION:
pytest.skip(msg=NULLABLE_INT_MESSAGE)
query = "SELECT CAST(NULL AS INT64) AS null_integer"
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
dtypes={"null_integer": "Int64"},
)
tm.assert_frame_equal(
df,
DataFrame(
{"null_integer": pandas.Series([pandas.NA], dtype="Int64")}
),
)
def test_should_properly_handle_valid_floats(self, project_id):
from math import pi
query = "SELECT PI() AS valid_float"
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
tm.assert_frame_equal(df, DataFrame({"valid_float": [pi]}))
def test_should_properly_handle_nullable_floats(self, project_id):
from math import pi
query = """SELECT * FROM
(SELECT PI() AS nullable_float),
(SELECT NULL AS nullable_float)"""
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
tm.assert_frame_equal(df, DataFrame({"nullable_float": [pi, None]}))
def test_should_properly_handle_valid_doubles(self, project_id):
from math import pi
query = "SELECT PI() * POW(10, 307) AS valid_double"
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
tm.assert_frame_equal(
df, DataFrame({"valid_double": [pi * 10 ** 307]})
)
def test_should_properly_handle_nullable_doubles(self, project_id):
from math import pi
query = """SELECT * FROM
(SELECT PI() * POW(10, 307) AS nullable_double),
(SELECT NULL AS nullable_double)"""
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
tm.assert_frame_equal(
df, DataFrame({"nullable_double": [pi * 10 ** 307, None]})
)
def test_should_properly_handle_null_floats(self, project_id):
query = """SELECT null_float
FROM UNNEST(ARRAY<FLOAT64>[NULL, 1.0]) AS null_float
"""
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
)
tm.assert_frame_equal(df, DataFrame({"null_float": [np.nan, 1.0]}))
def test_should_properly_handle_date(self, project_id):
query = "SELECT DATE(2003, 1, 4) AS date_col"
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
)
expected = DataFrame(
{
"date_col": pandas.Series(
[datetime.date(2003, 1, 4)], dtype="datetime64[ns]"
)
},
)
tm.assert_frame_equal(df, expected)
def test_should_properly_handle_time(self, project_id):
query = "SELECT TIME_ADD(TIME(3, 14, 15), INTERVAL 926589 MICROSECOND) AS time_col"
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
)
expected = DataFrame(
{
"time_col": pandas.Series(
[datetime.time(3, 14, 15, 926589)], dtype="object"
)
},
)
tm.assert_frame_equal(df, expected)
def test_should_properly_handle_timestamp_unix_epoch(self, project_id):
query = 'SELECT TIMESTAMP("1970-01-01 00:00:00") AS unix_epoch'
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
expected = DataFrame(
{"unix_epoch": ["1970-01-01T00:00:00.000000Z"]},
dtype="datetime64[ns]",
)
if expected["unix_epoch"].dt.tz is None:
expected["unix_epoch"] = expected["unix_epoch"].dt.tz_localize(
"UTC"
)
tm.assert_frame_equal(df, expected)
def test_should_properly_handle_arbitrary_timestamp(self, project_id):
query = 'SELECT TIMESTAMP("2004-09-15 05:00:00") AS valid_timestamp'
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
expected = DataFrame(
{"valid_timestamp": ["2004-09-15T05:00:00.000000Z"]},
dtype="datetime64[ns]",
)
if expected["valid_timestamp"].dt.tz is None:
expected["valid_timestamp"] = expected[
"valid_timestamp"
].dt.tz_localize("UTC")
tm.assert_frame_equal(df, expected)
def test_should_properly_handle_datetime_unix_epoch(self, project_id):
query = 'SELECT DATETIME("1970-01-01 00:00:00") AS unix_epoch'
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
tm.assert_frame_equal(
df,
DataFrame(
{"unix_epoch": ["1970-01-01T00:00:00"]}, dtype="datetime64[ns]"
),
)
def test_should_properly_handle_arbitrary_datetime(self, project_id):
query = 'SELECT DATETIME("2004-09-15 05:00:00") AS valid_timestamp'
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
tm.assert_frame_equal(
df,
DataFrame(
{"valid_timestamp": [np.datetime64("2004-09-15T05:00:00")]}
),
)
@pytest.mark.parametrize(
"expression, is_expected_dtype",
[
("current_date()", pandas.api.types.is_datetime64_ns_dtype),
("current_timestamp()", pandas.api.types.is_datetime64tz_dtype),
("current_datetime()", pandas.api.types.is_datetime64_ns_dtype),
("TRUE", pandas.api.types.is_bool_dtype),
("FALSE", pandas.api.types.is_bool_dtype),
],
)
def test_return_correct_types(
self, project_id, expression, is_expected_dtype
):
"""
All type checks can be added to this function using additional
parameters, rather than creating additional functions.
We can consolidate the existing functions here in time
TODO: time doesn't currently parse
("time(12,30,00)", "<M8[ns]"),
"""
query = "SELECT {} AS _".format(expression)
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
)
assert is_expected_dtype(df["_"].dtype)
def test_should_properly_handle_null_timestamp(self, project_id):
query = "SELECT TIMESTAMP(NULL) AS null_timestamp"
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
expected = DataFrame({"null_timestamp": [NaT]}, dtype="datetime64[ns]")
expected["null_timestamp"] = expected["null_timestamp"].dt.tz_localize(
"UTC"
)
tm.assert_frame_equal(df, expected)
def test_should_properly_handle_null_datetime(self, project_id):
query = "SELECT CAST(NULL AS DATETIME) AS null_datetime"
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
)
tm.assert_frame_equal(df, DataFrame({"null_datetime": [NaT]}))
def test_should_properly_handle_null_boolean(self, project_id):
query = "SELECT BOOLEAN(NULL) AS null_boolean"
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
tm.assert_frame_equal(df, DataFrame({"null_boolean": [None]}))
def test_should_properly_handle_nullable_booleans(self, project_id):
query = """SELECT * FROM
(SELECT BOOLEAN(TRUE) AS nullable_boolean),
(SELECT NULL AS nullable_boolean)"""
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
tm.assert_frame_equal(
df, DataFrame({"nullable_boolean": [True, None]}).astype(object)
)
def test_unicode_string_conversion_and_normalization(self, project_id):
correct_test_datatype = DataFrame({"unicode_string": ["éü"]})
unicode_string = "éü"
query = 'SELECT "{0}" AS unicode_string'.format(unicode_string)
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
tm.assert_frame_equal(df, correct_test_datatype)
def test_index_column(self, project_id):
query = "SELECT 'a' AS string_1, 'b' AS string_2"
result_frame = gbq.read_gbq(
query,
project_id=project_id,
index_col="string_1",
credentials=self.credentials,
dialect="legacy",
)
correct_frame = DataFrame(
{"string_1": ["a"], "string_2": ["b"]}
).set_index("string_1")
assert result_frame.index.name == correct_frame.index.name
def test_column_order(self, project_id):
query = "SELECT 'a' AS string_1, 'b' AS string_2, 'c' AS string_3"
col_order = ["string_3", "string_1", "string_2"]
result_frame = gbq.read_gbq(
query,
project_id=project_id,
col_order=col_order,
credentials=self.credentials,
dialect="legacy",
)
correct_frame = DataFrame(
{"string_1": ["a"], "string_2": ["b"], "string_3": ["c"]}
)[col_order]
tm.assert_frame_equal(result_frame, correct_frame)
def test_read_gbq_raises_invalid_column_order(self, project_id):
query = "SELECT 'a' AS string_1, 'b' AS string_2, 'c' AS string_3"
col_order = ["string_aaa", "string_1", "string_2"]
# Column string_aaa does not exist. Should raise InvalidColumnOrder
with pytest.raises(gbq.InvalidColumnOrder):
gbq.read_gbq(
query,
project_id=project_id,
col_order=col_order,
credentials=self.credentials,
dialect="legacy",
)
def test_column_order_plus_index(self, project_id):
query = "SELECT 'a' AS string_1, 'b' AS string_2, 'c' AS string_3"
col_order = ["string_3", "string_2"]
result_frame = gbq.read_gbq(
query,
project_id=project_id,
index_col="string_1",
col_order=col_order,
credentials=self.credentials,
dialect="legacy",
)
correct_frame = DataFrame(
{"string_1": ["a"], "string_2": ["b"], "string_3": ["c"]}
)
correct_frame.set_index("string_1", inplace=True)
correct_frame = correct_frame[col_order]
tm.assert_frame_equal(result_frame, correct_frame)
def test_read_gbq_raises_invalid_index_column(self, project_id):
query = "SELECT 'a' AS string_1, 'b' AS string_2, 'c' AS string_3"
col_order = ["string_3", "string_2"]
# Column string_bbb does not exist. Should raise InvalidIndexColumn
with pytest.raises(gbq.InvalidIndexColumn):
gbq.read_gbq(
query,
project_id=project_id,
index_col="string_bbb",
col_order=col_order,
credentials=self.credentials,
dialect="legacy",
)
def test_malformed_query(self, project_id):
with pytest.raises(gbq.GenericGBQException):
gbq.read_gbq(
"SELCET * FORM [publicdata:samples.shakespeare]",
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
def test_bad_project_id(self):
with pytest.raises(gbq.GenericGBQException):
gbq.read_gbq(
"SELCET * FROM [publicdata:samples.shakespeare]",
project_id="not-my-project",
credentials=self.credentials,
dialect="legacy",
)
def test_bad_table_name(self, project_id):
with pytest.raises(gbq.GenericGBQException):
gbq.read_gbq(
"SELECT * FROM [publicdata:samples.nope]",
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
def test_download_dataset_larger_than_200k_rows(self, project_id):
test_size = 200005
# Test for known BigQuery bug in datasets larger than 100k rows
# http://stackoverflow.com/questions/19145587/bq-py-not-paging-results
df = gbq.read_gbq(
"SELECT id FROM [publicdata:samples.wikipedia] "
"GROUP EACH BY id ORDER BY id ASC LIMIT {0}".format(test_size),
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
assert len(df.drop_duplicates()) == test_size
def test_ddl(self, random_dataset, project_id):
# Bug fix for https://github.com/pydata/pandas-gbq/issues/45
df = gbq.read_gbq(
"CREATE OR REPLACE TABLE {}.test_ddl (x INT64)".format(
random_dataset.dataset_id
)
)
assert len(df) == 0
def test_ddl_w_max_results(self, random_dataset, project_id):
df = gbq.read_gbq(
"CREATE OR REPLACE TABLE {}.test_ddl (x INT64)".format(
random_dataset.dataset_id
),
max_results=0,
)
assert df is None
def test_max_results(self, random_dataset, project_id):
df = gbq.read_gbq(
"SELECT * FROM UNNEST(GENERATE_ARRAY(1, 100))", max_results=10
)
assert len(df) == 10
def test_zero_rows(self, project_id):
# Bug fix for https://github.com/pandas-dev/pandas/issues/10273
df = gbq.read_gbq(
'SELECT name, number, (mlc_class = "HU") is_hurricane, iso_time '
"FROM `bigquery-public-data.noaa_hurricanes.hurricanes` "
'WHERE iso_time = TIMESTAMP("1900-01-01 00:00:00") ',
project_id=project_id,
credentials=self.credentials,
)
empty_columns = {
"name": pandas.Series([], dtype=object),
"number": pandas.Series([], dtype=np.dtype(int)),
"is_hurricane": pandas.Series([], dtype=np.dtype(bool)),
"iso_time": pandas.Series([], dtype="datetime64[ns]"),
}
expected_result = DataFrame(
empty_columns,
columns=["name", "number", "is_hurricane", "iso_time"],
)
tm.assert_frame_equal(df, expected_result, check_index_type=False)
def test_one_row_one_column(self, project_id):
df = gbq.read_gbq(
"SELECT 3 as v",
project_id=project_id,
credentials=self.credentials,
dialect="standard",
)
expected_result = DataFrame(dict(v=[3]))
tm.assert_frame_equal(df, expected_result)
def test_legacy_sql(self, project_id):
legacy_sql = "SELECT id FROM [publicdata.samples.wikipedia] LIMIT 10"
# Test that a legacy sql statement fails when
# setting dialect='standard'
with pytest.raises(gbq.GenericGBQException):
gbq.read_gbq(
legacy_sql,
project_id=project_id,
dialect="standard",
credentials=self.credentials,
)
# Test that a legacy sql statement succeeds when
# setting dialect='legacy'
df = gbq.read_gbq(
legacy_sql,
project_id=project_id,
dialect="legacy",
credentials=self.credentials,
)
assert len(df.drop_duplicates()) == 10
def test_standard_sql(self, project_id):
standard_sql = (
"SELECT DISTINCT id FROM "
"`publicdata.samples.wikipedia` LIMIT 10"
)
# Test that a standard sql statement fails when using
# the legacy SQL dialect.
with pytest.raises(gbq.GenericGBQException):
gbq.read_gbq(
standard_sql,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
# Test that a standard sql statement succeeds when
# setting dialect='standard'
df = gbq.read_gbq(
standard_sql,
project_id=project_id,
dialect="standard",
credentials=self.credentials,
)
assert len(df.drop_duplicates()) == 10
def test_query_with_parameters(self, project_id):
sql_statement = "SELECT @param1 + @param2 AS valid_result"
config = {
"query": {
"useLegacySql": False,
"parameterMode": "named",
"queryParameters": [
{
"name": "param1",
"parameterType": {"type": "INTEGER"},
"parameterValue": {"value": 1},
},
{
"name": "param2",
"parameterType": {"type": "INTEGER"},
"parameterValue": {"value": 2},
},
],
}
}
# Test that a query that relies on parameters fails
# when parameters are not supplied via configuration
with pytest.raises(ValueError):
gbq.read_gbq(
sql_statement,
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
# Test that the query is successful because we have supplied
# the correct query parameters via the 'config' option
df = gbq.read_gbq(
sql_statement,
project_id=project_id,
credentials=self.credentials,
configuration=config,
dialect="legacy",
)
tm.assert_frame_equal(df, DataFrame({"valid_result": [3]}))
def test_query_inside_configuration(self, project_id):
query_no_use = 'SELECT "PI_WRONG" AS valid_string'
query = 'SELECT "PI" AS valid_string'
config = {"query": {"query": query, "useQueryCache": False}}
        # Test that the query cannot be passed both
        # inside the config and as a parameter
with pytest.raises(ValueError):
gbq.read_gbq(
query_no_use,
project_id=project_id,
credentials=self.credentials,
configuration=config,
dialect="legacy",
)
df = gbq.read_gbq(
None,
project_id=project_id,
credentials=self.credentials,
configuration=config,
dialect="legacy",
)
tm.assert_frame_equal(df, DataFrame({"valid_string": ["PI"]}))
def test_configuration_without_query(self, project_id):
sql_statement = "SELECT 1"
config = {
"copy": {
"sourceTable": {
"projectId": project_id,
"datasetId": "publicdata:samples",
"tableId": "wikipedia",
},
"destinationTable": {
"projectId": project_id,
"datasetId": "publicdata:samples",
"tableId": "wikipedia_copied",
},
}
}
        # Test that only 'query' configurations are supported,
        # not 'copy', 'load', or 'extract'
with pytest.raises(ValueError):
gbq.read_gbq(
sql_statement,
project_id=project_id,
credentials=self.credentials,
configuration=config,
dialect="legacy",
)
def test_configuration_raises_value_error_with_multiple_config(
self, project_id
):
sql_statement = "SELECT 1"
config = {
"query": {"query": sql_statement, "useQueryCache": False},
"load": {"query": sql_statement, "useQueryCache": False},
}
        # Test that a ValueError is raised when multiple configurations are passed
with pytest.raises(ValueError):
gbq.read_gbq(
sql_statement,
project_id=project_id,
credentials=self.credentials,
configuration=config,
dialect="legacy",
)
def test_timeout_configuration(self, project_id):
sql_statement = """
SELECT
SUM(bottles_sold) total_bottles,
UPPER(category_name) category_name,
magnitude,
liquor.zip_code zip_code
FROM `bigquery-public-data.iowa_liquor_sales.sales` liquor
JOIN `bigquery-public-data.geo_us_boundaries.zip_codes` zip_codes
ON liquor.zip_code = zip_codes.zip_code
JOIN `bigquery-public-data.noaa_historic_severe_storms.tornado_paths` tornados
ON liquor.date = tornados.storm_date
WHERE ST_INTERSECTS(tornado_path_geom, zip_code_geom)
GROUP BY category_name, magnitude, zip_code
ORDER BY magnitude ASC, total_bottles DESC
"""
configs = [
{"query": {"useQueryCache": False, "timeoutMs": 1}},
{"query": {"useQueryCache": False}, "jobTimeoutMs": 1},
]
for config in configs:
with pytest.raises(gbq.QueryTimeout):
gbq.read_gbq(
sql_statement,
project_id=project_id,
credentials=self.credentials,
configuration=config,
)
def test_query_response_bytes(self):
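        # sizeof_fmt renders byte counts in binary (1024-based) units,
        # from bytes up through yottabytes.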
assert self.gbq_connector.sizeof_fmt(999) == "999.0 B"
assert self.gbq_connector.sizeof_fmt(1024) == "1.0 KB"
assert self.gbq_connector.sizeof_fmt(1099) == "1.1 KB"
assert self.gbq_connector.sizeof_fmt(1044480) == "1020.0 KB"
assert self.gbq_connector.sizeof_fmt(1048576) == "1.0 MB"
assert self.gbq_connector.sizeof_fmt(1048576000) == "1000.0 MB"
assert self.gbq_connector.sizeof_fmt(1073741824) == "1.0 GB"
assert self.gbq_connector.sizeof_fmt(1.099512e12) == "1.0 TB"
assert self.gbq_connector.sizeof_fmt(1.125900e15) == "1.0 PB"
assert self.gbq_connector.sizeof_fmt(1.152922e18) == "1.0 EB"
assert self.gbq_connector.sizeof_fmt(1.180592e21) == "1.0 ZB"
assert self.gbq_connector.sizeof_fmt(1.208926e24) == "1.0 YB"
assert self.gbq_connector.sizeof_fmt(1.208926e28) == "10000.0 YB"
def test_struct(self, project_id):
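        # A BigQuery STRUCT value is returned as a Python dict inside the
        # dataframe column.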
query = """SELECT 1 int_field,
STRUCT("a" as letter, 1 as num) struct_field"""
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
)
expected = DataFrame(
[[1, {"letter": "a", "num": 1}]],
columns=["int_field", "struct_field"],
)
tm.assert_frame_equal(df, expected)
def test_array(self, project_id):
query = """select ["a","x","b","y","c","z"] as letters"""
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
)
tm.assert_frame_equal(
df,
DataFrame([[["a", "x", "b", "y", "c", "z"]]], columns=["letters"]),
)
def test_array_length_zero(self, project_id):
query = """WITH t as (
SELECT "a" letter, [""] as array_field
UNION ALL
SELECT "b" letter, [] as array_field)
select letter, array_field, array_length(array_field) len
from t
order by letter ASC"""
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
)
expected = DataFrame(
[["a", [""], 1], ["b", [], 0]],
columns=["letter", "array_field", "len"],
)
tm.assert_frame_equal(df, expected)
def test_array_agg(self, project_id):
query = """WITH t as (
SELECT "a" letter, 1 num
UNION ALL
SELECT "b" letter, 2 num
UNION ALL
SELECT "a" letter, 3 num)
select letter, array_agg(num order by num ASC) numbers
from t
group by letter
order by letter ASC"""
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
)
tm.assert_frame_equal(
df,
DataFrame(
[["a", [1, 3]], ["b", [2]]], columns=["letter", "numbers"]
),
)
def test_array_of_floats(self, project_id):
query = """select [1.1, 2.2, 3.3] as a, 4 as b"""
df = gbq.read_gbq(
query,
project_id=project_id,
credentials=self.credentials,
dialect="standard",
)
tm.assert_frame_equal(
df, DataFrame([[[1.1, 2.2, 3.3], 4]], columns=["a", "b"])
)
def test_tokyo(self, tokyo_dataset, tokyo_table, project_id):
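        # The Tokyo dataset lives in asia-northeast1, so the matching
        # location is passed explicitly.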
df = gbq.read_gbq(
"SELECT MAX(year) AS max_year FROM {}.{}".format(
tokyo_dataset, tokyo_table
),
dialect="standard",
location="asia-northeast1",
project_id=project_id,
credentials=self.credentials,
)
assert df["max_year"][0] >= 2000
class TestToGBQIntegration(object):
@pytest.fixture(autouse=True, scope="function")
def setup(self, project, credentials, random_dataset_id):
# - PER-TEST FIXTURES -
# put here any instruction you want to be run *BEFORE* *EVERY* test is
# executed.
self.credentials = credentials
self.gbq_connector = gbq.GbqConnector(project, credentials=credentials)
self.bqclient = self.gbq_connector.client
self.table = gbq._Table(
project, random_dataset_id, credentials=credentials
)
self.destination_table = "{}.{}".format(random_dataset_id, TABLE_ID)
def test_upload_data(self, project_id):
test_id = "1"
test_size = 20001
df = make_mixed_dataframe_v2(test_size)
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
chunksize=10000,
credentials=self.credentials,
)
result = gbq.read_gbq(
"SELECT COUNT(*) AS num_rows FROM {0}".format(
self.destination_table + test_id
),
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
assert result["num_rows"][0] == test_size
def test_upload_empty_data(self, project_id):
test_id = "data_with_0_rows"
df = DataFrame()
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
credentials=self.credentials,
)
table = self.bqclient.get_table(self.destination_table + test_id)
assert table.num_rows == 0
assert len(table.schema) == 0
def test_upload_empty_data_with_schema(self, project_id):
test_id = "data_with_0_rows"
df = DataFrame(
{
"a": pandas.Series(dtype="int64"),
"b": pandas.Series(dtype="object"),
}
)
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
credentials=self.credentials,
)
table = self.bqclient.get_table(self.destination_table + test_id)
assert table.num_rows == 0
schema = table.schema
assert schema[0].field_type == "INTEGER"
assert schema[1].field_type == "STRING"
def test_upload_data_if_table_exists_fail(self, project_id):
test_id = "2"
test_size = 10
df = make_mixed_dataframe_v2(test_size)
self.table.create(TABLE_ID + test_id, gbq._generate_bq_schema(df))
        # Test that the default value of if_exists is 'fail'
with pytest.raises(gbq.TableCreationError):
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
credentials=self.credentials,
)
# Test the if_exists parameter with value 'fail'
with pytest.raises(gbq.TableCreationError):
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
if_exists="fail",
credentials=self.credentials,
)
def test_upload_data_if_table_exists_append(self, project_id):
test_id = "3"
test_size = 10
df = make_mixed_dataframe_v2(test_size)
df_different_schema = tm.makeMixedDataFrame()
# Initialize table with sample data
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
chunksize=10000,
credentials=self.credentials,
)
# Test the if_exists parameter with value 'append'
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
if_exists="append",
credentials=self.credentials,
)
result = gbq.read_gbq(
"SELECT COUNT(*) AS num_rows FROM {0}".format(
self.destination_table + test_id
),
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
assert result["num_rows"][0] == test_size * 2
# Try inserting with a different schema, confirm failure
with pytest.raises(gbq.InvalidSchema):
gbq.to_gbq(
df_different_schema,
self.destination_table + test_id,
project_id,
if_exists="append",
credentials=self.credentials,
)
def test_upload_subset_columns_if_table_exists_append(self, project_id):
        # Issue 24: Upload is successful if the dataframe has columns
        # which are a subset of the current schema
test_id = "16"
test_size = 10
df = make_mixed_dataframe_v2(test_size)
df_subset_cols = df.iloc[:, :2]
# Initialize table with sample data
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
chunksize=10000,
credentials=self.credentials,
)
# Test the if_exists parameter with value 'append'
gbq.to_gbq(
df_subset_cols,
self.destination_table + test_id,
project_id,
if_exists="append",
credentials=self.credentials,
)
result = gbq.read_gbq(
"SELECT COUNT(*) AS num_rows FROM {0}".format(
self.destination_table + test_id
),
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
assert result["num_rows"][0] == test_size * 2
def test_upload_data_if_table_exists_replace(self, project_id):
test_id = "4"
test_size = 10
df = make_mixed_dataframe_v2(test_size)
df_different_schema = tm.makeMixedDataFrame()
# Initialize table with sample data
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
chunksize=10000,
credentials=self.credentials,
)
# Test the if_exists parameter with the value 'replace'.
gbq.to_gbq(
df_different_schema,
self.destination_table + test_id,
project_id,
if_exists="replace",
credentials=self.credentials,
)
result = gbq.read_gbq(
"SELECT COUNT(*) AS num_rows FROM {0}".format(
self.destination_table + test_id
),
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
assert result["num_rows"][0] == 5
def test_upload_data_if_table_exists_raises_value_error(self, project_id):
test_id = "4"
test_size = 10
df = make_mixed_dataframe_v2(test_size)
# Test invalid value for if_exists parameter raises value error
with pytest.raises(ValueError):
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
if_exists="xxxxx",
credentials=self.credentials,
)
def test_google_upload_errors_should_raise_exception(self, project_id):
raise pytest.skip("buggy test")
test_id = "5"
test_timestamp = datetime.datetime.now(pytz.timezone("US/Arizona"))
bad_df = DataFrame(
{
"bools": [False, False],
"flts": [0.0, 1.0],
"ints": [0, "1"],
"strs": ["a", 1],
"times": [test_timestamp, test_timestamp],
},
index=range(2),
)
with pytest.raises(gbq.StreamingInsertError):
gbq.to_gbq(
bad_df,
self.destination_table + test_id,
project_id,
credentials=self.credentials,
)
def test_upload_chinese_unicode_data(self, project_id):
test_id = "2"
test_size = 6
df = DataFrame(
np.random.randn(6, 4), index=range(6), columns=list("ABCD")
)
df["s"] = u"信用卡"
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
credentials=self.credentials,
chunksize=10000,
)
result_df = gbq.read_gbq(
"SELECT * FROM {0}".format(self.destination_table + test_id),
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
assert len(result_df) == test_size
if sys.version_info.major < 3:
pytest.skip(msg="Unicode comparison in Py2 not working")
result = result_df["s"].sort_values()
expected = df["s"].sort_values()
tm.assert_numpy_array_equal(expected.values, result.values)
def test_upload_other_unicode_data(self, project_id):
test_id = "3"
test_size = 3
df = DataFrame(
{
"s": ["Skywalker™", "lego", "hülle"],
"i": [200, 300, 400],
"d": [
"2017-12-13 17:40:39",
"2017-12-13 17:40:39",
"2017-12-13 17:40:39",
],
}
)
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id=project_id,
credentials=self.credentials,
chunksize=10000,
)
result_df = gbq.read_gbq(
"SELECT * FROM {0}".format(self.destination_table + test_id),
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
assert len(result_df) == test_size
if sys.version_info.major < 3:
pytest.skip(msg="Unicode comparison in Py2 not working")
result = result_df["s"].sort_values()
expected = df["s"].sort_values()
tm.assert_numpy_array_equal(expected.values, result.values)
def test_upload_mixed_float_and_int(self, project_id):
"""Test that we can upload a dataframe containing an int64 and float64 column.
See: https://github.com/pydata/pandas-gbq/issues/116
"""
test_id = "mixed_float_and_int"
test_size = 2
df = DataFrame(
[[1, 1.1], [2, 2.2]],
index=["row 1", "row 2"],
columns=["intColumn", "floatColumn"],
)
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id=project_id,
credentials=self.credentials,
)
result_df = gbq.read_gbq(
"SELECT * FROM {0}".format(self.destination_table + test_id),
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
assert len(result_df) == test_size
def test_upload_data_with_newlines(self, project_id):
test_id = "data_with_newlines"
test_size = 2
df = DataFrame({"s": ["abcd", "ef\ngh"]})
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id=project_id,
credentials=self.credentials,
)
result_df = gbq.read_gbq(
"SELECT * FROM {0}".format(self.destination_table + test_id),
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
assert len(result_df) == test_size
if sys.version_info.major < 3:
pytest.skip(msg="Unicode comparison in Py2 not working")
result = result_df["s"].sort_values()
expected = df["s"].sort_values()
tm.assert_numpy_array_equal(expected.values, result.values)
def test_upload_data_flexible_column_order(self, project_id):
test_id = "13"
test_size = 10
df = make_mixed_dataframe_v2(test_size)
# Initialize table with sample data
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id,
chunksize=10000,
credentials=self.credentials,
)
df_columns_reversed = df[df.columns[::-1]]
gbq.to_gbq(
df_columns_reversed,
self.destination_table + test_id,
project_id,
if_exists="append",
credentials=self.credentials,
)
def test_upload_data_with_valid_user_schema(self, project_id):
        # Issue #46: test scenarios with user-provided
        # schemas
df = tm.makeMixedDataFrame()
test_id = "18"
test_schema = [
{"name": "A", "type": "FLOAT"},
{"name": "B", "type": "FLOAT"},
{"name": "C", "type": "STRING"},
{"name": "D", "type": "TIMESTAMP"},
]
destination_table = self.destination_table + test_id
gbq.to_gbq(
df,
destination_table,
project_id,
credentials=self.credentials,
table_schema=test_schema,
)
dataset, table = destination_table.split(".")
assert verify_schema(
self.gbq_connector, dataset, table, dict(fields=test_schema)
)
def test_upload_data_with_invalid_user_schema_raises_error(
self, project_id
):
df = tm.makeMixedDataFrame()
test_id = "19"
test_schema = [
{"name": "A", "type": "FLOAT"},
{"name": "B", "type": "FLOAT"},
{"name": "C", "type": "FLOAT"},
{"name": "D", "type": "FLOAT"},
]
destination_table = self.destination_table + test_id
with pytest.raises(gbq.GenericGBQException):
gbq.to_gbq(
df,
destination_table,
project_id,
credentials=self.credentials,
table_schema=test_schema,
)
def test_upload_data_with_missing_schema_fields_raises_error(
self, project_id
):
df = tm.makeMixedDataFrame()
test_id = "20"
test_schema = [
{"name": "A", "type": "FLOAT"},
{"name": "B", "type": "FLOAT"},
{"name": "C", "type": "FLOAT"},
]
destination_table = self.destination_table + test_id
with pytest.raises(gbq.GenericGBQException):
gbq.to_gbq(
df,
destination_table,
project_id,
credentials=self.credentials,
table_schema=test_schema,
)
def test_upload_data_with_timestamp(self, project_id):
test_id = "21"
test_size = 6
df = DataFrame(
np.random.randn(test_size, 4),
index=range(test_size),
columns=list("ABCD"),
)
df["times"] = pandas.Series(
[
"2018-03-13T05:40:45.348318",
"2018-04-13T05:40:45.348318",
"2018-05-13T05:40:45.348318",
"2018-06-13T05:40:45.348318",
"2018-07-13T05:40:45.348318",
"2018-08-13T05:40:45.348318",
],
dtype="datetime64[ns]",
).dt.tz_localize("UTC")
gbq.to_gbq(
df,
self.destination_table + test_id,
project_id=project_id,
credentials=self.credentials,
)
result_df = gbq.read_gbq(
"SELECT * FROM {0}".format(self.destination_table + test_id),
project_id=project_id,
credentials=self.credentials,
dialect="legacy",
)
assert len(result_df) == test_size
expected = df["times"].sort_values()
result = result_df["times"].sort_values()
tm.assert_series_equal(expected, result)
def test_upload_data_with_different_df_and_user_schema(self, project_id):
df = tm.makeMixedDataFrame()
df["A"] = df["A"].astype(str)
df["B"] = df["B"].astype(str)
test_id = "22"
test_schema = [
{"name": "A", "type": "FLOAT"},
{"name": "B", "type": "FLOAT"},
{"name": "C", "type": "STRING"},
{"name": "D", "type": "TIMESTAMP"},
]
destination_table = self.destination_table + test_id
gbq.to_gbq(
df,
destination_table,
project_id,
credentials=self.credentials,
table_schema=test_schema,
)
dataset, table = destination_table.split(".")
assert verify_schema(
self.gbq_connector, dataset, table, dict(fields=test_schema)
)
def test_upload_data_tokyo(
self, project_id, tokyo_dataset, bigquery_client
):
from google.cloud import bigquery
test_size = 10
df = make_mixed_dataframe_v2(test_size)
tokyo_destination = "{}.to_gbq_test".format(tokyo_dataset)
# Initialize table with sample data
gbq.to_gbq(
df,
tokyo_destination,
project_id,
credentials=self.credentials,
location="asia-northeast1",
)
table = bigquery_client.get_table(
bigquery.TableReference(
bigquery.DatasetReference(project_id, tokyo_dataset),
"to_gbq_test",
)
)
assert table.num_rows > 0
def test_upload_data_tokyo_non_existing_dataset(
self, project_id, random_dataset_id, bigquery_client
):
from google.cloud import bigquery
test_size = 10
df = make_mixed_dataframe_v2(test_size)
non_existing_tokyo_dataset = random_dataset_id
non_existing_tokyo_destination = "{}.to_gbq_test".format(
non_existing_tokyo_dataset
)
# Initialize table with sample data
gbq.to_gbq(
df,
non_existing_tokyo_destination,
project_id,
credentials=self.credentials,
location="asia-northeast1",
)
table = bigquery_client.get_table(
bigquery.TableReference(
bigquery.DatasetReference(
project_id, non_existing_tokyo_dataset
),
"to_gbq_test",
)
)
assert table.num_rows > 0
# _Dataset tests
def test_create_dataset(
bigquery_client, gbq_dataset, random_dataset_id, project_id
):
from google.cloud import bigquery
gbq_dataset.create(random_dataset_id)
dataset_reference = bigquery.DatasetReference(
project_id, random_dataset_id
)
assert bigquery_client.get_dataset(dataset_reference) is not None
def test_create_dataset_already_exists(gbq_dataset, random_dataset_id):
gbq_dataset.create(random_dataset_id)
with pytest.raises(gbq.DatasetCreationError):
gbq_dataset.create(random_dataset_id)
def test_dataset_exists(gbq_dataset, random_dataset_id):
gbq_dataset.create(random_dataset_id)
assert gbq_dataset.exists(random_dataset_id)
def test_dataset_does_not_exist(gbq_dataset, random_dataset_id):
assert not gbq_dataset.exists(random_dataset_id)
# _Table tests
def test_create_table(gbq_table):
schema = gbq._generate_bq_schema(tm.makeMixedDataFrame())
gbq_table.create("test_create_table", schema)
assert gbq_table.exists("test_create_table")
def test_create_table_already_exists(gbq_table):
schema = gbq._generate_bq_schema(tm.makeMixedDataFrame())
gbq_table.create("test_create_table_exists", schema)
with pytest.raises(gbq.TableCreationError):
gbq_table.create("test_create_table_exists", schema)
def test_table_does_not_exist(gbq_table):
assert not gbq_table.exists("test_table_does_not_exist")
def test_delete_table(gbq_table):
test_schema = {
"fields": [
{"name": "A", "type": "FLOAT"},
{"name": "B", "type": "FLOAT"},
{"name": "C", "type": "STRING"},
{"name": "D", "type": "TIMESTAMP"},
]
}
gbq_table.create("test_delete_table", test_schema)
gbq_table.delete("test_delete_table")
assert not gbq_table.exists("test_delete_table")
def test_delete_table_not_found(gbq_table):
with pytest.raises(gbq.NotFoundException):
gbq_table.delete("test_delete_table_not_found")
def test_create_table_data_dataset_does_not_exist(
project, credentials, gbq_dataset, random_dataset_id
):
table_id = "test_create_table_data_dataset_does_not_exist"
table_with_new_dataset = gbq._Table(
project, random_dataset_id, credentials=credentials
)
df = make_mixed_dataframe_v2(10)
table_with_new_dataset.create(table_id, gbq._generate_bq_schema(df))
assert gbq_dataset.exists(random_dataset_id)
assert table_with_new_dataset.exists(table_id)
def test_verify_schema_allows_flexible_column_order(gbq_table, gbq_connector):
table_id = "test_verify_schema_allows_flexible_column_order"
test_schema_1 = {
"fields": [
{"name": "A", "type": "FLOAT"},
{"name": "B", "type": "FLOAT"},
{"name": "C", "type": "STRING"},
{"name": "D", "type": "TIMESTAMP"},
]
}
test_schema_2 = {
"fields": [
{"name": "A", "type": "FLOAT"},
{"name": "C", "type": "STRING"},
{"name": "B", "type": "FLOAT"},
{"name": "D", "type": "TIMESTAMP"},
]
}
gbq_table.create(table_id, test_schema_1)
assert verify_schema(
gbq_connector, gbq_table.dataset_id, table_id, test_schema_2
)
def test_verify_schema_fails_different_data_type(gbq_table, gbq_connector):
table_id = "test_verify_schema_fails_different_data_type"
test_schema_1 = {
"fields": [
{"name": "A", "type": "FLOAT"},
{"name": "B", "type": "FLOAT"},
{"name": "C", "type": "STRING"},
{"name": "D", "type": "TIMESTAMP"},
]
}
test_schema_2 = {
"fields": [
{"name": "A", "type": "FLOAT"},
{"name": "B", "type": "STRING"},
{"name": "C", "type": "STRING"},
{"name": "D", "type": "TIMESTAMP"},
]
}
gbq_table.create(table_id, test_schema_1)
assert not verify_schema(
gbq_connector, gbq_table.dataset_id, table_id, test_schema_2
)
def test_verify_schema_fails_different_structure(gbq_table, gbq_connector):
table_id = "test_verify_schema_fails_different_structure"
test_schema_1 = {
"fields": [
{"name": "A", "type": "FLOAT"},
{"name": "B", "type": "FLOAT"},
{"name": "C", "type": "STRING"},
{"name": "D", "type": "TIMESTAMP"},
]
}
test_schema_2 = {
"fields": [
{"name": "A", "type": "FLOAT"},
{"name": "B2", "type": "FLOAT"},
{"name": "C", "type": "STRING"},
{"name": "D", "type": "TIMESTAMP"},
]
}
gbq_table.create(table_id, test_schema_1)
assert not verify_schema(
gbq_connector, gbq_table.dataset_id, table_id, test_schema_2
)
def test_verify_schema_ignores_field_mode(gbq_table, gbq_connector):
table_id = "test_verify_schema_ignores_field_mode"
test_schema_1 = {
"fields": [
{"name": "A", "type": "FLOAT", "mode": "NULLABLE"},
{"name": "B", "type": "FLOAT", "mode": "NULLABLE"},
{"name": "C", "type": "STRING", "mode": "NULLABLE"},
{"name": "D", "type": "TIMESTAMP", "mode": "REQUIRED"},
]
}
test_schema_2 = {
"fields": [
{"name": "A", "type": "FLOAT"},
{"name": "B", "type": "FLOAT"},
{"name": "C", "type": "STRING"},
{"name": "D", "type": "TIMESTAMP"},
]
}
gbq_table.create(table_id, test_schema_1)
assert verify_schema(
gbq_connector, gbq_table.dataset_id, table_id, test_schema_2
)
def test_retrieve_schema(gbq_table, gbq_connector):
    # Issue #24: the schema function returns the schema stored in BigQuery
table_id = "test_retrieve_schema"
test_schema = {
"fields": [
{
"name": "A",
"type": "FLOAT",
"mode": "NULLABLE",
"description": None,
},
{
"name": "B",
"type": "FLOAT",
"mode": "NULLABLE",
"description": None,
},
{
"name": "C",
"type": "STRING",
"mode": "NULLABLE",
"description": None,
},
{
"name": "D",
"type": "TIMESTAMP",
"mode": "NULLABLE",
"description": None,
},
]
}
gbq_table.create(table_id, test_schema)
expected = [
{"name": "A", "type": "FLOAT"},
{"name": "B", "type": "FLOAT"},
{"name": "C", "type": "STRING"},
{"name": "D", "type": "TIMESTAMP"},
]
assert verify_schema(
gbq_connector, gbq_table.dataset_id, table_id, {"fields": expected}
)
def test_to_gbq_does_not_override_mode(gbq_table, gbq_connector):
# See: https://github.com/pydata/pandas-gbq/issues/315
table_id = "test_to_gbq_does_not_override_mode"
table_schema = {
"fields": [
{
"mode": "REQUIRED",
"name": "A",
"type": "FLOAT",
"description": "A",
},
{
"mode": "NULLABLE",
"name": "B",
"type": "FLOAT",
"description": "B",
},
{
"mode": "NULLABLE",
"name": "C",
"type": "STRING",
"description": "C",
},
]
}
gbq_table.create(table_id, table_schema)
gbq.to_gbq(
pandas.DataFrame({"A": [1.0], "B": [2.0], "C": ["a"]}),
"{0}.{1}".format(gbq_table.dataset_id, table_id),
project_id=gbq_connector.project_id,
if_exists="append",
)
assert verify_schema(
gbq_connector, gbq_table.dataset_id, table_id, table_schema
)
| 32.766775 | 91 | 0.559625 |
955c54a0895bf7d59c609838e0de87150c504877 | 362 | py | Python | pacote-download/ex023.py | LeticiaTr/Exerc-cios-em-Python | 97f62ad36f958ce6f1386a55a7473adc85ddf415 | [
"MIT"
] | null | null | null | pacote-download/ex023.py | LeticiaTr/Exerc-cios-em-Python | 97f62ad36f958ce6f1386a55a7473adc85ddf415 | [
"MIT"
] | null | null | null | pacote-download/ex023.py | LeticiaTr/Exerc-cios-em-Python | 97f62ad36f958ce6f1386a55a7473adc85ddf415 | [
"MIT"
] | null | null | null | # Faça um programa que leia um número de 0 a 9999 e mostre na tela cada um dos dígitos separados.
num = int(input(' Digite um número de 0 a 9999 ' ))
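# Integer division (//) and modulo (%) isolate each decimal digit of the number.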
u = num // 1 % 10
d = num // 10 % 10
c = num // 100 % 10
m = num // 1000 % 10
print(f'Analyzing the number {num}')
print(f'Units {u}')
print(f'Tens {d}')
print(f'Hundreds {c}')
print(f'Thousands {m}') | 32.909091 | 97 | 0.61326 |
0968381362e430cf37671b2d806032fcbcb76238 | 1,989 | py | Python | bloom/audio/sfx.py | thomasrogers03/bloom | 5d49c18a241216aca354aa79971940691e6f33b4 | [
"Apache-2.0"
] | 9 | 2020-11-22T03:04:52.000Z | 2022-01-17T15:36:25.000Z | bloom/audio/sfx.py | thomasrogers03/bloom | 5d49c18a241216aca354aa79971940691e6f33b4 | [
"Apache-2.0"
] | null | null | null | bloom/audio/sfx.py | thomasrogers03/bloom | 5d49c18a241216aca354aa79971940691e6f33b4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Thomas Rogers
# SPDX-License-Identifier: Apache-2.0
import wave
from .. import data_loading, rff
class SoundDescriptor(data_loading.CustomStruct):
relative_volume: data_loading.Int32
pitch: data_loading.Int32
pitch_range: data_loading.Int32
sound_format: data_loading.Int32
loop_start: data_loading.Int32
class Sound:
def __init__(self, sounds_rff: rff.RFF, sound_descriptor_data: bytes):
unpacker = data_loading.Unpacker(sound_descriptor_data)
self._descriptor = unpacker.read_struct(SoundDescriptor)
if self._descriptor.sound_format == 1:
self._sample_rate = 11025
elif self._descriptor.sound_format == 5:
self._sample_rate = 22050
else:
raise ValueError("Unsupported SFX")
raw_name = unpacker.read_remaining().decode().rstrip("\x00").upper()
self._raw = sounds_rff.data_for_entry(f"{raw_name}.RAW")
@staticmethod
def load(sounds_rff: rff.RFF, sound_sfx_name: str):
descriptor = sounds_rff.data_for_entry(sound_sfx_name)
if descriptor is None:
return None
return Sound(sounds_rff, descriptor)
@property
def pitch(self):
return self._descriptor.pitch / float(1 << 16)
@property
def relative_volume(self):
return self._descriptor.relative_volume / float(1 << 6)
@property
def is_looping(self):
return self._descriptor.loop_start >= 0
@property
def loop_time(self):
return self._descriptor.loop_start / self._sample_rate
@property
def sample_count(self):
return len(self._raw)
@property
def has_data(self):
return self._raw is not None
def create_wav(self, path: str):
with wave.open(path, "wb") as wave_file:
wave_file.setnchannels(1)
wave_file.setsampwidth(1)
wave_file.setframerate(self._sample_rate)
wave_file.writeframesraw(self._raw)
| 28.414286 | 76 | 0.671694 |
c6cce9b2dcf790d0a12e9f9017730cb92cd8f6b6 | 23,818 | py | Python | openstack/tests/unit/cloud/test_stack.py | NeCTAR-RC/openstacksdk | 60a24f6c4717a1f9a0e545c9a07e68afaedc5a27 | [
"Apache-2.0"
] | null | null | null | openstack/tests/unit/cloud/test_stack.py | NeCTAR-RC/openstacksdk | 60a24f6c4717a1f9a0e545c9a07e68afaedc5a27 | [
"Apache-2.0"
] | null | null | null | openstack/tests/unit/cloud/test_stack.py | NeCTAR-RC/openstacksdk | 60a24f6c4717a1f9a0e545c9a07e68afaedc5a27 | [
"Apache-2.0"
] | 1 | 2020-07-21T02:18:23.000Z | 2020-07-21T02:18:23.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tempfile
import testtools
import openstack.cloud
from openstack.tests import fakes
from openstack.tests.unit import base
from openstack.orchestration.v1 import stack
class TestStack(base.TestCase):
def setUp(self):
super(TestStack, self).setUp()
self.stack_id = self.getUniqueString('id')
self.stack_name = self.getUniqueString('name')
self.stack_tag = self.getUniqueString('tag')
self.stack = fakes.make_fake_stack(self.stack_id, self.stack_name)
def test_list_stacks(self):
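        # register_uris stubs the orchestration HTTP endpoints;
        # assert_calls() later verifies they were requested as expected.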
fake_stacks = [
self.stack,
fakes.make_fake_stack(
self.getUniqueString('id'),
self.getUniqueString('name'))
]
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT),
json={"stacks": fake_stacks}),
])
stacks = self.cloud.list_stacks()
self.assertEqual(
[f.toDict() for f in self.cloud._normalize_stacks(
stack.Stack(**st) for st in fake_stacks)],
[f.toDict() for f in stacks])
self.assert_calls()
def test_list_stacks_exception(self):
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT),
status_code=404)
])
with testtools.ExpectedException(
openstack.cloud.OpenStackCloudURINotFound):
self.cloud.list_stacks()
self.assert_calls()
def test_search_stacks(self):
fake_stacks = [
self.stack,
fakes.make_fake_stack(
self.getUniqueString('id'),
self.getUniqueString('name'))
]
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT),
json={"stacks": fake_stacks}),
])
stacks = self.cloud.search_stacks()
self.assertEqual(
self.cloud._normalize_stacks(
stack.Stack(**st) for st in fake_stacks),
stacks)
self.assert_calls()
def test_search_stacks_filters(self):
fake_stacks = [
self.stack,
fakes.make_fake_stack(
self.getUniqueString('id'),
self.getUniqueString('name'),
status='CREATE_FAILED')
]
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT),
json={"stacks": fake_stacks}),
])
filters = {'status': 'FAILED'}
stacks = self.cloud.search_stacks(filters=filters)
self.assertEqual(
self.cloud._normalize_stacks(
stack.Stack(**st) for st in fake_stacks[1:]),
stacks)
self.assert_calls()
def test_search_stacks_exception(self):
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT),
status_code=404)
])
with testtools.ExpectedException(
openstack.cloud.OpenStackCloudURINotFound):
self.cloud.search_stacks()
def test_delete_stack(self):
resolve = 'resolve_outputs=False'
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks/{name}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name, resolve=resolve),
status_code=302,
headers=dict(
location='{endpoint}/stacks/{name}/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name,
resolve=resolve))),
dict(method='GET',
uri='{endpoint}/stacks/{name}/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name, resolve=resolve),
json={"stack": self.stack}),
dict(method='DELETE',
uri='{endpoint}/stacks/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id)),
])
self.assertTrue(self.cloud.delete_stack(self.stack_name))
self.assert_calls()
def test_delete_stack_not_found(self):
resolve = 'resolve_outputs=False'
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks/stack_name?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT, resolve=resolve),
status_code=404),
])
self.assertFalse(self.cloud.delete_stack('stack_name'))
self.assert_calls()
def test_delete_stack_exception(self):
resolve = 'resolve_outputs=False'
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, resolve=resolve),
status_code=302,
headers=dict(
location='{endpoint}/stacks/{name}/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name,
resolve=resolve))),
dict(method='GET',
uri='{endpoint}/stacks/{name}/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name, resolve=resolve),
json={"stack": self.stack}),
dict(method='DELETE',
uri='{endpoint}/stacks/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id),
status_code=400,
reason="ouch"),
])
with testtools.ExpectedException(
openstack.cloud.OpenStackCloudBadRequest):
self.cloud.delete_stack(self.stack_id)
self.assert_calls()
def test_delete_stack_wait(self):
marker_event = fakes.make_fake_stack_event(
self.stack_id, self.stack_name, status='CREATE_COMPLETE')
marker_qs = 'marker={e_id}&sort_dir=asc'.format(
e_id=marker_event['id'])
resolve = 'resolve_outputs=False'
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id,
resolve=resolve),
status_code=302,
headers=dict(
location='{endpoint}/stacks/{name}/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name,
resolve=resolve))),
dict(method='GET',
uri='{endpoint}/stacks/{name}/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name, resolve=resolve),
json={"stack": self.stack}),
dict(method='GET',
uri='{endpoint}/stacks/{id}/events?{qs}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id,
qs='limit=1&sort_dir=desc'),
complete_qs=True,
json={"events": [marker_event]}),
dict(method='DELETE',
uri='{endpoint}/stacks/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id)),
dict(method='GET',
uri='{endpoint}/stacks/{id}/events?{qs}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id,
qs=marker_qs),
complete_qs=True,
json={"events": [
fakes.make_fake_stack_event(
self.stack_id, self.stack_name,
status='DELETE_COMPLETE'),
]}),
dict(method='GET',
uri='{endpoint}/stacks/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name, resolve=resolve),
status_code=404),
])
self.assertTrue(self.cloud.delete_stack(self.stack_id, wait=True))
self.assert_calls()
def test_delete_stack_wait_failed(self):
failed_stack = self.stack.copy()
failed_stack['stack_status'] = 'DELETE_FAILED'
marker_event = fakes.make_fake_stack_event(
self.stack_id, self.stack_name, status='CREATE_COMPLETE')
marker_qs = 'marker={e_id}&sort_dir=asc'.format(
e_id=marker_event['id'])
resolve = 'resolve_outputs=False'
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, resolve=resolve),
status_code=302,
headers=dict(
location='{endpoint}/stacks/{name}/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name,
resolve=resolve))),
dict(method='GET',
uri='{endpoint}/stacks/{name}/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name, resolve=resolve),
json={"stack": self.stack}),
dict(method='GET',
uri='{endpoint}/stacks/{id}/events?{qs}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id,
qs='limit=1&sort_dir=desc'),
complete_qs=True,
json={"events": [marker_event]}),
dict(method='DELETE',
uri='{endpoint}/stacks/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id)),
dict(method='GET',
uri='{endpoint}/stacks/{id}/events?{qs}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id,
qs=marker_qs),
complete_qs=True,
json={"events": [
fakes.make_fake_stack_event(
self.stack_id, self.stack_name,
status='DELETE_COMPLETE'),
]}),
dict(method='GET',
uri='{endpoint}/stacks/{id}?resolve_outputs=False'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name),
status_code=302,
headers=dict(
location='{endpoint}/stacks/{name}/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name,
resolve=resolve))),
dict(method='GET',
uri='{endpoint}/stacks/{name}/{id}?{resolve}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name, resolve=resolve),
json={"stack": failed_stack}),
])
with testtools.ExpectedException(
openstack.cloud.OpenStackCloudException):
self.cloud.delete_stack(self.stack_id, wait=True)
self.assert_calls()
def test_create_stack(self):
test_template = tempfile.NamedTemporaryFile(delete=False)
test_template.write(fakes.FAKE_TEMPLATE.encode('utf-8'))
test_template.close()
self.register_uris([
dict(
method='POST', uri='{endpoint}/stacks'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT),
json={"stack": self.stack},
validate=dict(
json={
'disable_rollback': False,
'parameters': {},
'stack_name': self.stack_name,
'tags': self.stack_tag,
'template': fakes.FAKE_TEMPLATE_CONTENT,
'timeout_mins': 60}
)),
dict(
method='GET',
uri='{endpoint}/stacks/{name}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name),
status_code=302,
headers=dict(
location='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name))),
dict(
method='GET',
uri='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name),
json={"stack": self.stack}),
])
self.cloud.create_stack(
self.stack_name,
tags=self.stack_tag,
template_file=test_template.name
)
self.assert_calls()
def test_create_stack_wait(self):
test_template = tempfile.NamedTemporaryFile(delete=False)
test_template.write(fakes.FAKE_TEMPLATE.encode('utf-8'))
test_template.close()
self.register_uris([
dict(
method='POST', uri='{endpoint}/stacks'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT),
json={"stack": self.stack},
validate=dict(
json={
'disable_rollback': False,
'parameters': {},
'stack_name': self.stack_name,
'tags': self.stack_tag,
'template': fakes.FAKE_TEMPLATE_CONTENT,
'timeout_mins': 60}
)),
dict(
method='GET',
uri='{endpoint}/stacks/{name}/events?sort_dir=asc'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name),
json={"events": [
fakes.make_fake_stack_event(
self.stack_id, self.stack_name,
status='CREATE_COMPLETE',
resource_name='name'),
]}),
dict(
method='GET',
uri='{endpoint}/stacks/{name}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name),
status_code=302,
headers=dict(
location='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name))),
dict(
method='GET',
uri='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name),
json={"stack": self.stack}),
])
self.cloud.create_stack(
self.stack_name,
tags=self.stack_tag,
template_file=test_template.name,
wait=True)
self.assert_calls()
def test_update_stack(self):
test_template = tempfile.NamedTemporaryFile(delete=False)
test_template.write(fakes.FAKE_TEMPLATE.encode('utf-8'))
test_template.close()
self.register_uris([
dict(
method='PUT',
uri='{endpoint}/stacks/{name}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name),
validate=dict(
json={
'disable_rollback': False,
'parameters': {},
'tags': self.stack_tag,
'template': fakes.FAKE_TEMPLATE_CONTENT,
'timeout_mins': 60}),
json={}),
dict(
method='GET',
uri='{endpoint}/stacks/{name}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name),
status_code=302,
headers=dict(
location='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name))),
dict(
method='GET',
uri='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name),
json={"stack": self.stack}),
])
self.cloud.update_stack(
self.stack_name,
tags=self.stack_tag,
template_file=test_template.name)
self.assert_calls()
def test_update_stack_wait(self):
marker_event = fakes.make_fake_stack_event(
self.stack_id, self.stack_name, status='CREATE_COMPLETE',
resource_name='name')
marker_qs = 'marker={e_id}&sort_dir=asc'.format(
e_id=marker_event['id'])
test_template = tempfile.NamedTemporaryFile(delete=False)
test_template.write(fakes.FAKE_TEMPLATE.encode('utf-8'))
test_template.close()
self.register_uris([
dict(
method='GET',
uri='{endpoint}/stacks/{name}/events?{qs}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name,
qs='limit=1&sort_dir=desc'),
json={"events": [marker_event]}),
dict(
method='PUT',
uri='{endpoint}/stacks/{name}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name),
validate=dict(
json={
'disable_rollback': False,
'parameters': {},
'tags': self.stack_tag,
'template': fakes.FAKE_TEMPLATE_CONTENT,
'timeout_mins': 60}),
json={}),
dict(
method='GET',
uri='{endpoint}/stacks/{name}/events?{qs}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name,
qs=marker_qs),
json={"events": [
fakes.make_fake_stack_event(
self.stack_id, self.stack_name,
status='UPDATE_COMPLETE',
resource_name='name'),
]}),
dict(
method='GET',
uri='{endpoint}/stacks/{name}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name),
status_code=302,
headers=dict(
location='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name))),
dict(
method='GET',
uri='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name),
json={"stack": self.stack}),
])
self.cloud.update_stack(
self.stack_name,
tags=self.stack_tag,
template_file=test_template.name,
wait=True)
self.assert_calls()
def test_get_stack(self):
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks/{name}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name),
status_code=302,
headers=dict(
location='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name))),
dict(method='GET',
uri='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name),
json={"stack": self.stack}),
])
res = self.cloud.get_stack(self.stack_name)
self.assertIsNotNone(res)
self.assertEqual(self.stack['stack_name'], res['stack_name'])
self.assertEqual(self.stack['stack_name'], res['name'])
self.assertEqual(self.stack['stack_status'], res['stack_status'])
self.assertEqual('COMPLETE', res['status'])
self.assert_calls()
def test_get_stack_in_progress(self):
in_progress = self.stack.copy()
in_progress['stack_status'] = 'CREATE_IN_PROGRESS'
self.register_uris([
dict(method='GET',
uri='{endpoint}/stacks/{name}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
name=self.stack_name),
status_code=302,
headers=dict(
location='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name))),
dict(method='GET',
uri='{endpoint}/stacks/{name}/{id}'.format(
endpoint=fakes.ORCHESTRATION_ENDPOINT,
id=self.stack_id, name=self.stack_name),
json={"stack": in_progress}),
])
res = self.cloud.get_stack(self.stack_name)
self.assertIsNotNone(res)
self.assertEqual(in_progress['stack_name'], res['stack_name'])
self.assertEqual(in_progress['stack_name'], res['name'])
self.assertEqual(in_progress['stack_status'], res['stack_status'])
self.assertEqual('CREATE', res['action'])
self.assertEqual('IN_PROGRESS', res['status'])
self.assert_calls()
| 41.279029 | 79 | 0.515996 |
5b9dbe6e2fd672749ee875fabd90c219510851f1 | 447 | py | Python | test/test_fapi.py | williamcroberts/tpm2-pytss | b0d37a6d28509fccd9be29c050c7091255f10f92 | [
"BSD-2-Clause"
] | null | null | null | test/test_fapi.py | williamcroberts/tpm2-pytss | b0d37a6d28509fccd9be29c050c7091255f10f92 | [
"BSD-2-Clause"
] | null | null | null | test/test_fapi.py | williamcroberts/tpm2-pytss | b0d37a6d28509fccd9be29c050c7091255f10f92 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/python3 -u
"""
SPDX-License-Identifier: BSD-3
"""
import unittest
import pytest
from tpm2_pytss import *
from .TSS2_BaseTest import TSS2_FapiTest
@pytest.mark.forked
class TestFapi(TSS2_FapiTest):
def testProvision(self):
r = self.fapi.provision()
self.assertEqual(r, False)
def testGetRandom(self):
r = self.fapi.get_random(42)
self.assertEqual(len(r), 42)
if __name__ == "__main__":
unittest.main()
| 17.88 | 40 | 0.671141 |
9b73b3630e63a02c7304b276dd92dc909edd12a1 | 2,780 | py | Python | Configuration/Geometry/python/GeometryExtended2026D92Reco_cff.py | menglu21/cmssw | c3d6cb102c0aaddf652805743370c28044d53da6 | [
"Apache-2.0"
] | null | null | null | Configuration/Geometry/python/GeometryExtended2026D92Reco_cff.py | menglu21/cmssw | c3d6cb102c0aaddf652805743370c28044d53da6 | [
"Apache-2.0"
] | null | null | null | Configuration/Geometry/python/GeometryExtended2026D92Reco_cff.py | menglu21/cmssw | c3d6cb102c0aaddf652805743370c28044d53da6 | [
"Apache-2.0"
] | null | null | null | import FWCore.ParameterSet.Config as cms
# This config was generated automatically using generate2026Geometry.py
# If you notice a mistake, please update the generating script, not just this config
from Configuration.Geometry.GeometryExtended2026D92_cff import *
# tracker
from Geometry.CommonTopologies.globalTrackingGeometry_cfi import *
from RecoTracker.GeometryESProducer.TrackerRecoGeometryESProducer_cfi import *
from Geometry.TrackerGeometryBuilder.TrackerAdditionalParametersPerDet_cfi import *
from Geometry.TrackerGeometryBuilder.trackerParameters_cff import *
from Geometry.TrackerNumberingBuilder.trackerTopology_cfi import *
from Geometry.TrackerGeometryBuilder.idealForDigiTrackerGeometry_cff import *
trackerGeometry.applyAlignment = cms.bool(False)
# calo
from Geometry.CaloEventSetup.HGCalV9Topology_cfi import *
from Geometry.HGCalGeometry.HGCalGeometryESProducer_cfi import *
from Geometry.CaloEventSetup.CaloTopology_cfi import *
from Geometry.CaloEventSetup.CaloGeometryBuilder_cfi import *
CaloGeometryBuilder = cms.ESProducer("CaloGeometryBuilder",
SelectedCalos = cms.vstring("HCAL",
"ZDC",
"EcalBarrel",
"TOWER",
"HGCalEESensitive",
"HGCalHESiliconSensitive",
"HGCalHEScintillatorSensitive"
)
)
from Geometry.EcalAlgo.EcalBarrelGeometry_cfi import *
from Geometry.HcalEventSetup.HcalGeometry_cfi import *
from Geometry.HcalEventSetup.CaloTowerGeometry_cfi import *
from Geometry.HcalEventSetup.CaloTowerTopology_cfi import *
from Geometry.HcalCommonData.hcalDDDRecConstants_cfi import *
from Geometry.HcalEventSetup.hcalTopologyIdeal_cfi import *
from Geometry.CaloEventSetup.EcalTrigTowerConstituents_cfi import *
from Geometry.EcalMapping.EcalMapping_cfi import *
from Geometry.EcalMapping.EcalMappingRecord_cfi import *
# muon
from Geometry.MuonNumbering.muonNumberingInitialization_cfi import *
from RecoMuon.DetLayers.muonDetLayerGeometry_cfi import *
from Geometry.GEMGeometryBuilder.gemGeometry_cff import *
from Geometry.CSCGeometryBuilder.idealForDigiCscGeometry_cff import *
from Geometry.DTGeometryBuilder.idealForDigiDtGeometry_cff import *
# forward
from Geometry.ForwardGeometry.ForwardGeometry_cfi import *
# timing
from RecoMTD.DetLayers.mtdDetLayerGeometry_cfi import *
from Geometry.MTDGeometryBuilder.mtdParameters_cff import *
from Geometry.MTDNumberingBuilder.mtdNumberingGeometry_cff import *
from Geometry.MTDNumberingBuilder.mtdTopology_cfi import *
from Geometry.MTDGeometryBuilder.mtdGeometry_cfi import *
from Geometry.MTDGeometryBuilder.idealForDigiMTDGeometry_cff import *
mtdGeometry.applyAlignment = cms.bool(False)
| 45.57377 | 84 | 0.805036 |
ac80c2a993de2758a63407b3c0c93b108661f206 | 3,876 | py | Python | kubernetes/client/models/v1beta1_host_port_range.py | dix000p/kubernetes-client-python | 22e473e02883aca1058606092c86311f02f42be2 | [
"Apache-2.0"
] | null | null | null | kubernetes/client/models/v1beta1_host_port_range.py | dix000p/kubernetes-client-python | 22e473e02883aca1058606092c86311f02f42be2 | [
"Apache-2.0"
] | null | null | null | kubernetes/client/models/v1beta1_host_port_range.py | dix000p/kubernetes-client-python | 22e473e02883aca1058606092c86311f02f42be2 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1HostPortRange(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'max': 'int',
'min': 'int'
}
attribute_map = {
'max': 'max',
'min': 'min'
}
def __init__(self, max=None, min=None):
"""
V1beta1HostPortRange - a model defined in Swagger
"""
self._max = None
self._min = None
self.discriminator = None
self.max = max
self.min = min
@property
def max(self):
"""
Gets the max of this V1beta1HostPortRange.
max is the end of the range, inclusive.
:return: The max of this V1beta1HostPortRange.
:rtype: int
"""
return self._max
@max.setter
def max(self, max):
"""
Sets the max of this V1beta1HostPortRange.
max is the end of the range, inclusive.
:param max: The max of this V1beta1HostPortRange.
:type: int
"""
if max is None:
raise ValueError("Invalid value for `max`, must not be `None`")
self._max = max
@property
def min(self):
"""
Gets the min of this V1beta1HostPortRange.
min is the start of the range, inclusive.
:return: The min of this V1beta1HostPortRange.
:rtype: int
"""
return self._min
@min.setter
def min(self, min):
"""
Sets the min of this V1beta1HostPortRange.
min is the start of the range, inclusive.
:param min: The min of this V1beta1HostPortRange.
:type: int
"""
if min is None:
raise ValueError("Invalid value for `min`, must not be `None`")
self._min = min
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta1HostPortRange):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| 24.687898 | 105 | 0.53483 |
d0ae0797b1dc576fd593825e93d6175805485ec9 | 13,300 | py | Python | kale/harvester/harvester_api.py | inan0812/kale-blockchain | 1b502fe21a4be10b4db0171c3a7030079dcefa1b | [
"Apache-2.0"
] | null | null | null | kale/harvester/harvester_api.py | inan0812/kale-blockchain | 1b502fe21a4be10b4db0171c3a7030079dcefa1b | [
"Apache-2.0"
] | null | null | null | kale/harvester/harvester_api.py | inan0812/kale-blockchain | 1b502fe21a4be10b4db0171c3a7030079dcefa1b | [
"Apache-2.0"
] | null | null | null | import asyncio
import time
from pathlib import Path
from typing import Callable, List, Tuple
from blspy import AugSchemeMPL, G2Element
from kale.consensus.pot_iterations import calculate_iterations_quality, calculate_sp_interval_iters
from kale.harvester.harvester import Harvester
from kale.plotting.plot_tools import PlotInfo, parse_plot_info
from kale.protocols import harvester_protocol
from kale.protocols.farmer_protocol import FarmingInfo
from kale.protocols.protocol_message_types import ProtocolMessageTypes
from kale.server.outbound_message import make_msg
from kale.server.ws_connection import WSKaleConnection
from kale.types.blockchain_format.proof_of_space import ProofOfSpace
from kale.types.blockchain_format.sized_bytes import bytes32
from kale.util.api_decorators import api_request, peer_required
from kale.util.ints import uint8, uint32, uint64
from kale.wallet.derive_keys import master_sk_to_local_sk
class HarvesterAPI:
harvester: Harvester
def __init__(self, harvester: Harvester):
self.harvester = harvester
def _set_state_changed_callback(self, callback: Callable):
self.harvester.state_changed_callback = callback
@api_request
async def harvester_handshake(self, harvester_handshake: harvester_protocol.HarvesterHandshake):
"""
Handshake between the harvester and farmer. The harvester receives the pool public keys,
as well as the farmer pks, which must be put into the plots, before the plotting process begins.
We cannot use any plots which have different keys in them.
"""
self.harvester.farmer_public_keys = harvester_handshake.farmer_public_keys
self.harvester.pool_public_keys = harvester_handshake.pool_public_keys
await self.harvester.refresh_plots()
if len(self.harvester.provers) == 0:
self.harvester.log.warning("Not farming any plots on this harvester. Check your configuration.")
return None
@peer_required
@api_request
async def new_signage_point_harvester(
self, new_challenge: harvester_protocol.NewSignagePointHarvester, peer: WSKaleConnection
):
"""
The harvester receives a new signage point from the farmer, this happens at the start of each slot.
The harvester does a few things:
1. The harvester applies the plot filter for each of the plots, to select the proportion which are eligible
for this signage point and challenge.
2. The harvester gets the qualities for each plot. This is approximately 7 reads per plot which qualifies.
Note that each plot may have 0, 1, 2, etc qualities for that challenge: but on average it will have 1.
3. Checks the required_iters for each quality and the given signage point, to see which are eligible for
inclusion (required_iters < sp_interval_iters).
4. Looks up the full proof of space in the plot for each quality, approximately 64 reads per quality
5. Returns the proof of space to the farmer
"""
if len(self.harvester.pool_public_keys) == 0 or len(self.harvester.farmer_public_keys) == 0:
# This means that we have not received the handshake yet
return None
start = time.time()
assert len(new_challenge.challenge_hash) == 32
# Refresh plots to see if there are any new ones
if start - self.harvester.last_load_time > self.harvester.plot_load_frequency:
await self.harvester.refresh_plots()
self.harvester.last_load_time = time.time()
loop = asyncio.get_running_loop()
def blocking_lookup(filename: Path, plot_info: PlotInfo) -> List[Tuple[bytes32, ProofOfSpace]]:
# Uses the DiskProver object to lookup qualities. This is a blocking call,
# so it should be run in a thread pool.
try:
plot_id = plot_info.prover.get_id()
sp_challenge_hash = ProofOfSpace.calculate_pos_challenge(
plot_id,
new_challenge.challenge_hash,
new_challenge.sp_hash,
)
try:
quality_strings = plot_info.prover.get_qualities_for_challenge(sp_challenge_hash)
except Exception as e:
self.harvester.log.error(f"Error using prover object {e}")
self.harvester.log.error(
f"File: {filename} Plot ID: {plot_id.hex()}, "
f"challenge: {sp_challenge_hash}, plot_info: {plot_info}"
)
return []
responses: List[Tuple[bytes32, ProofOfSpace]] = []
if quality_strings is not None:
# Found proofs of space (on average 1 is expected per plot)
for index, quality_str in enumerate(quality_strings):
required_iters: uint64 = calculate_iterations_quality(
self.harvester.constants.DIFFICULTY_CONSTANT_FACTOR,
quality_str,
plot_info.prover.get_size(),
new_challenge.difficulty,
new_challenge.sp_hash,
)
sp_interval_iters = calculate_sp_interval_iters(
self.harvester.constants, new_challenge.sub_slot_iters
)
if required_iters < sp_interval_iters:
# Found a very good proof of space! will fetch the whole proof from disk,
# then send to farmer
try:
proof_xs = plot_info.prover.get_full_proof(sp_challenge_hash, index)
except Exception as e:
self.harvester.log.error(f"Exception fetching full proof for {filename}. {e}")
self.harvester.log.error(
f"File: {filename} Plot ID: {plot_id.hex()}, challenge: {sp_challenge_hash}, "
f"plot_info: {plot_info}"
)
continue
# Look up local_sk from plot to save locked memory
(
pool_public_key_or_puzzle_hash,
farmer_public_key,
local_master_sk,
) = parse_plot_info(plot_info.prover.get_memo())
local_sk = master_sk_to_local_sk(local_master_sk)
plot_public_key = ProofOfSpace.generate_plot_public_key(
local_sk.get_g1(), farmer_public_key
)
responses.append(
(
quality_str,
ProofOfSpace(
sp_challenge_hash,
plot_info.pool_public_key,
plot_info.pool_contract_puzzle_hash,
plot_public_key,
uint8(plot_info.prover.get_size()),
proof_xs,
),
)
)
return responses
except Exception as e:
self.harvester.log.error(f"Unknown error: {e}")
return []
async def lookup_challenge(
filename: Path, plot_info: PlotInfo
) -> Tuple[Path, List[harvester_protocol.NewProofOfSpace]]:
# Executes a DiskProverLookup in a thread pool, and returns responses
all_responses: List[harvester_protocol.NewProofOfSpace] = []
if self.harvester._is_shutdown:
return filename, []
proofs_of_space_and_q: List[Tuple[bytes32, ProofOfSpace]] = await loop.run_in_executor(
self.harvester.executor, blocking_lookup, filename, plot_info
)
for quality_str, proof_of_space in proofs_of_space_and_q:
all_responses.append(
harvester_protocol.NewProofOfSpace(
new_challenge.challenge_hash,
new_challenge.sp_hash,
quality_str.hex() + str(filename.resolve()),
proof_of_space,
new_challenge.signage_point_index,
)
)
return filename, all_responses
awaitables = []
passed = 0
total = 0
for try_plot_filename, try_plot_info in self.harvester.provers.items():
try:
if try_plot_filename.exists():
# Passes the plot filter (does not check sp filter yet though, since we have not reached sp)
# This is being executed at the beginning of the slot
total += 1
if ProofOfSpace.passes_plot_filter(
self.harvester.constants,
try_plot_info.prover.get_id(),
new_challenge.challenge_hash,
new_challenge.sp_hash,
):
passed += 1
awaitables.append(lookup_challenge(try_plot_filename, try_plot_info))
except Exception as e:
self.harvester.log.error(f"Error plot file {try_plot_filename} may no longer exist {e}")
# Concurrently executes all lookups on disk, to take advantage of multiple disk parallelism
total_proofs_found = 0
for filename_sublist_awaitable in asyncio.as_completed(awaitables):
filename, sublist = await filename_sublist_awaitable
time_taken = time.time() - start
if time_taken > 5:
self.harvester.log.warning(
f"Looking up qualities on {filename} took: {time.time() - start}. This should be below 5 seconds "
f"to minimize risk of losing rewards."
)
else:
pass
# If you want additional logs, uncomment the following line
# self.harvester.log.debug(f"Looking up qualities on {filename} took: {time.time() - start}")
for response in sublist:
total_proofs_found += 1
msg = make_msg(ProtocolMessageTypes.new_proof_of_space, response)
await peer.send_message(msg)
now = uint64(int(time.time()))
farming_info = FarmingInfo(
new_challenge.challenge_hash,
new_challenge.sp_hash,
now,
uint32(passed),
uint32(total_proofs_found),
uint32(total),
)
pass_msg = make_msg(ProtocolMessageTypes.farming_info, farming_info)
await peer.send_message(pass_msg)
self.harvester.log.info(
f"{len(awaitables)} plots were eligible for farming {new_challenge.challenge_hash.hex()[:10]}..."
f" Found {total_proofs_found} proofs. Time: {time.time() - start:.5f} s. "
f"Total {len(self.harvester.provers)} plots"
)
@api_request
async def request_signatures(self, request: harvester_protocol.RequestSignatures):
"""
The farmer requests a signature on the header hash, for one of the proofs that we found.
A signature is created on the header hash using the harvester private key. This can also
be used for pooling.
"""
plot_filename = Path(request.plot_identifier[64:]).resolve()
try:
plot_info = self.harvester.provers[plot_filename]
except KeyError:
self.harvester.log.warning(f"KeyError plot {plot_filename} does not exist.")
return None
# Look up local_sk from plot to save locked memory
(
pool_public_key_or_puzzle_hash,
farmer_public_key,
local_master_sk,
) = parse_plot_info(plot_info.prover.get_memo())
local_sk = master_sk_to_local_sk(local_master_sk)
agg_pk = ProofOfSpace.generate_plot_public_key(local_sk.get_g1(), farmer_public_key)
# This is only a partial signature. When combined with the farmer's half, it will
# form a complete PrependSignature.
message_signatures: List[Tuple[bytes32, G2Element]] = []
for message in request.messages:
signature: G2Element = AugSchemeMPL.sign(local_sk, message, agg_pk)
message_signatures.append((message, signature))
response: harvester_protocol.RespondSignatures = harvester_protocol.RespondSignatures(
request.plot_identifier,
request.challenge_hash,
request.sp_hash,
local_sk.get_g1(),
farmer_public_key,
message_signatures,
)
return make_msg(ProtocolMessageTypes.respond_signatures, response)
| 48.717949 | 118 | 0.589624 |
d15e7127db9357062ca9b88f1b82af760489b66a | 583 | py | Python | sweetpea/tests/sampling_strategies/uc-counting-tests/example-4.py | ahsanbutt95/sweetpea-py | d2e2074ef4b20b5f46d8049ca4bb0bf46c3fc705 | [
"MIT"
] | 10 | 2019-03-15T01:30:14.000Z | 2022-03-21T03:41:57.000Z | sweetpea/tests/sampling_strategies/uc-counting-tests/example-4.py | ahsanbutt95/sweetpea-py | d2e2074ef4b20b5f46d8049ca4bb0bf46c3fc705 | [
"MIT"
] | 33 | 2019-03-25T16:30:22.000Z | 2021-07-14T22:31:10.000Z | sweetpea/tests/sampling_strategies/uc-counting-tests/example-4.py | ahsanbutt95/sweetpea-py | d2e2074ef4b20b5f46d8049ca4bb0bf46c3fc705 | [
"MIT"
] | 5 | 2020-09-07T10:20:12.000Z | 2022-01-18T03:11:29.000Z | import operator as op
from sweetpea import fully_cross_block
from sweetpea.primitives import Factor, DerivedLevel, WithinTrial
color = Factor("color", ["red", "blue"])
text = Factor("text", ["red", "blue"])
congruency = Factor("congruency", [
DerivedLevel("congruent", WithinTrial(op.eq, [color, text])),
DerivedLevel("incongruent", WithinTrial(op.ne, [color, text]))
])
direction = Factor("direction", ["up", "down"])
design = [color, text, congruency, direction]
crossing = [color, text]
block = fully_cross_block(design, crossing, [])
# ASSERT COUNT = 384
| 29.15 | 67 | 0.691252 |
77d3e23edd17a79158492d7c3a6200e5ee420f1b | 430 | py | Python | packages/python/plotly/plotly/validators/densitymapbox/_legendgroup.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/validators/densitymapbox/_legendgroup.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/validators/densitymapbox/_legendgroup.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | import _plotly_utils.basevalidators
class LegendgroupValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self, plotly_name="legendgroup", parent_name="densitymapbox", **kwargs
):
super(LegendgroupValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "style"),
**kwargs,
)
| 30.714286 | 78 | 0.665116 |
95debd9170debb032d3850823ecd0fa82e4d2f1c | 1,522 | py | Python | homeassistant/components/braviatv/__init__.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 1 | 2021-03-23T07:20:03.000Z | 2021-03-23T07:20:03.000Z | homeassistant/components/braviatv/__init__.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 51 | 2020-08-03T07:30:44.000Z | 2022-03-22T06:02:42.000Z | homeassistant/components/braviatv/__init__.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 2 | 2021-03-22T21:42:48.000Z | 2021-04-12T12:26:39.000Z | """The Bravia TV component."""
import asyncio
from bravia_tv import BraviaRC
from homeassistant.const import CONF_HOST, CONF_MAC
from .const import BRAVIARC, DOMAIN, UNDO_UPDATE_LISTENER
PLATFORMS = ["media_player"]
async def async_setup(hass, config):
"""Set up the Bravia TV component."""
return True
async def async_setup_entry(hass, config_entry):
"""Set up a config entry."""
host = config_entry.data[CONF_HOST]
mac = config_entry.data[CONF_MAC]
undo_listener = config_entry.add_update_listener(update_listener)
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][config_entry.entry_id] = {
BRAVIARC: BraviaRC(host, mac),
UNDO_UPDATE_LISTENER: undo_listener,
}
for platform in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, platform)
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, platform)
for platform in PLATFORMS
]
)
)
hass.data[DOMAIN][config_entry.entry_id][UNDO_UPDATE_LISTENER]()
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
async def update_listener(hass, config_entry):
"""Handle options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
| 24.95082 | 86 | 0.688568 |
37cce9e46f3e30583bf5e05325d776eac3781aa7 | 2,519 | py | Python | aiohttp_auth/auth/cookie_ticket_auth.py | gnarlychicken/aiohttp_auth | 3d55236889fb14b662279b050de18d43842bb886 | [
"MIT"
] | 12 | 2016-02-27T21:57:51.000Z | 2020-10-26T13:57:47.000Z | aiohttp_auth/auth/cookie_ticket_auth.py | gnarlychicken/aiohttp_auth | 3d55236889fb14b662279b050de18d43842bb886 | [
"MIT"
] | 5 | 2016-07-22T15:51:48.000Z | 2021-09-07T21:48:01.000Z | aiohttp_auth/auth/cookie_ticket_auth.py | gnarlychicken/aiohttp_auth | 3d55236889fb14b662279b050de18d43842bb886 | [
"MIT"
] | 8 | 2016-02-15T04:58:31.000Z | 2019-01-21T14:17:16.000Z | from .ticket_auth import TktAuthentication
COOKIE_AUTH_KEY = 'aiohttp_auth.auth.CookieTktAuthentication'
class CookieTktAuthentication(TktAuthentication):
"""Ticket authentication mechanism based on the ticket_auth library, with
ticket data being stored as a cookie in the response.
"""
async def remember_ticket(self, request, ticket):
"""Called to store the ticket data for a request.
Ticket data is stored in COOKIE_AUTH_KEY in the request object, and
written as cookie data to the response during the process_response()
function.
Args:
request: aiohttp Request object.
ticket: String like object representing the ticket to be stored.
"""
request[COOKIE_AUTH_KEY] = ticket
async def forget_ticket(self, request):
"""Called to forget the ticket data a request
Args:
request: aiohttp Request object.
"""
request[COOKIE_AUTH_KEY] = ''
async def get_ticket(self, request):
"""Called to return the ticket for a request.
Args:
request: aiohttp Request object.
Returns:
A ticket (string like) object, or None if no ticket is available
for the passed request.
"""
return request.cookies.get(self.cookie_name, None)
async def process_response(self, request, response):
"""Called to perform any processing of the response required.
This function stores any cookie data in the COOKIE_AUTH_KEY as a
cookie in the response object. If the value is a empty string, the
associated cookie is deleted instead.
This function requires the response to be a aiohttp Response object,
and assumes that the response has not started if the remember or
forget functions are called during the request.
Args:
request: aiohttp Request object.
response: response object returned from the handled view
Raises:
RuntimeError: Raised if response has already started.
"""
await super().process_response(request, response)
if COOKIE_AUTH_KEY in request:
if response.started:
raise RuntimeError("Cannot save cookie into started response")
cookie = request[COOKIE_AUTH_KEY]
if cookie == '':
response.del_cookie(self.cookie_name)
else:
response.set_cookie(self.cookie_name, cookie)
| 34.506849 | 78 | 0.651449 |
1d3dc2b9866fbd51cd8cba4d6ac3aa6ffc42acfb | 593 | py | Python | conanfile.py | yax-lakam-tuun/libqrcode | 1010c7a1ed83cb01a7ab889ff716ffa9e5665fc4 | [
"MIT"
] | 4 | 2021-01-16T18:01:52.000Z | 2021-11-15T05:02:58.000Z | conanfile.py | yax-lakam-tuun/libqrcode | 1010c7a1ed83cb01a7ab889ff716ffa9e5665fc4 | [
"MIT"
] | 2 | 2021-01-20T19:15:33.000Z | 2021-05-29T15:20:56.000Z | conanfile.py | yax-lakam-tuun/libqrcode | 1010c7a1ed83cb01a7ab889ff716ffa9e5665fc4 | [
"MIT"
] | 1 | 2021-06-17T18:24:57.000Z | 2021-06-17T18:24:57.000Z | from conans import ConanFile
class LibqrcodeConan(ConanFile):
name = "libqrcode"
version = "v1.0"
exports_sources = "include/*"
no_copy_source = True
url = "https://github.com/yax-lakam-tuun/libqrcode"
license = "MIT License"
description = "A header-only C++20 library for generating QR Codes"
settings = "os"
def build(self):
pass
def package(self):
self.copy("*.h")
def package_info(self):
if not self.settings.os == "Windows":
self.cpp_info.libs.append("stdc++")
self.cpp_info.libs.append("m")
| 25.782609 | 71 | 0.617201 |