id (stringlengths 1-7) | text (stringlengths 6-1.03M) | dataset_id (stringclasses, 1 value)
---|---|---|
1603816
|
import datetime
import flask.scaffold
flask.helpers._endpoint_from_view_func = flask.scaffold._endpoint_from_view_func
import flask_restful
from flask import request
from marshmallow import ValidationError
from app import RestException, db
from app.model.investigator import Investigator
from app.model.study_investigator import StudyInvestigator
from app.schema.schema import InvestigatorSchema
class InvestigatorEndpoint(flask_restful.Resource):
schema = InvestigatorSchema()
def get(self, id):
model = db.session.query(Investigator).filter_by(id=id).first()
if model is None: raise RestException(RestException.NOT_FOUND)
return self.schema.dump(model)
def delete(self, id):
db.session.query(StudyInvestigator).filter_by(investigator_id=id).delete()
db.session.query(Investigator).filter_by(id=id).delete()
db.session.commit()
return None
def put(self, id):
request_data = request.get_json()
instance = db.session.query(Investigator).filter_by(id=id).first()
try:
updated = self.schema.load(request_data, instance=instance)
except Exception as errors:
raise RestException(RestException.INVALID_OBJECT, details=errors)
updated.last_updated = datetime.datetime.utcnow()
db.session.add(updated)
db.session.commit()
return self.schema.dump(updated)
class InvestigatorListEndpoint(flask_restful.Resource):
investigatorsSchema = InvestigatorSchema(many=True)
investigatorSchema = InvestigatorSchema()
def get(self):
investigators = db.session.query(Investigator).order_by(Investigator.name).all()
return self.investigatorsSchema.dump(investigators)
def post(self):
request_data = request.get_json()
try:
load_result = self.investigatorSchema.load(request_data)
model = db.session.query(Investigator).filter_by(name=load_result.name).first()
if model:
return self.investigatorSchema.dump(model)
else:
db.session.add(load_result)
db.session.commit()
return self.investigatorSchema.dump(load_result)
except ValidationError as err:
raise RestException(RestException.INVALID_OBJECT,
details=err.messages)
|
StarcoderdataPython
|
3258862
|
import json
import numpy as np
import os
import io
def dice_score(ref, pred):
# from https://stackoverflow.com/questions/49759710/calculating-dice-co-efficient-between-two-random-images-of-same-size
if ref.shape != pred.shape:
raise ValueError("Shape mismatch: img and img2 must have to be of the same shape.")
else:
intersection = np.logical_and(ref, pred)
value = (2. * intersection.sum()) / (ref.sum() + pred.sum())
return value
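# A quick worked example (not part of the original function): for
# ref = [[1, 1], [0, 0]] and pred = [[1, 0], [0, 0]] the overlap is one pixel,
# so dice_score returns 2 * 1 / (2 + 1) ≈ 0.667.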
def dice_list(reference_json, user_json, image_width=1024, image_height=1024):
f_reference = open(reference_json)
if isinstance(user_json, str):
f_user = open(user_json)
else:
f_user = io.StringIO(user_json.getvalue().decode("utf-8"))
# f_user = json.load(stringio)
data_reference = json.load(f_reference)
data_user = json.load(f_user)
list_dice_scores = []
# if "covid27" in reference_json:
if "_via_img_metadata" in data_reference:
gt_patients_list = data_reference["_via_img_metadata"]
else:
gt_patients_list = data_reference
# gt_patients_list = data_reference["_via_img_metadata"]
if "_via_img_metadata" in data_user:
user_patients_list = data_user["_via_img_metadata"]
else:
user_patients_list = data_user
for key in gt_patients_list:
if key in user_patients_list:
np_reference = np.zeros([image_width, image_height])
np_user = np.zeros([image_width, image_height])
for region in gt_patients_list[key]["regions"]:
if region['shape_attributes']['name'] == 'rect':
x_start = region['shape_attributes']['x']
y_start = region['shape_attributes']['y']
x_end = region['shape_attributes']['width'] + x_start
y_end = region['shape_attributes']['height'] + y_start
np_reference[x_start:x_end, y_start:y_end] = 1
else: # region is not a rect, so stop processing this image's regions
break
for region in user_patients_list[key]["regions"]:
if region['shape_attributes']['name'] == 'rect':
x_start = region['shape_attributes']['x']
y_start = region['shape_attributes']['y']
x_end = region['shape_attributes']['width'] + x_start
y_end = region['shape_attributes']['height'] + y_start
np_user[x_start:x_end, y_start:y_end] = 1
dice_score_patient = dice_score(np_reference, np_user)
if not np.isnan(dice_score_patient):
list_dice_scores.append(dice_score_patient)
else: # reference didn't have a rect but the user drew one
list_dice_scores.append(0)
else:
for region in gt_patients_list[key]["regions"]:
if region['shape_attributes']['name'] == 'rect':
list_dice_scores.append(0)
print("Not segmented by used")
else:
print("Not rect tool")
return list_dice_scores
def get_score_all_users(directory, ground_truth_file, user_files_list):
reference_json = os.path.join(directory, ground_truth_file)
scores_users = []
for user_file in user_files_list:
user_json = os.path.join(directory, user_file)
dice_scores = dice_list(reference_json, user_json)
user_score = round(np.sum(np.asarray(dice_scores)) * 10)
scores_users.append(user_score)
order_users = np.argsort(scores_users)
return order_users, scores_users
# if __name__ == '__main__':
# # example of input and call to get order and scores
# # inputs are directory where files are located, ground truth json filename, list of json users annotations filenames
# order, score = get_score_all_users('/Users/joaosantinha/Downloads',
# 'via_project_9Dec2020_15h40m_Les_ground_truth.json',
# ['via_project_8Dec2020_15h28m_jane_with_missing_keys.json',
# 'via_project_18May2021_13h3m_Pedro.json',
# 'via_project_20May2021_10h53m-6_Lilli.json'])
# print('Order: ', order+1, '\nScore: ', score)
|
StarcoderdataPython
|
4838571
|
"""Functions copypasted from newer versions of numpy.
"""
from __future__ import division, print_function, absolute_import
import warnings
import sys
import numpy as np
from numpy.testing.nosetester import import_nose
from scipy._lib._version import NumpyVersion
if NumpyVersion(np.__version__) > '1.7.0.dev':
_assert_warns = np.testing.assert_warns
else:
def _assert_warns(warning_class, func, *args, **kw):
r"""
Fail unless the given callable throws the specified warning.
This definition is copypasted from numpy 1.9.0.dev.
The version in earlier numpy returns None.
Parameters
----------
warning_class : class
The class defining the warning that `func` is expected to throw.
func : callable
The callable to test.
*args : Arguments
Arguments passed to `func`.
**kwargs : Kwargs
Keyword arguments passed to `func`.
Returns
-------
The value returned by `func`.
"""
with warnings.catch_warnings(record=True) as l:
warnings.simplefilter('always')
result = func(*args, **kw)
if not len(l) > 0:
raise AssertionError("No warning raised when calling %s"
% func.__name__)
if not l[0].category is warning_class:
raise AssertionError("First warning for %s is not a "
"%s( is %s)" % (func.__name__, warning_class, l[0]))
return result
def assert_raises_regex(exception_class, expected_regexp,
callable_obj=None, *args, **kwargs):
"""
Fail unless an exception of class exception_class and with message that
matches expected_regexp is thrown by callable when invoked with arguments
args and keyword arguments kwargs.
Name of this function adheres to Python 3.2+ reference, but should work in
all versions down to 2.6.
Notes
-----
.. versionadded:: 1.8.0
"""
__tracebackhide__ = True # Hide traceback for py.test
nose = import_nose()
if sys.version_info.major >= 3:
funcname = nose.tools.assert_raises_regex
else:
# Only present in Python 2.7, missing from unittest in 2.6
funcname = nose.tools.assert_raises_regexp
return funcname(exception_class, expected_regexp, callable_obj,
*args, **kwargs)
if NumpyVersion(np.__version__) >= '1.10.0':
from numpy import broadcast_to
else:
# Definition of `broadcast_to` from numpy 1.10.0.
def _maybe_view_as_subclass(original_array, new_array):
if type(original_array) is not type(new_array):
# if input was an ndarray subclass and subclasses were OK,
# then view the result as that subclass.
new_array = new_array.view(type=type(original_array))
# Since we have done something akin to a view from original_array, we
# should let the subclass finalize (if it has it implemented, i.e., is
# not None).
if new_array.__array_finalize__:
new_array.__array_finalize__(original_array)
return new_array
def _broadcast_to(array, shape, subok, readonly):
shape = tuple(shape) if np.iterable(shape) else (shape,)
array = np.array(array, copy=False, subok=subok)
if not shape and array.shape:
raise ValueError('cannot broadcast a non-scalar to a scalar array')
if any(size < 0 for size in shape):
raise ValueError('all elements of broadcast shape must be non-'
'negative')
broadcast = np.nditer(
(array,), flags=['multi_index', 'refs_ok', 'zerosize_ok'],
op_flags=['readonly'], itershape=shape, order='C').itviews[0]
result = _maybe_view_as_subclass(array, broadcast)
if not readonly and array.flags.writeable:
result.flags.writeable = True
return result
def broadcast_to(array, shape, subok=False):
return _broadcast_to(array, shape, subok=subok, readonly=True)
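# A minimal usage sketch (illustrative only, works with either the numpy
# import above or the backported definition):
# >>> broadcast_to(np.arange(3), (2, 3))
# array([[0, 1, 2],
#        [0, 1, 2]])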
|
StarcoderdataPython
|
3232782
|
<reponame>beidongjiedeguang/manim-express
from manim_express_tests.tests_import import *
scene = EagerModeScene(screen_size=Size.medium)
test_axis = np.array([1, 1, 1])
vector_arrow = Arrow(ORIGIN, test_axis)
scene.play(ShowCreation(vector_arrow))
scene.hold_on()
q_1 = Quaternion().set_from_axis_angle(test_axis, 20 * DEGREES)
q_2 = Quaternion().set_from_axis_angle(test_axis, 30 * DEGREES)
print(Quaternion.multiply_quat(q_1, q_2))
print(Quaternion.multiply_quat_2(q_1, q_2))
vec1 = Vec3(1, 1, 1).normalise()
vec2 = Vec3(2, 3, 4).normalise()
print("-------------------")
print(vec1, vec2)
q = Quaternion().set_from_unit_vectors(vec1, vec2)
print(vec1.apply_quaternion(q))
|
StarcoderdataPython
|
1671866
|
# A part of pdfrw (pdfrw.googlecode.com)
# Copyright (C) 2006-2012 <NAME>, Austin, Texas
# MIT license -- See LICENSE.txt for details
class PdfObject(str):
''' A PdfObject is a textual representation of any PDF file object
other than an array, dict or string. It has an indirect attribute
which defaults to False.
'''
indirect = False
|
StarcoderdataPython
|
1751437
|
#!/usr/bin/python
##############################################
###Python template
###Author: <NAME>
###Date: 7/15/14
###Function: Conduct time-based simulations on age-structure networks where pre-existing immunity exists and is heterogeneous within the adult population. Vary single average value of immunity for subpopulation of adults with any pre-existing immunity. Choose subpopulation node IDs with a fixed random proportion of the adult population.
###Import data:
###Command Line: python age_time_immunity_single.py
##############################################
### notes ###
### packages/modules ###
import csv
import zipfile
from time import clock
from collections import defaultdict
import networkx as nx
from random import seed
## local modules ##
import percolations as perc
import simulation_parameters as par
import pretty_print as pp
### parameters ###
seed(19)
numsims = par.sp_numsims
size_epi = par.sp_size_epi
inf_period = par.sp_inf_period
g = par.sp_gamma
T = par.sp_T
b = par.sp_b
# specific to immunity params
imm_val_ls = par.sp_immune_val_list
prop = par.sp_prop
zstring = par.sp_pstr_fixed
zstring2 = par.sp_mstr_range
### data structures ###
d_node_age = {} # d_node_age[nodenumber] = ageclass
###############################################
### import data and initialize graph ###
### import data ###
graph = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/urban_network_added_with_info_May24_2014/urban_edges_N10k_Sept2012.txt') # Vancouver network
graph_ages = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/urban_network_added_with_info_May24_2014/urban_ages_N10k_Sept2012.txt') # node number and age class
### construct age-structured network ###
ct = 1
G = nx.Graph()
for edge in graph:
edge_ls = edge.strip().split(' ')
G.add_edge(*edge_ls)
for line in graph_ages:
new_line = line.strip().split(' ')
node, age = new_line
d_node_age[node] = age # node-ageclass dictionary
N = float(G.order())
print "network size:", N
### ziparchive to write results ###
zipname = '/home/elee/Dropbox/Elizabeth_Bansal_Lab/Age_Based_Simulations/Results/immunity_time_%ssims_beta%.3f_%s_%s.zip' %(numsims, b, zstring, zstring2)
###############################################
### set pre-existing immunity conditions ###
totalsimtime = clock()
## identify nodes with pre-existing immunity (return list of adult IDs - mult methods) ## imm_nodes = list of adult IDs with any pre-existing immunity
# choose randomly, given a proportion of adults with pre-existing immunity (# tune adult proportion in equal increments) (# set adult proportion equal to the proportion of adults infected from a single simulation)
imm_nodes = perc.immune_nodes_proportion(G, d_node_age, prop)
for imm_val in imm_val_ls:
zstring3 = 'single%s' %(imm_val) # string for filename disambiguation
## assign magnitude of pre-existing immunity to each node (return dict with node ID and pre-existing immunity value; 0 if none - mult methods) ## d_immunity_mag[node] = pre-existing immunity value
# set a single average value of immunity
d_immunity_mag = perc.immunity_magnitude_single(G, imm_nodes, imm_val)
###############################################
### pre-existing immunity simulations ###
totaltime = clock()
## save infection and recovery tsteps for each sim
# d_save_I_tstep[simnumber] (or d_save_R_tstep) = [time step of infection/recovery where index = node number - 1 else 0]
d_save_I_tstep = defaultdict(list)
d_save_R_tstep = defaultdict(list)
for num in xrange(numsims):
start = clock()
total_rec, I_tstep_list, R_tstep_list = perc.episim_age_time_imm(G, d_node_age, d_immunity_mag, b, g)
d_save_I_tstep[num] = I_tstep_list
d_save_R_tstep[num] = R_tstep_list
print "simtime, simnum, episize:", clock() - start, "\t", num, "\t", total_rec
# print tsteps of infection and recovery to recreate sim
# sort order of sims so that the rows in d_save_I_tstep and d_save_R_tstep will match each other
filename = 'Results/Itstep_immunity_time_%ssims_beta%.3f_%s_%s.txt' %(numsims, b, zstring, zstring3)
pp.print_sorteddlist_to_file(d_save_I_tstep, filename, numsims)
pp.compress_to_ziparchive(zipname, filename)
filename = 'Results/Rtstep_immunity_time_%ssims_beta%.3f_%s_%s.txt' %(numsims, b, zstring, zstring3)
pp.print_sorteddlist_to_file(d_save_R_tstep, filename, numsims)
pp.compress_to_ziparchive(zipname, filename)
print "total time for sims:", clock() - totaltime
print "Params:", numsims, size_epi, inf_period, g, T, b, imm_val, prop
print "total time for sims:", clock() - totalsimtime
print "age_time_immunity_single.py complete"
|
StarcoderdataPython
|
49991
|
## This File Contains all the different Neural Network Architectures used and the Loss function
import torch
import torch.nn as nn
import torch.nn.functional as functions
## Dense Network
class Dense(nn.Module):
def __init__(self):
super(Dense,self).__init__()
self.fc1 = nn.Linear(6*7,32) #board-size hard-coded
self.fc2 = nn.Linear(32,16)
self.probhead = nn.Linear(16,7)
self.valuehead = nn.Linear(16,1)
self.soft = nn.Softmax(dim=1)
self.tanh = nn.Tanh()
def forward(self,x):
x = x.view(-1,6*7)
x = functions.relu(self.fc1(x))
x = functions.relu(self.fc2(x))
#action probs
P = self.soft(self.probhead(x))
#value probs
v = self.tanh(self.valuehead(x))
return P,v
## Convolutional Network
class Conv(nn.Module):
def __init__(self):
super(Conv,self).__init__()
self.conv1 = nn.Conv2d(1,8,3,stride=1,padding=1)
self.bn1 = nn.BatchNorm2d(8)
self.fc1 = nn.Linear(336,150)
self.fc2 = nn.Linear(150,60)
self.probhead = nn.Linear(60,7)
self.valuehead = nn.Linear(60,1)
self.soft = nn.Softmax(dim=1)
self.tanh = nn.Tanh()
def forward(self,x):
x = x.view(-1,1,6,7)
x = functions.relu(self.bn1(self.conv1(x)))
x = x.view(-1,6*7*8)
x = functions.relu(self.fc1(x))
x = functions.relu(self.fc2(x))
P = self.soft(self.probhead(x))
v = self.tanh(self.valuehead(x))
return P,v
## Loss Function
class Alphaloss(nn.Module):
def __init__(self):
super(Alphaloss,self).__init__()
def forward(self,z,v,pi,P): #Notation as per AlphaZero Paper
value_error = (z - v) **2
policy_error = -torch.matmul(pi,torch.log(P).T) # gives the same result
#policy_error = torch.sum(-pi*torch.log(P),1)
return (value_error.view(-1)+policy_error).mean()
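## A minimal shape check (usage sketch, not from the original file): a batch of
## four empty 6x7 Connect-Four boards should yield move probabilities of shape
## (4, 7) and values of shape (4, 1).
if __name__ == "__main__":
    net = Dense()
    P, v = net(torch.zeros(4, 6, 7))
    print(P.shape, v.shape)  # torch.Size([4, 7]) torch.Size([4, 1])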
|
StarcoderdataPython
|
4839642
|
<reponame>domenukk/lighthouse
from .misc import *
from .debug import *
from .log import lmsg, logging_started, start_logging
|
StarcoderdataPython
|
3277058
|
print("Input: ",end="")
string = input()
def stack(s):
if (len(s) == 0):
return
x = s[-1]
s.pop()
stack(s)
print(x, end="")
s.append(x)
def min(s):
Stack = []
Stack.append(s[0])
for i in range(1, len(s)):
if (len(Stack) == 0):
Stack.append(s[i])
else:
if (Stack[-1] == s[i]):
Stack.pop()
else:
Stack.append(s[i])
stack(Stack)
print("Output: ",end="")
min(string)
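# Worked example (added note; the intent appears to be cascading removal of
# adjacent equal characters): for the input "abbcca" the stack evolves
# [a] -> [a, b] -> [a] -> [a, c] -> [a] -> [], so nothing is printed as output.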
|
StarcoderdataPython
|
1775084
|
from django.apps import AppConfig
class IngresoConfig(AppConfig):
name = 'ingreso'
|
StarcoderdataPython
|
28766
|
class Solution:
def diStringMatch(self, S):
low,high=0,len(S)
ans=[]
for i in S:
if i=="I":
ans.append(low)
low+=1
else:
ans.append(high)
high-=1
return ans +[low]
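# A minimal usage sketch (not part of the original solution): "I" consumes the
# smallest unused number and "D" the largest.
# >>> Solution().diStringMatch("IDID")
# [0, 4, 1, 3, 2]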
|
StarcoderdataPython
|
3240719
|
# Write a program that reads a young person's year of birth and reports, according to their age:
# - whether they still have to enlist for military service.
# - whether it is time to enlist.
# - whether the enlistment deadline has already passed.
# The program must also show how much time is left or how long past the deadline it is.
from datetime import date
print('~~~'*15)
print('MILITARY ENLISTMENT SERVICE.')
print('~~~'*15)
ano = int(input('Enter the year of birth: '))
idade = 2021 - ano
n = 18 - idade
if idade == 18:
print('\033[33mYou must enlist this year!')
elif idade <= 17:
print('\033[32mThere is/are still {} year(s) until your enlistment.'.format(n))
elif idade > 18:
n = n * (-1)
print('\033[31mYou should have enlisted {} year(s) ago.'.format(n))
|
StarcoderdataPython
|
43501
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import matplotlib.pyplot as plt
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
class MatplotlibWidget(FigureCanvas):
def __init__(self, figure, parent):
"""
"""
super(MatplotlibWidget, self).__init__(figure)
self.setParent(parent)
self.fig = figure
def close_figure(self):
"""
"""
if self.fig:
self.fig.clf()
plt.close(self.fig)
self.fig = None
|
StarcoderdataPython
|
1719089
|
import json
import pytest
from idempotency_key.encoders import BasicKeyEncoder
from idempotency_key.exceptions import MissingIdempotencyKeyError
def test_basic_encoding():
class Request:
path_info = "/myURL/path/"
method = "POST"
body = json.dumps({"key": "value"}).encode("UTF-8")
request = Request()
obj = BasicKeyEncoder()
enc_key = obj.encode_key(request, "MyKey")
assert enc_key == "<KEY>"
def test_basic_encoder_null_key():
class Request:
path_info = "/myURL/path/"
method = "POST"
body = json.dumps({"key": "value"}).encode("UTF-8")
request = Request()
obj = BasicKeyEncoder()
with pytest.raises(MissingIdempotencyKeyError) as e_info:
obj.encode_key(request, None)
assert e_info.value.args[0] == "Idempotency key cannot be None."
|
StarcoderdataPython
|
161847
|
# In the mysterious country of Byteland, everything is quite different from what you'd normally expect. In most places, if
# you were approached by two mobsters in a dark alley, they would probably tell you to give them all the money that you
# have. If you refused, or didn't have any - they might even beat you up.
#
# In Byteland the government decided that even the slightest chance of someone getting injured has to be ruled out. So,
# they introduced a strict policy. When a mobster approaches you in a dark alley, he asks you for a specific amount of
# money. You are obliged to show him all the money that you have, but you only need to pay up if he can find a subset of
# your banknotes whose total value matches his demand. Since banknotes in Byteland can have any positive integer value
# smaller than one thousand you are quite likely to get off without paying.
#
# Both the citizens and the gangsters of Byteland have very positive feelings about the system. No one ever gets hurt, the
# gangsters don't lose their jobs, and there are quite a few rules that minimize that probability of getting mugged (the
# first one is: don't go into dark alleys - and this one is said to work in other places also).
#
# Input
# The first line contains integer t, the number of test cases (about 100). Then t test cases follow. Each test case starts
# with n, the number of banknotes in your wallet, and m, the amount of money the muggers asked of you. Then n numbers
# follow, representing values of your banknotes. Your wallet does not hold more than 20 banknotes, and the value of a
# single banknote is never more than 1000.
#
# Output
# For each test case output a single line with the word 'Yes' if there is a subset of your banknotes that sums to m, and
# 'No' otherwise.
#
# Example
# Input:
# 5
# 3 3
# 1
# 1
# 1
# 5 11
# 1
# 2
# 4
# 8
# 16
# 5 23
# 1
# 2
# 4
# 8
# 16
# 5 13
# 1
# 5
# 5
# 10
# 10
# 20 132
# 17
# 6
# 4
# 998
# 254
# 137
# 259
# 153
# 154
# 3
# 28
# 19
# 123
# 542
# 857
# 23
# 687
# 35
# 99
# 999
#
# Output:
# Yes
# Yes
# Yes
# No
# Yes
from itertools import combinations
def subset(a, n, m):
for i in range(1, n+1):
array = combinations(a, i)
for j in array:
if sum(j) == m:
return "Yes"
return "No"
for _ in range(int(input())):
n, m = map(int, input().split())
a = []
for i in range(n):
a.append(int(input()))
ans = subset(a, n, m)
print(ans)
#Made by <NAME>
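# An alternative sketch (not part of the original solution): the same yes/no
# check can be done with a subset-sum bitmask in roughly O(n * m) bit
# operations instead of enumerating every combination.
def subset_bitset(a, m):
    reachable = 1  # bit i is set <=> some subset of the banknotes sums to i
    for value in a:
        reachable |= reachable << value
    return "Yes" if (reachable >> m) & 1 else "No"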
|
StarcoderdataPython
|
1706411
|
<filename>src/e404.py
#!/usr/bin/python
import util
import templates
def run():
print("Creating 404")
html = templates.get("404")
html = templates.initial_replace(html, -1)
html = templates.final_replace(html, ".")
util.writefile("../404.html", html)
if __name__ == "__main__":
run()
|
StarcoderdataPython
|
128297
|
<reponame>troyel/OpenMetadata<gh_stars>1-10
# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test Table and Column Tests' validate implementations.
Each test should validate the Success, Failure and Aborted statuses
"""
from datetime import datetime
from metadata.generated.schema.entity.data.table import ColumnProfile, TableProfile
from metadata.generated.schema.tests.basic import TestCaseResult, TestCaseStatus
from metadata.generated.schema.tests.column.columnValuesLengthsToBeBetween import (
ColumnValueLengthsToBeBetween,
)
from metadata.generated.schema.tests.column.columnValuesToBeBetween import (
ColumnValuesToBeBetween,
)
from metadata.generated.schema.tests.column.columnValuesToBeNotNull import (
ColumnValuesToBeNotNull,
)
from metadata.generated.schema.tests.column.columnValuesToBeUnique import (
ColumnValuesToBeUnique,
)
from metadata.generated.schema.tests.table.tableColumnCountToEqual import (
TableColumnCountToEqual,
)
from metadata.generated.schema.tests.table.tableRowCountToBeBetween import (
TableRowCountToBeBetween,
)
from metadata.generated.schema.tests.table.tableRowCountToEqual import (
TableRowCountToEqual,
)
from metadata.orm_profiler.validations.core import validate
EXECUTION_DATE = datetime.strptime("2021-07-03", "%Y-%m-%d")
def test_table_row_count_to_equal():
"""
Check TableRowCountToEqual
"""
table_profile = TableProfile(
profileDate=EXECUTION_DATE.strftime("%Y-%m-%d"),
rowCount=100,
)
res_ok = validate(
TableRowCountToEqual(value=100),
table_profile=table_profile,
execution_date=EXECUTION_DATE,
)
assert res_ok == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Success,
result="Found 100.0 rows vs. the expected 100",
)
res_ko = validate(
TableRowCountToEqual(value=50),
table_profile=table_profile,
execution_date=EXECUTION_DATE,
)
assert res_ko == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Failed,
result="Found 100.0 rows vs. the expected 50",
)
table_profile_aborted = TableProfile(
profileDate=EXECUTION_DATE.strftime("%Y-%m-%d"),
)
res_aborted = validate(
TableRowCountToEqual(value=100),
table_profile=table_profile_aborted,
execution_date=EXECUTION_DATE,
)
assert res_aborted == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Aborted,
result="rowCount should not be None for TableRowCountToEqual",
)
def test_table_row_count_to_be_between():
"""
Check TableRowCountToBeBetween
"""
table_profile = TableProfile(
profileDate=EXECUTION_DATE.strftime("%Y-%m-%d"),
rowCount=100,
)
res_ok = validate(
TableRowCountToBeBetween(minValue=20, maxValue=120),
table_profile=table_profile,
execution_date=EXECUTION_DATE,
)
assert res_ok == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Success,
result="Found 100.0 rows vs. the expected range [20, 120].",
)
res_ko = validate(
TableRowCountToBeBetween(minValue=120, maxValue=200),
table_profile=table_profile,
execution_date=EXECUTION_DATE,
)
assert res_ko == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Failed,
result="Found 100.0 rows vs. the expected range [120, 200].",
)
table_profile_aborted = TableProfile(
profileDate=EXECUTION_DATE.strftime("%Y-%m-%d"),
)
res_aborted = validate(
TableRowCountToBeBetween(minValue=120, maxValue=200),
table_profile=table_profile_aborted,
execution_date=EXECUTION_DATE,
)
assert res_aborted == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Aborted,
result="rowCount should not be None for TableRowCountToBeBetween",
)
def test_table_column_count_to_equal():
"""
Check TableColumnCountToEqual
"""
table_profile = TableProfile(
profileDate=EXECUTION_DATE.strftime("%Y-%m-%d"),
columnCount=5,
)
res_ok = validate(
TableColumnCountToEqual(columnCount=5),
table_profile=table_profile,
execution_date=EXECUTION_DATE,
)
assert res_ok == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Success,
result="Found 5.0 columns vs. the expected 5",
)
res_ko = validate(
TableColumnCountToEqual(columnCount=20),
table_profile=table_profile,
execution_date=EXECUTION_DATE,
)
assert res_ko == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Failed,
result="Found 5.0 columns vs. the expected 20",
)
table_profile_aborted = TableProfile(
profileDate=EXECUTION_DATE.strftime("%Y-%m-%d"),
)
res_aborted = validate(
TableColumnCountToEqual(columnCount=5),
table_profile=table_profile_aborted,
execution_date=EXECUTION_DATE,
)
assert res_aborted == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Aborted,
result="columnCount should not be None for TableColumnCountToEqual",
)
def test_column_values_to_be_between():
"""
Check ColumnValuesToBeBetween
"""
column_profile = ColumnProfile(
min=1,
max=3,
)
res_ok = validate(
ColumnValuesToBeBetween(
minValue=0,
maxValue=3,
),
col_profile=column_profile,
execution_date=EXECUTION_DATE,
)
assert res_ok == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Success,
result="Found min=1.0, max=3.0 vs. the expected min=0, max=3.",
)
res_ko = validate(
ColumnValuesToBeBetween(
minValue=0,
maxValue=2,
),
col_profile=column_profile,
execution_date=EXECUTION_DATE,
)
assert res_ko == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Failed,
result="Found min=1.0, max=3.0 vs. the expected min=0, max=2.",
)
column_profile_aborted = ColumnProfile(
min=1,
)
res_aborted = validate(
ColumnValuesToBeBetween(
minValue=0,
maxValue=3,
),
col_profile=column_profile_aborted,
execution_date=EXECUTION_DATE,
)
assert res_aborted == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Aborted,
result=(
"We expect `min` & `max` to be informed on the profiler for ColumnValuesToBeBetween"
+ " but got min=1.0, max=None."
),
)
def test_column_values_to_be_unique():
"""
Check ColumnValuesToBeUnique
"""
column_profile = ColumnProfile(
valuesCount=10,
uniqueCount=10,
)
res_ok = validate(
ColumnValuesToBeUnique(),
col_profile=column_profile,
execution_date=EXECUTION_DATE,
)
assert res_ok == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Success,
result=(
"Found valuesCount=10.0 vs. uniqueCount=10.0."
+ " Both counts should be equal for column values to be unique."
),
)
column_profile_ko = ColumnProfile(
valuesCount=10,
uniqueCount=5,
)
res_ko = validate(
ColumnValuesToBeUnique(),
col_profile=column_profile_ko,
execution_date=EXECUTION_DATE,
)
assert res_ko == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Failed,
result=(
"Found valuesCount=10.0 vs. uniqueCount=5.0."
+ " Both counts should be equal for column values to be unique."
),
)
column_profile_aborted = ColumnProfile()
res_aborted = validate(
ColumnValuesToBeUnique(),
col_profile=column_profile_aborted,
execution_date=EXECUTION_DATE,
)
assert res_aborted == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Aborted,
result=(
"We expect `valuesCount` & `uniqueCount` to be informed on the profiler for ColumnValuesToBeUnique"
+ " but got valuesCount=None, uniqueCount=None."
),
)
def test_column_values_to_be_not_null():
"""
Check ColumnValuesToBeNotNull
"""
column_profile = ColumnProfile(
nullCount=0,
)
res_ok = validate(
ColumnValuesToBeNotNull(),
col_profile=column_profile,
execution_date=EXECUTION_DATE,
)
assert res_ok == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Success,
result=("Found nullCount=0.0. It should be 0."),
)
column_profile_ko = ColumnProfile(
nullCount=10,
)
res_ko = validate(
ColumnValuesToBeNotNull(),
col_profile=column_profile_ko,
execution_date=EXECUTION_DATE,
)
assert res_ko == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Failed,
result=("Found nullCount=10.0. It should be 0."),
)
column_profile_aborted = ColumnProfile()
res_aborted = validate(
ColumnValuesToBeNotNull(),
col_profile=column_profile_aborted,
execution_date=EXECUTION_DATE,
)
assert res_aborted == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Aborted,
result=(
"We expect `nullCount` to be informed on the profiler for ColumnValuesToBeNotNull."
),
)
def test_column_value_length_to_be_between():
"""
Check ColumnValueLengthsToBeBetween
"""
col_profile = ColumnProfile(
minLength=4,
maxLength=16,
)
res_ok = validate(
ColumnValueLengthsToBeBetween(minLength=2, maxLength=20),
col_profile=col_profile,
execution_date=EXECUTION_DATE,
)
assert res_ok == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Success,
result="Found minLength=4.0, maxLength=16.0 vs. the expected minLength=2, maxLength=20.",
)
res_ko = validate(
ColumnValueLengthsToBeBetween(minLength=10, maxLength=20),
col_profile=col_profile,
execution_date=EXECUTION_DATE,
)
assert res_ko == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Failed,
result="Found minLength=4.0, maxLength=16.0 vs. the expected minLength=10, maxLength=20.",
)
col_profile_aborted = ColumnProfile(minLength=4)
res_aborted = validate(
ColumnValueLengthsToBeBetween(minLength=2, maxLength=20),
col_profile=col_profile_aborted,
execution_date=EXECUTION_DATE,
)
assert res_aborted == TestCaseResult(
executionTime=EXECUTION_DATE.timestamp(),
testCaseStatus=TestCaseStatus.Aborted,
result=(
"We expect `minLength` & `maxLength` to be informed on the profiler for ColumnValueLengthsToBeBetween"
+ " but got minLength=4.0, maxLength=None."
),
)
|
StarcoderdataPython
|
1737697
|
from django.db import models
class Job(models.Model):
image = models.ImageField(upload_to='images/')
summary = models.CharField(max_length=200)
|
StarcoderdataPython
|
3399015
|
import copy
import json
import os
import pdb
import re
from typing import Dict, List, TypeVar
import torch
from elvis.modeling.models import build_net
from elvis.modeling.models.layers import MLP
from torch.nn import functional as F
from .base import MetaArch
from .build import ARCH_REGISTRY
Tensor = TypeVar('torch.tensor')
__all__ = ['MetaVQA',
'build_meta_vqa']
class MetaVQA(MetaArch):
def __init__(self,
model,
max_visual,
max_tokens,
ans2id):
super(MetaArch, self).__init__()
self.model = model
self.max_visual = max_visual
self.max_tokens = max_tokens
self.ans2id = ans2id
self.id2ans = {v: k for k, v in ans2id.items()}
self.out_layer = MLP(in_features=self.model.embed_dim,
hidden_dim=self.model.embed_dim,
out_features=len(self.ans2id),
dropout_p=.1)
def forward(self, vis_in, txt_in, vis_mask, txt_mask, **kwargs):
out = self.model(vis_in=vis_in, vis_mask=vis_mask, txt_in=txt_in, txt_mask=txt_mask)
t_pool = out[:, 0]
#v_pool = out[:, self.max_tokens]
logits = self.out_layer(t_pool)
return {'vqa_logits': logits}
def compute_loss(self, vqa_logits, gt_answers, **kwargs) -> Dict:
vqa_loss = F.binary_cross_entropy_with_logits(vqa_logits, gt_answers, reduction='none')
vqa_loss = vqa_loss.sum(dim=-1).mean()
return {'loss': vqa_loss}
def save_on_disk(self, path):
#save vocab only once
vocab_ckp = os.path.join(path, 'VQA.vocab')
if not os.path.exists(vocab_ckp):
with open(vocab_ckp, 'w') as fp:
json.dump(self.ans2id, fp)
#use deepcopy to avoid problems with DistributedDataParallel
state_dict = copy.deepcopy(self).cpu().state_dict()
ckp_file = os.path.join(path, 'state_dict.pt')
torch.save(state_dict, ckp_file)
def from_pretrained(self, state_dict):
layers_names = list(state_dict.keys())
for l_name in layers_names:
if l_name.startswith('lm_mlp') or l_name.startswith('itm_fc'):
del state_dict[l_name]
else:
#strip the leading "model." prefix from the layer name
state_dict[l_name[6:]] = state_dict.pop(l_name)
self.model.load_state_dict(state_dict)
def predict(self, vis_in, txt_in, vis_mask, txt_mask, **kwargs):
out = self.forward(vis_in, txt_in, vis_mask, txt_mask, **kwargs)
probs = torch.sigmoid(out['vqa_logits']).squeeze(0)
answer_id = torch.argmax(probs).item()
answer_conf = probs[answer_id].item()
answer = self.id2ans[answer_id]
return answer, answer_conf
"""
def from_checkpoint(self, path):
self.lang_net.load_config(path)
state_path = os.path.join(path, 'state_dict.pt')
state_dict = torch.load(state_path)
self.load_state_dict(state_dict)
voc_path = os.path.join(path, 'label2ans.json')
with open(voc_path) as fp:
self.id2ans = json.load(fp)
"""
@ARCH_REGISTRY.register()
def build_meta_vqa(cfg, **kwargs):
with open(cfg.MODEL.ANS_VOCAB) as fp:
ans2id = json.load(fp)
model, data_interface = build_net(cfg.MODEL, get_interface='vqa', **{'ans2id': ans2id})
vqa = MetaVQA(model,
max_visual=cfg.MODEL.MAX_N_VISUAL,
max_tokens=cfg.MODEL.MAX_N_TOKENS,
ans2id=ans2id)
return vqa, data_interface
|
StarcoderdataPython
|
41476
|
"""
Receiving Open Sound Control messages as audio streams
**02-receive-streams.py**
This script shows a granulation process controlled by OSC messages
coming from another program (run the next example, *03-send-streams.py*,
to get values coming in).
"""
from pyo import *
s = Server().boot()
# The sound table to granulate.
table = SndTable("../snds/flute.aif")
# Listen addresses '/density', '/position', '/pitch_rand' and '/duration' on port 9000.
rec = OscReceive(port=9000, address=["/density", "/position", "/pitch_rand", "/duration"])
# Sets initial values for the OSC streams. This allows the program to run with
# minimal behaviour even if no messages have been sent on these addresses.
rec.setValue("/density", 0.5)
rec.setValue("/position", 0.5)
rec.setValue("/pitch_rand", 0.0)
rec.setValue("/duration", 0.5)
# Density of grains, between 1 and 250 grains per second.
dens = SigTo(rec["/density"], time=0.05, mul=249, add=1)
# Reading position, in samples, in the table + little jitter noise.
pos = SigTo(rec["/position"], time=0.05, mul=table.getSize(), add=Noise(100))
# Amplitude of a jitter noise around 1.0 to control the pitch of individual grains.
rpit = SigTo(rec["/pitch_rand"], time=0.05, mul=0.2, add=0.001)
pit = Noise(mul=rpit, add=1)
# Grain duration, between 0.025 and 0.5 second.
dur = SigTo(rec["/duration"], time=0.05, mul=0.475, add=0.025)
grain = Particle(
table=table, # table to read samples from.
env=HannTable(), # grain envelope.
dens=dens, # density of grains per second.
pitch=pit, # pitch of grains.
pos=pos, # position in the table where to start the grain.
dur=dur, # grain duration.
dev=0.01, # Maximum deviation of the starting time of the grain.
pan=Noise(0.5, 0.5), # Panning factor of the grain.
chnls=2, # Number of channels to output.
mul=0.15,
).out()
s.gui(locals())
|
StarcoderdataPython
|
3244987
|
<gh_stars>1-10
# daily request to youtube
from django.utils import timezone
from youtube.models import despacito
from .ytbAPI import ytbAPI
from . import youtube_lib as ytb
def despacito_daily():
result = ytb.video_list('kJQP7kiw5Fk','statistics')
t = timezone.now()
v = result['statistics']['viewCount']
l = result['statistics']['likeCount']
d = result['statistics']['dislikeCount']
c = result['statistics']['commentCount']
tmp = despacito(date=t, views=v, likes=l, dislikes=d, comments=c)
tmp.save()
print('despacito daily done: {} \n'.format(str(t)))
def test():
print('testdaily')
|
StarcoderdataPython
|
1685128
|
<gh_stars>10-100
# Copyright 2014-2020 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module which tests the compat.py functionality.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
import os
import unittest
import six
from scalyr_agent.compat import os_environ_unicode
class EnvironUnicode(unittest.TestCase):
TEST_VAR = "TEST_VAR_ENVIRON_UNICODE"
def test_environ_get(self):
os.environ[EnvironUnicode.TEST_VAR] = six.ensure_str("Test string")
self.assertEqual(
os_environ_unicode.get(EnvironUnicode.TEST_VAR),
six.text_type("Test string"),
)
self.assertEqual(
os_environ_unicode[EnvironUnicode.TEST_VAR], six.text_type("Test string")
)
def test_environ_set(self):
os_environ_unicode[EnvironUnicode.TEST_VAR] = six.ensure_str("Test two string")
self.assertEqual(
os_environ_unicode.get(EnvironUnicode.TEST_VAR),
six.text_type("Test two string"),
)
def test_environ_pop(self):
os_environ_unicode[EnvironUnicode.TEST_VAR] = six.ensure_str("Test four string")
value = os_environ_unicode.pop(EnvironUnicode.TEST_VAR)
self.assertEqual(value, six.text_type("Test four string"))
def test_environ_in(self):
os.environ[EnvironUnicode.TEST_VAR] = "Foo"
self.assertTrue(EnvironUnicode.TEST_VAR in os_environ_unicode)
self.assertFalse("FakeKey1234" in os_environ_unicode)
|
StarcoderdataPython
|
1675353
|
#!/usr/bin/env python
"""bless_client
A sample client to invoke the BLESS Lambda function and save the signed SSH Certificate.
Usage:
bless_client.py region lambda_function_name bastion_user bastion_user_ip remote_usernames
bastion_ips bastion_command <id_rsa.pub to sign> <output id_rsa-cert.pub>
region: AWS region where your lambda is deployed.
lambda_function_name: The AWS Lambda function's alias or ARN to invoke.
bastion_user: The user on the bastion, who is initiating the SSH request.
bastion_user_ip: The IP of the user accessing the bastion.
remote_usernames: Comma-separated list of username(s) or authorized principals on the remote
server that will be used in the SSH request. This is enforced in the issued certificate.
bastion_ips: The source IP(s) where the SSH connection will be initiated from.
Addresses should be comma-separated and can be individual IPs or CIDR format (nn.nn.nn.nn/nn
or hhhh::hhhh/nn). This is enforced in the issued certificate.
bastion_command: Text information about the SSH request of the bastion_user.
id_rsa.pub to sign: The id_rsa.pub that will be used in the SSH request. This is
enforced in the issued certificate.
output id_rsa-cert.pub: The file where the certificate should be saved. Per man SSH(1):
"ssh will also try to load certificate information from the filename
obtained by appending -cert.pub to identity filenames" e.g. the <id_rsa.pub to sign>.
"""
import json
import os
import stat
import sys
import boto3
def main(argv):
if len(argv) < 9 or len(argv) > 10:
print(
'Usage: bless_client.py region lambda_function_name bastion_user bastion_user_ip '
'remote_usernames bastion_ips bastion_command <id_rsa.pub to sign> '
'<output id_rsa-cert.pub> [kmsauth token]')
return -1
region, lambda_function_name, bastion_user, bastion_user_ip, remote_usernames, bastion_ips, \
bastion_command, public_key_filename, certificate_filename = argv[:9]
with open(public_key_filename, 'r') as f:
public_key = f.read().strip()
payload = {'bastion_user': bastion_user, 'bastion_user_ip': bastion_user_ip,
'remote_usernames': remote_usernames, 'bastion_ips': bastion_ips,
'command': bastion_command, 'public_key_to_sign': public_key}
if len(argv) == 10:
payload['kmsauth_token'] = argv[9]
payload_json = json.dumps(payload)
print('Executing:')
print('payload_json is: \'{}\''.format(payload_json))
lambda_client = boto3.client('lambda', region_name=region)
response = lambda_client.invoke(FunctionName=lambda_function_name,
InvocationType='RequestResponse', LogType='None',
Payload=payload_json)
print('{}\n'.format(response['ResponseMetadata']))
if response['StatusCode'] != 200:
print('Error creating cert.')
return -1
payload = json.loads(response['Payload'].read())
if 'certificate' not in payload:
print(payload)
return -1
cert = payload['certificate']
with os.fdopen(os.open(certificate_filename, os.O_WRONLY | os.O_CREAT, 0o600),
'w') as cert_file:
cert_file.write(cert)
# If cert_file already existed with the incorrect permissions, fix them.
file_status = os.stat(certificate_filename)
if 0o600 != (file_status.st_mode & 0o777):
os.chmod(certificate_filename, stat.S_IRUSR | stat.S_IWUSR)
print('Wrote Certificate to: ' + certificate_filename)
if __name__ == '__main__':
main(sys.argv[1:])
|
StarcoderdataPython
|
4815625
|
<reponame>zhuyuanxiang/deep-learning-with-python-notebooks
# -*- encoding: utf-8 -*-
"""
@Author : zYx.Tom
@Contact : <EMAIL>
@site : https://zhuyuanxiang.github.io
---------------------------
@Software : PyCharm
@Project : deep-learning-with-python-notebooks
@File : ch0702_tensor_board.py
@Version : v0.1
@Time : 2019-11-27 15:26
@License : (C)Copyright 2018-2019, zYx.Tom
@Reference : "Deep Learning with Python", Francois Chollet, Sec070202, P212
@Desc : Advanced deep-learning best practices: using TensorBoard to inspect and monitor deep-learning models
"""
import os
import sys
import matplotlib.pyplot as plt
import numpy as np # pip install numpy<1.17; versions below 1.17 do not raise the error
import winsound
from keras.activations import relu
from keras.datasets import imdb
from keras.layers import Conv1D, Embedding, GlobalMaxPooling1D, MaxPooling1D
from keras.layers import Dense
from keras.losses import binary_crossentropy
from keras.models import Sequential
from keras.optimizers import rmsprop
from keras.preprocessing.sequence import pad_sequences
# Suppress the warning: Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
# Set the display precision of printed data to 3 decimal places
np.set_printoptions(precision = 3, suppress = True, threshold = np.inf, linewidth = 200)
# to make this notebook's output stable across runs
seed = 42
np.random.seed(seed)
# Python ≥3.5 is required
assert sys.version_info >= (3, 5)
# numpy 1.16.4 is required
assert np.__version__ in ["1.16.5", "1.16.4"]
# ----------------------------------------------------------------------
max_features = 2000
max_len = 500
embedding_size = 128
epochs = 15
batch_size = 128
verbose = 2
validation_split = 0.2
print("Listing 7.7:准备 IMDB 数据集...")
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words = max_features)
x_train = x_train[0:max_features]
x_test = x_test[0:max_features]
y_train = y_train[0:max_features]
y_test = y_test[0:max_features]
print('\t', len(x_train), 'train sequences')
print('\t', len(x_test), 'test sequences')
print('Pad sequences (samples x time)')
x_train = pad_sequences(x_train, maxlen = max_len)
x_test = pad_sequences(x_test, maxlen = max_len)
print('\t x_train shape:', x_train.shape)
print('\t x_test shape:', x_test.shape)
# ----------------------------------------------------------------------
model = Sequential()
model.add(Embedding(max_features, 128, input_length = max_len, name = 'Embedding'))
model.add(Conv1D(32, 7, activation = relu))
model.add(MaxPooling1D(5))
model.add(Conv1D(32, 7, activation = relu))
model.add(GlobalMaxPooling1D())
model.add(Dense(1))
model.summary()
model.compile(optimizer = rmsprop(), loss = binary_crossentropy, metrics = ['acc'])
from keras.utils import plot_model
plot_model(model, to_file = 'model.png')
plot_model(model, show_shapes = True, to_file = 'model_with_parameter.png')
# ----------------------------------------------------------------------
# callbacks = [
# TensorBoard(
# log_dir = 'my_log_dir', # where the log files are saved
# histogram_freq = 1, # record activation histograms after every epoch
# # ToDo: embeddings_data must also be provided before embeddings can be recorded
# # embeddings_freq = 1, # record embedding data after every epoch
# )
# ]
# history = model.fit(x_train, y_train, epochs = 20, batch_size = 128, validation_split = 0.2,
# callbacks = callbacks, verbose = 2, use_multiprocessing = True)
# ----------------------------------------------------------------------
# Beep to signal that the run has finished
winsound.Beep(600, 500)
if len(plt.get_fignums()) != 0:
plt.show()
pass
|
StarcoderdataPython
|
138454
|
<gh_stars>1-10
from .APIKeyLabel import APIKeyLabel
from .Contract import Contract
from .DnsAddress import DnsAddress
from .Error import Error
from .GetOrganizationUsersResponseBody import GetOrganizationUsersResponseBody
from .IsMember import IsMember
from .JoinOrganizationInvitation import JoinOrganizationInvitation
from .LocalizedInfoText import LocalizedInfoText
from .Organization import Organization
from .OrganizationAPIKey import OrganizationAPIKey
from .OrganizationLogo import OrganizationLogo
from .OrganizationTreeItem import OrganizationTreeItem
from .RegistryEntry import RegistryEntry
from .ValidityTime import ValidityTime
from .api_response import APIResponse
from .unhandled_api_error import UnhandledAPIError
from .unmarshall_error import UnmarshallError
from Jumpscale import j
class OrganizationsService:
def __init__(self, client):
self.client = client
def Get2faValidityTime(self, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Get the 2FA validity time for the organization, in seconds
It is method for GET /organizations/{globalid}/2fa/validity
"""
uri = self.client.base_url + "/organizations/" + globalid + "/2fa/validity"
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=ValidityTime(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, e.message)
def Set2faValidityTime(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Update the 2FA validity time for the organization
It is method for PUT /organizations/{globalid}/2fa/validity
"""
uri = self.client.base_url + "/organizations/" + globalid + "/2fa/validity"
return self.client.put(uri, data, headers, query_params, content_type)
def DeleteOrganizationAPIKey(
self, label, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Removes an API key
It is method for DELETE /organizations/{globalid}/apikeys/{label}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/apikeys/" + label
return self.client.delete(uri, None, headers, query_params, content_type)
def GetOrganizationAPIKey(self, label, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Get an api key from an organization
It is method for GET /organizations/{globalid}/apikeys/{label}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/apikeys/" + label
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=OrganizationAPIKey(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, e.message)
def UpdateOrganizationAPIKey(
self, data, label, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Updates the label or other properties of a key.
It is method for PUT /organizations/{globalid}/apikeys/{label}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/apikeys/" + label
return self.client.put(uri, data, headers, query_params, content_type)
def GetOrganizationAPIKeyLabels(self, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Get the list of active api keys.
It is method for GET /organizations/{globalid}/apikeys
"""
uri = self.client.base_url + "/organizations/" + globalid + "/apikeys"
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
resps = []
for elem in resp.json():
resps.append(APIKeyLabel(elem))
return APIResponse(data=resps, response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, e.message)
def CreateNewOrganizationAPIKey(
self, data, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Create a new API Key, a secret itself should not be provided, it will be generated serverside.
It is method for POST /organizations/{globalid}/apikeys
"""
uri = self.client.base_url + "/organizations/" + globalid + "/apikeys"
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
return APIResponse(data=OrganizationAPIKey(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, e.message)
def GetOrganizationContracts(self, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Get the contracts where the organization is 1 of the parties. Order descending by date.
It is method for GET /organizations/{globalid}/contracts
"""
uri = self.client.base_url + "/organizations/" + globalid + "/contracts"
return self.client.get(uri, None, headers, query_params, content_type)
def CreateOrganizationContracty(
self, data, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Create a new contract.
It is method for POST /organizations/{globalid}/contracts
"""
uri = self.client.base_url + "/organizations/" + globalid + "/contracts"
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
return APIResponse(data=Contract(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, e.message)
def GetDescriptionWithFallback(
self, langkey, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Get the description for an organization for this langkey, try to use the English is there is no description for this langkey
It is method for GET /organizations/{globalid}/description/{langkey}/withfallback
"""
uri = self.client.base_url + "/organizations/" + globalid + "/description/" + langkey + "/withfallback"
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=LocalizedInfoText(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, e.message)
def DeleteDescription(self, langkey, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Delete the description for this organization for a given language key
It is method for DELETE /organizations/{globalid}/description/{langkey}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/description/" + langkey
return self.client.delete(uri, None, headers, query_params, content_type)
def GetDescription(self, langkey, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Get the description for an organization for this langkey
It is method for GET /organizations/{globalid}/description/{langkey}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/description/" + langkey
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=LocalizedInfoText(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, e.message)
def SetDescription(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Set the description for this organization for a given language key
It is method for POST /organizations/{globalid}/description
"""
uri = self.client.base_url + "/organizations/" + globalid + "/description"
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
return APIResponse(data=LocalizedInfoText(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, e.message)
def UpdateDescription(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Update the description for this organization for a given language key
It is method for PUT /organizations/{globalid}/description
"""
uri = self.client.base_url + "/organizations/" + globalid + "/description"
resp = self.client.put(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=LocalizedInfoText(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, e.message)
def DeleteOrganizationDns(
self, dnsname, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Removes a DNS name associated with an organization
It is method for DELETE /organizations/{globalid}/dns/{dnsname}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/dns/" + dnsname
return self.client.delete(uri, None, headers, query_params, content_type)
def UpdateOrganizationDns(
self, data, dnsname, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Updates an existing DNS name associated with an organization
It is method for PUT /organizations/{globalid}/dns/{dnsname}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/dns/" + dnsname
return self.client.put(uri, data, headers, query_params, content_type)
def CreateOrganizationDns(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Creates a new DNS name associated with an organization
It is method for POST /organizations/{globalid}/dns
"""
uri = self.client.base_url + "/organizations/" + globalid + "/dns"
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
return APIResponse(data=DnsAddress(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def RemovePendingOrganizationInvitation(
self, username, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Cancel a pending invitation.
It is method for DELETE /organizations/{globalid}/invitations/{username}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/invitations/" + username
return self.client.delete(uri, None, headers, query_params, content_type)
def GetInvitations(self, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Get the list of pending invitations for users to join this organization.
It is method for GET /organizations/{globalid}/invitations
"""
uri = self.client.base_url + "/organizations/" + globalid + "/invitations"
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
resps = []
for elem in resp.json():
resps.append(JoinOrganizationInvitation(elem))
return APIResponse(data=resps, response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def DeleteOrganizationLogo(self, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Removes the Logo from an organization
It is method for DELETE /organizations/{globalid}/logo
"""
uri = self.client.base_url + "/organizations/" + globalid + "/logo"
return self.client.delete(uri, None, headers, query_params, content_type)
def GetOrganizationLogo(self, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Get the Logo from an organization
It is method for GET /organizations/{globalid}/logo
"""
uri = self.client.base_url + "/organizations/" + globalid + "/logo"
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=OrganizationLogo(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def SetOrganizationLogo(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Set the organization Logo for the organization
It is method for PUT /organizations/{globalid}/logo
"""
uri = self.client.base_url + "/organizations/" + globalid + "/logo"
resp = self.client.put(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=OrganizationLogo(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def RemoveOrganizationMember(
self, username, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Remove a member from an organization.
It is method for DELETE /organizations/{globalid}/members/{username}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/members/" + username
return self.client.delete(uri, None, headers, query_params, content_type)
def AddOrganizationMember(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Invite someone to become member of an organization.
It is method for POST /organizations/{globalid}/members
"""
uri = self.client.base_url + "/organizations/" + globalid + "/members"
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
return APIResponse(data=JoinOrganizationInvitation(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def UpdateOrganizationMemberShip(
self, data, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Update an organization membership
It is method for PUT /organizations/{globalid}/members
"""
uri = self.client.base_url + "/organizations/" + globalid + "/members"
resp = self.client.put(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=Organization(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def RejectOrganizationInvite(
self, invitingorg, role, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Reject the invite for one of your organizations
It is method for DELETE /organizations/{globalid}/organizations/{invitingorg}/roles/{role}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/organizations/" + invitingorg + "/roles/" + role
return self.client.delete(uri, None, headers, query_params, content_type)
def AcceptOrganizationInvite(
self, data, invitingorg, role, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Accept the invite for one of your organizations
It is method for POST /organizations/{globalid}/organizations/{invitingorg}/roles/{role}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/organizations/" + invitingorg + "/roles/" + role
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
return APIResponse(data=JoinOrganizationInvitation(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def RemoveIncludeSubOrgsOf(
self, orgmember, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Remove an orgmember or orgowner organization from the includesuborgsof list
It is method for DELETE /organizations/{globalid}/orgmembers/includesuborgs/{orgmember}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/orgmembers/includesuborgs/" + orgmember
return self.client.delete(uri, None, headers, query_params, content_type)
def AddIncludeSubOrgsOf(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Add an orgmember or orgowner organization to the includesuborgsof list
It is method for POST /organizations/{globalid}/orgmembers/includesuborgs
"""
uri = self.client.base_url + "/organizations/" + globalid + "/orgmembers/includesuborgs"
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
return APIResponse(data=Organization(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def DeleteOrgMember(self, globalid2, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Remove an organization as a member
It is method for DELETE /organizations/{globalid}/orgmembers/{globalid2}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/orgmembers/" + globalid2
return self.client.delete(uri, None, headers, query_params, content_type)
def SetOrgMember(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Add another organization as a member of this one
It is method for POST /organizations/{globalid}/orgmembers
"""
uri = self.client.base_url + "/organizations/" + globalid + "/orgmembers"
return self.client.post(uri, data, headers, query_params, content_type)
def UpdateOrganizationOrgMemberShip(
self, data, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Update the membership status of an organization
It is method for PUT /organizations/{globalid}/orgmembers
"""
uri = self.client.base_url + "/organizations/" + globalid + "/orgmembers"
resp = self.client.put(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=Organization(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def DeleteOrgOwner(self, globalid2, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Remove an organization as an owner
It is method for DELETE /organizations/{globalid}/orgowners/{globalid2}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/orgowners/" + globalid2
return self.client.delete(uri, None, headers, query_params, content_type)
def SetOrgOwner(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Add another organization as an owner of this one
It is method for POST /organizations/{globalid}/orgowners
"""
uri = self.client.base_url + "/organizations/" + globalid + "/orgowners"
return self.client.post(uri, data, headers, query_params, content_type)
def RemoveOrganizationOwner(
self, username, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Remove an owner from organization
It is method for DELETE /organizations/{globalid}/owners/{username}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/owners/" + username
return self.client.delete(uri, None, headers, query_params, content_type)
def AddOrganizationOwner(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Invite someone to become owner of an organization.
It is method for POST /organizations/{globalid}/owners
"""
uri = self.client.base_url + "/organizations/" + globalid + "/owners"
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
return APIResponse(data=JoinOrganizationInvitation(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def DeleteOrganizationRegistryEntry(
self, key, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Removes a RegistryEntry from the organization's registry
It is method for DELETE /organizations/{globalid}/registry/{key}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/registry/" + key
return self.client.delete(uri, None, headers, query_params, content_type)
def GetOrganizationRegistryEntry(
self, key, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Get a RegistryEntry from the organization's registry.
It is method for GET /organizations/{globalid}/registry/{key}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/registry/" + key
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=RegistryEntry(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def ListOrganizationRegistry(self, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Lists the RegistryEntries in an organization's registry.
It is method for GET /organizations/{globalid}/registry
"""
uri = self.client.base_url + "/organizations/" + globalid + "/registry"
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
resps = []
for elem in resp.json():
resps.append(RegistryEntry(elem))
return APIResponse(data=resps, response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def AddOrganizationRegistryEntry(
self, data, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Adds a RegistryEntry to the organization's registry, if the key is already used, it is overwritten.
It is method for POST /organizations/{globalid}/registry
"""
uri = self.client.base_url + "/organizations/" + globalid + "/registry"
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
return APIResponse(data=RegistryEntry(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def DeleteRequiredScope(
self, requiredscope, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Deletes a required scope
It is method for DELETE /organizations/{globalid}/requiredscopes/{requiredscope}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/requiredscopes/" + requiredscope
return self.client.delete(uri, None, headers, query_params, content_type)
def UpdateRequiredScope(
self, data, requiredscope, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Updates a required scope
It is method for PUT /organizations/{globalid}/requiredscopes/{requiredscope}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/requiredscopes/" + requiredscope
return self.client.put(uri, data, headers, query_params, content_type)
def AddRequiredScope(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Adds a required scope
It is method for POST /organizations/{globalid}/requiredscopes
"""
uri = self.client.base_url + "/organizations/" + globalid + "/requiredscopes"
return self.client.post(uri, data, headers, query_params, content_type)
def GetOrganizationTree(self, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Tree structure of all suborganizations
It is method for GET /organizations/{globalid}/tree
"""
uri = self.client.base_url + "/organizations/" + globalid + "/tree"
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
resps = []
for elem in resp.json():
resps.append(OrganizationTreeItem(elem))
return APIResponse(data=resps, response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def UserIsMember(self, username, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Checks if the user has membership rights on the organization
It is method for GET /organizations/{globalid}/users/ismember/{username}
"""
uri = self.client.base_url + "/organizations/" + globalid + "/users/ismember/" + username
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=IsMember(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def GetOrganizationUsers(self, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Get all users from this organization, not including suborganizations.
It is method for GET /organizations/{globalid}/users
"""
uri = self.client.base_url + "/organizations/" + globalid + "/users"
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=GetOrganizationUsersResponseBody(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def DeleteOrganization(self, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Deletes an organization and all data linked to it (join-organization-invitations, oauth_access_tokens, oauth_clients, logo)
It is method for DELETE /organizations/{globalid}
"""
uri = self.client.base_url + "/organizations/" + globalid
return self.client.delete(uri, None, headers, query_params, content_type)
def GetOrganization(self, globalid, headers=None, query_params=None, content_type="application/json"):
"""
Get organization info
It is method for GET /organizations/{globalid}
"""
uri = self.client.base_url + "/organizations/" + globalid
resp = self.client.get(uri, None, headers, query_params, content_type)
try:
if resp.status_code == 200:
return APIResponse(data=Organization(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def CreateNewSubOrganization(
self, data, globalid, headers=None, query_params=None, content_type="application/json"
):
"""
Create a new suborganization.
It is method for POST /organizations/{globalid}
"""
uri = self.client.base_url + "/organizations/" + globalid
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
return APIResponse(data=Organization(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
def CreateNewOrganization(self, data, headers=None, query_params=None, content_type="application/json"):
"""
Create a new organization. 1 user should be in the owners list. Validation is performed to check if the securityScheme allows management on this user.
It is method for POST /organizations
"""
uri = self.client.base_url + "/organizations"
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
return APIResponse(data=Organization(resp.json()), response=resp)
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, str(e))
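# --- Illustrative usage sketch, not part of the generated client ---
# It assumes an already constructed service object exposing the methods above;
# how that object is built is outside this excerpt. "example.org" and the other
# identifiers below are placeholders, not real organizations.
def print_organization_summary(org_service, globalid="example.org"):
    """Fetch an organization plus its pending invitations via the generated methods."""
    org_resp = org_service.GetOrganization(globalid)      # APIResponse wrapping an Organization
    invites_resp = org_service.GetInvitations(globalid)   # APIResponse wrapping a list of invitations
    print(org_resp.data, len(invites_resp.data))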
|
StarcoderdataPython
|
324
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 26 16:34:21 2018
@author: LiHongWang
"""
import os
import tensorflow as tf
from model import fcn_vgg
from model import fcn_mobile
from model import fcn_resnet_v2
from data import input_data
slim = tf.contrib.slim
def main():
num_classes=2
tfRecord_dir= 'D:/dataSet/kitti/road/sub_um_lane_tra66.tfrecord'
train_dir = './fm2/'
if not os.path.exists(train_dir):
os.makedirs(train_dir)
with tf.Graph().as_default():
global_step = tf.contrib.framework.get_or_create_global_step()
tf.logging.set_verbosity(tf.logging.INFO)
with tf.device("/cpu:0"):
samples=input_data.get_images_labels(tfRecord_dir,num_classes,66,
crop_size=[224,224],
batch_size=4)
batch_queue = slim.prefetch_queue.prefetch_queue(samples,
capacity=128 )
tra_batch = batch_queue.dequeue()
logit,prediction=fcn_mobile.fcn_mobv1(tra_batch['image'],num_classes)
# logit,prediction=fcn_vgg.fcn_vgg16(tra_batch['image'],num_classes)
# logit,prediction=fcn_resnet_v2.fcn_res101(tra_batch['image'],num_classes)
cross_entropy=tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logit,
labels=tf.squeeze(tra_batch['label'], squeeze_dims=[3]),name="entropy")
loss = tf.reduce_mean(cross_entropy,name='loss')
slim.losses.add_loss(loss)
total_loss = slim.losses.get_total_loss()
# print("image", tra_batch['image'])
# print("label", tf.cast(tra_batch['label']*255, tf.uint8))
# print("prediction", tf.cast(prediction*255, tf.uint8))
# Create some summaries to visualize the training process:
tf.summary.scalar('losses/Total_Loss', total_loss)
tf.summary.image("image", tra_batch['image'], max_outputs=4)
tf.summary.image("label", tf.cast(tra_batch['label']*255, tf.uint8), max_outputs=4)
tf.summary.image("prediction", tf.cast(prediction*255, tf.uint8), max_outputs=4)
lr = tf.train.exponential_decay(0.001,
global_step,
10000,
0.8,
staircase=True)
#lr = tf.constant(0.001, tf.float32)
tf.summary.scalar('learning_rate', lr)
for variable in slim.get_model_variables():
tf.summary.histogram(variable.op.name, variable)
# Specify the optimizer and create the train op:
optimizer = tf.train.RMSPropOptimizer(lr,0.9)
train_op = slim.learning.create_train_op(total_loss, optimizer)
# Run the training:
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.7)
config=tf.ConfigProto(gpu_options=gpu_options)
final_loss = slim.learning.train(train_op,
logdir=train_dir,
log_every_n_steps=100,
save_summaries_secs=20,
save_interval_secs=1800,
init_fn=None,#fcn_mobile.get_init_fn(),
session_config=config,
number_of_steps=65000)
print('Finished training. Last batch loss %f' % final_loss)
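# A plain-Python sketch of the staircase exponential-decay schedule configured
# in main() (base rate 0.001, decayed by 0.8 every 10000 steps); useful for
# sanity-checking the learning rate without running TensorFlow.
def staircase_lr(step, base_lr=0.001, decay_steps=10000, decay_rate=0.8):
    return base_lr * decay_rate ** (step // decay_steps)
# e.g. staircase_lr(0) == 0.001, staircase_lr(10000) == 0.0008,
# and staircase_lr(65000) is roughly 0.00026 at the final step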
if __name__=='__main__':
main()
|
StarcoderdataPython
|
145207
|
import os
from flask import Flask, render_template, request, Response, send_from_directory, Blueprint, flash, g, redirect
from functools import wraps
from flask import current_app as app
from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator
import datetime
from crontab import CronTab
import getpass
from pocket import db as db
from .nav import nav
money = Blueprint("money", __name__)
def check_auth(username, password):
user = os.environ.get('AUTH_USER')
if user is None:
user = 'admin'
passwd = os.environ.get('AUTH_PASS')
if passwd is None:
passwd = '<PASSWORD>'
return username == user and password == passwd
def authenticate():
"""Sends a 401 response that enables basic auth"""
return Response(
'Could not verify your access level for that URL.\n'
'You have to login with proper credentials', 401,
{'WWW-Authenticate': 'Basic realm="Login Required"'})
def requires_auth(f):
@wraps(f)
def decorated(*args, **kwargs):
auth = request.authorization
if not auth or not check_auth(auth.username, auth.password):
return authenticate()
return f(*args, **kwargs)
return decorated
nav.register_element('money_top', Navbar( View('Pocket Money Tracker', '.home'), View('Schedules', '.schedules'), View('Add Child', '.addChild')))
@money.route('/favicon.ico')
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static'), 'favicon.ico', mimetype='image/vnd.microsoft.icon')
@money.route('/history/<child>')
def history(child):
rows = db.getHistory(child)
return render_template("history.html",rows = rows, child = child)
@money.route('/add/<child>')
@requires_auth
def add(child):
now = datetime.datetime.now()
dateString = now.strftime("%d/%m/%Y")
templateData = {
'child': child,
'dt':dateString
}
return render_template('add.html', **templateData)
@money.route('/schedules')
def schedules():
rows = db.getSchedules()
cron = CronTab(user=getpass.getuser())
return render_template('schedule.html', rows = rows, cron = cron)
@money.route('/addSchedule')
@requires_auth
def addSchedule():
rows = db.getChildren()
return render_template('addSchedule.html', rows = rows)
@money.route('/addScheduleRec', methods = ['POST', 'GET'])
def addScheduleRec():
if request.method == 'POST':
try:
print(request.form)
child = request.form['children']
amt = request.form['amt']
desc = request.form['desc']
freq = request.form['freq'] # weekly / monthly
freqWeekly = request.form['daily'] # MON - SUN
freqMonthly = request.form['monthly'] # 1 - 31
frequency = ""
if amt is None:
amt = 0
cron = CronTab(user=getpass.getuser())
job = cron.new(command="/payment.sh '" + child + "' " + amt, comment=desc)
job.minute.on(1)
job.hour.on(1)
if freq == "weekly":
job.dow.on(freqWeekly)
frequency = "Every week on " + freqWeekly
if freq == "monthly":
job.setall('1 1 ' + freqMonthly + ' * *')
frequency = "On the " + str(freqMonthly) + " day of the month"
cron.write()
db.addSchedule(child, amt, desc, frequency)
msg = "successfully added schedule"
except Exception as e:
print(e)
msg = "error adding schedule"
finally:
flash(msg)
return redirect('/')
@money.route('/deleteSchedule/<child>/<desc>/<rowid>')
@requires_auth
def deleteSchedule(child, desc, rowid):
try:
print("Deleting schedule record")
cron = CronTab(user=getpass.getuser())
cron.remove_all(comment=desc)
cron.write()
db.deleteSchedule(child, rowid)
msg = "Successfully deleted record"
except Exception as e:
print(e)
msg = "Error deleting record, please retry"
finally:
flash(msg)
return redirect('/')
@money.route('/deleteAmount/<child>/<rowid>')
@requires_auth
def deleteAmount(child, rowid):
try:
print("Deleting record")
db.deleteAmount(child, rowid)
msg = "Successfully deleted record"
except Exception as e:
print(e)
msg = "Error deleting record, please retry"
finally:
flash(msg)
return redirect('/')
@money.route('/addRec', methods = ['POST', 'GET'])
def addRec():
if request.method == 'POST':
try:
child = request.form['child']
dt = request.form['dt']
amt = request.form['amt']
desc = request.form['desc']
db.addData(child, dt, amt, desc)
msg = "Successfully added transaction"
except:
msg = "Error adding transaction, please retry"
finally:
flash(msg)
return redirect('/')
@money.route('/addChildRec', methods = ['POST', 'GET'])
def addChildRec():
if request.method == 'POST':
try:
child = request.form['child']
amt = request.form['amt']
if amt is None:
amt = 0
now = datetime.datetime.now()
dt = now.strftime("%Y-%m-%d")
db.addChild(child, amt, dt)
msg = "successfully added child"
except Exception as e:
print(e)
msg = "error adding child"
finally:
flash(msg)
return redirect('/')
@money.route('/addChild')
def addChild():
now = datetime.datetime.now()
dateString = now.strftime("%Y-%m-%d")
return render_template('addchild.html', title = 'Pocket Money Tracker', time = dateString)
@money.route("/")
def home():
now = datetime.datetime.now()
dateString = now.strftime("%Y-%m-%d")
rows = db.getBalances()
if len(rows) == 0:
return render_template('addchild.html', title = 'Pocket Money Tracker', time = dateString)
else:
return render_template('index.html', rows = rows, title = 'Pocket Money Tracker', time = dateString)
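# Minimal sketch (not part of the original module) of how this blueprint might
# be mounted on an application; the real project presumably has its own factory
# and also registers `nav`, and the secret key below is only a placeholder.
def create_app():
    app = Flask(__name__)
    app.secret_key = "change-me"  # needed because the views above call flash()
    app.register_blueprint(money)
    return app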
|
StarcoderdataPython
|
3393269
|
#! /usr/bin/python
import logging
import os
import random
import threading
from random import randint
try:
    # gpsd client bindings; only needed when reading real hardware (mode is None)
    import gps
    from gps import WATCH_ENABLE
except ImportError:  # keep the simulated mode usable without gpsd installed
    gps = None
class emGps(threading.Thread):
def __init__(self, mode=None):
threading.Thread.__init__(self)
logging.info('Global Positioning System')
self.gpsd = None
self.running = False
self.mode = mode
self.latitude = None
self.longitude = None
self.altitude = None
self.satellites = None
if self.mode is None:
self.gpsd = gps.gps(mode=WATCH_ENABLE)
def run(self):
self.running = True
while self.running:
if self.mode is None:
self.gpsd.next()
def stop(self):
self.running = False
def emGpsData(self):
if self.mode is None:
self.latitude = self.gpsd.fix.latitude
self.longitude = self.gpsd.fix.longitude
self.altitude = self.gpsd.fix.altitude
self.satellites = self.gpsd.satellites
self.speed = self.gpsd.speed
self.track = self.gpsd.track
else:
self.latitude = random.uniform(21.14000000, 21.18000000)
self.longitude = random.uniform(-101.600000, -101.660000)
self.altitude = randint(1000, 2000)
self.satellites = randint(1,10)
self.speed = randint(10, 100)
self.track = randint(0, 360)
gpsdata = ("Gps: {0}," "{1}," "{2}," "{3}," "{4}," "{5}".format( \
self.latitude, self.longitude, self.altitude, self.satellites, self.speed, self.track))
logging.info(gpsdata)
return self.latitude, self.longitude, \
self.altitude, self.satellites, \
self.speed, self.track
@property
def fix(self):
return self.gpsd.fix
@property
def utc(self):
return self.gpsd.utc
@property
def satellitess(self):
return self.gpsd.satellites
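# Usage sketch in simulated mode; any non-None `mode` value makes emGpsData()
# return random fixes, so no gpsd daemon is required for this to run.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    tracker = emGps(mode="simulation")
    lat, lon, alt, sats, speed, track = tracker.emGpsData()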
# End of File
|
StarcoderdataPython
|
1731868
|
<filename>rec_run.py
import os
import argparse
def get_all_ini_files():
"""
collect all ini files in the current folder
:return: a list of sorted file names
"""
current_path = os.getcwd()
all_files = os.listdir(current_path)
ini_files = []
for item in all_files:
if os.path.isfile(item) and item.endswith('.ini'):
ini_files.append(item)
return sorted(ini_files)
def modify_arg_file(file_name=None, value=None):
"""
change arg file
:param file_name: which file to change
:param value: what value should be changed to
:return: None
"""
with open("astra_in.in", 'r') as arg_file:
contents = arg_file.readlines()
if file_name is not None:
distribution_line = contents[2]
line_parts = distribution_line.split('\'')
line_parts[1] = file_name
new_line = '\''.join(line_parts)
contents[2] = new_line
if value is not None:
# max_b_line = contents[68]
max_b_line = contents[80] # 80 if space charge module is taken into account
max_b_part = max_b_line.split('=')
max_b_part[1] = value
new_max_b_line = '='.join(max_b_part)
contents[80] = new_max_b_line # 80 if space charge module is taken into account
with open('astra_in_' + file_name + '-solStrength' + value + '.in', 'w') as arg_file:
arg_file.writelines(contents)
def main(filename=None, start=None, end=None, step=None):
"""
main function
:param filename: particle distribution files
:param start: imaging solenoid starting B field
:param end: imaging solenoid final B field
:param step: scan step size
:return: None
"""
if start is not None and filename is None and end is None and step is None:
files = get_all_ini_files()
for file in files:
print('processing {fn} with solenoid B field value of {va}'.format(
fn=file,
va=start
))
modify_arg_file(file_name=file, value=str(start))
os.system('./astra astra_in_' + file + '-solStrength' + start + '.in')
elif filename is not None and start is not None and end is not None and step is not None:
for i in range(int(start), int(end) - 1, int(step)):
value = i / 10000
print('processing {fn} with solenoid B field value of {va}'.format(
fn=filename,
va=value
))
modify_arg_file(file_name=filename, value=str(value))
os.system('./astra astra_in_' + filename + '-solStrength' + str(value) + '.in')
else:
# TODO: add raise errors and remove quit()
print('Follow the following format:')
print('python3 ' + __file__ + ' -f [filename] -start [start] -end [end] -step [step]')
quit()
print("DONE.")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename", help="file name")
parser.add_argument('-s', '--start', help='start value (10000 or real number)')
parser.add_argument('-e', '--end', help='end value (10000)')
parser.add_argument('-st', '--step', help='step (10000)')
args = parser.parse_args()
main(filename=args.filename, start=args.start, end=args.end, step=args.step)
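# Example invocations (file names and numeric ranges are purely illustrative):
#   scan one distribution file from setting 300 down to 210 in steps of -10:
#       python3 rec_run.py -f beam.ini -s 300 -e 210 -st -10
#   run every .ini file in the current folder at a single solenoid setting:
#       python3 rec_run.py -s 300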
|
StarcoderdataPython
|
1699053
|
import os
from configparser import ConfigParser, RawConfigParser
from flask import request, g, Response
import requests
from json import dumps, loads
from pymunge import MungeContext
curr_dirname = os.path.dirname(os.path.abspath(__file__))
src_dir, _ = os.path.split(curr_dirname)
file = src_dir + '/dev_config.ini'
def use_munge():
cp = ConfigParser()
cp.read(file)
return cp['GLOBAL'].getboolean('munge')
def munge_response(response):
if use_munge():
body = response.json
payload = dumps(body).encode('utf-8')
with MungeContext() as ctx:
cred = ctx.encode(payload).decode('utf-8')
json = dumps(dict(munge_cred=cred))
response.data = json
return response
else:
return response
def unmunge_request():
body = request.json
if use_munge():
cred = body['munge_cred'].encode('utf-8')
with MungeContext() as ctx:
payload, uid, gid = ctx.decode(cred)
g.payload = loads(payload.decode('utf-8'))
else:
g.payload = body
def send_request(*args, **kwargs) -> (Response, str):
if not kwargs.get('json'):
kwargs['json'] = dict()
if use_munge():
with MungeContext() as ctx:
cred = ctx.encode(dumps(kwargs['json']).encode('utf-8')).decode('utf-8')
kwargs['json'] = dict(munge_cred=cred)
r = requests.request(*args, **kwargs)
if r.text:
return r, loads(unmunge(r.text))
else:
return r, r.text
else:
r = requests.request(*args, **kwargs)
return r, loads(r.text)
def unmunge(text):
if use_munge():
d = loads(text)
cred = d['munge_cred'].encode('utf-8')
with MungeContext() as ctx:
payload, uid, guid = ctx.decode(cred)
return payload.decode('utf-8')
else:
return text
def get_conf_directory():
default_config = '/etc/default/dmd'
home_dir = os.path.expanduser("~") + '/.dmd/'
project_cfg = os.getcwd() + '/src/cfg/'
if os.path.isfile(default_config):
with open(default_config) as f:
file_content = '[dummy_section]\n' + f.read()
config_parser = RawConfigParser()
config_parser.read_string(file_content)
return config_parser.get('dummy_section', 'conf_directory')
elif os.path.isfile(home_dir + 'controller'):
return home_dir
else:
return project_cfg
def get_configfile_from_config(configuration):
default_config = '/etc/default/jaws'
project_cfg = os.getcwd() + '/src/cfg/'
path = ""
config_file = ""
if os.path.isfile(default_config):
with open(default_config) as f:
file_content = '[dummy_section]\n' + f.read()
config_parser = RawConfigParser()
config_parser.read_string(file_content)
path = config_parser.get('dummy_section', 'conf_directory')
else:
path = project_cfg
main_parser = ConfigParser()
main_config = main_parser.read(path + 'main.ini')
if configuration == "storages":
config_file = main_parser.get('CONFIGFILES', 'STORAGES')
if configuration == "acl":
config_file = main_parser.get('CONFIGFILES', 'ACL')
if configuration == "application":
config_file = main_parser.get('CONFIGFILES', 'APPLICATION')
if configuration == "copytools":
config_file = main_parser.get('CONFIGFILES', 'COPYTOOLS')
if configuration == "controller":
config_file = main_parser.get('CONFIGFILES', 'CONTROLLER')
if configuration == "master":
config_file = main_parser.get('CONFIGFILES', 'MASTER')
if configuration == "view":
config_file = main_parser.get('CONFIGFILES', 'MASTER')
if configuration == "worker":
config_file = main_parser.get('CONFIGFILES', 'WORKER')
if configuration == "main":
config_file = path + 'main.ini'
config = ConfigParser()
config.read(path + config_file)
configuration_sections = config.sections()
configuration = dict()
for section in configuration_sections:
configuration[section] = dict(config.items(section))
return configuration
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
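# Quick illustration of the Singleton metaclass above: repeated instantiation
# yields the same object (a throwaway example class, not used elsewhere).
if __name__ == "__main__":
    class _Settings(metaclass=Singleton):
        def __init__(self):
            self.loaded = True
    assert _Settings() is _Settings()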
|
StarcoderdataPython
|
3234542
|
<filename>blog/migrations/0005_auto_20180721_0113.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-07-21 08:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0004_auto_20180721_0108'),
]
operations = [
migrations.RemoveField(
model_name='clue',
name='publish',
),
migrations.AlterField(
model_name='clue',
name='slug',
field=models.SlugField(unique=True),
),
]
|
StarcoderdataPython
|
3369629
|
<gh_stars>100-1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayCommerceDataScenicSyncModel(object):
def __init__(self):
self._code_value = None
self._isv_name = None
self._isv_scenic_address = None
self._isv_scenic_name = None
self._outer_id = None
self._scenic_app_id = None
self._scenic_id = None
@property
def code_value(self):
return self._code_value
@code_value.setter
def code_value(self, value):
if isinstance(value, list):
self._code_value = list()
for i in value:
self._code_value.append(i)
@property
def isv_name(self):
return self._isv_name
@isv_name.setter
def isv_name(self, value):
self._isv_name = value
@property
def isv_scenic_address(self):
return self._isv_scenic_address
@isv_scenic_address.setter
def isv_scenic_address(self, value):
self._isv_scenic_address = value
@property
def isv_scenic_name(self):
return self._isv_scenic_name
@isv_scenic_name.setter
def isv_scenic_name(self, value):
self._isv_scenic_name = value
@property
def outer_id(self):
return self._outer_id
@outer_id.setter
def outer_id(self, value):
self._outer_id = value
@property
def scenic_app_id(self):
return self._scenic_app_id
@scenic_app_id.setter
def scenic_app_id(self, value):
self._scenic_app_id = value
@property
def scenic_id(self):
return self._scenic_id
@scenic_id.setter
def scenic_id(self, value):
self._scenic_id = value
def to_alipay_dict(self):
params = dict()
if self.code_value:
if isinstance(self.code_value, list):
for i in range(0, len(self.code_value)):
element = self.code_value[i]
if hasattr(element, 'to_alipay_dict'):
self.code_value[i] = element.to_alipay_dict()
if hasattr(self.code_value, 'to_alipay_dict'):
params['code_value'] = self.code_value.to_alipay_dict()
else:
params['code_value'] = self.code_value
if self.isv_name:
if hasattr(self.isv_name, 'to_alipay_dict'):
params['isv_name'] = self.isv_name.to_alipay_dict()
else:
params['isv_name'] = self.isv_name
if self.isv_scenic_address:
if hasattr(self.isv_scenic_address, 'to_alipay_dict'):
params['isv_scenic_address'] = self.isv_scenic_address.to_alipay_dict()
else:
params['isv_scenic_address'] = self.isv_scenic_address
if self.isv_scenic_name:
if hasattr(self.isv_scenic_name, 'to_alipay_dict'):
params['isv_scenic_name'] = self.isv_scenic_name.to_alipay_dict()
else:
params['isv_scenic_name'] = self.isv_scenic_name
if self.outer_id:
if hasattr(self.outer_id, 'to_alipay_dict'):
params['outer_id'] = self.outer_id.to_alipay_dict()
else:
params['outer_id'] = self.outer_id
if self.scenic_app_id:
if hasattr(self.scenic_app_id, 'to_alipay_dict'):
params['scenic_app_id'] = self.scenic_app_id.to_alipay_dict()
else:
params['scenic_app_id'] = self.scenic_app_id
if self.scenic_id:
if hasattr(self.scenic_id, 'to_alipay_dict'):
params['scenic_id'] = self.scenic_id.to_alipay_dict()
else:
params['scenic_id'] = self.scenic_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayCommerceDataScenicSyncModel()
if 'code_value' in d:
o.code_value = d['code_value']
if 'isv_name' in d:
o.isv_name = d['isv_name']
if 'isv_scenic_address' in d:
o.isv_scenic_address = d['isv_scenic_address']
if 'isv_scenic_name' in d:
o.isv_scenic_name = d['isv_scenic_name']
if 'outer_id' in d:
o.outer_id = d['outer_id']
if 'scenic_app_id' in d:
o.scenic_app_id = d['scenic_app_id']
if 'scenic_id' in d:
o.scenic_id = d['scenic_id']
return o
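# Round-trip sketch for the model above; every field value here is invented
# purely for illustration.
if __name__ == "__main__":
    sample = {
        "code_value": ["TICKET_CODE_001"],
        "isv_name": "Example ISV",
        "isv_scenic_name": "Example Scenic Spot",
        "outer_id": "outer-001",
        "scenic_id": "2088000000000000",
    }
    model = AlipayCommerceDataScenicSyncModel.from_alipay_dict(sample)
    # to_alipay_dict() only emits the attributes that were actually set
    print(model.to_alipay_dict())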
|
StarcoderdataPython
|
170905
|
<gh_stars>1-10
from topfarm.constraint_components.capacity import CapacityConstraint
import numpy as np
import topfarm
from topfarm.tests.test_files import xy3tb
from topfarm._topfarm import TopFarmProblem
from topfarm.easy_drivers import EasySimpleGADriver
def test_capacity_as_penalty():
tf = xy3tb.get_tf(design_vars={topfarm.type_key: ([0, 0, 0], 0, 2)},
constraints=[CapacityConstraint(5, rated_power_array=[100, 10000, 10])],
driver=EasySimpleGADriver(),
plot_comp=None)
# check normal result that satisfies the penalty
assert tf.evaluate()[0] == 141.0
# check penalized result if capacity constraint is not satisfied
assert tf.evaluate({'type': np.array([0, 1, 1])})[0] == 1e10 + 15.1
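# Worked check of the penalised value above (assuming, as the naming in the
# next test suggests, rated powers in kW and capacities in MW): types [0, 1, 1]
# select 100 + 10000 + 10000 = 20100 kW = 20.1 MW installed, i.e. 15.1 MW over
# the 5 MW cap, hence the 1e10 + 15.1 objective.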
def test_capacity_tf():
# 15 turbines, 5 different types, 50MW max installed capacity
n_wt = 15
rated_power_array_kW = np.linspace(1, 10, int(n_wt / 3)) * 1e3
inputtypes = np.tile(np.array([range(int(n_wt / 3))]), 3).flatten()
tf = TopFarmProblem({'type': inputtypes},
constraints=[CapacityConstraint(max_capacity=50, rated_power_array=rated_power_array_kW)],
driver=EasySimpleGADriver()
)
tf.evaluate()
# case above the maximum allowed installed capacity, yes penalty
assert tf["totalcapacity"] == 82.5
assert tf['penalty_comp.penalty_capacity_comp_50'] == 32.5
# set all turbines type 0, still 15 turbines and re-run the problem
tf.evaluate({'type': inputtypes * 0})
# case below the maximum allowed installed capacity, no penalty
assert tf["totalcapacity"] == 15
assert tf['penalty_comp.penalty_capacity_comp_50'][0] == 0.0
|
StarcoderdataPython
|
114294
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import typing as tp
from . import core
from . import container
from . import choice
def flatten_parameter(
parameter: core.Parameter,
with_containers: bool = True,
order: int = 0
) -> tp.Dict[str, core.Parameter]:
"""List all the instances involved as parameter (not as subparameter/
endogenous parameter)
Parameter
---------
parameter: Parameter
the parameter to inspect
with_containers: bool
if False, only non-container instances are returned (i.e. no Dict, Tuple, Instrumentation or Constant)
order: int
order of model/internal parameters to extract. With 0, no model/internal parameters are
extracted; with 1, only 1st-order ones are extracted; with 2, also the model/internal
parameters of those parameters, and so on.
Returns
-------
dict
a dict of all parameters implied in this parameter, i.e. all choices, items of dicts
and tuples etc., but not the subparameters/endogenous parameters like sigma,
with keys of type "<index>.<key>" for a tuple containing dicts containing data for instance.
Note
----
This function is experimental, its output will probably evolve before converging.
"""
flat = {"": parameter}
if isinstance(parameter, core.Dict):
content_to_add: tp.List[core.Dict] = [parameter]
if isinstance(parameter, container.Instrumentation): # special case: skip internal Tuple and Dict
content_to_add = [parameter[0], parameter[1]] # type: ignore
for c in content_to_add:
for k, p in c._content.items():
content = flatten_parameter(p, with_containers=with_containers, order=order)
flat.update({str(k) + ("" if not x else ("." if not x.startswith("#") else "") + x): y for x, y in content.items()})
if order > 0 and parameter._parameters is not None:
subparams = flatten_parameter(parameter.parameters, with_containers=False, order=order - 1)
flat.update({"#" + str(x): y for x, y in subparams.items()})
if not with_containers:
flat = {x: y for x, y in flat.items() if not isinstance(y, (core.Dict, core.Constant)) or isinstance(y, choice.BaseChoice)}
return flat
|
StarcoderdataPython
|
4840264
|
<reponame>xiaohuid/huobi_Python<filename>huobi/model/accountbalancerequest.py
from huobi.model import *
class AccountBalanceRequest:
"""
The account change information received by subscription of account.
:member
timestamp: The UNIX formatted timestamp generated by server in UTC.
change_type: The event that asset change notification related.
account_list: The list of account and balance
"""
def __init__(self):
self.timestamp = 0
self.client_req_id = ""
self.topic = ""
self.account_list = list()
@staticmethod
def json_parse(json_data):
account_balance = AccountBalanceRequest()
account_balance.timestamp = json_data.get_int("ts")
account_balance.client_req_id = json_data.get_string("cid")
account_balance.topic = json_data.get_string("topic")
subaccount_list = json_data.get_array("data")
account_list = list()
for subaccount in subaccount_list.get_items():
account = Account.json_parse(subaccount)
account_list.append(account)
account_balance.account_list = account_list
return account_balance
|
StarcoderdataPython
|
157342
|
# Generated by Django 2.2.6 on 2019-12-07 01:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('events', '0016_auto_20191206_2347'),
]
operations = [
migrations.DeleteModel(
name='DayOfWeek',
),
migrations.AlterField(
model_name='recurringevent',
name='first_occurence',
field=models.ForeignKey(blank=True, default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, to='events.RecurringEvent'),
),
]
|
StarcoderdataPython
|
143857
|
#!/usr/bin/env python
#
# Copyright 2011 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
.. moduleauthor:: <NAME> <<EMAIL>>
test_omegacn7500: Unittests for omegacn7500
Uses a dummy serial port from the module :py:mod:`dummy_serial`.
"""
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__license__ = "Apache License, Version 2.0"
import sys
import unittest
import omegacn7500
import dummy_serial
class TestCalculateRegisterAddress(unittest.TestCase):
knownValues=[
('setpoint', 0, 0, 8192), # registertype, patternnumber, stepnumber, knownresult
('setpoint', 1, 0, 8200),
('time', 0, 0, 8320),
('time', 0, 1, 8321),
('time', 1, 0, 8328),
('actualstep', 0, None, 4160),
('actualstep', 0, 0, 4160),
('actualstep', 1, None, 4161),
('actualstep', 1, 0, 4161),
('actualstep', 1, 5, 4161), # Stepnumber should have no effect.
('cycles', 0, None, 4176),
('cycles', 1, None, 4177),
('linkpattern', 0, None, 4192),
('linkpattern', 1, None, 4193),
]
def testKnownValues(self):
for registertype, patternnumber, stepnumber, knownresult in self.knownValues:
resultvalue = omegacn7500._calculateRegisterAddress(registertype, patternnumber, stepnumber)
self.assertEqual(resultvalue, knownresult)
def testWrongValues(self):
self.assertRaises(ValueError, omegacn7500._calculateRegisterAddress, 'ABC', 0, 0)
self.assertRaises(ValueError, omegacn7500._calculateRegisterAddress, 'setpoint', -1, 0)
self.assertRaises(ValueError, omegacn7500._calculateRegisterAddress, 'setpoint', 8, 0)
self.assertRaises(ValueError, omegacn7500._calculateRegisterAddress, 'setpoint', 0, -1)
self.assertRaises(ValueError, omegacn7500._calculateRegisterAddress, 'setpoint', 0, 8)
def testWrongType(self):
self.assertRaises(ValueError, omegacn7500._calculateRegisterAddress, 0, 0, 0) # Note: Raises value error
self.assertRaises(ValueError, omegacn7500._calculateRegisterAddress, 1.0, 0, 0)
self.assertRaises(ValueError, omegacn7500._calculateRegisterAddress, None, 0, 0)
self.assertRaises(ValueError, omegacn7500._calculateRegisterAddress, ['setpoint'], 0, 0)
self.assertRaises(TypeError, omegacn7500._calculateRegisterAddress, 'setpoint', 0.0, 0)
self.assertRaises(TypeError, omegacn7500._calculateRegisterAddress, 'setpoint', [0], 0)
self.assertRaises(TypeError, omegacn7500._calculateRegisterAddress, 'setpoint', None, 0)
self.assertRaises(TypeError, omegacn7500._calculateRegisterAddress, 'setpoint', 0, 0.0)
self.assertRaises(TypeError, omegacn7500._calculateRegisterAddress, 'setpoint', 0, [0])
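# The known values above are consistent with a simple address layout (an
# observation from this table, not taken from the instrument documentation):
# per-step registers use base + 8 * patternnumber + stepnumber (base 8192 for
# setpoint, 8320 for time), while per-pattern registers use base + patternnumber
# (4160 actualstep, 4176 cycles, 4192 linkpattern).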
class TestCheckPatternNumber(unittest.TestCase):
def testKnownResults(self):
omegacn7500._checkPatternNumber(0)
omegacn7500._checkPatternNumber(3)
omegacn7500._checkPatternNumber(7)
def testWrongValue(self):
self.assertRaises(ValueError, omegacn7500._checkPatternNumber, -1)
self.assertRaises(ValueError, omegacn7500._checkPatternNumber, 8)
self.assertRaises(ValueError, omegacn7500._checkPatternNumber, 99)
self.assertRaises(ValueError, omegacn7500._checkPatternNumber, 12345)
def testWrongType(self):
self.assertRaises(TypeError, omegacn7500._checkPatternNumber, '1')
self.assertRaises(TypeError, omegacn7500._checkPatternNumber, 1.0)
self.assertRaises(TypeError, omegacn7500._checkPatternNumber, [1])
self.assertRaises(TypeError, omegacn7500._checkPatternNumber, None)
class TestCheckStepNumber(unittest.TestCase):
def testKnownResults(self):
omegacn7500._checkStepNumber(0)
omegacn7500._checkStepNumber(3)
omegacn7500._checkStepNumber(7)
def testWrongValue(self):
self.assertRaises(ValueError, omegacn7500._checkStepNumber, -1)
self.assertRaises(ValueError, omegacn7500._checkStepNumber, 8)
self.assertRaises(ValueError, omegacn7500._checkStepNumber, 99)
self.assertRaises(ValueError, omegacn7500._checkStepNumber, 12345)
def testWrongType(self):
self.assertRaises(TypeError, omegacn7500._checkStepNumber, '1')
self.assertRaises(TypeError, omegacn7500._checkStepNumber, 1.0)
self.assertRaises(TypeError, omegacn7500._checkStepNumber, [1])
self.assertRaises(TypeError, omegacn7500._checkStepNumber, None)
class TestCheckSetpointValue(unittest.TestCase):
def testKnownResults(self):
omegacn7500._checkSetpointValue(900, 1000)
omegacn7500._checkSetpointValue(900.0, 1000.0)
def testWrongValue(self):
self.assertRaises(ValueError, omegacn7500._checkSetpointValue, 900, 800)
self.assertRaises(ValueError, omegacn7500._checkSetpointValue, 900.0, 800.0)
self.assertRaises(ValueError, omegacn7500._checkSetpointValue, -100, 800)
self.assertRaises(ValueError, omegacn7500._checkSetpointValue, 900, -800)
def testWrongType(self):
self.assertRaises(TypeError, omegacn7500._checkSetpointValue, '900', 1000)
self.assertRaises(TypeError, omegacn7500._checkSetpointValue, [900], 1000)
self.assertRaises(TypeError, omegacn7500._checkSetpointValue, None, 1000)
self.assertRaises(TypeError, omegacn7500._checkSetpointValue, 900, '1000')
self.assertRaises(TypeError, omegacn7500._checkSetpointValue, 900, [1000])
self.assertRaises(TypeError, omegacn7500._checkSetpointValue, 900, None)
class TestCheckTimeValue(unittest.TestCase):
def testKnownResults(self):
omegacn7500._checkTimeValue(75, 99)
omegacn7500._checkTimeValue(75.0, 99.0)
def testWrongValue(self):
self.assertRaises(ValueError, omegacn7500._checkTimeValue, 75, 10)
self.assertRaises(ValueError, omegacn7500._checkTimeValue, -5, 10)
self.assertRaises(ValueError, omegacn7500._checkTimeValue, -75, 10)
self.assertRaises(ValueError, omegacn7500._checkTimeValue, 75.0, 10.0)
self.assertRaises(ValueError, omegacn7500._checkTimeValue, -5.0, 10.0)
self.assertRaises(ValueError, omegacn7500._checkTimeValue, -75.0, 10.0)
self.assertRaises(ValueError, omegacn7500._checkTimeValue, 5, -10)
self.assertRaises(ValueError, omegacn7500._checkTimeValue, 75, -10)
self.assertRaises(ValueError, omegacn7500._checkTimeValue, 5.0, -10.0)
self.assertRaises(ValueError, omegacn7500._checkTimeValue, 75.0, -10.0)
def testWrongType(self):
self.assertRaises(TypeError, omegacn7500._checkTimeValue, '75', 99)
self.assertRaises(TypeError, omegacn7500._checkTimeValue, [75], 99)
self.assertRaises(TypeError, omegacn7500._checkTimeValue, None, 99)
self.assertRaises(TypeError, omegacn7500._checkTimeValue, 75, '99')
self.assertRaises(TypeError, omegacn7500._checkTimeValue, 75, [99])
self.assertRaises(TypeError, omegacn7500._checkTimeValue, 75, None)
###########################################
# Communication using a dummy serial port #
###########################################
class TestDummyCommunication_Slave1(unittest.TestCase):
"""Testing using dummy communication, with data recorded for slaveaddress = 1
Most of the tests are for making sure that the communication details are OK.
For some examples of testing the methods for argument value errors or
argument type errors, see the :meth:`.testSetControlModeWithWrongValue` and
:meth:`.testSetControlModeWithWrongValueType` methods.
"""
def setUp(self):
# Prepare a dummy serial port to have proper responses
dummy_serial.VERBOSE = False
dummy_serial.RESPONSES = RESPONSES
dummy_serial.DEFAULT_RESPONSE = 'NotFoundInDictionary'
# Monkey-patch a dummy serial port for testing purpose
omegacn7500.minimalmodbus.serial.Serial = dummy_serial.Serial
# Initialize a (dummy) instrument
self.instrument = omegacn7500.OmegaCN7500('DUMMYPORTNAME', 1)
self.instrument._debug = False
def testReadPv1(self):
self.assertAlmostEqual( self.instrument.get_pv(), 24.6 )
def testRun(self):
self.instrument.run()
def testStop(self):
self.instrument.stop()
def testIsRunning(self):
self.assertFalse( self.instrument.is_running() )
def testGetSetpoint(self):
self.assertAlmostEqual( self.instrument.get_setpoint(), 100)
def testSetSetpoint(self):
self.instrument.set_setpoint(100)
def testGetControlMode(self):
self.assertEqual( self.instrument.get_control_mode(), 'PID')
def testSetControlMode(self):
self.instrument.set_control_mode(3)
def testSetControlModeWithWrongValue(self):
self.assertRaises(ValueError, self.instrument.set_control_mode, 4)
self.assertRaises(ValueError, self.instrument.set_control_mode, -1)
def testSetControlModeWithWrongValueType(self):
self.assertRaises(TypeError, self.instrument.set_control_mode, 3.0)
self.assertRaises(TypeError, self.instrument.set_control_mode, [3])
self.assertRaises(TypeError, self.instrument.set_control_mode, '3')
self.assertRaises(TypeError, self.instrument.set_control_mode, None)
def testGetStartPatternNo(self):
self.assertEqual( self.instrument.get_start_pattern_no(), 2)
def testSetStartPatternNo(self):
self.instrument.set_start_pattern_no(2)
def testGetPatternStepSetpoint(self):
self.assertAlmostEqual( self.instrument.get_pattern_step_setpoint(0, 3), 333.3)
def testSetPatternStepSetpoint(self):
self.instrument.set_pattern_step_setpoint(0, 3, 333.3)
self.instrument.set_pattern_step_setpoint(0, 3, 40)
def testGetPatternStepTime(self):
self.assertAlmostEqual( self.instrument.get_pattern_step_time(0, 3), 45)
def testSetPatternStepTime(self):
self.instrument.set_pattern_step_time(0, 3, 45)
self.instrument.set_pattern_step_time(0, 3, 40)
def testGetPatternActualStep(self):
self.assertEqual( self.instrument.get_pattern_actual_step(0), 7 )
def testSetPatternActualStep(self):
self.instrument.set_pattern_actual_step(0, 7)
def testGetPatternAdditionalCycles(self):
self.assertEqual( self.instrument.get_pattern_additional_cycles(0), 4)
def testSetPatternAdditionalCycles(self):
self.instrument.set_pattern_additional_cycles(0, 4)
self.instrument.set_pattern_additional_cycles(0, 2)
def testGetPatternLinkToPattern(self):
self.assertEqual( self.instrument.get_pattern_link_topattern(0), 1)
def testSetPatternLinkToPattern(self):
self.instrument.set_pattern_link_topattern(0, 1)
def testGetAllPatternVariables(self): # TODO: Change this to proper assertEqual
_print_out( '\nSlave address 1:' )
_print_out( self.instrument.get_all_pattern_variables(0) )
def testSetAllPatternVariables(self):
self.instrument.set_all_pattern_variables(0,
10, 10,
20, 20,
30, 30,
40, 40,
50, 50,
60, 60,
70, 70,
80, 80,
7, 4, 1)
class TestDummyCommunication_Slave10(unittest.TestCase):
"""Testing using dummy communication, with data recorded for slaveaddress = 10
"""
def setUp(self):
dummy_serial.RESPONSES = RESPONSES
dummy_serial.DEFAULT_RESPONSE = 'NotFoundInDictionary'
omegacn7500.minimalmodbus.serial.Serial = dummy_serial.Serial
self.instrument = omegacn7500.OmegaCN7500('DUMMYPORTNAME', 10)
def testReadPv1(self):
self.assertAlmostEqual( self.instrument.get_pv(), 25.9 )
def testRun(self):
self.instrument.run()
def testStop(self):
self.instrument.stop()
def testIsRunning(self):
self.assertFalse( self.instrument.is_running() )
def testGetSetpoint(self):
self.assertAlmostEqual( self.instrument.get_setpoint(), 100)
def testSetSetpoint(self):
self.instrument.set_setpoint(100)
def testGetControlMode(self):
self.assertEqual( self.instrument.get_control_mode(), 'PID')
def testSetControlMode(self):
self.instrument.set_control_mode(3)
def testGetStartPatternNo(self):
self.assertEqual( self.instrument.get_start_pattern_no(), 2)
def testSetStartPatternNo(self):
self.instrument.set_start_pattern_no(2)
def testGetPatternStepSetpoint(self):
        self.assertAlmostEqual( self.instrument.get_pattern_step_setpoint(0, 3), 333.3)
def testSetPatternStepSetpoint(self):
self.instrument.set_pattern_step_setpoint(0, 3, 333.3)
self.instrument.set_pattern_step_setpoint(0, 3, 40)
def testGetPatternStepTime(self):
self.assertAlmostEqual( self.instrument.get_pattern_step_time(0, 3), 45)
def testSetPatternStepTime(self):
self.instrument.set_pattern_step_time(0, 3, 45)
self.instrument.set_pattern_step_time(0, 3, 40)
def testGetPatternActualStep(self):
self.assertEqual( self.instrument.get_pattern_actual_step(0), 7)
def testSetPatternActualStep(self):
self.instrument.set_pattern_actual_step(0, 7)
def testGetPatternAdditionalCycles(self):
self.assertEqual( self.instrument.get_pattern_additional_cycles(0), 4)
def testSetPatternAdditionalCycles(self):
self.instrument.set_pattern_additional_cycles(0, 4)
self.instrument.set_pattern_additional_cycles(0, 2)
def testGetPatternLinkToPattern(self):
self.assertEqual( self.instrument.get_pattern_link_topattern(0), 1)
def testSetPatternLinkToPattern(self):
self.instrument.set_pattern_link_topattern(0, 1)
def testGetAllPatternVariables(self): # TODO: Change this to proper assertEqual
_print_out( '\nSlave address 10:' )
_print_out( self.instrument.get_all_pattern_variables(0) )
def testSetAllPatternVariables(self):
self.instrument.set_all_pattern_variables(0,
10, 10,
20, 20,
30, 30,
40, 40,
50, 50,
60, 60,
70, 70,
80, 80,
7, 4, 1)
RESPONSES = {}
"""A dictionary of responses from a dummy Omega CN7500 instrument.
The key is the message (string) sent to the serial port, and the value is the response (string)
from the dummy serial port.
"""
## Recorded data from OmegaCN7500 ##
####################################
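# Hedged note on how this mapping is assumed to be used: the monkey-patched
# dummy_serial.Serial is expected to remember the latest frame passed to write()
# and have read() return RESPONSES[latest_frame], falling back to
# DEFAULT_RESPONSE ('NotFoundInDictionary') for frames that were never recorded.
# New recordings are added exactly like the entries below, for example the
# slave 1 get_pv() request/response pair:
#
#     RESPONSES['\x01\x03\x10\x00\x00\x01\x80\xca'] = '\x01\x03\x02\x00\xf68\x02'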
# Slave address 1, get_pv()
RESPONSES['\x01\x03\x10\x00\x00\x01\x80\xca'] = '\x01\x03\x02\x00\xf68\x02'
# Slave address 1, run()
RESPONSES['\x01\x05\x08\x14\xff\x00\xce^'] = '\x01\x05\x08\x14\xff\x00\xce^'
# Slave address 1, stop()
RESPONSES['\x01\x05\x08\x14\x00\x00\x8f\xae'] = '\x01\x05\x08\x14\x00\x00\x8f\xae'
# Slave address 1, is_running()
RESPONSES['\x01\x02\x08\x14\x00\x01\xfb\xae'] = '\x01\x02\x01\x00\xa1\x88'
# Slave address 1, get_setpoint()
RESPONSES['\x01\x03\x10\x01\x00\x01\xd1\n'] = '\x01\x03\x02\x03\xe8\xb8\xfa'
# Slave address 1, set_setpoint()
RESPONSES['\x01\x10\x10\x01\x00\x01\x02\x03\xe8\xb6\xfe'] = '\x01\x10\x10\x01\x00\x01T\xc9'
# Slave address 1, get_control_mode()
RESPONSES['\x01\x03\x10\x05\x00\x01\x90\xcb'] = '\x01\x03\x02\x00\x00\xb8D'
#RESPONSES['\x01\x03\x10\x05\x00\x01\x90\xcb'] = '\x01\x03\x02\x00\x09xB' # Use this for testing wrong controlmode
# Slave address 1, set_control_mode()
RESPONSES['\x01\x10\x10\x05\x00\x01\x02\x00\x03\xf7\xc5'] = '\x01\x10\x10\x05\x00\x01\x15\x08'
# Slave address 1, get_start_pattern_no()
RESPONSES['\x01\x03\x100\x00\x01\x80\xc5'] = '\x01\x03\x02\x00\x029\x85'
# Slave address 1, set_start_pattern_no()
RESPONSES['\x01\x10\x100\x00\x01\x02\x00\x023\xa0'] = '\x01\x10\x100\x00\x01\x05\x06'
# Slave address 1, set_pattern_step_setpoint() Pattern 0, step 3, value 333.3. See also below.
RESPONSES['\x01\x10 \x03\x00\x01\x02\r\x05C2'] = '\x01\x10 \x03\x00\x01\xfa\t'
# Slave address 1, set_pattern_step_time() Pattern 0, step 3, value 45. See also below.
RESPONSES['\x01\x10 \x83\x00\x01\x02\x00-X|'] = '\x01\x10 \x83\x00\x01\xfb\xe1'
# Slave address 1, set_pattern_additional_cycles() Pattern 0, value 4. See also below.
RESPONSES['\x01\x10\x10P\x00\x01\x02\x00\x04\xba\x02'] = '\x01\x10\x10P\x00\x01\x05\x18'
# Slave address 1, get_all_pattern_variables()
# --- Valid for pattern 0 ---
# SP0: 10 Time0: 10
# SP1: 20 Time1: 20
# SP2: 30 Time2: 30
# SP3: 333 Time3: 45
# SP4: 50 Time4: 50
# SP5: 60 Time5: 60
# SP6: 70 Time6: 70
# SP7: 80 Time7: 80
# Actual step: 7
# Add'l cycles: 4
# Linked pattern: 1
RESPONSES['\x01\x03 \x00\x00\x01\x8f\xca'] = '\x01\x03\x02\x00d\xb9\xaf' # SP0
RESPONSES['\x01\x03 \x01\x00\x01\xde\n'] = '\x01\x03\x02\x00\xc8\xb9\xd2'
RESPONSES['\x01\x03 \x02\x00\x01.\n'] = '\x01\x03\x02\x01,\xb8\t'
RESPONSES['\x01\x03 \x03\x00\x01\x7f\xca'] = '\x01\x03\x02\r\x05|\xd7'
RESPONSES['\x01\x03 \x04\x00\x01\xce\x0b'] = '\x01\x03\x02\x01\xf4\xb8S'
RESPONSES['\x01\x03 \x05\x00\x01\x9f\xcb'] = '\x01\x03\x02\x02X\xb8\xde'
RESPONSES['\x01\x03 \x06\x00\x01o\xcb'] = '\x01\x03\x02\x02\xbc\xb8\x95'
RESPONSES['\x01\x03 \x07\x00\x01>\x0b'] = '\x01\x03\x02\x03 \xb9l'
RESPONSES['\x01\x03 \x80\x00\x01\x8e"'] = '\x01\x03\x02\x00\n8C' # Time0
RESPONSES['\x01\x03 \x81\x00\x01\xdf\xe2'] = '\x01\x03\x02\x00\x14\xb8K'
RESPONSES['\x01\x03 \x82\x00\x01/\xe2'] = '\x01\x03\x02\x00\x1e8L'
RESPONSES['\x01\x03 \x83\x00\x01~"'] = '\x01\x03\x02\x00-xY'
RESPONSES['\x01\x03 \x84\x00\x01\xcf\xe3'] = '\x01\x03\x02\x0029\x91'
RESPONSES['\x01\x03 \x85\x00\x01\x9e#'] = '\x01\x03\x02\x00<\xb8U'
RESPONSES['\x01\x03 \x86\x00\x01n#'] = '\x01\x03\x02\x00F9\xb6'
RESPONSES['\x01\x03 \x87\x00\x01?\xe3'] = '\x01\x03\x02\x00P\xb8x'
RESPONSES['\x01\x03\x10@\x00\x01\x81\x1e'] = '\x01\x03\x02\x00\x07\xf9\x86' # Actual step
RESPONSES['\x01\x03\x10P\x00\x01\x80\xdb'] = '\x01\x03\x02\x00\x04\xb9\x87' # Cycles
RESPONSES['\x01\x03\x10`\x00\x01\x80\xd4'] = '\x01\x03\x02\x00\x01y\x84' # Linked pattern
# Slave address 1, set_all_pattern_variables()
# --- Valid for pattern 0 ---
RESPONSES['\x01\x10 \x00\x00\x01\x02\x00d\x86y'] = '\x01\x10 \x00\x00\x01\n\t' # SP0
RESPONSES['\x01\x10 \x01\x00\x01\x02\x00\xc8\x87\xd5'] = '\x01\x10 \x01\x00\x01[\xc9'
RESPONSES['\x01\x10 \x02\x00\x01\x02\x01,\x86='] = '\x01\x10 \x02\x00\x01\xab\xc9'
RESPONSES['\x01\x10 \x03\x00\x01\x02\x01\x90\x86]'] = '\x01\x10 \x03\x00\x01\xfa\t' # SP3, value 40
RESPONSES['\x01\x10 \x04\x00\x01\x02\x01\xf4\x86\x01'] = '\x01\x10 \x04\x00\x01K\xc8'
RESPONSES['\x01\x10 \x05\x00\x01\x02\x02X\x87]'] = '\x01\x10 \x05\x00\x01\x1a\x08'
RESPONSES['\x01\x10 \x06\x00\x01\x02\x02\xbc\x87%'] = '\x01\x10 \x06\x00\x01\xea\x08'
RESPONSES['\x01\x10 \x07\x00\x01\x02\x03 \x87\r'] = '\x01\x10 \x07\x00\x01\xbb\xc8'
RESPONSES['\x01\x10 \x80\x00\x01\x02\x00\n\x18U'] = '\x01\x10 \x80\x00\x01\x0b\xe1' # Time0
RESPONSES['\x01\x10 \x81\x00\x01\x02\x00\x14\x99\x8c'] = '\x01\x10 \x81\x00\x01Z!'
RESPONSES['\x01\x10 \x82\x00\x01\x02\x00\x1e\x19\xb8'] = '\x01\x10 \x82\x00\x01\xaa!'
RESPONSES['\x01\x10 \x83\x00\x01\x02\x00(\x98\x7f'] = '\x01\x10 \x83\x00\x01\xfb\xe1' # Time3, value 40
RESPONSES['\x01\x10 \x84\x00\x01\x02\x002\x18\x03'] = '\x01\x10 \x84\x00\x01J '
RESPONSES['\x01\x10 \x85\x00\x01\x02\x00<\x98\x16'] = '\x01\x10 \x85\x00\x01\x1b\xe0'
RESPONSES['\x01\x10 \x86\x00\x01\x02\x00F\x19\xc6'] = '\x01\x10 \x86\x00\x01\xeb\xe0'
RESPONSES['\x01\x10 \x87\x00\x01\x02\x00P\x99\xd9'] = '\x01\x10 \x87\x00\x01\xba '
RESPONSES['\x01\x10\x10@\x00\x01\x02\x00\x07\xf8\x93'] = '\x01\x10\x10@\x00\x01\x04\xdd' # Actual step
RESPONSES['\x01\x10\x10P\x00\x01\x02\x00\x02:\x00'] = '\x01\x10\x10P\x00\x01\x05\x18' # Cycles, value 2
RESPONSES['\x01\x10\x10`\x00\x01\x02\x00\x01\x7f\xf1'] = '\x01\x10\x10`\x00\x01\x05\x17' # Linked pattern
# Slave address 10, get_pv()
RESPONSES['\n\x03\x10\x00\x00\x01\x81\xb1'] = '\n\x03\x02\x01\x03\\\x14'
# Slave address 10, run()
RESPONSES['\n\x05\x08\x14\xff\x00\xcf%'] = '\n\x05\x08\x14\xff\x00\xcf%'
# Slave address 10, stop()
RESPONSES['\n\x05\x08\x14\x00\x00\x8e\xd5'] = '\n\x05\x08\x14\x00\x00\x8e\xd5'
# Slave address 10, is_running()
RESPONSES['\n\x02\x08\x14\x00\x01\xfa\xd5'] = '\n\x02\x01\x00\xa3\xac'
# Slave address 10, get_setpoint()
RESPONSES['\n\x03\x10\x01\x00\x01\xd0q'] = '\n\x03\x02\x03\xe8\x1d;'
# Slave address 10, set_setpoint()
RESPONSES['\n\x10\x10\x01\x00\x01\x02\x03\xe8\xc5\xce'] = '\n\x10\x10\x01\x00\x01U\xb2'
# Slave address 10, get_control_mode()
RESPONSES['\n\x03\x10\x05\x00\x01\x91\xb0'] = '\n\x03\x02\x00\x00\x1d\x85'
# Slave address 10, set_control_mode()
RESPONSES['\n\x10\x10\x05\x00\x01\x02\x00\x03\x84\xf5'] = '\n\x10\x10\x05\x00\x01\x14s'
# Slave address 10, get_start_pattern_no()
RESPONSES['\n\x03\x100\x00\x01\x81\xbe'] = '\n\x03\x02\x00\x02\x9cD'
# Slave address 10, set_start_pattern_no()
RESPONSES['\n\x10\x100\x00\x01\x02\x00\x02@\x90'] = '\n\x10\x100\x00\x01\x04}'
# Slave address 10, set_pattern_step_setpoint() Pattern 0, step 3, value 333.3. See also below.
RESPONSES['\n\x10 \x03\x00\x01\x02\r\x050\x02'] = '\n\x10 \x03\x00\x01\xfbr'
# Slave address 10, set_pattern_step_time() Pattern 0, step 3, value 45. See also below.
RESPONSES['\n\x10 \x83\x00\x01\x02\x00-+L'] = '\n\x10 \x83\x00\x01\xfa\x9a'
# Slave address 10, set_pattern_additional_cycles() Pattern 0, value 4. See also below.
RESPONSES['\n\x10\x10P\x00\x01\x02\x00\x04\xc92'] = '\n\x10\x10P\x00\x01\x04c'
# Slave address 10, get_all_pattern_variables()
# --- Valid for pattern 0 ---
# SP0: 10 Time0: 10
# SP1: 20 Time1: 20
# SP2: 30 Time2: 30
# SP3: 333 Time3: 45
# SP4: 50 Time4: 50
# SP5: 60 Time5: 60
# SP6: 70 Time6: 70
# SP7: 80 Time7: 80
# Actual step: 7
# Add'l cycles: 4
# Linked pattern: 1
RESPONSES['\n\x03 \x00\x00\x01\x8e\xb1'] = '\n\x03\x02\x00d\x1cn' # SP0
RESPONSES['\n\x03 \x01\x00\x01\xdfq'] = '\n\x03\x02\x00\xc8\x1c\x13'
RESPONSES['\n\x03 \x02\x00\x01/q'] = '\n\x03\x02\x01,\x1d\xc8'
RESPONSES['\n\x03 \x03\x00\x01~\xb1'] = '\n\x03\x02\r\x05\xd9\x16'
RESPONSES['\n\x03 \x04\x00\x01\xcfp'] = '\n\x03\x02\x01\xf4\x1d\x92'
RESPONSES['\n\x03 \x05\x00\x01\x9e\xb0'] = '\n\x03\x02\x02X\x1d\x1f'
RESPONSES['\n\x03 \x06\x00\x01n\xb0'] = '\n\x03\x02\x02\xbc\x1dT'
RESPONSES['\n\x03 \x07\x00\x01?p'] = '\n\x03\x02\x03 \x1c\xad'
RESPONSES['\n\x03 \x80\x00\x01\x8fY'] = '\n\x03\x02\x00\n\x9d\x82' # Time0
RESPONSES['\n\x03 \x81\x00\x01\xde\x99'] = '\n\x03\x02\x00\x14\x1d\x8a'
RESPONSES['\n\x03 \x82\x00\x01.\x99'] = '\n\x03\x02\x00\x1e\x9d\x8d'
RESPONSES['\n\x03 \x83\x00\x01\x7fY'] = '\n\x03\x02\x00-\xdd\x98'
RESPONSES['\n\x03 \x84\x00\x01\xce\x98'] = '\n\x03\x02\x002\x9cP'
RESPONSES['\n\x03 \x85\x00\x01\x9fX'] = '\n\x03\x02\x00<\x1d\x94'
RESPONSES['\n\x03 \x86\x00\x01oX'] = '\n\x03\x02\x00F\x9cw'
RESPONSES['\n\x03 \x87\x00\x01>\x98'] = '\n\x03\x02\x00P\x1d\xb9'
RESPONSES['\n\x03\x10@\x00\x01\x80e'] = '\n\x03\x02\x00\x07\\G' # Actual step
RESPONSES['\n\x03\x10P\x00\x01\x81\xa0'] = '\n\x03\x02\x00\x04\x1cF' # Cycles
RESPONSES['\n\x03\x10`\x00\x01\x81\xaf'] = '\n\x03\x02\x00\x01\xdcE' # Linked pattern
# Slave address 10, set_all_pattern_variables()
# --- Valid for pattern 0 ---
RESPONSES['\n\x10 \x00\x00\x01\x02\x00d\xf5I'] = '\n\x10 \x00\x00\x01\x0br' # SP0
RESPONSES['\n\x10 \x01\x00\x01\x02\x00\xc8\xf4\xe5'] = '\n\x10 \x01\x00\x01Z\xb2'
RESPONSES['\n\x10 \x02\x00\x01\x02\x01,\xf5\r'] = '\n\x10 \x02\x00\x01\xaa\xb2'
RESPONSES['\n\x10 \x03\x00\x01\x02\x01\x90\xf5m'] = '\n\x10 \x03\x00\x01\xfbr' # SP3, value 40
RESPONSES['\n\x10 \x04\x00\x01\x02\x01\xf4\xf51'] = '\n\x10 \x04\x00\x01J\xb3'
RESPONSES['\n\x10 \x05\x00\x01\x02\x02X\xf4m'] = '\n\x10 \x05\x00\x01\x1bs'
RESPONSES['\n\x10 \x06\x00\x01\x02\x02\xbc\xf4\x15'] = '\n\x10 \x06\x00\x01\xebs'
RESPONSES['\n\x10 \x07\x00\x01\x02\x03 \xf4='] = '\n\x10 \x07\x00\x01\xba\xb3'
RESPONSES['\n\x10 \x80\x00\x01\x02\x00\nke'] = '\n\x10 \x80\x00\x01\n\x9a' # Time0
RESPONSES['\n\x10 \x81\x00\x01\x02\x00\x14\xea\xbc'] = '\n\x10 \x81\x00\x01[Z'
RESPONSES['\n\x10 \x82\x00\x01\x02\x00\x1ej\x88'] = '\n\x10 \x82\x00\x01\xabZ'
RESPONSES['\n\x10 \x83\x00\x01\x02\x00(\xebO'] = '\n\x10 \x83\x00\x01\xfa\x9a' # Time3, value 40
RESPONSES['\n\x10 \x84\x00\x01\x02\x002k3'] = '\n\x10 \x84\x00\x01K['
RESPONSES['\n\x10 \x85\x00\x01\x02\x00<\xeb&'] = '\n\x10 \x85\x00\x01\x1a\x9b'
RESPONSES['\n\x10 \x86\x00\x01\x02\x00Fj\xf6'] = '\n\x10 \x86\x00\x01\xea\x9b'
RESPONSES['\n\x10 \x87\x00\x01\x02\x00P\xea\xe9'] = '\n\x10 \x87\x00\x01\xbb['
RESPONSES['\n\x10\x10@\x00\x01\x02\x00\x07\x8b\xa3'] = '\n\x10\x10@\x00\x01\x05\xa6' # Actual step
RESPONSES['\n\x10\x10P\x00\x01\x02\x00\x02I0'] = '\n\x10\x10P\x00\x01\x04c' # Cycles, value 2
RESPONSES['\n\x10\x10`\x00\x01\x02\x00\x01\x0c\xc1'] = '\n\x10\x10`\x00\x01\x04l' # Linked pattern
def _print_out( inputstring ):
"""Print the inputstring. To make it compatible with Python2 and Python3."""
sys.stdout.write(inputstring + '\n')
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
1796190
|
<reponame>L3gume/CodeAndBrunch
from setuptools import setup
# you may need setuptools instead of distutils
setup (
# basic stuff here
scripts = [
'patrickTweet.py',
'jsonParser.py'
]
)
|
StarcoderdataPython
|
3250813
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestRestoreLcMaps(osgunittest.OSGTestCase):
@core.osgrelease(3.5)
def test_01_restore_lcmaps(self):
core.skip_ok_unless_installed('lcmaps', 'lcmaps-plugins-voms', 'lcmaps-db-templates')
files.restore(core.config['lcmaps.gsi-authz'], 'lcmaps')
files.restore(core.config['lcmaps.db'], 'lcmaps')
|
StarcoderdataPython
|
3358177
|
# coding=utf8
import re
import os
import logging
import datetime
class SignalInfo:
def __init__(self):
self.log_datetime = None
self.startup = None
self.strategy = None
self.runtime = None
self.signal = None
self.instrument = None
self.quantity = None
self.price = None
self.dir = None
self.datetime = None
self.cpu_tick = None
self.tag = None
class OrderCreationInfo:
def __init__(self):
self.log_datetime = None
self.startup = None
self.runtime = None
self.signal = None
self.order = None
self.instrument = None
self.quantity = None
self.price = None
self.dir = None
self.datetime = None
self.cpu_tick = None
class OrderTradedInfo:
def __init__(self):
self.log_datetime = None
self.startup = None
self.runtime = None
self.order = None
self.quantity = None
self.price = None
self.datetime = None
self.cpu_tick = None
def __parse_signal_row(row):
try:
stg = re.findall('stg:([^,]+),', row)
rt = re.findall('rt:(\d+)', row)
sig = re.findall('sig:(\d+)', row)
ins = re.findall('ins:([a-zA-Z]+\d+)', row)
qty = re.findall('qty:(\d+)', row)
pri = re.findall('pri:([0-9.]+)', row)
dr = re.findall('dir:([A-Z]+)', row)
dt = re.findall('dt:(\d+T\d+)', row)
ctk = re.findall('ctk:(\d+)', row)
tag = re.findall('msg:(.*)', row)
sinfo = SignalInfo()
sinfo.strategy = stg[0]
sinfo.runtime = int(rt[0])
sinfo.signal = int(sig[0])
sinfo.instrument = ins[0]
sinfo.quantity = int(qty[0])
sinfo.price = float(pri[0])
sinfo.dir = dr[0]
if len(dt[0]) == 13:
temp = dt[0].replace("T", "T00")
sinfo.datetime = datetime.datetime.strptime(temp, "%Y%m%dT%H%M%S")
elif len(dt[0]) == 14:
temp = dt[0].replace("T", "T0")
sinfo.datetime = datetime.datetime.strptime(temp, "%Y%m%dT%H%M%S")
else:
sinfo.datetime = datetime.datetime.strptime(dt[0], "%Y%m%dT%H%M%S")
sinfo.cpu_tick = int(ctk[0])
sinfo.tag = tag[0]
return sinfo
except Exception as e:
logging.warning('parse_signal_row fail\nerror: %s\nrow: %s'
% (str(e), row))
return None
def __parse_order_creation_row(row):
try:
rt = re.findall('rt:(\d+)', row)
sig = re.findall('sig:(\d+)', row)
od = re.findall('ord:(\d+)', row)
ins = re.findall('ins:([a-zA-Z]+\d+)', row)
qty = re.findall('qty:(\d+)', row)
pri = re.findall('pri:([0-9.]+)', row)
dr = re.findall('dir:([A-Z]+)', row)
dt = re.findall('dt:(\d+T\d+)', row)
ctk = re.findall('ctk:(\d+)', row)
sinfo = OrderCreationInfo()
sinfo.runtime = int(rt[0])
sinfo.signal = int(sig[0])
sinfo.order = int(od[0])
sinfo.instrument = ins[0]
sinfo.quantity = int(qty[0])
sinfo.price = float(pri[0])
sinfo.dir = dr[0]
if len(dt[0]) == 13:
temp = dt[0].replace("T", "T00")
sinfo.datetime = datetime.datetime.strptime(temp, "%Y%m%dT%H%M%S")
elif len(dt[0]) == 14:
temp = dt[0].replace("T", "T0")
sinfo.datetime = datetime.datetime.strptime(temp, "%Y%m%dT%H%M%S")
else:
sinfo.datetime = datetime.datetime.strptime(dt[0], "%Y%m%dT%H%M%S")
sinfo.cpu_tick = int(ctk[0])
return sinfo
except Exception as e:
logging.warning('parse_order_creation_row fail\nerror: %s\nrow: %s'
% (str(e), row))
return None
def __parse_order_traded_row(row):
try:
rt = re.findall('rt:(\d+)', row)
od = re.findall('ord:(\d+)', row)
qty = re.findall('qty:(\d+)', row)
pri = re.findall('pri:([0-9.]+)', row)
dt = re.findall('dt:(\d+T\d+)', row)
ctk = re.findall('ctk:(\d+)', row)
sinfo = OrderTradedInfo()
sinfo.runtime = int(rt[0])
sinfo.order = int(od[0])
sinfo.quantity = int(qty[0])
sinfo.price = float(pri[0])
if len(dt[0]) == 13:
temp = dt[0].replace("T", "T00")
sinfo.datetime = datetime.datetime.strptime(temp, "%Y%m%dT%H%M%S")
elif len(dt[0]) == 14:
temp = dt[0].replace("T", "T0")
sinfo.datetime = datetime.datetime.strptime(temp, "%Y%m%dT%H%M%S")
else:
sinfo.datetime = datetime.datetime.strptime(dt[0], "%Y%m%dT%H%M%S")
sinfo.cpu_tick = int(ctk[0])
return sinfo
except Exception as e:
        logging.warning('parse_order_traded_row fail\nerror: %s\nrow: %s'
% (str(e), row))
return None
def __parse_row(row):
log_dt = re.findall('^\[(\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2}\.\d{3})\]\[[A-Z]\]', row)
if not log_dt or not log_dt[0]:
return None
try:
log_dt = datetime.datetime.strptime(log_dt[0], "%Y/%m/%d %H:%M:%S.%f")
except Exception as e:
logging.warning('parse log datetime fail\nerror: %s\nrow: %s'
% (str(e), row))
return None
log_type = re.findall('<([a-z_]+)>', row)
if not log_type or not log_type[0]:
return None
info = None
if log_type[0] == 'signal':
info = __parse_signal_row(row)
elif log_type[0] == 'order_created':
info = __parse_order_creation_row(row)
elif log_type[0] == 'order_traded':
info = __parse_order_traded_row(row)
elif log_type[0] == 'order_canceled':
pass
elif log_type[0] == 'order_rejected':
pass
if info is not None:
info.log_datetime = log_dt
return info
def parse_log_file(filename, parse_from_dt=None, parse_until_dt=None):
"""
    :param filename: log file name
    :param parse_from_dt: only parse log entries whose timestamp is greater than or equal to this time
    :param parse_until_dt: only parse log entries whose timestamp is earlier than this time
:return:
"""
if parse_from_dt is None:
parse_from_dt = datetime.datetime.min
if parse_until_dt is None:
parse_until_dt = datetime.datetime.max
signals = list()
orders = list()
trades = list()
if os.path.exists(filename):
# split individual starts
starts = list()
lines = list()
with open(filename) as fid:
for line in fid:
row = line.rstrip().lstrip()
if not row:
continue
spliter_line = re.findall(r'^======', row)
if not spliter_line:
lines.append(line)
else:
starts.append(list(lines))
lines.clear()
if lines:
starts.append(list(lines))
# scan
for n, lines in enumerate(starts):
for line in lines:
info = __parse_row(line)
if info is None:
continue
if info.log_datetime < parse_from_dt:
continue
if info.log_datetime >= parse_until_dt:
continue
# set startup id
info.startup = n
if type(info) == SignalInfo:
signals.append(info)
elif type(info) == OrderCreationInfo:
orders.append(info)
elif type(info) == OrderTradedInfo:
trades.append(info)
return signals, orders, trades
# =========================================================================
def main():
filename = r'C:\D\work_scripts\trade_log_parser\trade_logs\Transactions_20160420.log'
s, o, t = parse_log_file(filename)
for i in s:
print(i)
for i in o:
print(i)
for i in t:
print(i)
# =========================================================================
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
155818
|
import datetime
import random
import threading
import time
from selenium import webdriver
from selenium.common.exceptions import TimeoutException, WebDriverException
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
from product_search_details import ProductSearchDetails
NEWEGG = "newegg"
BESTBUY = "bestbuy"
def restart_selenium(
driver
):
if driver is not None:
driver.close()
driver = webdriver.Firefox()
return driver
def start_newegg_checkout(
driver,
item
) -> bool:
btn = item.find_elements_by_class_name("btn-primary")[0]
btn.click()
time.sleep(0.5)
driver.get("https://secure.newegg.com/Shopping/ShoppingCart.aspx")
time.sleep(0.5)
driver.get(
"javascript:attachDelegateEvent((function(){Biz.GlobalShopping.ShoppingCart.checkOut('True')}));")
time.sleep(3)
    if item.text.lower().find("your shopping cart is empty") > -1:
return False
return True
def scrape_for_product(
driver: any,
vendor_name: str,
vendor_details: ProductSearchDetails,
delay: float
) -> tuple:
no_response_error_text = "site did not respond"
should_exit = False
try:
product_search_url = vendor_details.product_search_url
driver.get(product_search_url)
time.sleep(3)
try:
element_search = vendor_details.sku_containter_css_class
items = WebDriverWait(driver, delay).until(
expected_conditions.presence_of_all_elements_located((By.CLASS_NAME, element_search)))
for item in items:
now = datetime.datetime.now()
print("Time : ")
print(now.strftime("%Y-%m-%d %H:%M:%S \n"))
print(f'{vendor_name}\n{item.text}\n')
if item.text.lower().find(vendor_details.add_to_cart_search) > -1:
print(f'item available at {vendor_name}{item.text}')
                    if vendor_name == NEWEGG:
should_exit = start_newegg_checkout(driver, item)
break
except TimeoutException:
print(no_response_error_text)
delay += 15
except WebDriverException:
print(no_response_error_text)
delay += 15
return (delay, should_exit)
class ProductScraper(object):
def __scrape_for_product_loop__(
self
):
driver_attempt_count = 0
driver = restart_selenium(None)
while True:
if driver_attempt_count > 100:
driver_attempt_count = 0
driver_attempt_count += 1
delay = random.random() * 10 + \
self.__product_info__.seconds_delay_between_refresh
print(f'{self.__vendor_name__}\n')
(delay, should_exit) = scrape_for_product(
driver,
self.__vendor_name__,
self.__product_info__,
delay)
if should_exit:
print("Please finish ordering")
time.sleep(15 * 60)
time.sleep(delay)
def __init__(
self,
vendor_name: str,
vendor_info: ProductSearchDetails
) -> None:
self.__vendor_name__ = vendor_name
self.__product_info__ = vendor_info
self.__thread__ = threading.Thread(
target=self.__scrape_for_product_loop__,
name=vendor_name)
self.__thread__.start()
|
StarcoderdataPython
|
189678
|
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 18 09:25:00 2014
@author: rich
"""
import sys
import re
import ast
import numpy as np
from collections import Counter
headerQuestion = 'Question'
headerQID = 'qID'
headerQuestionType = 'qAnalysisType'
headerQuestionWeight = 'qWeight'
headerPossibleResponses = 'Possible Responses'
# responseOther = 'Other:' # used in original survey data
responseOther = 'Other'
qType_Metadata = -1
qType_Subjective = 0
qType_Numerical = 1
qType_Ordinal = 2 # pick one from an ordered list of options
qType_MultiCategory = 3 # pick one or more from a list of options
qType_BiCategory = 4 # answers of the form A, B, A and B, (neither), (other)
qType_Category = 5 # pick one
qType_Hierarchical = 6 # pick one or more, each option is hierarchical, colon separated
qType_MultiOrdinal = 7 # pick one or more from an ordered list of options
qType_WtdFeature = 8 # int/float is zero-based feature weight
def isFloat(str):
try:
float(str)
return True
except ValueError:
return False
# get attribute list and optional weights for multi-valued attributes
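# Hedged worked examples (hypothetical inputs, derived from the parsing logic below):
#   getAttrValueList("[('python', 3), ('sql', 1)]") -> (['python', 'sql'], [3, 1])
#   getAttrValueList("python; sql")                  -> (['python', 'sql'], None)
#   getAttrValueList(float('nan'))                   -> ([], None)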
def getAttrValueList(values):
weights = None
    if values is None or (isinstance(values, float) and np.isnan(values)):
return ([], weights)
if values == 'None':
return (['None'], weights)
if type(values) != list:
try:
values = ast.literal_eval(values)
if not isinstance(values, list):
values = [values]
except: # in case there's an unparseable string, perhaps multiple values are separated by ; or |
try:
values = [x.strip() for x in re.split(";|\|", values)]
except:
                print(values)
values = []
values = filter(None, values) # filter out empty strings
if len(values) > 0 and (type(values[0]) == list or type(values[0]) == tuple):
valwts = zip(*values)
values = list(valwts[0])
weights = list(valwts[1])
return (values, weights)
def readQuestionAndResponseData(rawData, questionTextHeader, filters = [], doRound = False):
questionData = rawData['questions']
responseData = rawData['responses']
questionTextHeader = questionTextHeader if (questionTextHeader != None and len(questionTextHeader) > 0) else headerQuestion
# read in and process the questions
# get questions
print("[ReadData.readQuestionAndResponseData] Processing %s question rows" % str(len(questionData)))
#print(questionData)
qIDs = {int(question[headerQID]): question[questionTextHeader] for idx, question in questionData.iterrows()}
# read in the responses
# get headers
# get responses
print("[ReadData.readQuestionAndResponseData] Processing %s entity rows" % str(len(responseData)))
# questions is a data frame of questions and an {ID, question text} dict
data = {'questions': {'rawQuestions': questionData, 'questions': questionData, 'qIDs' : qIDs},
'responses': responseData}
cleanQuestions(data)
buildAnswerMetadata(data, doRound)
filterQuestions(data, filters)
return data
# build metadata about each question's answers
# record question type and possible answer values (numeric range or list of choices)
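# Hedged illustration (hypothetical question names) of the metadata built below:
#   answers['How old are you?'] -> {'qType': 1, 'qWt': 1.0, 'min': 18.0, 'max': 65.0,
#                                    'median': 34.0, 'histog': []}
#   answers['Primary role']     -> {'qType': 5, 'qWt': 1.0, 'options': [...],
#                                    'nResponses': 4, 'responseMap': {'Developer': 0, ...}}
# answerIdx maps each numeric question id back to its question text.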
def buildAnswerMetadata(data, doRound):
questionData = data['questions']['questions']
responseData = data['responses']
gotPossibleResponses = headerPossibleResponses in questionData.columns
answers = {}
    answerIdx = {} # map question id to question text
print("[ReadData.buildAnswerMetadata] Processing %s questions" % str(len(questionData)))
for idx, qData in questionData.iterrows():
question = qData[headerQuestion]
qType = int(qData[headerQuestionType])
qWt = float(qData[headerQuestionWeight]) if headerQuestionWeight in qData else 1.0
scan = False
# question sheet has answer metadata
if gotPossibleResponses and qData[headerPossibleResponses] is not None:
responses = qData[headerPossibleResponses].split('|')
if qType == qType_Numerical or qType == qType_WtdFeature:
if responses[0] == 'numeric' or doRound == True:
answer = {'qType': qType, 'qWt': qWt}
answer['min'] = sys.float_info.max
answer['max'] = sys.float_info.min
scan = True
else:
answer = {'qType': qType, 'min': float(responses[0]), 'max': float(responses[1])}
elif qType == qType_Ordinal:
nResponses = len(responses)
answer = {'qType': qType, 'qWt': qWt, 'options': responses, 'nResponses': nResponses}
answer['hasOther'] = True if 'Other' in responses else False
# all categorical responses, treat multiOrdinal as categorical for now
elif qType == qType_MultiCategory or qType == qType_BiCategory or qType == qType_Category or qType == qType_MultiOrdinal:
nResponses = len(responses)
if 'Other' in responses:
responses.remove(responseOther)
responseMap = {}
i = 0
for response in responses:
responseMap[response] = i
i += 1
answer = {'qType': qType, 'qWt': qWt, 'options': responses, 'nResponses': nResponses, 'responseMap': responseMap}
else:
answer = {'qType': qType, 'qWt': qWt}
else: # no metadata in question sheet, scan all responses to build answer metadata
scan = True
# initialize answer metadata
answer = {'qType': qType, 'qWt': qWt}
if qType == qType_Numerical or qType == qType_WtdFeature:
answer['min'] = sys.float_info.max
answer['max'] = sys.float_info.min
elif qType == qType_Ordinal or qType == qType_MultiCategory or qType == qType_BiCategory or qType == qType_Category or qType == qType_Hierarchical or qType == qType_MultiOrdinal:
# answer['options'] = set()
answer['options'] = Counter()
if qType == qType_Ordinal:
answer['hasOther'] = False
if scan == True:
# scan all responses to fill in answer metadata
responses = responseData[question]
if qType == qType_Numerical or qType == qType_WtdFeature:
responses = responses.replace('', float('nan'))
res = responses[~np.isnan(responses)]
if len(res) > 0:
answer['min'] = min(res)
answer['max'] = max(res)
else: # skip attribute if it has no values
continue
if doRound:
responses = responses.round()
if qType == qType_WtdFeature: # force feature min to zero
answer['min'] = max(answer['min'], 0)
answer['median'] = np.median(res)
#print("Attribute: " + question + " info: " + str(answer))
elif qType == qType_MultiCategory or qType == qType_BiCategory or qType == qType_Hierarchical or qType == qType_MultiOrdinal:
tot = 0
ntot = 0
for response in responses:
ans = getAttrValueList(response)[0]
answer['options'].update(ans)
if len(ans) > 0:
tot += len(ans)
ntot += 1
answer['avgResponses'] = float(tot)/ntot
elif qType == qType_Ordinal or qType == qType_Category:
for response in responses:
answer['options'].update([response])
# finalize answer metadata
if qType == qType_Ordinal:
responses = sorted(answer['options'])
answer['options'] = responses
answer['nResponses'] = len(responses)
# all other categorical responses
elif qType == qType_MultiCategory or qType == qType_BiCategory or qType == qType_Category or qType == qType_Hierarchical or qType == qType_MultiOrdinal:
minTags = 1 if qType == qType_MultiCategory else 0
responses = [k for k, v in answer['options'].iteritems() if v > minTags] # keep tags that occur more than once
answer['options'] = responses
answer['nResponses'] = len(responses)
responseMap = {}
i = 0
for response in responses:
responseMap[response] = i
i += 1
answer['responseMap'] = responseMap
# build answer similarity for each pair of hierarchical answers
if qType == qType_Hierarchical:
answer['responseSim'] = buildResponseSimilarityMatrix(responses)
elif qType == qType_Numerical or qType == qType_WtdFeature:
                responses = responseData[question]
                responses = responses.replace('', float('nan'))
res = responses[~np.isnan(responses)]
answer['median'] = np.median(res)
answer['histog'] = []
# qID = int(qData[0])
qID = int(qData[headerQID])
answers[qData[headerQuestion]] = answer
answerIdx[qID] = qData[headerQuestion]
data['answers'] = answers
data['answerIdx'] = answerIdx
# remove any questions (attributes) that are not in the response data
def cleanQuestions(data):
questions = data['questions']['rawQuestions']
responseData = data['responses']
dropList = []
for idx, qData in questions.iterrows():
question = qData[headerQuestion]
if question not in responseData:
dropList.append(idx)
data['questions']['questions'] = questions.drop(dropList)
# run one or more filters on the questions to select the questions to process
# questionFilters is an array of functions with arguments (questionData)
# that return boolean True if question is used
def filterQuestions(data, questionFilters):
questions = data['questions']['rawQuestions']
dropList = []
for idx, question in questions.iterrows():
for qFilter in questionFilters:
if qFilter(question) == False:
dropList.append(idx)
newQuestions = data['questions']['questions'] = questions.drop(dropList)
print("Processing %s filtered questions" % str(newQuestions.shape[0]))
# compute similarity for each pair of hierarchical responses
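# Hedged worked example (hypothetical responses): for ['A:B:C', 'A:B:D', 'A:X'] the
# deepest hierarchy has 3 levels, so raw shared-prefix counts are divided by
# maxLen - 1 = 2. 'A:B:C' vs 'A:B:D' share two leading terms -> similarity 1.0;
# either of them vs 'A:X' shares only 'A' -> similarity 0.5. Diagonal entries hold
# the full term length divided by the same factor.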
def buildResponseSimilarityMatrix(responses):
n = len(responses)
simmat = np.empty([n,n])
maxLen = 0
for i in range(0,n):
ans1 = [x.strip() for x in responses[i].split(':')]
len1 = len(ans1)
if len1 > maxLen:
maxLen = len1
for j in range(i,n):
            if i == j:
simmat[i][j] = len1
else:
ans2 = [x.strip() for x in responses[j].split(':')]
len2 = len(ans2)
for k in range(0, min(len1, len2)):
if ans1[k] != ans2[k]:
break
simmat[i][j] = k
simmat[j][i] = k
# normalize with longest hierarchical term
if maxLen > 1:
maxLen = maxLen - 1
maxLen = float(maxLen)
for i in range(0,n):
for j in range(0,n):
simmat[i][j] /= maxLen
return simmat
def getClusterAggregationData(rawData):
propName = 'attribute'
propType = 'type'
aggOps = 'summary_stats' # comma separated list of aggregation operations
try:
aggData = rawData['aggregation']
aggDataFinal = {}
# read in and process the sheet
# get headers
aggHeaders = {}
headerRow = aggData[0]
for idx in range(len(headerRow)):
aggHeaders[headerRow[idx].value] = idx
# get column indices and read data
propIdx = aggHeaders[propName]
typeIdx = aggHeaders[propType]
aggIdx = aggHeaders[aggOps]
for aggRow in aggData:
aggOpList = [x.strip() for x in aggRow[aggIdx].split(',')]
for aggOp in aggOpList:
aggDataFinal[aggRow[propIdx]] = {propType: aggRow[typeIdx], aggOp: aggOp}
except:
aggDataFinal = None
return aggDataFinal
|
StarcoderdataPython
|
1681038
|
<filename>ansible/venv/lib/python2.7/site-packages/ansible/modules/storage/netapp/netapp_e_auditlog.py
#!/usr/bin/python
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: netapp_e_auditlog
short_description: NetApp E-Series manage audit-log configuration
description:
- This module allows an e-series storage system owner to set audit-log configuration parameters.
version_added: '2.7'
author: <NAME> (@ndswartz)
extends_documentation_fragment:
- netapp.eseries
options:
max_records:
description:
- The maximum number log messages audit-log will retain.
- Max records must be between and including 100 and 50000.
default: 50000
log_level:
description: Filters the log messages according to the specified log level selection.
choices:
- all
- writeOnly
default: writeOnly
full_policy:
description: Specifies what audit-log should do once the number of entries approach the record limit.
choices:
- overWrite
- preventSystemAccess
default: overWrite
threshold:
description:
- This is the memory full percent threshold that audit-log will start issuing warning messages.
- Percent range must be between and including 60 and 90.
default: 90
force:
description:
- Forces the audit-log configuration to delete log history when log messages fullness cause immediate
warning or full condition.
- Warning! This will cause any existing audit-log messages to be deleted.
- This is only applicable for I(full_policy=preventSystemAccess).
type: bool
default: no
log_path:
description: A local path to a file to be used for debug logging.
required: no
notes:
- Check mode is supported.
- This module is currently only supported with the Embedded Web Services API v3.0 and higher.
"""
EXAMPLES = """
- name: Define audit-log to prevent system access if records exceed 50000 with warnings occurring at 60% capacity.
netapp_e_auditlog:
api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
api_username: "{{ netapp_e_api_username }}"
api_password: "{{ <PASSWORD> }}"
ssid: "{{ netapp_e_ssid }}"
validate_certs: no
max_records: 50000
log_level: all
full_policy: preventSystemAccess
threshold: 60
log_path: /path/to/log_file.log
- name: Define audit-log utilize the default values.
netapp_e_auditlog:
api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
api_username: "{{ netapp_e_api_username }}"
api_password: "{{ <PASSWORD> }}"
ssid: "{{ netapp_e_ssid }}"
- name: Force audit-log configuration when full or warning conditions occur while enacting preventSystemAccess policy.
netapp_e_auditlog:
api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
api_username: "{{ netapp_e_api_username }}"
api_password: "{{ <PASSWORD> }}"
ssid: "{{ netapp_e_ssid }}"
max_records: 5000
log_level: all
full_policy: preventSystemAccess
threshold: 60
force: yes
"""
RETURN = """
msg:
description: Success message
returned: on success
type: str
sample: The settings have been updated.
"""
import json
import logging
from pprint import pformat
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netapp import request, eseries_host_argument_spec
from ansible.module_utils._text import to_native
try:
from urlparse import urlparse, urlunparse
except Exception:
from urllib.parse import urlparse, urlunparse
class AuditLog(object):
"""Audit-log module configuration class."""
MAX_RECORDS = 50000
HEADERS = {"Content-Type": "application/json",
"Accept": "application/json"}
def __init__(self):
argument_spec = eseries_host_argument_spec()
argument_spec.update(dict(
max_records=dict(type="int", default=50000),
log_level=dict(type="str", default="writeOnly", choices=["all", "writeOnly"]),
full_policy=dict(type="str", default="overWrite", choices=["overWrite", "preventSystemAccess"]),
threshold=dict(type="int", default=90),
force=dict(type="bool", default=False),
log_path=dict(type='str', required=False)))
self.module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
args = self.module.params
self.max_records = args["max_records"]
if self.max_records < 100 or self.max_records > self.MAX_RECORDS:
self.module.fail_json(msg="Audit-log max_records count must be between 100 and 50000: [%s]"
% self.max_records)
self.threshold = args["threshold"]
if self.threshold < 60 or self.threshold > 90:
self.module.fail_json(msg="Audit-log percent threshold must be between 60 and 90: [%s]" % self.threshold)
self.log_level = args["log_level"]
self.full_policy = args["full_policy"]
self.force = args["force"]
self.ssid = args['ssid']
self.url = args['api_url']
if not self.url.endswith('/'):
self.url += '/'
self.creds = dict(url_password=args['api_password'],
validate_certs=args['validate_certs'],
url_username=args['api_username'], )
# logging setup
log_path = args['log_path']
self._logger = logging.getLogger(self.__class__.__name__)
if log_path:
logging.basicConfig(
level=logging.DEBUG, filename=log_path, filemode='w',
format='%(relativeCreated)dms %(levelname)s %(module)s.%(funcName)s:%(lineno)d\n %(message)s')
self.proxy_used = self.is_proxy()
self._logger.info(self.proxy_used)
self.check_mode = self.module.check_mode
def is_proxy(self):
"""Determine whether the API is embedded or proxy."""
try:
# replace http url path with devmgr/utils/about
about_url = list(urlparse(self.url))
about_url[2] = "devmgr/utils/about"
about_url = urlunparse(about_url)
rc, data = request(about_url, timeout=300, headers=self.HEADERS, **self.creds)
return data["runningAsProxy"]
except Exception as err:
self.module.fail_json(msg="Failed to retrieve the webservices about information! Array Id [%s]. Error [%s]."
% (self.ssid, to_native(err)))
def get_configuration(self):
"""Retrieve the existing audit-log configurations.
:returns: dictionary containing current audit-log configuration
"""
try:
if self.proxy_used:
rc, data = request(self.url + "audit-log/config", timeout=300, headers=self.HEADERS, **self.creds)
else:
rc, data = request(self.url + "storage-systems/%s/audit-log/config" % self.ssid,
timeout=300, headers=self.HEADERS, **self.creds)
return data
except Exception as err:
self.module.fail_json(msg="Failed to retrieve the audit-log configuration! "
"Array Id [%s]. Error [%s]."
% (self.ssid, to_native(err)))
def build_configuration(self):
"""Build audit-log expected configuration.
:returns: Tuple containing update boolean value and dictionary of audit-log configuration
"""
config = self.get_configuration()
current = dict(auditLogMaxRecords=config["auditLogMaxRecords"],
auditLogLevel=config["auditLogLevel"],
auditLogFullPolicy=config["auditLogFullPolicy"],
auditLogWarningThresholdPct=config["auditLogWarningThresholdPct"])
body = dict(auditLogMaxRecords=self.max_records,
auditLogLevel=self.log_level,
auditLogFullPolicy=self.full_policy,
auditLogWarningThresholdPct=self.threshold)
update = current != body
self._logger.info(pformat(update))
self._logger.info(pformat(body))
return update, body
def delete_log_messages(self):
"""Delete all audit-log messages."""
self._logger.info("Deleting audit-log messages...")
try:
if self.proxy_used:
rc, result = request(self.url + "audit-log?clearAll=True", timeout=300,
method="DELETE", headers=self.HEADERS, **self.creds)
else:
rc, result = request(self.url + "storage-systems/%s/audit-log?clearAll=True" % self.ssid, timeout=300,
method="DELETE", headers=self.HEADERS, **self.creds)
except Exception as err:
self.module.fail_json(msg="Failed to delete audit-log messages! Array Id [%s]. Error [%s]."
% (self.ssid, to_native(err)))
def update_configuration(self, update=None, body=None, attempt_recovery=True):
"""Update audit-log configuration."""
if update is None or body is None:
update, body = self.build_configuration()
if update and not self.check_mode:
try:
if self.proxy_used:
rc, result = request(self.url + "storage-systems/audit-log/config", timeout=300,
data=json.dumps(body), method='POST', headers=self.HEADERS,
ignore_errors=True, **self.creds)
else:
rc, result = request(self.url + "storage-systems/%s/audit-log/config" % self.ssid, timeout=300,
data=json.dumps(body), method='POST', headers=self.HEADERS,
ignore_errors=True, **self.creds)
if rc == 422:
if self.force and attempt_recovery:
self.delete_log_messages()
update = self.update_configuration(update, body, False)
else:
self.module.fail_json(msg="Failed to update audit-log configuration! Array Id [%s]. Error [%s]."
% (self.ssid, to_native(rc, result)))
except Exception as error:
self.module.fail_json(msg="Failed to update audit-log configuration! Array Id [%s]. Error [%s]."
% (self.ssid, to_native(error)))
return update
def update(self):
"""Update the audit-log configuration."""
update = self.update_configuration()
self.module.exit_json(msg="Audit-log update complete", changed=update)
def __call__(self):
self.update()
def main():
auditlog = AuditLog()
auditlog()
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
3395754
|
<filename>scrapy_cookies/storage/mongo.py
import logging
import pickle
import re
from http.cookiejar import Cookie
from itertools import starmap
from typing import Dict
import pymongo
from pymongo import MongoClient
from pymongo.collection import Collection
from pymongo.database import Database
from scrapy.http.cookies import CookieJar
from scrapy.settings import Settings
from scrapy.spiders import Spider
from scrapy_cookies.storage import BaseStorage
logger = logging.getLogger(__name__)
pattern = re.compile("^COOKIES_MONGO_MONGOCLIENT_(?P<kwargs>(?!KWARGS).*)$")
def get_arguments(var):
return {str: {"name": var}, dict: var}[type(var)]
def write_cookiejar(cookiejar: CookieJar):
return pickle.dumps(cookiejar)
def read_cookiejar(document):
try:
return pickle.loads(document["cookiejar"])
except TypeError:
return None
def convert_cookiejar(cookiejar):
def _convert_cookies(x):
if isinstance(x, (str, int, bool)):
return x
elif isinstance(x, Cookie):
return dict(
map(
lambda attr: (attr, getattr(x, attr)),
(
"version",
"name",
"value",
"port",
"port_specified",
"domain",
"domain_specified",
"domain_initial_dot",
"path",
"path_specified",
"secure",
"expires",
"discard",
"comment",
"comment_url",
),
)
)
elif isinstance(x, dict):
return dict(
starmap(
lambda k, v: (_convert_cookies(k), _convert_cookies(v)), x.items()
)
)
return _convert_cookies(cookiejar._cookies)
class MongoStorage(BaseStorage):
def __init__(self, settings: Settings):
super(MongoStorage, self).__init__(settings)
self.mongo_settings: Dict[str, str] = dict(
starmap(
lambda k, v: (pattern.sub(lambda x: x.group(1).lower(), k), v),
filter(
lambda pair: pattern.match(pair[0]), settings.copy_to_dict().items()
),
)
)
self.mongo_settings.update(self.settings["COOKIES_MONGO_MONGOCLIENT_KWARGS"])
self.client: MongoClient = None
self.db: Database = None
self.coll: Collection = None
@classmethod
def from_middleware(cls, middleware):
obj = cls(middleware.settings)
return obj
def open_spider(self, spider: Spider):
self.client: MongoClient = MongoClient(**self.mongo_settings)
self.db: Database = self.client.get_database(
**get_arguments(self.settings["COOKIES_MONGO_DATABASE"])
)
self.coll: Collection = self.db.get_collection(
**get_arguments(self.settings["COOKIES_MONGO_COLLECTION"])
)
self.coll.create_index([("key", pymongo.ASCENDING)], unique=True)
def close_spider(self, spider: Spider):
self.client.close()
def __missing__(self, k) -> CookieJar:
cookiejar: CookieJar = CookieJar()
self[k] = cookiejar
return cookiejar
def __delitem__(self, v):
# TODO: finish this method
self.coll.delete_one({})
def __getitem__(self, k) -> CookieJar:
v: CookieJar = read_cookiejar(self.coll.find_one({"key": k}))
if isinstance(v, CookieJar):
return v
if hasattr(self.__class__, "__missing__"):
return self.__class__.__missing__(self, k)
raise KeyError(k)
def __iter__(self):
return iter(self.coll.find())
def __len__(self) -> int:
return self.coll.count_documents({})
def __setitem__(self, k, v):
self.coll.update_one(
{"key": k},
{
"$set": {
"key": k,
"cookiejar": write_cookiejar(v),
"cookies": convert_cookiejar(v),
}
},
upsert=True,
)
|
StarcoderdataPython
|
23317
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""This module contains a class representing messages that are sent or received.
"""
from azure.iot.device import constant
# TODO: Revise this class. Does all of this REALLY need to be here?
class Message(object):
"""Represents a message to or from IoTHub
:ivar data: The data that constitutes the payload
:ivar custom_properties: Dictionary of custom message properties
:ivar lock_token: Used by receiver to abandon, reject or complete the message
    :ivar message_id: A user-settable identifier for the message used for request-reply patterns. Format: A case-sensitive string (up to 128 characters long) of ASCII 7-bit alphanumeric characters + {'-', ':', '.', '+', '%', '_', '#', '*', '?', '!', '(', ')', ',', '=', '@', ';', '$', '''}
:ivar sequence_number: A number (unique per device-queue) assigned by IoT Hub to each message
:ivar to: A destination specified for Cloud-to-Device (C2D) messages
:ivar expiry_time_utc: Date and time of message expiration in UTC format
:ivar enqueued_time: Date and time a C2D message was received by IoT Hub
:ivar correlation_id: A property in a response message that typically contains the message_id of the request, in request-reply patterns
:ivar user_id: An ID to specify the origin of messages
:ivar ack: A feedback message generator. This property is used in C2D messages to request IoT Hub to generate feedback messages as a result of the consumption of the message by the device
:ivar content_encoding: Content encoding of the message data. Can be 'utf-8', 'utf-16' or 'utf-32'
:ivar content_type: Content type property used to route messages with the message-body. Can be 'application/json'
    :ivar output_name: Name of the output that the message is being sent to.
"""
def __init__(
self,
data,
message_id=None,
content_encoding="utf-8",
content_type="application/json",
output_name=None,
):
"""
Initializer for Message
:param data: The data that constitutes the payload
:param str message_id: A user-settable identifier for the message used for request-reply patterns. Format: A case-sensitive string (up to 128 characters long) of ASCII 7-bit alphanumeric characters + {'-', ':', '.', '+', '%', '_', '#', '*', '?', '!', '(', ')', ',', '=', '@', ';', '$', '''}
:param str content_encoding: Content encoding of the message data. Default is 'utf-8'. Other values can be utf-16' or 'utf-32'
        :param str content_type: Content type property used to route messages with the message body. Default value is 'application/json'
        :param str output_name: Name of the output that the message is being sent to.
"""
self.data = data
self.custom_properties = {}
self.lock_token = None
self.message_id = message_id
self.sequence_number = None
self.to = None
self.expiry_time_utc = None
self.enqueued_time = None
self.correlation_id = None
self.user_id = None
self.ack = None
self.content_encoding = content_encoding
self.content_type = content_type
self.output_name = output_name
self._iothub_interface_id = None
@property
def iothub_interface_id(self):
return self._iothub_interface_id
def set_as_security_message(self):
"""
Set the message as a security message.
This is a provisional API. Functionality not yet guaranteed.
"""
self._iothub_interface_id = constant.SECURITY_MESSAGE_INTERFACE_ID
def __str__(self):
return str(self.data)
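# Hedged usage sketch (illustrative values only, not part of the SDK module):
#
#     msg = Message('{"temperature": 21.5}', message_id="42",
#                   content_encoding="utf-8", content_type="application/json")
#     msg.custom_properties["alert"] = "none"
#     msg.set_as_security_message()   # tags the message with the security interface id
#     print(msg)                      # __str__ prints the payload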
|
StarcoderdataPython
|
31379
|
CANAIS_ADM = {
"diretoria": 441263190832185350,
"secretaria": 731689039853518848
}
SAUDACOES = ["Olá!", "Oi!", "Iai!"]
GUIA_ANONIMA_ID = 956319073568976967
msg_ajuda = "**::ola** | **::oi** | **::iai** | **::athena**: Mande um ola caloroso para mim, e responderei!\n" \
"**::cool** `texto`: Você pode me perguntar se algo é COOl (provavelmente sou eu).\n" \
"**::pitagoras** `expressão...`: Resolvo uma expressão matemática no estilo Pitágoras.\n" \
'**::rola** | **::dado** `NdN`: Consigo rolar uns dados para você se for conveniente.\n' \
"**::escolha** | **::prefere** `opções...`: Vou escolher a melhor opção entre algumas opções.\n" \
"**::stalk**: Envio algumas informações suas... Anda stalkeando você mesmo(a)!?.\n" \
"**::privilegios** `membro...`: Mostro suas permissões nesse canal ou de outra pessoa.\n" \
"**::convite**: Mando o convite do servidor.\n" \
"**::chegamais** `menções...`: Separo um canal para você e mais pessoas ficarem a vontade.\n" \
"**::ajuda** | **::comandos**: Esse já é um pouco autoexplicativo não?" \
"\n\n" \
"**Administração**:\n\n" \
'**::teste** `N vezes` `palavra`: Repito uma mensagem para saber se estou "di Boa"\n' \
'**::prompt**: Abro meu console para você interagir com meu código ||pervertido(a)!||.\n' \
"**::ping**: Mando a minha latência (morar nos E.U.A é para poucos).\n" \
"**::cep**: Mando o ID do canal atual.\n" \
"**::cpf**: Envio o ID de alguém.\n" \
"**::relatorio**: Faço um relatório geral do servidor." \
"(n de membros, n de boosts, nivel, n de canais, n de categorias, n de cargos...).\n" \
"**::faxina** `limite`: Dou uma limpeza das últimas (100 por padrão) mensagens no canal atual.\n" \
"\n" \
"**::log** `membro`: Faço um pequeno histórico escolar de um membro especifico. " \
"Ou o seu, caso não for especificado. Por padrão o limite é 15.\n" \
"\n" \
"**::basta**: Mando todas as pessoas **comuns** calarem a boca.\n" \
"**::liberado**: Descalo a boca de todos (talvez não seja uma boa ideia).\n" \
"**::aviso**: Muto alguém pelos seus crimes contra a nação.\n" \
"\n" \
"**::kick** `membro` `motivo`: Dou uma voadora em algum membro...\n" \
"Você pode **kickar** sem um motivo especificado, porém isso seria abuso de autoridade...\n" \
"**::ban** `membro` `motivo`: Excluo um membro da sociedade.\n" \
"Você pode **banir** sem um motivo especificado, porém isso seria abuso de autoridade..." \
"\n\n\n" \
"Você ainda pode pedir uma explicação de alto calão de certos comandos usando **::ajuda** `comando`." \
" Os que tenho alto conhecimento:" \
"`cool`; `soma`; `rola`; `escolha`; `chegamais`; `basta`; `log`; `ban`/`kick`; `aviso`." \
"\n" \
"Também se quiser saber mais sobre as permissões de `Administração`, mande um `::ajuda adms`."
msg_adms = """
Vou dizer resumidamente quem pode oquê aqui e as permissões minimas do cargo mais alto seu.
**Comando** | **Permissão**
`::teste` | Gerenciar canais
`::prompt` | Administrador
`::ping` | Gerenciar canais
`::cep` | Gerenciar canais
`::cpf` | Gerenciar canais
`::relatorio`| Administrador
`::faxina` | Gerenciar mensagens
`::log` | Gerenciar mensagens
`::basta` | Gerenciar mensagens
`::liberado` | Gerenciar mensagens
`::aviso` | Gerenciar mensagens
`::kick` | Expulsar membros
`::ban` | Banir membros
"""
alta_ajuda = {
"adms": msg_adms,
"cool": "Digo se algo é _cool_, como por exemplo: ::cool athena",
"pitagoras": "Calculo uma expressão matemática, como: `(23 + 2) * 9 - 2**3`.\nAinda pode usar exponenciação = `**`, e resto de divisão = `%`",
"rola": "Rolo um dado descompromissadamente: ::rola 1d20 = 1 dado de 20",
"escolha": "Use para eu escolher coisas aleatórias, manda as opções em sequência: ::escolha loritta athena disboard",
"chegamais": """Tenho um sistema de mensagens anônimas.
Entre em um desses canais para usufruir:
<#956301680679473253>
<#957638065596272680>
<#957638119560192090>
Use `::chegamais` `menções` (onde "menções" são as menções dos membros que queira convidar), o canal será fechado para todos com o cargo **everyone** com exceção de vocês (logicamente os outros como administradores e moderadores poderão ver as mensagens) e será aberto depois de _10 minutos_ de inatividade (fique tranquilo, antes disso eu vou apagar tudo).
Obs: Sendo que os de patente alta podem ver as mensagens, não passem os limites, olhem <#441263333807751178> para terem certeza.
""",
"basta": "Todos com somente o cargo **everyone** serão impedidos de falar no canal com o comando invocado.",
"log": "Envio as últimas mensagens de alguém.",
"aviso": "Dou o cargo @Avisado para um membro e ele não poderá mandar mensagens em qualquer canal, para descastiga-lo use o comando novamente.",
"kick": "Use para por alguém nas rédias, use-o no canal em que o membro tenha acesso (para deixar as coisas um pouco mais democráticas).",
"ban": "Use para por alguém nas rédias, use-o no canal em que o membro tenha acesso (para deixar as coisas um pouco mais democráticas)."
}
|
StarcoderdataPython
|
3381040
|
<gh_stars>1-10
# Copyright 2019 The Kapitan Authors
# SPDX-FileCopyrightText: 2020 The Kapitan Authors <<EMAIL>>
#
# SPDX-License-Identifier: Apache-2.0
"kapitan error classes"
class KapitanError(Exception):
"""generic kapitan error"""
pass
class CompileError(KapitanError):
"""compile error"""
pass
class InventoryError(KapitanError):
"""inventory error"""
pass
class SecretError(KapitanError):
"""secrets error"""
pass
class RefError(KapitanError):
"""ref error"""
pass
class RefBackendError(KapitanError):
"""ref backend error"""
pass
class RefFromFuncError(KapitanError):
"""ref from func error"""
pass
class RefHashMismatchError(KapitanError):
"""ref has mismatch error"""
pass
class HelmBindingUnavailableError(KapitanError):
"""helm input is used when the binding is not available"""
pass
class HelmTemplateError(KapitanError):
pass
class GitSubdirNotFoundError(KapitanError):
"""git dependency subdir not found error"""
pass
class GitFetchingError(KapitanError):
"""repo not found and/or permission error"""
pass
class RequestUnsuccessfulError(KapitanError):
"""request error"""
pass
class KubernetesManifestValidationError(KapitanError):
"""kubernetes manifest schema validation error"""
pass
|
StarcoderdataPython
|
4818819
|
"""
Struct that holds abstract_task, its part and handlers.
"""
from typing import Callable, Tuple
import numpy as np
from omtool.core.datamodel.abstract_task import AbstractTask
from omtool.core.datamodel.snapshot import Snapshot
class HandlerTask:
"""
Struct that holds abstract_task, its part and handlers.
"""
def __init__(
self,
task: AbstractTask,
part=slice(0, None),
handlers: list[Callable[[Tuple[np.ndarray, np.ndarray]], None]] = None,
):
if handlers is None:
handlers = []
self.task = task
self.part = part
self.handlers = handlers
def run(self, snapshot: Snapshot):
"""
Launch the task and return its value
"""
data = self.task.run(snapshot[self.part])
for handler in self.handlers:
handler(data)
|
StarcoderdataPython
|
1733589
|
# Generated by Django 2.2 on 2019-04-17 06:02
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0004_auto_20190417_0601'),
]
operations = [
migrations.AlterField(
model_name='avatar',
name='data',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.AlterField(
model_name='child',
name='data',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.AlterField(
model_name='human',
name='data',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.AlterField(
model_name='parent',
name='data',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.AlterField(
model_name='sibling',
name='data',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
]
|
StarcoderdataPython
|
3335244
|
import numpy as np
def return_test_set(X, y):
"""
    :param X: 2D array containing the dataset samples
    :param y: 1D array of the ground-truth labels of the dataset
"""
# total number of samples in the dataset
N = X.shape[0]
indices_all = list(np.arange(N))
# Create train set's indices and test set's indices
# Train set will have samples 1-25, 51-75, 101-125
# Test set will have the rest samples
indices_train = []
indices_test = []
for i in range(0, 25):
indices_train.append(i)
for i in range(50, 75):
indices_train.append(i)
for i in range(100, 125):
indices_train.append(i)
indices_test = [x for x in indices_all if x not in indices_train]
indices_train = np.array(indices_train)
indices_test = np.array(indices_test)
X_train = X[indices_train,:]
y_train = y[indices_train]
X_test = X[indices_test,:]
y_test = y[indices_test]
return X_train, y_train, X_test, y_test
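# A small usage sketch (assumption: the dataset has at least 150 rows, e.g. the Iris
# dataset, so the hard-coded index ranges above are meaningful).
if __name__ == "__main__":
    X_demo = np.random.rand(150, 4)
    y_demo = np.repeat([0, 1, 2], 50)
    X_train, y_train, X_test, y_test = return_test_set(X_demo, y_demo)
    print(X_train.shape, X_test.shape)  # (75, 4) (75, 4)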
|
StarcoderdataPython
|
1679237
|
# -*- coding: utf-8 -*-
"""Mutations that expand the graph."""
from . import neighborhood, upstream
from .neighborhood import *
from .upstream import *
__all__ = neighborhood.__all__ + upstream.__all__
|
StarcoderdataPython
|
1632948
|
# Feed-related, but Atom-independent, functions.
from feedmark.utils import quote_plus
def construct_entry_url(section):
# Currently supports links to anchors generated by Github's Markdown renderer.
if 'link-target-url' not in section.document.properties:
return None
return '{}#{}'.format(section.document.properties['link-target-url'], quote_plus(section.anchor))
def extract_feed_properties(document):
properties = {}
properties['title'] = document.title
properties['author'] = document.properties['author']
properties['url'] = document.properties['url']
properties['link-target-url'] = document.properties.get('link-target-url')
return properties
def extract_sections(documents):
sections = []
for document in documents:
for section in document.sections:
sections.append(section)
sections.sort(key=lambda section: section.publication_date, reverse=True)
return sections
|
StarcoderdataPython
|
1742638
|
import numpy as np
import matplotlib.pylab as plt
from skimage.transform import resize
import imageio
from os import walk
from skimage.restoration import denoise_nl_means, estimate_sigma
noisy_path = 'C:/Files/M2 MVA/S1/Object recognition/Project/SinGAN-master/Input/GaussianNoise/'
NLmeans_path = 'C:/Files/M2 MVA/S1/Object recognition/Project/SinGAN-master/Output/NLmeans/Gaussian/'
_, _, filenames = next(walk(noisy_path))
for image_name in filenames:
print(image_name)
noisy = imageio.imread(noisy_path+image_name)/255
plt.imshow(noisy)
plt.show()
sigma_estimation = np.mean(estimate_sigma(noisy, multichannel=True))
    denoised = denoise_nl_means(noisy, h=0.7*sigma_estimation, fast_mode=True, multichannel=True)
plt.imshow(denoised)
plt.show()
imageio.imwrite(NLmeans_path+image_name, denoised)
|
StarcoderdataPython
|
5524
|
<gh_stars>0
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Modified from espnet(https://github.com/espnet/espnet)
"""Tacotron2 decoder related modules."""
import paddle
import paddle.nn.functional as F
import six
from paddle import nn
from paddlespeech.t2s.modules.tacotron2.attentions import AttForwardTA
class Prenet(nn.Layer):
"""Prenet module for decoder of Spectrogram prediction network.
This is a module of Prenet in the decoder of Spectrogram prediction network,
which described in `Natural TTS
Synthesis by Conditioning WaveNet on Mel Spectrogram Predictions`_.
    The Prenet performs a nonlinear conversion of the inputs
    before they are fed to the auto-regressive LSTM,
    which helps to learn diagonal attentions.
Notes
----------
    This module always applies dropout, even in evaluation.
See the detail in `Natural TTS Synthesis by
Conditioning WaveNet on Mel Spectrogram Predictions`_.
.. _`Natural TTS Synthesis by Conditioning WaveNet on Mel Spectrogram Predictions`:
https://arxiv.org/abs/1712.05884
"""
def __init__(self, idim, n_layers=2, n_units=256, dropout_rate=0.5):
"""Initialize prenet module.
Parameters
----------
idim : int
Dimension of the inputs.
        n_layers : int, optional
            The number of prenet layers.
        n_units : int, optional
            The number of prenet units.
        dropout_rate : float, optional
            Dropout rate of the prenet layers.
"""
super().__init__()
self.dropout_rate = dropout_rate
self.prenet = nn.LayerList()
for layer in six.moves.range(n_layers):
n_inputs = idim if layer == 0 else n_units
self.prenet.append(
nn.Sequential(nn.Linear(n_inputs, n_units), nn.ReLU()))
def forward(self, x):
"""Calculate forward propagation.
Parameters
----------
x : Tensor
Batch of input tensors (B, ..., idim).
Returns
----------
Tensor
Batch of output tensors (B, ..., odim).
"""
for i in six.moves.range(len(self.prenet)):
            # F.dropout introduces randomness; the dropout in Tacotron2 must not be removed, even at inference.
x = F.dropout(self.prenet[i](x))
return x
class Postnet(nn.Layer):
"""Postnet module for Spectrogram prediction network.
This is a module of Postnet in Spectrogram prediction network,
which described in `Natural TTS Synthesis by
Conditioning WaveNet on Mel Spectrogram Predictions`_.
    The Postnet refines the Mel-filterbank predicted by the decoder,
    which helps to compensate for the fine structure of the spectrogram.
.. _`Natural TTS Synthesis by Conditioning WaveNet on Mel Spectrogram Predictions`:
https://arxiv.org/abs/1712.05884
"""
def __init__(
self,
idim,
odim,
n_layers=5,
n_chans=512,
n_filts=5,
dropout_rate=0.5,
use_batch_norm=True, ):
"""Initialize postnet module.
Parameters
----------
idim : int
Dimension of the inputs.
odim : int
Dimension of the outputs.
n_layers : int, optional
The number of layers.
        n_filts : int, optional
            The filter size.
        n_chans : int, optional
            The number of filter channels.
        use_batch_norm : bool, optional
            Whether to use batch normalization.
        dropout_rate : float, optional
            Dropout rate.
"""
super().__init__()
self.postnet = nn.LayerList()
for layer in six.moves.range(n_layers - 1):
ichans = odim if layer == 0 else n_chans
ochans = odim if layer == n_layers - 1 else n_chans
if use_batch_norm:
self.postnet.append(
nn.Sequential(
nn.Conv1D(
ichans,
ochans,
n_filts,
stride=1,
padding=(n_filts - 1) // 2,
bias_attr=False, ),
nn.BatchNorm1D(ochans),
nn.Tanh(),
nn.Dropout(dropout_rate), ))
else:
self.postnet.append(
nn.Sequential(
nn.Conv1D(
ichans,
ochans,
n_filts,
stride=1,
padding=(n_filts - 1) // 2,
bias_attr=False, ),
nn.Tanh(),
nn.Dropout(dropout_rate), ))
ichans = n_chans if n_layers != 1 else odim
if use_batch_norm:
self.postnet.append(
nn.Sequential(
nn.Conv1D(
ichans,
odim,
n_filts,
stride=1,
padding=(n_filts - 1) // 2,
bias_attr=False, ),
nn.BatchNorm1D(odim),
nn.Dropout(dropout_rate), ))
else:
self.postnet.append(
nn.Sequential(
nn.Conv1D(
ichans,
odim,
n_filts,
stride=1,
padding=(n_filts - 1) // 2,
bias_attr=False, ),
nn.Dropout(dropout_rate), ))
def forward(self, xs):
"""Calculate forward propagation.
Parameters
----------
xs : Tensor
Batch of the sequences of padded input tensors (B, idim, Tmax).
Returns
----------
Tensor
Batch of padded output tensor. (B, odim, Tmax).
"""
for i in six.moves.range(len(self.postnet)):
xs = self.postnet[i](xs)
return xs
class ZoneOutCell(nn.Layer):
"""ZoneOut Cell module.
This is a module of zoneout described in
`Zoneout: Regularizing RNNs by Randomly Preserving Hidden Activations`_.
This code is modified from `eladhoffer/seq2seq.pytorch`_.
Examples
----------
>>> lstm = paddle.nn.LSTMCell(16, 32)
>>> lstm = ZoneOutCell(lstm, 0.5)
.. _`Zoneout: Regularizing RNNs by Randomly Preserving Hidden Activations`:
https://arxiv.org/abs/1606.01305
.. _`eladhoffer/seq2seq.pytorch`:
https://github.com/eladhoffer/seq2seq.pytorch
"""
def __init__(self, cell, zoneout_rate=0.1):
"""Initialize zone out cell module.
Parameters
----------
cell : nn.Layer:
Paddle recurrent cell module
e.g. `paddle.nn.LSTMCell`.
zoneout_rate : float, optional
Probability of zoneout from 0.0 to 1.0.
"""
super().__init__()
self.cell = cell
self.hidden_size = cell.hidden_size
self.zoneout_rate = zoneout_rate
if zoneout_rate > 1.0 or zoneout_rate < 0.0:
raise ValueError(
"zoneout probability must be in the range from 0.0 to 1.0.")
def forward(self, inputs, hidden):
"""Calculate forward propagation.
Parameters
----------
inputs : Tensor
Batch of input tensor (B, input_size).
hidden : tuple
- Tensor: Batch of initial hidden states (B, hidden_size).
- Tensor: Batch of initial cell states (B, hidden_size).
Returns
----------
Tensor
Batch of next hidden states (B, hidden_size).
tuple:
- Tensor: Batch of next hidden states (B, hidden_size).
- Tensor: Batch of next cell states (B, hidden_size).
"""
# we only use the second output of LSTMCell in paddle
_, next_hidden = self.cell(inputs, hidden)
next_hidden = self._zoneout(hidden, next_hidden, self.zoneout_rate)
# to have the same output format with LSTMCell in paddle
return next_hidden[0], next_hidden
def _zoneout(self, h, next_h, prob):
# apply recursively
if isinstance(h, tuple):
num_h = len(h)
if not isinstance(prob, tuple):
prob = tuple([prob] * num_h)
return tuple(
[self._zoneout(h[i], next_h[i], prob[i]) for i in range(num_h)])
if self.training:
mask = paddle.bernoulli(paddle.ones([*paddle.shape(h)]) * prob)
return mask * h + (1 - mask) * next_h
else:
return prob * h + (1 - prob) * next_h
class Decoder(nn.Layer):
"""Decoder module of Spectrogram prediction network.
This is a module of decoder of Spectrogram prediction network in Tacotron2,
which described in `Natural TTS
Synthesis by Conditioning WaveNet on Mel Spectrogram Predictions`_.
The decoder generates the sequence of
features from the sequence of the hidden states.
.. _`Natural TTS Synthesis by Conditioning WaveNet on Mel Spectrogram Predictions`:
https://arxiv.org/abs/1712.05884
"""
def __init__(
self,
idim,
odim,
att,
dlayers=2,
dunits=1024,
prenet_layers=2,
prenet_units=256,
postnet_layers=5,
postnet_chans=512,
postnet_filts=5,
output_activation_fn=None,
cumulate_att_w=True,
use_batch_norm=True,
use_concate=True,
dropout_rate=0.5,
zoneout_rate=0.1,
reduction_factor=1, ):
"""Initialize Tacotron2 decoder module.
Parameters
----------
idim : int
Dimension of the inputs.
odim : int
Dimension of the outputs.
        att : nn.Layer
            Instance of attention class.
        dlayers : int, optional
            The number of decoder lstm layers.
dunits : int, optional
The number of decoder lstm units.
prenet_layers : int, optional
The number of prenet layers.
prenet_units : int, optional
The number of prenet units.
postnet_layers : int, optional
The number of postnet layers.
postnet_filts : int, optional
The number of postnet filter size.
postnet_chans : int, optional
The number of postnet filter channels.
output_activation_fn : nn.Layer, optional
Activation function for outputs.
cumulate_att_w : bool, optional
Whether to cumulate previous attention weight.
use_batch_norm : bool, optional
Whether to use batch normalization.
use_concate : bool, optional
Whether to concatenate encoder embedding with decoder lstm outputs.
dropout_rate : float, optional
Dropout rate.
zoneout_rate : float, optional
Zoneout rate.
reduction_factor : int, optional
Reduction factor.
"""
super().__init__()
# store the hyperparameters
self.idim = idim
self.odim = odim
self.att = att
self.output_activation_fn = output_activation_fn
self.cumulate_att_w = cumulate_att_w
self.use_concate = use_concate
self.reduction_factor = reduction_factor
# check attention type
if isinstance(self.att, AttForwardTA):
self.use_att_extra_inputs = True
else:
self.use_att_extra_inputs = False
# define lstm network
prenet_units = prenet_units if prenet_layers != 0 else odim
self.lstm = nn.LayerList()
for layer in six.moves.range(dlayers):
iunits = idim + prenet_units if layer == 0 else dunits
lstm = nn.LSTMCell(iunits, dunits)
if zoneout_rate > 0.0:
lstm = ZoneOutCell(lstm, zoneout_rate)
self.lstm.append(lstm)
# define prenet
if prenet_layers > 0:
self.prenet = Prenet(
idim=odim,
n_layers=prenet_layers,
n_units=prenet_units,
dropout_rate=dropout_rate, )
else:
self.prenet = None
# define postnet
if postnet_layers > 0:
self.postnet = Postnet(
idim=idim,
odim=odim,
n_layers=postnet_layers,
n_chans=postnet_chans,
n_filts=postnet_filts,
use_batch_norm=use_batch_norm,
dropout_rate=dropout_rate, )
else:
self.postnet = None
# define projection layers
iunits = idim + dunits if use_concate else dunits
self.feat_out = nn.Linear(
iunits, odim * reduction_factor, bias_attr=False)
self.prob_out = nn.Linear(iunits, reduction_factor)
# initialize
# self.apply(decoder_init)
def _zero_state(self, hs):
init_hs = paddle.zeros([paddle.shape(hs)[0], self.lstm[0].hidden_size])
return init_hs
def forward(self, hs, hlens, ys):
"""Calculate forward propagation.
Parameters
----------
hs : Tensor
Batch of the sequences of padded hidden states (B, Tmax, idim).
        hlens : Tensor(int64)
            Batch of lengths of each padded input batch (B,).
ys : Tensor
Batch of the sequences of padded target features (B, Lmax, odim).
Returns
----------
Tensor
Batch of output tensors after postnet (B, Lmax, odim).
Tensor
Batch of output tensors before postnet (B, Lmax, odim).
Tensor
Batch of logits of stop prediction (B, Lmax).
Tensor
Batch of attention weights (B, Lmax, Tmax).
Note
----------
This computation is performed in teacher-forcing manner.
"""
# thin out frames (B, Lmax, odim) -> (B, Lmax/r, odim)
if self.reduction_factor > 1:
ys = ys[:, self.reduction_factor - 1::self.reduction_factor]
# length list should be list of int
# hlens = list(map(int, hlens))
# initialize hidden states of decoder
c_list = [self._zero_state(hs)]
z_list = [self._zero_state(hs)]
for _ in six.moves.range(1, len(self.lstm)):
c_list += [self._zero_state(hs)]
z_list += [self._zero_state(hs)]
prev_out = paddle.zeros([paddle.shape(hs)[0], self.odim])
# initialize attention
prev_att_w = None
self.att.reset()
# loop for an output sequence
outs, logits, att_ws = [], [], []
for y in ys.transpose([1, 0, 2]):
if self.use_att_extra_inputs:
att_c, att_w = self.att(hs, hlens, z_list[0], prev_att_w,
prev_out)
else:
att_c, att_w = self.att(hs, hlens, z_list[0], prev_att_w)
prenet_out = self.prenet(
prev_out) if self.prenet is not None else prev_out
xs = paddle.concat([att_c, prenet_out], axis=1)
# we only use the second output of LSTMCell in paddle
_, next_hidden = self.lstm[0](xs, (z_list[0], c_list[0]))
z_list[0], c_list[0] = next_hidden
for i in six.moves.range(1, len(self.lstm)):
# we only use the second output of LSTMCell in paddle
_, next_hidden = self.lstm[i](z_list[i - 1],
(z_list[i], c_list[i]))
z_list[i], c_list[i] = next_hidden
zcs = (paddle.concat([z_list[-1], att_c], axis=1)
if self.use_concate else z_list[-1])
outs += [
self.feat_out(zcs).reshape([paddle.shape(hs)[0], self.odim, -1])
]
logits += [self.prob_out(zcs)]
att_ws += [att_w]
# teacher forcing
prev_out = y
if self.cumulate_att_w and prev_att_w is not None:
prev_att_w = prev_att_w + att_w # Note: error when use +=
else:
prev_att_w = att_w
# (B, Lmax)
logits = paddle.concat(logits, axis=1)
# (B, odim, Lmax)
before_outs = paddle.concat(outs, axis=2)
# (B, Lmax, Tmax)
att_ws = paddle.stack(att_ws, axis=1)
if self.reduction_factor > 1:
# (B, odim, Lmax)
before_outs = before_outs.reshape(
[paddle.shape(before_outs)[0], self.odim, -1])
if self.postnet is not None:
# (B, odim, Lmax)
after_outs = before_outs + self.postnet(before_outs)
else:
after_outs = before_outs
# (B, Lmax, odim)
before_outs = before_outs.transpose([0, 2, 1])
# (B, Lmax, odim)
after_outs = after_outs.transpose([0, 2, 1])
logits = logits
# apply activation function for scaling
if self.output_activation_fn is not None:
before_outs = self.output_activation_fn(before_outs)
after_outs = self.output_activation_fn(after_outs)
return after_outs, before_outs, logits, att_ws
def inference(
self,
h,
threshold=0.5,
minlenratio=0.0,
maxlenratio=10.0,
use_att_constraint=False,
backward_window=None,
forward_window=None, ):
"""Generate the sequence of features given the sequences of characters.
Parameters
----------
h : Tensor
Input sequence of encoder hidden states (T, C).
threshold : float, optional
Threshold to stop generation.
minlenratio : float, optional
Minimum length ratio.
If set to 1.0 and the length of input is 10,
the minimum length of outputs will be 10 * 1 = 10.
        maxlenratio : float, optional
            Maximum length ratio.
If set to 10 and the length of input is 10,
the maximum length of outputs will be 10 * 10 = 100.
use_att_constraint : bool
Whether to apply attention constraint introduced in `Deep Voice 3`_.
backward_window : int
Backward window size in attention constraint.
forward_window : int
Forward window size in attention constraint.
Returns
----------
Tensor
Output sequence of features (L, odim).
Tensor
Output sequence of stop probabilities (L,).
Tensor
Attention weights (L, T).
Note
----------
This computation is performed in auto-regressive manner.
.. _`Deep Voice 3`: https://arxiv.org/abs/1710.07654
"""
# setup
assert len(paddle.shape(h)) == 2
hs = h.unsqueeze(0)
ilens = paddle.shape(h)[0]
maxlen = int(paddle.shape(h)[0] * maxlenratio)
minlen = int(paddle.shape(h)[0] * minlenratio)
# initialize hidden states of decoder
c_list = [self._zero_state(hs)]
z_list = [self._zero_state(hs)]
for _ in six.moves.range(1, len(self.lstm)):
c_list += [self._zero_state(hs)]
z_list += [self._zero_state(hs)]
prev_out = paddle.zeros([1, self.odim])
# initialize attention
prev_att_w = None
self.att.reset()
# setup for attention constraint
if use_att_constraint:
last_attended_idx = 0
else:
last_attended_idx = None
# loop for an output sequence
idx = 0
outs, att_ws, probs = [], [], []
while True:
# updated index
idx += self.reduction_factor
# decoder calculation
if self.use_att_extra_inputs:
att_c, att_w = self.att(
hs,
ilens,
z_list[0],
prev_att_w,
prev_out,
last_attended_idx=last_attended_idx,
backward_window=backward_window,
forward_window=forward_window, )
else:
att_c, att_w = self.att(
hs,
ilens,
z_list[0],
prev_att_w,
last_attended_idx=last_attended_idx,
backward_window=backward_window,
forward_window=forward_window, )
att_ws += [att_w]
prenet_out = self.prenet(
prev_out) if self.prenet is not None else prev_out
xs = paddle.concat([att_c, prenet_out], axis=1)
# we only use the second output of LSTMCell in paddle
_, next_hidden = self.lstm[0](xs, (z_list[0], c_list[0]))
z_list[0], c_list[0] = next_hidden
for i in six.moves.range(1, len(self.lstm)):
# we only use the second output of LSTMCell in paddle
_, next_hidden = self.lstm[i](z_list[i - 1],
(z_list[i], c_list[i]))
z_list[i], c_list[i] = next_hidden
zcs = (paddle.concat([z_list[-1], att_c], axis=1)
if self.use_concate else z_list[-1])
# [(1, odim, r), ...]
outs += [self.feat_out(zcs).reshape([1, self.odim, -1])]
# [(r), ...]
probs += [F.sigmoid(self.prob_out(zcs))[0]]
if self.output_activation_fn is not None:
prev_out = self.output_activation_fn(
outs[-1][:, :, -1]) # (1, odim)
else:
prev_out = outs[-1][:, :, -1] # (1, odim)
if self.cumulate_att_w and prev_att_w is not None:
prev_att_w = prev_att_w + att_w # Note: error when use +=
else:
prev_att_w = att_w
if use_att_constraint:
last_attended_idx = int(att_w.argmax())
# check whether to finish generation
if sum(paddle.cast(probs[-1] >= threshold,
'int64')) > 0 or idx >= maxlen:
# check mininum length
if idx < minlen:
continue
# (1, odim, L)
outs = paddle.concat(outs, axis=2)
if self.postnet is not None:
# (1, odim, L)
outs = outs + self.postnet(outs)
# (L, odim)
outs = outs.transpose([0, 2, 1]).squeeze(0)
probs = paddle.concat(probs, axis=0)
att_ws = paddle.concat(att_ws, axis=0)
break
if self.output_activation_fn is not None:
outs = self.output_activation_fn(outs)
return outs, probs, att_ws
def calculate_all_attentions(self, hs, hlens, ys):
"""Calculate all of the attention weights.
Parameters
----------
hs : Tensor
Batch of the sequences of padded hidden states (B, Tmax, idim).
hlens : Tensor(int64)
Batch of lengths of each input batch (B,).
ys : Tensor
Batch of the sequences of padded target features (B, Lmax, odim).
Returns
----------
numpy.ndarray
Batch of attention weights (B, Lmax, Tmax).
Note
----------
This computation is performed in teacher-forcing manner.
"""
# thin out frames (B, Lmax, odim) -> (B, Lmax/r, odim)
if self.reduction_factor > 1:
ys = ys[:, self.reduction_factor - 1::self.reduction_factor]
# length list should be list of int
hlens = list(map(int, hlens))
# initialize hidden states of decoder
c_list = [self._zero_state(hs)]
z_list = [self._zero_state(hs)]
for _ in six.moves.range(1, len(self.lstm)):
c_list += [self._zero_state(hs)]
z_list += [self._zero_state(hs)]
prev_out = paddle.zeros([paddle.shape(hs)[0], self.odim])
# initialize attention
prev_att_w = None
self.att.reset()
# loop for an output sequence
att_ws = []
for y in ys.transpose([1, 0, 2]):
if self.use_att_extra_inputs:
att_c, att_w = self.att(hs, hlens, z_list[0], prev_att_w,
prev_out)
else:
att_c, att_w = self.att(hs, hlens, z_list[0], prev_att_w)
att_ws += [att_w]
prenet_out = self.prenet(
prev_out) if self.prenet is not None else prev_out
xs = paddle.concat([att_c, prenet_out], axis=1)
# we only use the second output of LSTMCell in paddle
_, next_hidden = self.lstm[0](xs, (z_list[0], c_list[0]))
z_list[0], c_list[0] = next_hidden
for i in six.moves.range(1, len(self.lstm)):
z_list[i], c_list[i] = self.lstm[i](z_list[i - 1],
(z_list[i], c_list[i]))
# teacher forcing
prev_out = y
if self.cumulate_att_w and prev_att_w is not None:
# Note: error when use +=
prev_att_w = prev_att_w + att_w
else:
prev_att_w = att_w
# (B, Lmax, Tmax)
att_ws = paddle.stack(att_ws, axis=1)
return att_ws
|
StarcoderdataPython
|
7746
|
<gh_stars>1-10
import concurrent.futures
import wrapper as w
from multiprocessing import Process
import atexit
import time
from queue import Queue
''' 8 Processes, 24 threads per process = 192 threads '''
NUM_PROCESSES = 8
workerList = []  # Worker processes
class Worker(Process):  # Need multiple threads or else it takes forever
    def __init__(self, queue):  # queue holds the sentences this worker should extract
        super().__init__()
        self.queue = queue
        self.outQueue = Queue()
    def run(self):
        with concurrent.futures.ThreadPoolExecutor(max_workers=24) as executor:
            executor.submit(self.loadUrl)
    def loadUrl(self):
        while not self.queue.empty():
            sentence = self.queue.get()
            ex = w.GrapheneExtract(sentence)
            self.outQueue.put(sentence.strip() + "\t" + str(ex.json) + "\n")
queues = []  # Use separate queues to avoid waiting for locks
with open("data/all_news.txt", "r") as news:
    lines = news.readlines()
# Split the lines evenly across one queue per process
chunk_size = max(1, len(lines) // NUM_PROCESSES)
for i in range(NUM_PROCESSES):
    queue = Queue()
    for line in lines[i * chunk_size:(i + 1) * chunk_size]:
        queue.put(line.strip())
    queues.append(queue)
print("Queue populated")
for i in range(NUM_PROCESSES):
    worker = Worker(queues[i])
    worker.daemon = True
    worker.start()
    workerList.append(worker)
def close_running_threads():
    for thread in workerList:
        thread.join()
atexit.register(close_running_threads)
print("All threads registered and working.")
while True:
    print(str(sum(q.qsize() for q in queues)) + " sentences remaining to be requested")
    time.sleep(2)  # Print every two seconds
|
StarcoderdataPython
|
1721067
|
<gh_stars>1-10
from iocompython import Root, EndPoint, Signal, Stream, json2bin
import ioterminal
import time
def get_network_conf(device_name, network_name):
global root
exp_mblk_path = 'conf_exp.' + device_name + '.' + network_name
imp_mblk_path = 'conf_imp.' + device_name + '.' + network_name
stream = Stream(root, frd = "frd_buf", tod = "tod_buf", exp = exp_mblk_path, imp = imp_mblk_path, select = 2)
stream.start_read()
while True:
s = stream.run()
if s != None:
break
time.sleep(0.01)
if s == 'completed':
data = stream.get_data();
print(data)
else:
print(s)
stream.delete()
def set_network_conf(device_name, network_name):
global root
exp_mblk_path = 'conf_exp.' + device_name + '.' + network_name
imp_mblk_path = 'conf_imp.' + device_name + '.' + network_name
stream = Stream(root, frd = "frd_buf", tod = "tod_buf", exp = exp_mblk_path, imp = imp_mblk_path, select = 2)
my_conf_bytes = str.encode("My dummy network configuration string")
stream.start_write(my_conf_bytes)
while True:
s = stream.run()
if s != None:
break
time.sleep(0.01)
if s == 'completed':
print("success")
else:
print(s)
stream.delete()
def main():
global root
root = Root('netconftest', security='certfile=bob.crt,keyfile=bob.key')
root.queue_events()
ioterminal.start(root)
epoint = EndPoint(root, flags='tls,dynamic')
while (ioterminal.run(root)):
e = root.wait_com_event(1000)
if e != None:
print(e)
event = e[0]
mblk_name = e[3]
device_name = e[2]
network_name = e[1]
# New device. This has a potential problem.
if event == 'new_device':
#set_network_conf(device_name, network_name)
# get_network_conf(device_name, network_name)
# print(root.setconf(device_name + "." + network_name, str.encode("Dummy config data")))
# print(root.getconf(device_name + "." + network_name))
print(root.getconf(device_name + "." + network_name, 3)) # default network configuration
root.delete()
if (__name__ == '__main__'):
main()
|
StarcoderdataPython
|
19213
|
"""
These tests require an AWS account to be set up, but don't require any manual
intervention beyond some initial setup. Also, these tests create instances (which cost
money!). Either `meadowrun-manage install` needs to be set up, or `meadowrun-manage
clean` needs to be run periodically
"""
import asyncio
import datetime
import io
import pprint
import threading
import uuid
import boto3
import fabric
import pytest
import meadowrun.aws_integration.management_lambdas.adjust_ec2_instances as adjust_ec2_instances # noqa: E501
from basics import BasicsSuite, HostProvider, ErrorsSuite, MapSuite
from instance_registrar_suite import (
InstanceRegistrarProvider,
InstanceRegistrarSuite,
TERMINATE_INSTANCES_IF_IDLE_FOR_TEST,
)
from meadowrun.aws_integration.aws_core import _get_default_region_name
from meadowrun.aws_integration.ec2_instance_allocation import EC2InstanceRegistrar
from meadowrun.aws_integration.ec2_pricing import _get_ec2_instance_types
from meadowrun.aws_integration.ec2_ssh_keys import ensure_meadowrun_key_pair
from meadowrun.aws_integration.grid_tasks_sqs import (
_add_tasks,
_complete_task,
_create_queues_for_job,
_get_task,
get_results,
worker_loop,
)
from meadowrun.instance_allocation import InstanceRegistrar
from meadowrun.instance_selection import choose_instance_types_for_job, Resources
from meadowrun.meadowrun_pb2 import ProcessState
from meadowrun.run_job import AllocCloudInstance
from meadowrun.run_job_core import Host, JobCompletion, CloudProviderType
# TODO don't always run tests in us-east-2
REGION = "us-east-2"
class AwsHostProvider(HostProvider):
def get_host(self) -> Host:
return AllocCloudInstance(1, 2, 80, "EC2", REGION)
def get_test_repo_url(self) -> str:
return "https://github.com/meadowdata/test_repo"
async def get_log_file_text(self, job_completion: JobCompletion) -> str:
with fabric.Connection(
job_completion.public_address,
user="ubuntu",
connect_kwargs={"pkey": ensure_meadowrun_key_pair(REGION)},
) as conn:
with io.BytesIO() as local_copy:
conn.get(job_completion.log_file_name, local_copy)
return local_copy.getvalue().decode("utf-8")
class TestBasicsAws(AwsHostProvider, BasicsSuite):
pass
class TestErrorsAws(AwsHostProvider, ErrorsSuite):
pass
class TestMapAws(MapSuite):
def cloud_provider(self) -> CloudProviderType:
return "EC2"
class EC2InstanceRegistrarProvider(InstanceRegistrarProvider[InstanceRegistrar]):
async def get_instance_registrar(self) -> InstanceRegistrar:
return EC2InstanceRegistrar(await _get_default_region_name(), "create")
async def deregister_instance(
self,
instance_registrar: InstanceRegistrar,
public_address: str,
require_no_running_jobs: bool,
) -> bool:
return adjust_ec2_instances._deregister_ec2_instance(
public_address,
require_no_running_jobs,
instance_registrar.get_region_name(),
)
async def num_currently_running_instances(
self, instance_registrar: InstanceRegistrar
) -> int:
ec2 = boto3.resource("ec2", region_name=instance_registrar.get_region_name())
return sum(1 for _ in adjust_ec2_instances._get_running_instances(ec2))
async def run_adjust(self, instance_registrar: InstanceRegistrar) -> None:
adjust_ec2_instances._deregister_and_terminate_instances(
instance_registrar.get_region_name(),
TERMINATE_INSTANCES_IF_IDLE_FOR_TEST,
datetime.timedelta.min,
)
async def terminate_all_instances(
self, instance_registrar: InstanceRegistrar
) -> None:
adjust_ec2_instances.terminate_all_instances(
instance_registrar.get_region_name()
)
def cloud_provider(self) -> CloudProviderType:
return "EC2"
class TestEC2InstanceRegistrar(EC2InstanceRegistrarProvider, InstanceRegistrarSuite):
pass
@pytest.mark.asyncio
async def test_get_ec2_instance_types():
# This function makes a lot of assumptions about the format of the data we get from
# various AWS endpoints, good to check that everything works. Look for unexpected
# warnings!
instance_types = await _get_ec2_instance_types(REGION)
# the actual number of instance types will fluctuate based on AWS' whims.
assert len(instance_types) > 600
chosen_instance_types = choose_instance_types_for_job(
Resources(5, 3, {}), 52, 10, instance_types
)
total_cpu = sum(
instance_type.instance_type.logical_cpu * instance_type.num_instances
for instance_type in chosen_instance_types
)
assert total_cpu >= 3 * 52
total_memory_gb = sum(
instance_type.instance_type.memory_gb * instance_type.num_instances
for instance_type in chosen_instance_types
)
assert total_memory_gb >= 5 * 52
assert all(
instance_type.instance_type.interruption_probability <= 10
for instance_type in chosen_instance_types
)
pprint.pprint(chosen_instance_types)
chosen_instance_types = choose_instance_types_for_job(
Resources(24000, 1000, {}), 1, 10, instance_types
)
assert len(chosen_instance_types) == 0
class TestGridTaskQueue:
def test_grid_task_queue(self):
"""
Tests the grid_task_queue functions without actually running any tasks. Uses SQS
resources.
"""
region_name = asyncio.run(_get_default_region_name())
task_arguments = ["hello", ("hey", "there"), {"a": 1}]
# dummy variables
job_id = str(uuid.uuid4())
public_address = "foo"
worker_id = 1
request_queue_url, result_queue_url = asyncio.run(
_create_queues_for_job(job_id, region_name)
)
# get results in a different thread as we're adding/completing tasks
results = None
def get_results_thread():
nonlocal results
results = asyncio.run(
get_results(result_queue_url, region_name, len(task_arguments), 1)
)
results_thread = threading.Thread(target=get_results_thread)
results_thread.start()
# add some tasks
asyncio.run(_add_tasks(request_queue_url, region_name, task_arguments))
# get some tasks and complete them
task1 = _get_task(
request_queue_url,
result_queue_url,
region_name,
0,
public_address,
worker_id,
)
assert task1 is not None
task2 = _get_task(
request_queue_url,
result_queue_url,
region_name,
0,
public_address,
worker_id,
)
assert task2 is not None
_complete_task(
result_queue_url,
region_name,
task1,
ProcessState(
state=ProcessState.ProcessStateEnum.SUCCEEDED,
pickled_result=task1.pickled_function_arguments,
),
public_address,
worker_id,
)
task3 = _get_task(
request_queue_url,
result_queue_url,
region_name,
0,
public_address,
worker_id,
)
assert task3 is not None
# there should be no more tasks to get
assert (
_get_task(
request_queue_url,
result_queue_url,
region_name,
0,
public_address,
worker_id,
)
is None
)
_complete_task(
result_queue_url,
region_name,
task2,
ProcessState(
state=ProcessState.ProcessStateEnum.SUCCEEDED,
pickled_result=task2.pickled_function_arguments,
),
public_address,
worker_id,
)
_complete_task(
result_queue_url,
region_name,
task3,
ProcessState(
state=ProcessState.ProcessStateEnum.SUCCEEDED,
pickled_result=task3.pickled_function_arguments,
),
public_address,
worker_id,
)
results_thread.join()
assert results == task_arguments
def test_worker_loop(self):
region_name = asyncio.run(_get_default_region_name())
task_arguments = [1, 2, 3, 4]
# dummy variables
job_id = str(uuid.uuid4())
public_address = "foo"
worker_id = 1
request_queue_url, result_queue_url = asyncio.run(
_create_queues_for_job(job_id, region_name)
)
# get results on another thread
results = None
def get_results_thread():
nonlocal results
results = asyncio.run(
get_results(result_queue_url, region_name, len(task_arguments), 1)
)
results_thread = threading.Thread(target=get_results_thread)
results_thread.start()
# add tasks
asyncio.run(_add_tasks(request_queue_url, region_name, task_arguments))
# start a worker_loop which will get tasks and complete them
worker_thread = threading.Thread(
target=lambda: worker_loop(
lambda x: x**x,
request_queue_url,
result_queue_url,
region_name,
public_address,
worker_id,
)
)
worker_thread.start()
results_thread.join()
worker_thread.join()
assert results == [1, 4, 27, 256]
|
StarcoderdataPython
|
4817309
|
<filename>plans/apps.py
from django.apps import AppConfig
from . import conf as app_settings
class PlansConfig(AppConfig):
name = 'plans'
verbose_name = app_settings.APP_VERBOSE_NAME
|
StarcoderdataPython
|
3348593
|
<filename>multilingual_librispeech/reorg_speakers.py
import os
mls_root = 'D:/Data/speech/multilingual_librispeech'
for language in os.listdir(mls_root):
language_dir = os.path.join(mls_root, language)
if not os.path.isdir(language_dir):
continue
print(language)
for speaker in os.listdir(language_dir):
if '_' in speaker:
continue
if speaker in ['dev', 'test', 'train']:
continue
original_speaker_dir = os.path.join(language_dir, speaker)
if not os.path.isdir(original_speaker_dir):
continue
print(speaker)
for file in os.listdir(original_speaker_dir):
t = file.split('_')
speaker, book = t[0], t[1]
new_speaker = speaker +"_" + book
new_speaker_dir = os.path.join(language_dir, new_speaker)
os.makedirs(new_speaker_dir, exist_ok=True)
os.rename(os.path.join(original_speaker_dir, file), os.path.join(new_speaker_dir, file))
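# Illustrative example of the reorganisation above (hypothetical filename, not taken
# from the dataset): a file "1234_5678_000001.flac" stored under
# <mls_root>/<language>/1234/ is moved to <mls_root>/<language>/1234_5678/, i.e.
# recordings are regrouped per (speaker, book) pair.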
|
StarcoderdataPython
|
111981
|
<filename>ckanext/ckanpackager/cli.py
import click
from ckan import model
from .model.stat import ckanpackager_stats_table
def get_commands():
return [ckanpackager]
@click.group()
def ckanpackager():
'''
The CKAN Packager CLI.
'''
pass
@ckanpackager.command(name='initdb')
def init_db():
'''
Initialise the ckanpackager tables.
'''
if not ckanpackager_stats_table.exists(model.meta.engine):
ckanpackager_stats_table.create(model.meta.engine)
click.secho('Created ckanpackager_stats table', fg='green')
else:
click.secho('ckanpackager_stats already exists, skipping init', fg='green')
|
StarcoderdataPython
|
1748279
|
from setup import *
def plot_attention(epoch_attentions, epoch):
attn_plot_size = 16
attention = np.array(epoch_attentions[-1])
attention = attention[:attn_plot_size, :attn_plot_size]
plt.clf()
sns_plot = sns.heatmap(attention, cmap="GnBu")
plt.title('Hidden State Activation vs. Decoder Time Step', fontsize=12)
plt.xlabel('Decoder Time Step', fontsize=12)
plt.ylabel('Hidden Activation', fontsize=12)
curr_plot_dir = f"{plot_path}/{start_time}-{tune.get_trial_name()}/"
os.makedirs(curr_plot_dir, exist_ok=True)
plt.savefig(f"{curr_plot_dir}/attn-epoch{epoch}-.png")
plt.savefig(f"{tune.get_trial_dir()}/attn-epoch{epoch}-.png")
|
StarcoderdataPython
|
1608226
|
"""
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
"""
Copyright (c) 2016-2019 <NAME> http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import redis
from programy.utils.logging.ylogger import YLogger
from programy.storage.engine import StorageEngine
from programy.storage.stores.nosql.redis.store.binaries import RedisBinariesStore
from programy.storage.stores.nosql.redis.store.braintree import RedisBraintreeStore
from programy.storage.stores.nosql.redis.store.conversations import RedisConversationStore
from programy.storage.stores.nosql.redis.store.logs import RedisLogsStore
from programy.storage.stores.nosql.redis.store.duplicates import RedisDuplicatesStore
from programy.storage.stores.nosql.redis.store.errors import RedisErrorsStore
from programy.storage.stores.nosql.redis.store.errors_collection import RedisErrorsCollectionStore
from programy.storage.stores.nosql.redis.store.learnf import RedisLearnfStore
class RedisStorageEngine(StorageEngine):
def __init__(self, configuration):
StorageEngine.__init__(self, configuration)
def initialise(self):
self._prefix = self.configuration.prefix
self._sessions_set_key = "{prefix}:sessions".format(prefix=self._prefix)
self._expiretime = self.configuration.expiretime
self._redis = redis.StrictRedis(
host=self.configuration.host,
port=self.configuration.port,
password=<PASSWORD>,
db=self.configuration.db,
username=self.configuration.username,
ssl=self.configuration.ssl,
socket_timeout=self.configuration.timeout)
if self.configuration.drop_all_first is True:
try:
self.conversation_store().empty()
except Exception as e:
YLogger.exception(self, "Failed deleting conversation redis data - ", e)
def binaries_store(self):
return RedisBinariesStore(self)
def braintree_store(self):
return RedisBraintreeStore(self)
def learnf_store(self):
return RedisLearnfStore(self)
def errors_store(self):
return RedisErrorsStore(self)
def duplicates_store(self):
return RedisDuplicatesStore(self)
def errors_collection_store(self):
return RedisErrorsCollectionStore(self)
def conversation_store(self):
return RedisConversationStore(self)
def logs_store(self):
return RedisLogsStore(self)
|
StarcoderdataPython
|
114312
|
from math import ceil
import cocotb
import random
from cocotb.clock import Clock
from cocotb.result import TestSuccess, TestFailure
from cocotb.triggers import RisingEdge
from queue import Queue
from poseidon_python import basic
from cocotb_test import simulator
CASES_NUM = 1 # the number of test cases
BUFFER_SIZE = 9 # the size of buffer in transmitter
class AXI4StreamTransmitter:
def __init__(self, target) -> None:
self.ref_outputs = Queue(maxsize=80) # store reference results
self.dut = target
async def reset_dut(self):
dut = self.dut
dut.reset.value = 0
await RisingEdge(dut.clk)
dut.reset.value = 1
for i in range(2):
await RisingEdge(dut.clk)
dut.reset.value = 0
async def generate_input(self):
"""generate input signals"""
dut = self.dut
cases_count = 0
while cases_count < CASES_NUM:
# get random values
inputs = []
for i in range(BUFFER_SIZE):
rand_value = random.randint(0, basic.P - 1)
inputs.append([cases_count % pow(2, 5), rand_value])
self.ref_outputs.put(rand_value)
cases_count += 1
# assign random values to dut io port
tag = []
while len(tag) < BUFFER_SIZE:
valid = random.random() > 0.2
            index = random.randint(0, BUFFER_SIZE - 1)
while index in tag:
                index = random.randint(0, BUFFER_SIZE - 1)
dut.io_input_valid.value = valid
dut.io_input_payload_state_id.value = inputs[index][0]
dut.io_input_payload_state_element.value = inputs[index][1]
await RisingEdge(dut.clk)
if (dut.io_input_valid.value & dut.io_input_ready.value) == True:
tag.append(index)
async def check_output(self):
"""check output signals"""
cases_count = 0
dut = self.dut
while True:
# get random ready signals
ready = random.random() > 0.3
dut.io_output_ready.value = ready
await RisingEdge(dut.clk)
if (dut.io_output_ready.value & dut.io_output_valid.value) == True:
ref_res = self.ref_outputs.get()
dut_res = int(dut.io_output_payload.value)
assert ref_res == dut_res, "test case {} failed".format(cases_count)
cases_count += 1
if cases_count == CASES_NUM:
raise TestSuccess(" pass {} test cases".format(CASES_NUM))
@cocotb.test(timeout_time=100000, timeout_unit="ns")
async def AXI4StreamTransmitterTest(dut):
await cocotb.start(Clock(dut.clk, 10, "ns").start())
# set default values to all dut input ports
dut.io_input_valid.value = False
dut.io_input_payload_state_id.value = 0
dut.io_input_payload_state_element.value = 0
dut.io_output_ready.value = False
# start testing
tester = AXI4StreamTransmitter(dut)
await tester.reset_dut()
await cocotb.start(tester.generate_input())
await cocotb.start(tester.check_output())
while True:
await RisingEdge(dut.clk)
# pytest
def test_AXI4StreamTransmitter():
simulator.run(
verilog_sources=["../main/verilog/AXI4StreamTransmitter.v"],
toplevel="AXI4StreamTransmitter",
module="AXI4StreamTransmitterTester",
python_search="./src/reference_model/",
)
|
StarcoderdataPython
|
3207039
|
<reponame>douglaspands/controle-financeiro
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from django.views.generic import TemplateView, View
from .forms import RegistroForm
from .usecases import registrar_usuario_pessoa_fisica
class LogoutConfirmar(TemplateView):
template_name = "registration/logout_confirmar.html"
class UsuarioCriar(View):
form_class = RegistroForm
template_name = "autenticacao/usuario_criar.html"
context_object_name = "usuario"
def get(self, request: HttpRequest) -> HttpResponse:
form = self.form_class()
return render(request, self.template_name, {"form": form})
def post(self, request: HttpRequest) -> HttpResponse:
form = self.form_class(request.POST)
if form.is_valid():
registrar_usuario_pessoa_fisica(form)
return render(request, "autenticacao/usuario_criar_concluido.html", {})
else:
return render(request, self.template_name, {"form": form})
|
StarcoderdataPython
|
3311928
|
from todo import *
bloco = Bloquinho()
# placing the block inside the program that manipulates it
app = Request(bloco)
# start the program loop
app.run()
|
StarcoderdataPython
|
121593
|
<filename>graspsampling-py-defgraspsim/graspsampling/utilities.py<gh_stars>10-100
# Copyright (c) 2020 NVIDIA Corporation
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
"""Helper functions for grasp sampling."""
import trimesh
import trimesh.transformations as tra
import numpy as np
import os
try:
from trimesh.collision import fcl
fcl_import_failed = False
except Exception:
fcl_import_failed = True
def sample_spherical_cap(cone_dirs, cone_aperture, num_samples_per_dir=1):
"""Uniformly distributed points on a spherical cap (sphere radius = 1).
Args:
cone_dirs (np.array): Nx3 array that represents cone directions.
cone_aperture (float): Aperture of cones / size of spherical cap.
num_samples_per_dir (int, optional): Number of samples to draw per direction. Defaults to 1.
Raises:
NotImplementedError: [description]
Returns:
np.array: Nx3 array of sampled points.
"""
# sample around north pole
if num_samples_per_dir > 1:
raise NotImplementedError("num_samples_per_dir > 1 is not implemented")
num_samples = len(cone_dirs) * num_samples_per_dir
z = np.random.rand(num_samples) * (1.0 - np.cos(cone_aperture)) + np.cos(
cone_aperture
)
phi = np.random.rand(num_samples) * 2.0 * np.pi
x = np.sqrt(1.0 - np.power(z, 2)) * np.cos(phi)
y = np.sqrt(1.0 - np.power(z, 2)) * np.sin(phi)
points = np.vstack([x, y, z]).T
points = points[..., np.newaxis]
transforms = np.array(
[
trimesh.geometry.align_vectors([0, 0, 1], cone_dir)[:3, :3]
for cone_dir in cone_dirs
]
)
result = np.matmul(transforms, points)
return np.squeeze(result, axis=2)
def sample_random_orientation_z(mean_axis_z, axis_cone_aperture):
"""Sample a random orientation around a cap defined by mean_axis_z."""
z_axis = sample_spherical_cap(
mean_axis_z, axis_cone_aperture, num_samples_per_dir=1
)[0]
while True:
r = sample_random_direction_R3(1)[0]
# check if collinear
if abs(z_axis.dot(r)) < (1.0 - 1e-2):
break
y_axis = np.cross(z_axis, r)
x_axis = np.cross(y_axis, z_axis)
orientation = np.eye(4)
orientation[:3, 0] = x_axis
orientation[:3, 1] = y_axis
orientation[:3, 2] = z_axis
return orientation
def random_quaternions(size):
"""Generate random quaternions, uniformly distributed on SO(3).
See: http://planning.cs.uiuc.edu/node198.html
Args:
size (int): Number of quaternions.
Returns:
np.array: sizex4 array of quaternions in w-x-y-z format.
"""
u = np.random.rand(size, 3)
r1 = np.sqrt(1.0 - u[:, 0])
r2 = np.sqrt(u[:, 0])
t = 2.0 * np.pi * u[:, 1:]
qw = np.cos(t[:, 1]) * r2
qx = np.sin(t[:, 0]) * r1
qy = np.cos(t[:, 0]) * r1
qz = np.sin(t[:, 1]) * r2
return np.vstack([qw, qx, qy, qz]).T
def sample_random_direction_R3(number_of_directions):
"""Uniformly distributed directions on S2.
Sampled from a multivariate Gaussian, followed by normalization.
Args:
number_of_directions (int): Number of directions to sample.
Returns:
np.array: number_of_directionsx3 array of directions.
"""
# sample multivariate Gaussian and normalize
dir = np.random.normal(0, 1, (number_of_directions, 3))
dir = dir / np.linalg.norm(dir, axis=1)[:, np.newaxis]
return dir
def discretized_SO3(resolution):
"""Return an array of quaternions that are equidistant on SO(3).
Args:
resolution (int): A number in {72, 576, 4608, 36864}.
Raises:
ValueError: Argument represents an unknown resolution.
Returns:
np.array: Nx4 array of quaternions (in w-x-y-z format)
"""
available_resolutions = [72, 576, 4608, 36864]
if resolution not in available_resolutions:
raise ValueError(
f"SO3 resolution {resolution} unknown. Available resolutions: \
{', '.join([str(x) for x in available_resolutions])}"
)
res_path = get_resource_path("data/discretizations")
res_name = os.path.join(res_path, f"so3_{int(resolution)}_quaternionxyzw.npy")
quaternions = np.load(res_name)
return quats_xyzw_to_wxyz(quaternions)
def numpy_to_fcl_transform(arr):
"""Convert numpy matrix to fcl transform."""
return fcl.Transform(arr[:3, :3], arr[:3, 3])
def fcl_transform_to_numpy(arr):
"""Convert fcl transform to numpy matrix."""
ret = np.eye(4)
ret[:3, :3] = arr.getRotation()
ret[:3, 3] = arr.getTranslation()
return ret
def mat_to_pose_wxyz(mat):
"""Convert matrix to pos and wxyz quaternion."""
p = mat[:3, 3].tolist()
p += tra.quaternion_from_matrix(mat).tolist()
return np.array(p)
def mat_to_pose_xyzw(mat):
"""Convert matrix to pos and xyzw quaternion."""
p = mat[:3, 3].tolist()
p += np.roll(tra.quaternion_from_matrix(mat), -1).tolist()
return np.array(p)
def pose_wxyz_to_mat(p):
"""Convert pos and wxyz quaternion to matrix."""
tmp = tra.quaternion_matrix(p[3:])
tmp[:3, 3] = p[:3]
return tmp
def pose_xyzw_to_mat(p):
"""Convert pos and xyzw quaternion to matrix."""
tmp = tra.quaternion_matrix(np.roll(p[3:], +1))
tmp[:3, 3] = p[:3]
return tmp
def poses_xyzw_to_mats(poses):
"""Convert multiple pos and xyzw quaternion to matrices."""
mats = []
for p in poses:
# convert each transform to a pose
mat = pose_xyzw_to_mat(np.asarray(p))
mats.append(mat.tolist())
return mats
def poses_wxyz_to_mats(poses):
"""Convert multiple pos and wxyz quaternion to matrices."""
mats = []
for p in poses:
mat = pose_wxyz_to_mat(np.asarray(p))
mats.append(mat.tolist())
return mats
def quats_xyzw_to_wxyz(q):
"""Convert from xyzw to wxyz quaternions."""
return np.roll(q, 1, axis=1)
def quats_wxyz_to_xyzw(q):
"""Convert from wxyz to xyzw quaternions."""
return np.roll(q, -1, axis=1)
def pose_xyzw_to_wxyz(p):
"""Convert from pose and xyzw to wxyz quatenrions."""
tmp = p[:3].tolist()
tmp += np.roll(p[3:], +1).tolist()
return np.array(tmp)
# the main convention is: w - x - y - z
def pose_wxyz_to_xyzw(p):
"""Convert from pose and wxyz to xyzw quaternions."""
tmp = p[:3].tolist()
tmp += np.roll(p[3:], -1).tolist()
return np.array(tmp)
def mats_to_poses_xyzw(transforms):
"""Convert matrices to pos and xyzw quaternions."""
poses = []
for t in transforms:
# convert each transform to a pose
pose = mat_to_pose_xyzw(np.asarray(t))
poses.append(pose.tolist())
return poses
def mats_to_poses_wxyz(transforms):
"""Convert matrices to pos and wxyz quaternions."""
poses = []
for t in transforms:
# convert each transform to a pose
pose = mat_to_pose_wxyz(np.asarray(t))
poses.append(pose.tolist())
return poses
def get_gripper_object_bounds(gripper_mesh, object_mesh):
"""Get bounds of object with gripper."""
gripper_size = np.abs(gripper_mesh.bounding_sphere.bounds).max()
lower_bound, upper_bound = np.split(
object_mesh.bounds + [[-gripper_size], [gripper_size]], 2, axis=0
)
return lower_bound, upper_bound
def get_resource_path(path=""):
"""Get path to resouce."""
return os.path.join(os.path.abspath(os.path.dirname(__file__)), "../", path)
def instantiate_mesh(**kwargs):
"""Instantiate scaled mesh."""
fname = get_resource_path(kwargs["file"])
mesh = trimesh.load(fname)
mesh.apply_scale(kwargs["scale"])
return mesh
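# A minimal self-check sketch (not part of the original module): it uses only helpers
# defined above to round-trip the two quaternion layouts and to confirm that sampled
# quaternions are unit length.
if __name__ == "__main__":
    q_wxyz = random_quaternions(5)
    assert np.allclose(np.linalg.norm(q_wxyz, axis=1), 1.0)
    q_xyzw = quats_wxyz_to_xyzw(q_wxyz)
    assert np.allclose(quats_xyzw_to_wxyz(q_xyzw), q_wxyz)
    print("quaternion helpers OK")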
|
StarcoderdataPython
|
106234
|
<reponame>IvanProgramming/dnevnik_mos_ru
__version__ = "2.3.0"
from .school import School
from .class_unit import ClassUnit
from .group import Group
from .teacher import Teacher
from .student_profile import StudentProfile
from .client import Client
from .academic_years import AcademicYear
from .auth_providers import *
from .exceptions import *
from .base_auth_provider import BaseAuthProvider
|
StarcoderdataPython
|
1681588
|
#!/usr/bin/python3
try:
from pbkdf2 import PBKDF2
except:
print("install pbkdf2: \"pip3 install pbkdf2\"")
exit(1)
try:
from Crypto import Random
from Crypto.Util.py3compat import bchr
from Crypto.Cipher import AES
except:
print("install pycrypto: \"pip3 install pycrypto\"")
exit(1)
import os, sys
from base64 import b64encode
from getpass import getpass
import codecs
def pad(data_to_pad, block_size, style='pkcs7'):
padding_len = block_size-len(data_to_pad)%block_size
if style == 'pkcs7':
padding = bchr(padding_len)*padding_len
elif style == 'x923':
padding = bchr(0)*(padding_len-1) + bchr(padding_len)
elif style == 'iso7816':
padding = bchr(128) + bchr(0)*(padding_len-1)
else:
raise ValueError("Unknown padding style")
return data_to_pad + padding
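# Illustrative padding results for the three supported styles (comments only,
# assuming an 8-byte block size):
#   pad(b"abc", 8, 'pkcs7')   -> b"abc" + b"\x05" * 5
#   pad(b"abc", 8, 'x923')    -> b"abc" + b"\x00" * 4 + b"\x05"
#   pad(b"abc", 8, 'iso7816') -> b"abc" + b"\x80" + b"\x00" * 4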
def main():
# sanitize input
if len(sys.argv) < 2:
print("Usage:\n%s filename [passphrase]"%sys.argv[0])
exit(0)
inputfile = sys.argv[1]
try:
with open(inputfile, "rb") as f:
data = f.read()
except:
print("Cannot open file: %s"%inputfile)
exit(1)
if len(sys.argv) > 2:
passphrase = sys.argv[2]
else:
while True:
passphrase = getpass(prompt='Password: ')
if passphrase == getpass(prompt='Confirm: '):
break
print("Passwords don\'t match, try again.")
salt = Random.new().read(32)
iv = Random.new().read(16)
key = PBKDF2(passphrase=passphrase,salt=salt,iterations=100).read(32)
cipher = AES.new(key, AES.MODE_CBC, IV=iv)
padded = pad(data, 16)
encrypted = cipher.encrypt(padded)
projectFolder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
with open(os.path.join(projectFolder, "decryptTemplate.html")) as f:
templateHTML = f.read()
encryptedJSON = "{\"salt\":\"%s\",\"iv\":\"%s\",\"data\":\"%s\"}"%(
b64encode(salt).decode("utf-8"), b64encode(iv).decode("utf-8"), b64encode(encrypted).decode("utf-8"))
encryptedDocument = templateHTML.replace("/*{{ENCRYPTED_PAYLOAD}}*/\"\"", encryptedJSON)
filename, extension = os.path.splitext(inputfile)
outputfile = filename + "-protected" + extension
with codecs.open(outputfile, 'w','utf-8-sig') as f:
f.write(encryptedDocument)
print("File saved to %s"%outputfile)
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
3232362
|
# Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.d (the "License");
# you may not use this file except in compliance with the License.
#
# Port to userbot by @MoveAngel
from telethon.errors.rpcerrorlist import YouBlockedUserError
from userbot import bot, CMD_HELP
from userbot.events import register
from asyncio.exceptions import TimeoutError
@register(outgoing=True, pattern=r"^\.sa(?: |$)(.*)")
async def lastname(steal):
if steal.fwd_from:
return
if not steal.reply_to_msg_id:
await steal.edit("```Mohon Reply Ke Pesan Pengguna Yang Ingin Anda Scan Yang Mulia.```")
return
message = await steal.get_reply_message()
chat = "@SangMataInfo_bot"
user_id = message.sender.id
id = f"/search_id {user_id}"
if message.sender.bot:
await steal.edit("```Reply Ke Pesan Pengguna Yang Ingin Di Scann.```")
return
await steal.edit("__C__")
await steal.edit("__Co__")
await steal.edit("__Con__")
await steal.edit("__Conn__")
await steal.edit("__Conne__")
await steal.edit("__Connec__")
await steal.edit("__Connect__")
await steal.edit("__Connecti__")
await steal.edit("__Connectin__")
await steal.edit("__Connecting__")
await steal.edit("__Connecting t__")
await steal.edit("__Connecting to__")
await steal.edit("__Connecting to s__")
await steal.edit("__Connecting to se__")
await steal.edit("__Connecting to ser__")
await steal.edit("__Connecting to serv__")
await steal.edit("__Connecting to serve__")
await steal.edit("__Connecting to server__")
await steal.edit("__Connecting to server.__")
await steal.edit("__Connecting to server..__")
await steal.edit("__Connecting to server...__")
try:
async with bot.conversation(chat) as conv:
try:
msg = await conv.send_message(id)
r = await conv.get_response()
response = await conv.get_response()
except YouBlockedUserError:
await steal.reply(
"```Yang Mulia, Mohon Unblock @sangmatainfo_bot Dan Coba Scan Kembali.```"
)
return
if r.text.startswith("Name"):
respond = await conv.get_response()
await steal.edit(f"`{r.message}`")
await steal.client.delete_messages(
conv.chat_id, [msg.id, r.id, response.id, respond.id]
)
return
if response.text.startswith("No records") or r.text.startswith(
"No records"
):
await steal.edit("```Saya Tidak Menemukan Informasi Pergantian Nama Ini Yang Mulia, Orang Ini Belum Pernah Mengganti Nama Sebelumnya```")
await steal.client.delete_messages(
conv.chat_id, [msg.id, r.id, response.id]
)
return
else:
respond = await conv.get_response()
await steal.edit(f"```{response.message}```")
await steal.client.delete_messages(
conv.chat_id, [msg.id, r.id, response.id, respond.id]
)
except TimeoutError:
        return await steal.edit("`I am feeling unwell, Your Majesty, my apologies.`")
CMD_HELP.update({
"sangmata":
"⚡𝘾𝙈𝘿⚡: `.sa`\
    \n↳ : Get the name history of the scanned user."
})
|
StarcoderdataPython
|
87672
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-29 20:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('animals', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='animal',
name='lived_with_animal_types',
field=models.CharField(blank=True, max_length=200),
),
migrations.AlterField(
model_name='animal',
name='lived_with_animals',
field=models.CharField(default='Unknown', max_length=10),
),
migrations.AlterField(
model_name='animal',
name='lived_with_kids',
field=models.CharField(default='Unknown', max_length=10),
),
migrations.AlterField(
model_name='animal',
name='special_needs',
field=models.CharField(blank=True, max_length=100),
),
]
|
StarcoderdataPython
|
137563
|
<reponame>ajrox090/VaRA-Tool-Suite<filename>varats/varats/data/reports/commit_report.py<gh_stars>0
"""Data wrappers for commit reports generated by VaRA."""
import logging
import typing as tp
from pathlib import Path
import pandas as pd
import yaml
from varats.base.version_header import VersionHeader
from varats.mapping.commit_map import CommitMap
from varats.report.report import BaseReport, FileStatusExtension, ReportFilename
from varats.utils.git_util import ShortCommitHash, FullCommitHash
LOG = logging.getLogger(__name__)
class FunctionInfo():
"""Encapsulates the information gathered for a single functions."""
def __init__(self, raw_yaml: tp.Dict[str, tp.Any]) -> None:
self.__name = str(raw_yaml['function-name'])
self.__id = str(raw_yaml['id'])
self.__region_id = str(raw_yaml['region-id'])
@property
def name(self) -> str:
"""Name of the function."""
return self.__name
@property
def id(self) -> str:
"""Unique ID of the function info."""
return self.__id
@property
def region_id(self) -> str:
"""ID of the region."""
return self.__region_id
def __str__(self) -> str:
return "{} ({}): {}".format(self.name, self.id, self.region_id)
class RegionMapping():
"""Mapping from region ID to commit hash."""
def __init__(self, raw_yaml: tp.Dict[str, tp.Any]) -> None:
self.id = str(raw_yaml['id'])
self.hash = FullCommitHash(str(raw_yaml['hash']))
def __str__(self) -> str:
return "{} = {}".format(self.id, self.hash.hash)
class RegionToFunctionEdge():
"""Graph edge to connect regions and function data."""
def __init__(self, from_region: str, to_function: str) -> None:
self._from = from_region
self._to = to_function
@property
def region(self) -> str:
return self._from
@property
def function(self) -> str:
return self._to
def __str__(self) -> str:
return "{} -> {}".format(self._from, self._to)
class RegionToRegionEdge():
"""Graph edge to interconnect regions."""
def __init__(self, raw_yaml: tp.Dict[str, tp.Any]) -> None:
self._from = str(raw_yaml['from'])
self._to = str(raw_yaml['to'])
def __str__(self) -> str:
return "{} -> {}".format(self._from, self._to)
@property
def edge_from(self) -> str:
return self._from
@property
def edge_to(self) -> str:
return self._to
class FunctionGraphEdges():
"""A graph like structure that represents the connections between
``FunctionInfo``s."""
def __init__(self, raw_yaml: tp.Dict[str, tp.Any]) -> None:
self.fid = raw_yaml['function-id']
self.cg_edges: tp.List[RegionToFunctionEdge] = []
cg_edges = raw_yaml['call-graph-edges']
if cg_edges is not None:
for edge in cg_edges:
for callee in edge['to-functions']:
self.cg_edges.append(
RegionToFunctionEdge(edge['from-region'], callee)
)
self.cf_edges: tp.List[RegionToRegionEdge] = []
cf_edges = raw_yaml['control-flow-edges']
if cf_edges is not None:
for edge in cf_edges:
self.cf_edges.append(RegionToRegionEdge(edge))
self.df_relations: tp.List[RegionToRegionEdge] = []
df_edges = raw_yaml['data-flow-relations']
if df_edges is not None:
for edge in df_edges:
self.df_relations.append(RegionToRegionEdge(edge))
def __str__(self) -> str:
repr_str = "FName: {}:\n\t CG-Edges [".format(self.fid)
sep = ""
for cg_edge in self.cg_edges:
repr_str += sep + str(cg_edge)
sep = ", "
repr_str += "]"
repr_str += "\n\t CF-Edges ["
sep = ""
for cf_edge in self.cf_edges:
repr_str += sep + str(cf_edge)
sep = ", "
repr_str += "]"
return repr_str
class CommitReport(BaseReport):
"""Data class that gives access to a loaded commit report."""
SHORTHAND = "CR"
FILE_TYPE = "yaml"
def __init__(self, path: Path) -> None:
super().__init__(path)
with open(path, "r") as stream:
documents = yaml.load_all(stream, Loader=yaml.CLoader)
version_header = VersionHeader(next(documents))
version_header.raise_if_not_type("CommitReport")
version_header.raise_if_version_is_less_than(3)
raw_infos = next(documents)
self.finfos: tp.Dict[str, FunctionInfo] = {}
for raw_finfo in raw_infos['function-info']:
finfo = FunctionInfo(raw_finfo)
self.finfos[finfo.name] = finfo
self.region_mappings: tp.Dict[str, RegionMapping] = {}
raw_region_mapping = raw_infos['region-mapping']
if raw_region_mapping is not None:
for raw_r_mapping in raw_region_mapping:
r_mapping = RegionMapping(raw_r_mapping)
self.region_mappings[r_mapping.id] = r_mapping
gedges = next(documents)
self.graph_info: tp.Dict[str, FunctionGraphEdges] = {}
for raw_fg_edge in gedges:
f_edge = FunctionGraphEdges(raw_fg_edge)
self.graph_info[f_edge.fid] = f_edge
@property
def head_commit(self) -> ShortCommitHash:
"""The current HEAD commit under which this CommitReport was created."""
return self.filename.commit_hash
@classmethod
def shorthand(cls) -> str:
"""Shorthand for this report."""
return cls.SHORTHAND
@staticmethod
def get_file_name(
project_name: str,
binary_name: str,
project_version: str,
project_uuid: str,
extension_type: FileStatusExtension,
file_ext: str = "yaml"
) -> str:
"""
Generates a filename for a commit report with 'yaml' as file extension.
Args:
project_name: name of the project for which the report was generated
binary_name: name of the binary for which the report was generated
project_version: version of the analyzed project, i.e., commit hash
project_uuid: benchbuild uuid for the experiment run
extension_type: to specify the status of the generated report
file_ext: file extension of the report file
Returns:
            name for the report file that can later be uniquely identified
"""
return ReportFilename.get_file_name(
CommitReport.SHORTHAND, project_name, binary_name, project_version,
project_uuid, extension_type, file_ext
)
def calc_max_cf_edges(self) -> int:
"""Calculate the highest amount of control-flow interactions of a single
commit region."""
cf_map: tp.Dict[str, tp.List[int]] = {}
self.init_cf_map_with_edges(cf_map)
total = 0
for from_to_pair in cf_map.values():
total = max(max(from_to_pair[0], from_to_pair[1]), total)
return total
def calc_max_df_edges(self) -> int:
"""Calculate the highest amount of data-flow interactions of a single
commit region."""
df_map: tp.Dict[str, tp.List[int]] = {}
self.init_df_map_with_edges(df_map)
total = 0
for from_to_pair in df_map.values():
total = max(max(from_to_pair[0], from_to_pair[1]), total)
return total
def __str__(self) -> str:
return "FInfo:\n\t{}\nRegionMappings:\n\t{}\n" \
.format(self.finfos.keys(), self.region_mappings.keys())
def __repr__(self) -> str:
return "CR: " + self.path.name
def __lt__(self, other: 'CommitReport') -> bool:
return self.path < other.path
def init_cf_map_with_edges(
self, cf_map: tp.Dict[str, tp.List[int]]
) -> None:
"""
Initialize control-flow map with edges and from/to counters.
Args:
            cf_map: control-flow map to initialize (region id -> [from, to] edge counters)
"""
# if any information is missing add all from the original
# report to avoid errors.
for reg_mapping in self.region_mappings.values():
cf_map[reg_mapping.id] = [0, 0]
for func_g_edge in self.graph_info.values():
for cf_edge in func_g_edge.cf_edges:
cf_map[cf_edge.edge_from][0] += 1
cf_map[cf_edge.edge_to][1] += 1
def number_of_cf_interactions(self) -> int:
"""Total number of found control-flow interactions."""
cf_map: tp.Dict[str, tp.List[int]] = {}
self.init_cf_map_with_edges(cf_map)
total_interactions = 0
for interaction_tuple in cf_map.values():
total_interactions += interaction_tuple[0]
return total_interactions
def number_of_head_cf_interactions(self) -> tp.Tuple[int, int]:
"""
The number of control-flow interactions the HEAD commit has with other
commits.
Returns:
tuple (incoming_head_interactions, outgoing_head_interactions)
"""
cf_map: tp.Dict[str, tp.List[int]] = {}
self.init_cf_map_with_edges(cf_map)
for key, value in cf_map.items():
if key.startswith(self.head_commit.hash):
interaction_tuple = value
return (interaction_tuple[0], interaction_tuple[1])
return (0, 0)
def init_df_map_with_edges(
self, df_map: tp.Dict[str, tp.List[int]]
) -> None:
"""
Initialize data-flow map with edges and from/to counters.
        Args:
            df_map: data-flow map to initialize (region id -> [from, to] edge counters)
"""
# if any information is missing add all from the original report
# to avoid errors.
for reg_mapping in self.region_mappings.values():
df_map[reg_mapping.id] = [0, 0]
for func_g_edge in self.graph_info.values():
for df_edge in func_g_edge.df_relations:
df_map[df_edge.edge_from][0] += 1
df_map[df_edge.edge_to][1] += 1
def number_of_df_interactions(self) -> int:
"""Total number of found data-flow interactions."""
df_map: tp.Dict[str, tp.List[int]] = {}
self.init_df_map_with_edges(df_map)
total_interactions = 0
for interaction_tuple in df_map.values():
total_interactions += interaction_tuple[0]
return total_interactions
def number_of_head_df_interactions(self) -> tp.Tuple[int, int]:
"""The number of control-flow interactions the HEAD commit has with
other commits."""
df_map: tp.Dict[str, tp.List[int]] = {}
self.init_df_map_with_edges(df_map)
for key, value in df_map.items():
if key.startswith(self.head_commit.hash):
interaction_tuple = value
return (interaction_tuple[0], interaction_tuple[1])
return (0, 0)
class CommitReportMeta():
"""Meta report class that combines the data of multiple reports, comming
from different revisions, into one."""
def __init__(self) -> None:
self.finfos: tp.Dict[str, FunctionInfo] = {}
self.region_mappings: tp.Dict[str, RegionMapping] = {}
self.__cf_ylimit = 0
self.__df_ylimit = 0
def merge(self, commit_report: CommitReport) -> None:
"""
Merge data from commit report into CommitReportMeta.
Args:
commit_report: new report that will be added to the meta report
"""
self.finfos.update(commit_report.finfos)
self.region_mappings.update(commit_report.region_mappings)
self.__cf_ylimit = max(
self.__cf_ylimit, commit_report.calc_max_cf_edges()
)
self.__df_ylimit = max(
self.__df_ylimit, commit_report.calc_max_df_edges()
)
@property
def cf_ylimit(self) -> int:
return self.__cf_ylimit
@property
def df_ylimit(self) -> int:
return self.__df_ylimit
def __str__(self) -> str:
return "FInfo:\n\t{}\nRegionMappings:\n\t{}\n" \
.format(self.finfos.keys(), self.region_mappings.keys())
###############################################################################
# Connection Generators
###############################################################################
def generate_inout_cfg_cf(
commit_report: CommitReport,
cr_meta: tp.Optional[CommitReportMeta] = None
) -> pd.DataFrame:
"""
Generates a pandas dataframe that contains the commit region control-flow
interaction information.
Args:
commit_report: report containing the commit data
cr_meta: the meta commit report, if available
"""
cf_map = {} # RM -> [from, to]
# Add all from meta commit report and ...
if cr_meta is not None:
for reg_mapping in cr_meta.region_mappings.values():
cf_map[reg_mapping.id] = [0, 0]
commit_report.init_cf_map_with_edges(cf_map)
rows = []
for item in cf_map.items():
total = item[1][0] + item[1][1]
rows.append([item[0], item[1][0], "From", total])
rows.append([item[0], item[1][1], "To", total])
rows.sort(
key=lambda row:
(row[0], -tp.cast(int, row[3]), -tp.cast(int, row[1]), row[2])
)
return pd.DataFrame(
rows, columns=['Region', 'Amount', 'Direction', 'TSort']
)
def generate_interactions(
commit_report: CommitReport, c_map: CommitMap
) -> tp.Tuple[pd.DataFrame, pd.DataFrame]:
"""
Converts the commit analysis interaction data from a ``CommitReport`` into a
pandas data frame for plotting.
Args:
commit_report: the report
c_map: commit map for mapping commits to unique IDs
"""
node_rows = []
for item in commit_report.region_mappings.values():
node_rows.append([item.hash, c_map.time_id(item.hash)])
node_rows.sort(key=lambda row: int(tp.cast(int, row[1])), reverse=True)
nodes = pd.DataFrame(node_rows, columns=['hash', 'id'])
link_rows = []
for func_g_edge in commit_report.graph_info.values():
for cf_edge in func_g_edge.cf_edges:
link_rows.append([
cf_edge.edge_from, cf_edge.edge_to, 1,
c_map.time_id(FullCommitHash(cf_edge.edge_from))
])
links = pd.DataFrame(
link_rows, columns=['source', 'target', 'value', 'src_id']
)
return (nodes, links)
def generate_inout_cfg_df(
commit_report: CommitReport,
cr_meta: tp.Optional[CommitReportMeta] = None
) -> pd.DataFrame:
"""
Generates a pandas dataframe that contains the commit region data-flow
interaction information.
Args:
commit_report: report containing the commit data
cr_meta: the meta commit report, if available
"""
df_map = {} # RM -> [from, to]
# Add all from meta commit report and ...
if cr_meta is not None:
for reg_mapping in cr_meta.region_mappings.values():
df_map[reg_mapping.id] = [0, 0]
commit_report.init_df_map_with_edges(df_map)
rows = []
for item in df_map.items():
total = item[1][0] + item[1][1]
rows.append([item[0], item[1][0], "From", total])
rows.append([item[0], item[1][1], "To", total])
rows.sort(
key=lambda row:
(row[0], -tp.cast(int, row[3]), -tp.cast(int, row[1]), row[2])
)
return pd.DataFrame(
rows, columns=['Region', 'Amount', 'Direction', 'TSort']
)
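# --- Usage sketch (added; illustrative only) ---
# Assuming a VaRA-generated commit report exists on disk (the path below is
# hypothetical), the generators above turn it into plot-ready data frames:
#
#   report = CommitReport(Path("CRExample.yaml"))
#   cf_df = generate_inout_cfg_cf(report)   # control-flow in/out counts per region
#   df_df = generate_inout_cfg_df(report)   # data-flow in/out counts per region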
|
StarcoderdataPython
|
79468
|
<reponame>SidneyAn/nfv<filename>nfv/nfv-vim/nfv_vim/nfvi/objects/v1/_guest_service.py
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import six
from nfv_common.helpers import Constant
from nfv_common.helpers import Constants
from nfv_common.helpers import Singleton
from nfv_vim.nfvi.objects.v1._object import ObjectData
@six.add_metaclass(Singleton)
class GuestServiceNames(Constants):
"""
Guest Service Name Constants
"""
UNKNOWN = Constant('unknown')
HEARTBEAT = Constant('heartbeat')
@six.add_metaclass(Singleton)
class GuestServiceAdministrativeState(Constants):
"""
Guest Service Administrative State Constants
"""
UNKNOWN = Constant('unknown')
LOCKED = Constant('locked')
UNLOCKED = Constant('unlocked')
@six.add_metaclass(Singleton)
class GuestServiceOperationalState(Constants):
"""
Guest Service Operational State Constants
"""
UNKNOWN = Constant('unknown')
ENABLED = Constant('enabled')
DISABLED = Constant('disabled')
# Guest Service Constant Instantiation
GUEST_SERVICE_NAME = GuestServiceNames()
GUEST_SERVICE_ADMIN_STATE = GuestServiceAdministrativeState()
GUEST_SERVICE_OPER_STATE = GuestServiceOperationalState()
class GuestService(ObjectData):
"""
NFVI Guest Service Object
"""
def __init__(self, name, admin_state, oper_state, restart_timeout=None):
super(GuestService, self).__init__('1.0.0')
self.update(dict(name=name, admin_state=admin_state,
oper_state=oper_state,
restart_timeout=restart_timeout))
def as_dict(self):
"""
Represent Guest Service data object as dictionary
"""
data = dict()
data['name'] = self.name
data['admin_state'] = self.admin_state
data['oper_state'] = self.oper_state
if self.restart_timeout is not None:
data['restart_timeout'] = self.restart_timeout
return data
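# Minimal usage sketch (added for illustration; field values are hypothetical and
# assume ObjectData.update exposes the fields as attributes, as as_dict implies):
#
#   svc = GuestService(GUEST_SERVICE_NAME.HEARTBEAT,
#                      GUEST_SERVICE_ADMIN_STATE.UNLOCKED,
#                      GUEST_SERVICE_OPER_STATE.ENABLED,
#                      restart_timeout=30)
#   svc.as_dict()  # -> {'name': 'heartbeat', 'admin_state': 'unlocked', ...}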
|
StarcoderdataPython
|
1623371
|
# -*- coding: utf-8 -*-
"""
Provide a wrapper and helpers for AWS boto library.
See https://boto3.readthedocs.io/en/latest/ for API details
"""
import logging
import os
import boto3
import re
import toil.provider.base
import toil
from botocore.client import Config
logger = logging.getLogger(__name__)
class AwsLib(toil.provider.base.BaseProvider):
"""
library for AWS functionality.
"""
def __init__(self, toil, config):
super(AwsLib, self).__init__(toil, config)
def session(self, profile='default'):
"""
Create an AWS session.
Args:
profile (): the profile defined in config to use
Returns:
boto3.session.Session
"""
if profile in self.config:
self.configure_proxy()
# create a session
return AwsSession(self._toil, profile, self.config[profile])
else:
raise toil.CloudException(
"profile '{profile}' not defined in config {config}".format(profile=profile, config=self.config))
class AwsSession(object):
"""
provide aws api access
"""
def __init__(self, toil, profile, config):
# self._profile = profile
self._config = config
self._toil = toil
def sts_client(self):
"""
Create an aws sts client
Args:
session (): an aws session
Returns:
boto3.session.client
"""
session = boto3.session.Session(
region_name=self._config['region'],
aws_access_key_id=self._config['access_key_id'],
aws_secret_access_key=self._config['secret_access_key'],
)
return session.client(service_name='sts')
def assume_role(self, sts_client, **kwargs):
"""
Use sts to assume an AWS role.
Returns a set of temporary security credentials (consisting of an access key ID, a secret access key, and a
security token) that you can use to access AWS resources.
Args:
sts_client (): an aws sts client
profile (): the profile defined in config to use
Returns: dict
"""
sts_credentials = sts_client.assume_role(
RoleArn=self._config['role_arn'],
RoleSessionName=self._config['role_session_name']
, **kwargs)
return sts_credentials
def client(self, client_type, **kwargs):
"""
Create a client.
Args:
client_type (): the type of aws service
self._profile (): the self._profile defined in config to use
Returns:
A low-level client representing an AWS service. Uses sts as defined in the config self._profile.
"""
session = boto3.session.Session(
region_name=self._config['region'],
aws_access_key_id=self._config['access_key_id'],
aws_secret_access_key=self._config['secret_access_key'],
)
if 'role_arn' in self._config and self._config['role_arn'] != "":
sts_client = self.sts_client()
sts_credentials = self.assume_role(sts_client)
return session.client(client_type,
aws_access_key_id=sts_credentials['Credentials']['AccessKeyId'],
aws_secret_access_key=sts_credentials['Credentials']['SecretAccessKey'],
aws_session_token=sts_credentials['Credentials']['SessionToken'],
config=Config(signature_version='s3v4'),
**kwargs
)
else:
return session.client(client_type, **kwargs)
def resource(self, resource_type, **kwargs):
"""
Create a resource.
Args:
resource_type (): the type of aws resource
profile (): the profile defined in config to use
Returns:
A low-level client representing an AWS service. Uses sts as defined in the config profile.
"""
session = boto3.session.Session(
region_name=self._config['region'],
aws_access_key_id=self._config['access_key_id'],
aws_secret_access_key=self._config['secret_access_key'],
)
if 'role_arn' in self._config and self._config['role_arn'] != "":
sts_client = self.sts_client()
sts_credentials = self.assume_role(sts_client)
return session.resource(resource_type,
aws_access_key_id=sts_credentials['Credentials']['AccessKeyId'],
aws_secret_access_key=sts_credentials['Credentials']['SecretAccessKey'],
aws_session_token=sts_credentials['Credentials']['SessionToken'],
config=Config(signature_version='s3v4'),
**kwargs
)
else:
            return session.resource(resource_type, **kwargs)
def _upload_file_to_s3(self, resource, bucket_name, object_key, file_path, ssekms_key_id=None):
"""
Upload a file to s3
        This method is private and should only be used within this class.
Args:
resource (): aws resource
bucket_name (): the bucket the file is going to
object_key (): the key path for the file
file_path (): the path for the local file to upload
ssekms_key_id (): the server side encryption key
Returns:
Nothing.
"""
if ssekms_key_id is not None:
data = open(file_path, 'rb')
logger.debug(
'upload {file_path} to {bucketName}{objectKey} using {ssekms_key_id}'.format(file_path=file_path,
bucketName=bucket_name,
objectKey=object_key,
ssekms_key_id=ssekms_key_id))
result = resource.Bucket(bucket_name).put_object(ServerSideEncryption='aws:kms', Key=object_key, Body=data,
SSEKMSKeyId=ssekms_key_id)
logger.debug(result)
else:
data = open(file_path, 'rb')
logger.debug(
'upload {file_path} to {bucketName}{objectKey} using {ssekms_key_id}'.format(file_path=file_path,
bucketName=bucket_name,
objectKey=object_key,
ssekms_key_id=ssekms_key_id))
result = resource.Bucket(bucket_name).put_object(Key=object_key, Body=data)
logger.debug(result)
# logger.debug('upload {file_path} to {bucketName}{objectKey}'.format(file_path=file_path, bucketName=bucket_name, objectKey=object_key))
# result = resource.Bucket(bucket_name).upload_file(Filename=file_path, Key=object_key)
# logger.debug(result)
def _aws_file_upload_handler(self, real_file_path, file_path, file_name, **kwargs):
"""
A helper method to upload a file to aws.
        This method is private and should only be used within this class.
Args:
real_file_path (): path to local file
file_path (): the path to use in aws
file_name (): the local file name
**kwargs ():
Returns:
Nothing
"""
bucket = kwargs.get('bucket', None)
if bucket is None:
raise TypeError('bucket argument is required')
resource = kwargs.get('resource', None)
if resource is None:
raise TypeError('resource argument is required')
ssekms_key_id = kwargs.get('ssekms_key_id', None)
object_key = (os.path.relpath(file_path, '/') + '/' + file_name).replace('\\', '/')
if not object_key.startswith('/'):
object_key = '/' + object_key
remove_from_key = kwargs.get('remove_from_key', None)
if remove_from_key is not None:
object_key = object_key.replace(remove_from_key, '')
prefix = kwargs.get('prefix', '')
if prefix is None:
prefix = ''
if prefix != '' and not prefix.endswith('/'):
prefix += '/'
object_key = prefix + object_key
# object_key = prefix + (file_name).replace('\\', '/')
real_file_path = real_file_path.replace('\\', '/')
self._upload_file_to_s3(resource, bucket, object_key, real_file_path, ssekms_key_id)
def upload_to_s3(self, bucket, local_path, prefix=None, ssekms_key_id=None):
"""
Upload a local directory to AWS. Uses the cloud helper process_dir and passes a function handler.
Args:
bucket (): aws bucket name
local_path (): local directory path
prefix (): prefix for aws
ssekms_key_id (): aws encryption key
profile (): the profile to use defined in config.
Returns:
Nothing
"""
resource = self.resource('s3', verify=False)
self._toil.traverse_dir(local_path, self._aws_file_upload_handler, resource=resource, bucket=bucket,
local_path=local_path, prefix=prefix, ssekms_key_id=ssekms_key_id,
remove_from_key=local_path)
def download_from_s3(self, bucket_name, key, file_path, ssekms_key_id=None):
"""
Download a file from aws s3.
Args:
bucket_name (): aws bucket name
key (): the key to file in aws
file_path (): local file path and name
ssekms_key_id (): aws encryption key
profile (): the profile to use defined in config
Returns:
Nothing
"""
resource = self.resource('s3', verify=False)
result = resource.Bucket(bucket_name).objects.filter(Prefix=key)
for s3_obj in result:
# resource.Bucket(bucket_name).download_file
logger.debug(s3_obj.key)
local_key = s3_obj.key
local_key = re.sub('[:]', "_colon_", local_key)
# local_key = re.sub('[^a-zA-Z0-9\n\._-]', "", local_key)
local_path = file_path + '/' + local_key
local_dir_name = os.path.dirname(local_path)
try:
if not os.path.exists(local_dir_name):
os.makedirs(local_dir_name)
if not (local_path.endswith('/')):
resource.Bucket(bucket_name).download_file(s3_obj.key, local_path)
except Exception as ex:
logging.error(ex)
|
StarcoderdataPython
|
1660249
|
from unittest.mock import patch
import pytest
from exco.extractor import Validator
from exco.extractor.validator.built_in.is_not_blank_validator import IsNotBlankValidator
from exco.extractor.validator.built_in.value_validator import ValueValidator
from exco.extractor.validator.validation_result import ValidationResult
def test_is_not_blank_validator():
validator = IsNotBlankValidator()
assert validator.validate('a') == ValidationResult.bad(
msg=f'Fail a fail validation of {str(validator)}')
assert validator.validate('') == ValidationResult.good()
@patch.multiple(ValueValidator, __abstractmethods__=set())
def test_value_validator_abstract():
with pytest.raises(NotImplementedError):
vv = ValueValidator()
vv.validate_value("a")
@patch.multiple(Validator, __abstractmethods__=set())
def test_validator_abstract():
with pytest.raises(NotImplementedError):
vv = Validator()
vv.validate(value="a")
|
StarcoderdataPython
|
4832467
|
import cv2
import numpy as np
#Read a Video Stream and Display It
#Camera Object
cam = cv2.VideoCapture(0)
face_cascade = cv2.CascadeClassifier("haarcascade_frontalface_alt.xml")
face_data= []
cnt =0
user_name = input("Enter your name")
while True:
ret,frame = cam.read()
if ret==False:
print("Something Went Wrong!")
continue
key_pressed = cv2.waitKey(1) & 0xFF
if key_pressed == ord('q'):
break
    faces = face_cascade.detectMultiScale(frame,1.3,5) # faces is a list of (x, y, w, h) tuples, one per detected face, e.g. [(255, 283, 32, 32)]
    faces = sorted(faces , key = lambda f : f[2]*f[3]) # sort by area (w*h) so that only the largest face is kept when several faces appear
if (len(faces)==0):
continue
    # Pick the largest face (because it has the biggest area f[2]*f[3] after sorting)
for face in faces[-1:]:
x,y,w,h = face
cv2.rectangle(frame,(x,y),(x+w,y+h),(0,255, 255),2)
        # Extract (crop) the region of interest around the detected face
face_section = frame[y-10:y+h+10, x-10: x+w+10]
face_section = cv2.resize(face_section, (100,100))
        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 255), 2)
if cnt%10 == 0:
print("Taking pictures",int(cnt/10))
face_data.append(face_section)
cnt+=1
cv2.imshow("Video", frame)
cv2.imshow("Video gray", face_section)
# save the data in the numpy file
print("Total faces",len(face_data))
face_data = np.array(face_data)
face_data = face_data.reshape([face_data.shape[0],-1])
print(face_data.shape)
np.save("FaceData/" + user_name+ ".npy",face_data) # it save the image into array form so that we can use it later
print(face_data.shape)
cam.release()
cv2.destroyAllWindows()
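# Note (added): the samples saved above can later be reloaded for training a
# recognizer, e.g.:
#   stored = np.load("FaceData/" + user_name + ".npy")
#   print(stored.shape)  # (num_samples, 30000), since each 100x100 BGR crop is flattened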
|
StarcoderdataPython
|
76754
|
<reponame>joefinlon/Finlon_et_al_2021_DFR<filename>dfr_enhancement.py
'''
Contains a routine to find regions of enhanced DFR based on matched DFR_Ku-Ka using
a peak prominence method.
Copyright <NAME>, Univ. of Washington, 2022.
'''
import numpy as np
from scipy.signal import find_peaks, peak_prominences, peak_widths
from skimage.measure import label
def find_regions(matched_object, dfr, method='prominances', min_dfr=None, min_prom=2., rel_height=0.4):
'''
Inputs:
matched_object: Dictionary created from matcher routine
dfr: Masked array of DFR values computed from matched_object
method: Method for determining enhanced DFR regions/periods ('prominances')
min_dfr: Minimum DFR to consider for ID scheme (not used for 'prominances' method)
min_prom: Minimum prominance needed to consider DFR peaks (float)
rel_height: Relative height at which the peak width is measured as a percentage of its prominence (float between 0 and 1)
'''
regions_object = {}
peaks = np.array([], dtype='int'); prominences = np.array([]); width_heights = np.array([])
durations_p3 = np.array([]); durations_er2 = np.array([])
peak_starts_p3 = np.array([], dtype='datetime64[ns]'); peak_ends_p3 = np.array([], dtype='datetime64[ns]')
peak_starts_er2 = np.array([], dtype='datetime64[ns]'); peak_ends_er2 = np.array([], dtype='datetime64[ns]')
peak_count = 0
    labels = label(~dfr.mask) # find contiguous regions/periods where valid (not masked) DFR values exist (peak ID is more robust this way)
for labelnum in range(1, len(np.unique(labels))+1):
peaks_temp, _ = find_peaks(dfr[labels==labelnum])
if len(peaks_temp)>0:
prominences_temp = peak_prominences(dfr[labels==labelnum], peaks_temp, wlen=None); prominences_temp = prominences_temp[0]
peaks_temp = peaks_temp[prominences_temp>=min_prom]; prominences_temp = prominences_temp[prominences_temp>=min_prom] # trim peaks and prominences
widths_temp = peak_widths(dfr[labels==labelnum], peaks_temp, rel_height=rel_height)
for peaknum in range(len(widths_temp[0])): # loop through each peak to get peak width start/end periods
peak_count += 1
width_heights = np.append(width_heights, widths_temp[1][peaknum])
peak_start_er2 = matched_object['matched']['time_rad']['data'][int(np.where(labels==labelnum)[0][0]+np.floor(widths_temp[2][peaknum]))]
peak_end_er2 = matched_object['matched']['time_rad']['data'][int(np.where(labels==labelnum)[0][0]+np.ceil(widths_temp[3][peaknum]))]
peak_start_p3 = matched_object['matched']['time_p3']['data'][int(np.where(labels==labelnum)[0][0]+np.floor(widths_temp[2][peaknum]))]
peak_end_p3 = matched_object['matched']['time_p3']['data'][int(np.where(labels==labelnum)[0][0]+np.ceil(widths_temp[3][peaknum]))]
if peak_end_er2<peak_start_er2: # fixes rare instance where peak end needs to be shortened (no matched data after this time)
peak_end_er2 = matched_object['matched']['time_rad']['data'][int(np.where(labels==labelnum)[0][0]+np.floor(widths_temp[3][peaknum]))]
peak_end_p3 = matched_object['matched']['time_p3']['data'][int(np.where(labels==labelnum)[0][0]+np.floor(widths_temp[3][peaknum]))]
durations_p3 = np.append(durations_p3, (peak_end_p3-peak_start_p3)/np.timedelta64(1,'s'))
durations_er2 = np.append(durations_er2, (peak_end_er2-peak_start_er2)/np.timedelta64(1,'s'))
print(' Peak #{} from {} - {} ({} sec)'.format(peak_count, peak_start_p3, peak_end_p3, durations_p3[-1]))
peak_starts_p3 = np.append(peak_starts_p3, peak_start_p3); peak_ends_p3 = np.append(peak_ends_p3, peak_end_p3)
peak_starts_er2 = np.append(peak_starts_er2, peak_start_er2); peak_ends_er2 = np.append(peak_ends_er2, peak_end_er2)
peaks = np.append(peaks, np.where(labels==labelnum)[0][0]+peaks_temp)
prominences = np.append(prominences, prominences_temp)
# Construct the object
regions_object['peak_start_p3'] = peak_starts_p3; regions_object['peak_end_p3'] = peak_ends_p3
regions_object['peak_start_er2'] = peak_starts_er2; regions_object['peak_end_er2'] = peak_ends_er2
regions_object['width_height'] = width_heights # height of the contour lines at which the widths where evaluated
regions_object['peak_index'] = peaks; regions_object['peak_value'] = dfr[peaks]; regions_object['peak_prominence'] = prominences
regions_object['duration_p3'] = durations_p3; regions_object['duration_er2'] = durations_er2
regions_object['stats'] = {}
regions_object['stats']['num_regions'] = peak_count
regions_object['stats']['mean_duration_p3'] = np.sum(durations_p3) / peak_count
regions_object['stats']['mean_duration_er2'] = np.sum(durations_er2) / peak_count
return regions_object
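# --- Toy illustration (added) of the prominence-based peak picking used above,
# run on synthetic data rather than matched ER-2/P-3 observations ---
if __name__ == '__main__':
    toy_dfr = np.array([0., 1., 3., 1., 0., 0., 2., 6., 2., 0.])
    toy_peaks, _ = find_peaks(toy_dfr)
    toy_proms = peak_prominences(toy_dfr, toy_peaks)[0]
    toy_widths = peak_widths(toy_dfr, toy_peaks, rel_height=0.4)
    print(toy_peaks, toy_proms, toy_widths[0])  # peak indices [2 7], prominences [3. 6.], widths in samples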
|
StarcoderdataPython
|
3349030
|
<reponame>clefever/aoc2019
from collections import defaultdict
import adventofcode
def part1(codes):
prog = defaultdict(int, zip(range(len(codes)), codes))
output = run_program(prog, [])
x, y = 0,0
grid = defaultdict(int)
for char in output[1]:
if char == 10:
y += 1
x = 0
continue
grid[(x,y)] = chr(char)
x += 1
params = alignment_parameters(grid)
total = sum(p[0]*p[1] for p in params)
return total
def part2(codes):
    # TODO: Calculate programmatically
prog = defaultdict(int, zip(range(len(codes)), codes))
prog[0] = 2
inputs = [
65, 44, 66, 44, 66, 44, 65, 44, 67, 44, 66, 44, 67, 44, 67, 44, 66, 44, 65, 10,
82, 44, 49, 48, 44, 82, 44, 56, 44, 76, 44, 49, 48, 44, 76, 44, 49, 48, 10,
82, 44, 56, 44, 76, 44, 54, 44, 76, 44, 54, 10,
76, 44, 49, 48, 44, 82, 44, 49, 48, 44, 76, 44, 54, 10,
110, 10
]
output = run_program(prog, inputs)
return output[1][-1]
def print_grid(grid):
left = min([key[0] for key in grid.keys()])
right = max([key[0] for key in grid.keys()])
bottom = max([key[1] for key in grid.keys()])
top = min([key[1] for key in grid.keys()])
for y in range(top, bottom+1):
for x in range(left, right+1):
print(grid[(x,y)], end=" ")
print()
def alignment_parameters(grid):
left = min([key[0] for key in grid.keys()])
right = max([key[0] for key in grid.keys()])
bottom = max([key[1] for key in grid.keys()])
top = min([key[1] for key in grid.keys()])
param_list = []
for y in range(top+1, bottom):
for x in range(left+1, right):
if grid[(x,y)] == '#' and grid[(x-1,y)] == '#' and grid[(x+1,y)] == '#' and grid[(x,y-1)] == '#' and grid[(x,y+1)] == '#':
param_list.append((x,y))
return param_list
def run_program(codes, prog_input, ip = 0, relative_base = 0):
outputs = []
prog = codes.copy()
while prog[ip] != 99:
code_and_modes = get_opcode_and_modes(prog[ip])
if code_and_modes[0] == 1:
params = get_parameters(2, code_and_modes, prog, ip, relative_base)
write_loc = get_write_location(3, code_and_modes, prog, ip, relative_base)
prog[write_loc] = params[0] + params[1]
ip += 4
elif code_and_modes[0] == 2:
params = get_parameters(2, code_and_modes, prog, ip, relative_base)
write_loc = get_write_location(3, code_and_modes, prog, ip, relative_base)
prog[write_loc] = params[0] * params[1]
ip += 4
elif code_and_modes[0] == 3:
if len(prog_input) == 0:
return (prog, outputs, ip, relative_base)
write_loc = get_write_location(1, code_and_modes, prog, ip, relative_base)
prog[write_loc] = prog_input.pop(0)
ip += 2
elif code_and_modes[0] == 4:
params = get_parameters(1, code_and_modes, prog, ip, relative_base)
outputs.append(params[0])
ip += 2
elif code_and_modes[0] == 5:
params = get_parameters(2, code_and_modes, prog, ip, relative_base)
ip = params[1] if params[0] != 0 else ip + 3
elif code_and_modes[0] == 6:
params = get_parameters(2, code_and_modes, prog, ip, relative_base)
ip = params[1] if params[0] == 0 else ip + 3
elif code_and_modes[0] == 7:
params = get_parameters(2, code_and_modes, prog, ip, relative_base)
write_loc = get_write_location(3, code_and_modes, prog, ip, relative_base)
prog[write_loc] = 1 if params[0] < params[1] else 0
ip += 4
elif code_and_modes[0] == 8:
params = get_parameters(2, code_and_modes, prog, ip, relative_base)
write_loc = get_write_location(3, code_and_modes, prog, ip, relative_base)
prog[write_loc] = 1 if params[0] == params[1] else 0
ip += 4
elif code_and_modes[0] == 9:
params = get_parameters(1, code_and_modes, prog, ip, relative_base)
relative_base += params[0]
ip += 2
return (None, outputs, -1, -1)
def get_parameters(num, code_and_modes, prog, ip, relative_base):
return [prog[prog[ip+i]] if code_and_modes[i] == 0 else prog[ip+i] if code_and_modes[i] == 1 else prog[relative_base+prog[ip+i]] for i in range(1, num+1)]
def get_write_location(write_offset, code_and_modes, prog, ip, relative_base):
if code_and_modes[write_offset] == 2:
return relative_base+prog[ip+write_offset]
else:
return prog[ip+write_offset]
def get_opcode_and_modes(code):
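    """Return (opcode, mode1, mode2, mode3) for an instruction value.
    Added doctest example (not from the original solution): 1002 zero-pads to
    '01002', i.e. opcode 2 with parameter modes (0, 1, 0).
    >>> get_opcode_and_modes(1002)
    (2, 0, 1, 0)
    """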
code_str = str(code).rjust(5, '0')
return (int(code_str[3:]), int(code_str[2]), int(code_str[1]), int(code_str[0]))
def main():
puzzle_input = adventofcode.read_input(17)
codes = [int(code) for code in puzzle_input.split(',')]
adventofcode.answer(1, 5620, part1(codes))
    adventofcode.answer(2, 768115, part2(codes))
if __name__ == "__main__":
import doctest
doctest.testmod()
main()
|
StarcoderdataPython
|
4842578
|
"""Find the minimal frame pointer and stack pointer positions from a C6T VM
logfile. This will be the lowest depth of the stack.
"""
from sys import argv
from typing import Optional, Tuple
def findmin(log: str, fieldpos: int) -> Optional[int]:
"""Splits and then finds minimum in given split index fieldpos.
"""
minval = None
for line in log.splitlines():
try:
curval = int(line.split()[fieldpos], base=16)
if minval is None:
minval = curval
else:
minval = min(curval, minval)
except IndexError:
continue
except ValueError:
continue
return minval
def findmins(log: str) -> Tuple[int]:
"""Find stack and frame pointer mins.
"""
fp, sp = findmin(log, 1), findmin(log, 3)
if fp is None:
fp = -1
if sp is None:
sp = -1
return fp, sp
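# Worked example (added): for log lines shaped like "FP 0x20 SP 0x10", fields 1
# and 3 hold the hex pointer values, so
#   findmins("FP 0x20 SP 0x10\nFP 0x18 SP 0x08\n")  ->  (24, 8)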
if __name__ == "__main__":
    if len(argv) < 2:
        argv.append('c6t.log')  # default log path when none is given on the command line
with open(argv[1], 'r', encoding='utf8') as logfile:
fp, sp = findmins(logfile.read())
print("FP:", hex(fp), "SP:", hex(sp))
|
StarcoderdataPython
|
3297470
|
import tweepy
from twill_util import Util
class Twill():
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token=None,
access_token_secret=None):
'''Initialize the tweepy api'''
self.util = Util()
if not consumer_key:
auth_dict = self.util.get_twitter_auth()
if not auth_dict:
raise ValueError("There is a problem with your twitter credentials")
consumer_key = auth_dict['consumer_key']
consumer_secret = auth_dict['consumer_secret']
access_token = auth_dict['access_token']
access_token_secret = auth_dict['access_token_secret']
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
self.tweepy_api = api
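# Example usage (added sketch; requires valid Twitter credentials supplied either
# directly or via Util.get_twitter_auth(), and assumes a tweepy v3-style API):
#
#   twill = Twill()
#   me = twill.tweepy_api.me()
#   print(me.screen_name)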
|
StarcoderdataPython
|
3362170
|
from setuptools import setup, find_packages
from opensimplex import __version__
setup(
name='opensimplex',
version=__version__,
description='OpenSimplex n-dimensional gradient noise function.',
long_description=open('README.rst').read(),
keywords='opensimplex simplex noise 2D 3D 4D',
url='https://github.com/lmas/opensimplex',
download_url='https://github.com/lmas/opensimplex/releases',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[ # See: http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Mathematics',
],
)
|
StarcoderdataPython
|
185615
|
from flask import Flask
from flask import request
from flask import render_template
from sassutils import builder
from search import search
import json
app = Flask(__name__)
compiled = builder.build_directory(
sass_path="static/scss",
css_path="static/css",
strip_extension=False
)
if app.debug or app.env == "development":
print("Compiled scss:", compiled)
@app.route('/', methods=['GET'])
def index():
"""
Index Page
:return: rendered Page
"""
query = request.args.get('q')
print(query)
if query is not None:
return search_component(query)
else:
return render_template('index.html')
def search_component(query):
"""
Index page w/ search results
:return: rendered Page
"""
# text = request.form['search_text']
text = query
# !!!!!!
# WARNING: no text sanitation done here. Expected to be done in search!
# !!!!!!
search_results = search(text)
json_results = json.loads(search_results)
return render_template('index.html', searchResult=json_results, searchComponent=text)
if __name__ == "__main__":
app.run(debug=True)
|
StarcoderdataPython
|
4819469
|
# coding: utf-8
import datetime
from simple_api import db
class User(db.Document):
username = db.StringField(unique=True, sparse=True, regex=r'^[a-z0-9][a-z0-9\.\-_]*$')
email = db.EmailField(max_length=256)
created_at = db.DateTimeField(default=datetime.datetime.utcnow)
meta = {
'index_background': True,
'indexes': [
{'fields': ['username']},
{'fields': ['email']},
],
}
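# Usage sketch (added; assumes simple_api's `db` is connected to a MongoDB instance):
#
#   user = User(username='alice', email='alice@example.com')
#   user.save()
#   User.objects(username='alice').first()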
|
StarcoderdataPython
|
168310
|
<reponame>thatch/nozomi<gh_stars>1-10
"""
Nozomi
Abstract API Session Module
author: <EMAIL>
"""
from nozomi.data.decodable import Decodable
from nozomi.security.agent import Agent
from nozomi.ancillary.immutable import Immutable
from nozomi.security.perspective import Perspective
from typing import TypeVar
T = TypeVar('T', bound='AbstractSession')
class AbstractSession(Decodable, Agent):
session_id: str = NotImplemented
session_key: str = NotImplemented
agent: Agent = NotImplemented
perspective: Perspective = NotImplemented
api_key: str = NotImplemented
agent_requires_confirmation: bool = NotImplemented
agent_confirmed: bool = NotImplemented
agent_id = Immutable(lambda s: s._agent.agent_id)
|
StarcoderdataPython
|
3309101
|
<reponame>shish/sikulpy<gh_stars>10-100
"""
http://doc.sikuli.org/keys.html
"""
import autopy3 # EXT
class Key(object):
ENTER = int(autopy3.key.K_RETURN)
UP = int(autopy3.key.K_UP)
DOWN = int(autopy3.key.K_DOWN)
LEFT = int(autopy3.key.K_LEFT)
RIGHT = int(autopy3.key.K_RIGHT)
BACKSPACE = int(autopy3.key.K_BACKSPACE)
TAB = "\t"
class KeyModifier(object):
# these differ based on platform
CTRL = autopy3.key.MOD_CONTROL
SHIFT = autopy3.key.MOD_SHIFT
ALT = autopy3.key.MOD_ALT
META = autopy3.key.MOD_META
CMD = META
WIN = META
class Mouse(object):
LEFT = 1
RIGHT = 2
MIDDLE = 3
|
StarcoderdataPython
|
90342
|
<reponame>mentix02/djodo<filename>task/urls.py
from django.urls import path, register_converter
from task import views, converters
register_converter(converters.DateConverter, 'date')
app_name = 'task'
urlpatterns = [
path('', views.TaskListView.as_view(), name='index'),
path('create/', views.TaskCreateView.as_view(), name='create'),
path('delete/<int:pk>/', views.TaskDeleteView.as_view(), name='delete'),
path('toggle/<int:pk>/', views.ToggleTaskView.as_view(), name='toggle'),
path('update/<int:pk>/', views.TaskUpdateView.as_view(), name='update'),
path('date/<date:date>/', views.DateTaskListView.as_view(), name='date'),
path(
'delete/completed/',
views.DeleteCompletedTaskView.as_view(),
name='delete-completed',
),
]
|
StarcoderdataPython
|
4822854
|
<reponame>radluz/fakear
import pytest
import yaml
import os
from fakear import Fakear, FakearFileNotFound
from voluptuous import Error as VoluptuousError
class TestErrorsFakear(object):
def test_engine_multiple_args_one_error(self):
with pytest.raises(VoluptuousError):
Fakear(cfg="tests/cfgs/simple_cmd_mult_args_one_error.yml")
def test_engine_YAMLError(self):
with pytest.raises(yaml.YAMLError):
Fakear(rawdata="unbalanced blackets: ][")
def test_engine_VoluptuousError(self):
with pytest.raises(VoluptuousError):
Fakear(rawdata="- command: echo")
def test_fuzzy_text(self):
with pytest.raises(VoluptuousError):
Fakear(cfg="tests/cfgs/fuzzy_text.yml")
def test_file_not_found(self):
with pytest.raises(FakearFileNotFound):
with Fakear(cfg="tests/cfgs/not_found.yml"):
pass
|
StarcoderdataPython
|
1610779
|
<filename>RockPaperScissors.py
import pygame, random
WINDOW_WIDTH = 800
WINDOW_HEIGHT = 600
pygame.init()
screen = pygame.display.set_mode([WINDOW_WIDTH
, WINDOW_HEIGHT])
pygame.display.set_caption('Smileeeeeeeeeeeeeeeeeee')
keep_going = True
pic = pygame.image.load('./resources/ball.bmp')
colorkey = pic.get_at((0, 0))
pic.set_colorkey(colorkey)
picx = 0
picy = 0
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
timer = pygame.time.Clock()
speedx = 5
speedy = 5
PADDLE_WIDTH = 50
PADDLE_HEIGHT = 25
paddlex = 300
paddley = 550
picw = 100
pich = 100
points = 0
lives = 5
font = pygame.font.SysFont("Times", 24)
pygame.mixer.init()
pop = pygame.mixer.Sound('./resources/pop.wav')
def check_exit():
global event, keep_going
for event in pygame.event.get():
if event.type == pygame.QUIT:
keep_going = False
def update_direction(speedx, speedy):
if picx <= 0 or picx + pic.get_width() >= WINDOW_WIDTH:
speedx = -speedx
if picy <= 0:
speedy = -speedy
return speedx, speedy
def lose_life_update(speedy, lives):
if picy >= WINDOW_HEIGHT - pic.get_height():
lives -= 1
speedy = -speedy
return lives, speedy
def redraw_screen():
screen.fill(BLACK)
screen.blit(pic, (picx, picy))
def draw_paddle():
paddlex = pygame.mouse.get_pos()[0]
paddlex -= PADDLE_WIDTH / 2
pygame.draw.rect(screen, WHITE, (paddlex, paddley, PADDLE_WIDTH, PADDLE_HEIGHT))
return paddlex
def update_score(points, speedy):
if picy + pic.get_height() >= paddley and picy + pic.get_height() <= paddley + PADDLE_HEIGHT and speedy > 0:
if picx + picw / 2 >= paddlex and picx + picw / 2 <= paddlex + PADDLE_WIDTH:
points += 1
speedy = -speedy
pop.play()
return points, speedy
def draw_game_over_message(speedx, speedy):
    notification = 'Lives: ' + str(lives) + '   Points: ' + str(points)
if lives < 1:
speedx = speedy = 0
notification = "Game Over. Your score was: " + str(points)
notification += ". Press F1 to play again. "
text = font.render(notification, True, WHITE)
text_rect = text.get_rect()
text_rect.centerx = screen.get_rect().centerx
text_rect.y = 10
screen.blit(text, text_rect)
pygame.display.update()
return speedx, speedy
def check_for_press_key(keep_going):
global points, lives, picx, picy, speedx, speedy
timer.tick(60)
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_F1:
points = 0
lives = 5
picx = 0
picy = 0
speedx = 5
speedy = 5
if event.key == pygame.K_ESCAPE:
keep_going = False
return keep_going
while keep_going:
check_exit()
picx += speedx
picy += speedy
speedx, speedy = update_direction(speedx, speedy)
lives, speedy = lose_life_update(speedy, lives)
redraw_screen()
paddlex = draw_paddle()
points, speedy = update_score(points, speedy)
speedx, speedy = draw_game_over_message(speedx, speedy)
keep_going = check_for_press_key(keep_going)
pygame.quit()
|
StarcoderdataPython
|
1625603
|
from flask import jsonify, Response
from backend.api.handlers.decorators import api_authenticated, validate_team_key
from backend.common.consts.api_version import ApiMajorVersion
from backend.common.decorators import cached_public
from backend.common.models.keys import TeamKey
from backend.common.queries.team_query import TeamListQuery, TeamQuery
@validate_team_key
@api_authenticated
@cached_public
def team(team_key: TeamKey) -> Response:
return jsonify(TeamQuery(team_key=team_key).fetch_dict(ApiMajorVersion.API_V3))
@api_authenticated
@cached_public
def team_list(page_num: int) -> Response:
return jsonify(TeamListQuery(page=page_num).fetch_dict(ApiMajorVersion.API_V3))
|
StarcoderdataPython
|
3281117
|
<gh_stars>1-10
import multiprocessing
import os
import re
from utils.Log import Log
import requests
import threadpool
from net.NetUtils import EasyHttp
from utils.sqllite_handle import Sqlite
requests.packages.urllib3.disable_warnings()
address = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + '/'
"""
66ip.cn (dead)
data5u.com
xicidaili.com
goubanjia.com
xdaili.cn
kuaidaili.com
cn-proxy.com
proxy-list.org
www.mimiip.com to do
"""
class GetFreeProxy(object):
@staticmethod
def freeProxySecond(area=33, page=1):
"""
        Proxy 66 http://www.66ip.cn/
        :param area: number of area index pages to scrape (page=1 is the Beijing page, page=2 the Shanghai page, ...)
        :param page: number of pages to fetch per area
:return:
"""
area = 33 if area > 33 else area
for area_index in range(1, area + 1):
for i in range(1, page + 1):
url = "http://www.66ip.cn/areaindex_{}/{}.html".format(area_index, i)
html_tree = EasyHttp.getHtmlTree(url)
if not html_tree:
                    Log.w('http://www.66ip.cn is unavailable')
return []
tr_list = html_tree.xpath("//*[@id='footer']/div/table/tr[position()>1]")
if len(tr_list) == 0:
continue
for tr in tr_list:
yield tr.xpath("./td[1]/text()")[0] + ":" + tr.xpath("./td[2]/text()")[0]
break
@staticmethod
def freeProxyFourth(page_count=2):
"""
        Xici proxy http://www.xicidaili.com
:return:
"""
url_list = [
            'http://www.xicidaili.com/nn/',  # high-anonymity
            'http://www.xicidaili.com/nt/',  # transparent
]
for each_url in url_list:
for i in range(1, page_count + 1):
page_url = each_url + str(i)
tree = EasyHttp.getHtmlTree(page_url)
if not tree:
                    Log.w('http://www.xicidaili.com is unavailable')
return []
proxy_list = tree.xpath('.//table[@id="ip_list"]//tr[position()>1]')
for proxy in proxy_list:
try:
yield ':'.join(proxy.xpath('./td/text()')[0:2])
except Exception as e:
pass
@staticmethod
def freeProxyFifth():
"""
guobanjia http://www.goubanjia.com/
:return:
"""
url = "http://www.goubanjia.com/"
tree = EasyHttp.getHtmlTree(url)
if not tree:
            Log.w('http://www.goubanjia.com is unavailable')
return []
proxy_list = tree.xpath('//td[@class="ip"]')
        # This site injects hidden decoy digits, so extra digits or '.' characters may be scraped;
        # content inside <p style="display:none;"> has to be filtered out
xpath_str = """.//*[not(contains(@style, 'display: none'))
and not(contains(@style, 'display:none'))
and not(contains(@class, 'port'))
]/text()
"""
for each_proxy in proxy_list:
try:
                # the ':' sits directly under the td while the rest is inside div/span/p tags; extract the ip first, then the port
ip_addr = ''.join(each_proxy.xpath(xpath_str))
port = each_proxy.xpath(".//span[contains(@class, 'port')]/text()")[0]
yield '{}:{}'.format(ip_addr, port)
except Exception as e:
pass
@staticmethod
def freeProxySixth():
"""
        Xdaili http://www.xdaili.cn/
:return:
"""
url = 'http://www.xdaili.cn/ipagent/freeip/getFreeIps?page=1&rows=10'
try:
res = EasyHttp.get(url, timeout=10).json()
if not res or not res['RESULT'] or not res['RESULT']['rows']:
                Log.w('http://www.xdaili.cn is unavailable')
return []
for row in res['RESULT']['rows']:
yield '{}:{}'.format(row['ip'], row['port'])
except Exception as e:
pass
@staticmethod
def freeProxySeventh():
"""
        Kuaidaili https://www.kuaidaili.com
"""
url_list = [
'https://www.kuaidaili.com/free/inha/{page}/',
'https://www.kuaidaili.com/free/intr/{page}/'
]
for url in url_list:
for page in range(1, 2):
page_url = url.format(page=page)
tree = EasyHttp.getHtmlTree(page_url)
if tree is None:
                    Log.w('http://www.kuaidaili.com is unavailable')
return []
proxy_list = tree.xpath('.//table//tr')
for tr in proxy_list[1:]:
yield ':'.join(tr.xpath('./td/text()')[0:2])
@staticmethod
def freeProxyEight():
"""
        Mimi proxy http://www.mimiip.com
"""
        url_gngao = ['http://www.mimiip.com/gngao/%s' % n for n in range(1, 2)]  # domestic high-anonymity
        url_gnpu = ['http://www.mimiip.com/gnpu/%s' % n for n in range(1, 2)]  # domestic anonymous
        url_gntou = ['http://www.mimiip.com/gntou/%s' % n for n in range(1, 2)]  # domestic transparent
url_list = url_gngao + url_gnpu + url_gntou
for url in url_list:
r = EasyHttp.get(url, timeout=10)
if not r:
                Log.w('http://www.mimiip.com is unavailable')
return []
proxies = re.findall(r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\w\W].*<td>(\d+)</td>', r)
for proxy in proxies:
yield ':'.join(proxy)
@staticmethod
def freeProxyNinth():
"""
        Coderbusy proxy https://proxy.coderbusy.com/
:return:
"""
urls = ['https://proxy.coderbusy.com/classical/country/cn.aspx?page=1']
for url in urls:
r = EasyHttp.get(url, timeout=10)
if not r:
                Log.w('http://proxy.coderbusy.com is unavailable')
return []
proxies = re.findall('data-ip="(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})".+?>(\d+)</td>', r)
for proxy in proxies:
yield ':'.join(proxy)
@staticmethod
def freeProxyTen():
"""
        Yun proxy http://www.ip3366.net/free/
:return:
"""
urls = ['http://www.ip3366.net/free/']
for url in urls:
r = EasyHttp.get(url, timeout=10)
if not r:
                Log.w('http://www.ip3366.net is unavailable')
return []
proxies = re.findall(r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\s\S]*?<td>(\d+)</td>', r)
for proxy in proxies:
yield ":".join(proxy)
@staticmethod
def freeProxyEleven():
"""
        IPhai http://www.iphai.com/free/ng
:return:
"""
urls = [
'http://www.iphai.com/free/ng',
'http://www.iphai.com/free/np',
'http://www.iphai.com/free/wg',
'http://www.iphai.com/free/wp'
]
for url in urls:
r = EasyHttp.get(url, timeout=10)
if not r:
                Log.w('http://www.iphai.com is unavailable')
return []
proxies = re.findall(r'<td>\s*?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\s*?</td>[\s\S]*?<td>\s*?(\d+)\s*?</td>',
r)
for proxy in proxies:
yield ":".join(proxy)
@staticmethod
def freeProxyTwelve(page_count=2):
"""
        jiangxianli http://ip.jiangxianli.com/?page=
        Free proxy library with a very large number of entries
:return:
"""
for i in range(1, page_count + 1):
url = 'http://ip.jiangxianli.com/?page={}'.format(i)
html_tree = EasyHttp.getHtmlTree(url)
if html_tree is None:
                Log.w('http://ip.jiangxianli.com is unavailable')
return []
tr_list = html_tree.xpath("/html/body/div[1]/div/div[1]/div[2]/table/tbody/tr")
if len(tr_list) == 0:
continue
for tr in tr_list:
yield tr.xpath("./td[2]/text()")[0] + ":" + tr.xpath("./td[3]/text()")[0]
@staticmethod
def freeProxyWallFirst():
"""
        cn-proxy (site hosted outside the Great Firewall)
:return:
"""
urls = ['http://cn-proxy.com/', 'http://cn-proxy.com/archives/218']
for url in urls:
r = EasyHttp.get(url, timeout=10)
if not r:
                Log.w('http://cn-proxy.com is unavailable')
return []
            proxies = re.findall(r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\w\W]<td>(\d+)</td>', r)
for proxy in proxies:
yield ':'.join(proxy)
@staticmethod
def freeProxyWallSecond():
"""
https://proxy-list.org/english/index.php
:return:
"""
urls = ['https://proxy-list.org/english/index.php?p=%s' % n for n in range(1, 10)]
import base64
for url in urls:
r = EasyHttp.get(url, timeout=10)
if not r:
                Log.w('http://proxy-list.org/english/index.php is unavailable')
return []
proxies = re.findall(r"Proxy\('(.*?)'\)", r)
for proxy in proxies:
yield base64.b64decode(proxy).decode()
@staticmethod
def freeProxyWallThird():
urls = ['https://list.proxylistplus.com/Fresh-HTTP-Proxy-List-1']
for url in urls:
r = EasyHttp.get(url, timeout=10)
if not r:
                Log.w('http://list.proxylistplus.com is unavailable')
return []
proxies = re.findall(r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\s\S]*?<td>(\d+)</td>', r)
for proxy in proxies:
yield ':'.join(proxy)
    @staticmethod
    def verifyProxyFormat(proxy):
        """
        Check that the proxy string has the expected "ip:port" format
:param proxy:
:return:
"""
import re
verify_regex = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}"
_proxy = re.findall(verify_regex, proxy)
return True if len(_proxy) == 1 and _proxy[0] == proxy else False
@staticmethod
def validUsefulProxy(params):
"""
        Check whether the proxy is actually usable
"""
marks = params.split('&&')
if isinstance(marks[1], bytes):
marks[1] = marks[1].decode('utf8')
proxies = {"http": "http://{proxy}".format(proxy=marks[1])}
flag = None
try:
            # discard proxies that take too long to respond
r = requests.get('http://httpbin.org/ip', proxies=proxies, timeout=10, verify=False)
if r.status_code == 200 and r.json().get("origin"):
# logger.info('%s is ok' % proxy)
flag = True
except Exception as e:
flag = False
if not flag:
sqlite = Sqlite(address + 'ip.db')
            sqlite.update_data('DELETE FROM ip_house WHERE id = {}'.format(marks[0]))
@staticmethod
def getAllProxy(pool_size=10,thread_or_process=True,is_refash=True):
        Log.v('Updating the proxy IP pool, please wait...')
# address = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + '/'
if is_refash:
proxys = GetFreeProxy.get_list_proxys()
sqlite = Sqlite(address + 'ip.db')
sqlite.update_data('DELETE FROM ip_house')
sqlite = Sqlite(address + 'ip.db')
for i in range(len(proxys)):
if proxys[i] and GetFreeProxy.verifyProxyFormat(proxys[i]):
sqlite.cursor.execute("INSERT INTO ip_house VALUES (?,?,?);", [i+1,proxys[i],'true'])
sqlite.conn.commit()
sqlite.close_conn()
else:
sqlite = Sqlite(address + 'ip.db')
results = sqlite.query_data('select count(proxy_adress) from ip_house')
if int(results[0][0]) == 0:
proxys = GetFreeProxy.get_list_proxys()
sqlite = Sqlite(address + 'ip.db')
for i in range(len(proxys)):
if proxys[i] and GetFreeProxy.verifyProxyFormat(proxys[i]):
sqlite.cursor.execute("INSERT INTO ip_house VALUES (?,?,?);", [i + 1, proxys[i], 'true'])
sqlite.conn.commit()
sqlite.close_conn()
sqlite = Sqlite(address + 'ip.db')
results = sqlite.query_data('select id,proxy_adress from ip_house')
params = []
for result in results:
param = str(result[0]) + '&&' + result[1]
params.append(param)
        Log.v('Checking proxy availability...')
if thread_or_process:
GetFreeProxy.exec_multi_threading(pool_size,params)
else:
GetFreeProxy.exec_multi_process(pool_size,params)
        Log.v('Update complete')
@staticmethod
def get_list_proxys():
proxys = []
proxys.extend(GetFreeProxy.freeProxySecond())
proxys.extend(GetFreeProxy.freeProxyFourth())
proxys.extend(GetFreeProxy.freeProxyFifth())
proxys.extend(GetFreeProxy.freeProxySixth())
proxys.extend(GetFreeProxy.freeProxySeventh())
proxys.extend(GetFreeProxy.freeProxyEight())
proxys.extend(GetFreeProxy.freeProxyNinth())
proxys.extend(GetFreeProxy.freeProxyTen())
proxys.extend(GetFreeProxy.freeProxyEleven())
proxys.extend(GetFreeProxy.freeProxyTwelve())
proxys.extend(GetFreeProxy.freeProxyWallFirst())
proxys.extend(GetFreeProxy.freeProxyWallSecond())
proxys.extend(GetFreeProxy.freeProxyWallThird())
return proxys
@staticmethod
def exec_multi_process(size, proxys):
pool = multiprocessing.Pool(processes=size)
for proxy in proxys:
pool.apply_async(GetFreeProxy.validUsefulProxy, (proxy,))
pool.close()
pool.join()
@staticmethod
def exec_multi_threading(size, proxys):
pool = threadpool.ThreadPool(size)
reqs = threadpool.makeRequests(GetFreeProxy.validUsefulProxy, proxys)
[pool.putRequest(req) for req in reqs]
pool.wait()
if __name__ == '__main__':
GetFreeProxy.getAllProxy()
pass
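# Illustration (added): verifyProxyFormat only checks the "ip:port" shape, e.g.
#   GetFreeProxy.verifyProxyFormat('192.168.1.1:8080')  -> True
#   GetFreeProxy.verifyProxyFormat('192.168.1.1')       -> False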
|
StarcoderdataPython
|
4841717
|
<reponame>csh-tech/horovod<filename>horovod/torch/sync_batch_norm.py
# Based on https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/_functions.py
# Modifications copyright 2020 Maka Autonomous Robotic Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from horovod.torch.mpi_ops import allgather_async, allreduce_async, Sum, size, synchronize
from distutils.version import LooseVersion
import torch
from torch.autograd.function import Function
import torch.nn.functional as F
from torch.nn.modules.batchnorm import _BatchNorm
# Backward compat for old PyTorch
if not hasattr(torch.jit, 'unused'):
torch.jit.unused = lambda x: x
_SYNC_BN_V2 = LooseVersion(torch.__version__) >= LooseVersion('1.6.0')
class SyncBatchNorm(_BatchNorm):
"""
Applies synchronous version of N-dimensional BatchNorm. In this version, normalization
parameters are synchronized across workers during forward pass. This is very useful in
situations where each GPU can fit a very small number of examples.
See https://pytorch.org/docs/stable/nn.html#batchnorm2d for more details about BatchNorm.
Arguments:
num_features: number of channels `C` from the shape `(N, C, ...)`
eps: a value added to the denominator for numerical stability. Default: 1e-5
momentum: the value used for the running_mean and running_var
computation. Can be set to `None` for cumulative moving average
(i.e. simple average). Default: 0.1
affine: a boolean value that when set to `True`, this module has
learnable affine parameters. Default: `True`
track_running_stats: a boolean value that when set to `True`, this
module tracks the running mean and variance, and when set to `False`,
this module does not track such statistics and always uses batch
statistics in both training and eval modes. Default: `True`
NOTE: only GPU input tensors are supported in the training mode.
"""
def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True, track_running_stats=True):
super().__init__(num_features, eps, momentum, affine, track_running_stats)
def _check_input_dim(self, input):
if input.dim() < 2:
raise ValueError('expected at least 2D input (got {}D input)'.format(input.dim()))
def _run_bn(self, input):
return F.batch_norm(
input, self.running_mean, self.running_var, self.weight, self.bias,
self.training or not self.track_running_stats, self.momentum, self.eps)
@torch.jit.unused
def _maybe_run_sync_bn(self, input):
if size() == 1:
return self._run_bn(input)
return _SyncBatchNorm.apply(
input, self.weight, self.bias, self.running_mean, self.running_var,
self.eps, self.momentum)
def forward(self, input):
# currently only GPU input is supported by underlying kernel from PyTorch
if not input.is_cuda:
raise ValueError('SyncBatchNorm expected input tensor to be on GPU')
self._check_input_dim(input)
if self.training and self.track_running_stats:
self.num_batches_tracked = self.num_batches_tracked + 1
if not self.training and self.track_running_stats:
return self._run_bn(input)
else:
return self._maybe_run_sync_bn(input)
class _SyncBatchNorm(Function):
@staticmethod
def forward(self, input, weight, bias, running_mean, running_var, eps, momentum):
input = input.contiguous()
size = input.numel() // input.size(1)
count = torch.tensor([size])
# calculate mean/invstd for input.
mean, invstd = torch.batch_norm_stats(input, eps)
count_handle = allgather_async(count.unsqueeze(0), name='sync_batch_norm.count')
mean_handle = allgather_async(mean.unsqueeze(0), name='sync_batch_norm.mean')
invstd_handle = allgather_async(invstd.unsqueeze(0), name='sync_batch_norm.invstd')
# wait on the async communication to finish
count_all = synchronize(count_handle)
mean_all = synchronize(mean_handle)
invstd_all = synchronize(invstd_handle)
if _SYNC_BN_V2:
counts_for_bngswc = count_all.view(-1).float().to(input.device)
else:
# backwards compatibility
counts_for_bngswc = count_all.view(-1).tolist()
# calculate global mean & invstd
mean, invstd = torch.batch_norm_gather_stats_with_counts(
input,
mean_all,
invstd_all,
running_mean,
running_var,
momentum,
eps,
counts_for_bngswc
)
self.save_for_backward(input, weight, mean, invstd, count_all)
# apply element-wise normalization
return torch.batch_norm_elemt(input, weight, bias, mean, invstd, eps)
@staticmethod
def backward(self, grad_output):
grad_output = grad_output.contiguous()
saved_input, weight, mean, invstd, count_all = self.saved_tensors
need_input_grad, need_weight_grad, need_bias_grad = self.needs_input_grad[0:3]
# calculate local stats as well as grad_weight / grad_bias
sum_dy, sum_dy_xmu, grad_weight, grad_bias = torch.batch_norm_backward_reduce(
grad_output,
saved_input,
mean,
invstd,
weight,
need_input_grad,
need_weight_grad,
need_bias_grad
)
if need_input_grad:
# synchronizing stats used to calculate input gradient.
sum_dy_handle = allreduce_async(sum_dy, op=Sum, name='sync_batch_norm.sum_dy')
sum_dy_xmu_handle = allreduce_async(sum_dy_xmu, op=Sum, name='sync_batch_norm.sum_dy_xmu')
# wait on the async communication to finish
sum_dy = synchronize(sum_dy_handle)
sum_dy_xmu = synchronize(sum_dy_xmu_handle)
if _SYNC_BN_V2:
mean_dy = sum_dy / count_all.sum()
mean_dy_xmu = sum_dy_xmu / count_all.sum()
else:
# before 1.6.0, sum_dy was sum of means from every worker, so we just
# need to divide it by number of workers
mean_dy = sum_dy / size()
mean_dy_xmu = sum_dy_xmu / size()
# backward pass for gradient calculation
grad_input = torch.batch_norm_backward_elemt(
grad_output,
saved_input,
mean,
invstd,
weight,
mean_dy,
mean_dy_xmu
)
else:
grad_input = None
# synchronizing of grad_weight / grad_bias is not needed as distributed
# training would handle all reduce.
if weight is None or not need_weight_grad:
grad_weight = None
if weight is None or not need_bias_grad:
grad_bias = None
return grad_input, grad_weight, grad_bias, None, None, None, None, None, None
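

# A minimal, hedged usage sketch (not part of the upstream module): it shows how the
# SyncBatchNorm layer defined above could be exercised under Horovod. The channel
# count, tensor shape, and single-script launch are illustrative assumptions only;
# in training mode the layer requires GPU input.
if __name__ == '__main__':
    import horovod.torch as hvd

    hvd.init()
    torch.cuda.set_device(hvd.local_rank())
    bn = SyncBatchNorm(num_features=64).cuda()
    x = torch.randn(8, 64, 16, 16, device='cuda')
    y = bn(x)  # batch statistics are synchronized across all Horovod workers
    print(y.shape)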
|
StarcoderdataPython
|
105308
|
"""
time: c*26 + p
space: 26 + 26 (1)
"""
class Solution:
def findAnagrams(self, s: str, p: str) -> List[int]:
cntP = collections.Counter(p)
cntS = collections.Counter()
P = len(p)
S = len(s)
if P > S:
return []
ans = []
for i, c in enumerate(s):
cntS[c] += 1
if i >= P:
if cntS[s[i-P]] > 1:
cntS[s[i-P]] -= 1
else:
del cntS[s[i-P]]
if cntS == cntP:
ans.append(i-(P-1))
return ans
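

# Hedged usage sketch: the classic example for this problem. Anagrams of "abc"
# occur in "cbaebabacd" at start indices 0 ("cba") and 6 ("bac").
if __name__ == '__main__':
    print(Solution().findAnagrams("cbaebabacd", "abc"))  # expected: [0, 6]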
|
StarcoderdataPython
|
1777405
|
<gh_stars>1-10
# Choregraphe simplified export in Python.
from naoqi import ALProxy
names = list()
times = list()
keys = list()
names.append("HeadPitch")
times.append([0.8, 1.56, 2.24, 2.8, 3.48, 4.6])
keys.append([0.29602, -0.170316, -0.340591, -0.0598679, -0.193327, -0.01078])
names.append("HeadYaw")
times.append([0.8, 1.56, 2.24, 2.8, 3.48, 4.6])
keys.append([-0.135034, -0.351328, -0.415757, -0.418823, -0.520068, -0.375872])
names.append("LElbowRoll")
times.append([0.72, 1.48, 2.16, 2.72, 3.4, 4.52])
keys.append([-1.37902, -1.29005, -1.18267, -1.24863, -1.3192, -1.18421])
names.append("LElbowYaw")
times.append([0.72, 1.48, 2.16, 2.72, 3.4, 4.52])
keys.append([-0.803859, -0.691876, -0.679603, -0.610574, -0.753235, -0.6704])
names.append("LHand")
times.append([1.48, 4.52])
keys.append([0.238207, 0.240025])
names.append("LShoulderPitch")
times.append([0.72, 1.48, 2.16, 2.72, 3.4, 4.52])
keys.append([1.11824, 0.928028, 0.9403, 0.862065, 0.897349, 0.842125])
names.append("LShoulderRoll")
times.append([0.72, 1.48, 2.16, 2.72, 3.4, 4.52])
keys.append([0.363515, 0.226991, 0.20398, 0.217786, 0.248467, 0.226991])
names.append("LWristYaw")
times.append([1.48, 4.52])
keys.append([0.147222, 0.11961])
names.append("RElbowRoll")
times.append([0.64, 1.4, 1.68, 2.08, 2.4, 2.64, 3.04, 3.32, 3.72, 4.44])
keys.append([1.38524, 0.242414, 0.349066, 0.934249, 0.680678, 0.191986, 0.261799, 0.707216, 1.01927, 1.26559])
names.append("RElbowYaw")
times.append([0.64, 1.4, 2.08, 2.64, 3.32, 3.72, 4.44])
keys.append([-0.312978, 0.564471, 0.391128, 0.348176, 0.381923, 0.977384, 0.826783])
names.append("RHand")
times.append([1.4, 3.32, 4.44])
keys.append([0.853478, 0.854933, 0.425116])
names.append("RShoulderPitch")
times.append([0.64, 1.4, 2.08, 2.64, 3.32, 4.44])
keys.append([0.247016, -1.17193, -1.0891, -1.26091, -1.14892, 1.02015])
names.append("RShoulderRoll")
times.append([0.64, 1.4, 2.08, 2.64, 3.32, 4.44])
keys.append([-0.242414, -0.954191, -0.460242, -0.960325, -0.328317, -0.250085])
names.append("RWristYaw")
times.append([1.4, 3.32, 4.44])
keys.append([-0.312978, -0.303775, 0.182504])
def main(robotIP, port):
try:
# uncomment the following line and modify the IP if you use this script outside Choregraphe.
motion = ALProxy("ALMotion", robotIP, port)
#motion = ALProxy("ALMotion")
motion.angleInterpolation(names, keys, times, True)
except BaseException, err:
print err
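

# Hedged usage sketch: how the exported animation might be launched outside
# Choregraphe. The IP below is a placeholder, not a value from the original export;
# 9559 is the default NAOqi port.
if __name__ == "__main__":
    robotIP = "192.168.1.10"  # placeholder; replace with your robot's IP
    port = 9559
    main(robotIP, port)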
|
StarcoderdataPython
|
1643707
|
<reponame>realvitya/fmcapi
from fmcapi.api_objects.apiclasstemplate import APIClassTemplate
from fmcapi.api_objects.helper_functions import *
from .networkaddresses import NetworkAddresses
import logging
import warnings
class NetworkGroups(APIClassTemplate):
"""
The NetworkGroups Object in the FMC.
"""
VALID_JSON_DATA = ["id", "name", "type", "objects", "literals"]
VALID_FOR_KWARGS = VALID_JSON_DATA + []
URL_SUFFIX = "/object/networkgroups"
# Technically you can have objects OR literals but I'm not set up for "OR" logic, yet.
REQUIRED_FOR_POST = ["name"]
def __init__(self, fmc, **kwargs):
super().__init__(fmc, **kwargs)
logging.debug("In __init__() for NetworkGroups class.")
self.parse_kwargs(**kwargs)
self.type = "NetworkGroup"
def named_networks(self, action, name=""):
logging.debug("In named_networks() for NetworkGroups class.")
if action == "add":
net1 = NetworkAddresses(fmc=self.fmc)
response = net1.get()
if "items" in response:
new_net = None
for item in response["items"]:
if item["name"] == name:
new_net = {
"name": item["name"],
"id": item["id"],
"type": item["type"],
}
break
if new_net is None:
logging.warning(
f'Network "{name}" is not found in FMC. Cannot add to NetworkGroups.'
)
else:
if "objects" in self.__dict__:
duplicate = False
for obj in self.objects:
if obj["name"] == new_net["name"]:
duplicate = True
break
if not duplicate:
self.objects.append(new_net)
logging.info(f'Adding "{name}" to NetworkGroups.')
else:
self.objects = [new_net]
logging.info(f'Adding "{name}" to NetworkGroups.')
if action == "addgroup":
netg1 = NetworkGroups(fmc=self.fmc)
response = netg1.get()
if "items" in response:
new_net = None
for item in response["items"]:
if item["name"] == name:
new_net = {
"name": item["name"],
"id": item["id"],
"type": item["type"],
}
break
if new_net is None:
logging.warning(
f'Network "{name}" is not found in FMC. Cannot add to NetworkGroups.'
)
else:
if "objects" in self.__dict__:
duplicate = False
for obj in self.objects:
if obj["name"] == new_net["name"]:
duplicate = True
break
if not duplicate:
self.objects.append(new_net)
logging.info(f'Adding "{name}" to NetworkGroups.')
else:
self.objects = [new_net]
logging.info(f'Adding "{name}" to NetworkGroups.')
elif action == "remove":
if "objects" in self.__dict__:
objects_list = []
for obj in self.objects:
if obj["name"] != name:
objects_list.append(obj)
self.objects = objects_list
logging.info(f'Removed "{name}" from NetworkGroups.')
else:
logging.info(
"This NetworkGroups has no named_networks. Nothing to remove."
)
elif action == "clear":
if "objects" in self.__dict__:
del self.objects
logging.info("All named_networks removed from this NetworkGroups.")
def unnamed_networks(self, action, value=""):
logging.debug("In unnamed_networks() for NetworkGroups class.")
new_literal = []
if action == "add":
if value == "":
logging.error(
"Value assignment required to add unamed_network to NetworkGroups."
)
return
literal_type = get_networkaddress_type(value=value)
if literal_type == "host" or literal_type == "network":
new_literal = {"value": value, "type": literal_type}
elif literal_type == "range":
logging.error(
"Ranges are not supported as unnamed_networks in a NetworkGroups."
)
else:
logging.error(
f'Value "{value}" provided is not in a recognizable format.'
)
return
if "literals" in self.__dict__:
duplicate = False
for obj in self.literals:
if obj["value"] == new_literal["value"]:
duplicate = True
break
if not duplicate:
self.literals.append(new_literal)
logging.info(f'Adding "{value}" to NetworkGroup.')
else:
self.literals = [new_literal]
logging.info(f'Adding "{value}" to NetworkGroup.')
elif action == "remove":
if "literals" in self.__dict__:
literals_list = []
for obj in self.literals:
if obj["value"] != value:
literals_list.append(obj)
self.literals = literals_list
logging.info(f'Removed "{value}" from NetworkGroup.')
else:
logging.info(
"This NetworkGroups has no unnamed_networks. Nothing to remove."
)
elif action == "clear":
if "literals" in self.__dict__:
del self.literals
logging.info("All unnamed_networks removed from this NetworkGroups.")
class NetworkGroup(NetworkGroups):
"""Dispose of this Class after 20210101."""
def __init__(self, fmc, **kwargs):
warnings.resetwarnings()
warnings.warn(
"Deprecated: NetworkGroup() should be called via NetworkGroups()."
)
super().__init__(fmc, **kwargs)
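

# Hedged usage sketch (comments only, since it needs a live FMC connection): one way
# the NetworkGroups class above might be driven, assuming an authenticated fmcapi.FMC
# object `fmc` and the usual post() method inherited from APIClassTemplate. All names
# and addresses are placeholders.
#
#     group = NetworkGroups(fmc=fmc, name="ExampleGroup")
#     group.named_networks(action="add", name="SomeExistingNetworkObject")
#     group.unnamed_networks(action="add", value="10.0.0.0/24")
#     group.post()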
|
StarcoderdataPython
|
4809055
|
<gh_stars>1000+
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
try:
from unittest.mock import Mock, patch
except ImportError: # python < 3.3
from mock import Mock, patch # type: ignore
from azure.identity._constants import EnvironmentVariables, KnownAuthorities
from azure.identity._internal import get_default_authority, normalize_authority
import pytest
def test_get_default_authority():
"""get_default_authority should return public cloud or the value of $AZURE_AUTHORITY_HOST, with 'https' scheme"""
# default scheme is https
for authority in ("localhost", "https://localhost"):
with patch.dict("os.environ", {EnvironmentVariables.AZURE_AUTHORITY_HOST: authority}, clear=True):
assert get_default_authority() == "https://localhost"
# default to public cloud
for environ in ({}, {EnvironmentVariables.AZURE_AUTHORITY_HOST: KnownAuthorities.AZURE_PUBLIC_CLOUD}):
with patch.dict("os.environ", environ, clear=True):
assert get_default_authority() == "https://" + KnownAuthorities.AZURE_PUBLIC_CLOUD
# require https
with pytest.raises(ValueError):
with patch.dict("os.environ", {EnvironmentVariables.AZURE_AUTHORITY_HOST: "http://localhost"}, clear=True):
get_default_authority()
def test_normalize_authority():
"""normalize_authority should return a URI with a scheme and no trailing spaces or forward slashes"""
localhost = "localhost"
localhost_tls = "https://" + localhost
# accept https if specified, default to it when no scheme specified
for uri in (localhost, localhost_tls):
assert normalize_authority(uri) == localhost_tls
# remove trailing characters
for string in ("/", " ", "/ ", " /"):
assert normalize_authority(uri + string) == localhost_tls
# raise for other schemes
for scheme in ("http", "file"):
with pytest.raises(ValueError):
normalize_authority(scheme + "://localhost")
|
StarcoderdataPython
|
4842306
|
import decorator
import inspect
import time
import zope.testing.cleanup
_caches = {}
_timeouts = {}
def collect():
"""Clear cache of results which have timed out"""
for func in _caches:
for key in list(_caches[func]):
if (time.time() - _caches[func][key][1] >=
_timeouts[func]):
_caches[func].pop(key, None)
def clear():
_caches.clear()
_timeouts.clear()
zope.testing.cleanup.addCleanUp(clear)
class do_not_cache_and_return:
"""Class which may be returned by a memoized method"""
def __init__(self, value):
self.value = value
def __call__(self):
return self.value
def Memoize(timeout, ignore_self=False, _caches=_caches, _timeouts=_timeouts):
"""Memoize With Timeout
timeout ... in seconds
Based on http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/325905
"""
@decorator.decorator
def func(f, *args, **kwargs):
cache = _caches.setdefault(f, {})
_timeouts.setdefault(f, timeout)
cache_args = args
if ignore_self:
parameters = inspect.signature(f).parameters
if parameters and next(iter(parameters)) == 'self':
cache_args = args[1:]
kw = list(kwargs.items())
kw.sort()
key = (cache_args, tuple(kw))
try:
hash(key)
except TypeError:
# Not hashable.
key = None
try:
value, cached_time = cache[key]
if (time.time() - cached_time) > timeout:
raise KeyError
except KeyError:
value = f(*args, **kwargs)
if isinstance(value, do_not_cache_and_return):
return value()
if key is not None:
cache[key] = (value, time.time())
return value
return func
def memoize_on_attribute(attribute_name, timeout, ignore_self=False):
@decorator.decorator
def func(function, *args, **kw):
try:
self = args[0]
cache = getattr(self, attribute_name)
except (IndexError, AttributeError):
raise TypeError(
"gocept.cache.method.memoize_on_attribute could" +
" not retrieve cache attribute '%s' for function %r"
% (attribute_name, function))
return Memoize(timeout, _caches=cache,
ignore_self=ignore_self)(function)(*args, **kw)
return func
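

# Hedged usage sketch: memoize a slow function for 60 seconds. The function and its
# workload below are illustrative only, not part of the original module.
if __name__ == '__main__':
    @Memoize(60)
    def slow_square(x):
        time.sleep(0.1)  # simulate expensive work
        return x * x

    print(slow_square(3))  # computed and cached
    print(slow_square(3))  # returned from the cache until the 60s timeout elapses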
|
StarcoderdataPython
|
1606434
|
import numpy as np
def generate_noise(size, beta):
white_noise = np.random.randn(*size)
white_noise_fft = np.fft.fftn(white_noise)
ndims = len(size)
freq_along_axis = []
for axis in range(ndims):
freq_along_axis.append(np.fft.fftfreq(size[axis]))
grids = np.meshgrid(*freq_along_axis)
sum_of_squares = 0
for grid in grids:
sum_of_squares += grid**2
freqs = np.sqrt(sum_of_squares)
origin = (0,) * ndims
    freqs[origin] += 1e-8  # avoid division by zero at the DC (zero-frequency) bin
    spectral_filter = 1 / np.power(freqs, beta)
    colored_fft = white_noise_fft * spectral_filter.T
colored_noise = np.fft.ifftn(colored_fft)
return np.abs(colored_noise)
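

# Hedged usage sketch: larger beta concentrates energy at low spatial frequencies
# (redder noise); the 256x256 size is arbitrary and chosen only for illustration.
if __name__ == '__main__':
    noise = generate_noise((256, 256), beta=2.0)
    print(noise.shape, noise.min(), noise.max())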
|
StarcoderdataPython
|
1672995
|
# Generated by Django 3.0.5 on 2020-12-23 15:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('shows', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='show',
name='updated_date',
field=models.DateTimeField(auto_now=True),
),
]
|
StarcoderdataPython
|
181296
|
# removes small images (<80x80) and resizes all remaining images to 80x80
import os
import cv2

sourceDir = r'D:\MER\Dataset\Expressions\3. Face_crop\Surprise'  # Source folder
targetDir = r'D:\MER\Dataset\Expressions\4. small_image_removed_resized\Surprise'  # Target folder
imageCount = 0
for sRoot, sDirs, sFiles in os.walk(sourceDir):
    break  # only the files in the top-level source folder are needed
for sourceName in sFiles:
    sourceFile = os.path.join(sourceDir, sourceName)
    image = cv2.imread(sourceFile)
    if image is None:
        continue  # skip files OpenCV cannot read
    if image.shape[0] >= 80 and image.shape[1] >= 80:  # keep only images at least 80x80
        imageCount += 1
        fileName = 'surprise' + str(imageCount) + '.jpg'  # change the file name prefix here
        imageResize = cv2.resize(image, (80, 80))  # resize to 80x80 pixels
        cv2.imwrite(os.path.join(targetDir, fileName), imageResize)
|
StarcoderdataPython
|
3230319
|
<gh_stars>1-10
# Generated by Django 3.0.6 on 2021-03-01 09:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("backend", "0004_pointsgained_comment"),
]
operations = [
migrations.AddField(
model_name="community",
name="colour",
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name="user",
name="description",
field=models.TextField(blank=True, null=True),
),
]
|
StarcoderdataPython
|
86116
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('adverts', '0006_auto_20150303_0009'),
]
operations = [
migrations.AlterField(
model_name='adchannel',
name='ad_formats',
field=models.ManyToManyField(
to='adverts.AdFormat', help_text='size and shape of ad'
),
),
migrations.AlterField(
model_name='advert',
name='ad_channels',
field=models.ManyToManyField(
blank=True,
to='adverts.AdChannel',
help_text='Where to show the ad'
),
),
]
|
StarcoderdataPython
|