import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
import datetime as dt
from flask import Flask, jsonify
engine = create_engine("sqlite:///Resources/hawaii.sqlite")
Base = automap_base()
Base.prepare(engine, reflect=True)
Measurement = Base.classes.measurement
Station = Base.classes.station
app = Flask(__name__)
@app.route("/")
def welcome():
"""List all available api routes."""
return (
f"Available Routes:<br/>"
f"/api/v1.0/precipitation<br/>"
f"/api/v1.0/stations<br/>"
f"/api/v1.0/tobs<br/>"
f"/api/v1.0/<start_date><br/>"
f"/api/v1.0/<start_date>/<end_date><br/>"
)
@app.route("/api/v1.0/precipitation")
def precipitation():
session = Session(engine)
first_date = session.query(Measurement.date).order_by(Measurement.date).first()
last_date = session.query(Measurement.date).order_by(Measurement.date.desc()).first()
year_ago = dt.date(2017, 8, 23) - dt.timedelta(days=365)
date_prcp = session.query(Measurement.date, func.max(Measurement.prcp)).group_by(Measurement.date).\
filter(Measurement.date > year_ago)
session.close()
# Create a dictionary from the row data and append to a list of all_passengers
all_dates = []
for date, prcp in date_prcp:
dict = {}
dict["date"] = date
dict["prcp"] = prcp
all_dates.append(dict)
return jsonify(all_dates)
@app.route("/api/v1.0/stations")
def stations():
session = Session(engine)
stations = session.query(Station.id, Station.station, Station.name, Station.latitude, Station.longitude, Station.elevation).all()
all_stations = []
for id, station, name, latitude, longitude, elevation in stations:
dict = {}
dict["id"] = id
dict["station"] = station
dict["name"] = name
dict["latitude"] = latitude
dict["longitude"] = longitude
dict["elevation"] = elevation
all_stations.append(dict)
return jsonify(all_stations)
@app.route("/api/v1.0/tobs")
def tobs():
session = Session(engine)
year_ago = dt.date(2017, 8, 18) - dt.timedelta(days=365)
most_active = session.query(Measurement.date, Measurement.tobs).\
filter(Measurement.station == 'USC00519281').\
filter(Measurement.date > year_ago).all()
most_active_tobs = []
for date, tobs in most_active:
dict = {}
dict['date'] = date
dict['tobs'] = tobs
most_active_tobs.append(dict)
return jsonify(most_active_tobs)
@app.route("/api/v1.0/<start_date>")
def calc_temps_start(start_date):
tobs_stats = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\
filter(Measurement.date >= start_date).all()
tobs_stats_all = []
for min, avg, max in tobs_stats:
dict = {}
dict['min'] = min
dict['avg'] = avg
dict['max'] = max
tobs_stats_all.append(dict)
return jsonify(tobs_stats_all)
@app.route("/api/v1.0/<start_date>/<end_date>")
def calc_temps_start_end(start_date, end_date):
tobs_stats = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\
filter(Measurement.date >= start_date).filter(Measurement.date <= end_date).all()
tobs_stats_all = []
for min, avg, max in tobs_stats:
dict = {}
dict['min'] = min
dict['avg'] = avg
dict['max'] = max
tobs_stats_all.append(dict)
return jsonify(tobs_stats_all)
if __name__ == '__main__':
    app.run(debug=True)
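
# Example request against the precipitation route (assumption: the app is
# running locally on Flask's default port; the response values are illustrative):
#   $ curl http://127.0.0.1:5000/api/v1.0/precipitation
#   [{"date": "2016-08-24", "prcp": 2.15}, ...]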
"sqlalchemy.func.avg",
"flask.Flask",
"datetime.date",
"sqlalchemy.orm.Session",
"flask.jsonify",
"sqlalchemy.func.min",
"datetime.timedelta",
"sqlalchemy.create_engine",
"sqlalchemy.ext.automap.automap_base",
"sqlalchemy.func.max"
] | [((230, 280), 'sqlalchemy.create_engine', 'create_engine', (['"""sqlite:///Resources/hawaii.sqlite"""'], {}), "('sqlite:///Resources/hawaii.sqlite')\n", (243, 280), False, 'from sqlalchemy import create_engine, func\n'), ((288, 302), 'sqlalchemy.ext.automap.automap_base', 'automap_base', ([], {}), '()\n', (300, 302), False, 'from sqlalchemy.ext.automap import automap_base\n'), ((415, 430), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (420, 430), False, 'from flask import Flask, jsonify\n'), ((826, 841), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (833, 841), False, 'from sqlalchemy.orm import Session\n'), ((1496, 1514), 'flask.jsonify', 'jsonify', (['all_dates'], {}), '(all_dates)\n', (1503, 1514), False, 'from flask import Flask, jsonify\n'), ((1579, 1594), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (1586, 1594), False, 'from sqlalchemy.orm import Session\n'), ((2083, 2104), 'flask.jsonify', 'jsonify', (['all_stations'], {}), '(all_stations)\n', (2090, 2104), False, 'from flask import Flask, jsonify\n'), ((2161, 2176), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (2168, 2176), False, 'from sqlalchemy.orm import Session\n'), ((2597, 2622), 'flask.jsonify', 'jsonify', (['most_active_tobs'], {}), '(most_active_tobs)\n', (2604, 2622), False, 'from flask import Flask, jsonify\n'), ((3069, 3092), 'flask.jsonify', 'jsonify', (['tobs_stats_all'], {}), '(tobs_stats_all)\n', (3076, 3092), False, 'from flask import Flask, jsonify\n'), ((3602, 3625), 'flask.jsonify', 'jsonify', (['tobs_stats_all'], {}), '(tobs_stats_all)\n', (3609, 3625), False, 'from flask import Flask, jsonify\n'), ((1031, 1051), 'datetime.date', 'dt.date', (['(2017)', '(8)', '(23)'], {}), '(2017, 8, 23)\n', (1038, 1051), True, 'import datetime as dt\n'), ((1054, 1076), 'datetime.timedelta', 'dt.timedelta', ([], {'days': '(365)'}), '(days=365)\n', (1066, 1076), True, 'import datetime as dt\n'), ((2192, 2212), 'datetime.date', 'dt.date', (['(2017)', '(8)', '(18)'], {}), '(2017, 8, 18)\n', (2199, 2212), True, 'import datetime as dt\n'), ((2215, 2237), 'datetime.timedelta', 'dt.timedelta', ([], {'days': '(365)'}), '(days=365)\n', (2227, 2237), True, 'import datetime as dt\n'), ((1125, 1151), 'sqlalchemy.func.max', 'func.max', (['Measurement.prcp'], {}), '(Measurement.prcp)\n', (1133, 1151), False, 'from sqlalchemy import create_engine, func\n'), ((2726, 2752), 'sqlalchemy.func.min', 'func.min', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (2734, 2752), False, 'from sqlalchemy import create_engine, func\n'), ((2754, 2780), 'sqlalchemy.func.avg', 'func.avg', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (2762, 2780), False, 'from sqlalchemy import create_engine, func\n'), ((2782, 2808), 'sqlalchemy.func.max', 'func.max', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (2790, 2808), False, 'from sqlalchemy import create_engine, func\n'), ((3222, 3248), 'sqlalchemy.func.min', 'func.min', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (3230, 3248), False, 'from sqlalchemy import create_engine, func\n'), ((3250, 3276), 'sqlalchemy.func.avg', 'func.avg', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (3258, 3276), False, 'from sqlalchemy import create_engine, func\n'), ((3278, 3304), 'sqlalchemy.func.max', 'func.max', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (3286, 3304), False, 'from sqlalchemy import create_engine, func\n')] |
from django.shortcuts import render, redirect
from django.views.generic import TemplateView, ListView, CreateView
from django.core.files.storage import FileSystemStorage
from django.urls import reverse_lazy
from django.core.files import File
from .forms import BookForm
from .models import Book
# Load libraries
import pandas as pd
from sklearn.tree import DecisionTreeClassifier # Import Decision Tree Classifier
from sklearn.model_selection import train_test_split # Import train_test_split function
from sklearn import metrics #Import scikit-learn metrics module for accuracy calculation
from sklearn.tree import export_graphviz
from six import StringIO
from IPython.display import Image
import os
import pydotplus
BASE_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir))


class Home(TemplateView):
    template_name = 'home.html'
def upload(request):
    context = {}
    if request.method == 'POST':
        uploaded_file = request.FILES['document']
        image_url = "Tree_of_" + str(os.path.splitext(uploaded_file.name)[0]) + ".png"
        # Load the uploaded CSV and rename its last column to a fixed target name
        pima = pd.read_csv(uploaded_file, header=0)
        dataset_cols_name = pima.columns.values.tolist()
        transet_cols_name = dataset_cols_name[:len(dataset_cols_name) - 1]
        transet_cols_name.append("decisionCol")
        pima.columns = transet_cols_name
        # Split dataset into features and target variable
        feature_cols = transet_cols_name[:len(transet_cols_name) - 1]
        X = pima[feature_cols]  # Features
        y = pima.decisionCol    # Target variable
        # Split dataset into training set and test set (70% training, 30% test)
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=1)
        # Create and train the Decision Tree classifier
        clf = DecisionTreeClassifier()
        clf = clf.fit(X_train, y_train)
        # Predict the response for the test dataset
        y_pred = clf.predict(X_test)
        # Model accuracy: how often is the classifier correct?
        print("Accuracy:", metrics.accuracy_score(y_test, y_pred))
        # Render the trained tree to a PNG via Graphviz
        dot_data = StringIO()
        export_graphviz(clf, out_file=dot_data,
                        filled=True, rounded=True,
                        special_characters=True, feature_names=feature_cols,
                        class_names=['0', '1'])
        graph = pydotplus.graph_from_dot_data(dot_data.getvalue())
        graph.write_png(image_url)
        Image(graph.create_png())
        fs = FileSystemStorage()
        # The generated PNG lands next to the project directory; os.path.join
        # keeps this portable instead of hard-coding Windows separators
        path_to_generated_image = os.path.join(BASE_DIR, "mining-assignment-master", image_url)
        with open(path_to_generated_image, "rb") as f:
            django_file = File(f)
            # fs.save may rename on collision, so keep the name it actually used
            saved_name = fs.save(image_url, django_file)
        print(path_to_generated_image)
        context['image_name'] = saved_name
    return render(request, 'upload.html', context)
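
# Hypothetical URLconf wiring for the view above (names are illustrative,
# not taken from the original project):
#   from django.urls import path
#   from . import views
#   urlpatterns = [path('upload/', views.upload, name='upload')]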
#!/usr/bin/python36
print("content-type: text/html")
print("")
import cgi
import subprocess as sp
form = cgi.FieldStorage()
user_name = form.getvalue('user_name')
lv_size = form.getvalue('lv_size')
print(user_name)
print(lv_size)
output=sp.getstatusoutput("sudo ansible-playbook ostaas.yml --extra-vars='user_name={u} lv_size={l}'".format(u=user_name, l=lv_size))
if output[0] == 0:
    print("<b>NFS-server successfully created</b>")
    client_mount = sp.getstatusoutput("sudo ansible-playbook ostaasclient.yml --extra-vars='user_name={u} lv_size={l}'".format(u=user_name, l=lv_size))
    if client_mount[0] == 0:
        print("<b>Enjoy free cloud storage..</b>")
else:
    print("Sorry, we're facing a technical issue. Please visit again after some time.")
import tensorflow as tf
from tensorflow.python.keras.preprocessing import image as kp_image
# Keras is only used to load VGG19 model as a high level API to TensorFlow
from keras.applications.vgg19 import VGG19
from keras.models import Model
from keras import backend as K
# pillow is used for loading and saving images
from PIL import Image
# numPy is used for manipulation of array of object i.e Image in our case
import numpy as np
##
##
##
# list of layers to be considered for calculation of Content and Style Loss
content_layers = ['block3_conv3']
style_layers = ['block1_conv1','block2_conv2','block4_conv3']
num_content_layers = len(content_layers)
num_style_layers = len(style_layers)
# path where the content and style images are located
content_path = 'content.jpg'
style_path = 'style.jpg'
# Save the result as
save_name = 'generated.jpg'
# path to where Vgg19 model weight is located
vgg_weights = "vgg_weights/vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5"
############################################################################################################
############################################################################################################
# UTILS
############################################################################################################
############################################################################################################
def load_img(path_to_img):
    max_dim = 512
    img = Image.open(path_to_img)
    img_size = max(img.size)
    scale = max_dim / img_size
    # Note: Image.ANTIALIAS was removed in Pillow 10; Image.LANCZOS is the
    # equivalent filter on newer versions
    img = img.resize((round(img.size[0] * scale), round(img.size[1] * scale)), Image.ANTIALIAS)
    img = kp_image.img_to_array(img)
    # We need to broadcast the image array such that it has a batch dimension
    img = np.expand_dims(img, axis=0)
    # Preprocess the raw image to make it suitable for the VGG19 model
    out = tf.keras.applications.vgg19.preprocess_input(img)
    return tf.convert_to_tensor(out)


def deprocess_img(processed_img):
    x = processed_img.copy()
    # Perform the inverse of the preprocessing step (add back the channel
    # means, then flip BGR back to RGB)
    x[:, :, 0] += 103.939
    x[:, :, 1] += 116.779
    x[:, :, 2] += 123.68
    x = x[:, :, ::-1]
    x = np.clip(x, 0, 255).astype('uint8')
    return x
############################################################################################################
############################################################################################################
# Loss Function
############################################################################################################
############################################################################################################
### Content Loss Function
def get_content_loss(content, target):
    return tf.reduce_mean(tf.square(content - target)) / 2

### Style Loss Function
def gram_matrix(input_tensor):
    # If the input tensor is a 3D array of size Nh x Nw x Nc,
    # reshape it to a 2D array of shape (Nh*Nw) x Nc
    channels = int(input_tensor.shape[-1])
    a = tf.reshape(input_tensor, [-1, channels])
    # Gram matrix: a^T a, of shape Nc x Nc
    gram = tf.matmul(a, a, transpose_a=True)
    return gram
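
# Sketch of the shapes involved (illustrative sizes):
#   input_tensor: (Nh, Nw, Nc) = (32, 32, 64)
#   a = reshape  -> (Nh*Nw, Nc) = (1024, 64)
#   gram = a^T a -> (Nc, Nc)    = (64, 64)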
def get_style_loss(base_style, gram_target):
    height, width, channels = base_style.get_shape().as_list()
    gram_style = gram_matrix(base_style)
    # The original paper divides by 4 * (channels**2) * (width*height)**2;
    # the constant factor only rescales the effective style weight
    return tf.reduce_mean(tf.square(gram_style - gram_target)) / (channels**2 * width * height)

### Pass the content and style images through the model
def get_feature_representations(model, content_path, style_path, num_content_layers):
    # Load our images in
    content_image = load_img(content_path)
    style_image = load_img(style_path)
    # Batch compute content and style features
    content_outputs = model(content_image)
    style_outputs = model(style_image)
    # Get the style and content feature representations from our model
    style_features = [style_layer[0] for style_layer in style_outputs[num_content_layers:]]
    content_features = [content_layer[0] for content_layer in content_outputs[:num_content_layers]]
    return style_features, content_features
### Total Loss
def compute_loss(model, loss_weights, generated_output_activations, gram_style_features, content_features, num_content_layers, num_style_layers):
    generated_content_activations = generated_output_activations[:num_content_layers]
    generated_style_activations = generated_output_activations[num_content_layers:]
    style_weight, content_weight = loss_weights
    style_score = 0
    content_score = 0
    # Accumulate style losses from all layers; each loss layer contributes
    # with equal weight
    weight_per_style_layer = 1.0 / float(num_style_layers)
    for target_style, comb_style in zip(gram_style_features, generated_style_activations):
        temp = get_style_loss(comb_style[0], target_style)
        style_score += weight_per_style_layer * temp
    # Accumulate content losses from all layers
    weight_per_content_layer = 1.0 / float(num_content_layers)
    for target_content, comb_content in zip(content_features, generated_content_activations):
        temp = get_content_loss(comb_content[0], target_content)
        content_score += weight_per_content_layer * temp
    # Get total loss
    loss = style_weight * style_score + content_weight * content_score
    return loss, style_score, content_score
############################################################################################################
############################################################################################################
# CREATE STYLE TRANFER
############################################################################################################
############################################################################################################
# Using Keras, load the VGG19 model
def get_model(content_layers, style_layers):
    # Load our model: pretrained VGG19 (trained on ImageNet) without the
    # classification head
    vgg19 = VGG19(weights=None, include_top=False)
    # We don't need to (or want to) train any layers of the pre-trained VGG
    # model, so we set trainable to False
    vgg19.trainable = False
    style_model_outputs = [vgg19.get_layer(name).output for name in style_layers]
    content_model_outputs = [vgg19.get_layer(name).output for name in content_layers]
    model_outputs = content_model_outputs + style_model_outputs
    # Build a model that exposes the chosen intermediate activations
    return Model(inputs=vgg19.input, outputs=model_outputs), vgg19
def run_style_transfer(content_path, style_path, num_iterations=200, content_weight=0.1, style_weight=0.9):
    # Create a TensorFlow session and assign it to the Keras back-end
    sess = tf.Session()
    K.set_session(sess)

    model, vgg19 = get_model(content_layers, style_layers)

    # Get the style and content feature representations (from our specified intermediate layers)
    style_features, content_features = get_feature_representations(model, content_path, style_path, num_content_layers)
    gram_style_features = [gram_matrix(style_feature) for style_feature in style_features]

    # VGG default normalization bounds
    norm_means = np.array([103.939, 116.779, 123.68])
    min_vals = -norm_means
    max_vals = 255 - norm_means

    # In the original paper the initial stylized image is a random matrix of the
    # same size as the content image, but later work initializes from the content
    # image instead because it proved to stylize faster
    generated_image = load_img(content_path)
    # generated_image = np.random.randint(0, 255, size=generated_image.shape)

    # TensorFlow variable that holds the stylized/generated image during training
    generated_image = tf.Variable(generated_image, dtype=tf.float32)
    model_outputs = model(generated_image)

    # Weights of the content and style terms, i.e. alpha & beta
    loss_weights = (style_weight, content_weight)

    # Create our optimizer
    loss = compute_loss(model, loss_weights, model_outputs, gram_style_features, content_features, num_content_layers, num_style_layers)
    opt = tf.train.AdamOptimizer(learning_rate=9, beta1=0.9, epsilon=1e-1).minimize(loss[0], var_list=[generated_image])

    sess.run(tf.global_variables_initializer())
    sess.run(generated_image.initializer)
    # Load the weights again because tf.global_variables_initializer() resets them
    vgg19.load_weights(vgg_weights)

    # Put loss at infinity before training starts and track the best image
    # (i.e. the image with minimum loss) seen so far
    best_loss, best_img = float('inf'), None
    for i in range(num_iterations):
        # Do one optimization step
        sess.run(opt)
        # Keep image values within the min/max range of the VGG normalization;
        # the assign op must actually be run in the session to take effect
        # (the original called .assign() without executing it)
        clipped = tf.clip_by_value(generated_image, min_vals, max_vals)
        sess.run(generated_image.assign(clipped))
        # Unpack the tuple of loss tensors
        total_loss, style_score, content_score = loss
        total_loss = total_loss.eval(session=sess)
        if total_loss < best_loss:
            # Update best loss and best image from total loss
            best_loss = total_loss
            # The generated image has shape (1, h, w, 3); convert it to (h, w, 3)
            temp_generated_image = sess.run(generated_image)[0]
            best_img = deprocess_img(temp_generated_image)
            s_loss = sess.run(style_score)
            c_loss = sess.run(content_score)
            # Print best loss so far
            print('best: iteration: ', i, 'loss: ', total_loss, ' style_loss: ', s_loss, ' content_loss: ', c_loss)
        # Save an image after every 100 iterations
        if (i + 1) % 100 == 0:
            output = Image.fromarray(best_img)
            output.save(str(i + 1) + '-' + save_name)
    # After num_iterations iterations are completed, close the TF session
    sess.close()
    return best_img, best_loss
best, best_loss = run_style_transfer(content_path, style_path)
"tensorflow.clip_by_value",
"tensorflow.reshape",
"keras.models.Model",
"numpy.clip",
"tensorflow.matmul",
"tensorflow.Variable",
"tensorflow.global_variables_initializer",
"keras.backend.set_session",
"tensorflow.Session",
"keras.applications.vgg19.VGG19",
"tensorflow.keras.applications.vgg19.preprocess_input",
"tensorflow.python.keras.preprocessing.image.img_to_array",
"tensorflow.convert_to_tensor",
"numpy.expand_dims",
"PIL.Image.open",
"tensorflow.shape",
"numpy.array",
"tensorflow.square",
"PIL.Image.fromarray",
"tensorflow.train.AdamOptimizer"
] | [((1538, 1561), 'PIL.Image.open', 'Image.open', (['path_to_img'], {}), '(path_to_img)\n', (1548, 1561), False, 'from PIL import Image\n'), ((1728, 1754), 'tensorflow.python.keras.preprocessing.image.img_to_array', 'kp_image.img_to_array', (['img'], {}), '(img)\n', (1749, 1754), True, 'from tensorflow.python.keras.preprocessing import image as kp_image\n'), ((1841, 1868), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(0)'}), '(img, axis=0)\n', (1855, 1868), True, 'import numpy as np\n'), ((1950, 1999), 'tensorflow.keras.applications.vgg19.preprocess_input', 'tf.keras.applications.vgg19.preprocess_input', (['img'], {}), '(img)\n', (1994, 1999), True, 'import tensorflow as tf\n'), ((2010, 2035), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['out'], {}), '(out)\n', (2030, 2035), True, 'import tensorflow as tf\n'), ((3120, 3160), 'tensorflow.reshape', 'tf.reshape', (['input_tensor', '[-1, channels]'], {}), '(input_tensor, [-1, channels])\n', (3130, 3160), True, 'import tensorflow as tf\n'), ((3213, 3246), 'tensorflow.matmul', 'tf.matmul', (['a', 'a'], {'transpose_a': '(True)'}), '(a, a, transpose_a=True)\n', (3222, 3246), True, 'import tensorflow as tf\n'), ((6230, 6268), 'keras.applications.vgg19.VGG19', 'VGG19', ([], {'weights': 'None', 'include_top': '(False)'}), '(weights=None, include_top=False)\n', (6235, 6268), False, 'from keras.applications.vgg19 import VGG19\n'), ((6888, 6900), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (6898, 6900), True, 'import tensorflow as tf\n'), ((6965, 6984), 'keras.backend.set_session', 'K.set_session', (['sess'], {}), '(sess)\n', (6978, 6984), True, 'from keras import backend as K\n'), ((7392, 7428), 'numpy.array', 'np.array', (['[103.939, 116.779, 123.68]'], {}), '([103.939, 116.779, 123.68])\n', (7400, 7428), True, 'import numpy as np\n'), ((7970, 8016), 'tensorflow.Variable', 'tf.Variable', (['generated_image'], {'dtype': 'tf.float32'}), '(generated_image, dtype=tf.float32)\n', (7981, 8016), True, 'import tensorflow as tf\n'), ((3167, 3178), 'tensorflow.shape', 'tf.shape', (['a'], {}), '(a)\n', (3175, 3178), True, 'import tensorflow as tf\n'), ((6673, 6721), 'keras.models.Model', 'Model', ([], {'inputs': 'vgg19.input', 'outputs': 'model_outputs'}), '(inputs=vgg19.input, outputs=model_outputs)\n', (6678, 6721), False, 'from keras.models import Model\n'), ((8468, 8501), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (8499, 8501), True, 'import tensorflow as tf\n'), ((9004, 9057), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['generated_image', 'min_vals', 'max_vals'], {}), '(generated_image, min_vals, max_vals)\n', (9020, 9057), True, 'import tensorflow as tf\n'), ((2251, 2269), 'numpy.clip', 'np.clip', (['x', '(0)', '(255)'], {}), '(x, 0, 255)\n', (2258, 2269), True, 'import numpy as np\n'), ((2880, 2907), 'tensorflow.square', 'tf.square', (['(content - target)'], {}), '(content - target)\n', (2889, 2907), True, 'import tensorflow as tf\n'), ((3532, 3567), 'tensorflow.square', 'tf.square', (['(gram_style - gram_target)'], {}), '(gram_style - gram_target)\n', (3541, 3567), True, 'import tensorflow as tf\n'), ((8342, 8405), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': '(9)', 'beta1': '(0.9)', 'epsilon': '(0.1)'}), '(learning_rate=9, beta1=0.9, epsilon=0.1)\n', (8364, 8405), True, 'import tensorflow as tf\n'), ((9890, 9915), 'PIL.Image.fromarray', 'Image.fromarray', (['best_img'], {}), '(best_img)\n', (9905, 9915), False, 'from 
PIL import Image\n')] |
from __future__ import annotations
import tkinter as tk
from typing import Callable
from src.graphics import utils
from src.graphics.interfaces import Panel
from src.timer import Clock
class PanelStyle:
    """ This class holds the immutable style properties of the TextPanel """

    def __init__(self, width: int, height: int, bg_colour: str, font: str):
        self.width: int = width
        self.height: int = height
        self.bg_colour: str = bg_colour
        self.font: str = font
class TextPanel(Panel):
    """
    Represents a canvas containing one or more texts with, possibly, some variable styles and content.
    """

    def __init__(self, root: tk.Tk, clock: Clock, style: PanelStyle, var_callback: Callable[[Clock], str]):
        self.root: tk.Tk = root
        self.clock: Clock = clock
        self.style: PanelStyle = style
        self.var_callback: Callable = var_callback
        self.style_var = tk.StringVar(self.root, self.var_callback(self.clock))

    def draw(self) -> None:
        canvas = tk.Canvas(self.root, width=self.style.width, height=self.style.height,
                           bg=self.style.bg_colour, highlightthickness=0)
        # The callback value is encoded as "text:colour"
        text_id = canvas.create_text(self.style.width / 2, self.style.height / 2,
                                     anchor=tk.CENTER,
                                     text=self.style_var.get().split(":")[0],
                                     fill=self.style_var.get().split(":")[1],
                                     font=self.style.font)
        canvas.pack()

        def on_change(varname, index, mode):
            """
            The signature of the method must stay as is to work properly with tkinter.
            It also seems I can't move it from here to a more sensible place.
            """
            canvas.itemconfigure(text_id,
                                 text=self.style_var.get().split(":")[0],
                                 fill=self.style_var.get().split(":")[1])

        self.style_var.trace_add('write', on_change)

    def tick(self) -> None:
        self.style_var.set(self.var_callback(self.clock))
class ClockPanel(Panel):
    """
    Represents the canvas containing the clock with the hours and the circle.
    """

    def __init__(self, root: tk.Tk, clock: Clock, style: PanelStyle, var_callback: Callable[[Clock], str]):
        self.root: tk.Tk = root
        self.clock: Clock = clock
        self.style: PanelStyle = style
        self.var_callback: Callable = var_callback
        self.style_var = tk.StringVar(self.root, self.var_callback(self.clock))

    def draw(self) -> None:
        canvas = tk.Canvas(self.root, width=self.style.width, height=self.style.height,
                           bg=self.style.bg_colour, highlightthickness=0)
        # The callback value is encoded as "hour:minutes"
        text_id = canvas.create_text(self.style.width / 2, self.style.height / 2,
                                     anchor=tk.CENTER,
                                     text=self.style_var.get().split(":")[0],
                                     fill='white',
                                     font=self.style.font)
        # Outer white circle, plus a red arc whose extent tracks the minutes
        utils.draw_circle(canvas, self.style.width // 2, self.style.height // 2, self.style.width // 3,
                          outline='white',
                          width=8)
        arc_id = utils.draw_circle(canvas, self.style.width // 2, self.style.height // 2, self.style.width // 3,
                                  outline='red',
                                  width=6,
                                  extent=-1 * int(self.style_var.get().split(":")[1]) * 6)
        canvas.pack()

        def on_change(varname, index, mode):
            """
            The signature of the method must stay as is to work properly with tkinter.
            It also seems I can't move it from here to a more sensible place.
            """
            hour = self.style_var.get().split(":")[0]
            canvas.itemconfigure(text_id, text=hour)
            minutes = int(self.style_var.get().split(":")[1])
            extent = utils.calc_arc_extent(self.clock.day, self.clock.hour, minutes)
            canvas.itemconfigure(arc_id, extent=extent)

        self.style_var.trace_add('write', on_change)

    def tick(self) -> None:
        self.style_var.set(self.var_callback(self.clock))
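
# Minimal wiring sketch (assumes a Clock instance built elsewhere; the style
# values and the callback below are illustrative, not from the project):
#   root = tk.Tk()
#   style = PanelStyle(200, 200, 'black', 'Helvetica 24')
#   panel = ClockPanel(root, clock, style, lambda c: f"{c.hour}:{c.minute}")
#   panel.draw()
#   root.mainloop()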
import dynet as dy
import numpy as np
import moire
from moire import Expression
__all__ = [
    'zeros', 'ones', 'full', 'normal', 'bernoulli', 'uniform', 'gumbel',
    'zeros_like', 'ones_like', 'full_like', 'normal_like', 'bernoulli_like', 'uniform_like', 'gumbel_like',
    'eye', 'diagonal',
    'where',
]
def zeros(*dim, batch_size: int = 1) -> Expression:
    a = np.zeros((*dim, batch_size), dtype=np.float32)
    return dy.inputTensor(a, batched=True, device=moire.config.device)


def zeros_like(x: Expression) -> Expression:
    dim, batch_size = x.dim()
    return zeros(*dim, batch_size=batch_size)


def ones(*dim, batch_size: int = 1) -> Expression:
    a = np.ones((*dim, batch_size), dtype=np.float32)
    return dy.inputTensor(a, batched=True, device=moire.config.device)


def ones_like(x: Expression) -> Expression:
    dim, batch_size = x.dim()
    return ones(*dim, batch_size=batch_size)
def eye(N: int, M: int = None, k: int = 0) -> Expression:
    return dy.inputTensor(np.eye(N, M, k), batched=False, device=moire.config.device)


def diagonal(x: Expression) -> Expression:
    (dim0, dim1), batch_size = x.dim()
    return dy.cmult(x, eye(dim0, dim1))
def full(*dim, value, batch_size: int = 1) -> Expression:
    a = np.full((*dim, batch_size), fill_value=value, dtype=np.float32)
    return dy.inputTensor(a, batched=True, device=moire.config.device)


def full_like(x: Expression, value) -> Expression:
    dim, batch_size = x.dim()
    return full(*dim, value=value, batch_size=batch_size)


def normal(*dim, mean: float = 0.0, stddev: float = 1.0, batch_size: int = 1) -> Expression:
    a = np.random.normal(loc=mean, scale=stddev, size=(*dim, batch_size)).astype(np.float32)
    return dy.inputTensor(a, batched=True, device=moire.config.device)


def normal_like(x: Expression, mean: float = 0.0, stddev: float = 1.0) -> Expression:
    dim, batch_size = x.dim()
    return normal(*dim, mean=mean, stddev=stddev, batch_size=batch_size)
def bernoulli(*dim, p: float, batch_size: int = 1) -> Expression:
    a = np.random.uniform(low=0, high=1.0, size=(*dim, batch_size)) < p
    return dy.inputTensor(a.astype(np.int32), batched=True, device=moire.config.device)


def bernoulli_like(x: Expression, p: float) -> Expression:
    dim, batch_size = x.dim()
    return bernoulli(*dim, p=p, batch_size=batch_size)


def uniform(*dim, low: float, high: float, batch_size: int = 1) -> Expression:
    a = np.random.uniform(low=low, high=high, size=(*dim, batch_size))
    return dy.inputTensor(a, batched=True, device=moire.config.device)


def uniform_like(x: Expression, low: float, high: float) -> Expression:
    dim, batch_size = x.dim()
    # `dim` must be unpacked here, matching the other *_like helpers
    # (the original passed the tuple as a single positional argument)
    return uniform(*dim, low=low, high=high, batch_size=batch_size)
def gumbel(*dim, mu: float = 0.0, beta: float = 1.0, batch_size: int = 1) -> Expression:
    a = np.random.gumbel(loc=mu, scale=beta, size=(*dim, batch_size))
    return dy.inputTensor(a, batched=True, device=moire.config.device)


def gumbel_like(x: Expression, mu: float = 0.0, beta: float = 1.0) -> Expression:
    dim, batch_size = x.dim()
    return gumbel(*dim, mu=mu, beta=beta, batch_size=batch_size)


def where(cond: Expression, x: Expression, y: Expression) -> Expression:
    # Element-wise select: takes x where cond == 1 and y where cond == 0
    return dy.cmult(cond, x) + dy.cmult(1.0 - cond, y)
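
# Usage sketch (illustrative): blend two expressions with a 0/1 mask
#   mask = bernoulli(3, p=0.5)
#   z = where(mask, x, y)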
if __name__ == '__main__':
    a = dy.inputTensor([[1, 2, 3], [2, 3, 4]])
    moire.debug(f'a :: {a.dim()} => {a.value()}')

    b = diagonal(a)
    moire.debug(f'b :: {b.dim()} => {b.value()}')
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List
import torch
from nemo.core.classes import Loss, typecheck
from nemo.core.neural_types import LossType, NeuralType
__all__ = ['AggregatorLoss']
class AggregatorLoss(Loss):
    """
    Sums several losses into one.

    Args:
        num_inputs: number of input losses
        weights: a list of coefficients for merging the losses
    """

    @property
    def input_types(self):
        """Returns definitions of module input ports."""
        input_types = {}
        for i in range(self._num_losses):
            input_types["loss_" + str(i + 1)] = NeuralType(elements_type=LossType())
        return input_types

    @property
    def output_types(self):
        """Returns definitions of module output ports."""
        return {"loss": NeuralType(elements_type=LossType())}

    def __init__(self, num_inputs: int = 2, weights: List[float] = None):
        super().__init__()
        self._num_losses = num_inputs
        if weights is not None and len(weights) != num_inputs:
            raise ValueError("Length of weights should be equal to the number of inputs (num_inputs)")
        self._weights = weights

    @typecheck()
    def forward(self, **kwargs):
        values = [kwargs[x] for x in sorted(kwargs.keys())]
        loss = torch.zeros_like(values[0])
        for loss_idx, loss_value in enumerate(values):
            if self._weights is not None:
                loss = loss.add(loss_value, alpha=self._weights[loss_idx])
            else:
                loss = loss.add(loss_value)
        return loss
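
# Minimal usage sketch (assumes two scalar loss tensors computed elsewhere;
# the keyword names follow the input_types defined above):
#   agg = AggregatorLoss(num_inputs=2, weights=[0.7, 0.3])
#   total = agg(loss_1=ce_loss, loss_2=mse_loss)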
import urllib.request
from bs4 import BeautifulSoup
def get_go():
    url = "https://www.mohfw.gov.in/"
    uClient = urllib.request.urlopen(url)
    page_html = uClient.read()
    uClient.close()
    page_soup = BeautifulSoup(page_html, "html.parser")
    news = page_soup.find_all('div', class_='update-box')
    newz = []
    for new in news:
        newz.append([new.strong.text, new.a.text.strip(), new.a['href']])
    return newz
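
# Each returned entry is a [date, title, link] triple scraped from an
# "update-box" div (field order inferred from the parsing above).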
"bs4.BeautifulSoup"
] | [((217, 256), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page_html', '"""html.parser"""'], {}), "(page_html, 'html.parser')\n", (230, 256), False, 'from bs4 import BeautifulSoup\n')] |
# -*- coding: utf-8 -*-
import glob
import os
import re
import sys
import logging
from boto3.session import Session
from botocore.exceptions import ClientError
from argparse import ArgumentParser
TEMPLATES = [
    "/scripts/ec2.yml",
]
logger = logging.getLogger()
formatter = '%(levelname)s : %(asctime)s : %(message)s'
logging.basicConfig(level=logging.INFO, format=formatter)
class DeployStack:
    # Option parser.
    def get_option(self):
        usage = "python " + sys.argv[0] + " [-h | --help] [-a | --app <APP_NAME>] [-p | --profile <AWS_PROFILE>] [-r | --region <AWS_REGION>]"
        argparser = ArgumentParser(usage=usage)
        argparser.add_argument("-a", "--app", type=str,
                               default="snatch",
                               help="Target app name.")
        argparser.add_argument("-p", "--profile", type=str,
                               default="default",
                               help="Profile name from ~/.aws/config.")
        argparser.add_argument("-r", "--region", type=str,
                               default="ap-northeast-1",
                               help="AWS region, e.g. ap-northeast-1, us-east-1, ...")
        return argparser.parse_args()
    # Update CFn stacks.
    def update_stack(self, stack_name, cfn, input):
        w = cfn.get_waiter("stack_update_complete")
        try:
            cfn.update_stack(**input)
            logger.info("Update %s.", stack_name)
            w.wait(
                StackName=stack_name,
            )
            return logger.info("Update %s complete.", stack_name)
        except ClientError as e:
            return logger.warning("%s", e.response["Error"]["Message"])

    # Create CFn stacks.
    def create_stack(self, stack_name, cfn, input):
        w = cfn.get_waiter("stack_create_complete")
        try:
            cfn.create_stack(**input)
            logger.info("Create %s.", stack_name)
            w.wait(
                StackName=stack_name,
            )
            return logger.info("Create %s complete.", stack_name)
        except ClientError as e:
            if e.response["Error"]["Code"] == "AlreadyExistsException":
                self.update_stack(stack_name, cfn, input)
                return
            else:
                return logger.warning("%s", e.response["Error"]["Message"])
    # Validate CFn template.
    def valid_template(self, template, body, cfn):
        logger.info("Validate checks %s", template)
        try:
            cfn.validate_template(
                TemplateBody=body,
            )
            return logger.info("%s is validation OK.", template)
        except ClientError as e:
            return logger.warning("%s", e.response["Error"]["Message"])
    # Create EC2 keypair.
    # The private key is written under ~/.ssh/ (file permission: 0600).
    def create_keypair(self, app_name, session):
        logger.info("Create %s KeyPair.", app_name)
        ec2 = session.client("ec2")
        try:
            ec2.describe_key_pairs(
                KeyNames=[
                    app_name,
                ],
            )
            return logger.info("%s KeyPair already exists.", app_name)
        except ClientError as e:
            if e.response["Error"]["Code"] == "InvalidKeyPair.NotFound":
                res = ec2.create_key_pair(
                    KeyName=app_name,
                )
                private_key = res["KeyMaterial"]
                # Use a context manager so the key file is reliably closed
                # (the original called pem_file.close without parentheses)
                with open(os.environ["HOME"] + "/.ssh/" + app_name + ".pem", "w") as pem_file:
                    pem_file.write(private_key)
                os.chmod(os.environ["HOME"] + "/.ssh/" + app_name + ".pem", 0o600)
                return logger.info("%s KeyPair created.", app_name)
            else:
                return logger.warning("%s", e.response["Error"]["Message"])
    # Provision stacks.
    def provisiond(self, app_name, profile, region):
        session = Session(profile_name=profile, region_name=region)
        self.create_keypair(app_name, session)
        cfn = session.client("cloudformation")
        for t in TEMPLATES:
            path = os.getcwd() + t
            body = open(path).read()
            # e.g. "/scripts/ec2.yml" -> "<app_name>-scripts-ec2"
            stack_name = app_name + "-" + re.sub(r'/(.*)/(.*)\.yml', r'\1-\2', t)
            self.valid_template(t, body, cfn)
            input = {
                "StackName": stack_name,
                "TemplateBody": body,
                "Capabilities": [
                    'CAPABILITY_NAMED_IAM',
                ],
                "Parameters": [
                    {
                        "ParameterKey": "AppName",
                        "ParameterValue": app_name,
                    },
                ],
            }
            try:
                self.create_stack(stack_name, cfn, input)
            except ClientError as e:
                logger.warning("%s", e.response["Error"]["Message"])
        return
    @staticmethod
    def main():
        logger.info("Start provision stacks.")
        self = DeployStack()
        options = self.get_option()
        app_name = options.app
        profile = options.profile
        region = options.region
        self.provisiond(app_name, profile, region)
        return logger.info("Finish provision stacks.")


if __name__ == '__main__':
    DeployStack.main()
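
# Example invocation (assumes AWS credentials for the given profile and the
# template files listed in TEMPLATES under the working directory; the script
# name below is illustrative):
#   $ python deploy_stack.py --app snatch --profile default --region ap-northeast-1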
import glob
from collections import namedtuple
import dateutil.parser
import numpy as np
import pandas as pd
import pymzml
import config
import lcms.utils as utils
def create_spectrum_and_peak_tables(msrun_list, experiment_id):
    '''
    Fills the Spectrum table and, for each spectrum, the Peak table.
    :param msrun_list: list of spectra read from a pymzml runner
    :param experiment_id: id of the experiment the spectra belong to
    :return:
    '''
    spectrum = namedtuple('spectrum',
                       'experiment_id '
                       'spectrum_id '
                       'total_ion_current '
                       'time_passed_since_start '
                       'ms_level '
                       'highest_observed_mz '
                       'lowest_observed_mz '
                       'scan_window_upper_limit '
                       'scan_window_lower_limit')
    measurement = namedtuple('measurement',
                          'experiment_id '
                          'spectrum_id '
                          'mz '
                          'intensity')
    spectrum_list = []
    measurement_list = []
    for i, spc in enumerate(msrun_list):
        if i % 100 == 0:
            print("Spectrum {}".format(i))
        s = spectrum(experiment_id=experiment_id, spectrum_id=spc.ID,
                     total_ion_current=spc['total ion current'],
                     time_passed_since_start=spc['scan start time'], ms_level=spc['ms level'],
                     highest_observed_mz=spc['highest observed m/z'],
                     lowest_observed_mz=spc['lowest observed m/z'],
                     scan_window_upper_limit=spc['scan window upper limit'],
                     scan_window_lower_limit=spc['scan window lower limit'])
        spectrum_list.append(s)
        if i == 0:
            m = measurement(experiment_id=experiment_id, spectrum_id=spc.ID, mz=np.nan,
                            intensity=np.nan)
        else:
            m = measurement(experiment_id=experiment_id, spectrum_id=spc.ID, mz=spc.mz,
                            intensity=spc.i)
        # Fill the Peak table only if the experiment_id + spectrum_id
        # combination does not already exist there
        check_peak = config.db_connection.execute(
            """SELECT experiment_id from "Peak"
               WHERE experiment_id = '{}'
               AND spectrum_id = '{}'"""
            .format(experiment_id, spc.ID)).fetchone()
        if check_peak is not None:
            print(
                ("Experiment_id {} + spectrum_id {} combination already exists in Peak table. " +
                 "To avoid duplicates the spectra won't be added to the Peak table").format(experiment_id, spc.ID))
        else:
            peak_table = pd.DataFrame({"mz": spc.mz, "intensity": spc.i})
            peak_table['experiment_id'] = experiment_id
            peak_table['spectrum_id'] = spc.ID
            peak_table.to_sql('Peak', con=config.db_connection, index=False, if_exists='append')
            print("Appended to Peak table from experiment_id: {}, spectrum_id: {}".format(experiment_id, spc.ID))
        measurement_list.append(m)
    # Check if the experiment_id already exists in the Spectrum table;
    # if not, append the collected spectra
    check_spectrum = config.db_connection.execute(
        """SELECT experiment_id from "Spectrum" WHERE experiment_id = '{}' """
        .format(experiment_id)).fetchone()
    if check_spectrum is not None:
        print(("Experiment_id {} already exists in Spectrum table. " +
               "To avoid duplicates the spectra won't be added to the Spectrum table")
              .format(experiment_id))
    else:
        spectrum_table = pd.DataFrame(spectrum_list)
        spectrum_table.to_sql('Spectrum', con=config.db_connection, index=False, if_exists='append')
        print("Appended to Spectrum table with info from experiment_id: {}".format(experiment_id))
def create_experiment(msrun_list, filename):
    """
    Create a new experiment structure based on the information in the msrun_list.
    :param msrun_list: an open pymzml runner
    :param filename: name of the pymzml file
    :return: a dictionary containing the initialized experiment
    """
    experiment = dict.fromkeys(['run_id', 'run_start_time', 'human_run_start_time', 'spectra_count',
                                'experimental_state_id', 'match_type_id', 'filename'])
    # Todo: the values below still need to be filled in
    experiment['run_id'] = msrun_list.info['run_id']
    if "start_time" in msrun_list.info.keys():
        start_time_str = msrun_list.info["start_time"]
        start_time = dateutil.parser.parse(start_time_str)
        experiment['run_start_time'] = start_time.timestamp()
        experiment['human_run_start_time'] = start_time
    else:
        experiment['run_start_time'] = None
        experiment['human_run_start_time'] = None
    experiment['spectra_count'] = msrun_list.info['spectrum_count']
    experiment['experimental_state_id'] = None
    experiment['match_type_id'] = None
    experiment['filename'] = filename.split('/')[-1]
    return experiment
def create_experiment_table(msrun_list, filename):
    """
    Fills the Experiment table.
    :param msrun_list: an open pymzml runner
    :param filename: name of the pymzml file
    :return: the experiment_id assigned to the new row
    """
    experiment = create_experiment(msrun_list, filename)
    utils.append_to_experiment('Experiment', experiment)
    experiment_id = config.db_connection.execute(
        """SELECT experiment_id from "Experiment" WHERE filename = '{}' """.format(
            filename.split('/')[-1])).fetchone()[0]
    return experiment_id
def read_file(filename):
    msrun = pymzml.run.Reader(filename)
    msrun_list = list(msrun)
    # Check if the filename is already in the Experiment table
    check = config.db_connection.execute(
        """SELECT experiment_id from "Experiment" WHERE filename = '{}' """.format(
            filename.split('/')[-1])).fetchone()
    if check is not None:
        print("File already exists in DB. Continue to filling Spectrum table")
        experiment_id = check[0]
    else:
        # Fill the Experiment table with data from the file
        experiment_id = create_experiment_table(msrun_list, filename)
    # Fill the Spectrum and Peak tables with data from the file
    create_spectrum_and_peak_tables(msrun_list, experiment_id)


if __name__ == "__main__":
    for n, filename in enumerate(glob.iglob('{}Geconverteerd/*.mzML'.format(config.data_dir))):
        print("reading file {} ({})".format(n, filename.split('/')[-1]))
        if '+' in filename.split('/')[-1]:
            print("Raw data, will be skipped for now")
            continue
        read_file(filename)
from setuptools import setup, find_packages
ver = "0.4"
setup(
    name='dirutil',
    version=ver,
    description='High level directory utilities',
    keywords=['dir', 'directory', 'workdir', 'tempdir'],
    author='<NAME>',
    author_email='<EMAIL>',
    packages=find_packages(),
    test_suite='dirutil.get_tests',
    url='https://github.com/ddolzhenko/dirutil',
    download_url='https://github.com/ddolzhenko/dirutil/archive/v{}.tar.gz'.format(ver),
    classifiers=[],
    install_requires=[
        # "checksumdir==1.0.5",
    ],
)
import random
from collections import deque

import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam
class DYNAgent:
    def __init__(self, state_size, action_size):
        self.state_size = state_size
        self.action_size = action_size
        self.memory = deque(maxlen=2000)
        self.gamma = 0.95  # discount future rewards less
        self.exploration_rate = 1
        self.exploration_decay = 0.995
        self.exploration_min = 0.01
        # Assumed hyperparameter: `self.learning_rate` is used in _build_model
        # below but was never defined in the original; 0.001 is a common default.
        self.learning_rate = 0.001
        self.model = self._build_model()

    def _build_model(self):
        model = Sequential()
        model.add(Dense(24, input_dim=self.state_size, activation='relu'))
        model.add(Dense(24, activation='relu'))
        model.add(Dense(self.action_size, activation='linear'))
        model.compile(loss='mse', optimizer=Adam(lr=self.learning_rate))
        return model
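
# Illustrative construction (CartPole-sized dimensions, not from the original):
#   agent = DYNAgent(state_size=4, action_size=2)
#   agent.model.summary()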
#!/usr/bin/env python3
"""Test suite for pandas-marc."""
from pandas_marc import MARCDataFrame
def test_instantiate_marcdataframe(dataframe):
kwargs = {
'dataframe': dataframe,
'occurrence_delimiter': '|',
'subfield_delimiter': '‡'
}
mdf = MARCDataFrame(**kwargs)
for key, value in kwargs.items():
assert getattr(mdf, key) is value
def test_marcdataframe_produces_correct_marc_records(dataframe, records):
mdf = MARCDataFrame(dataframe)
output = [record.as_marc() for record in mdf.records]
expected = [record.as_marc() for record in records]
assert output == expected
def test_marcdataframe_with_other_occurrence_delimiter(dataframe, records):
# Find and replace backslashes in original dataframe with pipes
dataframe = dataframe.replace(r'\\', '|', regex=True)
mdf = MARCDataFrame(dataframe, occurrence_delimiter='|')
output = [record.as_marc() for record in mdf.records]
expected = [record.as_marc() for record in records]
assert output == expected
def test_marcdataframe_with_other_subfield_delimiter(dataframe, records):
    # Find and replace dollar signs in original dataframe with double daggers
dataframe = dataframe.replace(r'\$', '‡', regex=True)
mdf = MARCDataFrame(dataframe, subfield_delimiter='‡')
output = [record.as_marc() for record in mdf.records]
expected = [record.as_marc() for record in records]
assert output == expected
| [
"pandas_marc.MARCDataFrame"
] | [((280, 303), 'pandas_marc.MARCDataFrame', 'MARCDataFrame', ([], {}), '(**kwargs)\n', (293, 303), False, 'from pandas_marc import MARCDataFrame\n'), ((470, 494), 'pandas_marc.MARCDataFrame', 'MARCDataFrame', (['dataframe'], {}), '(dataframe)\n', (483, 494), False, 'from pandas_marc import MARCDataFrame\n'), ((853, 903), 'pandas_marc.MARCDataFrame', 'MARCDataFrame', (['dataframe'], {'occurrence_delimiter': '"""|"""'}), "(dataframe, occurrence_delimiter='|')\n", (866, 903), False, 'from pandas_marc import MARCDataFrame\n'), ((1270, 1318), 'pandas_marc.MARCDataFrame', 'MARCDataFrame', (['dataframe'], {'subfield_delimiter': '"""‡"""'}), "(dataframe, subfield_delimiter='‡')\n", (1283, 1318), False, 'from pandas_marc import MARCDataFrame\n')] |
import argparse
from tangotest.tangotools import create_vnv_test
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Prepare tests for uploading to the V&V platform.')
parser.add_argument('tests_path', help='The path to the directory with test files')
parser.add_argument('ns_package_path', help='The path to the network service package')
parser.add_argument('-t', '--test_package_path', help='The path to generated output folder')
parser.add_argument('-p', '--probe_name', help='Probe name')
args = parser.parse_args()
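    # vars(args) turns the Namespace into a dict, so every CLI option is
    # forwarded to create_vnv_test as a keyword argument.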
create_vnv_test(**vars(args))
| [
"argparse.ArgumentParser"
] | [((107, 199), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Prepare tests for uploading to the V&V platform."""'}), "(description=\n 'Prepare tests for uploading to the V&V platform.')\n", (130, 199), False, 'import argparse\n')] |
# from scripts import tabledef
# from scripts import forms
# from scripts import helpers
from flask import Flask, redirect, url_for, render_template, request, session
import json
import sys
import os
# import stripe
import pandas as pd
from werkzeug.utils import secure_filename
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import Ridge
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.feature_extraction.text import CountVectorizer
from tkinter import Tk
from tkinter.filedialog import askopenfilename
import numpy as np
import jieba
import jieba.analyse
import csv
import ast
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.pdfpage import PDFPage
from pdfminer.converter import XMLConverter, HTMLConverter, TextConverter
from pdfminer.layout import LAParams
import io
app = Flask(__name__)
app.secret_key = os.urandom(12)  # Required for the session used in logout(); generic key for dev purposes only
# stripe_keys = {
# 'secret_key': os.environ['secret_key'],
# 'publishable_key': os.environ['publishable_key']
# }
# stripe.api_key = stripe_keys['secret_key']
# Heroku
#from flask_heroku import Heroku
#heroku = Heroku(app)
# ======== Routing =========================================================== #
# -------- Login ------------------------------------------------------------- #
@app.route('/', methods=['GET', 'POST'])
def login():
# creating a pdf file object
basepath = os.path.dirname(__file__)
file_path = os.path.join(basepath, 'uploads', 'sample.pdf')
fp = open(file_path, 'rb')
rsrcmgr = PDFResourceManager()
retstr = io.StringIO()
laparams = LAParams()
device = TextConverter(rsrcmgr, retstr, laparams=laparams)
# Create a PDF interpreter object.
interpreter = PDFPageInterpreter(rsrcmgr, device)
# Process each page contained in the document.
for page in PDFPage.get_pages(fp):
interpreter.process_page(page)
    data = retstr.getvalue()
    # Release the PDF handles once the text has been extracted
    device.close()
    retstr.close()
    fp.close()
    print(data)
return render_template('home.html', user="manoj")
# return text
def getFile():
    root = Tk()
    root.withdraw()  # hide the empty root window behind the file dialog
    filename = askopenfilename()
    root.destroy()  # Tk has no close(); tear down the hidden root window instead
    return filename
@app.route("/logout")
def logout():
session['logged_in'] = False
return redirect(url_for('login'))
@app.route('/predict', methods=['GET', 'POST'])
def upload():
if request.method == 'GET':
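        # NOTE: the computation currently runs on GET against a fixed CSV; the
        # commented lines below sketch the intended POST file-upload flow.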
# f = request.files['file']
basepath = os.path.dirname(__file__)
# file_path = os.path.join(
# basepath, 'uploads', secure_filename(f.filename))
# f.save(file_path)
file_path = os.path.join(basepath, 'uploads', 'test-upload.csv')
df = pd.read_csv(file_path)
seg_list01 = df['job-description']
seg_list02 = df['your-resume']
item01_list = seg_list01
item01 = ','.join(item01_list)
item02_list = seg_list02
item02 = ','.join(item02_list)
documents = [item01, item02]
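        # Vectorize both documents with a shared bag-of-words vocabulary so the
        # cosine similarity below compares term counts in the same space.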
count_vectorizer = CountVectorizer()
sparse_matrix = count_vectorizer.fit_transform(documents)
doc_term_matrix = sparse_matrix.todense()
df = pd.DataFrame(doc_term_matrix,
                          columns=count_vectorizer.get_feature_names_out(),  # get_feature_names() was removed in scikit-learn 1.2
index=['item01', 'item02'])
df.to_csv(os.path.join(basepath, 'uploads', 'result.csv'))
read_file = pd.read_csv(os.path.join(basepath, 'uploads',
'result.csv'))
read_file.to_excel(os.path.join(basepath, 'uploads', 'result.xlsx'),
index=None,
header=True)
answer = cosine_similarity(df, df)
print("CSV Created Successfully")
answer = pd.DataFrame(answer)
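        # iat[1, 0] is the off-diagonal entry, i.e. the similarity of item02
        # against item01 (the diagonal is always 1.0).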
        answer = answer.iat[1, 0]
answer = round(float(answer), 4) * 100
return "Your resume matched " + str(
answer) + " %" + " of the job-description!"
return None
# ======== Main ============================================================== #
if __name__ == "__main__":
    app.run(debug=True, use_reloader=True)
| [
"pandas.DataFrame",
"io.StringIO",
"tkinter.Tk.close",
"sklearn.feature_extraction.text.CountVectorizer",
"pdfminer.converter.TextConverter",
"sklearn.metrics.pairwise.cosine_similarity",
"pandas.read_csv",
"os.path.dirname",
"flask.Flask",
"pdfminer.layout.LAParams",
"tkinter.filedialog.askopenfilename",
"flask.url_for",
"flask.render_template",
"pdfminer.pdfpage.PDFPage.get_pages",
"pdfminer.pdfinterp.PDFResourceManager",
"os.path.join",
"tkinter.Tk",
"pdfminer.pdfinterp.PDFPageInterpreter"
] | [((996, 1011), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (1001, 1011), False, 'from flask import Flask, redirect, url_for, render_template, request, session\n'), ((1581, 1606), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1596, 1606), False, 'import os\n'), ((1624, 1671), 'os.path.join', 'os.path.join', (['basepath', '"""uploads"""', '"""sample.pdf"""'], {}), "(basepath, 'uploads', 'sample.pdf')\n", (1636, 1671), False, 'import os\n'), ((1718, 1738), 'pdfminer.pdfinterp.PDFResourceManager', 'PDFResourceManager', ([], {}), '()\n', (1736, 1738), False, 'from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter\n'), ((1752, 1765), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1763, 1765), False, 'import io\n'), ((1781, 1791), 'pdfminer.layout.LAParams', 'LAParams', ([], {}), '()\n', (1789, 1791), False, 'from pdfminer.layout import LAParams\n'), ((1805, 1854), 'pdfminer.converter.TextConverter', 'TextConverter', (['rsrcmgr', 'retstr'], {'laparams': 'laparams'}), '(rsrcmgr, retstr, laparams=laparams)\n', (1818, 1854), False, 'from pdfminer.converter import XMLConverter, HTMLConverter, TextConverter\n'), ((1912, 1947), 'pdfminer.pdfinterp.PDFPageInterpreter', 'PDFPageInterpreter', (['rsrcmgr', 'device'], {}), '(rsrcmgr, device)\n', (1930, 1947), False, 'from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter\n'), ((2016, 2037), 'pdfminer.pdfpage.PDFPage.get_pages', 'PDFPage.get_pages', (['fp'], {}), '(fp)\n', (2033, 2037), False, 'from pdfminer.pdfpage import PDFPage\n'), ((2140, 2182), 'flask.render_template', 'render_template', (['"""home.html"""'], {'user': '"""manoj"""'}), "('home.html', user='manoj')\n", (2155, 2182), False, 'from flask import Flask, redirect, url_for, render_template, request, session\n'), ((2253, 2270), 'tkinter.filedialog.askopenfilename', 'askopenfilename', ([], {}), '()\n', (2268, 2270), False, 'from tkinter.filedialog import askopenfilename\n'), ((2275, 2285), 'tkinter.Tk.close', 'Tk.close', ([], {}), '()\n', (2283, 2285), False, 'from tkinter import Tk\n'), ((2397, 2413), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (2404, 2413), False, 'from flask import Flask, redirect, url_for, render_template, request, session\n'), ((2567, 2592), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2582, 2592), False, 'import os\n'), ((2742, 2794), 'os.path.join', 'os.path.join', (['basepath', '"""uploads"""', '"""test-upload.csv"""'], {}), "(basepath, 'uploads', 'test-upload.csv')\n", (2754, 2794), False, 'import os\n'), ((2809, 2831), 'pandas.read_csv', 'pd.read_csv', (['file_path'], {}), '(file_path)\n', (2820, 2831), True, 'import pandas as pd\n'), ((3127, 3144), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ([], {}), '()\n', (3142, 3144), False, 'from sklearn.feature_extraction.text import CountVectorizer\n'), ((3801, 3826), 'sklearn.metrics.pairwise.cosine_similarity', 'cosine_similarity', (['df', 'df'], {}), '(df, df)\n', (3818, 3826), False, 'from sklearn.metrics.pairwise import cosine_similarity\n'), ((3887, 3907), 'pandas.DataFrame', 'pd.DataFrame', (['answer'], {}), '(answer)\n', (3899, 3907), True, 'import pandas as pd\n'), ((2222, 2226), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (2224, 2226), False, 'from tkinter import Tk\n'), ((3451, 3498), 'os.path.join', 'os.path.join', (['basepath', '"""uploads"""', '"""result.csv"""'], {}), "(basepath, 'uploads', 'result.csv')\n", (3463, 3498), False, 'import os\n'), ((3533, 3580), 
'os.path.join', 'os.path.join', (['basepath', '"""uploads"""', '"""result.csv"""'], {}), "(basepath, 'uploads', 'result.csv')\n", (3545, 3580), False, 'import os\n'), ((3654, 3702), 'os.path.join', 'os.path.join', (['basepath', '"""uploads"""', '"""result.xlsx"""'], {}), "(basepath, 'uploads', 'result.xlsx')\n", (3666, 3702), False, 'import os\n')] |
from uuid import uuid4
def generate_player_data(event_name, rating):
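    # Each call mints fresh UUIDs, so every event gets a unique id and its
    # own response-queue name.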
return {
"id": str(uuid4()),
"response-queue": "{}-response-queue".format(str(uuid4())),
"event-name": event_name,
"detail": {
"rating": rating,
"content": {}
}
}
| [
"uuid.uuid4"
] | [((102, 109), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (107, 109), False, 'from uuid import uuid4\n'), ((169, 176), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (174, 176), False, 'from uuid import uuid4\n')] |
"""Tests for the variant of MT2 by <NAME>."""
import numpy
import pytest
from .common import mt2_lester, mt2_tombs
def test_simple_example():
computed_val = mt2_tombs(100, 410, 20, 150, -210, -300, -200, 280, 100, 100)
assert computed_val == pytest.approx(412.628)
def test_near_massless():
# This test is based on Fig 5 of https://arxiv.org/pdf/1411.4312.pdf
m_vis_a = 0
px_a = -42.017340486
py_a = -146.365340528
m_vis_b = 0.087252259
px_b = -9.625614206
py_b = 145.757295514
px_miss = -16.692279406
py_miss = -14.730240471
chi_a = 0
chi_b = 0
computed_val = mt2_tombs(
m_vis_a, px_a, py_a, m_vis_b, px_b, py_b, px_miss, py_miss, chi_a, chi_b
)
assert computed_val == pytest.approx(0.09719971)
def test_fuzz():
batch_size = 100
num_tests = 1000
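    # Seed the RNG so the random batches (and any discrepancy they expose)
    # are reproducible.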
numpy.random.seed(42)
def _random_batch(min_, max_):
return numpy.random.uniform(min_, max_, (batch_size,))
for _ in range(num_tests):
m_vis_1 = _random_batch(0, 100)
px_vis_1 = _random_batch(-100, 100)
py_vis_1 = _random_batch(-100, 100)
m_vis_2 = _random_batch(0, 100)
px_vis_2 = _random_batch(-100, 100)
py_vis_2 = _random_batch(-100, 100)
px_miss = _random_batch(-100, 100)
py_miss = _random_batch(-100, 100)
m_invis_1 = _random_batch(0, 100)
m_invis_2 = _random_batch(0, 100)
args = (
m_vis_1,
px_vis_1,
py_vis_1,
m_vis_2,
px_vis_2,
py_vis_2,
px_miss,
py_miss,
m_invis_1,
m_invis_2,
)
result_lester = mt2_lester(*args)
result_tombs = mt2_tombs(*args)
numpy.testing.assert_allclose(result_lester, result_tombs, rtol=1e-12)
def test_scale_invariance():
example_args = numpy.array((100, 410, 20, 150, -210, -300, -200, 280, 100, 100))
example_val = mt2_tombs(*example_args)
# mt2 scales with its arguments; check over some orders of magnitude.
for i in range(-100, 100, 10):
scale = 10.0 ** i
with numpy.errstate(over="ignore"):
# Suppress overflow warnings when performing the evaluation; we're happy
# so long as we match approximately in the test below.
computed_val = mt2_tombs(*(example_args * scale))
assert computed_val == pytest.approx(example_val * scale)
def test_negative_masses():
# Any negative mass is unphysical.
# These arguments use negative masses to make both initial bounds negative.
# Check that the result is neither positive nor an infinite loop.
computed_val = mt2_tombs(1, 2, 3, 4, 5, 6, 7, 8, -90, -100)
assert not (computed_val > 0)
| [
"numpy.random.uniform",
"numpy.random.seed",
"numpy.errstate",
"numpy.array",
"numpy.testing.assert_allclose",
"pytest.approx"
] | [((880, 901), 'numpy.random.seed', 'numpy.random.seed', (['(42)'], {}), '(42)\n', (897, 901), False, 'import numpy\n'), ((1918, 1983), 'numpy.array', 'numpy.array', (['(100, 410, 20, 150, -210, -300, -200, 280, 100, 100)'], {}), '((100, 410, 20, 150, -210, -300, -200, 280, 100, 100))\n', (1929, 1983), False, 'import numpy\n'), ((290, 312), 'pytest.approx', 'pytest.approx', (['(412.628)'], {}), '(412.628)\n', (303, 312), False, 'import pytest\n'), ((788, 813), 'pytest.approx', 'pytest.approx', (['(0.09719971)'], {}), '(0.09719971)\n', (801, 813), False, 'import pytest\n'), ((953, 1000), 'numpy.random.uniform', 'numpy.random.uniform', (['min_', 'max_', '(batch_size,)'], {}), '(min_, max_, (batch_size,))\n', (973, 1000), False, 'import numpy\n'), ((1797, 1867), 'numpy.testing.assert_allclose', 'numpy.testing.assert_allclose', (['result_lester', 'result_tombs'], {'rtol': '(1e-12)'}), '(result_lester, result_tombs, rtol=1e-12)\n', (1826, 1867), False, 'import numpy\n'), ((2176, 2205), 'numpy.errstate', 'numpy.errstate', ([], {'over': '"""ignore"""'}), "(over='ignore')\n", (2190, 2205), False, 'import numpy\n'), ((2452, 2486), 'pytest.approx', 'pytest.approx', (['(example_val * scale)'], {}), '(example_val * scale)\n', (2465, 2486), False, 'import pytest\n')] |
from datetime import datetime
pessoa = dict()
anohoje = datetime.now().year
pessoa['nome'] = str(input('Enter the name: ')).strip().title()
nasc = int(input('Enter the year of birth: '))
pessoa['idade'] = anohoje - nasc
pessoa['ctps'] = int(input('Enter the CTPS number (0 if none): '))
if pessoa['ctps'] != 0:
    pessoa['contratacao'] = int(input('Enter the year of hiring: '))
    pessoa['salario'] = float(input('Enter the salary: '))
    # Retirement after 35 years of service: current age plus the years still to work
    faltam = 35 - (anohoje - pessoa['contratacao'])
    pessoa['aposentar'] = pessoa['idade'] + faltam
print('-='*15)
for k, v in pessoa.items():
    print(f' - {k} has the value {v}')
| [
"datetime.datetime.now"
] | [((58, 72), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (70, 72), False, 'from datetime import datetime\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# dicomgui.py
"""Main app file that convert DICOM data via a wxPython GUI dialog."""
# Copyright (c) 2018-2020 <NAME>
# Copyright (c) 2009-2017 <NAME>
# Copyright (c) 2009 <NAME>
# This file is part of dicompyler, released under a BSD license.
# See the file license.txt included with this distribution, also
# available at https://github.com/bastula/dicompyler/
#
# It's assumed that the reference (prescription) dose is in cGy.
import hashlib, os, threading, functools, json, warnings
from logging import getLogger, DEBUG, INFO
logger = getLogger('DcmConverter')
import wx
warnings.filterwarnings("ignore", category=wx.wxPyDeprecationWarning)
from wx.xrc import *
import numpy as np
from dicompylercore import dicomparser
from pyDcmConverter import guiutil, util
class DcmConverterApp(wx.App):
"""Prepare to show the dialog that will Import DICOM and DICOM RT files."""
def OnInit(self):
wx.GetApp().SetAppName("DicomConverter")
# Load the XRC file for our gui resources
self.res = XmlResource(util.GetResourcePath('dicomgui.xrc'))
dlgDicomImporter = self.res.LoadDialog(None, "DicomImporterDialog")
dlgDicomImporter.Init(self.res)
# Show the dialog and return the result
ret = dlgDicomImporter.ShowModal()
# Save configure
conf = {}
with open('.dcmconverter.conf', 'w') as f:
conf['path'] = dlgDicomImporter.path
conf['only_export_voldata'] = dlgDicomImporter.only_export_voldata
conf['min_slice_num'] = dlgDicomImporter.min_slice_num
conf['offset'] = dlgDicomImporter.offset
conf['export_mori_format'] = dlgDicomImporter.export_mori_format
conf['export_nii_format'] = dlgDicomImporter.export_nii_format
conf['output_dir'] = dlgDicomImporter.output_dir
conf['output_name'] = dlgDicomImporter.output_name
json.dump(conf, f, indent=2, sort_keys=True)
# Block until the thread is done before destroying the dialog
if dlgDicomImporter:
if hasattr(dlgDicomImporter, 't'):
dlgDicomImporter.t.join()
dlgDicomImporter.Destroy()
os.sys.exit(0)
return 1
class DicomImporterDialog(wx.Dialog):
"""Import DICOM RT files and return a dictionary of data."""
def __init__(self):
wx.Dialog.__init__(self)
def Init(self, res):
"""Method called after the panel has been initialized."""
# Set window icon
if not guiutil.IsMac():
self.SetIcon(guiutil.get_icon())
# Initialize controls
self.txtDicomImport = XRCCTRL(self, 'txtDicomImport')
self.btnDicomImport = XRCCTRL(self, 'btnDicomImport')
self.btnPause = XRCCTRL(self, 'btn_pause')
self.checkSearchSubfolders = XRCCTRL(self, 'checkSearchSubfolders')
self.lblProgressLabel = XRCCTRL(self, 'lblProgressLabel')
self.lblProgress = XRCCTRL(self, 'lblProgress')
self.gaugeProgress = XRCCTRL(self, 'gaugeProgress')
self.lblProgressPercent = XRCCTRL(self, 'lblProgressPercent')
self.lblProgressPercentSym = XRCCTRL(self, 'lblProgressPercentSym')
self.tcPatients = XRCCTRL(self, 'tcPatients')
self.bmpRxDose = XRCCTRL(self, 'bmpRxDose')
self.lblRxDose = XRCCTRL(self, 'lblRxDose')
self.txtRxDose = XRCCTRL(self, 'txtRxDose')
self.lblRxDoseUnits = XRCCTRL(self, 'lblRxDoseUnits')
# Bind interface events to the proper methods
self.Bind(wx.EVT_BUTTON, self.OnBrowseDicomImport, id=XRCID('btnDicomImport'))
self.Bind(wx.EVT_CHECKBOX, self.OnCheckSearchSubfolders, id=XRCID('checkSearchSubfolders'))
self.Bind(wx.EVT_TREE_SEL_CHANGED, self.OnSelectTreeItem, id=XRCID('tcPatients'))
#self.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self.OnOK, id=XRCID('tcPatients'))
#added by CL.Wang
self.Bind(wx.EVT_CHECKBOX, self.OnCheckVolFlag, id=XRCID('check_volume'))
self.Bind(wx.EVT_SPINCTRL, self.OnSpinSliceNum, id=XRCID('spin_minslices'))
self.Bind(wx.EVT_SPINCTRL, self.OnSpinOffset, id=XRCID('spin_offset'))
self.Bind(wx.EVT_CHECKBOX, self.OnCheckMoriFormat, id=XRCID('check_mori'))
self.Bind(wx.EVT_CHECKBOX, self.OnCheckNiftiFormat, id=XRCID('check_nifti'))
self.Bind(wx.EVT_DIRPICKER_CHANGED, self.OnPickOutdir, id=XRCID('picker_output'))
self.Bind(wx.EVT_TEXT, self.OnInputName, id=XRCID('text_output_name'))
self.Bind(wx.EVT_BUTTON, self.OnConvert, id=XRCID('btn_convert'))
self.Bind(wx.EVT_BUTTON, self.OnPause, id=XRCID('btn_pause'))
self.Bind(wx.EVT_BUTTON, self.OnRescan, id=XRCID('btn_rescan'))
# Init variables
if os.path.isfile('.dcmconverter.conf'):
logger.info('Loading previous configuration...')
with open('.dcmconverter.conf', 'r') as f:
conf = json.load(f)
self.path = conf['path']
self.txtDicomImport.SetValue(self.path)
self.only_export_voldata = conf['only_export_voldata']
XRCCTRL(self, 'check_mori').SetValue(self.only_export_voldata)
self.min_slice_num = conf['min_slice_num']
XRCCTRL(self, 'spin_minslices').SetValue(self.min_slice_num)
self.offset = conf['offset']
XRCCTRL(self, 'spin_offset').SetValue(self.offset)
self.export_mori_format = conf['export_mori_format']
XRCCTRL(self, 'check_mori').SetValue(self.export_mori_format)
self.export_nii_format = conf['export_nii_format']
XRCCTRL(self, 'check_nifti').SetValue(self.export_nii_format)
self.output_dir = conf['output_dir']
XRCCTRL(self, 'picker_output').SetPath(self.output_dir)
self.output_name = conf['output_name']
XRCCTRL(self, 'text_output_name').SetValue(self.output_name)
else:
self.path = os.path.expanduser('~')
self.only_export_voldata = XRCCTRL(self, 'check_volume').IsChecked()
self.min_slice_num = int(XRCCTRL(self, 'spin_minslices').GetValue())
self.offset = int(XRCCTRL(self, 'spin_offset').GetValue())
self.export_mori_format = XRCCTRL(self, 'check_mori').IsChecked()
self.export_nii_format = XRCCTRL(self, 'check_nifti').IsChecked()
self.output_dir = ''
self.output_name = ''
# Set the dialog font and bold the font of the directions label
font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
if guiutil.IsMac():
self.txtDicomImport.SetFont(font)
self.btnDicomImport.SetFont(font)
self.checkSearchSubfolders.SetFont(font)
self.lblProgressLabel.SetFont(font)
self.lblProgress.SetFont(font)
self.lblProgressPercent.SetFont(font)
self.lblProgressPercentSym.SetFont(font)
self.tcPatients.SetFont(font)
self.txtRxDose.SetFont(font)
self.lblRxDoseUnits.SetFont(font)
font.SetWeight(wx.FONTWEIGHT_BOLD)
self.lblRxDose.SetFont(font)
# Initialize the patients tree control
self.root = self.InitTree()
# Initialize the patients dictionary
self.patients = {}
# Search subfolders by default
self.import_search_subfolders = True
# Set the threading termination status to false intially
self.terminate = False
# Hide the progress bar until it needs to be shown
self.gaugeProgress.Show(False)
self.lblProgressPercent.Show(False)
self.lblProgressPercentSym.Show(False)
        # The directory search is triggered via the Rescan button rather than on panel load
        #self.OnDirectorySearch()
def OnRescan(self, evt):
self.OnDirectorySearch()
def OnPause(self, evt):
self.terminate = True
def OnSpinOffset(self, evt):
self.offset = evt.GetPosition()
def OnCheckVolFlag(self, evt):
self.only_export_voldata = evt.IsChecked()
try:
self.Check_Export_Files()
except:
logger.info('Adjusted parameters befor the tree generated.')
def OnSpinSliceNum(self, evt):
self.min_slice_num = evt.GetPosition()
try:
self.Check_Export_Files()
except:
logger.info('Adjusted parameters befor the tree generated.')
def OnCheckMoriFormat(self, evt):
self.export_mori_format = evt.IsChecked()
def OnCheckNiftiFormat(self, evt):
self.export_nii_format = evt.IsChecked()
def OnPickOutdir(self, evt):
self.output_dir = evt.GetPath()
def OnInputName(self, evt):
self.output_name = evt.GetString()
def AlertDialog(self, msg):
dialog = wx.MessageDialog(self, msg, 'Error', style=wx.OK)
dialog.ShowModal()
dialog.Destroy()
def ChoiceDialog(self, msg):
        dialog = wx.MessageDialog(self, msg, 'Warning', style=wx.OK | wx.CANCEL)  # use wx.OK | wx.CANCEL so both buttons are shown
self.contiune_export = dialog.ShowModal()
dialog.Destroy()
def __GetNiftiAffineMatrix__(self, dp):
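        # Build a 4x4 voxel-to-patient affine from the pixel spacing, the image
        # orientation cosines and the slice thickness (translation left at zero).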
di = float(dp.ds.PixelSpacing[0])
dj = float(dp.ds.PixelSpacing[1])
orientation = dp.ds.ImageOrientationPatient
dk = float(dp.ds.SliceThickness)
m = np.array(
            [[float(orientation[0])*di, float(orientation[3])*dj, 0, 0],
             [float(orientation[1])*di, float(orientation[4])*dj, 0, 0],
             [float(orientation[2])*di, float(orientation[5])*dj, dk, 0],
             [0, 0, 0, 1]], dtype=float)  # np.float was removed in NumPy 1.24
return m
def ExportFunc(self, out_basepath, patient_data, progressFunc=None):
if patient_data is None:
return
# Existence check
if self.export_mori_format:
out_dir = os.path.join(os.path.dirname(out_basepath), 'LabFormat')
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
mori_fname = os.path.join(out_dir, os.path.basename(out_basepath))
if os.path.isfile(mori_fname+'.raw.gz'):
self.ChoiceDialog('File existed! Continue?')
if self.contiune_export != wx.ID_OK:
return
if self.export_nii_format:
out_dir = os.path.join(os.path.dirname(out_basepath), 'NiftiFormat')
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
nii_fname = os.path.join(out_dir, os.path.basename(out_basepath)+'.nii.gz')
if os.path.isfile(nii_fname):
self.ChoiceDialog('File existed! Continue?')
if self.contiune_export != wx.ID_OK:
return
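        # Use the first slice as the reference for geometry and scanner metadata.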
dp = dicomparser.DicomParser(patient_data['images'][0])
reso = [ float(dp.ds.PixelSpacing[0]), float(dp.ds.PixelSpacing[1]), float(dp.ds.SliceThickness)]
affine = self.__GetNiftiAffineMatrix__(dp)
conv_kernel, hospital, kvp, model_name = dp.ds.ConvolutionKernel, dp.ds.InstitutionName, dp.ds.KVP, dp.ds.ManufacturerModelName
img_ori, pat_ori, pat_pos = np.array(dp.ds.ImageOrientationPatient), dp.ds.PatientOrientation, dp.ds.PatientPosition
study_date, serise_date, acq_date = dp.ds.StudyDate, dp.ds.SeriesDate, dp.ds.AcquisitionDate
if (dp.ds.SamplesPerPixel > 1) or (dp.ds.PhotometricInterpretation == 'RGB'):
logger.info('Cannot handle color image!')
return
if dp.ds.BitsAllocated == 16:
image_array = np.zeros([dp.ds.Rows, dp.ds.Columns, len(patient_data['images'])]).astype(np.int16)
elif dp.ds.BitsAllocated == 32:
image_array = np.zeros([dp.ds.Rows, dp.ds.Columns, len(patient_data['images'])]).astype(np.int32)
elif dp.ds.BitsAllocated == 8:
image_array = np.zeros([dp.ds.Rows, dp.ds.Columns, len(patient_data['images'])]).astype(np.int8)
else:
image_array = np.zeros([dp.ds.Rows, dp.ds.Columns, len(patient_data['images'])])
pos = []
for i, img in enumerate(patient_data['images']):
dp = dicomparser.DicomParser(img)
intercept, slope = dp.GetRescaleInterceptSlope()
pos.append(dp.ds.ImagePositionPatient[2])
pixel_array = dp.ds.pixel_array
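            # Apply the DICOM rescale (slope/intercept) plus the user-selected offset.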
rescaled_image = pixel_array * slope + intercept + self.offset
image_array[:,:,i] = rescaled_image
wx.CallAfter(progressFunc, (i+image_array.shape[-1])//2, image_array.shape[-1]+1, 'Creating image array...')
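        # Reorder from (row, column, slice) to (column, row, slice) before export.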
image_array = np.transpose(image_array, (1,0,2))
if self.export_mori_format:
from utils_cw import write_mori, get_mori_header_fields
logger.info('Exporting image to %s', mori_fname)
header_name = write_mori(image_array, reso, mori_fname, True)
with open(header_name, 'r') as f:
origin_header_lines = f.read().splitlines()
with open(header_name, 'w') as f:
for field in origin_header_lines: # \r\n
if 'Thickness' in field:
f.write('{} {:.6f}\r'.format(field,reso[2]))
elif 'ImagePositionBegin' in field:
f.write('{} {:.6f}\r'.format(field,np.min(pos)))
elif 'ImagePositionEnd' in field:
f.write('{} {:.6f}\r'.format(field,np.max(pos)))
elif 'Hospital' in field:
f.write('{} {}\r'.format(field,hospital))
elif 'KVP' in field:
f.write('{} {}\r'.format(field,kvp))
elif 'KernelFunction' in field:
f.write('{} {}\r'.format(field,conv_kernel))
elif 'ModelName' in field:
f.write('{} {}\r'.format(field,model_name))
elif 'PatientPosition' in field:
f.write('{} {}\r'.format(field,pat_pos))
elif 'PatientOrientation' in field:
f.write('{} {}\r'.format(field,pat_ori))
elif 'ImageOrientation' in field:
f.write('{} {}\r'.format(field,img_ori.tolist()))
elif 'StudyDate' in field:
f.write('{} {}\r'.format(field,study_date))
elif 'SeriesDate' in field:
f.write('{} {}\r'.format(field,serise_date))
elif 'AcquisitionDate' in field:
f.write('{} {}\r'.format(field,acq_date))
elif 'Orientation' in field:
f.write('{} {}\r'.format(field,'LPF'))
elif '' == field:
pass
else:
f.write('{} \r'.format(field))
wx.CallAfter(progressFunc, 97, 100, 'Export RAW image completed')
if self.export_nii_format:
import nibabel as nib
logger.info('Exporting image to %s', nii_fname)
nib.save(nib.Nifti1Image(image_array, affine=affine), nii_fname)
wx.CallAfter(progressFunc, 98, 100, 'Export Nifti image completed')
def OnConvert(self, evt):
if not self.selected_exports:
self.AlertDialog('No Dicom series have been selected!')
return
if not self.output_dir:
self.AlertDialog('Please enter valid output dir!')
return
if not self.output_name:
self.AlertDialog('Please enter valid output file name!')
return
if not os.path.isdir(self.output_dir):
logger.info("Output dir not exists! Create new dir [%s]", self.output_dir)
os.makedirs(self.output_dir)
all_export_threads = []
for export in self.selected_exports:
info = self.tcPatients.GetItemData(export)
filearray, series_no = info['filearray'], info['info']['series_number']
basename = os.path.join(self.output_dir, self.output_name+'-'+str(series_no)+'.512')
all_export_threads.append(threading.Thread(target=self.ExportPatientData,
args=(self.path, filearray, self.txtRxDose.GetValue(),
self.SetThreadStatus, self.OnUpdateProgress,
functools.partial(self.ExportFunc, out_basepath=basename))))
[th.start() for th in all_export_threads]
#[th.join() for th in all_export_threads] # wait all threads
#self.AlertDialog('All exports finished!')
def OnCheckSearchSubfolders(self, evt):
"""Determine whether to search subfolders for DICOM data."""
self.import_search_subfolders = evt.IsChecked()
self.terminate = True
self.OnDirectorySearch()
def OnBrowseDicomImport(self, evt):
"""Get the directory selected by the user."""
self.terminate = True
dlg = wx.DirDialog(
self, defaultPath = self.path,
message="Choose a directory containing DICOM RT files...")
if dlg.ShowModal() == wx.ID_OK:
self.path = dlg.GetPath()
self.txtDicomImport.SetValue(self.path)
dlg.Destroy()
#self.OnDirectorySearch()
def OnDirectorySearch(self):
"""Begin directory search."""
self.patients = {}
self.tcPatients.DeleteChildren(self.root)
self.terminate = False
self.gaugeProgress.Show(True)
self.lblProgressPercent.Show(True)
self.lblProgressPercentSym.Show(True)
#self.btnSelect.Enable(False)
# Disable Rx dose controls except on GTK due to control placement oddities
if not guiutil.IsGtk():
self.EnableRxDose(False)
# If a previous search thread exists, block until it is done before
# starting a new thread
if (hasattr(self, 't')):
self.t.join()
del self.t
self.t=threading.Thread(target=self.DirectorySearchThread,
args=(self, self.path, self.import_search_subfolders,
self.SetThreadStatus, self.OnUpdateProgress,
self.AddPatientTree, self.AddPatientDataTree))
self.t.start()
def SetThreadStatus(self):
"""Tell the directory search thread whether to terminate or not."""
return self.terminate
def DirectorySearchThread(self, parent, path, subfolders, terminate,
progressFunc, foundFunc, resultFunc):
"""Thread to start the directory search."""
# Call the progress function to update the gui
wx.CallAfter(progressFunc, 0, 0, 'Searching for patients...')
patients = {}
# Check if the path is valid
if os.path.isdir(path):
files = []
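            # Gather every file beneath path; stop after the top level when
            # subfolder searching is disabled.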
for root, dirs, filenames in os.walk(path):
files += map(lambda f:os.path.join(root, f), filenames)
if (self.import_search_subfolders == False):
break
for n in range(len(files)):
# terminate the thread if the value has changed
# during the loop duration
if terminate():
wx.CallAfter(progressFunc, 0, 0, 'Search terminated.')
return
if (os.path.isfile(files[n])):
try:
logger.debug("Reading: %s", files[n])
dp = dicomparser.DicomParser(files[n])
except (AttributeError, EOFError, IOError, KeyError):
logger.info("%s is not a valid DICOM file.", files[n])
else:
patient = dp.GetDemographics()
h = hashlib.sha1(patient['id'].encode('utf-8')).hexdigest()
if not h in patients:
patients[h] = {}
patients[h]['demographics'] = patient
if not 'studies' in patients[h]:
patients[h]['studies'] = {}
patients[h]['series'] = {}
wx.CallAfter(foundFunc, patient)
# Create each Study but don't create one for RT Dose
# since some vendors use incorrect StudyInstanceUIDs
if not (dp.GetSOPClassUID() == 'rtdose'):
stinfo = dp.GetStudyInfo()
if not stinfo['id'] in patients[h]['studies']:
patients[h]['studies'][stinfo['id']] = stinfo
# Create each Series of images
if (('ImageOrientationPatient' in dp.ds) and \
not (dp.GetSOPClassUID() == 'rtdose')):
seinfo = dp.GetSeriesInfo()
try:
seinfo['series_number'] = dp.ds.SeriesNumber #added by CL.Wang
seinfo['KVP'] = dp.ds.KVP
seinfo['PatientPosition'] = dp.ds.PatientPosition
seinfo['ModelName'] = dp.ds.ManufacturerModelName
seinfo['PixelSpacing'] = dp.ds.PixelSpacing
seinfo['Orientation'] = dp.ds.ImageOrientationPatient
except:
logger.error('Get dcm info error!')
seinfo['numimages'] = 0
seinfo['modality'] = dp.ds.SOPClassUID.name
if not seinfo['id'] in patients[h]['series']:
patients[h]['series'][seinfo['id']] = seinfo
if not 'images' in patients[h]:
patients[h]['images'] = {}
image = {}
image['id'] = dp.GetSOPInstanceUID()
image['filename'] = files[n]
image['series'] = seinfo['id']
image['referenceframe'] = dp.GetFrameOfReferenceUID()
patients[h]['series'][seinfo['id']]['numimages'] = \
patients[h]['series'][seinfo['id']]['numimages'] + 1
patients[h]['images'][image['id']] = image
# Create each RT Structure Set
elif dp.ds.Modality in ['RTSTRUCT']:
if not 'structures' in patients[h]:
patients[h]['structures'] = {}
structure = dp.GetStructureInfo()
structure['id'] = dp.GetSOPInstanceUID()
structure['filename'] = files[n]
structure['series'] = dp.GetReferencedSeries()
structure['referenceframe'] = dp.GetFrameOfReferenceUID()
patients[h]['structures'][structure['id']] = structure
# Create each RT Plan
elif dp.ds.Modality in ['RTPLAN']:
if not 'plans' in patients[h]:
patients[h]['plans'] = {}
plan = dp.GetPlan()
plan['id'] = dp.GetSOPInstanceUID()
plan['filename'] = files[n]
plan['series'] = dp.ds.SeriesInstanceUID
plan['referenceframe'] = dp.GetFrameOfReferenceUID()
plan['beams'] = dp.GetReferencedBeamsInFraction()
plan['rtss'] = dp.GetReferencedStructureSet()
patients[h]['plans'][plan['id']] = plan
# Create each RT Dose
elif dp.ds.Modality in ['RTDOSE']:
if not 'doses' in patients[h]:
patients[h]['doses'] = {}
dose = {}
dose['id'] = dp.GetSOPInstanceUID()
dose['filename'] = files[n]
dose['referenceframe'] = dp.GetFrameOfReferenceUID()
dose['hasdvh'] = dp.HasDVHs()
dose['hasgrid'] = "PixelData" in dp.ds
dose['summationtype'] = dp.ds.DoseSummationType
dose['beam'] = dp.GetReferencedBeamNumber()
dose['rtss'] = dp.GetReferencedStructureSet()
dose['rtplan'] = dp.GetReferencedRTPlan()
patients[h]['doses'][dose['id']] = dose
# Otherwise it is a currently unsupported file
else:
logger.info("%s is a %s file and is not " + \
"currently supported.",
files[n], dp.ds.SOPClassUID.name)
# Call the progress function to update the gui
wx.CallAfter(progressFunc, n, len(files), 'Searching for patients...')
if (len(patients) == 0):
progressStr = 'Found 0 patients.'
elif (len(patients) == 1):
progressStr = 'Found 1 patient. Reading DICOM data...'
elif (len(patients) > 1):
progressStr = 'Found ' + str(len(patients)) + ' patients. Reading DICOM data...'
wx.CallAfter(progressFunc, 0, 1, progressStr)
wx.CallAfter(resultFunc, patients)
# if the path is not valid, display an error message
else:
wx.CallAfter(progressFunc, 0, 0, 'Select a valid location.')
dlg = wx.MessageDialog(
parent,
"The DICOM import location does not exist. Please select a valid location.",
"Invalid DICOM Import Location", wx.OK|wx.ICON_ERROR)
dlg.ShowModal()
def OnUpdateProgress(self, num, length, message):
"""Update the DICOM Import process interface elements."""
if not length:
percentDone = 0
else:
percentDone = int(100 * (num+1) / length)
self.gaugeProgress.SetValue(percentDone)
self.lblProgressPercent.SetLabel(str(percentDone))
self.lblProgress.SetLabel(message)
if not (percentDone == 100):
self.gaugeProgress.Show(True)
self.lblProgressPercent.Show(True)
self.lblProgressPercentSym.Show(True)
else:
self.gaugeProgress.Show(False)
self.lblProgressPercent.Show(False)
self.lblProgressPercentSym.Show(False)
# End the dialog since we are done with the import process
if (message == 'Importing patient complete.'):
self.EndModal(wx.ID_OK)
elif (message == 'Importing patient cancelled.'):
self.EndModal(wx.ID_CANCEL)
def InitTree(self):
"""Initialize the tree control for use."""
iSize = (16,16)
iList = wx.ImageList(iSize[0], iSize[1])
iList.Add(
wx.Bitmap(
util.GetResourcePath('group.png'),
wx.BITMAP_TYPE_PNG))
iList.Add(
wx.Bitmap(
util.GetResourcePath('user.png'),
wx.BITMAP_TYPE_PNG))
iList.Add(
wx.Bitmap(
util.GetResourcePath('book.png'),
wx.BITMAP_TYPE_PNG))
iList.Add(
wx.Bitmap(
util.GetResourcePath('table_multiple.png'),
wx.BITMAP_TYPE_PNG))
iList.Add(
wx.Bitmap(
util.GetResourcePath('pencil.png'),
wx.BITMAP_TYPE_PNG))
iList.Add(
wx.Bitmap(
util.GetResourcePath('chart_bar.png'),
wx.BITMAP_TYPE_PNG))
iList.Add(
wx.Bitmap(
util.GetResourcePath('chart_curve.png'),
wx.BITMAP_TYPE_PNG))
iList.Add(
wx.Bitmap(
util.GetResourcePath('pencil_error.png'),
wx.BITMAP_TYPE_PNG))
iList.Add(
wx.Bitmap(
util.GetResourcePath('chart_bar_error.png'),
wx.BITMAP_TYPE_PNG))
iList.Add(
wx.Bitmap(
util.GetResourcePath('chart_curve_error.png'),
wx.BITMAP_TYPE_PNG))
iList.Add(
wx.Bitmap(
util.GetResourcePath('table_selected.png'),
wx.BITMAP_TYPE_PNG))
self.tcPatients.AssignImageList(iList)
root = self.tcPatients.AddRoot('Patients', image=0)
return root
def AddPatientTree(self, patient):
"""Add a new patient to the tree control."""
# Create a hash for each patient
h = hashlib.sha1(patient['id'].encode('utf-8')).hexdigest()
# Add the patient to the tree if they don't already exist
if not h in self.patients:
self.patients[h] = {}
self.patients[h]['demographics'] = patient
name = str(patient['name']) + ' (' + patient['id'] + ')'
self.patients[h]['treeid'] = \
self.tcPatients.AppendItem(self.root, name, 1)
self.tcPatients.SortChildren(self.root)
self.tcPatients.ExpandAll()
def AddPatientDataTree(self, patients):
"""Add the patient data to the tree control."""
# Now add the specific item to the tree
for key, patient in self.patients.items():
patient.update(patients[key])
if 'studies' in patient:
for studyid, study in patient['studies'].items():
name = 'Study: ' + study['description']
study['treeid'] = self.tcPatients.AppendItem(patient['treeid'], name, 2)
# Search for series and images
if 'series' in patient:
for seriesid, series in patient['series'].items():
if 'studies' in patient:
for studyid, study in patient['studies'].items():
if (studyid == series['study']):
modality = series['modality'].partition(' Image Storage')[0]
name = 'Series {}: {}. ({}, {} {})'.format(series['series_number'], series['description'], modality, series['numimages'], 'image' if series['numimages']==1 else 'images')
#name = 'Series: ' + series['description'] + ' (' + modality + ', '
#numimages = str(series['numimages']) + ' image)' if (series['numimages'] == 1) else str(series['numimages']) + ' images)'
#name = name + numimages
series['treeid'] = self.tcPatients.AppendItem(study['treeid'], name, 3)
self.EnableItemSelection(patient, series, [])
# Search for RT Structure Sets
if 'structures' in patient:
for structureid, structure in patient['structures'].items():
if 'series' in patient:
foundseries = False
name = 'RT Structure Set: ' + structure['label']
for seriesid, series in patient['series'].items():
foundseries = False
if (seriesid == structure['series']):
structure['treeid'] = self.tcPatients.AppendItem(series['treeid'], name, 4)
foundseries = True
# If no series were found, add the rtss to the study
if not foundseries:
structure['treeid'] = self.tcPatients.AppendItem(study['treeid'], name, 4)
filearray = [structure['filename']]
self.EnableItemSelection(patient, structure, filearray)
# Search for RT Plans
if 'plans' in patient:
for planid, plan in patient['plans'].items():
foundstructure = False
planname = ' (' + plan['name'] + ')' if len(plan['name']) else ""
rxdose = plan['rxdose'] if plan['rxdose'] > 0 else "Unknown"
name = 'RT Plan: ' + plan['label'] + planname + \
' - Dose: ' + str(rxdose) + ' cGy'
if 'structures' in patient:
for structureid, structure in patient['structures'].items():
foundstructure = False
if (structureid == plan['rtss']):
plan['treeid'] = self.tcPatients.AppendItem(structure['treeid'], name, 5)
foundstructure = True
# If no structures were found, add the plan to the study/series instead
if not foundstructure:
# If there is an image series, add a fake rtss to it
foundseries = False
for seriesid, series in patient['series'].items():
foundseries = False
if (series['referenceframe'] == plan['referenceframe']):
badstructure = self.tcPatients.AppendItem(
series['treeid'], "RT Structure Set not found", 7)
foundseries = True
# If no series were found, add the rtss to the study
if not foundseries:
badstructure = self.tcPatients.AppendItem(
patient['treeid'], "RT Structure Set not found", 7)
plan['treeid'] = self.tcPatients.AppendItem(badstructure, name, 5)
self.tcPatients.SetItemTextColour(badstructure, wx.RED)
filearray = [plan['filename']]
self.EnableItemSelection(patient, plan, filearray, plan['rxdose'])
# Search for RT Doses
if 'doses' in patient:
for doseid, dose in patient['doses'].items():
foundplan = False
if 'plans' in patient:
for planid, plan in patient['plans'].items():
foundplan = False
if (planid == dose['rtplan']):
foundplan = True
rxdose = None
if dose['hasgrid']:
if dose['hasdvh']:
name = 'RT Dose with DVH'
else:
name = 'RT Dose without DVH'
else:
if dose['hasdvh']:
name = 'RT Dose without Dose Grid (DVH only)'
else:
name = 'RT Dose without Dose Grid or DVH'
if (dose['summationtype'] == "BEAM"):
name += " (Beam " + str(dose['beam']) + ": "
if dose['beam'] in plan['beams']:
b = plan['beams'][dose['beam']]
name += b['name']
if len(b['description']):
name += " - " + b['description']
name += ")"
if "dose" in b:
name += " - Dose: " + str(int(b['dose'])) + " cGy"
rxdose = int(b['dose'])
dose['treeid'] = self.tcPatients.AppendItem(plan['treeid'], name, 6)
filearray = [dose['filename']]
self.EnableItemSelection(patient, dose, filearray, rxdose)
# If no plans were found, add the dose to the structure/study instead
if not foundplan:
if dose['hasgrid']:
if dose['hasdvh']:
name = 'RT Dose with DVH'
else:
name = 'RT Dose without DVH'
else:
if dose['hasdvh']:
name = 'RT Dose without Dose Grid (DVH only)'
else:
name = 'RT Dose without Dose Grid or DVH'
foundstructure = False
if 'structures' in patient:
for structureid, structure in patient['structures'].items():
foundstructure = False
if 'rtss' in dose:
if (structureid == dose['rtss']):
foundstructure = True
if (structure['referenceframe'] == dose['referenceframe']):
foundstructure = True
if foundstructure:
badplan = self.tcPatients.AppendItem(
structure['treeid'], "RT Plan not found", 8)
dose['treeid'] = self.tcPatients.AppendItem(badplan, name, 6)
self.tcPatients.SetItemTextColour(badplan, wx.RED)
filearray = [dose['filename']]
self.EnableItemSelection(patient, dose, filearray)
if not foundstructure:
# If there is an image series, add a fake rtss to it
foundseries = False
for seriesid, series in patient['series'].items():
foundseries = False
if (series['referenceframe'] == dose['referenceframe']):
badstructure = self.tcPatients.AppendItem(
series['treeid'], "RT Structure Set not found", 7)
foundseries = True
# If no series were found, add the rtss to the study
if not foundseries:
badstructure = self.tcPatients.AppendItem(
patient['treeid'], "RT Structure Set not found", 7)
self.tcPatients.SetItemTextColour(badstructure, wx.RED)
badplan = self.tcPatients.AppendItem(
badstructure, "RT Plan not found", 8)
dose['treeid'] = self.tcPatients.AppendItem(badplan, name, 5)
self.tcPatients.SetItemTextColour(badplan, wx.RED)
filearray = [dose['filename']]
self.EnableItemSelection(patient, dose, filearray)
# No RT Dose files were found
else:
if 'structures' in patient:
for structureid, structure in patient['structures'].items():
if 'plans' in patient:
for planid, plan in patient['plans'].items():
name = 'RT Dose not found'
baddose = self.tcPatients.AppendItem(plan['treeid'], name, 9)
self.tcPatients.SetItemTextColour(baddose, wx.RED)
# No RT Plan nor RT Dose files were found
else:
name = 'RT Plan not found'
badplan = self.tcPatients.AppendItem(structure['treeid'], name, 8)
self.tcPatients.SetItemTextColour(badplan, wx.RED)
name = 'RT Dose not found'
baddose = self.tcPatients.AppendItem(badplan, name, 9)
self.tcPatients.SetItemTextColour(baddose, wx.RED)
#self.btnSelect.SetFocus()
self.tcPatients.ExpandAll()
self.lblProgress.SetLabel(
str(self.lblProgress.GetLabel()).replace(' Reading DICOM data...', ''))
#Added by CL.Wang
self.Check_Export_Files()
def Check_Export_Files(self):
def select(child, flag):
if flag:
self.tcPatients.SetItemImage(child, 10)
self.selected_exports.append(child)
else:
self.tcPatients.SetItemImage(child, 3)
def minslice_check(child):
info = self.tcPatients.GetItemData(child)['info']
return int(info['numimages'])>self.min_slice_num
self.selected_exports = []
first_patient = self.tcPatients.GetFirstChild(self.tcPatients.RootItem)[0]
first_study = self.tcPatients.GetFirstChild(first_patient)[0]
child, cookie = self.tcPatients.GetFirstChild(first_study)
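        # Walk the series nodes under the first study and mark those that pass
        # the export filters (volume-name check and/or minimum slice count).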
while child.IsOk():
if self.only_export_voldata:
title = self.tcPatients.GetItemText(child)
flag = 'vol' in title.lower() and minslice_check(child)
select(child, flag)
else:
select(child, minslice_check(child))
child, cookie = self.tcPatients.GetNextChild(child, cookie)
logger.info('%d files selected!', len(self.selected_exports))
def EnableItemSelection(self, patient, item, filearray = [], rxdose = None):
"""Enable an item to be selected in the tree control."""
# Add the respective images to the filearray if they exist
if 'images' in patient:
for imageid, image in patient['images'].items():
appendImage = False
# used for image series
if 'id' in item:
if (item['id'] == image['series']):
appendImage = True
# used for RT structure set
if 'series' in item:
if (item['series'] == image['series']):
appendImage = True
# used for RT plan / dose
if 'referenceframe' in item:
if (item['referenceframe'] == image['referenceframe']):
if not 'numimages' in item:
appendImage = True
if appendImage:
filearray.append(image['filename'])
# Add the respective rtss files to the filearray if they exist
if 'structures' in patient:
for structureid, structure in patient['structures'].items():
if 'rtss' in item:
if (structureid == item['rtss']):
filearray.append(structure['filename'])
break
elif (structure['referenceframe'] == item['referenceframe']):
filearray.append(structure['filename'])
break
# If no referenced rtss, but ref'd rtplan, check rtplan->rtss
if 'rtplan' in item:
if 'plans' in patient:
for planid, plan in patient['plans'].items():
if (planid == item['rtplan']):
if 'rtss' in plan:
if (structureid == plan['rtss']):
filearray.append(structure['filename'])
# Add the respective rtplan files to the filearray if they exist
if 'plans' in patient:
for planid, plan in patient['plans'].items():
if 'rtplan' in item:
if (planid == item['rtplan']):
filearray.append(plan['filename'])
if not rxdose:
self.tcPatients.SetItemData(item['treeid'], {'filearray':filearray, 'info':item})
else:
self.tcPatients.SetItemData(item['treeid'], {'filearray':filearray, 'info':item, 'rxdose':rxdose})
self.tcPatients.SetItemBold(item['treeid'], True)
self.tcPatients.SelectItem(item['treeid'])
def OnSelectTreeItem(self, evt):
"""Update the interface when the selected item has changed."""
item = evt.GetItem()
# Disable the rx dose message and select button by default
self.EnableRxDose(False)
#self.btnSelect.Enable(False)
# If the item has data, check to see whether there is an rxdose
if not (self.tcPatients.GetItemData(item) == None):
data = self.tcPatients.GetItemData(item)
#self.btnSelect.Enable()
rxdose = 0
parent = self.tcPatients.GetItemParent(item)
if 'rxdose' in data:
rxdose = data['rxdose']
else:
parentdata = self.tcPatients.GetItemData(parent)
if not (parentdata == None):
if 'rxdose' in parentdata:
rxdose = parentdata['rxdose']
# Show the rxdose text box if no rxdose was found
# and if it is an RT plan or RT dose file
self.txtRxDose.SetValue(rxdose)
if (self.tcPatients.GetItemText(item).startswith('RT Plan') or
self.tcPatients.GetItemText(parent).startswith('RT Plan')):
self.EnableRxDose(True)
def EnableRxDose(self, value):
"""Show or hide the prescription dose message."""
self.bmpRxDose.Show(value)
self.lblRxDose.Show(value)
self.txtRxDose.Show(value)
self.lblRxDoseUnits.Show(value)
# if set to hide, reset the rx dose
if not value:
self.txtRxDose.SetValue(1)
def ExportPatientData(self, path, filearray, RxDose, terminate, progressFunc, exportFunc):
"""Get the data of the selected patient from the DICOM importer dialog."""
msgs = ['Scanning patient. Please wait...','Exporting patient cancelled.','Exporting patient...']
wx.CallAfter(progressFunc, -1, 100, msgs[0])
for n in range(0, len(filearray)):
if terminate():
wx.CallAfter(progressFunc, 98, 100, msgs[1])
return
dcmfile = str(os.path.join(self.path, filearray[n]))
dp = dicomparser.DicomParser(dcmfile)
if (n == 0):
patient = {}
patient['rxdose'] = RxDose
if (('ImageOrientationPatient' in dp.ds) and \
not (dp.GetSOPClassUID() == 'rtdose')):
if not 'images' in patient:
patient['images'] = []
patient['images'].append(dp.ds)
elif (dp.ds.Modality in ['RTSTRUCT']):
patient['rtss'] = dp.ds
elif (dp.ds.Modality in ['RTPLAN']):
patient['rtplan'] = dp.ds
elif (dp.ds.Modality in ['RTDOSE']):
patient['rtdose'] = dp.ds
wx.CallAfter(progressFunc, n//2, len(filearray), msgs[0])
# Sort the images based on a sort descriptor:
# (ImagePositionPatient, InstanceNumber or AcquisitionNumber)
if 'images' in patient:
sortedimages = []
unsortednums = []
sortednums = []
images = patient['images']
sort = 'IPP'
# Determine if all images in the series are parallel
# by testing for differences in ImageOrientationPatient
parallel = True
for i, item in enumerate(images):
if (i > 0):
iop0 = np.array(item.ImageOrientationPatient)
iop1 = np.array(images[i-1].ImageOrientationPatient)
if (np.any(np.array(np.round(iop0 - iop1), dtype=np.int32))):
parallel = False
break
# Also test ImagePositionPatient, as some series
# use the same patient position for every slice
ipp0 = np.array(item.ImagePositionPatient)
ipp1 = np.array(images[i-1].ImagePositionPatient)
if not (np.any(np.array(np.round(ipp0 - ipp1), dtype=np.int32))):
parallel = False
break
# If the images are parallel, sort by ImagePositionPatient
if parallel:
sort = 'IPP'
else:
# Otherwise sort by Instance Number
if not (images[0].InstanceNumber == \
images[1].InstanceNumber):
sort = 'InstanceNumber'
# Otherwise sort by Acquisition Number
elif not (images[0].AcquisitionNumber == \
images[1].AcquisitionNumber):
sort = 'AcquisitionNumber'
# Add the sort descriptor to a list to be sorted
for i, image in enumerate(images):
if (sort == 'IPP'):
unsortednums.append(image.ImagePositionPatient[2])
else:
unsortednums.append(image.data_element(sort).value)
# Sort in LPI order! Modified by CL.Wang
# Sort image numbers in descending order for head first patients
if ('hf' in image.PatientPosition.lower()) and (sort == 'IPP'):
sortednums = sorted(unsortednums, reverse=True)
# Otherwise sort image numbers in ascending order
else:
sortednums = sorted(unsortednums, reverse=False)
# Add the images to the array based on the sorted order
for s, slice in enumerate(sortednums):
for i, image in enumerate(images):
if (sort == 'IPP'):
if (slice == image.ImagePositionPatient[2]):
sortedimages.append(image)
elif (slice == image.data_element(sort).value):
sortedimages.append(image)
# Save the images back to the patient dictionary
logger.debug('Slices num: %d', len(sortedimages))
patient['images'] = sortedimages
wx.CallAfter(progressFunc, 49, 100, msgs[2])
if exportFunc:
exportFunc(patient_data=patient, progressFunc=progressFunc)
wx.CallAfter(progressFunc, 99, 100, '')
def GetPatient(self):
"""Return the patient data from the DICOM importer dialog."""
return self.patient
def OnCancel(self, evt):
"""Stop the directory search and close the dialog."""
self.terminate = True
super().OnCancel(evt)
def main():
app = DcmConverterApp(0)
app.MainLoop()
if __name__ == '__main__':
main()
| [
"wx.Dialog.__init__",
"os.walk",
"wx.CallAfter",
"os.path.isfile",
"os.path.join",
"numpy.round",
"pyDcmConverter.guiutil.get_icon",
"wx.SystemSettings.GetFont",
"os.path.dirname",
"numpy.transpose",
"numpy.max",
"wx.DirDialog",
"wx.GetApp",
"nibabel.Nifti1Image",
"threading.Thread",
"pyDcmConverter.util.GetResourcePath",
"json.dump",
"functools.partial",
"os.path.basename",
"wx.ImageList",
"numpy.min",
"dicompylercore.dicomparser.DicomParser",
"os.sys.exit",
"pyDcmConverter.guiutil.IsGtk",
"json.load",
"os.makedirs",
"warnings.filterwarnings",
"os.path.isdir",
"wx.MessageDialog",
"pyDcmConverter.guiutil.IsMac",
"utils_cw.write_mori",
"numpy.array",
"os.path.expanduser",
"logging.getLogger"
] | [((592, 617), 'logging.getLogger', 'getLogger', (['"""DcmConverter"""'], {}), "('DcmConverter')\n", (601, 617), False, 'from logging import getLogger, DEBUG, INFO\n'), ((628, 697), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'wx.wxPyDeprecationWarning'}), "('ignore', category=wx.wxPyDeprecationWarning)\n", (651, 697), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((2258, 2272), 'os.sys.exit', 'os.sys.exit', (['(0)'], {}), '(0)\n', (2269, 2272), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((2427, 2451), 'wx.Dialog.__init__', 'wx.Dialog.__init__', (['self'], {}), '(self)\n', (2445, 2451), False, 'import wx\n'), ((4806, 4842), 'os.path.isfile', 'os.path.isfile', (['""".dcmconverter.conf"""'], {}), "('.dcmconverter.conf')\n", (4820, 4842), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((6646, 6696), 'wx.SystemSettings.GetFont', 'wx.SystemSettings.GetFont', (['wx.SYS_DEFAULT_GUI_FONT'], {}), '(wx.SYS_DEFAULT_GUI_FONT)\n', (6671, 6696), False, 'import wx\n'), ((6708, 6723), 'pyDcmConverter.guiutil.IsMac', 'guiutil.IsMac', ([], {}), '()\n', (6721, 6723), False, 'from pyDcmConverter import guiutil, util\n'), ((8925, 8974), 'wx.MessageDialog', 'wx.MessageDialog', (['self', 'msg', '"""Error"""'], {'style': 'wx.OK'}), "(self, msg, 'Error', style=wx.OK)\n", (8941, 8974), False, 'import wx\n'), ((9082, 9153), 'wx.MessageDialog', 'wx.MessageDialog', (['self', 'msg', '"""Warning"""'], {'style': '(wx.OK_DEFAULT | wx.CANCEL)'}), "(self, msg, 'Warning', style=wx.OK_DEFAULT | wx.CANCEL)\n", (9098, 9153), False, 'import wx\n'), ((10857, 10907), 'dicompylercore.dicomparser.DicomParser', 'dicomparser.DicomParser', (["patient_data['images'][0]"], {}), "(patient_data['images'][0])\n", (10880, 10907), False, 'from dicompylercore import dicomparser\n'), ((12686, 12722), 'numpy.transpose', 'np.transpose', (['image_array', '(1, 0, 2)'], {}), '(image_array, (1, 0, 2))\n', (12698, 12722), True, 'import numpy as np\n'), ((17178, 17283), 'wx.DirDialog', 'wx.DirDialog', (['self'], {'defaultPath': 'self.path', 'message': '"""Choose a directory containing DICOM RT files..."""'}), "(self, defaultPath=self.path, message=\n 'Choose a directory containing DICOM RT files...')\n", (17190, 17283), False, 'import wx\n'), ((18200, 18406), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.DirectorySearchThread', 'args': '(self, self.path, self.import_search_subfolders, self.SetThreadStatus, self\n .OnUpdateProgress, self.AddPatientTree, self.AddPatientDataTree)'}), '(target=self.DirectorySearchThread, args=(self, self.path,\n self.import_search_subfolders, self.SetThreadStatus, self.\n OnUpdateProgress, self.AddPatientTree, self.AddPatientDataTree))\n', (18216, 18406), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((18832, 18893), 'wx.CallAfter', 'wx.CallAfter', (['progressFunc', '(0)', '(0)', '"""Searching for patients..."""'], {}), "(progressFunc, 0, 0, 'Searching for patients...')\n", (18844, 18893), False, 'import wx\n'), ((18966, 18985), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (18979, 18985), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((27449, 27481), 'wx.ImageList', 'wx.ImageList', (['iSize[0]', 'iSize[1]'], {}), '(iSize[0], iSize[1])\n', (27461, 27481), False, 'import wx\n'), ((46981, 47025), 'wx.CallAfter', 'wx.CallAfter', (['progressFunc', '(-1)', '(100)', 'msgs[0]'], {}), '(progressFunc, -1, 100, msgs[0])\n', (46993, 
47025), False, 'import wx\n'), ((51148, 51192), 'wx.CallAfter', 'wx.CallAfter', (['progressFunc', '(49)', '(100)', 'msgs[2]'], {}), '(progressFunc, 49, 100, msgs[2])\n', (51160, 51192), False, 'import wx\n'), ((51297, 51336), 'wx.CallAfter', 'wx.CallAfter', (['progressFunc', '(99)', '(100)', '""""""'], {}), "(progressFunc, 99, 100, '')\n", (51309, 51336), False, 'import wx\n'), ((1083, 1119), 'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""dicomgui.xrc"""'], {}), "('dicomgui.xrc')\n", (1103, 1119), False, 'from pyDcmConverter import guiutil, util\n'), ((1969, 2013), 'json.dump', 'json.dump', (['conf', 'f'], {'indent': '(2)', 'sort_keys': '(True)'}), '(conf, f, indent=2, sort_keys=True)\n', (1978, 2013), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((2586, 2601), 'pyDcmConverter.guiutil.IsMac', 'guiutil.IsMac', ([], {}), '()\n', (2599, 2601), False, 'from pyDcmConverter import guiutil, util\n'), ((6078, 6101), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (6096, 6101), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((10194, 10232), 'os.path.isfile', 'os.path.isfile', (["(mori_fname + '.raw.gz')"], {}), "(mori_fname + '.raw.gz')\n", (10208, 10232), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((10675, 10700), 'os.path.isfile', 'os.path.isfile', (['nii_fname'], {}), '(nii_fname)\n', (10689, 10700), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((11237, 11276), 'numpy.array', 'np.array', (['dp.ds.ImageOrientationPatient'], {}), '(dp.ds.ImageOrientationPatient)\n', (11245, 11276), True, 'import numpy as np\n'), ((12232, 12260), 'dicompylercore.dicomparser.DicomParser', 'dicomparser.DicomParser', (['img'], {}), '(img)\n', (12255, 12260), False, 'from dicompylercore import dicomparser\n'), ((12555, 12674), 'wx.CallAfter', 'wx.CallAfter', (['progressFunc', '((i + image_array.shape[-1]) // 2)', '(image_array.shape[-1] + 1)', '"""Creating image array..."""'], {}), "(progressFunc, (i + image_array.shape[-1]) // 2, image_array.\n shape[-1] + 1, 'Creating image array...')\n", (12567, 12674), False, 'import wx\n'), ((12914, 12961), 'utils_cw.write_mori', 'write_mori', (['image_array', 'reso', 'mori_fname', '(True)'], {}), '(image_array, reso, mori_fname, True)\n', (12924, 12961), False, 'from utils_cw import write_mori, get_mori_header_fields\n'), ((15001, 15066), 'wx.CallAfter', 'wx.CallAfter', (['progressFunc', '(97)', '(100)', '"""Export RAW image completed"""'], {}), "(progressFunc, 97, 100, 'Export RAW image completed')\n", (15013, 15066), False, 'import wx\n'), ((15299, 15366), 'wx.CallAfter', 'wx.CallAfter', (['progressFunc', '(98)', '(100)', '"""Export Nifti image completed"""'], {}), "(progressFunc, 98, 100, 'Export Nifti image completed')\n", (15311, 15366), False, 'import wx\n'), ((15776, 15806), 'os.path.isdir', 'os.path.isdir', (['self.output_dir'], {}), '(self.output_dir)\n', (15789, 15806), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((15907, 15935), 'os.makedirs', 'os.makedirs', (['self.output_dir'], {}), '(self.output_dir)\n', (15918, 15935), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((17939, 17954), 'pyDcmConverter.guiutil.IsGtk', 'guiutil.IsGtk', ([], {}), '()\n', (17952, 17954), False, 'from pyDcmConverter import guiutil, util\n'), ((19052, 19065), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (19059, 19065), False, 'import hashlib, os, threading, functools, json, warnings\n'), 
((25856, 25901), 'wx.CallAfter', 'wx.CallAfter', (['progressFunc', '(0)', '(1)', 'progressStr'], {}), '(progressFunc, 0, 1, progressStr)\n', (25868, 25901), False, 'import wx\n'), ((25914, 25948), 'wx.CallAfter', 'wx.CallAfter', (['resultFunc', 'patients'], {}), '(resultFunc, patients)\n', (25926, 25948), False, 'import wx\n'), ((26037, 26097), 'wx.CallAfter', 'wx.CallAfter', (['progressFunc', '(0)', '(0)', '"""Select a valid location."""'], {}), "(progressFunc, 0, 0, 'Select a valid location.')\n", (26049, 26097), False, 'import wx\n'), ((26116, 26282), 'wx.MessageDialog', 'wx.MessageDialog', (['parent', '"""The DICOM import location does not exist. Please select a valid location."""', '"""Invalid DICOM Import Location"""', '(wx.OK | wx.ICON_ERROR)'], {}), "(parent,\n 'The DICOM import location does not exist. Please select a valid location.'\n , 'Invalid DICOM Import Location', wx.OK | wx.ICON_ERROR)\n", (26132, 26282), False, 'import wx\n'), ((47263, 47295), 'dicompylercore.dicomparser.DicomParser', 'dicomparser.DicomParser', (['dcmfile'], {}), '(dcmfile)\n', (47286, 47295), False, 'from dicompylercore import dicomparser\n'), ((961, 972), 'wx.GetApp', 'wx.GetApp', ([], {}), '()\n', (970, 972), False, 'import wx\n'), ((2628, 2646), 'pyDcmConverter.guiutil.get_icon', 'guiutil.get_icon', ([], {}), '()\n', (2644, 2646), False, 'from pyDcmConverter import guiutil, util\n'), ((4983, 4995), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4992, 4995), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((9974, 10003), 'os.path.dirname', 'os.path.dirname', (['out_basepath'], {}), '(out_basepath)\n', (9989, 10003), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((10037, 10059), 'os.path.isdir', 'os.path.isdir', (['out_dir'], {}), '(out_dir)\n', (10050, 10059), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((10078, 10098), 'os.makedirs', 'os.makedirs', (['out_dir'], {}), '(out_dir)\n', (10089, 10098), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((10147, 10177), 'os.path.basename', 'os.path.basename', (['out_basepath'], {}), '(out_basepath)\n', (10163, 10177), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((10444, 10473), 'os.path.dirname', 'os.path.dirname', (['out_basepath'], {}), '(out_basepath)\n', (10459, 10473), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((10509, 10531), 'os.path.isdir', 'os.path.isdir', (['out_dir'], {}), '(out_dir)\n', (10522, 10531), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((10550, 10570), 'os.makedirs', 'os.makedirs', (['out_dir'], {}), '(out_dir)\n', (10561, 10570), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((15231, 15274), 'nibabel.Nifti1Image', 'nib.Nifti1Image', (['image_array'], {'affine': 'affine'}), '(image_array, affine=affine)\n', (15246, 15274), True, 'import nibabel as nib\n'), ((19529, 19553), 'os.path.isfile', 'os.path.isfile', (['files[n]'], {}), '(files[n])\n', (19543, 19553), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((27540, 27573), 'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""group.png"""'], {}), "('group.png')\n", (27560, 27573), False, 'from pyDcmConverter import guiutil, util\n'), ((27670, 27702), 'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""user.png"""'], {}), "('user.png')\n", (27690, 27702), False, 'from pyDcmConverter import guiutil, util\n'), ((27799, 27831), 
'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""book.png"""'], {}), "('book.png')\n", (27819, 27831), False, 'from pyDcmConverter import guiutil, util\n'), ((27928, 27970), 'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""table_multiple.png"""'], {}), "('table_multiple.png')\n", (27948, 27970), False, 'from pyDcmConverter import guiutil, util\n'), ((28067, 28101), 'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""pencil.png"""'], {}), "('pencil.png')\n", (28087, 28101), False, 'from pyDcmConverter import guiutil, util\n'), ((28198, 28235), 'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""chart_bar.png"""'], {}), "('chart_bar.png')\n", (28218, 28235), False, 'from pyDcmConverter import guiutil, util\n'), ((28332, 28371), 'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""chart_curve.png"""'], {}), "('chart_curve.png')\n", (28352, 28371), False, 'from pyDcmConverter import guiutil, util\n'), ((28468, 28508), 'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""pencil_error.png"""'], {}), "('pencil_error.png')\n", (28488, 28508), False, 'from pyDcmConverter import guiutil, util\n'), ((28605, 28648), 'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""chart_bar_error.png"""'], {}), "('chart_bar_error.png')\n", (28625, 28648), False, 'from pyDcmConverter import guiutil, util\n'), ((28745, 28790), 'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""chart_curve_error.png"""'], {}), "('chart_curve_error.png')\n", (28765, 28790), False, 'from pyDcmConverter import guiutil, util\n'), ((28887, 28929), 'pyDcmConverter.util.GetResourcePath', 'util.GetResourcePath', (['"""table_selected.png"""'], {}), "('table_selected.png')\n", (28907, 28929), False, 'from pyDcmConverter import guiutil, util\n'), ((47113, 47157), 'wx.CallAfter', 'wx.CallAfter', (['progressFunc', '(98)', '(100)', 'msgs[1]'], {}), '(progressFunc, 98, 100, msgs[1])\n', (47125, 47157), False, 'import wx\n'), ((47207, 47244), 'os.path.join', 'os.path.join', (['self.path', 'filearray[n]'], {}), '(self.path, filearray[n])\n', (47219, 47244), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((10618, 10648), 'os.path.basename', 'os.path.basename', (['out_basepath'], {}), '(out_basepath)\n', (10634, 10648), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((19426, 19480), 'wx.CallAfter', 'wx.CallAfter', (['progressFunc', '(0)', '(0)', '"""Search terminated."""'], {}), "(progressFunc, 0, 0, 'Search terminated.')\n", (19438, 19480), False, 'import wx\n'), ((48556, 48594), 'numpy.array', 'np.array', (['item.ImageOrientationPatient'], {}), '(item.ImageOrientationPatient)\n', (48564, 48594), True, 'import numpy as np\n'), ((48622, 48669), 'numpy.array', 'np.array', (['images[i - 1].ImageOrientationPatient'], {}), '(images[i - 1].ImageOrientationPatient)\n', (48630, 48669), True, 'import numpy as np\n'), ((48985, 49020), 'numpy.array', 'np.array', (['item.ImagePositionPatient'], {}), '(item.ImagePositionPatient)\n', (48993, 49020), True, 'import numpy as np\n'), ((49048, 49092), 'numpy.array', 'np.array', (['images[i - 1].ImagePositionPatient'], {}), '(images[i - 1].ImagePositionPatient)\n', (49056, 49092), True, 'import numpy as np\n'), ((19105, 19126), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (19117, 19126), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((19672, 19705), 'dicompylercore.dicomparser.DicomParser', 
'dicomparser.DicomParser', (['files[n]'], {}), '(files[n])\n', (19695, 19705), False, 'from dicompylercore import dicomparser\n'), ((16572, 16629), 'functools.partial', 'functools.partial', (['self.ExportFunc'], {'out_basepath': 'basename'}), '(self.ExportFunc, out_basepath=basename)\n', (16589, 16629), False, 'import hashlib, os, threading, functools, json, warnings\n'), ((20418, 20450), 'wx.CallAfter', 'wx.CallAfter', (['foundFunc', 'patient'], {}), '(foundFunc, patient)\n', (20430, 20450), False, 'import wx\n'), ((48708, 48729), 'numpy.round', 'np.round', (['(iop0 - iop1)'], {}), '(iop0 - iop1)\n', (48716, 48729), True, 'import numpy as np\n'), ((49135, 49156), 'numpy.round', 'np.round', (['(ipp0 - ipp1)'], {}), '(ipp0 - ipp1)\n', (49143, 49156), True, 'import numpy as np\n'), ((13407, 13418), 'numpy.min', 'np.min', (['pos'], {}), '(pos)\n', (13413, 13418), True, 'import numpy as np\n'), ((13534, 13545), 'numpy.max', 'np.max', (['pos'], {}), '(pos)\n', (13540, 13545), True, 'import numpy as np\n')] |
from os import listdir
from argparse import ArgumentParser
from os.path import isdir, join
middle = '├'
pipe = '│'
last = '└'
scale = 2
def traverse(path, parents='', depth=0, isLast=True):
tree = [(path, parents, depth, isLast)]
    realPath = join(parents, path)
files = listdir(realPath)
maxFiles = len(files)
for idx,file in enumerate(files):
        curPath = join(realPath, file)
        isLast = idx == maxFiles - 1
        if isdir(curPath):
            tree = tree + traverse(file, realPath, depth + 1, isLast)
else:
tree.append((file, parents, depth+1, isLast))
return tree
def addDepth(depth,connections,spacer=" "):
return "".join([pipe if x in connections else spacer for x in range(0, depth)])
def findConnections(depth, below):
depths = []
for (_,_,curDepth,_) in below:
if curDepth < depth:
depths.append(curDepth)
depth=curDepth
return depths
def prettyPrint(treeInfo):
for idx,node in enumerate(treeInfo):
(name, parents, depth, isLast) = node
prefix = last if isLast else middle
connections = [x*scale for x in findConnections(depth,treeInfo[idx:])]
print("%s%s %s"%(addDepth(depth*scale,connections), prefix, name))
parser = ArgumentParser(description="list a folder as a tree")
parser.add_argument("folder", nargs="?", default="./", type=str)
args = parser.parse_args()
folderInfo = traverse(args.folder)
prettyPrint(folderInfo)
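# A minimal example of the output, traced through the functions above for a
# hypothetical folder "demo" containing a subfolder "sub" (holding "x.txt")
# and a file "z.txt":
#
#   └ demo
#     ├ sub
#     │ └ x.txt
#     └ z.txt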
| [
"os.listdir",
"os.path.isdir",
"os.path.join",
"argparse.ArgumentParser"
] | [((1288, 1341), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""list a folder as a tree"""'}), "(description='list a folder as a tree')\n", (1302, 1341), False, 'from argparse import ArgumentParser\n'), ((267, 286), 'os.path.join', 'join', (['parents', 'path'], {}), '(parents, path)\n', (271, 286), False, 'from os.path import isdir, join\n'), ((298, 315), 'os.listdir', 'listdir', (['realPath'], {}), '(realPath)\n', (305, 315), False, 'from os import listdir\n'), ((398, 418), 'os.path.join', 'join', (['realPath', 'file'], {}), '(realPath, file)\n', (402, 418), False, 'from os.path import isdir, join\n'), ((465, 479), 'os.path.isdir', 'isdir', (['curPath'], {}), '(curPath)\n', (470, 479), False, 'from os.path import isdir, join\n')] |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import gettext
import os
import sys
from keystone.common import logging
from keystone.openstack.common import cfg
gettext.install('keystone', unicode=1)
CONF = cfg.CONF
def setup_logging(conf):
"""
Sets up the logging options for a log with supplied name
:param conf: a cfg.ConfOpts object
"""
if conf.log_config:
# Use a logging configuration file for all settings...
if os.path.exists(conf.log_config):
logging.config.fileConfig(conf.log_config)
return
else:
raise RuntimeError('Unable to locate specified logging '
'config file: %s' % conf.log_config)
root_logger = logging.root
if conf.debug:
root_logger.setLevel(logging.DEBUG)
elif conf.verbose:
root_logger.setLevel(logging.INFO)
else:
root_logger.setLevel(logging.WARNING)
formatter = logging.Formatter(conf.log_format, conf.log_date_format)
if conf.use_syslog:
try:
facility = getattr(logging.SysLogHandler,
conf.syslog_log_facility)
except AttributeError:
raise ValueError(_('Invalid syslog facility'))
handler = logging.SysLogHandler(address='/dev/log',
facility=facility)
elif conf.log_file:
logfile = conf.log_file
if conf.log_dir:
logfile = os.path.join(conf.log_dir, logfile)
handler = logging.WatchedFileHandler(logfile)
else:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
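# A minimal usage sketch (assumed invocation, not part of this module): after
# the register_* calls below run at import time, options are read from the
# global CONF object, e.g.:
#
#   from keystone import config
#   config.CONF(project='keystone')        # parse config files / CLI args
#   config.setup_logging(config.CONF)
#   bind_host = config.CONF.bind_host
#   db_url = config.CONF.sql.connection    # grouped options use attributes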
def register_str(*args, **kw):
conf = kw.pop('conf', CONF)
group = kw.pop('group', None)
return conf.register_opt(cfg.StrOpt(*args, **kw), group=group)
def register_cli_str(*args, **kw):
conf = kw.pop('conf', CONF)
group = kw.pop('group', None)
return conf.register_cli_opt(cfg.StrOpt(*args, **kw), group=group)
def register_bool(*args, **kw):
conf = kw.pop('conf', CONF)
group = kw.pop('group', None)
return conf.register_opt(cfg.BoolOpt(*args, **kw), group=group)
def register_cli_bool(*args, **kw):
conf = kw.pop('conf', CONF)
group = kw.pop('group', None)
return conf.register_cli_opt(cfg.BoolOpt(*args, **kw), group=group)
def register_int(*args, **kw):
conf = kw.pop('conf', CONF)
group = kw.pop('group', None)
return conf.register_opt(cfg.IntOpt(*args, **kw), group=group)
def register_cli_int(*args, **kw):
conf = kw.pop('conf', CONF)
group = kw.pop('group', None)
return conf.register_cli_opt(cfg.IntOpt(*args, **kw), group=group)
register_str('admin_token', default='ADMIN')
register_str('bind_host', default='0.0.0.0')
register_int('compute_port', default=8774)
register_int('admin_port', default=35357)
register_int('public_port', default=5000)
register_str('onready')
register_str('auth_admin_prefix', default='')
#ssl options
register_bool('enable', group='ssl', default=False)
register_str('certfile', group='ssl', default=None)
register_str('keyfile', group='ssl', default=None)
register_str('ca_certs', group='ssl', default=None)
register_bool('cert_required', group='ssl', default=False)
#signing options
register_str('token_format', group='signing',
default="UUID")
register_str('certfile', group='signing',
default="/etc/keystone/ssl/certs/signing_cert.pem")
register_str('keyfile', group='signing',
default="/etc/keystone/ssl/private/signing_key.pem")
register_str('ca_certs', group='signing',
default="/etc/keystone/ssl/certs/ca.pem")
register_int('key_size', group='signing', default=1024)
register_int('valid_days', group='signing', default=3650)
register_str('ca_password', group='signing', default=None)
# sql options
register_str('connection', group='sql', default='sqlite:///keystone.db')
register_int('idle_timeout', group='sql', default=200)
register_str('driver', group='catalog',
default='keystone.catalog.backends.sql.Catalog')
register_str('driver', group='identity',
default='keystone.identity.backends.sql.Identity')
register_str('driver', group='policy',
default='keystone.policy.backends.rules.Policy')
register_str('driver', group='token',
default='keystone.token.backends.kvs.Token')
register_str('driver', group='ec2',
default='keystone.contrib.ec2.backends.kvs.Ec2')
register_str('driver', group='stats',
default='keystone.contrib.stats.backends.kvs.Stats')
#ldap
register_str('url', group='ldap', default='ldap://localhost')
register_str('user', group='ldap', default='dc=Manager,dc=example,dc=com')
register_str('password', group='ldap', default='<PASSWORD>')
register_str('suffix', group='ldap', default='cn=example,cn=com')
register_bool('use_dumb_member', group='ldap', default=False)
register_str('user_name_attribute', group='ldap', default='sn')
register_str('user_tree_dn', group='ldap', default=None)
register_str('user_objectclass', group='ldap', default='inetOrgPerson')
register_str('user_id_attribute', group='ldap', default='cn')
register_str('tenant_tree_dn', group='ldap', default=None)
register_str('tenant_objectclass', group='ldap', default='groupOfNames')
register_str('tenant_id_attribute', group='ldap', default='cn')
register_str('tenant_member_attribute', group='ldap', default='member')
register_str('tenant_name_attribute', group='ldap', default='ou')
register_str('role_tree_dn', group='ldap', default=None)
register_str('role_objectclass', group='ldap', default='organizationalRole')
register_str('role_id_attribute', group='ldap', default='cn')
register_str('role_member_attribute', group='ldap', default='roleOccupant')
#pam
register_str('url', group='pam', default=None)
register_str('userid', group='pam', default=None)
register_str('password', group='pam', default=None)
| [
"keystone.common.logging.StreamHandler",
"keystone.openstack.common.cfg.IntOpt",
"os.path.join",
"keystone.common.logging.WatchedFileHandler",
"os.path.exists",
"keystone.openstack.common.cfg.BoolOpt",
"keystone.openstack.common.cfg.StrOpt",
"keystone.common.logging.SysLogHandler",
"keystone.common.logging.Formatter",
"keystone.common.logging.config.fileConfig",
"gettext.install"
] | [((741, 779), 'gettext.install', 'gettext.install', (['"""keystone"""'], {'unicode': '(1)'}), "('keystone', unicode=1)\n", (756, 779), False, 'import gettext\n'), ((1533, 1589), 'keystone.common.logging.Formatter', 'logging.Formatter', (['conf.log_format', 'conf.log_date_format'], {}), '(conf.log_format, conf.log_date_format)\n', (1550, 1589), False, 'from keystone.common import logging\n'), ((1041, 1072), 'os.path.exists', 'os.path.exists', (['conf.log_config'], {}), '(conf.log_config)\n', (1055, 1072), False, 'import os\n'), ((1848, 1908), 'keystone.common.logging.SysLogHandler', 'logging.SysLogHandler', ([], {'address': '"""/dev/log"""', 'facility': 'facility'}), "(address='/dev/log', facility=facility)\n", (1869, 1908), False, 'from keystone.common import logging\n'), ((2405, 2428), 'keystone.openstack.common.cfg.StrOpt', 'cfg.StrOpt', (['*args'], {}), '(*args, **kw)\n', (2415, 2428), False, 'from keystone.openstack.common import cfg\n'), ((2579, 2602), 'keystone.openstack.common.cfg.StrOpt', 'cfg.StrOpt', (['*args'], {}), '(*args, **kw)\n', (2589, 2602), False, 'from keystone.openstack.common import cfg\n'), ((2746, 2770), 'keystone.openstack.common.cfg.BoolOpt', 'cfg.BoolOpt', (['*args'], {}), '(*args, **kw)\n', (2757, 2770), False, 'from keystone.openstack.common import cfg\n'), ((2922, 2946), 'keystone.openstack.common.cfg.BoolOpt', 'cfg.BoolOpt', (['*args'], {}), '(*args, **kw)\n', (2933, 2946), False, 'from keystone.openstack.common import cfg\n'), ((3089, 3112), 'keystone.openstack.common.cfg.IntOpt', 'cfg.IntOpt', (['*args'], {}), '(*args, **kw)\n', (3099, 3112), False, 'from keystone.openstack.common import cfg\n'), ((3263, 3286), 'keystone.openstack.common.cfg.IntOpt', 'cfg.IntOpt', (['*args'], {}), '(*args, **kw)\n', (3273, 3286), False, 'from keystone.openstack.common import cfg\n'), ((1086, 1128), 'keystone.common.logging.config.fileConfig', 'logging.config.fileConfig', (['conf.log_config'], {}), '(conf.log_config)\n', (1111, 1128), False, 'from keystone.common import logging\n'), ((2106, 2141), 'keystone.common.logging.WatchedFileHandler', 'logging.WatchedFileHandler', (['logfile'], {}), '(logfile)\n', (2132, 2141), False, 'from keystone.common import logging\n'), ((2170, 2203), 'keystone.common.logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (2191, 2203), False, 'from keystone.common import logging\n'), ((2052, 2087), 'os.path.join', 'os.path.join', (['conf.log_dir', 'logfile'], {}), '(conf.log_dir, logfile)\n', (2064, 2087), False, 'import os\n')] |
from datetime import datetime, timedelta
import pytz
from bcrypt_hash import BcryptHash
import pytest
from src.users import Users
from src.events import Events
from src.stores import MemoryStore
from src.session import Session
def test_login_user():
store = MemoryStore()
users = Users(store)
params = {}
params['password'] = 'password'
session = Session(params, store, '')
password = BcryptHash('password').encrypt()
user = users.add('email', 'name', 'alias', password, 'phone', True, True,
user_id='test')
user.validated = True
with pytest.raises(Exception):
session.login('')
    login_dict = session.login('test')
    assert login_dict
    assert 'user' in login_dict
def test_login_user_bad_password():
store = MemoryStore()
users = Users(store)
params = {}
params['password'] = '<PASSWORD>'
session = Session(params, store, '')
password = BcryptHash('password').encrypt()
user = users.add('email', 'name', 'alias', password, 'phone', True, True,
user_id='test')
user.validated = True
with pytest.raises(Exception):
session.login('test')
def test_login_user_register():
store = MemoryStore()
users = Users(store)
events = Events(store)
start = datetime.now(pytz.timezone("America/New_York"))
dur = timedelta(hours=1)
params = {}
params['password'] = 'password'
password = BcryptHash('password').encrypt()
user = users.add('email', 'name', 'alias', password, 'phone', True, True,
user_id='test')
events.add('test', 'test', 30, start, dur, 'test', 'test',
'<EMAIL>', 'test', user)
user.validated = True
params['register'] = 'test'
session = Session(params, store, '')
    login_dict = session.login('test')
    assert login_dict
    assert 'user' in login_dict
    assert 'register' in login_dict
    assert login_dict['register'] == 'test'
def test_login_user_register_bad_event():
store = MemoryStore()
users = Users(store)
events = Events(store)
start = datetime.now(pytz.timezone("America/New_York"))
dur = timedelta(hours=1)
params = {}
params['password'] = 'password'
password = BcryptHash('password').encrypt()
user = users.add('email', 'name', 'alias', password, 'phone', True, True,
user_id='test')
events.add('test', 'test', 30, start, dur, 'test', 'test',
'<EMAIL>', 'test', user)
user.validated = True
params['register'] = ''
session = Session(params, store, '')
with pytest.raises(Exception):
session.login('test')
| [
"src.session.Session",
"bcrypt_hash.BcryptHash",
"src.stores.MemoryStore",
"src.users.Users",
"pytest.raises",
"datetime.timedelta",
"pytz.timezone",
"src.events.Events"
] | [((264, 277), 'src.stores.MemoryStore', 'MemoryStore', ([], {}), '()\n', (275, 277), False, 'from src.stores import MemoryStore\n'), ((290, 302), 'src.users.Users', 'Users', (['store'], {}), '(store)\n', (295, 302), False, 'from src.users import Users\n'), ((369, 395), 'src.session.Session', 'Session', (['params', 'store', '""""""'], {}), "(params, store, '')\n", (376, 395), False, 'from src.session import Session\n'), ((792, 805), 'src.stores.MemoryStore', 'MemoryStore', ([], {}), '()\n', (803, 805), False, 'from src.stores import MemoryStore\n'), ((818, 830), 'src.users.Users', 'Users', (['store'], {}), '(store)\n', (823, 830), False, 'from src.users import Users\n'), ((899, 925), 'src.session.Session', 'Session', (['params', 'store', '""""""'], {}), "(params, store, '')\n", (906, 925), False, 'from src.session import Session\n'), ((1227, 1240), 'src.stores.MemoryStore', 'MemoryStore', ([], {}), '()\n', (1238, 1240), False, 'from src.stores import MemoryStore\n'), ((1253, 1265), 'src.users.Users', 'Users', (['store'], {}), '(store)\n', (1258, 1265), False, 'from src.users import Users\n'), ((1279, 1292), 'src.events.Events', 'Events', (['store'], {}), '(store)\n', (1285, 1292), False, 'from src.events import Events\n'), ((1363, 1381), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (1372, 1381), False, 'from datetime import datetime, timedelta\n'), ((1773, 1799), 'src.session.Session', 'Session', (['params', 'store', '""""""'], {}), "(params, store, '')\n", (1780, 1799), False, 'from src.session import Session\n'), ((2034, 2047), 'src.stores.MemoryStore', 'MemoryStore', ([], {}), '()\n', (2045, 2047), False, 'from src.stores import MemoryStore\n'), ((2060, 2072), 'src.users.Users', 'Users', (['store'], {}), '(store)\n', (2065, 2072), False, 'from src.users import Users\n'), ((2086, 2099), 'src.events.Events', 'Events', (['store'], {}), '(store)\n', (2092, 2099), False, 'from src.events import Events\n'), ((2170, 2188), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (2179, 2188), False, 'from datetime import datetime, timedelta\n'), ((2576, 2602), 'src.session.Session', 'Session', (['params', 'store', '""""""'], {}), "(params, store, '')\n", (2583, 2602), False, 'from src.session import Session\n'), ((594, 618), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (607, 618), False, 'import pytest\n'), ((1125, 1149), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (1138, 1149), False, 'import pytest\n'), ((1318, 1351), 'pytz.timezone', 'pytz.timezone', (['"""America/New_York"""'], {}), "('America/New_York')\n", (1331, 1351), False, 'import pytz\n'), ((2125, 2158), 'pytz.timezone', 'pytz.timezone', (['"""America/New_York"""'], {}), "('America/New_York')\n", (2138, 2158), False, 'import pytz\n'), ((2612, 2636), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (2625, 2636), False, 'import pytest\n'), ((411, 433), 'bcrypt_hash.BcryptHash', 'BcryptHash', (['"""password"""'], {}), "('password')\n", (421, 433), False, 'from bcrypt_hash import BcryptHash\n'), ((942, 964), 'bcrypt_hash.BcryptHash', 'BcryptHash', (['"""password"""'], {}), "('password')\n", (952, 964), False, 'from bcrypt_hash import BcryptHash\n'), ((1450, 1472), 'bcrypt_hash.BcryptHash', 'BcryptHash', (['"""password"""'], {}), "('password')\n", (1460, 1472), False, 'from bcrypt_hash import BcryptHash\n'), ((2257, 2279), 'bcrypt_hash.BcryptHash', 'BcryptHash', (['"""password"""'], {}), "('password')\n", (2267, 
2279), False, 'from bcrypt_hash import BcryptHash\n')] |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for estimator.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import six
from tensorflow.contrib.distributions.python.ops import estimator as estimator_lib
from tensorflow.contrib.learn.python.learn.estimators import constants
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import model_fn
from tensorflow.contrib.learn.python.learn.estimators.head_test import _assert_metrics
from tensorflow.contrib.learn.python.learn.estimators.head_test import _assert_no_variables
from tensorflow.contrib.learn.python.learn.estimators.head_test import _assert_summary_tags
from tensorflow.python.client import session
from tensorflow.python.framework import ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops.distributions import normal as normal_lib
from tensorflow.python.platform import test
class EstimatorHeadDistributionRegressionTest(test.TestCase):
def _assert_output_alternatives(self, model_fn_ops):
    self.assertEqual({
None: constants.ProblemType.LINEAR_REGRESSION
}, {
k: v[0] for k, v in six.iteritems(model_fn_ops.output_alternatives)
})
def testNormalLocScaleLogits(self):
# We will bias logits[..., 1] so that: logits[..., 1]=0 implies scale=1.
scale_bias = np.log(np.expm1(1.))
def softplus(x):
return np.log1p(np.exp(x))
def actual_loss(logits, labels):
mu = actual_mean(logits)
sigma = actual_stddev(logits)
labels = np.squeeze(labels, -1)
z = (labels - mu) / sigma
loss = 0.5 * (z**2. + np.log(2. * np.pi)) + np.log(sigma)
return loss.mean()
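    # Note: actual_loss is the Gaussian negative log-likelihood,
    #   -log N(y | mu, sigma) = 0.5 * ((y - mu) / sigma)**2
    #                           + 0.5 * log(2 * pi) + log(sigma),
    # averaged over the batch; this is the quantity the head minimizes.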
def actual_mean(logits):
return logits[..., 0]
def actual_stddev(logits):
return softplus(logits[..., 1] + scale_bias)
def make_distribution_fn(logits):
return normal_lib.Normal(
loc=logits[..., 0],
scale=nn_ops.softplus(logits[..., 1] + scale_bias))
head = estimator_lib.estimator_head_distribution_regression(
make_distribution_fn,
logits_dimension=2)
labels = np.float32([[-1.],
[0.],
[1.]])
logits = np.float32([[0., -1],
[1, 0.5],
[-1, 1]])
with ops.Graph().as_default(), session.Session():
# Convert to tensor so we can index into head.distributions.
tflogits = ops.convert_to_tensor(logits, name="logits")
model_fn_ops = head.create_model_fn_ops(
{},
labels=labels,
mode=model_fn.ModeKeys.TRAIN,
train_op_fn=head_lib.no_op_train_fn,
logits=tflogits)
self._assert_output_alternatives(model_fn_ops)
_assert_summary_tags(self, ["loss"])
_assert_no_variables(self)
loss = actual_loss(logits, labels)
_assert_metrics(self, loss, {"loss": loss}, model_fn_ops)
# Now we verify the underlying distribution was correctly constructed.
expected_mean = logits[..., 0]
self.assertAllClose(
expected_mean,
head.distribution(tflogits).mean().eval(),
rtol=1e-6, atol=0.)
expected_stddev = softplus(logits[..., 1] + scale_bias)
self.assertAllClose(
expected_stddev,
head.distribution(tflogits).stddev().eval(),
rtol=1e-6, atol=0.)
# Should have created only one distribution.
self.assertEqual(1, len(head.distributions))
if __name__ == "__main__":
test.main()
| [
"tensorflow.python.platform.test.main",
"tensorflow.python.framework.ops.Graph",
"tensorflow.python.client.session.Session",
"numpy.log",
"tensorflow.python.ops.nn_ops.softplus",
"numpy.float32",
"tensorflow.contrib.learn.python.learn.estimators.head_test._assert_no_variables",
"tensorflow.contrib.distributions.python.ops.estimator.estimator_head_distribution_regression",
"numpy.expm1",
"numpy.exp",
"numpy.squeeze",
"tensorflow.contrib.learn.python.learn.estimators.head_test._assert_summary_tags",
"six.iteritems",
"tensorflow.python.framework.ops.convert_to_tensor",
"tensorflow.contrib.learn.python.learn.estimators.head_test._assert_metrics"
] | [((4270, 4281), 'tensorflow.python.platform.test.main', 'test.main', ([], {}), '()\n', (4279, 4281), False, 'from tensorflow.python.platform import test\n'), ((2756, 2854), 'tensorflow.contrib.distributions.python.ops.estimator.estimator_head_distribution_regression', 'estimator_lib.estimator_head_distribution_regression', (['make_distribution_fn'], {'logits_dimension': '(2)'}), '(make_distribution_fn,\n logits_dimension=2)\n', (2808, 2854), True, 'from tensorflow.contrib.distributions.python.ops import estimator as estimator_lib\n'), ((2881, 2915), 'numpy.float32', 'np.float32', (['[[-1.0], [0.0], [1.0]]'], {}), '([[-1.0], [0.0], [1.0]])\n', (2891, 2915), True, 'import numpy as np\n'), ((2976, 3018), 'numpy.float32', 'np.float32', (['[[0.0, -1], [1, 0.5], [-1, 1]]'], {}), '([[0.0, -1], [1, 0.5], [-1, 1]])\n', (2986, 3018), True, 'import numpy as np\n'), ((2107, 2120), 'numpy.expm1', 'np.expm1', (['(1.0)'], {}), '(1.0)\n', (2115, 2120), True, 'import numpy as np\n'), ((2296, 2318), 'numpy.squeeze', 'np.squeeze', (['labels', '(-1)'], {}), '(labels, -1)\n', (2306, 2318), True, 'import numpy as np\n'), ((3103, 3120), 'tensorflow.python.client.session.Session', 'session.Session', ([], {}), '()\n', (3118, 3120), False, 'from tensorflow.python.client import session\n'), ((3206, 3250), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['logits'], {'name': '"""logits"""'}), "(logits, name='logits')\n", (3227, 3250), False, 'from tensorflow.python.framework import ops\n'), ((3510, 3546), 'tensorflow.contrib.learn.python.learn.estimators.head_test._assert_summary_tags', '_assert_summary_tags', (['self', "['loss']"], {}), "(self, ['loss'])\n", (3530, 3546), False, 'from tensorflow.contrib.learn.python.learn.estimators.head_test import _assert_summary_tags\n'), ((3553, 3579), 'tensorflow.contrib.learn.python.learn.estimators.head_test._assert_no_variables', '_assert_no_variables', (['self'], {}), '(self)\n', (3573, 3579), False, 'from tensorflow.contrib.learn.python.learn.estimators.head_test import _assert_no_variables\n'), ((3627, 3684), 'tensorflow.contrib.learn.python.learn.estimators.head_test._assert_metrics', '_assert_metrics', (['self', 'loss', "{'loss': loss}", 'model_fn_ops'], {}), "(self, loss, {'loss': loss}, model_fn_ops)\n", (3642, 3684), False, 'from tensorflow.contrib.learn.python.learn.estimators.head_test import _assert_metrics\n'), ((2165, 2174), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (2171, 2174), True, 'import numpy as np\n'), ((2401, 2414), 'numpy.log', 'np.log', (['sigma'], {}), '(sigma)\n', (2407, 2414), True, 'import numpy as np\n'), ((1912, 1959), 'six.iteritems', 'six.iteritems', (['model_fn_ops.output_alternatives'], {}), '(model_fn_ops.output_alternatives)\n', (1925, 1959), False, 'import six\n'), ((2698, 2742), 'tensorflow.python.ops.nn_ops.softplus', 'nn_ops.softplus', (['(logits[..., 1] + scale_bias)'], {}), '(logits[..., 1] + scale_bias)\n', (2713, 2742), False, 'from tensorflow.python.ops import nn_ops\n'), ((3077, 3088), 'tensorflow.python.framework.ops.Graph', 'ops.Graph', ([], {}), '()\n', (3086, 3088), False, 'from tensorflow.python.framework import ops\n'), ((2379, 2398), 'numpy.log', 'np.log', (['(2.0 * np.pi)'], {}), '(2.0 * np.pi)\n', (2385, 2398), True, 'import numpy as np\n')] |
# Copyright (c) 2012-2016 Seafile Ltd.
import logging
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.authentication import SessionAuthentication
from seaserv import ccnet_api
from seahub.api2.permissions import IsProVersion
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.authentication import TokenAuthentication
from seahub.api2.utils import api_error
from seahub.api2.endpoints.utils import is_org_user
from seahub.utils import is_valid_email
from seahub.base.accounts import User
from seahub.base.templatetags.seahub_tags import email2nickname
from seahub.profile.models import Profile
logger = logging.getLogger(__name__)
def get_user_info(email):
profile = Profile.objects.get_profile_by_user(email)
info = {}
info['email'] = email
info['name'] = email2nickname(email)
info['contact_email'] = profile.contact_email if profile and profile.contact_email else ''
return info
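# The view below expects a request of roughly this shape (hypothetical values;
# the URL pattern is assumed from the view's arguments):
#
#   PUT /api/v2.1/org/<org_id>/admin/users/<email>/
#   {"name": "New Name", "contact_email": "new@example.com"}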
class OrgAdminUser(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
throttle_classes = (UserRateThrottle,)
permission_classes = (IsProVersion,)
def put(self, request, org_id, email):
""" update name of an org user.
Permission checking:
1. only admin can perform this action.
"""
# resource check
org_id = int(org_id)
if not ccnet_api.get_org_by_id(org_id):
error_msg = 'Organization %s not found.' % org_id
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
try:
user = User.objects.get(email=email)
except User.DoesNotExist:
error_msg = 'User %s not found.' % email
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
# permission check
if not request.user.org.is_staff:
error_msg = 'Permission denied.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
if request.user.org.org_id != org_id:
error_msg = 'Permission denied.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
if not is_org_user(email, org_id):
error_msg = 'Permission denied.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
# update user's name
name = request.data.get("name", None)
if name is not None:
name = name.strip()
if len(name) > 64:
error_msg = 'Name is too long (maximum is 64 characters).'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
if "/" in name:
error_msg = "Name should not include '/'."
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
try:
Profile.objects.add_or_update(email, nickname=name)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
# update user's contact email
contact_email = request.data.get("contact_email", None)
if contact_email is not None:
contact_email = contact_email.strip()
if contact_email != '' and not is_valid_email(contact_email):
error_msg = 'contact_email invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
try:
Profile.objects.add_or_update(email, contact_email=contact_email)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
info = get_user_info(email)
info['is_active'] = user.is_active
return Response(info)
| [
"seahub.api2.utils.api_error",
"seahub.base.accounts.User.objects.get",
"seahub.utils.is_valid_email",
"seahub.profile.models.Profile.objects.add_or_update",
"seaserv.ccnet_api.get_org_by_id",
"seahub.base.templatetags.seahub_tags.email2nickname",
"rest_framework.response.Response",
"seahub.profile.models.Profile.objects.get_profile_by_user",
"seahub.api2.endpoints.utils.is_org_user",
"logging.getLogger"
] | [((720, 747), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (737, 747), False, 'import logging\n'), ((790, 832), 'seahub.profile.models.Profile.objects.get_profile_by_user', 'Profile.objects.get_profile_by_user', (['email'], {}), '(email)\n', (825, 832), False, 'from seahub.profile.models import Profile\n'), ((893, 914), 'seahub.base.templatetags.seahub_tags.email2nickname', 'email2nickname', (['email'], {}), '(email)\n', (907, 914), False, 'from seahub.base.templatetags.seahub_tags import email2nickname\n'), ((3892, 3906), 'rest_framework.response.Response', 'Response', (['info'], {}), '(info)\n', (3900, 3906), False, 'from rest_framework.response import Response\n'), ((1460, 1491), 'seaserv.ccnet_api.get_org_by_id', 'ccnet_api.get_org_by_id', (['org_id'], {}), '(org_id)\n', (1483, 1491), False, 'from seaserv import ccnet_api\n'), ((1574, 1621), 'seahub.api2.utils.api_error', 'api_error', (['status.HTTP_404_NOT_FOUND', 'error_msg'], {}), '(status.HTTP_404_NOT_FOUND, error_msg)\n', (1583, 1621), False, 'from seahub.api2.utils import api_error\n'), ((1655, 1684), 'seahub.base.accounts.User.objects.get', 'User.objects.get', ([], {'email': 'email'}), '(email=email)\n', (1671, 1684), False, 'from seahub.base.accounts import User\n'), ((1973, 2020), 'seahub.api2.utils.api_error', 'api_error', (['status.HTTP_403_FORBIDDEN', 'error_msg'], {}), '(status.HTTP_403_FORBIDDEN, error_msg)\n', (1982, 2020), False, 'from seahub.api2.utils import api_error\n'), ((2132, 2179), 'seahub.api2.utils.api_error', 'api_error', (['status.HTTP_403_FORBIDDEN', 'error_msg'], {}), '(status.HTTP_403_FORBIDDEN, error_msg)\n', (2141, 2179), False, 'from seahub.api2.utils import api_error\n'), ((2196, 2222), 'seahub.api2.endpoints.utils.is_org_user', 'is_org_user', (['email', 'org_id'], {}), '(email, org_id)\n', (2207, 2222), False, 'from seahub.api2.endpoints.utils import is_org_user\n'), ((2288, 2335), 'seahub.api2.utils.api_error', 'api_error', (['status.HTTP_403_FORBIDDEN', 'error_msg'], {}), '(status.HTTP_403_FORBIDDEN, error_msg)\n', (2297, 2335), False, 'from seahub.api2.utils import api_error\n'), ((1791, 1838), 'seahub.api2.utils.api_error', 'api_error', (['status.HTTP_404_NOT_FOUND', 'error_msg'], {}), '(status.HTTP_404_NOT_FOUND, error_msg)\n', (1800, 1838), False, 'from seahub.api2.utils import api_error\n'), ((2603, 2652), 'seahub.api2.utils.api_error', 'api_error', (['status.HTTP_400_BAD_REQUEST', 'error_msg'], {}), '(status.HTTP_400_BAD_REQUEST, error_msg)\n', (2612, 2652), False, 'from seahub.api2.utils import api_error\n'), ((2764, 2813), 'seahub.api2.utils.api_error', 'api_error', (['status.HTTP_400_BAD_REQUEST', 'error_msg'], {}), '(status.HTTP_400_BAD_REQUEST, error_msg)\n', (2773, 2813), False, 'from seahub.api2.utils import api_error\n'), ((2848, 2899), 'seahub.profile.models.Profile.objects.add_or_update', 'Profile.objects.add_or_update', (['email'], {'nickname': 'name'}), '(email, nickname=name)\n', (2877, 2899), False, 'from seahub.profile.models import Profile\n'), ((3444, 3493), 'seahub.api2.utils.api_error', 'api_error', (['status.HTTP_400_BAD_REQUEST', 'error_msg'], {}), '(status.HTTP_400_BAD_REQUEST, error_msg)\n', (3453, 3493), False, 'from seahub.api2.utils import api_error\n'), ((3528, 3593), 'seahub.profile.models.Profile.objects.add_or_update', 'Profile.objects.add_or_update', (['email'], {'contact_email': 'contact_email'}), '(email, contact_email=contact_email)\n', (3557, 3593), False, 'from seahub.profile.models import Profile\n'), ((3042, 3101), 
'seahub.api2.utils.api_error', 'api_error', (['status.HTTP_500_INTERNAL_SERVER_ERROR', 'error_msg'], {}), '(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)\n', (3051, 3101), False, 'from seahub.api2.utils import api_error\n'), ((3337, 3366), 'seahub.utils.is_valid_email', 'is_valid_email', (['contact_email'], {}), '(contact_email)\n', (3351, 3366), False, 'from seahub.utils import is_valid_email\n'), ((3736, 3795), 'seahub.api2.utils.api_error', 'api_error', (['status.HTTP_500_INTERNAL_SERVER_ERROR', 'error_msg'], {}), '(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)\n', (3745, 3795), False, 'from seahub.api2.utils import api_error\n')] |
import logging
import os
from enum import Enum
from imageai.Prediction.Custom import CustomImagePrediction
# Show only errors in console
logging.getLogger("tensorflow").setLevel(logging.ERROR)
class MovesEnum(int, Enum):
ROCK = 0
PAPER = 1
SCISSORS = 2
class ModelTypeEnum(Enum):
"""
An helper enum to help for model type choice
"""
RESNET = 0
    SQUEEZENET = 1
INCEPTIONV3 = 2
DENSENET = 3
class RockPaperScissorsPredictor:
"""
This class contains the required code for model training and move prediction using a
webcam
"""
MODEL_TYPE_SET_LOOKUP = {
ModelTypeEnum.RESNET: lambda x: x.setModelTypeAsResNet(),
        ModelTypeEnum.SQUEEZENET: lambda x: x.setModelTypeAsSqueezeNet(),
ModelTypeEnum.INCEPTIONV3: lambda x: x.setModelTypeAsInceptionV3(),
ModelTypeEnum.DENSENET: lambda x: x.setModelTypeAsDenseNet(),
}
MOVES_LOOKUP = {
"rock": MovesEnum.ROCK,
"paper": MovesEnum.PAPER,
"scissors": MovesEnum.SCISSORS,
}
def __init__(
self,
model_type=ModelTypeEnum.RESNET,
class_number=3, # We have 3 different objects: "rock", "paper", "scissors"
):
self.model_type = model_type
self.class_number = class_number
self.base_path = os.getcwd()
# Instantiate the CustomImagePrediction object that will predict our moves
self.predictor = CustomImagePrediction()
# Set the model type of the neural network (it must be the same of the training)
self._set_proper_model_type(self.model_type)
# Set path to the trained model file
self.predictor.setModelPath(
os.path.join(self.base_path, "data", "move_detector", "model.h5")
)
# Set path to the json file that contains our classes and their labels
self.predictor.setJsonPath(
os.path.join(self.base_path, "data", "move_detector", "model_class.json")
)
# Load the trained model and set it to use "class_number" classes
self.predictor.loadModel(num_objects=self.class_number)
def _set_proper_model_type(self, model_type):
self.MODEL_TYPE_SET_LOOKUP[model_type](self.predictor)
    def detect_move_from_picture(self, picture, sensitivity=90):
predictions, probabilities = self.predictor.predictImage(
picture, result_count=3, input_type="array"
)
# Get a tuple (class_predicted, probability) that contains the best
# prediction
best_prediction = max(
zip(predictions, probabilities), key=lambda x: x[1]
)
        if best_prediction[1] < sensitivity:
return
return self.MOVES_LOOKUP[best_prediction[0]]
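# A minimal usage sketch (assumes the model files referenced above exist and
# that "frame" is an in-memory image array, e.g. a webcam capture):
#
#   detector = RockPaperScissorsPredictor()
#   move = detector.detect_move_from_picture(frame)
#   if move is MovesEnum.ROCK:
#       ...  # react to the move; returns None below the sensitivity threshold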
| [
"os.getcwd",
"os.path.join",
"logging.getLogger",
"imageai.Prediction.Custom.CustomImagePrediction"
] | [((139, 170), 'logging.getLogger', 'logging.getLogger', (['"""tensorflow"""'], {}), "('tensorflow')\n", (156, 170), False, 'import logging\n'), ((1322, 1333), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1331, 1333), False, 'import os\n'), ((1442, 1465), 'imageai.Prediction.Custom.CustomImagePrediction', 'CustomImagePrediction', ([], {}), '()\n', (1463, 1465), False, 'from imageai.Prediction.Custom import CustomImagePrediction\n'), ((1702, 1767), 'os.path.join', 'os.path.join', (['self.base_path', '"""data"""', '"""move_detector"""', '"""model.h5"""'], {}), "(self.base_path, 'data', 'move_detector', 'model.h5')\n", (1714, 1767), False, 'import os\n'), ((1905, 1978), 'os.path.join', 'os.path.join', (['self.base_path', '"""data"""', '"""move_detector"""', '"""model_class.json"""'], {}), "(self.base_path, 'data', 'move_detector', 'model_class.json')\n", (1917, 1978), False, 'import os\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Exercise 18: Cows And Bulls
Create a program that will play the “cows and bulls” game with the user.
The game works like this:
Randomly generate a 4-digit number. Ask the user to guess a 4-digit
number.
For every digit that the user guessed correctly in the correct place,
they have a “cow”. For every digit the user guessed correctly in the
wrong place is a “bull.” Every time the user makes a guess, tell them
how many “cows” and “bulls” they have. Once the user guesses the correct
number, the game is over.
Keep track of the number of guesses the user makes throughout teh game
and tell the user at the end.
Say the number generated by the computer is 1038. An example interaction
could look like this:
Welcome to the Cows and Bulls Game!
Enter a number:
>>> 1234
2 cows, 0 bulls
>>> 1256
1 cow, 0 bulls
Until the user guesses the number.
"""
import random
def get_secret_number():
""" Define the secret number and write it to a file.
"""
secret_number = str(random.randint(1000, 9999))
with open("secret_number.txt", "w") as file:
print(secret_number, file=file)
return secret_number
def get_cows_and_bulls(secret, user):
    """Calculate the number of cows and bulls.

    Exact matches are removed first so a digit guessed in the correct
    place is counted only as a cow, never again as a bull.
    """
    cows = bulls = 0
    secret_chars = secret
    for i in range(len(secret)):
        if user[i] == secret[i]:
            cows += 1
            secret_chars = remove_char(secret_chars, secret[i])
    for i in range(len(secret)):
        if user[i] != secret[i] and user[i] in secret_chars:
            bulls += 1
            secret_chars = remove_char(secret_chars, user[i])
    return cows, bulls
def remove_char(s, c):
"""Remove a char of the string.
When a user character exist in a secret_chars, add 1 to bulls and
remove it of secret_chars to don't duplicate the count
"""
list_chars = list(s)
list_chars.remove(c)
return "".join(list_chars)
if __name__ == "__main__":
guessed = False
attempts = 0
secret = get_secret_number()
while not guessed:
user = input("Guess a 4-digit number: ")
attempts += 1
if user == secret:
guessed = True
print("%i cows, %i bulls" % (get_cows_and_bulls(secret, user)))
print(
"Congrats! The number is %s. You did %s attempts." %
(secret, attempts))
| [
"random.randint"
] | [((1056, 1082), 'random.randint', 'random.randint', (['(1000)', '(9999)'], {}), '(1000, 9999)\n', (1070, 1082), False, 'import random\n')] |
'''
Created on Apr 12, 2013
@author: <NAME>
'''
from lxml import etree
from screensketch.screenspec import model
class XMLReader(object):
def __init__(self, input_data):
self.input_data = input_data
		self.retval = None
def __parseComponent(self, node, parent):
items = node.items()
if len(items) > 1:
raise ValueError('Incorrect data in component node')
name = None
if len(items) == 1:
name = items[0][1]
clazz = {
'BUTTON': model.Button,
'CHECK_BOX': model.CheckBox,
'CHECK_BOXES': model.CheckBoxes,
'COMBO_BOX': model.ComboBox,
'DYNAMIC_TEXT': model.DynamicText,
'EDIT_BOX': model.EditBox,
'IMAGE': model.Image,
'LINK': model.Link,
'LIST': model.List,
'LIST_BOX': model.ListBox,
'PASSWORD': model.Password,
'RADIO_BUTTON': model.RadioButton,
'RADIO_BUTTONS': model.RadioButtons,
'SIMPLE': model.Simple,
'STATIC_TEXT': model.StaticText,
'TABLE': model.Table,
'TEXT_AREA': model.TextArea,
		}.get(name, model.Entity)  # unknown component types default to Entity
		identifier = None
		children = []
		values = []
for n in node.getchildren():
if n.tag == 'identifier':
identifier = n.text
elif n.tag == 'children':
children = self.__parseChildren(n, parent)
elif n.tag == 'values':
values = self.__parseValues(n, parent)
else:
raise ValueError('%s is an unsupported node in component tag' % n.tag)
component = clazz(identifier)
for child in children:
component.append(child)
if values:
component._set_static_values(values)
return component
def __parseValues(self, node, parent):
# tag name checked in __parseComponent
children = []
for n in node.getchildren():
if n.tag == 'value':
selected = False
items = n.items()
if len(items) == 1 and len(items[0]) == 2:
selected = items[0][1]
children.append(model.StaticValue(n.text, selected))
else:
raise ValueError('%s is an unsupported node in values tag' % n.tag)
return children
def __parseChildren(self, node, parent):
# tag name checked in __parseScreen
children = []
for n in node.getchildren():
children.append(self.__parseComponent(n, parent))
return children
def __parseScreen(self, node, parent):
if node.tag != 'screen':
raise ValueError('Tag screen-spec not found')
children = []
for n in node.getchildren():
if n.tag == 'name':
name = n.text
elif n.tag == 'children':
children = self.__parseChildren(n, parent)
else:
raise ValueError('Unknown node in screen tag found')
parent.append(model.Screen(name, children))
def __parseScreenSpec(self, node):
if node.tag != 'screen-spec':
raise ValueError('Tag screen-spec not found')
self.retval = model.ScreenSpec()
for n in node.getchildren():
self.__parseScreen(n, self.retval)
def execute(self):
root = etree.fromstring(self.input_data)
self.__parseScreenSpec(root)
return self.retval
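# A minimal usage sketch (the XML shape is inferred from the parser above):
#
#   xml = ('<screen-spec><screen><name>Login</name>'
#          '<children><component name="BUTTON">'
#          '<identifier>ok</identifier></component>'
#          '</children></screen></screen-spec>')
#   spec = XMLReader(xml).execute()  # -> model.ScreenSpec with one Screen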
| [
"screensketch.screenspec.model.ScreenSpec",
"screensketch.screenspec.model.StaticValue",
"lxml.etree.fromstring",
"screensketch.screenspec.model.Screen"
] | [((2820, 2838), 'screensketch.screenspec.model.ScreenSpec', 'model.ScreenSpec', ([], {}), '()\n', (2836, 2838), False, 'from screensketch.screenspec import model\n'), ((2939, 2972), 'lxml.etree.fromstring', 'etree.fromstring', (['self.input_data'], {}), '(self.input_data)\n', (2955, 2972), False, 'from lxml import etree\n'), ((2655, 2683), 'screensketch.screenspec.model.Screen', 'model.Screen', (['name', 'children'], {}), '(name, children)\n', (2667, 2683), False, 'from screensketch.screenspec import model\n'), ((1950, 1985), 'screensketch.screenspec.model.StaticValue', 'model.StaticValue', (['n.text', 'selected'], {}), '(n.text, selected)\n', (1967, 1985), False, 'from screensketch.screenspec import model\n')] |
from discord.ext import commands
import discord
import datetime
class OnError(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.Cog.listener()
async def on_command_error(self, ctx, error):
ignore = (commands.CommandNotFound, discord.NotFound, discord.Forbidden)
if isinstance(error, ignore):
return
embed = discord.Embed(color=self.bot.embed_color)
if isinstance(error, commands.BotMissingPermissions):
perms = ", ".join([f"{x.replace('_', ' ').replace('guild', 'server').title()}" for x in error.missing_permissions])
embed.title = "Bot Missing Permissions"
embed.description = f"I am missing the following permissions: {perms}!"
await ctx.send(embed=embed)
return
if isinstance(error, commands.MissingPermissions):
perms = ", ".join([f"{x.replace('_', ' ').replace('guild', 'server').title()}" for x in error.missing_permissions])
embed.title = "Missing Permissions"
embed.description = f"You are missing the following permissions: {perms}!"
await ctx.send(embed=embed)
return
if isinstance(error, commands.NotOwner):
embed.title = "Not Owner"
embed.description = f"You're not the owner of this bot!"
await ctx.send(embed=embed)
return
if isinstance(error, commands.MissingRequiredArgument):
embed.title = "Missing Argument"
embed.description = f"You are missing a required argument for this command to work: `{error.param.name}`!"
await ctx.send(embed=embed)
return
if isinstance(error, commands.CommandOnCooldown):
seconds = int(error.retry_after)
wait_until_finish = datetime.datetime.now() + datetime.timedelta(seconds=seconds)
await ctx.send(f'⏱️ This command is on a cooldown. Use it after <t:{int(datetime.datetime.timestamp(wait_until_finish))}:R>')
return
if isinstance(error, commands.DisabledCommand):
embed.title = "Disabled"
embed.description = "This command is disabled by the bot's owner!"
await ctx.send(embed=embed)
return
if isinstance(error, commands.BadArgument):
if isinstance(error, commands.MessageNotFound):
embed.title = "Message Not Found"
embed.description = "The message id/link you provided is invalid or deleted!"
await ctx.send(embed=embed)
return
if isinstance(error, commands.MemberNotFound):
embed.title = "Member Not Found"
embed.description = "The member id/mention/name you provided is invalid or didn't exist in this server!"
await ctx.send(embed=embed)
return
if isinstance(error, commands.UserNotFound):
embed.title = "User Not Found"
embed.description = "The user id/mention/name you provided is invalid or I cannot see that User!"
await ctx.send(embed=embed)
return
if isinstance(error, commands.ChannelNotFound):
embed.title = "Channel Not Found"
embed.description = "The channel id/mention/name you provided is invalid or I access it!"
await ctx.send(embed=embed)
return
if isinstance(error, commands.RoleNotFound):
embed.title = "Role Not Found"
embed.description = "The role id/mention/name you provided is invalid or I cannot see that role!"
await ctx.send(embed=embed)
return
if isinstance(error, commands.EmojiNotFound):
embed.title = "Emoji Not Found"
embed.description = "The emoji id/name you provided is invalid or I cannot see that emoji!"
await ctx.send(embed=embed)
return
embed.title = "Unexpected Error"
        embed.description = str(error)
await ctx.send(embed=embed)
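# Loading this cog as an extension (discord.py 2.x convention; the module
# path "cogs.on_error" is a hypothetical example):
#   await bot.load_extension("cogs.on_error")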
async def setup(bot):
await bot.add_cog(OnError(bot)) | [
"discord.Embed",
"discord.ext.commands.Cog.listener",
"datetime.timedelta",
"datetime.datetime.now",
"datetime.datetime.timestamp"
] | [((153, 176), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (174, 176), False, 'from discord.ext import commands\n'), ((383, 424), 'discord.Embed', 'discord.Embed', ([], {'color': 'self.bot.embed_color'}), '(color=self.bot.embed_color)\n', (396, 424), False, 'import discord\n'), ((1833, 1856), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1854, 1856), False, 'import datetime\n'), ((1859, 1894), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'seconds'}), '(seconds=seconds)\n', (1877, 1894), False, 'import datetime\n'), ((1983, 2029), 'datetime.datetime.timestamp', 'datetime.datetime.timestamp', (['wait_until_finish'], {}), '(wait_until_finish)\n', (2010, 2029), False, 'import datetime\n')] |
def main() -> None:
"""
>>> from collections import deque
>>> queue = deque(["Python", "Java", "C"])
>>> len(queue)
3
>>> queue
deque(['Python', 'Java', 'C'])
>>> queue.popleft()
'Python'
>>> queue.popleft()
'Java'
>>> queue.clear()
>>> len(queue)
0
>>> queue
deque([])
>>> queue.popleft()
Traceback (most recent call last):
...
IndexError: pop from an empty deque
"""
if __name__ == "__main__":
from doctest import testmod
testmod()
| [
"doctest.testmod"
] | [((519, 528), 'doctest.testmod', 'testmod', ([], {}), '()\n', (526, 528), False, 'from doctest import testmod\n')] |
#!/usr/bin/env python3
from typing import List, Dict
from datetime import date, datetime, timedelta
from calendar import monthrange
import os
from typing import TypeVar, Tuple
from benedict import benedict
from Events import Event
from CalendarErrors import BreakoutError, MainError
from Prompt import prompt_user_date, parse_user_date, prompt_user_time
"""
Prints a calendar to the terminal/console output and prompts the user for
commands to:
* scroll from month to month
* make, read, and modify events on certain days
"""
DateTypes = TypeVar("DateTypes", date, datetime)
class Calendar:
"""
Calendar class to hold info on all events in our calendar
"""
WEEKDAYS = ["Mo", "Tu", "We", "Th", "Fr", "Sa", "Su"]
# calendar commands ============================================================================
# scroll ---------------------------------------------------------------------------------------
SCROLL = "S"
FORWARD = "F"
BACKWARD = "B"
SCROLLING = [SCROLL, FORWARD, BACKWARD]
# Event ----------------------------------------------------------------------------------------
NEW = "N"
MODIFY = "C"
READ = "R"
EVENTS = [NEW, MODIFY, READ]
VERB = {
NEW: "Made",
MODIFY: "Modified",
READ: "Read"
}
# utility --------------------------------------------------------------------------------------
QUIT = "Q"
HELP = "H"
ALL = "A"
UTIL = [QUIT, HELP, ALL]
COMMANDS = SCROLLING + EVENTS + UTIL
# indicators -----------------------------------------------------------------------------------
DAY = "D"
MONTH = "M"
YEAR = "Y"
EVENT = "E"
INDICATORS = [DAY, MONTH, YEAR, EVENT]
MONTHS = {
1: "January",
2: "February",
3: "March",
4: "April",
5: "May",
6: "June",
7: "July",
8: "August",
9: "September",
10: "October",
11: "November",
12: "December"
}
MENU_STRING = f"""
Here's how to use the calendar!
To scroll to the next day enter : {FORWARD}{DAY}
TO scroll to the previous day enter : {BACKWARD}{DAY}
To scroll to the the next month enter : {FORWARD}{MONTH}
To scroll to the previous month enter : {BACKWARD}{MONTH}
To scroll to the next year enter : {FORWARD}{YEAR}
To scroll to the previous year enter : {BACKWARD}{YEAR}
To scroll to a date enter : {SCROLL} <date in yyyy/mm/dd format>
To create an event enter : {NEW} <date in yyyy/mm/dd format> - <name>
To modify an event enter : {MODIFY} <date in yyyy/mm/dd format> - <name>
To read an event enter : {READ} <date in yyyy/mm/dd format> - <name>
To print all events enter : {ALL}
(To continue Press enter)
"""
def __init__(self):
"""
Constructor for the Calendar
        Stores events in a nested dictionary keyed by year, month, and day, with an
        innermost dict mapping event names to Event objects.
Structure:
self.events = {
year(str) : {
month(str) : {
day(str) : {
name(str) : (Event)
}
}
}
}
"""
self.events = benedict()
self.today = date.today()
def command_loop(self):
"""
Main loop of the calendar. Prompts the user to input commands to modify the calendar or
scroll around in time
"""
command_ms = "Welcome to the calendar, what would you like to do? \n"
command_ms += "(Q to quit, H for help) : "
ignores = [" ", "\n"]
while True:
self.print_calendar()
user_input = input(command_ms)
for ignore in ignores:
user_input = user_input.replace(ignore, "")
try:
cmd = user_input[0].upper()
except IndexError:
continue
try:
if cmd == self.QUIT:
break
elif cmd == self.HELP:
input(self.MENU_STRING)
elif cmd == self.ALL:
self.print_all_events()
elif cmd in self.SCROLLING:
self.scroll(user_input)
elif cmd in self.EVENTS:
self.eventing(user_input)
else:
input(f"{cmd} is not a valid command, please input a valid command\
{self.MENU_STRING}")
# MainError is just an indicator that user wants to try and input again
except MainError:
continue
def scroll(self, usr_input: str):
"""
parse scroll commands from the user and make the correct call to print_calendar()
Args:
usr_input : string input by the user. Should be led by a valid scroll based command
"""
cmd = usr_input[0]
if len(usr_input) > 1:
usr_args = usr_input[1:]
else:
usr_args = None
if cmd == self.SCROLL:
calendar_date = parse_user_date(usr_args)
self.today = calendar_date
elif cmd == self.FORWARD or cmd == self.BACKWARD:
# Move forward of backward
if cmd == self.FORWARD:
sgn = 1
else:
sgn = -1
if usr_args is not None:
usr_ind = usr_args[0].upper()
else:
usr_ind = usr_args
            if usr_ind == self.YEAR:
                self.today = date(self.today.year + sgn, self.today.month, self.today.day)
            elif usr_ind == self.DAY:
                # timedelta handles month/year rollover that raw day arithmetic would not
                self.today = self.today + timedelta(days=sgn)
            else:  # Scroll by month is default
                year, month = self.today.year, self.today.month + sgn
                if month < 1 or month > 12:
                    year, month = year + sgn, 12 if month < 1 else 1
                # clamp the day so e.g. Jan 31 scrolls to Feb 28 instead of raising ValueError
                day = min(self.today.day, monthrange(year, month)[1])
                self.today = date(year, month, day)
def eventing(self, usr_input: str):
"""
parse event commands from the user and edit self.events dict
Args:
usr_input : string input by the user. Should be led by a valid event based command
"""
cmd = usr_input[0]
if len(usr_input) > 1:
usr_args = usr_input[1:]
else:
usr_args = None
if usr_args is None:
calendar_date = prompt_user_date("Lets get a date for the event")
name = input("Give us a name for the event : ")
else:
usr_args = usr_args.split("-")[:2]
calendar_date = parse_user_date(usr_args[0])
if len(usr_args) >= 2:
name = usr_args[1]
else:
name = input(f"What is the name of the event to be {Calendar.VERB[cmd]}")
if cmd == self.NEW:
self.add_event(calendar_date, name)
input(f"new event created {self.get_event(calendar_date, name)}")
if cmd == self.MODIFY or cmd == self.READ:
if name in self.find_events(calendar_date).keys():
if cmd == self.MODIFY:
mod_event = self.get_event(calendar_date, name)
mod_event.modify()
self.update_event(mod_event, calendar_date, name)
input(f"Modified event : {mod_event}")
else:
input(self.get_event(calendar_date, name))
else:
input("The event you described does not exist. Back to main menu ")
def update_event(self, modified_event: Event, old_date: DateTypes, old_name: str):
"""
        Checks the event after it's been modified and rewrites it to the dict with updated indices
"""
input("Hello There")
new_ev = self.get_event(modified_event.date_of_event, modified_event.name)
old_ev = self.get_event(old_date, old_name)
if new_ev != old_ev:
input("General Kenobi")
pop_str = f"{old_date.year}.{old_date.month}.{old_date.day}.{old_name}"
self.events.pop(pop_str)
Calendar.clean_nested_dict(self.events)
self.events[
self.ind_from_date(modified_event.date_of_event, modified_event.name)
] = modified_event
def print_all_events(self):
prnt = "{\n"
for year, months in self.events.items():
prnt += f"\t{year} : " + "{\n"
for month, days in months.items():
prnt += f"\t\t{month} : " + "{\n"
for day, names in days.items():
prnt += f"\t\t\t{day} : " + "{\n"
for name, ev in names.items():
ev_str = repr(ev).replace("\n", "\n\t\t\t\t\t")
prnt += f"\t\t\t\t{name}\t{ev_str}\n"
prnt += "\t\t\t},\n"
prnt += "\t\t},\n"
prnt += "\t},\n"
prnt += "}"
input(prnt)
@staticmethod
def clean_nested_dict(nested_dict):
"""
Recursively cleans nested_dict to remove empty dicts and subdicts
        Believe it or not this works. Check out the Calendar testing IPython notebook.
"""
# if lowest level item is not an empty dict, don't pop this, or parents
if not isinstance(nested_dict, dict):
return False
# if lowest level item is an empty dict, pop this from the parent and clean up recursively
if nested_dict == {}:
return True
# indicates whether this dict/sub_dict should be "popped" (cleaned up)
pop_this = True
for key, sub_dict in list(nested_dict.items()):
pop_that = Calendar.clean_nested_dict(sub_dict)
if pop_that:
nested_dict.pop(key)
pop_this *= pop_that
return pop_this
@staticmethod
def ind_from_date(calendar_date: DateTypes, name: str = None):
"""
Args:
calendar_date : date to be used for indexing
name : optional. Tacked on to return if included
Returns:
            year (str), month (str), day (str)[, name (str)]
"""
if name is not None:
return str(calendar_date.year), str(calendar_date.month), str(calendar_date.day), name
else:
return str(calendar_date.year), str(calendar_date.month), str(calendar_date.day)
def get_event(self, calendar_date: DateTypes, name: str) -> Event:
"""
Gets an event from a name and a date
Args:
calendar_date : date of the event
name : name of the event:
Returns:
The event found. Or None if none are found
"""
try:
ev = self.events[self.ind_from_date(calendar_date, name)]
except KeyError:
ev = None
return ev
def find_events(self, calendar_date: DateTypes) -> Dict:
"""
finds all events that occur on calendar_date and returns them
Args:
calendar_date : date or datetime object where we're looking for events
Returns:
daily events : dictionary of events occurring on that day, empty dict if there are none
"""
try:
daily_events = self.events[self.ind_from_date(calendar_date)]
except KeyError:
daily_events = {}
return daily_events
def add_event(self, calendar_date: DateTypes, name: str):
"""
Adds an event to the calendar
Args:
calendar_date : date of the new event
name : name of that event
"""
while name in self.find_events(calendar_date).keys():
overwrite = input(
f"Another event is named {name} on that date. Do you wish to overwrite it? (Y/n) : "
f"Other event : {self.get_event(calendar_date, name)}\n"
)
overwrite = overwrite.upper() != "N"
if not overwrite:
name = input(f"Please enter a new name for the event : ")
else:
break
description = input("Give us a brief description of the event : \n")
if input("Do you wish to specify a time? (y/N)").upper() != "Y":
self.events[self.ind_from_date(calendar_date, name)] = Event(
calendar_date,
name,
description,
)
else:
self.events[self.ind_from_date(calendar_date, name)] = Event(
calendar_date,
name,
description,
prompt_user_time("What time do you want to set?")
)
def print_calendar(self):
"""
Prints a calendar to the terminal or command for the month which contains day.
"""
def color_entry(message: str, txt: str = "normal", bg: str = "normal") -> str:
"""
turns message into a colorful version of itself
Args:
message : message to be beautified
txt : string indicating color of text
bg : string indicating color of background
Returns:
beautified message
"""
txt_colors = {
"black": "30",
"red": "31",
"green": "32",
"yellow": "33",
"blue": "34",
"purple": "35",
"cyan": "36",
"white": "37",
"normal": 1
}
bg_colors = {
"black": f"{37+10}",
"red": f"{31 + 10}",
"green": f"{32 + 10}",
"yellow": f"{33 + 10}",
"blue": f"{34 + 10}",
"purple": f"{35 + 10}",
"cyan": f"{36 + 10}",
"white": f"{30 + 10}",
"normal": 1
}
return f"\033[1;{txt_colors[txt]};{bg_colors[bg]}m{message}\033[0m"
        os.system('cls')  # clear the screen ('cls' is Windows-specific; POSIX terminals use 'clear')
# Find which day of the week the month started on
first_day = date(self.today.year, self.today.month, 1).weekday()
# Find number of days in month
num_days = monthrange(self.today.year, self.today.month)[1]
try:
monthly_events = list(self.events[str(self.today.year), str(self.today.month)].keys())
monthly_events = [int(dy) for dy in monthly_events]
except KeyError:
monthly_events = []
cal_string = ""
# Print month and year
cal_string += color_entry(
f"{self.MONTHS[self.today.month]} : {self.today.year}\n",
txt="cyan"
)
# Print the days of the week
for day in self.WEEKDAYS:
cal_string += f"{day} "
cal_string += "\n"
days = 0
while days < num_days:
for i, day in enumerate(self.WEEKDAYS):
if days == 0 and i < first_day:
entry = " "
else:
days += 1
entry = f"{days:2} "
if days in monthly_events and not days == self.today.day:
entry = color_entry(entry, txt="green")
if days == self.today.day and days not in monthly_events:
entry = color_entry(entry, bg="red")
if days == self.today.day and days in monthly_events:
entry = color_entry(entry, txt="green", bg="red")
if days > num_days:
entry = " "
cal_string += entry
cal_string += "\n"
print(cal_string)
if __name__ == '__main__':
cal = Calendar()
cal.command_loop()
| [
"Prompt.parse_user_date",
"Prompt.prompt_user_time",
"os.system",
"datetime.date.today",
"datetime.date",
"benedict.benedict",
"Events.Event",
"Prompt.prompt_user_date",
"calendar.monthrange",
"typing.TypeVar"
] | [((565, 601), 'typing.TypeVar', 'TypeVar', (['"""DateTypes"""', 'date', 'datetime'], {}), "('DateTypes', date, datetime)\n", (572, 601), False, 'from typing import TypeVar, Tuple\n'), ((2975, 2985), 'benedict.benedict', 'benedict', ([], {}), '()\n', (2983, 2985), False, 'from benedict import benedict\n'), ((3001, 3013), 'datetime.date.today', 'date.today', ([], {}), '()\n', (3011, 3013), False, 'from datetime import date, datetime\n'), ((11394, 11410), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (11403, 11410), False, 'import os\n'), ((4398, 4423), 'Prompt.parse_user_date', 'parse_user_date', (['usr_args'], {}), '(usr_args)\n', (4413, 4423), False, 'from Prompt import prompt_user_date, parse_user_date, prompt_user_time\n'), ((5368, 5417), 'Prompt.prompt_user_date', 'prompt_user_date', (['"""Lets get a date for the event"""'], {}), "('Lets get a date for the event')\n", (5384, 5417), False, 'from Prompt import prompt_user_date, parse_user_date, prompt_user_time\n'), ((5534, 5562), 'Prompt.parse_user_date', 'parse_user_date', (['usr_args[0]'], {}), '(usr_args[0])\n', (5549, 5562), False, 'from Prompt import prompt_user_date, parse_user_date, prompt_user_time\n'), ((10205, 10244), 'Events.Event', 'Event', (['calendar_date', 'name', 'description'], {}), '(calendar_date, name, description)\n', (10210, 10244), False, 'from Events import Event\n'), ((11577, 11622), 'calendar.monthrange', 'monthrange', (['self.today.year', 'self.today.month'], {}), '(self.today.year, self.today.month)\n', (11587, 11622), False, 'from calendar import monthrange\n'), ((10386, 10435), 'Prompt.prompt_user_time', 'prompt_user_time', (['"""What time do you want to set?"""'], {}), "('What time do you want to set?')\n", (10402, 10435), False, 'from Prompt import prompt_user_date, parse_user_date, prompt_user_time\n'), ((11477, 11519), 'datetime.date', 'date', (['self.today.year', 'self.today.month', '(1)'], {}), '(self.today.year, self.today.month, 1)\n', (11481, 11519), False, 'from datetime import date, datetime\n'), ((4736, 4797), 'datetime.date', 'date', (['(self.today.year + sgn)', 'self.today.month', 'self.today.day'], {}), '(self.today.year + sgn, self.today.month, self.today.day)\n', (4740, 4797), False, 'from datetime import date, datetime\n'), ((4842, 4903), 'datetime.date', 'date', (['self.today.year', 'self.today.month', '(self.today.day + sgn)'], {}), '(self.today.year, self.today.month, self.today.day + sgn)\n', (4846, 4903), False, 'from datetime import date, datetime\n'), ((4958, 5019), 'datetime.date', 'date', (['self.today.year', '(self.today.month + sgn)', 'self.today.day'], {}), '(self.today.year, self.today.month + sgn, self.today.day)\n', (4962, 5019), False, 'from datetime import date, datetime\n')] |
import numpy as np
import gym
from random import randint
from metaworld.benchmarks import ML1
class ReachML1Env(gym.Env):
def __init__(self, max_episode_steps=150,out_of_distribution=False, n_train_tasks=50, n_test_tasks=10, **kwargs):
super(ReachML1Env, self).__init__()
self.train_env = ML1.get_train_tasks('reach-v1', out_of_distribution=out_of_distribution)
self.test_env = ML1.get_test_tasks('reach-v1', out_of_distribution=out_of_distribution)
self.train_tasks = self.train_env.sample_tasks(n_train_tasks)
self.test_tasks = self.test_env.sample_tasks(n_test_tasks)
self.tasks = self.train_tasks + self.test_tasks
self.env = self.train_env #this env will change depending on the idx
self.observation_space = self.env.observation_space
self.action_space = self.env.action_space
self.goal_space_origin = np.array([0, 0.85, 0.175])
self.current_task_idx = 0
self.episode_steps = 0
self._max_episode_steps = max_episode_steps
# self.get_tasks_goals()
# self.reset_task()
def step(self, action):
self.episode_steps += 1
obs, reward, done, info = self.env.step(action)
if self.episode_steps >= self._max_episode_steps:
done = True
return obs, reward, done, info
def reset(self):
self.episode_steps = 0
return self.env.reset()
def seed(self, seed):
self.train_env.seed(seed)
self.test_env.seed(seed)
def get_all_task_idx(self):
return range(len(self.tasks))
def set_task(self, idx):
self.current_task_idx = idx
self.env = self.train_env if idx < len(self.train_tasks) else self.test_env
self.env.set_task(self.tasks[idx])
self._goal = self.tasks[idx]['goal']
def get_task(self):
return self.tasks[self.current_task_idx]['goal'] # goal_pos
def reset_task(self, task=None, test=False):
        # apparently this is called only without idx, so tasks are always scrambled
# we have to set anything only at test time
if task is None:
if test:
task = randint(len(self.train_tasks), len(self.tasks) - 1)
else:
task = randint(0, len(self.train_tasks) - 1)
self.set_task(task)
def render(self):
self.env.render()
def get_tasks_goals(self):
for idx in range(len(self.tasks)):
self.reset_task(idx)
_, _, _, info = self.step(self.action_space.sample())
self.tasks[idx]['goal_pos'] = info['goal']
| [
"metaworld.benchmarks.ML1.get_train_tasks",
"metaworld.benchmarks.ML1.get_test_tasks",
"numpy.array"
] | [((311, 383), 'metaworld.benchmarks.ML1.get_train_tasks', 'ML1.get_train_tasks', (['"""reach-v1"""'], {'out_of_distribution': 'out_of_distribution'}), "('reach-v1', out_of_distribution=out_of_distribution)\n", (330, 383), False, 'from metaworld.benchmarks import ML1\n'), ((408, 479), 'metaworld.benchmarks.ML1.get_test_tasks', 'ML1.get_test_tasks', (['"""reach-v1"""'], {'out_of_distribution': 'out_of_distribution'}), "('reach-v1', out_of_distribution=out_of_distribution)\n", (426, 479), False, 'from metaworld.benchmarks import ML1\n'), ((893, 919), 'numpy.array', 'np.array', (['[0, 0.85, 0.175]'], {}), '([0, 0.85, 0.175])\n', (901, 919), True, 'import numpy as np\n')] |
# --- Day 20: Infinite Elves and Infinite Houses ---
#
# To keep the Elves busy, Santa has them deliver some presents by hand, door-to-door. He sends them down a street with
# infinite houses numbered sequentially: 1, 2, 3, 4, 5, and so on.
#
# Each Elf is assigned a number, too, and delivers presents to houses based on that number:
#
# The first Elf (number 1) delivers presents to every house: 1, 2, 3, 4, 5, ....
# The second Elf (number 2) delivers presents to every second house: 2, 4, 6, 8, 10, ....
# Elf number 3 delivers presents to every third house: 3, 6, 9, 12, 15, ....
#
# There are infinitely many Elves, numbered starting with 1. Each Elf delivers presents equal to ten times his or her
# number at each house.
#
# So, the first nine houses on the street end up like this:
#
# House 1 got 10 presents.
# House 2 got 30 presents.
# House 3 got 40 presents.
# House 4 got 70 presents.
# House 5 got 60 presents.
# House 6 got 120 presents.
# House 7 got 80 presents.
# House 8 got 150 presents.
# House 9 got 130 presents.
#
# The first house gets 10 presents: it is visited only by Elf 1, which delivers 1 * 10 = 10 presents. The fourth house
# gets 70 presents, because it is visited by Elves 1, 2, and 4, for a total of 10 + 20 + 40 = 70 presents.
#
# What is the lowest house number of the house to get at least as many presents as the number in your puzzle input?
#
# --- Part Two ---
#
# The Elves decide they don't want to visit an infinite number of houses. Instead, each Elf will stop after delivering
# presents to 50 houses. To make up for it, they decide to deliver presents equal to eleven times their number at each
# house.
#
# With these changes, what is the new lowest house number of the house to get at least as many presents as the number
# in your puzzle input?
from math import sqrt
def get_part1_factors(n):
factors = set()
for x in range(1, int(sqrt(n)) + 1):
if n % x == 0:
factors.add(x)
factors.add(n // x)
return factors
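# Sanity check against the statement above: house 6 is visited by elves {1, 2, 3, 6},
# so it gets 10 * (1 + 2 + 3 + 6) = 120 presents, matching "House 6 got 120 presents."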
def get_part2_factors(n):
factors = set()
for x in range(1, int(sqrt(n)) + 1):
if n % x == 0:
if x * 50 >= n:
factors.add(x)
if (n // x) * 50 >= n:
factors.add(n // x)
return factors
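# Under the part-two rule the same house 6 would get 11 * (1 + 2 + 3 + 6) = 132 presents,
# since house 6 is within the first 50 deliveries of every one of its elves (1, 2, 3, 6).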
data = 33100000
# Part 1
house_no = 1
while True:
presents = sum(map(lambda i: i*10, get_part1_factors(house_no)))
if presents >= data:
print("House no: {0}".format(house_no))
break
house_no += 1
# Part 2
house_no = 1
while True:
presents = sum(map(lambda i: i*11, get_part2_factors(house_no)))
if presents >= data:
print("House no: {0}".format(house_no))
break
house_no += 1
| [
"math.sqrt"
] | [((1895, 1902), 'math.sqrt', 'sqrt', (['n'], {}), '(n)\n', (1899, 1902), False, 'from math import sqrt\n'), ((2085, 2092), 'math.sqrt', 'sqrt', (['n'], {}), '(n)\n', (2089, 2092), False, 'from math import sqrt\n')] |
# partymoder xbmc add-on
# Copyright 2017 aerth <<EMAIL>>
# Released under the terms of the MIT License
import xbmc
xbmc.executebuiltin('xbmc.PlayerControl(Partymode(music))', True)
xbmc.executebuiltin('xbmc.PlayerControl(repeatall)', True)
xbmc.executebuiltin("Action(Fullscreen)", True)
| [
"xbmc.executebuiltin"
] | [((119, 183), 'xbmc.executebuiltin', 'xbmc.executebuiltin', (['"""xbmc.PlayerControl(Partymode(music)"""', '(True)'], {}), "('xbmc.PlayerControl(Partymode(music)', True)\n", (138, 183), False, 'import xbmc\n'), ((184, 242), 'xbmc.executebuiltin', 'xbmc.executebuiltin', (['"""xbmc.PlayerControl(repeatall)"""', '(True)'], {}), "('xbmc.PlayerControl(repeatall)', True)\n", (203, 242), False, 'import xbmc\n'), ((243, 290), 'xbmc.executebuiltin', 'xbmc.executebuiltin', (['"""Action(Fullscreen)"""', '(True)'], {}), "('Action(Fullscreen)', True)\n", (262, 290), False, 'import xbmc\n')] |
from django.contrib import admin
from blog.models import Post, BlogComment, Category
# Register your models here.
admin.site.register((BlogComment,))  # register accepts a model or an iterable of models; a tuple works
admin.site.register(Category)
@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
class Media:
js = ("tinyInject.js",)
| [
"django.contrib.admin.register",
"django.contrib.admin.site.register"
] | [((115, 150), 'django.contrib.admin.site.register', 'admin.site.register', (['(BlogComment,)'], {}), '((BlogComment,))\n', (134, 150), False, 'from django.contrib import admin\n'), ((182, 211), 'django.contrib.admin.site.register', 'admin.site.register', (['Category'], {}), '(Category)\n', (201, 211), False, 'from django.contrib import admin\n'), ((214, 234), 'django.contrib.admin.register', 'admin.register', (['Post'], {}), '(Post)\n', (228, 234), False, 'from django.contrib import admin\n')] |
#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2018 <NAME> (<EMAIL>)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from argparse import ArgumentParser
import socket
from sys import exit, stdout
from time import sleep
__version__ = '0.1'
class RSSrvCore:
def __init__(self):
"""Initialize a shell server"""
self.args = None
self.arg_parser = ArgumentParser()
def get_args(self):
"""Set argument options"""
self.arg_parser.add_argument('--version', action = 'version',
version = '%(prog)s ' + str(__version__))
self.arg_parser.add_argument('-f',
action = 'store_true', dest = 'force',
help = ('bind to sockets that are already in use'))
self.arg_parser.add_argument('port',
action = 'store', type=int,
help = ('set the local port'))
self.args = self.arg_parser.parse_args()
def main_event(self, force=False):
"""Connect to an incoming shell"""
s = socket.socket()
if force:
print('Enabling socket address reuse.')
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
print('Binding to port ' + str(self.args.port))
s.bind(('0.0.0.0', self.args.port))
s.listen(1)
conn, host = s.accept()
print('Received connection from ' + str(host[0]) + \
':' + str(host[1]) + '.')
        remotehost, remotepyversion = conn.recv(1024).decode().split(':')
print('Remote hostname: ' + remotehost + '.\n' + \
'Remote Python major version: ' + remotepyversion + \
'.\nEnter h for help.')
remotepyversion = int(remotepyversion)
if remotehost.split('@')[0] == 'root':
promptsuffix = ' # '
else:
promptsuffix = ' $ '
print('Type exit or enter EOF (ctrl-d) to exit.')
        lastcmd = '\n'  # so '!' (repeat last command) is defined before any command is entered
        while True:
try:
cmd = input(remotehost + promptsuffix)
if cmd == '!':
cmd = lastcmd
elif cmd == '':
cmd = '\n'
if cmd == 'exit':
conn.send(bytes(cmd, 'utf8'))
conn.close()
s.close()
exit(0)
elif cmd == 'drop':
conn.send(bytes('exit', 'utf8'))
conn.close()
s.close()
return 0
elif cmd == 'detach':
conn.send(bytes(cmd, 'utf8'))
conn.close()
s.close()
exit(0)
elif cmd == 'h':
self.show_help()
else:
conn.send(bytes(cmd, 'utf8'))
recdata = conn.recv(16834)
if remotepyversion == 2:
if recdata and recdata != ':':
stdout.buffer.write(recdata)
else:
if recdata and recdata != bytes('\n', 'utf8'):
stdout.buffer.write(recdata)
lastcmd = cmd
except EOFError:
conn.send(bytes('exit', 'utf8'))
print('exit')
conn.close()
s.close()
exit(0)
def show_help(self):
"""Show help for shell options"""
h = []
h.append('\nCommand Description')
h.append('-----------------------------')
h.append('h show this help menu')
h.append('exit close program (local and remote)')
h.append('drop close shell, keep server running')
h.append('detach close shell, keep client running')
h.append('cd DIR change directory')
h.append('')
print('\n'.join(h))
def run_script(self):
"""Run the shell server program"""
try:
self.get_args()
self.main_event(force=self.args.force)
while True:
self.main_event(force=True)
except KeyboardInterrupt:
print('\nExiting on KeyboardInterrupt')
def main():
thing = RSSrvCore()
thing.run_script()
if __name__ == "__main__":
main()
| [
"sys.stdout.buffer.write",
"socket.socket",
"argparse.ArgumentParser",
"sys.exit"
] | [((1400, 1416), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (1414, 1416), False, 'from argparse import ArgumentParser\n'), ((2055, 2070), 'socket.socket', 'socket.socket', ([], {}), '()\n', (2068, 2070), False, 'import socket\n'), ((3345, 3352), 'sys.exit', 'exit', (['(0)'], {}), '(0)\n', (3349, 3352), False, 'from sys import exit, stdout\n'), ((4422, 4429), 'sys.exit', 'exit', (['(0)'], {}), '(0)\n', (4426, 4429), False, 'from sys import exit, stdout\n'), ((3705, 3712), 'sys.exit', 'exit', (['(0)'], {}), '(0)\n', (3709, 3712), False, 'from sys import exit, stdout\n'), ((4030, 4058), 'sys.stdout.buffer.write', 'stdout.buffer.write', (['recdata'], {}), '(recdata)\n', (4049, 4058), False, 'from sys import exit, stdout\n'), ((4184, 4212), 'sys.stdout.buffer.write', 'stdout.buffer.write', (['recdata'], {}), '(recdata)\n', (4203, 4212), False, 'from sys import exit, stdout\n')] |
# -*- coding: utf-8 -*-
from collections import OrderedDict, defaultdict
from shardingpy.api.algorithm.sharding.values import ListShardingValue, RangeShardingValue
from shardingpy.constant import ShardingOperator
from shardingpy.exception import UnsupportedOperationException
from shardingpy.parsing.parser.expressionparser import SQLPlaceholderExpression, SQLTextExpression, SQLNumberExpression
from shardingpy.util.extype import RangeType, Range
from shardingpy.util.strutil import equals_ignore_case
class Column:
def __init__(self, name, table_name):
self.name = name
self.table_name = table_name
def __eq__(self, other):
return other and isinstance(other, Column) and equals_ignore_case(self.name, other.name) and equals_ignore_case(
self.table_name, other.table_name)
def __hash__(self):
        # parenthesized so a missing table_name does not zero out the name's hash
        return hash(self.name) + (17 * hash(self.table_name) if self.table_name else 0)
class Condition:
def __init__(self, column, operator, *sql_expressions):
if column:
assert isinstance(column, Column)
if operator:
assert isinstance(operator, ShardingOperator)
self.column = column
self.operator = operator
self._position_index_map = OrderedDict()
self._values = list()
position = 0
for expr in sql_expressions:
if isinstance(expr, SQLPlaceholderExpression):
self._position_index_map[position] = expr.index
elif isinstance(expr, SQLTextExpression):
self._values.append(expr.text)
elif isinstance(expr, SQLNumberExpression):
self._values.append(expr.number)
position += 1
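        # Illustrative (hypothetical) example: Condition(col, IN, SQLNumberExpression(1),
        # SQLPlaceholderExpression(0)) stores _values == [1] and _position_index_map == {1: 0};
        # get_condition_values([7]) then yields [1, 7].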
# Deprecated
def get_sharding_value(self, parameters):
condition_values = self.get_condition_values(parameters)
if self.operator in [ShardingOperator.EQUAL, ShardingOperator.IN]:
return ListShardingValue(self.column.table_name, self.column.name, condition_values)
elif self.operator == ShardingOperator.BETWEEN:
            return RangeShardingValue(self.column.table_name, self.column.name,
                                      Range(condition_values[0], RangeType.CLOSED, condition_values[1],
                                            RangeType.CLOSED))  # the upper bound needs a RangeType member, not the RangeType class
else:
raise UnsupportedOperationException("sharding condition not support :" + self.operator.value)
def get_condition_values(self, parameters):
result = self._values[:]
for position, param_index in self._position_index_map.items():
parameter = parameters[param_index]
if position < len(result):
result.insert(position, parameter)
else:
result.append(parameter)
return result
class AndCondition(object):
def __init__(self):
self.conditions = list()
def get_conditions_map(self):
result = defaultdict(list)
for each in self.conditions:
result[each.column].append(each)
return result
def optimize(self):
result = AndCondition()
result.conditions = [each for each in self.conditions if type(each) == Condition]
if not result.conditions:
result.conditions.append(NullCondition())
return result
class OrCondition(object):
def __init__(self, condition=None):
self.and_conditions = list()
if condition:
self.add(condition)
def add(self, condition):
assert isinstance(condition, Condition)
if len(self.and_conditions) == 0:
self.and_conditions.append(AndCondition())
self.and_conditions[0].conditions.append(condition)
def find(self, column, index):
pass
class Conditions:
def __init__(self, conditions=None):
self.or_condition = OrCondition()
if conditions:
self.or_condition.and_conditions.extend(conditions.or_condition.and_conditions)
def add(self, condition, sharding_rule):
if sharding_rule.is_sharding_column(condition.column):
self.or_condition.add(condition)
class NullCondition(Condition):
def __init__(self):
super().__init__(None, None)
class GeneratedKeyCondition(Condition):
def __init__(self, column, index, value):
super().__init__(column, ShardingOperator.EQUAL, SQLNumberExpression(value))
self.index = index
self.value = value
def get_condition_values(self, parameters):
return [self.value] if self.value is not None else [parameters[self.index]]
| [
"shardingpy.api.algorithm.sharding.values.ListShardingValue",
"shardingpy.util.extype.Range",
"collections.defaultdict",
"shardingpy.parsing.parser.expressionparser.SQLNumberExpression",
"shardingpy.util.strutil.equals_ignore_case",
"shardingpy.exception.UnsupportedOperationException",
"collections.OrderedDict"
] | [((1253, 1266), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1264, 1266), False, 'from collections import OrderedDict, defaultdict\n'), ((2938, 2955), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2949, 2955), False, 'from collections import OrderedDict, defaultdict\n'), ((709, 750), 'shardingpy.util.strutil.equals_ignore_case', 'equals_ignore_case', (['self.name', 'other.name'], {}), '(self.name, other.name)\n', (727, 750), False, 'from shardingpy.util.strutil import equals_ignore_case\n'), ((755, 808), 'shardingpy.util.strutil.equals_ignore_case', 'equals_ignore_case', (['self.table_name', 'other.table_name'], {}), '(self.table_name, other.table_name)\n', (773, 808), False, 'from shardingpy.util.strutil import equals_ignore_case\n'), ((1933, 2010), 'shardingpy.api.algorithm.sharding.values.ListShardingValue', 'ListShardingValue', (['self.column.table_name', 'self.column.name', 'condition_values'], {}), '(self.column.table_name, self.column.name, condition_values)\n', (1950, 2010), False, 'from shardingpy.api.algorithm.sharding.values import ListShardingValue, RangeShardingValue\n'), ((4374, 4400), 'shardingpy.parsing.parser.expressionparser.SQLNumberExpression', 'SQLNumberExpression', (['value'], {}), '(value)\n', (4393, 4400), False, 'from shardingpy.parsing.parser.expressionparser import SQLPlaceholderExpression, SQLTextExpression, SQLNumberExpression\n'), ((2339, 2431), 'shardingpy.exception.UnsupportedOperationException', 'UnsupportedOperationException', (["('sharding condition not support :' + self.operator.value)"], {}), "('sharding condition not support :' + self.\n operator.value)\n", (2368, 2431), False, 'from shardingpy.exception import UnsupportedOperationException\n'), ((2185, 2261), 'shardingpy.util.extype.Range', 'Range', (['condition_values[0]', 'RangeType.CLOSED', 'condition_values[1]', 'RangeType'], {}), '(condition_values[0], RangeType.CLOSED, condition_values[1], RangeType)\n', (2190, 2261), False, 'from shardingpy.util.extype import RangeType, Range\n')] |
import re
import logging
from dataclasses import dataclass, field
from typing import List, Optional
import torch
import torch.nn as nn
from transformers import BertPreTrainedModel
from src.model.decoder import Decoder
from src.model.encoders.bert import _BertEncoder
from src.model.encoders.ca_mtl_base import CaMtlBaseEncoder
from src.model.encoders.ca_mtl_large import CaMtlLargeEncoder
logger = logging.getLogger(__name__)
@dataclass
class CaMtlArguments:
"""
Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.
"""
model_name_or_path: str = field(
metadata={
"help": "Path to pretrained model or model identifier from: CA-MTL-base, CA-MTL-large, bert-base-cased "
"bert-base-uncased, bert-large-cased, bert-large-uncased"
}
)
encoder_type: str = field(
default=None,
metadata={
"help": "Identifier of encoder-type to use: CA-MTL-base, CA-MTL-large, bert-base-cased "
"bert-base-uncased, bert-large-cased, bert-large-uncased"
}
)
class CaMtl(BertPreTrainedModel):
def __init__(
self,
config,
model_args,
data_args,
):
super().__init__(config)
self.data_args = data_args
self.bert = self._create_encoder(model_args.encoder_type)
self.decoders = nn.ModuleList()
for task in data_args.tasks:
self.decoders.append(Decoder(config.hidden_size, task))
self.init_weights()
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
labels=None,
task_id=None,
span_locs=None,
sample_id=None,
):
outputs = self.bert(
input_ids=input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
task_id=task_id,
)
sequence_output, pooled_output = outputs[:2]
loss_list = []
unique_task_ids = torch.unique(task_id)
unique_task_ids_list = (
unique_task_ids.cpu().numpy()
if unique_task_ids.is_cuda
else unique_task_ids.numpy()
)
loss_grouped_per_task = (
torch.zeros_like(task_id[0]).repeat(len(self.data_args.tasks)).float()
)
batch_entropy_per_task = torch.zeros(input_ids.shape[0])
batch_entropy_mean_per_task = torch.zeros(input_ids.shape[0])
max_mean_batch_entropy = None
logits = None
for unique_task_id in unique_task_ids_list:
task_id_filter = task_id == unique_task_id
decoder_id = unique_task_id
logits, current_loss, batch_entropy = self.decoders[decoder_id].forward(
sequence_output[task_id_filter],
pooled_output[task_id_filter],
labels=None if labels is None else labels[task_id_filter],
attention_mask=attention_mask[task_id_filter],
)
batch_entropy_mean = batch_entropy.mean().item()
batch_entropy_per_task[task_id_filter] = batch_entropy
batch_entropy_mean_per_task[task_id_filter] = torch.full_like(
batch_entropy, batch_entropy_mean
)
if (
max_mean_batch_entropy is None
or batch_entropy_mean > max_mean_batch_entropy
):
max_mean_batch_entropy = batch_entropy_mean
if labels is not None:
loss_grouped_per_task[unique_task_id] = current_loss
loss_list.append(current_loss)
outputs = (
(logits,)
+ outputs[2:]
+ (
batch_entropy_per_task,
batch_entropy_mean_per_task,
max_mean_batch_entropy,
)
)
if loss_list:
loss = torch.stack(loss_list)
outputs = (loss.mean(),) + outputs + (loss_grouped_per_task.view(1, -1),)
return outputs
def _create_encoder(self, encoder_type):
if encoder_type == "CA-MTL-large":
return CaMtlLargeEncoder(self.config, data_args=self.data_args)
elif encoder_type == "CA-MTL-base":
return CaMtlBaseEncoder(self.config, data_args=self.data_args)
elif encoder_type == "CA-MTL-base-uncased":
return CaMtlBaseEncoder(self.config, data_args=self.data_args)
elif encoder_type == "CA-MTL-tiny":
return CaMtlBaseEncoder(self.config, data_args=self.data_args)
else:
return _BertEncoder(self.config)
@staticmethod
def get_base_model(model_name_or_path):
if model_name_or_path == "CA-MTL-large":
return "bert-large-cased"
elif model_name_or_path == "CA-MTL-base":
return "bert-base-cased"
elif model_name_or_path == "CA-MTL-base-uncased":
return "bert-base-uncased"
elif model_name_or_path == "CA-MTL-tiny":
return 'huawei-noah/TinyBERT_General_6L_768D'
else:
return model_name_or_path
| [
"src.model.encoders.bert._BertEncoder",
"src.model.encoders.ca_mtl_base.CaMtlBaseEncoder",
"torch.full_like",
"torch.unique",
"torch.stack",
"torch.nn.ModuleList",
"torch.zeros_like",
"src.model.encoders.ca_mtl_large.CaMtlLargeEncoder",
"src.model.decoder.Decoder",
"dataclasses.field",
"torch.zeros",
"logging.getLogger"
] | [((401, 428), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (418, 428), False, 'import logging\n'), ((600, 786), 'dataclasses.field', 'field', ([], {'metadata': "{'help':\n 'Path to pretrained model or model identifier from: CA-MTL-base, CA-MTL-large, bert-base-cased bert-base-uncased, bert-large-cased, bert-large-uncased'\n }"}), "(metadata={'help':\n 'Path to pretrained model or model identifier from: CA-MTL-base, CA-MTL-large, bert-base-cased bert-base-uncased, bert-large-cased, bert-large-uncased'\n })\n", (605, 786), False, 'from dataclasses import dataclass, field\n'), ((861, 1045), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'help':\n 'Identifier of encoder-type to use: CA-MTL-base, CA-MTL-large, bert-base-cased bert-base-uncased, bert-large-cased, bert-large-uncased'\n }"}), "(default=None, metadata={'help':\n 'Identifier of encoder-type to use: CA-MTL-base, CA-MTL-large, bert-base-cased bert-base-uncased, bert-large-cased, bert-large-uncased'\n })\n", (866, 1045), False, 'from dataclasses import dataclass, field\n'), ((1393, 1408), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (1406, 1408), True, 'import torch.nn as nn\n'), ((2239, 2260), 'torch.unique', 'torch.unique', (['task_id'], {}), '(task_id)\n', (2251, 2260), False, 'import torch\n'), ((2586, 2617), 'torch.zeros', 'torch.zeros', (['input_ids.shape[0]'], {}), '(input_ids.shape[0])\n', (2597, 2617), False, 'import torch\n'), ((2656, 2687), 'torch.zeros', 'torch.zeros', (['input_ids.shape[0]'], {}), '(input_ids.shape[0])\n', (2667, 2687), False, 'import torch\n'), ((3415, 3465), 'torch.full_like', 'torch.full_like', (['batch_entropy', 'batch_entropy_mean'], {}), '(batch_entropy, batch_entropy_mean)\n', (3430, 3465), False, 'import torch\n'), ((4126, 4148), 'torch.stack', 'torch.stack', (['loss_list'], {}), '(loss_list)\n', (4137, 4148), False, 'import torch\n'), ((4367, 4423), 'src.model.encoders.ca_mtl_large.CaMtlLargeEncoder', 'CaMtlLargeEncoder', (['self.config'], {'data_args': 'self.data_args'}), '(self.config, data_args=self.data_args)\n', (4384, 4423), False, 'from src.model.encoders.ca_mtl_large import CaMtlLargeEncoder\n'), ((1479, 1512), 'src.model.decoder.Decoder', 'Decoder', (['config.hidden_size', 'task'], {}), '(config.hidden_size, task)\n', (1486, 1512), False, 'from src.model.decoder import Decoder\n'), ((4487, 4542), 'src.model.encoders.ca_mtl_base.CaMtlBaseEncoder', 'CaMtlBaseEncoder', (['self.config'], {'data_args': 'self.data_args'}), '(self.config, data_args=self.data_args)\n', (4503, 4542), False, 'from src.model.encoders.ca_mtl_base import CaMtlBaseEncoder\n'), ((4614, 4669), 'src.model.encoders.ca_mtl_base.CaMtlBaseEncoder', 'CaMtlBaseEncoder', (['self.config'], {'data_args': 'self.data_args'}), '(self.config, data_args=self.data_args)\n', (4630, 4669), False, 'from src.model.encoders.ca_mtl_base import CaMtlBaseEncoder\n'), ((2472, 2500), 'torch.zeros_like', 'torch.zeros_like', (['task_id[0]'], {}), '(task_id[0])\n', (2488, 2500), False, 'import torch\n'), ((4733, 4788), 'src.model.encoders.ca_mtl_base.CaMtlBaseEncoder', 'CaMtlBaseEncoder', (['self.config'], {'data_args': 'self.data_args'}), '(self.config, data_args=self.data_args)\n', (4749, 4788), False, 'from src.model.encoders.ca_mtl_base import CaMtlBaseEncoder\n'), ((4822, 4847), 'src.model.encoders.bert._BertEncoder', '_BertEncoder', (['self.config'], {}), '(self.config)\n', (4834, 4847), False, 'from src.model.encoders.bert import _BertEncoder\n')] |
import os
from colorama import Fore, init
# Current file directory details
file = os.path.realpath(__file__)
filedir = os.path.dirname(file)
parentdir = os.path.dirname(filedir)
# Initialise colors for terminal
init()
# Print out header
print(Fore.CYAN + '-' * 13 + Fore.RESET)
print('Call Server')
print(Fore.CYAN + '-' * 13 + Fore.RESET)
# Get variables
print()
print(Fore.CYAN + 'What is the root FQDN for this machine: ' + Fore.RESET, end='')
root_url = input()
print(Fore.CYAN + 'On which port should the call server run: ' + Fore.RESET, end='')
port = input()
# Write out configuration file
print()
print(Fore.CYAN + 'Writing Call Server configuration...' + Fore.RESET)
with open(parentdir + '\\settings.py', 'a+') as f:
    f.write('# CALL SERVER SETTINGS\n')
    # str.lstrip() removes a set of characters, not a prefix, so strip the scheme explicitly
    for prefix in ('https://', 'http://'):
        if root_url.startswith(prefix):
            root_url = root_url[len(prefix):]
    f.write('ROOT_URL=\'%s\'\n' % root_url.rstrip('/'))
    f.write('PORT=%s\n\n' % port)
print()
print(Fore.GREEN + 'Call Server configuration successfully written!' + Fore.RESET)
print()
| [
"colorama.init",
"os.path.realpath",
"os.path.dirname"
] | [((83, 109), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (99, 109), False, 'import os\n'), ((120, 141), 'os.path.dirname', 'os.path.dirname', (['file'], {}), '(file)\n', (135, 141), False, 'import os\n'), ((154, 178), 'os.path.dirname', 'os.path.dirname', (['filedir'], {}), '(filedir)\n', (169, 178), False, 'import os\n'), ((213, 219), 'colorama.init', 'init', ([], {}), '()\n', (217, 219), False, 'from colorama import Fore, init\n')] |
import unittest
from filter import Filter
from differ import Differ
from guess_combinator import GuessCombinator
class TestGuessCombinator(unittest.TestCase):
def test_it_exists(self):
self.assertNotEqual(GuessCombinator(), None)
def test_it_returns_a_best_guess(self):
# solution_unknown
corpus = ["abcde", "abcdf", "abcdg", "abcdh", "abcdi", "efghi"]
expected_best_guess_pair = ["abcde", "efghi"]
# I can't construct good examples.
self.assertNotEqual(GuessCombinator.process(
corpus, corpus), None)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"guess_combinator.GuessCombinator",
"guess_combinator.GuessCombinator.process"
] | [((574, 589), 'unittest.main', 'unittest.main', ([], {}), '()\n', (587, 589), False, 'import unittest\n'), ((214, 231), 'guess_combinator.GuessCombinator', 'GuessCombinator', ([], {}), '()\n', (229, 231), False, 'from guess_combinator import GuessCombinator\n'), ((487, 526), 'guess_combinator.GuessCombinator.process', 'GuessCombinator.process', (['corpus', 'corpus'], {}), '(corpus, corpus)\n', (510, 526), False, 'from guess_combinator import GuessCombinator\n')] |
# Copyright Contributors to the Tapqir project.
# SPDX-License-Identifier: Apache-2.0
"""
hmm
^^^
"""
import math
from typing import Union
import torch
import torch.distributions.constraints as constraints
from pyro.distributions.hmm import _logmatmulexp
from pyro.ops.indexing import Vindex
from pyroapi import distributions as dist
from pyroapi import handlers, infer, pyro
from torch.nn.functional import one_hot
from tapqir.distributions import KSMOGN, AffineBeta
from tapqir.distributions.util import expand_offtarget, probs_m, probs_theta
from tapqir.models.cosmos import Cosmos
class HMM(Cosmos):
r"""
**Single-Color Hidden Markov Colocalization Model**
.. note::
This model is used for kinetic simulations. Efficient fitting is not yet supported.
**Reference**:
1. <NAME>, <NAME>, <NAME>, Theobald DL.
Bayesian machine learning analysis of single-molecule fluorescence colocalization images.
bioRxiv. 2021 Oct. doi: `10.1101/2021.09.30.462536 <https://doi.org/10.1101/2021.09.30.462536>`_.
:param S: Number of distinct molecular states for the binder molecules.
:param K: Maximum number of spots that can be present in a single image.
:param channels: Number of color channels.
:param device: Computation device (cpu or gpu).
:param dtype: Floating point precision.
:param use_pykeops: Use pykeops as backend to marginalize out offset.
:param vectorized: Vectorize time-dimension.
"""
name = "hmm"
def __init__(
self,
S: int = 1,
K: int = 2,
channels: Union[tuple, list] = (0,),
device: str = "cpu",
dtype: str = "double",
use_pykeops: bool = True,
vectorized: bool = False,
):
self.vectorized = vectorized
super().__init__(S, K, channels, device, dtype, use_pykeops)
self.conv_params = ["-ELBO", "proximity_loc", "gain_loc", "lamda_loc"]
self._global_params = ["gain", "proximity", "lamda", "trans"]
def model(self):
"""
**Generative Model**
"""
# global parameters
gain = pyro.sample("gain", dist.HalfNormal(50))
init = pyro.sample(
"init", dist.Dirichlet(torch.ones(self.S + 1) / (self.S + 1))
)
init = expand_offtarget(init)
trans = pyro.sample(
"trans",
dist.Dirichlet(torch.ones(self.S + 1, self.S + 1) / (self.S + 1)).to_event(
1
),
)
trans = expand_offtarget(trans)
lamda = pyro.sample("lamda", dist.Exponential(1))
proximity = pyro.sample("proximity", dist.Exponential(1))
size = torch.stack(
(
torch.full_like(proximity, 2.0),
(((self.data.P + 1) / (2 * proximity)) ** 2 - 1),
),
dim=-1,
)
# spots
spots = pyro.plate("spots", self.K)
# aoi sites
aois = pyro.plate(
"aois",
self.data.Nt,
subsample=self.n,
subsample_size=self.nbatch_size,
dim=-2,
)
# time frames
frames = (
pyro.vectorized_markov(name="frames", size=self.data.F, dim=-1)
if self.vectorized
else pyro.markov(range(self.data.F))
)
with aois as ndx:
ndx = ndx[:, None]
# background mean and std
background_mean = pyro.sample("background_mean", dist.HalfNormal(1000))
background_std = pyro.sample("background_std", dist.HalfNormal(100))
z_prev = None
for fdx in frames:
if self.vectorized:
fsx, fdx = fdx
else:
fsx = fdx
# fetch data
obs, target_locs, is_ontarget = self.data.fetch(ndx, fdx, self.cdx)
# sample background intensity
background = pyro.sample(
f"background_{fdx}",
dist.Gamma(
(background_mean / background_std) ** 2,
background_mean / background_std ** 2,
),
)
# sample hidden model state (1+S,)
z_probs = (
Vindex(init)[..., :, is_ontarget.long()]
if isinstance(fdx, int) and fdx < 1
else Vindex(trans)[..., z_prev, :, is_ontarget.long()]
)
z_curr = pyro.sample(f"z_{fsx}", dist.Categorical(z_probs))
theta = pyro.sample(
f"theta_{fdx}",
dist.Categorical(
Vindex(probs_theta(self.K, self.device))[
torch.clamp(z_curr, min=0, max=1)
]
),
infer={"enumerate": "parallel"},
)
onehot_theta = one_hot(theta, num_classes=1 + self.K)
ms, heights, widths, xs, ys = [], [], [], [], []
for kdx in spots:
specific = onehot_theta[..., 1 + kdx]
# spot presence
m = pyro.sample(
f"m_{kdx}_{fsx}",
dist.Bernoulli(Vindex(probs_m(lamda, self.K))[..., theta, kdx]),
)
with handlers.mask(mask=m > 0):
# sample spot variables
height = pyro.sample(
f"height_{kdx}_{fsx}",
dist.HalfNormal(10000),
)
width = pyro.sample(
f"width_{kdx}_{fsx}",
AffineBeta(
1.5,
2,
0.75,
2.25,
),
)
x = pyro.sample(
f"x_{kdx}_{fsx}",
AffineBeta(
0,
Vindex(size)[..., specific],
-(self.data.P + 1) / 2,
(self.data.P + 1) / 2,
),
)
y = pyro.sample(
f"y_{kdx}_{fsx}",
AffineBeta(
0,
Vindex(size)[..., specific],
-(self.data.P + 1) / 2,
(self.data.P + 1) / 2,
),
)
# append
ms.append(m)
heights.append(height)
widths.append(width)
xs.append(x)
ys.append(y)
# observed data
pyro.sample(
f"data_{fsx}",
KSMOGN(
torch.stack(heights, -1),
torch.stack(widths, -1),
torch.stack(xs, -1),
torch.stack(ys, -1),
target_locs,
background,
gain,
self.data.offset.samples,
self.data.offset.logits.to(self.dtype),
self.data.P,
torch.stack(torch.broadcast_tensors(*ms), -1),
self.use_pykeops,
),
obs=obs,
)
z_prev = z_curr
def guide(self):
"""
**Variational Distribution**
"""
# global parameters
pyro.sample(
"gain",
dist.Gamma(
pyro.param("gain_loc") * pyro.param("gain_beta"),
pyro.param("gain_beta"),
),
)
pyro.sample(
"init", dist.Dirichlet(pyro.param("init_mean") * pyro.param("init_size"))
)
pyro.sample(
"trans",
dist.Dirichlet(
pyro.param("trans_mean") * pyro.param("trans_size")
).to_event(1),
)
pyro.sample(
"lamda",
dist.Gamma(
pyro.param("lamda_loc") * pyro.param("lamda_beta"),
pyro.param("lamda_beta"),
),
)
pyro.sample(
"proximity",
AffineBeta(
pyro.param("proximity_loc"),
pyro.param("proximity_size"),
0,
(self.data.P + 1) / math.sqrt(12),
),
)
# spots
spots = pyro.plate("spots", self.K)
# aoi sites
aois = pyro.plate(
"aois",
self.data.Nt,
subsample=self.n,
subsample_size=self.nbatch_size,
dim=-2,
)
# time frames
frames = (
pyro.vectorized_markov(name="frames", size=self.data.F, dim=-1)
if self.vectorized
else pyro.markov(range(self.data.F))
)
with aois as ndx:
ndx = ndx[:, None]
pyro.sample(
"background_mean",
dist.Delta(Vindex(pyro.param("background_mean_loc"))[ndx, 0]),
)
pyro.sample(
"background_std",
dist.Delta(Vindex(pyro.param("background_std_loc"))[ndx, 0]),
)
z_prev = None
for fdx in frames:
if self.vectorized:
fsx, fdx = fdx
else:
fsx = fdx
# sample background intensity
pyro.sample(
f"background_{fsx}",
dist.Gamma(
Vindex(pyro.param("b_loc"))[ndx, fdx]
* Vindex(pyro.param("b_beta"))[ndx, fdx],
Vindex(pyro.param("b_beta"))[ndx, fdx],
),
)
# sample hidden model state (3,1,1,1)
z_probs = (
Vindex(pyro.param("z_trans"))[ndx, fdx, 0]
if isinstance(fdx, int) and fdx < 1
else Vindex(pyro.param("z_trans"))[ndx, fdx, z_prev]
)
z_curr = pyro.sample(
f"z_{fsx}",
dist.Categorical(z_probs),
infer={"enumerate": "parallel"},
)
for kdx in spots:
# spot presence
m_probs = Vindex(pyro.param("m_probs"))[z_curr, kdx, ndx, fdx]
m = pyro.sample(
f"m_{kdx}_{fsx}",
dist.Categorical(m_probs),
infer={"enumerate": "parallel"},
)
with handlers.mask(mask=m > 0):
# sample spot variables
pyro.sample(
f"height_{kdx}_{fsx}",
dist.Gamma(
Vindex(pyro.param("h_loc"))[kdx, ndx, fdx]
* Vindex(pyro.param("h_beta"))[kdx, ndx, fdx],
Vindex(pyro.param("h_beta"))[kdx, ndx, fdx],
),
)
pyro.sample(
f"width_{kdx}_{fsx}",
AffineBeta(
Vindex(pyro.param("w_mean"))[kdx, ndx, fdx],
Vindex(pyro.param("w_size"))[kdx, ndx, fdx],
0.75,
2.25,
),
)
pyro.sample(
f"x_{kdx}_{fsx}",
AffineBeta(
Vindex(pyro.param("x_mean"))[kdx, ndx, fdx],
Vindex(pyro.param("size"))[kdx, ndx, fdx],
-(self.data.P + 1) / 2,
(self.data.P + 1) / 2,
),
)
pyro.sample(
f"y_{kdx}_{fsx}",
AffineBeta(
Vindex(pyro.param("y_mean"))[kdx, ndx, fdx],
Vindex(pyro.param("size"))[kdx, ndx, fdx],
-(self.data.P + 1) / 2,
(self.data.P + 1) / 2,
),
)
z_prev = z_curr
def init_parameters(self):
"""
Initialize variational parameters.
"""
device = self.device
data = self.data
pyro.param(
"proximity_loc",
lambda: torch.tensor(0.5, device=device),
constraint=constraints.interval(
0,
(self.data.P + 1) / math.sqrt(12) - torch.finfo(self.dtype).eps,
),
)
pyro.param(
"proximity_size",
lambda: torch.tensor(100, device=device),
constraint=constraints.greater_than(2.0),
)
pyro.param(
"lamda_loc",
lambda: torch.tensor(0.5, device=device),
constraint=constraints.positive,
)
pyro.param(
"lamda_beta",
lambda: torch.tensor(100, device=device),
constraint=constraints.positive,
)
pyro.param(
"init_mean",
lambda: torch.ones(self.S + 1, device=device),
constraint=constraints.simplex,
)
pyro.param(
"init_size",
lambda: torch.tensor(2, device=device),
constraint=constraints.positive,
)
pyro.param(
"trans_mean",
lambda: torch.ones(self.S + 1, self.S + 1, device=device),
constraint=constraints.simplex,
)
pyro.param(
"trans_size",
lambda: torch.full((self.S + 1, 1), 2, device=device),
constraint=constraints.positive,
)
pyro.param(
"gain_loc",
lambda: torch.tensor(5, device=device),
constraint=constraints.positive,
)
pyro.param(
"gain_beta",
lambda: torch.tensor(100, device=device),
constraint=constraints.positive,
)
pyro.param(
"background_mean_loc",
lambda: torch.full(
(data.Nt, 1),
data.median - self.data.offset.mean,
device=device,
),
constraint=constraints.positive,
)
pyro.param(
"background_std_loc",
lambda: torch.ones(data.Nt, 1, device=device),
constraint=constraints.positive,
)
pyro.param(
"b_loc",
lambda: torch.full(
(data.Nt, data.F),
data.median - self.data.offset.mean,
device=device,
),
constraint=constraints.positive,
)
pyro.param(
"b_beta",
lambda: torch.ones(data.Nt, data.F, device=device),
constraint=constraints.positive,
)
pyro.param(
"h_loc",
lambda: torch.full((self.K, data.Nt, data.F), 2000, device=device),
constraint=constraints.positive,
)
pyro.param(
"h_beta",
lambda: torch.full((self.K, data.Nt, data.F), 0.001, device=device),
constraint=constraints.positive,
)
pyro.param(
"w_mean",
lambda: torch.full((self.K, data.Nt, data.F), 1.5, device=device),
constraint=constraints.interval(
0.75 + torch.finfo(self.dtype).eps,
2.25 - torch.finfo(self.dtype).eps,
),
)
pyro.param(
"w_size",
lambda: torch.full((self.K, data.Nt, data.F), 100, device=device),
constraint=constraints.greater_than(2.0),
)
pyro.param(
"x_mean",
lambda: torch.zeros(self.K, data.Nt, data.F, device=device),
constraint=constraints.interval(
-(data.P + 1) / 2 + torch.finfo(self.dtype).eps,
(data.P + 1) / 2 - torch.finfo(self.dtype).eps,
),
)
pyro.param(
"y_mean",
lambda: torch.zeros(self.K, data.Nt, data.F, device=device),
constraint=constraints.interval(
-(data.P + 1) / 2 + torch.finfo(self.dtype).eps,
(data.P + 1) / 2 - torch.finfo(self.dtype).eps,
),
)
pyro.param(
"size",
lambda: torch.full((self.K, data.Nt, data.F), 200, device=device),
constraint=constraints.greater_than(2.0),
)
# classification
pyro.param(
"z_trans",
lambda: torch.ones(
data.Nt,
data.F,
1 + self.S,
1 + self.S,
device=device,
),
constraint=constraints.simplex,
)
pyro.param(
"m_probs",
lambda: torch.full(
(1 + self.S, self.K, self.data.Nt, self.data.F),
0.5,
device=device,
),
constraint=constraints.unit_interval,
)
def TraceELBO(self, jit=False):
"""
A trace implementation of ELBO-based SVI that supports - exhaustive enumeration over
discrete sample sites, and - local parallel sampling over any sample site in the guide.
"""
if self.vectorized:
return (
infer.JitTraceMarkovEnum_ELBO if jit else infer.TraceMarkovEnum_ELBO
)(max_plate_nesting=2, ignore_jit_warnings=True)
return (infer.JitTraceEnum_ELBO if jit else infer.TraceEnum_ELBO)(
max_plate_nesting=2, ignore_jit_warnings=True
)
@staticmethod
def _sequential_logmatmulexp(logits: torch.Tensor) -> torch.Tensor:
"""
For a tensor ``x`` whose time dimension is -3, computes::
x[..., 0, :, :] @ x[..., 1, :, :] @ ... @ x[..., T-1, :, :]
but does so numerically stably in log space.
"""
batch_shape = logits.shape[:-3]
state_dim = logits.size(-1)
sum_terms = []
# up sweep
while logits.size(-3) > 1:
time = logits.size(-3)
even_time = time // 2 * 2
even_part = logits[..., :even_time, :, :]
x_y = even_part.reshape(
batch_shape + (even_time // 2, 2, state_dim, state_dim)
)
x, y = x_y.unbind(-3)
contracted = _logmatmulexp(x, y)
if time > even_time:
contracted = torch.cat((contracted, logits[..., -1:, :, :]), dim=-3)
sum_terms.append(logits)
logits = contracted
else:
sum_terms.append(logits)
# handle root case
sum_term = sum_terms.pop()
left_term = HMM._contraction_identity(sum_term)
# down sweep
while sum_terms:
sum_term = sum_terms.pop()
new_left_term = HMM._contraction_identity(sum_term)
time = sum_term.size(-3)
even_time = time // 2 * 2
if time > even_time:
new_left_term[..., time - 1 : time, :, :] = left_term[
..., even_time // 2 : even_time // 2 + 1, :, :
]
left_term = left_term[..., : even_time // 2, :, :]
left_sum = sum_term[..., :even_time:2, :, :]
left_sum_and_term = _logmatmulexp(left_term, left_sum)
new_left_term[..., :even_time:2, :, :] = left_term
new_left_term[..., 1:even_time:2, :, :] = left_sum_and_term
left_term = new_left_term
else:
alphas = _logmatmulexp(left_term, sum_term)
return alphas
@staticmethod
def _contraction_identity(logits: torch.Tensor) -> torch.Tensor:
batch_shape = logits.shape[:-2]
state_dim = logits.size(-1)
result = torch.eye(state_dim).log()
result = result.reshape((1,) * len(batch_shape) + (state_dim, state_dim))
result = result.repeat(batch_shape + (1, 1))
return result
@property
def z_probs(self) -> torch.Tensor:
r"""
Probability of there being a target-specific spot :math:`p(z=1)`
"""
result = self._sequential_logmatmulexp(pyro.param("z_trans").data.log())
return result[..., 0, 1].exp()
@property
def pspecific(self) -> torch.Tensor:
r"""
Probability of there being a target-specific spot :math:`p(\mathsf{specific})`
"""
return self.z_probs
@property
def m_probs(self) -> torch.Tensor:
r"""
Posterior spot presence probability :math:`q(m=1)`.
"""
return torch.einsum(
"knf,nf->knf",
pyro.param("m_probs").data[1],
self.pspecific,
)
| [
"torch.eye",
"torch.broadcast_tensors",
"torch.cat",
"torch.full",
"pyroapi.pyro.param",
"tapqir.distributions.util.expand_offtarget",
"torch.ones",
"tapqir.distributions.util.probs_m",
"torch.distributions.constraints.greater_than",
"tapqir.distributions.util.probs_theta",
"pyroapi.handlers.mask",
"torch.zeros",
"pyroapi.distributions.Exponential",
"math.sqrt",
"tapqir.distributions.AffineBeta",
"pyroapi.distributions.Gamma",
"pyroapi.distributions.HalfNormal",
"pyroapi.distributions.Categorical",
"torch.clamp",
"pyro.ops.indexing.Vindex",
"torch.finfo",
"torch.full_like",
"torch.stack",
"pyroapi.pyro.plate",
"torch.nn.functional.one_hot",
"pyro.distributions.hmm._logmatmulexp",
"pyroapi.pyro.vectorized_markov",
"torch.tensor"
] | [((2289, 2311), 'tapqir.distributions.util.expand_offtarget', 'expand_offtarget', (['init'], {}), '(init)\n', (2305, 2311), False, 'from tapqir.distributions.util import expand_offtarget, probs_m, probs_theta\n'), ((2509, 2532), 'tapqir.distributions.util.expand_offtarget', 'expand_offtarget', (['trans'], {}), '(trans)\n', (2525, 2532), False, 'from tapqir.distributions.util import expand_offtarget, probs_m, probs_theta\n'), ((2892, 2919), 'pyroapi.pyro.plate', 'pyro.plate', (['"""spots"""', 'self.K'], {}), "('spots', self.K)\n", (2902, 2919), False, 'from pyroapi import handlers, infer, pyro\n'), ((2955, 3051), 'pyroapi.pyro.plate', 'pyro.plate', (['"""aois"""', 'self.data.Nt'], {'subsample': 'self.n', 'subsample_size': 'self.\nnbatch_size', 'dim': '(-2)'}), "('aois', self.data.Nt, subsample=self.n, subsample_size=self.\nnbatch_size, dim=-2)\n", (2965, 3051), False, 'from pyroapi import handlers, infer, pyro\n'), ((8872, 8899), 'pyroapi.pyro.plate', 'pyro.plate', (['"""spots"""', 'self.K'], {}), "('spots', self.K)\n", (8882, 8899), False, 'from pyroapi import handlers, infer, pyro\n'), ((8935, 9031), 'pyroapi.pyro.plate', 'pyro.plate', (['"""aois"""', 'self.data.Nt'], {'subsample': 'self.n', 'subsample_size': 'self.\nnbatch_size', 'dim': '(-2)'}), "('aois', self.data.Nt, subsample=self.n, subsample_size=self.\nnbatch_size, dim=-2)\n", (8945, 9031), False, 'from pyroapi import handlers, infer, pyro\n'), ((2141, 2160), 'pyroapi.distributions.HalfNormal', 'dist.HalfNormal', (['(50)'], {}), '(50)\n', (2156, 2160), True, 'from pyroapi import distributions as dist\n'), ((2570, 2589), 'pyroapi.distributions.Exponential', 'dist.Exponential', (['(1)'], {}), '(1)\n', (2586, 2589), True, 'from pyroapi import distributions as dist\n'), ((2636, 2655), 'pyroapi.distributions.Exponential', 'dist.Exponential', (['(1)'], {}), '(1)\n', (2652, 2655), True, 'from pyroapi import distributions as dist\n'), ((3171, 3234), 'pyroapi.pyro.vectorized_markov', 'pyro.vectorized_markov', ([], {'name': '"""frames"""', 'size': 'self.data.F', 'dim': '(-1)'}), "(name='frames', size=self.data.F, dim=-1)\n", (3193, 3234), False, 'from pyroapi import handlers, infer, pyro\n'), ((9151, 9214), 'pyroapi.pyro.vectorized_markov', 'pyro.vectorized_markov', ([], {'name': '"""frames"""', 'size': 'self.data.F', 'dim': '(-1)'}), "(name='frames', size=self.data.F, dim=-1)\n", (9173, 9214), False, 'from pyroapi import handlers, infer, pyro\n'), ((19217, 19236), 'pyro.distributions.hmm._logmatmulexp', '_logmatmulexp', (['x', 'y'], {}), '(x, y)\n', (19230, 19236), False, 'from pyro.distributions.hmm import _logmatmulexp\n'), ((20163, 20197), 'pyro.distributions.hmm._logmatmulexp', '_logmatmulexp', (['left_term', 'left_sum'], {}), '(left_term, left_sum)\n', (20176, 20197), False, 'from pyro.distributions.hmm import _logmatmulexp\n'), ((20406, 20440), 'pyro.distributions.hmm._logmatmulexp', '_logmatmulexp', (['left_term', 'sum_term'], {}), '(left_term, sum_term)\n', (20419, 20440), False, 'from pyro.distributions.hmm import _logmatmulexp\n'), ((2715, 2746), 'torch.full_like', 'torch.full_like', (['proximity', '(2.0)'], {}), '(proximity, 2.0)\n', (2730, 2746), False, 'import torch\n'), ((3482, 3503), 'pyroapi.distributions.HalfNormal', 'dist.HalfNormal', (['(1000)'], {}), '(1000)\n', (3497, 3503), True, 'from pyroapi import distributions as dist\n'), ((3564, 3584), 'pyroapi.distributions.HalfNormal', 'dist.HalfNormal', (['(100)'], {}), '(100)\n', (3579, 3584), True, 'from pyroapi import distributions as dist\n'), ((4966, 5004), 'torch.nn.functional.one_hot', 'one_hot', (['theta'], {'num_classes': '(1 + self.K)'}), '(theta, num_classes=1 + self.K)\n', (4973, 5004), False, 'from torch.nn.functional import one_hot\n'), ((8040, 8063), 'pyroapi.pyro.param', 'pyro.param', (['"""gain_beta"""'], {}), "('gain_beta')\n", (8050, 8063), False, 'from pyroapi import handlers, infer, pyro\n'), ((8532, 8556), 'pyroapi.pyro.param', 'pyro.param', (['"""lamda_beta"""'], {}), "('lamda_beta')\n", (8542, 8556), False, 'from pyroapi import handlers, infer, pyro\n'), ((8669, 8696), 'pyroapi.pyro.param', 'pyro.param', (['"""proximity_loc"""'], {}), "('proximity_loc')\n", (8679, 8696), False, 'from pyroapi import handlers, infer, pyro\n'), ((8714, 8742), 'pyroapi.pyro.param', 'pyro.param', (['"""proximity_size"""'], {}), "('proximity_size')\n", (8724, 8742), False, 'from pyroapi import handlers, infer, pyro\n'), ((13143, 13175), 'torch.tensor', 'torch.tensor', (['(0.5)'], {'device': 'device'}), '(0.5, device=device)\n', (13155, 13175), False, 'import torch\n'), ((13417, 13449), 'torch.tensor', 'torch.tensor', (['(100)'], {'device': 'device'}), '(100, device=device)\n', (13429, 13449), False, 'import torch\n'), ((13474, 13503), 'torch.distributions.constraints.greater_than', 'constraints.greater_than', (['(2.0)'], {}), '(2.0)\n', (13498, 13503), True, 'import torch.distributions.constraints as constraints\n'), ((13580, 13612), 'torch.tensor', 'torch.tensor', (['(0.5)'], {'device': 'device'}), '(0.5, device=device)\n', (13592, 13612), False, 'import torch\n'), ((13735, 13767), 'torch.tensor', 'torch.tensor', (['(100)'], {'device': 'device'}), '(100, device=device)\n', (13747, 13767), False, 'import torch\n'), ((13889, 13926), 'torch.ones', 'torch.ones', (['(self.S + 1)'], {'device': 'device'}), '(self.S + 1, device=device)\n', (13899, 13926), False, 'import torch\n'), ((14047, 14077), 'torch.tensor', 'torch.tensor', (['(2)'], {'device': 'device'}), '(2, device=device)\n', (14059, 14077), False, 'import torch\n'), ((14200, 14249), 'torch.ones', 'torch.ones', (['(self.S + 1)', '(self.S + 1)'], {'device': 'device'}), '(self.S + 1, self.S + 1, device=device)\n', (14210, 14249), False, 'import torch\n'), ((14371, 14416), 'torch.full', 'torch.full', (['(self.S + 1, 1)', '(2)'], {'device': 'device'}), '((self.S + 1, 1), 2, device=device)\n', (14381, 14416), False, 'import torch\n'), ((14538, 14568), 'torch.tensor', 'torch.tensor', (['(5)'], {'device': 'device'}), '(5, device=device)\n', (14550, 14568), False, 'import torch\n'), ((14690, 14722), 'torch.tensor', 'torch.tensor', (['(100)'], {'device': 'device'}), '(100, device=device)\n', (14702, 14722), False, 'import torch\n'), ((14855, 14931), 'torch.full', 'torch.full', (['(data.Nt, 1)', '(data.median - self.data.offset.mean)'], {'device': 'device'}), '((data.Nt, 1), data.median - self.data.offset.mean, device=device)\n', (14865, 14931), False, 'import torch\n'), ((15125, 15162), 'torch.ones', 'torch.ones', (['data.Nt', '(1)'], {'device': 'device'}), '(data.Nt, 1, device=device)\n', (15135, 15162), False, 'import torch\n'), ((15281, 15367), 'torch.full', 'torch.full', (['(data.Nt, data.F)', '(data.median - self.data.offset.mean)'], {'device': 'device'}), '((data.Nt, data.F), data.median - self.data.offset.mean, device=\ndevice)\n', (15291, 15367), False, 'import torch\n'), ((15544, 15586), 'torch.ones', 'torch.ones', (['data.Nt', 'data.F'], {'device': 'device'}), '(data.Nt, data.F, device=device)\n', (15554, 15586), False, 'import torch\n'), ((15704, 15762), 'torch.full', 'torch.full', (['(self.K, data.Nt, data.F)', '(2000)'], {'device': 'device'}), '((self.K, data.Nt, data.F), 2000, device=device)\n', (15714, 15762), False, 'import torch\n'), ((15881, 15940), 'torch.full', 'torch.full', (['(self.K, data.Nt, data.F)', '(0.001)'], {'device': 'device'}), '((self.K, data.Nt, data.F), 0.001, device=device)\n', (15891, 15940), False, 'import torch\n'), ((16059, 16116), 'torch.full', 'torch.full', (['(self.K, data.Nt, data.F)', '(1.5)'], {'device': 'device'}), '((self.K, data.Nt, data.F), 1.5, device=device)\n', (16069, 16116), False, 'import torch\n'), ((16354, 16411), 'torch.full', 'torch.full', (['(self.K, data.Nt, data.F)', '(100)'], {'device': 'device'}), '((self.K, data.Nt, data.F), 100, device=device)\n', (16364, 16411), False, 'import torch\n'), ((16436, 16465), 'torch.distributions.constraints.greater_than', 'constraints.greater_than', (['(2.0)'], {}), '(2.0)\n', (16460, 16465), True, 'import torch.distributions.constraints as constraints\n'), ((16539, 16590), 'torch.zeros', 'torch.zeros', (['self.K', 'data.Nt', 'data.F'], {'device': 'device'}), '(self.K, data.Nt, data.F, device=device)\n', (16550, 16590), False, 'import torch\n'), ((16853, 16904), 'torch.zeros', 'torch.zeros', (['self.K', 'data.Nt', 'data.F'], {'device': 'device'}), '(self.K, data.Nt, data.F, device=device)\n', (16864, 16904), False, 'import torch\n'), ((17165, 17222), 'torch.full', 'torch.full', (['(self.K, data.Nt, data.F)', '(200)'], {'device': 'device'}), '((self.K, data.Nt, data.F), 200, device=device)\n', (17175, 17222), False, 'import torch\n'), ((17247, 17276), 'torch.distributions.constraints.greater_than', 'constraints.greater_than', (['(2.0)'], {}), '(2.0)\n', (17271, 17276), True, 'import torch.distributions.constraints as constraints\n'), ((17377, 17443), 'torch.ones', 'torch.ones', (['data.Nt', 'data.F', '(1 + self.S)', '(1 + self.S)'], {'device': 'device'}), '(data.Nt, data.F, 1 + self.S, 1 + self.S, device=device)\n', (17387, 17443), False, 'import torch\n'), ((17657, 17736), 'torch.full', 'torch.full', (['(1 + self.S, self.K, self.data.Nt, self.data.F)', '(0.5)'], {'device': 'device'}), '((1 + self.S, self.K, self.data.Nt, self.data.F), 0.5, device=device)\n', (17667, 17736), False, 'import torch\n'), ((19299, 19354), 'torch.cat', 'torch.cat', (['(contracted, logits[..., -1:, :, :])'], {'dim': '(-3)'}), '((contracted, logits[..., -1:, :, :]), dim=-3)\n', (19308, 19354), False, 'import torch\n'), ((20644, 20664), 'torch.eye', 'torch.eye', (['state_dim'], {}), '(state_dim)\n', (20653, 20664), False, 'import torch\n'), ((2225, 2247), 'torch.ones', 'torch.ones', (['(self.S + 1)'], {}), '(self.S + 1)\n', (2235, 2247), False, 'import torch\n'), ((4028, 4123), 'pyroapi.distributions.Gamma', 'dist.Gamma', (['((background_mean / background_std) ** 2)', '(background_mean / background_std ** 2)'], {}), '((background_mean / background_std) ** 2, background_mean / \nbackground_std ** 2)\n', (4038, 4123), True, 'from pyroapi import distributions as dist\n'), ((4548, 4573), 'pyroapi.distributions.Categorical', 'dist.Categorical', (['z_probs'], {}), '(z_probs)\n', (4564, 4573), True, 'from pyroapi import distributions as dist\n'), ((7974, 7996), 'pyroapi.pyro.param', 'pyro.param', (['"""gain_loc"""'], {}), "('gain_loc')\n", (7984, 7996), False, 'from pyroapi import handlers, infer, pyro\n'), ((7999, 8022), 'pyroapi.pyro.param', 'pyro.param', (['"""gain_beta"""'], {}), "('gain_beta')\n", (8009, 8022), False, 'from pyroapi import handlers, infer, pyro\n'), ((8146, 8169), 'pyroapi.pyro.param', 
'pyro.param', (['"""init_mean"""'], {}), "('init_mean')\n", (8156, 8169), False, 'from pyroapi import handlers, infer, pyro\n'), ((8172, 8195), 'pyroapi.pyro.param', 'pyro.param', (['"""init_size"""'], {}), "('init_size')\n", (8182, 8195), False, 'from pyroapi import handlers, infer, pyro\n'), ((8464, 8487), 'pyroapi.pyro.param', 'pyro.param', (['"""lamda_loc"""'], {}), "('lamda_loc')\n", (8474, 8487), False, 'from pyroapi import handlers, infer, pyro\n'), ((8490, 8514), 'pyroapi.pyro.param', 'pyro.param', (['"""lamda_beta"""'], {}), "('lamda_beta')\n", (8500, 8514), False, 'from pyroapi import handlers, infer, pyro\n'), ((8799, 8812), 'math.sqrt', 'math.sqrt', (['(12)'], {}), '(12)\n', (8808, 8812), False, 'import math\n'), ((10611, 10636), 'pyroapi.distributions.Categorical', 'dist.Categorical', (['z_probs'], {}), '(z_probs)\n', (10627, 10636), True, 'from pyroapi import distributions as dist\n'), ((21503, 21524), 'pyroapi.pyro.param', 'pyro.param', (['"""m_probs"""'], {}), "('m_probs')\n", (21513, 21524), False, 'from pyroapi import handlers, infer, pyro\n'), ((4309, 4321), 'pyro.ops.indexing.Vindex', 'Vindex', (['init'], {}), '(init)\n', (4315, 4321), False, 'from pyro.ops.indexing import Vindex\n'), ((4431, 4444), 'pyro.ops.indexing.Vindex', 'Vindex', (['trans'], {}), '(trans)\n', (4437, 4444), False, 'from pyro.ops.indexing import Vindex\n'), ((5414, 5439), 'pyroapi.handlers.mask', 'handlers.mask', ([], {'mask': '(m > 0)'}), '(mask=m > 0)\n', (5427, 5439), False, 'from pyroapi import handlers, infer, pyro\n'), ((7148, 7172), 'torch.stack', 'torch.stack', (['heights', '(-1)'], {}), '(heights, -1)\n', (7159, 7172), False, 'import torch\n'), ((7198, 7221), 'torch.stack', 'torch.stack', (['widths', '(-1)'], {}), '(widths, -1)\n', (7209, 7221), False, 'import torch\n'), ((7247, 7266), 'torch.stack', 'torch.stack', (['xs', '(-1)'], {}), '(xs, -1)\n', (7258, 7266), False, 'import torch\n'), ((7292, 7311), 'torch.stack', 'torch.stack', (['ys', '(-1)'], {}), '(ys, -1)\n', (7303, 7311), False, 'import torch\n'), ((10966, 10991), 'pyroapi.distributions.Categorical', 'dist.Categorical', (['m_probs'], {}), '(m_probs)\n', (10982, 10991), True, 'from pyroapi import distributions as dist\n'), ((11097, 11122), 'pyroapi.handlers.mask', 'handlers.mask', ([], {'mask': '(m > 0)'}), '(mask=m > 0)\n', (11110, 11122), False, 'from pyroapi import handlers, infer, pyro\n'), ((21027, 21048), 'pyroapi.pyro.param', 'pyro.param', (['"""z_trans"""'], {}), "('z_trans')\n", (21037, 21048), False, 'from pyroapi import handlers, infer, pyro\n'), ((2389, 2423), 'torch.ones', 'torch.ones', (['(self.S + 1)', '(self.S + 1)'], {}), '(self.S + 1, self.S + 1)\n', (2399, 2423), False, 'import torch\n'), ((4781, 4814), 'torch.clamp', 'torch.clamp', (['z_curr'], {'min': '(0)', 'max': '(1)'}), '(z_curr, min=0, max=1)\n', (4792, 4814), False, 'import torch\n'), ((5614, 5636), 'pyroapi.distributions.HalfNormal', 'dist.HalfNormal', (['(10000)'], {}), '(10000)\n', (5629, 5636), True, 'from pyroapi import distributions as dist\n'), ((5787, 5817), 'tapqir.distributions.AffineBeta', 'AffineBeta', (['(1.5)', '(2)', '(0.75)', '(2.25)'], {}), '(1.5, 2, 0.75, 2.25)\n', (5797, 5817), False, 'from tapqir.distributions import KSMOGN, AffineBeta\n'), ((7603, 7631), 'torch.broadcast_tensors', 'torch.broadcast_tensors', (['*ms'], {}), '(*ms)\n', (7626, 7631), False, 'import torch\n'), ((8293, 8317), 'pyroapi.pyro.param', 'pyro.param', (['"""trans_mean"""'], {}), "('trans_mean')\n", (8303, 8317), False, 'from pyroapi import handlers, infer, 
pyro\n'), ((8320, 8344), 'pyroapi.pyro.param', 'pyro.param', (['"""trans_size"""'], {}), "('trans_size')\n", (8330, 8344), False, 'from pyroapi import handlers, infer, pyro\n'), ((9457, 9490), 'pyroapi.pyro.param', 'pyro.param', (['"""background_mean_loc"""'], {}), "('background_mean_loc')\n", (9467, 9490), False, 'from pyroapi import handlers, infer, pyro\n'), ((9609, 9641), 'pyroapi.pyro.param', 'pyro.param', (['"""background_std_loc"""'], {}), "('background_std_loc')\n", (9619, 9641), False, 'from pyroapi import handlers, infer, pyro\n'), ((10338, 10359), 'pyroapi.pyro.param', 'pyro.param', (['"""z_trans"""'], {}), "('z_trans')\n", (10348, 10359), False, 'from pyroapi import handlers, infer, pyro\n'), ((10462, 10483), 'pyroapi.pyro.param', 'pyro.param', (['"""z_trans"""'], {}), "('z_trans')\n", (10472, 10483), False, 'from pyroapi import handlers, infer, pyro\n'), ((10817, 10838), 'pyroapi.pyro.param', 'pyro.param', (['"""m_probs"""'], {}), "('m_probs')\n", (10827, 10838), False, 'from pyroapi import handlers, infer, pyro\n'), ((13277, 13290), 'math.sqrt', 'math.sqrt', (['(12)'], {}), '(12)\n', (13286, 13290), False, 'import math\n'), ((13293, 13316), 'torch.finfo', 'torch.finfo', (['self.dtype'], {}), '(self.dtype)\n', (13304, 13316), False, 'import torch\n'), ((16186, 16209), 'torch.finfo', 'torch.finfo', (['self.dtype'], {}), '(self.dtype)\n', (16197, 16209), False, 'import torch\n'), ((16238, 16261), 'torch.finfo', 'torch.finfo', (['self.dtype'], {}), '(self.dtype)\n', (16249, 16261), False, 'import torch\n'), ((16673, 16696), 'torch.finfo', 'torch.finfo', (['self.dtype'], {}), '(self.dtype)\n', (16684, 16696), False, 'import torch\n'), ((16737, 16760), 'torch.finfo', 'torch.finfo', (['self.dtype'], {}), '(self.dtype)\n', (16748, 16760), False, 'import torch\n'), ((16987, 17010), 'torch.finfo', 'torch.finfo', (['self.dtype'], {}), '(self.dtype)\n', (16998, 17010), False, 'import torch\n'), ((17051, 17074), 'torch.finfo', 'torch.finfo', (['self.dtype'], {}), '(self.dtype)\n', (17062, 17074), False, 'import torch\n'), ((4718, 4750), 'tapqir.distributions.util.probs_theta', 'probs_theta', (['self.K', 'self.device'], {}), '(self.K, self.device)\n', (4729, 4750), False, 'from tapqir.distributions.util import expand_offtarget, probs_m, probs_theta\n'), ((10154, 10174), 'pyroapi.pyro.param', 'pyro.param', (['"""b_beta"""'], {}), "('b_beta')\n", (10164, 10174), False, 'from pyroapi import handlers, infer, pyro\n'), ((5324, 5346), 'tapqir.distributions.util.probs_m', 'probs_m', (['lamda', 'self.K'], {}), '(lamda, self.K)\n', (5331, 5346), False, 'from tapqir.distributions.util import expand_offtarget, probs_m, probs_theta\n'), ((6198, 6210), 'pyro.ops.indexing.Vindex', 'Vindex', (['size'], {}), '(size)\n', (6204, 6210), False, 'from pyro.ops.indexing import Vindex\n'), ((6589, 6601), 'pyro.ops.indexing.Vindex', 'Vindex', (['size'], {}), '(size)\n', (6595, 6601), False, 'from pyro.ops.indexing import Vindex\n'), ((10026, 10045), 'pyroapi.pyro.param', 'pyro.param', (['"""b_loc"""'], {}), "('b_loc')\n", (10036, 10045), False, 'from pyroapi import handlers, infer, pyro\n'), ((10090, 10110), 'pyroapi.pyro.param', 'pyro.param', (['"""b_beta"""'], {}), "('b_beta')\n", (10100, 10110), False, 'from pyroapi import handlers, infer, pyro\n'), ((11493, 11513), 'pyroapi.pyro.param', 'pyro.param', (['"""h_beta"""'], {}), "('h_beta')\n", (11503, 11513), False, 'from pyroapi import handlers, infer, pyro\n'), ((11754, 11774), 'pyroapi.pyro.param', 'pyro.param', (['"""w_mean"""'], {}), "('w_mean')\n", (11764, 
11774), False, 'from pyroapi import handlers, infer, pyro\n'), ((11831, 11851), 'pyroapi.pyro.param', 'pyro.param', (['"""w_size"""'], {}), "('w_size')\n", (11841, 11851), False, 'from pyroapi import handlers, infer, pyro\n'), ((12164, 12184), 'pyroapi.pyro.param', 'pyro.param', (['"""x_mean"""'], {}), "('x_mean')\n", (12174, 12184), False, 'from pyroapi import handlers, infer, pyro\n'), ((12241, 12259), 'pyroapi.pyro.param', 'pyro.param', (['"""size"""'], {}), "('size')\n", (12251, 12259), False, 'from pyroapi import handlers, infer, pyro\n'), ((12607, 12627), 'pyroapi.pyro.param', 'pyro.param', (['"""y_mean"""'], {}), "('y_mean')\n", (12617, 12627), False, 'from pyroapi import handlers, infer, pyro\n'), ((12684, 12702), 'pyroapi.pyro.param', 'pyro.param', (['"""size"""'], {}), "('size')\n", (12694, 12702), False, 'from pyroapi import handlers, infer, pyro\n'), ((11339, 11358), 'pyroapi.pyro.param', 'pyro.param', (['"""h_loc"""'], {}), "('h_loc')\n", (11349, 11358), False, 'from pyroapi import handlers, infer, pyro\n'), ((11416, 11436), 'pyroapi.pyro.param', 'pyro.param', (['"""h_beta"""'], {}), "('h_beta')\n", (11426, 11436), False, 'from pyroapi import handlers, infer, pyro\n')] |
# imports
# -------
import re
from werkzeug.routing import BaseConverter
from werkzeug.exceptions import NotFound
# helpers
# -------
MODELS = dict()
def class_registry(cls):
"""
Function for dynamically getting class
registry dictionary from specified model.
"""
    try:
        return dict(cls._sa_registry._class_registry)
    except AttributeError:
        # older SQLAlchemy versions expose the registry directly on the class
        return dict(cls._decl_class_registry)
return
def gather_models():
"""
Inspect sqlalchemy models from current context and set global
dictionary to be used in url conversion.
"""
global MODELS
from flask import current_app, has_app_context
if not has_app_context():
return
if 'sqlalchemy' not in current_app.extensions:
return
# inspect current models and add to map
db = current_app.extensions['sqlalchemy'].db
registry = class_registry(db.Model)
for cls in registry.values():
if isinstance(cls, type) and issubclass(cls, db.Model):
# class name
MODELS[cls.__name__] = cls
# lowercase name
MODELS[cls.__name__.lower()] = cls
# snake_case name
words = re.findall(r'([A-Z][0-9a-z]+)', cls.__name__)
if len(words) > 1:
alias = '_'.join(map(lambda x: x.lower(), words))
MODELS[alias] = cls
return
# converters
# ----------
class ModelConverter(BaseConverter):
"""
For url inputs containing a model identifier, look
up the model and return the object.
This method simplifies a lot of the boilerplate needed
to do model look ups in REST apis.
Examples:
.. code-block:: python
@app.route('/users/<id(User):user>')
def get_user(user):
return jsonify(user.json())
In addition, this class can be inherited and used
for other custom parameter url converters. For instance,
here is how you might use it to create a name converter:
.. code-block:: python
class NameConverter(ModelConverter):
__param__ = 'name'
app.url_map.converters['name'] = NameConverter
# ... handlers ...
@app.route('/users/<name(User):user>')
def get_user(user):
return jsonify(user.json())
"""
__param__ = 'id'
def __init__(self, map, model):
self.map = map
self.model = model
return
@property
def models(self):
global MODELS
if not MODELS:
gather_models()
return MODELS
def to_python(self, value):
mapper = self.models
# make sure model exists
if self.model not in mapper:
raise AssertionError(
'Specified model `{}` in url converter '
'not part of application models.'.format(self.model))
# set up class for conversion
cls = mapper[self.model]
# search for the object
model = cls.get(**{self.__param__: value})
if model is None:
raise NotFound
return model
def to_url(self, value):
return super(ModelConverter, self).to_url(getattr(value, self.__param__))
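# Usage sketch (illustrative, not part of the original module): a Flask app
# registers the converter once, after which routes can declare model parameters.
# `app` and the `User` model are assumed to exist in the application.
#
#   app.url_map.converters['id'] = ModelConverter
#
#   @app.route('/users/<id(User):user>')
#   def get_user(user):
#       return jsonify(user.json())
#
# Because gather_models() also registers lowercase and snake_case aliases,
# '<id(user):user>' resolves to the same User model.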
| [
"flask.has_app_context",
"re.findall"
] | [((648, 665), 'flask.has_app_context', 'has_app_context', ([], {}), '()\n', (663, 665), False, 'from flask import current_app, has_app_context\n'), ((1173, 1217), 're.findall', 're.findall', (['"""([A-Z][0-9a-z]+)"""', 'cls.__name__'], {}), "('([A-Z][0-9a-z]+)', cls.__name__)\n", (1183, 1217), False, 'import re\n')] |
import os
import json
import torch
from transformers import GPT2LMHeadModel, GPT2Tokenizer, AutoTokenizer, GPT2Config
PATH = './models'
OUTPUT_PATH = './output/'
if __name__ == '__main__':
#tokenizer = AutoTokenizer.from_pretrained("./models")
# add the EOS token as PAD token to avoid warnings
#tokenizer = GPT2Tokenizer(config=GPT2Config(**json.load(open(os.path.join(PATH, 'config.json')))))
model = GPT2LMHeadModel(config=GPT2Config(**json.load(open(os.path.join(PATH, 'config.json')))))
#input_ids = tokenizer.encode('', return_tensors='tf')
greedy_output = model.generate(torch.zeros((10, 1), dtype=torch.int), max_length=1024+1, min_length=1024+1)
print(list(greedy_output.data[0].numpy()))
for file in ('train', 'valid', 'test'):
with open(os.path.join(OUTPUT_PATH, f'{file}.txt'), 'w') as fout:
data = greedy_output.data
for i in range(len(data)):
elements = list(data[i].numpy())[1:]
for idx, element in enumerate(elements):
fout.write(str(int(element)))
                    if idx < len(elements) - 1:  # avoid a trailing space at the end of each line
fout.write(" ")
fout.write('\n')
| [
"torch.zeros",
"os.path.join"
] | [((608, 645), 'torch.zeros', 'torch.zeros', (['(10, 1)'], {'dtype': 'torch.int'}), '((10, 1), dtype=torch.int)\n', (619, 645), False, 'import torch\n'), ((795, 835), 'os.path.join', 'os.path.join', (['OUTPUT_PATH', 'f"""{file}.txt"""'], {}), "(OUTPUT_PATH, f'{file}.txt')\n", (807, 835), False, 'import os\n'), ((474, 507), 'os.path.join', 'os.path.join', (['PATH', '"""config.json"""'], {}), "(PATH, 'config.json')\n", (486, 507), False, 'import os\n')] |
#Copyright 2013 <NAME>
#
#This program is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
import time
import Quartz
from AppKit import NSEvent, NSSystemDefined
from .base import PyKeyboardMeta, PyKeyboardEventMeta
# Taken from events.h
# /System/Library/Frameworks/Carbon.framework/Versions/A/Frameworks/HIToolbox.framework/Versions/A/Headers/Events.h
character_translate_table = {
'a': 0x00,
's': 0x01,
'd': 0x02,
'f': 0x03,
'h': 0x04,
'g': 0x05,
'z': 0x06,
'x': 0x07,
'c': 0x08,
'v': 0x09,
'b': 0x0b,
'q': 0x0c,
'w': 0x0d,
'e': 0x0e,
'r': 0x0f,
'y': 0x10,
't': 0x11,
'1': 0x12,
'2': 0x13,
'3': 0x14,
'4': 0x15,
'6': 0x16,
'5': 0x17,
'=': 0x18,
'9': 0x19,
'7': 0x1a,
'-': 0x1b,
'8': 0x1c,
'0': 0x1d,
']': 0x1e,
'o': 0x1f,
'u': 0x20,
'[': 0x21,
'i': 0x22,
'p': 0x23,
'l': 0x25,
'j': 0x26,
'\'': 0x27,
'k': 0x28,
';': 0x29,
'\\': 0x2a,
',': 0x2b,
'/': 0x2c,
'n': 0x2d,
'm': 0x2e,
'.': 0x2f,
'`': 0x32,
' ': 0x31,
'\r': 0x24,
'\t': 0x30,
'\n': 0x24,
'return' : 0x24,
'tab' : 0x30,
'space' : 0x31,
'delete' : 0x33,
'escape' : 0x35,
'command' : 0x37,
'shift' : 0x38,
'capslock' : 0x39,
'option' : 0x3A,
'alternate' : 0x3A,
'control' : 0x3B,
'rightshift' : 0x3C,
'rightoption' : 0x3D,
'rightcontrol' : 0x3E,
'function' : 0x3F,
}
# Taken from ev_keymap.h
# http://www.opensource.apple.com/source/IOHIDFamily/IOHIDFamily-86.1/IOHIDSystem/IOKit/hidsystem/ev_keymap.h
special_key_translate_table = {
'KEYTYPE_SOUND_UP': 0,
'KEYTYPE_SOUND_DOWN': 1,
'KEYTYPE_BRIGHTNESS_UP': 2,
'KEYTYPE_BRIGHTNESS_DOWN': 3,
'KEYTYPE_CAPS_LOCK': 4,
'KEYTYPE_HELP': 5,
'POWER_KEY': 6,
'KEYTYPE_MUTE': 7,
'UP_ARROW_KEY': 8,
'DOWN_ARROW_KEY': 9,
'KEYTYPE_NUM_LOCK': 10,
'KEYTYPE_CONTRAST_UP': 11,
'KEYTYPE_CONTRAST_DOWN': 12,
'KEYTYPE_LAUNCH_PANEL': 13,
'KEYTYPE_EJECT': 14,
'KEYTYPE_VIDMIRROR': 15,
'KEYTYPE_PLAY': 16,
'KEYTYPE_NEXT': 17,
'KEYTYPE_PREVIOUS': 18,
'KEYTYPE_FAST': 19,
'KEYTYPE_REWIND': 20,
'KEYTYPE_ILLUMINATION_UP': 21,
'KEYTYPE_ILLUMINATION_DOWN': 22,
'KEYTYPE_ILLUMINATION_TOGGLE': 23
}
class PyKeyboard(PyKeyboardMeta):
def __init__(self):
self.shift_key = 'shift'
self.modifier_table = {'Shift':False,'Command':False,'Control':False,'Alternate':False}
def press_key(self, key):
if key.title() in self.modifier_table:
self.modifier_table.update({key.title():True})
if key in special_key_translate_table:
self._press_special_key(key, True)
else:
self._press_normal_key(key, True)
def release_key(self, key):
        # mark the modifier as released
if key.title() in self.modifier_table: self.modifier_table.update({key.title():False})
if key in special_key_translate_table:
self._press_special_key(key, False)
else:
self._press_normal_key(key, False)
def special_key_assignment(self):
self.volume_mute_key = 'KEYTYPE_MUTE'
self.volume_down_key = 'KEYTYPE_SOUND_DOWN'
self.volume_up_key = 'KEYTYPE_SOUND_UP'
self.media_play_pause_key = 'KEYTYPE_PLAY'
# Doesn't work :(
# self.media_next_track_key = 'KEYTYPE_NEXT'
# self.media_prev_track_key = 'KEYTYPE_PREVIOUS'
def _press_normal_key(self, key, down):
try:
key_code = character_translate_table[key.lower()]
# kCGEventFlagMaskAlternate | kCGEventFlagMaskCommand | kCGEventFlagMaskControl | kCGEventFlagMaskShift
event = Quartz.CGEventCreateKeyboardEvent(None, key_code, down)
mkeyStr = ''
for mkey in self.modifier_table:
if self.modifier_table[mkey]:
if len(mkeyStr)>1: mkeyStr = mkeyStr+' ^ '
mkeyStr = mkeyStr+'Quartz.kCGEventFlagMask'+mkey
if len(mkeyStr)>1: eval('Quartz.CGEventSetFlags(event, '+mkeyStr+')')
Quartz.CGEventPost(Quartz.kCGHIDEventTap, event)
if key.lower() == "shift":
time.sleep(.1)
except KeyError:
raise RuntimeError("Key {} not implemented.".format(key))
def _press_special_key(self, key, down):
""" Helper method for special keys.
Source: http://stackoverflow.com/questions/11045814/emulate-media-key-press-on-mac
"""
key_code = special_key_translate_table[key]
ev = NSEvent.otherEventWithType_location_modifierFlags_timestamp_windowNumber_context_subtype_data1_data2_(
NSSystemDefined, # type
(0,0), # location
0xa00 if down else 0xb00, # flags
0, # timestamp
0, # window
0, # ctx
8, # subtype
(key_code << 16) | ((0xa if down else 0xb) << 8), # data1
-1 # data2
)
        Quartz.CGEventPost(0, ev.CGEvent())
class PyKeyboardEvent(PyKeyboardEventMeta):
def run(self):
tap = Quartz.CGEventTapCreate(
Quartz.kCGSessionEventTap,
Quartz.kCGHeadInsertEventTap,
Quartz.kCGEventTapOptionDefault,
Quartz.CGEventMaskBit(Quartz.kCGEventKeyDown) |
Quartz.CGEventMaskBit(Quartz.kCGEventKeyUp),
self.handler,
None)
loopsource = Quartz.CFMachPortCreateRunLoopSource(None, tap, 0)
loop = Quartz.CFRunLoopGetCurrent()
Quartz.CFRunLoopAddSource(loop, loopsource, Quartz.kCFRunLoopDefaultMode)
Quartz.CGEventTapEnable(tap, True)
while self.state:
Quartz.CFRunLoopRunInMode(Quartz.kCFRunLoopDefaultMode, 5, False)
def handler(self, proxy, type, event, refcon):
key = Quartz.CGEventGetIntegerValueField(event, Quartz.kCGKeyboardEventKeycode)
if type == Quartz.kCGEventKeyDown:
self.key_press(key)
elif type == Quartz.kCGEventKeyUp:
self.key_release(key)
if self.capture:
Quartz.CGEventSetType(event, Quartz.kCGEventNull)
return event
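if __name__ == '__main__':
    # Usage sketch (not part of the original module): type a capital 'a' by
    # wrapping the key press in shift, then tap the mute media key.
    k = PyKeyboard()
    k.press_key('shift')
    k.press_key('a')
    k.release_key('a')
    k.release_key('shift')
    k.press_key('KEYTYPE_MUTE')
    k.release_key('KEYTYPE_MUTE')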
| [
"AppKit.NSEvent.otherEventWithType_location_modifierFlags_timestamp_windowNumber_context_subtype_data1_data2_",
"Quartz.CFRunLoopAddSource",
"Quartz.CGEventGetIntegerValueField",
"time.sleep",
"Quartz.CGEventCreateKeyboardEvent",
"Quartz.CFRunLoopGetCurrent",
"Quartz.CFRunLoopRunInMode",
"Quartz.CGEventMaskBit",
"Quartz.CFMachPortCreateRunLoopSource",
"Quartz.CGEventPost",
"Quartz.CGEventTapEnable",
"Quartz.CGEventSetType"
] | [((5226, 5445), 'AppKit.NSEvent.otherEventWithType_location_modifierFlags_timestamp_windowNumber_context_subtype_data1_data2_', 'NSEvent.otherEventWithType_location_modifierFlags_timestamp_windowNumber_context_subtype_data1_data2_', (['NSSystemDefined', '(0, 0)', '(2560 if down else 2816)', '(0)', '(0)', '(0)', '(8)', '(key_code << 16 | (10 if down else 11) << 8)', '(-1)'], {}), '(\n NSSystemDefined, (0, 0), 2560 if down else 2816, 0, 0, 0, 8, key_code <<\n 16 | (10 if down else 11) << 8, -1)\n', (5327, 5445), False, 'from AppKit import NSEvent\n'), ((6145, 6195), 'Quartz.CFMachPortCreateRunLoopSource', 'Quartz.CFMachPortCreateRunLoopSource', (['None', 'tap', '(0)'], {}), '(None, tap, 0)\n', (6181, 6195), False, 'import Quartz\n'), ((6211, 6239), 'Quartz.CFRunLoopGetCurrent', 'Quartz.CFRunLoopGetCurrent', ([], {}), '()\n', (6237, 6239), False, 'import Quartz\n'), ((6248, 6321), 'Quartz.CFRunLoopAddSource', 'Quartz.CFRunLoopAddSource', (['loop', 'loopsource', 'Quartz.kCFRunLoopDefaultMode'], {}), '(loop, loopsource, Quartz.kCFRunLoopDefaultMode)\n', (6273, 6321), False, 'import Quartz\n'), ((6330, 6364), 'Quartz.CGEventTapEnable', 'Quartz.CGEventTapEnable', (['tap', '(True)'], {}), '(tap, True)\n', (6353, 6364), False, 'import Quartz\n'), ((6536, 6609), 'Quartz.CGEventGetIntegerValueField', 'Quartz.CGEventGetIntegerValueField', (['event', 'Quartz.kCGKeyboardEventKeycode'], {}), '(event, Quartz.kCGKeyboardEventKeycode)\n', (6570, 6609), False, 'import Quartz\n'), ((4334, 4389), 'Quartz.CGEventCreateKeyboardEvent', 'Quartz.CGEventCreateKeyboardEvent', (['None', 'key_code', 'down'], {}), '(None, key_code, down)\n', (4367, 4389), False, 'import Quartz\n'), ((4752, 4800), 'Quartz.CGEventPost', 'Quartz.CGEventPost', (['Quartz.kCGHIDEventTap', 'event'], {}), '(Quartz.kCGHIDEventTap, event)\n', (4770, 4800), False, 'import Quartz\n'), ((6404, 6469), 'Quartz.CFRunLoopRunInMode', 'Quartz.CFRunLoopRunInMode', (['Quartz.kCFRunLoopDefaultMode', '(5)', '(False)'], {}), '(Quartz.kCFRunLoopDefaultMode, 5, False)\n', (6429, 6469), False, 'import Quartz\n'), ((6800, 6849), 'Quartz.CGEventSetType', 'Quartz.CGEventSetType', (['event', 'Quartz.kCGEventNull'], {}), '(event, Quartz.kCGEventNull)\n', (6821, 6849), False, 'import Quartz\n'), ((4854, 4869), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (4864, 4869), False, 'import time\n'), ((5974, 6019), 'Quartz.CGEventMaskBit', 'Quartz.CGEventMaskBit', (['Quartz.kCGEventKeyDown'], {}), '(Quartz.kCGEventKeyDown)\n', (5995, 6019), False, 'import Quartz\n'), ((6034, 6077), 'Quartz.CGEventMaskBit', 'Quartz.CGEventMaskBit', (['Quartz.kCGEventKeyUp'], {}), '(Quartz.kCGEventKeyUp)\n', (6055, 6077), False, 'import Quartz\n')] |
"""
Hypothesis strategies for generating Axiom-related data.
"""
from epsilon.extime import Time
from hypothesis import strategies as st
from hypothesis.extra.datetime import datetimes
from axiom.attributes import LARGEST_NEGATIVE, LARGEST_POSITIVE
def axiomText(*a, **kw):
"""
Strategy for generating Axiom-compatible text values.
"""
return st.text(
alphabet=st.characters(
blacklist_categories={'Cs'},
blacklist_characters={u'\x00'}),
*a, **kw)
def textlists():
"""
Strategy for generating lists storable with L{axiom.attributes.textlist}.
"""
return st.lists(st.text(
alphabet=st.characters(
blacklist_categories={'Cs'},
blacklist_characters={u'\x00', u'\x02', u'\x1f'})))
def axiomIntegers(minValue=LARGEST_NEGATIVE, maxValue=LARGEST_POSITIVE):
"""
Strategy for generating Axiom-compatible integers.
@type minValue: L{int}
@param minValue: Minimum value to generate; default is the least value
that can be stored in an L{axiom.attributes.integer} attribute.
    @type maxValue: L{int}
    @param maxValue: Maximum value to generate; default is the greatest value
that can be stored in an L{axiom.attributes.integer} attribute.
"""
return st.integers(min_value=minValue, max_value=maxValue)
def timestamps(*a, **kw):
"""
Strategy for generating L{epsilon.extime.Time} objects.
"""
return st.builds(Time.fromDatetime, datetimes(timezones=[], *a, **kw))
def fixedDecimals(precision, minValue=None, maxValue=None):
"""
Strategy for generating L{decimal.Decimal} values of a fixed precision.
@type precision: L{decimal.Decimal}
@param precision: The precision to use; for example, C{Decimal('0.01')} for
a L{axiom.attributes.point2decimal} attribute.
@type minValue: L{decimal.Decimal}
@param minValue: The minimum value to generate, or C{None} for the least
possible.
    @type maxValue: L{decimal.Decimal}
    @param maxValue: The maximum value to generate, or C{None} for the greatest
possible.
"""
if minValue is None:
minValue = LARGEST_NEGATIVE
else:
minValue = int(minValue / precision)
if maxValue is None:
maxValue = LARGEST_POSITIVE
else:
maxValue = int(maxValue / precision)
return st.integers(min_value=minValue, max_value=maxValue).map(
lambda v: v * precision)
__all__ = [
'axiomText', 'axiomIntegers', 'fixedDecimals', 'timestamps', 'textlists']
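# Usage sketch (illustrative, not part of the original module): the strategies
# plug into hypothesis' @given like any built-in strategy.
from decimal import Decimal
from hypothesis import given

@given(axiomText(), axiomIntegers(), fixedDecimals(Decimal('0.01')))
def test_axiom_values(text, number, amount):
    # placeholder assertions; a real test would round-trip the values through a Store
    assert LARGEST_NEGATIVE <= number <= LARGEST_POSITIVE
    assert amount == amount.quantize(Decimal('0.01'))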
| [
"hypothesis.strategies.characters",
"hypothesis.strategies.integers",
"hypothesis.extra.datetime.datetimes"
] | [((1298, 1349), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': 'minValue', 'max_value': 'maxValue'}), '(min_value=minValue, max_value=maxValue)\n', (1309, 1349), True, 'from hypothesis import strategies as st\n'), ((1495, 1528), 'hypothesis.extra.datetime.datetimes', 'datetimes', (['*a'], {'timezones': '[]'}), '(*a, timezones=[], **kw)\n', (1504, 1528), False, 'from hypothesis.extra.datetime import datetimes\n'), ((389, 463), 'hypothesis.strategies.characters', 'st.characters', ([], {'blacklist_categories': "{'Cs'}", 'blacklist_characters': "{u'\\x00'}"}), "(blacklist_categories={'Cs'}, blacklist_characters={u'\\x00'})\n", (402, 463), True, 'from hypothesis import strategies as st\n'), ((2376, 2427), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': 'minValue', 'max_value': 'maxValue'}), '(min_value=minValue, max_value=maxValue)\n', (2387, 2427), True, 'from hypothesis import strategies as st\n'), ((667, 763), 'hypothesis.strategies.characters', 'st.characters', ([], {'blacklist_categories': "{'Cs'}", 'blacklist_characters': "{u'\\x00', u'\\x02', u'\\x1f'}"}), "(blacklist_categories={'Cs'}, blacklist_characters={u'\\x00',\n u'\\x02', u'\\x1f'})\n", (680, 763), True, 'from hypothesis import strategies as st\n')] |
# importing the required libraries
import pyautogui, time
# delay to switch windows
time.sleep(10)
# content you want to spam with
f = open("idoc.pub_green-lantern-movie-script.txt", 'r')
# loop to spam
for word in f:
    # fetch and type each line from the file
pyautogui.write(word)
# press enter to send the message
pyautogui.press('enter')
| [
"pyautogui.press",
"pyautogui.write",
"time.sleep"
] | [((85, 99), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (95, 99), False, 'import pyautogui, time\n'), ((268, 289), 'pyautogui.write', 'pyautogui.write', (['word'], {}), '(word)\n', (283, 289), False, 'import pyautogui, time\n'), ((332, 356), 'pyautogui.press', 'pyautogui.press', (['"""enter"""'], {}), "('enter')\n", (347, 356), False, 'import pyautogui, time\n')] |
import tensorflow as tf
@tf.function
def tensor_scatter_nd_ops_by_name(segment_name, tensor, indices, updates, name=None):
"""Scatter operation chosen by name that pick tensor_scatter_nd functions.
Args:
        segment_name (str): Update rule for the scattered updates: one of 'sum', 'max' or 'min' (aliases listed below).
        tensor (tf.Tensor): Tensor to scatter updates into.
        indices (tf.Tensor): Indices for the updates.
updates (tf.Tensor): Updates of new entries for tensor.
name (str): Name of the tensor.
Returns:
tf.Tensor: Updates scattered into tensor with different update rules.
"""
if segment_name in ["segment_sum", "sum", "reduce_sum", "add"]:
pool = tf.tensor_scatter_nd_add(tensor, indices, updates, name=name)
elif segment_name in ["segment_max", "max", "reduce_max"]:
pool = tf.tensor_scatter_nd_max(tensor, indices, updates, name=name)
elif segment_name in ["segment_min", "min", "reduce_min"]:
pool = tf.tensor_scatter_nd_min(tensor, indices, updates, name=name)
else:
        raise TypeError("Unknown pooling, choose: 'sum', 'max' or 'min'.")
return pool
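if __name__ == "__main__":
    # Minimal usage sketch (values made up for illustration): scatter-add the
    # updates [[10.], [20.]] into rows 0 and 2 of a zero tensor.
    example = tensor_scatter_nd_ops_by_name(
        "sum",
        tf.zeros((4, 1)),
        tf.constant([[0], [2]]),
        tf.constant([[10.0], [20.0]]),
    )
    print(example)  # -> [[10.], [0.], [20.], [0.]]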
| [
"tensorflow.tensor_scatter_nd_min",
"tensorflow.tensor_scatter_nd_add",
"tensorflow.tensor_scatter_nd_max"
] | [((709, 770), 'tensorflow.tensor_scatter_nd_add', 'tf.tensor_scatter_nd_add', (['tensor', 'indices', 'updates'], {'name': 'name'}), '(tensor, indices, updates, name=name)\n', (733, 770), True, 'import tensorflow as tf\n'), ((849, 910), 'tensorflow.tensor_scatter_nd_max', 'tf.tensor_scatter_nd_max', (['tensor', 'indices', 'updates'], {'name': 'name'}), '(tensor, indices, updates, name=name)\n', (873, 910), True, 'import tensorflow as tf\n'), ((989, 1050), 'tensorflow.tensor_scatter_nd_min', 'tf.tensor_scatter_nd_min', (['tensor', 'indices', 'updates'], {'name': 'name'}), '(tensor, indices, updates, name=name)\n', (1013, 1050), True, 'import tensorflow as tf\n')] |
# Utility functions to access azure data storage
import json, os
from azure.storage.blob import BlockBlobService, PublicAccess
def load_text_file(containerName, blobName, accountName, accountKey):
'''
load the file specified from azure block blob storage. if the file is not
found return an empty dictionary
Parameters
----------
containerName: str
container in storage account to open
blobName: str
name of blob in container to open
accountName: str
name of storage account
    accountKey: str
access key for storage account
Returns
-------
dictionary
'''
# Create BlockBlockService
block_blob_service = BlockBlobService(
account_name=accountName, account_key=accountKey
)
# try loading data from blob store. if blob is not found return empty dict
try:
res = block_blob_service.get_blob_to_text(containerName, blobName)
blobData = json.loads(res.content)
    except Exception:
blobData = {}
return blobData
def save_text_file(data, containerName, blobName, accountName, accountKey):
'''
save a textfile to azure block blob storage.
Parameters
----------
data: str
(text)data to upload
containerName: str
container in storage account
blobName: str
name of blob in container
accountName: str
name of storage account
    accountKey: str
access key for storage account
Returns
-------
'''
# Create BlockBlockService
block_blob_service = BlockBlobService(
account_name=accountName, account_key=accountKey
)
block_blob_service.create_blob_from_text(containerName, blobName, data)
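if __name__ == '__main__':
    # Usage sketch; the container, blob and account values below are
    # placeholders, not part of the original module.
    settings = load_text_file('my-container', 'settings.json', 'myaccount', os.environ['AZURE_KEY'])
    settings['last_run'] = '2021-01-01'
    save_text_file(json.dumps(settings), 'my-container', 'settings.json', 'myaccount', os.environ['AZURE_KEY'])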
| [
"azure.storage.blob.BlockBlobService",
"json.loads"
] | [((755, 821), 'azure.storage.blob.BlockBlobService', 'BlockBlobService', ([], {'account_name': 'accountName', 'account_key': 'accountKey'}), '(account_name=accountName, account_key=accountKey)\n', (771, 821), False, 'from azure.storage.blob import BlockBlobService, PublicAccess\n'), ((1696, 1762), 'azure.storage.blob.BlockBlobService', 'BlockBlobService', ([], {'account_name': 'accountName', 'account_key': 'accountKey'}), '(account_name=accountName, account_key=accountKey)\n', (1712, 1762), False, 'from azure.storage.blob import BlockBlobService, PublicAccess\n'), ((1024, 1047), 'json.loads', 'json.loads', (['res.content'], {}), '(res.content)\n', (1034, 1047), False, 'import json, os\n')] |
from curses.textpad import rectangle
import curses
ORIGIN_Y, ORIGIN_X = 5,2
class EditorManager:
def __init__(self,std_scr):
self.std_scr = std_scr
self.height, self.width = self.std_scr.getmaxyx()
self.origin_y, self.origin_x = 5, 2
self.canvas_height, self.canvas_width = self.height//4-1, self.width//4-4
self.all_editors = {}
self.is_global_state = False
self.navigator = None
curses.init_pair(2, curses.COLOR_GREEN, curses.COLOR_BLACK)
def left(self):
pass
def right(self):
pass
def up(self):
pass
def down(self):
pass
def run(self):
pass
def clear_content(self):
for i in range(self.origin_y+2,self.canvas_height):
self.std_scr.addstr(i, self.origin_x," "*(self.canvas_width-2))
def show_title(self):
self.std_scr.addstr(self.origin_y, self.origin_x+1, "Open Editors")
self.std_scr.addstr(self.origin_y, self.canvas_width-3, "▼")
rectangle(self.std_scr, self.origin_y - 1, self.origin_x - 1, self.origin_y + 1, self.width // 4 - 4)
def show_content(self):
self.clear_content()
self.all_editors = {}
index, editors = self.navigator.context["Manager"].get_all_editor_names()
for i, editor in enumerate(editors):
self.all_editors[i] = editor
rectangle(self.std_scr, self.origin_y+1,self.origin_x-1,self.canvas_height, self.canvas_width)
for i, editor in self.all_editors.items():
if i == index:
self.std_scr.addstr(self.origin_y + i + 2, self.origin_x + 1, editor, curses.color_pair(2))
else:
self.std_scr.addstr(self.origin_y+i+2, self.origin_x+1, editor)
self.std_scr.refresh()
def display(self):
self.show_title()
self.show_content()
def update_global_status(self,status):
self.is_global_state = status
def set_navigator(self, navigator):
self.navigator = navigator | [
"curses.color_pair",
"curses.textpad.rectangle",
"curses.init_pair"
] | [((453, 512), 'curses.init_pair', 'curses.init_pair', (['(2)', 'curses.COLOR_GREEN', 'curses.COLOR_BLACK'], {}), '(2, curses.COLOR_GREEN, curses.COLOR_BLACK)\n', (469, 512), False, 'import curses\n'), ((1027, 1132), 'curses.textpad.rectangle', 'rectangle', (['self.std_scr', '(self.origin_y - 1)', '(self.origin_x - 1)', '(self.origin_y + 1)', '(self.width // 4 - 4)'], {}), '(self.std_scr, self.origin_y - 1, self.origin_x - 1, self.origin_y +\n 1, self.width // 4 - 4)\n', (1036, 1132), False, 'from curses.textpad import rectangle\n'), ((1393, 1498), 'curses.textpad.rectangle', 'rectangle', (['self.std_scr', '(self.origin_y + 1)', '(self.origin_x - 1)', 'self.canvas_height', 'self.canvas_width'], {}), '(self.std_scr, self.origin_y + 1, self.origin_x - 1, self.\n canvas_height, self.canvas_width)\n', (1402, 1498), False, 'from curses.textpad import rectangle\n'), ((1652, 1672), 'curses.color_pair', 'curses.color_pair', (['(2)'], {}), '(2)\n', (1669, 1672), False, 'import curses\n')] |
from metaphor.common.cli import cli_main
from .extractor import TableauExtractor
if __name__ == "__main__":
cli_main("Tableau metadata extractor", TableauExtractor)
| [
"metaphor.common.cli.cli_main"
] | [((114, 170), 'metaphor.common.cli.cli_main', 'cli_main', (['"""Tableau metadata extractor"""', 'TableauExtractor'], {}), "('Tableau metadata extractor', TableauExtractor)\n", (122, 170), False, 'from metaphor.common.cli import cli_main\n')] |
#!/usr/bin/env python
import re
import sys
import decimal
from mt940m_v2 import ParseMT940
D = decimal.Decimal
# read and concatenate entire MT940 contents and add '-ABN' to make sure the last record is captured
if len(sys.argv) == 2:
argf = sys.argv[1]
else:
print('please provide a valid MT940 file')
exit()
text = open(argf).read().splitlines()
text = ''.join(text) +'-ABN'
payee = ''
memo = ''
total_amount = D('0')
bank_account = ''
fn = ''
# record: pattern to determine a MT940 record group, note more than one transaction
# is possible within a record
record_pat = re.compile(r'(?P<record>:\d\d.??:.*?(?=-ABN))')
# field_pat: pattern to seperate the fields in the MT940 file :num :field
field_pat = re.compile(r':(?P<num>\d\d).??:(?P<field>.*?(?=:\d\d.??:))')
# val61_pat: pattern to seperate the values in field 61
#:valuta (date) :date (transaction date and used for date) :sign :amount :code :reference
val61_pat = re.compile(r'(?P<valuta>\d{6})(?P<date>\d{4})(?P<sign>\D)'
r'(?P<amount>\d+[,.]\d*)(?P<code>\w{4})(?P<reference>\w+$)')
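# example (value made up for illustration) of what val61_pat captures:
#   re.match(val61_pat, '2002190219D123,45NTRFNONREF').groupdict() ->
#   {'valuta': '200219', 'date': '0219', 'sign': 'D', 'amount': '123,45',
#    'code': 'NTRF', 'reference': 'NONREF'}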
for match in re.finditer(record_pat, text):
# add token ':99:' to the end of the record to make sure the last field is also captured
record = match.group('record') +':99:'
# parse the string in a field number 'num' and its corresponding 'field'
for match in re.finditer(field_pat,record):
num = match.group('num')
field = match.group('field')
# in case field number is equal to '25' check if it is a new bank_account. If new make new qif file using
# the name of the bank account found in field '25'. Field 25 is assumed to be before field 61.
if num == '25':
# close the qif file if this is not the first instance
if field != bank_account and bank_account != '':
qif_file.close()
end_balance = start_balance + total_amount
print ('{}: start balance: {:.2f} / transfers: {:.2f} / end balance: {:.2f}' \
.format(fn, start_balance, total_amount, end_balance))
total_amount = D('0')
fn = ''
# open a new qif file if a new bank account is encountered
if field != bank_account:
bank_account = field
new_bank_flag = True
fn = argf.rsplit('.',1)[0] # make the file name the same as the 1st argument + some changes
fn = fn + '_' + bank_account +'.qif'
qif_file = open(fn,'w')
qif_file.write('!Type:Bank\n')
#find the start_balance for a new bank account in field 60
if num == '60' and new_bank_flag:
m=re.search(r'(\D)\d{6}.*?(?=[\d])(.*$)',field)
start_balance=D(ParseMT940.conv_amount_str(m.group(1),m.group(2)))
new_bank_flag = False
# in case field number is '61' handle the transaction using the information in field 61 and subsequent 86
if num == '61':
f61 = re.match(val61_pat, field)
f61_dict = f61.groupdict()
# in case field number is '86' handle to payee and memo and write the transaction to QIF
if num == '86':
date = ParseMT940.transaction_date_conversion(f61_dict['valuta'], f61_dict['date'])
amount = ParseMT940.conv_amount_str(f61_dict['sign'], f61_dict['amount'])
payee, memo = ParseMT940.code86(field, bank_account, date, amount)
total_amount += D(amount)
ParseMT940.write_qif_record (qif_file, date, amount, payee, memo)
# on finishing the program close the last qif_file
if fn != '':
qif_file.close()
end_balance = start_balance + total_amount
print ('{}: start balance: {:.2f} / transfers: {:.2f} / end balance: {:.2f}'.format(fn, start_balance, total_amount, end_balance))
else:
print('this is not a valid MT940 file')
| [
"mt940m_v2.ParseMT940.conv_amount_str",
"mt940m_v2.ParseMT940.transaction_date_conversion",
"re.finditer",
"mt940m_v2.ParseMT940.code86",
"mt940m_v2.ParseMT940.write_qif_record",
"re.match",
"re.search",
"re.compile"
] | [((589, 637), 're.compile', 're.compile', (['"""(?P<record>:\\\\d\\\\d.??:.*?(?=-ABN))"""'], {}), "('(?P<record>:\\\\d\\\\d.??:.*?(?=-ABN))')\n", (599, 637), False, 'import re\n'), ((724, 787), 're.compile', 're.compile', (['""":(?P<num>\\\\d\\\\d).??:(?P<field>.*?(?=:\\\\d\\\\d.??:))"""'], {}), "(':(?P<num>\\\\d\\\\d).??:(?P<field>.*?(?=:\\\\d\\\\d.??:))')\n", (734, 787), False, 'import re\n'), ((944, 1075), 're.compile', 're.compile', (['"""(?P<valuta>\\\\d{6})(?P<date>\\\\d{4})(?P<sign>\\\\D)(?P<amount>\\\\d+[,.]\\\\d*)(?P<code>\\\\w{4})(?P<reference>\\\\w+$)"""'], {}), "(\n '(?P<valuta>\\\\d{6})(?P<date>\\\\d{4})(?P<sign>\\\\D)(?P<amount>\\\\d+[,.]\\\\d*)(?P<code>\\\\w{4})(?P<reference>\\\\w+$)'\n )\n", (954, 1075), False, 'import re\n'), ((1101, 1130), 're.finditer', 're.finditer', (['record_pat', 'text'], {}), '(record_pat, text)\n', (1112, 1130), False, 'import re\n'), ((1363, 1393), 're.finditer', 're.finditer', (['field_pat', 'record'], {}), '(field_pat, record)\n', (1374, 1393), False, 'import re\n'), ((2720, 2768), 're.search', 're.search', (['"""(\\\\D)\\\\d{6}.*?(?=[\\\\d])(.*$)"""', 'field'], {}), "('(\\\\D)\\\\d{6}.*?(?=[\\\\d])(.*$)', field)\n", (2729, 2768), False, 'import re\n'), ((3036, 3062), 're.match', 're.match', (['val61_pat', 'field'], {}), '(val61_pat, field)\n', (3044, 3062), False, 'import re\n'), ((3243, 3319), 'mt940m_v2.ParseMT940.transaction_date_conversion', 'ParseMT940.transaction_date_conversion', (["f61_dict['valuta']", "f61_dict['date']"], {}), "(f61_dict['valuta'], f61_dict['date'])\n", (3281, 3319), False, 'from mt940m_v2 import ParseMT940\n'), ((3341, 3405), 'mt940m_v2.ParseMT940.conv_amount_str', 'ParseMT940.conv_amount_str', (["f61_dict['sign']", "f61_dict['amount']"], {}), "(f61_dict['sign'], f61_dict['amount'])\n", (3367, 3405), False, 'from mt940m_v2 import ParseMT940\n'), ((3432, 3484), 'mt940m_v2.ParseMT940.code86', 'ParseMT940.code86', (['field', 'bank_account', 'date', 'amount'], {}), '(field, bank_account, date, amount)\n', (3449, 3484), False, 'from mt940m_v2 import ParseMT940\n'), ((3535, 3599), 'mt940m_v2.ParseMT940.write_qif_record', 'ParseMT940.write_qif_record', (['qif_file', 'date', 'amount', 'payee', 'memo'], {}), '(qif_file, date, amount, payee, memo)\n', (3562, 3599), False, 'from mt940m_v2 import ParseMT940\n')] |
import sys
import os
import Labyrinth
import time
import threading
class Agent:
num = 0
x = 0
y = 0
labyrinth = None
callback = None
def __init__(self, x, y, labyrinth, callback):
self.num = time.time()*1000
self.x = x
self.y = y
self.labyrinth = labyrinth
self.callback = callback
print(str(self.num)+': Created new agent. Exploring...')
t = threading.Thread(target=self.explore)
t.start()
def explore(self):
self.callback()
if self.labyrinth.finished or self.labyrinth.isVisited(self.x, self.y):
sys.exit()
walkableSpots = []
if (self.labyrinth.isFinish(self.x, self.y)):
print(str(self.num)+': Agent found the exit at x: '+str(self.x)+', y: '+str(self.y))
self.labyrinth.finished = True
sys.exit()
self.labyrinth.visit(self.x, self.y)
print('{}: Visiting {} {}'.format(str(self.num), self.x, self.y))
if (self.labyrinth.isWalkable(self.x-1, self.y)):
walkableSpots.append({'x': self.x-1, 'y': self.y})
if (self.labyrinth.isWalkable(self.x, self.y-1)):
walkableSpots.append({'x': self.x, 'y': self.y-1})
if (self.labyrinth.isWalkable(self.x+1, self.y)):
walkableSpots.append({'x': self.x+1, 'y': self.y})
if (self.labyrinth.isWalkable(self.x, self.y+1)):
walkableSpots.append({'x': self.x, 'y': self.y+1})
if (len(walkableSpots)==1):
self.x = walkableSpots[0]['x']
self.y = walkableSpots[0]['y']
t = threading.Thread(target=self.explore)
t.start()
if (len(walkableSpots)>1):
for num, spot in enumerate(walkableSpots, start = 1):
agent = Agent(spot['x'], spot['y'], self.labyrinth, self.callback)
self.x = walkableSpots[0]['x']
self.y = walkableSpots[0]['y']
t = threading.Thread(target=self.explore)
t.start()
if (len(walkableSpots) == 0):
print(str(self.num)+': Dead end reached, dying...')
sys.exit()
| [
"threading.Thread",
"sys.exit",
"time.time"
] | [((425, 462), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.explore'}), '(target=self.explore)\n', (441, 462), False, 'import threading\n'), ((225, 236), 'time.time', 'time.time', ([], {}), '()\n', (234, 236), False, 'import time\n'), ((621, 631), 'sys.exit', 'sys.exit', ([], {}), '()\n', (629, 631), False, 'import sys\n'), ((865, 875), 'sys.exit', 'sys.exit', ([], {}), '()\n', (873, 875), False, 'import sys\n'), ((1617, 1654), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.explore'}), '(target=self.explore)\n', (1633, 1654), False, 'import threading\n'), ((1963, 2000), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.explore'}), '(target=self.explore)\n', (1979, 2000), False, 'import threading\n'), ((2137, 2147), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2145, 2147), False, 'import sys\n')] |
# MIT License
#
# Copyright (c) 2021 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
from problem_instance import Problem
class ProblemPrinter:
def __init__(self, fixture_order_raw, duration_rnd_seed, **kwargs):
self.p = Problem(fixture_order_raw = fixture_order_raw, **kwargs)
self.fixture_order_raw = fixture_order_raw
self.no_grip = None
self.no_suction = None
self.duration_rnd_seed = duration_rnd_seed
def GetFixtureOrderString(self):
s = ""
current = ""
for i in range(len(self.fixture_order_raw)):
if self.fixture_order_raw[i] == 0:
current = "G"
elif self.fixture_order_raw[i] == 1:
current = "S"
elif self.fixture_order_raw[i] == -1:
current = "_"
else:
current = "ERROR"
s = s + current
return s
def GetNoComp(self):
return self.p.no_grip + self.p.no_suction
def FilePrint(self, file_name_prefix):
"""
Printing File Header
"""
setup_file_name = file_name_prefix + str(self.GetNoComp()) + "_" + self.GetFixtureOrderString() + ".dzn"
file1 = open(setup_file_name,"w")
file1.write("%-----------------------------------------------------------------------------%\n")
file1.write("% Dual Arm Multi Capacity Multi Tool Scheduling / Routing\n")
file1.write("% Assembly Configuration\n")
file1.write("% Auto Generated by python script, authored by <NAME> \n")
file1.write("%-----------------------------------------------------------------------------%\n\n\n")
"""
Printing durations
"""
self.p.RandomizeTaskDurations(self.duration_rnd_seed)
file1.write("task_durations = ")
file1.write(self.p.GetDurationsOfTasksString(str_offset=len("task_durations = ")+1))
file1.write('\n\n\n')
"""
Printing task sets
"""
file1.write("TRAY_TASKS = " + self.p.TrayTasksToString() + ";\n")
file1.write("CAMERA_TASKS = " + self.p.CameraTasksToString() + ";\n")
file1.write("OUTPUT_TASKS = " + self.p.OutputTasksToString() + ";\n")
file1.write('\n\n')
file1.write("empty_gripper_tasks = " + self.p.PressTasksToString() + ";\n")
file1.write('\n\n')
"""
Printing Tool Pick and Place- orders
"""
        # TODO: the last row does not seem to have a press - which it does not, since it is output.
# However, we assume it does!
# Fix!
file1.write("gripper_pick_tasks_orders = " + self.p.GetPickTaskOrderString(0) + ";\n\n")
file1.write("suction_pick_tasks_orders = " + self.p.GetPickTaskOrderString(1) + ";\n\n")
file1.write("fixture_task_orders = " + self.p.GetFixtureTaskOrderString() + ";\n\n")
file1.close()
return setup_file_name
| [
"problem_instance.Problem"
] | [((641, 695), 'problem_instance.Problem', 'Problem', ([], {'fixture_order_raw': 'fixture_order_raw'}), '(fixture_order_raw=fixture_order_raw, **kwargs)\n', (648, 695), False, 'from problem_instance import Problem\n')] |
#
# Application settings
#
import os
from os.path import abspath
DEBUG = True
SECRET_KEY = 'a secret key'
# base directory
basedir = os.path.abspath(os.path.dirname(__file__))  # __file__, not __name__, gives this config file's directory
# application base directory
BASE_DIR = basedir
# connection string: mysql://user:password@host/dbname
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root:root@localhost/dbdevweb'
# SQLAlchemy will track object modifications
SQLALCHEMY_TRACK_MODIFICATIONS = True
"os.path.dirname"
] | [((158, 183), 'os.path.dirname', 'os.path.dirname', (['__name__'], {}), '(__name__)\n', (173, 183), False, 'import os\n')] |
from rest_framework import serializers, status
from django_redis import get_redis_connection
from rest_framework_jwt.settings import api_settings
import logging
import re
from .models import User
from .utils import get_user_by_account
from celery_tasks.email.tasks import send_verify_email
logger = logging.getLogger('django')
class CreateUserSerializer(serializers.ModelSerializer):
"""
创建用户序列化器
"""
password2 = serializers.CharField(label='确认密码', required=True, allow_null=False, allow_blank=False, write_only=True)
sms_code = serializers.CharField(label='短信验证码', required=True, allow_null=False, allow_blank=False, write_only=True)
allow = serializers.CharField(label='同意协议', required=True, allow_null=False, allow_blank=False, write_only=True)
    token = serializers.CharField(label='登录状态token', read_only=True)  # additional token field
def validate_mobile(self, value):
"""验证手机号"""
if not re.match(r'^1[3-9]\d{9}$', value):
raise serializers.ValidationError('手机号格式错误')
return value
def validate_allow(self, value):
"""检验用户是否同意协议"""
if value != 'true':
raise serializers.ValidationError('请同意用户协议')
return value
def validate(self, attrs):
        # check that the two passwords match
if attrs['password'] != attrs['password2']:
raise serializers.ValidationError('两次密码不一致')
        # check the SMS verification code
redis_conn = get_redis_connection('verify_codes')
mobile = attrs['mobile']
real_sms_code = redis_conn.get('sms_%s' % mobile)
if real_sms_code is None:
raise serializers.ValidationError('无效的短信验证码')
if attrs['sms_code'] != real_sms_code.decode():
raise serializers.ValidationError('短信验证码错误')
return attrs
def create(self, validated_data):
"""
创建用户
"""
        # remove attributes that do not exist on the database model
del validated_data['password2']
del validated_data['sms_code']
del validated_data['allow']
user = super().create(validated_data)
        # use Django's auth system to hash the password
user.set_password(validated_data['password'])
user.save()
        # additionally generate a token recording the login state
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
payload = jwt_payload_handler(user)
token = jwt_encode_handler(payload)
user.token = token
return user
class Meta:
model = User
        # this serializer is used for both input and output, so it must include all fields involved
        fields = ('id', 'username', 'password', 'password2', 'sms_code', 'mobile', 'allow', 'token')
extra_kwargs = {
            'id': {'read_only': True},  # read_only=True: the field is only used for output
'username': {
'min_length': 5,
'max_length': 20,
'error_messages': {
'min_length': '仅允许5-20个字符的用户名',
'max_length': '仅允许5-20个字符的用户名',
}
},
'password': {
'write_only': True,
'min_length': 8,
'max_length': 20,
'error_messages': {
'min_length': '仅允许8-20个字符的密码',
'max_length': '仅允许8-20个字符的密码',
}
}
}
class CheckSMSCodeSerializer(serializers.Serializer):
"""
检查sms code
"""
sms_code = serializers.CharField(min_length=6, max_length=6)
def validate_sms_code(self, value):
account = self.context['view'].kwargs['account']
# 获取user
user = get_user_by_account(account)
if user is None:
raise serializers.ValidationError('用户不存在')
self.user = user
        # fetch the real verification code from redis
redis_conn = get_redis_connection('verify_codes')
real_sms_code = redis_conn.get('sms_%s' % user.mobile)
if real_sms_code is None:
            raise serializers.ValidationError('无效的短信验证码')
if value != real_sms_code.decode():
raise serializers.ValidationError('短信验证码错误')
return value
class ResetPasswordSerializer(serializers.ModelSerializer):
password2 = serializers.CharField(label='确认密码', write_only=True)
access_token = serializers.CharField(label='操作token', write_only=True)
class Meta:
model = User
fields = ('id', 'password', 'password2', 'access_token')
extra_kwargs = {
'password': {
'write_only': True,
'min_length': 8,
'max_length': 20,
'error_messages': {
'min_length': '仅允许8-20个字符的密码',
'max_length': '仅允许8-20个字符的密码',
}
}
}
def validate(self, attrs):
"""
校验数据
"""
if attrs['password'] != attrs['password2']:
raise serializers.ValidationError('两次密码不一致')
allow = User.check_set_password_token(self.context['view'].kwargs['pk'], attrs['access_token'])
if not allow:
raise serializers.ValidationError('无效的access token')
return attrs
def update(self, instance, validated_data):
"""
更新密码
"""
instance.set_password(validated_data['password'])
instance.save()
return instance
class UserDetailSerializer(serializers.ModelSerializer):
"""
用户详细信息序列化器
"""
class Meta:
model = User
fields = ['id', 'username', 'mobile', 'email', 'email_active']
class EmailSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'email')
extra_kwargs = {
'email': {
'required': True
}
}
def update(self, instance, validated_data):
email = validated_data['email']
instance.email = email
instance.save()
        # generate the email verification link
        verify_url = instance.generate_verify_email_url()
        # send the verification email
send_verify_email.delay(email, verify_url)
return instance
| [
"celery_tasks.email.tasks.send_verify_email.delay",
"re.match",
"rest_framework.serializers.CharField",
"django_redis.get_redis_connection",
"logging.getLogger",
"rest_framework.serializers.ValidationError"
] | [((301, 328), 'logging.getLogger', 'logging.getLogger', (['"""django"""'], {}), "('django')\n", (318, 328), False, 'import logging\n'), ((433, 541), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""确认密码"""', 'required': '(True)', 'allow_null': '(False)', 'allow_blank': '(False)', 'write_only': '(True)'}), "(label='确认密码', required=True, allow_null=False,\n allow_blank=False, write_only=True)\n", (454, 541), False, 'from rest_framework import serializers, status\n'), ((553, 662), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""短信验证码"""', 'required': '(True)', 'allow_null': '(False)', 'allow_blank': '(False)', 'write_only': '(True)'}), "(label='短信验证码', required=True, allow_null=False,\n allow_blank=False, write_only=True)\n", (574, 662), False, 'from rest_framework import serializers, status\n'), ((671, 779), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""同意协议"""', 'required': '(True)', 'allow_null': '(False)', 'allow_blank': '(False)', 'write_only': '(True)'}), "(label='同意协议', required=True, allow_null=False,\n allow_blank=False, write_only=True)\n", (692, 779), False, 'from rest_framework import serializers, status\n'), ((788, 844), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""登录状态token"""', 'read_only': '(True)'}), "(label='登录状态token', read_only=True)\n", (809, 844), False, 'from rest_framework import serializers, status\n'), ((3368, 3417), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'min_length': '(6)', 'max_length': '(6)'}), '(min_length=6, max_length=6)\n', (3389, 3417), False, 'from rest_framework import serializers, status\n'), ((4126, 4178), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""确认密码"""', 'write_only': '(True)'}), "(label='确认密码', write_only=True)\n", (4147, 4178), False, 'from rest_framework import serializers, status\n'), ((4198, 4253), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""操作token"""', 'write_only': '(True)'}), "(label='操作token', write_only=True)\n", (4219, 4253), False, 'from rest_framework import serializers, status\n'), ((1412, 1448), 'django_redis.get_redis_connection', 'get_redis_connection', (['"""verify_codes"""'], {}), "('verify_codes')\n", (1432, 1448), False, 'from django_redis import get_redis_connection\n'), ((3731, 3767), 'django_redis.get_redis_connection', 'get_redis_connection', (['"""verify_codes"""'], {}), "('verify_codes')\n", (3751, 3767), False, 'from django_redis import get_redis_connection\n'), ((5948, 5990), 'celery_tasks.email.tasks.send_verify_email.delay', 'send_verify_email.delay', (['email', 'verify_url'], {}), '(email, verify_url)\n', (5971, 5990), False, 'from celery_tasks.email.tasks import send_verify_email\n'), ((932, 965), 're.match', 're.match', (['"""^1[3-9]\\\\d{9}$"""', 'value'], {}), "('^1[3-9]\\\\d{9}$', value)\n", (940, 965), False, 'import re\n'), ((985, 1023), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""手机号格式错误"""'], {}), "('手机号格式错误')\n", (1012, 1023), False, 'from rest_framework import serializers, status\n'), ((1154, 1192), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""请同意用户协议"""'], {}), "('请同意用户协议')\n", (1181, 1192), False, 'from rest_framework import serializers, status\n'), ((1333, 1371), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""两次密码不一致"""'], {}), 
"('两次密码不一致')\n", (1360, 1371), False, 'from rest_framework import serializers, status\n'), ((1592, 1631), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""无效的短信验证码"""'], {}), "('无效的短信验证码')\n", (1619, 1631), False, 'from rest_framework import serializers, status\n'), ((1706, 1744), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""短信验证码错误"""'], {}), "('短信验证码错误')\n", (1733, 1744), False, 'from rest_framework import serializers, status\n'), ((3620, 3656), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""用户不存在"""'], {}), "('用户不存在')\n", (3647, 3656), False, 'from rest_framework import serializers, status\n'), ((3884, 3923), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""无效的短信验证码"""'], {}), "('无效的短信验证码')\n", (3911, 3923), False, 'from rest_framework import serializers, status\n'), ((3986, 4024), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""短信验证码错误"""'], {}), "('短信验证码错误')\n", (4013, 4024), False, 'from rest_framework import serializers, status\n'), ((4830, 4868), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""两次密码不一致"""'], {}), "('两次密码不一致')\n", (4857, 4868), False, 'from rest_framework import serializers, status\n'), ((5014, 5060), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""无效的access token"""'], {}), "('无效的access token')\n", (5041, 5060), False, 'from rest_framework import serializers, status\n')] |
import os
from flask import Flask
from MrLing import ling_blueprint
from MrBasicWff import wff_blueprint
app = Flask(__name__)
app.config['SECRET_KEY'] = 'random string'
app.debug = True
app.register_blueprint(ling_blueprint)
app.register_blueprint(wff_blueprint)
@app.route("/")
def home():
return '<p><a href="/MrLing">Ling</a></p><p><a href="/MrWff">Wff</a></p>'
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=int(os.getenv('PORT', 5000)))
| [
"flask.Flask",
"os.getenv"
] | [((112, 127), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (117, 127), False, 'from flask import Flask\n'), ((442, 465), 'os.getenv', 'os.getenv', (['"""PORT"""', '(5000)'], {}), "('PORT', 5000)\n", (451, 465), False, 'import os\n')] |
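A minimal sketch (assumption, not from the source) of what a blueprint module such as MrLing could look like; only the name ling_blueprint is taken from the import above, everything else is illustrative:

from flask import Blueprint

ling_blueprint = Blueprint('MrLing', __name__, url_prefix='/MrLing')

@ling_blueprint.route('/')
def index():
    return '<p>Hello from MrLing</p>'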
# Copyright (c) 2018 <NAME>
#
# Distributed under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
from phylanx import Phylanx
import numpy as np
@Phylanx
def foo(x, y):
return [x, y]
x = np.array([1, 2])
y = np.array([3, 4])
result = foo(x, y)
assert((result[0] == x).all() and (result[1] == y).all())
| [
"numpy.array"
] | [((282, 298), 'numpy.array', 'np.array', (['[1, 2]'], {}), '([1, 2])\n', (290, 298), True, 'import numpy as np\n'), ((303, 319), 'numpy.array', 'np.array', (['[3, 4]'], {}), '([3, 4])\n', (311, 319), True, 'import numpy as np\n')] |
from datetime import timedelta
from django.core.management import call_command
from django.test import TestCase
from django.utils import timezone
from django_future.jobs import schedule_job
from django_future.models import ScheduledJob
class RunScheduledJobsCommandTest(TestCase):
def setUp(self):
self.schedule_at = timezone.now() - timedelta(days=1)
self.jobs = [
schedule_job(self.schedule_at, 'math.pow', args=(2, 3)),
schedule_job(self.schedule_at, 'math.pow', args=(5, 2))
]
def test_cmd_noargs(self):
"""
Test invocation of command with no arguments. Ensure the scheduled jobs
are marked as completed.
"""
self.assertEqual(
2,
ScheduledJob.objects.filter(
status=ScheduledJob.STATUS_SCHEDULED).count()
)
call_command('runscheduledjobs')
self.assertEqual(
2,
ScheduledJob.objects.filter(
status=ScheduledJob.STATUS_COMPLETE).count()
)
def test_cmd_delete_completed(self):
"""
Test invocation of command with '-d' argument to delete completed jobs.
Ensure the scheduled jobs are removed after.
"""
self.assertEqual(
2,
ScheduledJob.objects.filter(
status=ScheduledJob.STATUS_SCHEDULED).count()
)
call_command('runscheduledjobs', '-d')
self.assertEqual(0, ScheduledJob.objects.count())
def test_cmd_ignore_errors(self):
"""
Test invocation of command with '-i' argument to keep processing jobs
even if a job fails. Ensure the non-failing jobs are marked as
completed and the error job is marked as failed.
"""
schedule_at = self.schedule_at - timedelta(days=1)
error_job = schedule_job(schedule_at, 'math.funky_error')
self.assertEqual(
3,
ScheduledJob.objects.filter(
status=ScheduledJob.STATUS_SCHEDULED).count()
)
call_command('runscheduledjobs', '-i')
error_job.refresh_from_db()
self.assertEqual(error_job.status, ScheduledJob.STATUS_FAILED)
self.assertEqual(
2,
ScheduledJob.objects.filter(
status=ScheduledJob.STATUS_COMPLETE).count()
)
| [
"django.utils.timezone.now",
"django.core.management.call_command",
"datetime.timedelta",
"django_future.models.ScheduledJob.objects.filter",
"django_future.models.ScheduledJob.objects.count",
"django_future.jobs.schedule_job"
] | [((870, 902), 'django.core.management.call_command', 'call_command', (['"""runscheduledjobs"""'], {}), "('runscheduledjobs')\n", (882, 902), False, 'from django.core.management import call_command\n'), ((1419, 1457), 'django.core.management.call_command', 'call_command', (['"""runscheduledjobs"""', '"""-d"""'], {}), "('runscheduledjobs', '-d')\n", (1431, 1457), False, 'from django.core.management import call_command\n'), ((1865, 1910), 'django_future.jobs.schedule_job', 'schedule_job', (['schedule_at', '"""math.funky_error"""'], {}), "(schedule_at, 'math.funky_error')\n", (1877, 1910), False, 'from django_future.jobs import schedule_job\n'), ((2075, 2113), 'django.core.management.call_command', 'call_command', (['"""runscheduledjobs"""', '"""-i"""'], {}), "('runscheduledjobs', '-i')\n", (2087, 2113), False, 'from django.core.management import call_command\n'), ((333, 347), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (345, 347), False, 'from django.utils import timezone\n'), ((350, 367), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (359, 367), False, 'from datetime import timedelta\n'), ((403, 458), 'django_future.jobs.schedule_job', 'schedule_job', (['self.schedule_at', '"""math.pow"""'], {'args': '(2, 3)'}), "(self.schedule_at, 'math.pow', args=(2, 3))\n", (415, 458), False, 'from django_future.jobs import schedule_job\n'), ((472, 527), 'django_future.jobs.schedule_job', 'schedule_job', (['self.schedule_at', '"""math.pow"""'], {'args': '(5, 2)'}), "(self.schedule_at, 'math.pow', args=(5, 2))\n", (484, 527), False, 'from django_future.jobs import schedule_job\n'), ((1487, 1515), 'django_future.models.ScheduledJob.objects.count', 'ScheduledJob.objects.count', ([], {}), '()\n', (1513, 1515), False, 'from django_future.models import ScheduledJob\n'), ((1827, 1844), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1836, 1844), False, 'from datetime import timedelta\n'), ((760, 825), 'django_future.models.ScheduledJob.objects.filter', 'ScheduledJob.objects.filter', ([], {'status': 'ScheduledJob.STATUS_SCHEDULED'}), '(status=ScheduledJob.STATUS_SCHEDULED)\n', (787, 825), False, 'from django_future.models import ScheduledJob\n'), ((957, 1021), 'django_future.models.ScheduledJob.objects.filter', 'ScheduledJob.objects.filter', ([], {'status': 'ScheduledJob.STATUS_COMPLETE'}), '(status=ScheduledJob.STATUS_COMPLETE)\n', (984, 1021), False, 'from django_future.models import ScheduledJob\n'), ((1309, 1374), 'django_future.models.ScheduledJob.objects.filter', 'ScheduledJob.objects.filter', ([], {'status': 'ScheduledJob.STATUS_SCHEDULED'}), '(status=ScheduledJob.STATUS_SCHEDULED)\n', (1336, 1374), False, 'from django_future.models import ScheduledJob\n'), ((1965, 2030), 'django_future.models.ScheduledJob.objects.filter', 'ScheduledJob.objects.filter', ([], {'status': 'ScheduledJob.STATUS_SCHEDULED'}), '(status=ScheduledJob.STATUS_SCHEDULED)\n', (1992, 2030), False, 'from django_future.models import ScheduledJob\n'), ((2276, 2340), 'django_future.models.ScheduledJob.objects.filter', 'ScheduledJob.objects.filter', ([], {'status': 'ScheduledJob.STATUS_COMPLETE'}), '(status=ScheduledJob.STATUS_COMPLETE)\n', (2303, 2340), False, 'from django_future.models import ScheduledJob\n')] |
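For reference, the shell commands these tests exercise (a hedged note; the flag names are taken from the call_command invocations above):

# python manage.py runscheduledjobs        -> run due jobs and mark them complete
# python manage.py runscheduledjobs -d     -> additionally delete completed jobs
# python manage.py runscheduledjobs -i     -> keep processing even when a job fails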
# Copyright 2020 Open Climate Tech Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Training code using Keras for TF2
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
from firecam.lib import settings
from firecam.lib import collect_args
from firecam.lib import goog_helper
from firecam.lib import tf_helper
import glob
import tensorflow as tf
from tensorflow import keras
import logging
import datetime
def _parse_function(example_proto):
"""
Function for converting TFRecordDataset to uncompressed image pixels + labels
:return:
"""
feature_description = {
'image/class/label': tf.io.FixedLenFeature([], tf.int64, default_value=0),
'image/encoded': tf.io.FixedLenFeature([], tf.string, default_value=''),
'image/format': tf.io.FixedLenFeature([], tf.string, default_value=''),
'image/height': tf.io.FixedLenFeature([], tf.int64, default_value=0),
'image/width': tf.io.FixedLenFeature([], tf.int64, default_value=0),
}
# Parse the input `tf.Example` proto using the dictionary above.
example = tf.io.parse_single_example(example_proto, feature_description)
image = tf.image.decode_jpeg(example['image/encoded'], channels=3, dct_method='INTEGER_ACCURATE')
    #Resizing images in training set because they are apparently rectangular much of the time
    if example['image/height'] != 299 or example['image/width'] != 299:
        image = tf.image.resize(tf.reshape(image, [example['image/height'], example['image/width'], 3]), [299, 299])
        image = tf.cast(image, tf.uint8)
    image = tf.reshape(image, [299, 299, 3])  # workaround because decode_jpeg doesn't set a static shape
label = tf.one_hot(example['image/class/label'], depth=2)
image = (tf.cast(image, tf.float32) - 128) / 128.0
return [image, label]
class LRTensorBoard(keras.callbacks.TensorBoard):
    def __init__(self, log_dir, **kwargs):  # add other arguments to __init__ if you need them
super().__init__(log_dir=log_dir, **kwargs)
def on_epoch_end(self, epoch, logs=None):
logs = logs or {}
logs.update({'lr': keras.backend.eval(self.model.optimizer.lr)})
super().on_epoch_end(epoch, logs)
def main():
reqArgs = [
["i", "inputDir", "directory containing TFRecord files"],
["o", "outputDir", "directory to write out checkpoints and tensorboard logs"],
["a", "algorithm", "adam, nadam, or rmsprop"],
]
optArgs = [
["m", "maxEpochs", "(optional) max number of epochs (default 1000)", int],
["r", "resumeModel", "resume training from given saved model"],
["s", "startEpoch", "epoch to resume from (epoch from resumeModel)"],
["t", "stepsPerEpoch", "(optional) number of steps per epoch", int],
["v", "valStepsPerEpoch", "(optional) number of validation steps per epoch", int],
]
args = collect_args.collectArgs(reqArgs, optionalArgs=optArgs, parentParsers=[goog_helper.getParentParser()])
batch_size = 64
max_epochs = args.maxEpochs if args.maxEpochs else 1000
steps_per_epoch = args.stepsPerEpoch if args.stepsPerEpoch else 2000
    overshoot_epochs = 30  # number of epochs without a decrease in validation loss before training stops
val_steps = args.valStepsPerEpoch if args.valStepsPerEpoch else 200
    #val_steps only needed for now because of a bug in tf2.0, which should be fixed in the next version
    #TODO: either set this to # of validation examples / batch size (i.e. figure out num validation examples)
    #or upgrade to TF2.1 when it's ready and automatically go through the whole set
train_filenames = glob.glob(os.path.join(args.inputDir, 'firecam_train_*.tfrecord'))
val_filenames = glob.glob(os.path.join(args.inputDir, 'firecam_validation_*.tfrecord'))
logging.warning('Found %d training files, and %d validation files', len(train_filenames), len(val_filenames))
if (len(train_filenames) == 0) or (len(val_filenames) == 0):
logging.error('Could not find data in %s', args.inputDir)
exit(1)
raw_dataset_train = tf.data.TFRecordDataset(train_filenames)
raw_dataset_val = tf.data.TFRecordDataset(val_filenames)
dataset_train = raw_dataset_train.map(_parse_function).repeat(max_epochs * steps_per_epoch).shuffle(batch_size * 5).batch(batch_size)
dataset_val = raw_dataset_val.map(_parse_function).repeat().batch(batch_size)
if args.resumeModel:
inception = tf_helper.loadModel(args.resumeModel)
assert int(args.startEpoch) > 0
initial_epoch = int(args.startEpoch)
else:
inception = keras.applications.inception_v3.InceptionV3(weights=None, include_top=True, input_tensor=None,
classes=2)
initial_epoch = 0
if args.algorithm == "adam":
# optimizer = tf.keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, amsgrad=False)
optimizer = tf.keras.optimizers.Adam(decay=1e-06, amsgrad=True)
elif args.algorithm == "nadam":
optimizer = tf.keras.optimizers.Nadam()
elif args.algorithm == "rmsprop":
optimizer = tf.keras.optimizers.RMSprop(decay=1e-06)
else:
logging.error('Unsupported algo %s', args.algorithm)
exit(1)
inception.compile(optimizer=optimizer, loss=tf.keras.losses.BinaryCrossentropy(), metrics=['accuracy'])
logdir = os.path.join(args.outputDir, datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
callbacks = [keras.callbacks.EarlyStopping(monitor='val_loss', patience=overshoot_epochs),
keras.callbacks.ModelCheckpoint(filepath=os.path.join(args.outputDir, 'model_{epoch}'),
monitor='val_loss', save_best_only=True),
LRTensorBoard(log_dir=logdir)]
logging.warning('Start training')
inception.fit(dataset_train, validation_data=dataset_val,
epochs=max_epochs, initial_epoch=initial_epoch,
steps_per_epoch=steps_per_epoch, validation_steps=val_steps,
callbacks=callbacks)
logging.warning('Done training')
if __name__ == "__main__":
main()
| [
"tensorflow.reshape",
"os.path.join",
"tensorflow.keras.optimizers.RMSprop",
"tensorflow.keras.callbacks.EarlyStopping",
"logging.error",
"tensorflow.one_hot",
"logging.warning",
"tensorflow.cast",
"tensorflow.keras.optimizers.Adam",
"datetime.datetime.now",
"firecam.lib.tf_helper.loadModel",
"tensorflow.keras.optimizers.Nadam",
"tensorflow.data.TFRecordDataset",
"tensorflow.keras.applications.inception_v3.InceptionV3",
"tensorflow.io.parse_single_example",
"tensorflow.keras.losses.BinaryCrossentropy",
"tensorflow.keras.backend.eval",
"tensorflow.io.FixedLenFeature",
"firecam.lib.goog_helper.getParentParser",
"tensorflow.image.decode_jpeg"
] | [((1733, 1795), 'tensorflow.io.parse_single_example', 'tf.io.parse_single_example', (['example_proto', 'feature_description'], {}), '(example_proto, feature_description)\n', (1759, 1795), True, 'import tensorflow as tf\n'), ((1808, 1902), 'tensorflow.image.decode_jpeg', 'tf.image.decode_jpeg', (["example['image/encoded']"], {'channels': '(3)', 'dct_method': '"""INTEGER_ACCURATE"""'}), "(example['image/encoded'], channels=3, dct_method=\n 'INTEGER_ACCURATE')\n", (1828, 1902), True, 'import tensorflow as tf\n'), ((2235, 2267), 'tensorflow.reshape', 'tf.reshape', (['image', '[299, 299, 3]'], {}), '(image, [299, 299, 3])\n', (2245, 2267), True, 'import tensorflow as tf\n'), ((2336, 2385), 'tensorflow.one_hot', 'tf.one_hot', (["example['image/class/label']"], {'depth': '(2)'}), "(example['image/class/label'], depth=2)\n", (2346, 2385), True, 'import tensorflow as tf\n'), ((4725, 4765), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', (['train_filenames'], {}), '(train_filenames)\n', (4748, 4765), True, 'import tensorflow as tf\n'), ((4788, 4826), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', (['val_filenames'], {}), '(val_filenames)\n', (4811, 4826), True, 'import tensorflow as tf\n'), ((6474, 6507), 'logging.warning', 'logging.warning', (['"""Start training"""'], {}), "('Start training')\n", (6489, 6507), False, 'import logging\n'), ((6758, 6790), 'logging.warning', 'logging.warning', (['"""Done training"""'], {}), "('Done training')\n", (6773, 6790), False, 'import logging\n'), ((1289, 1341), 'tensorflow.io.FixedLenFeature', 'tf.io.FixedLenFeature', (['[]', 'tf.int64'], {'default_value': '(0)'}), '([], tf.int64, default_value=0)\n', (1310, 1341), True, 'import tensorflow as tf\n'), ((1364, 1418), 'tensorflow.io.FixedLenFeature', 'tf.io.FixedLenFeature', (['[]', 'tf.string'], {'default_value': '""""""'}), "([], tf.string, default_value='')\n", (1385, 1418), True, 'import tensorflow as tf\n'), ((1440, 1494), 'tensorflow.io.FixedLenFeature', 'tf.io.FixedLenFeature', (['[]', 'tf.string'], {'default_value': '""""""'}), "([], tf.string, default_value='')\n", (1461, 1494), True, 'import tensorflow as tf\n'), ((1516, 1568), 'tensorflow.io.FixedLenFeature', 'tf.io.FixedLenFeature', (['[]', 'tf.int64'], {'default_value': '(0)'}), '([], tf.int64, default_value=0)\n', (1537, 1568), True, 'import tensorflow as tf\n'), ((1589, 1641), 'tensorflow.io.FixedLenFeature', 'tf.io.FixedLenFeature', (['[]', 'tf.int64'], {'default_value': '(0)'}), '([], tf.int64, default_value=0)\n', (1610, 1641), True, 'import tensorflow as tf\n'), ((2197, 2221), 'tensorflow.cast', 'tf.cast', (['image', 'tf.uint8'], {}), '(image, tf.uint8)\n', (2204, 2221), True, 'import tensorflow as tf\n'), ((4290, 4345), 'os.path.join', 'os.path.join', (['args.inputDir', '"""firecam_train_*.tfrecord"""'], {}), "(args.inputDir, 'firecam_train_*.tfrecord')\n", (4302, 4345), False, 'import os\n'), ((4377, 4437), 'os.path.join', 'os.path.join', (['args.inputDir', '"""firecam_validation_*.tfrecord"""'], {}), "(args.inputDir, 'firecam_validation_*.tfrecord')\n", (4389, 4437), False, 'import os\n'), ((4626, 4683), 'logging.error', 'logging.error', (['"""Could not find data in %s"""', 'args.inputDir'], {}), "('Could not find data in %s', args.inputDir)\n", (4639, 4683), False, 'import logging\n'), ((5094, 5131), 'firecam.lib.tf_helper.loadModel', 'tf_helper.loadModel', (['args.resumeModel'], {}), '(args.resumeModel)\n', (5113, 5131), False, 'from firecam.lib import tf_helper\n'), ((5247, 5356), 
'tensorflow.keras.applications.inception_v3.InceptionV3', 'keras.applications.inception_v3.InceptionV3', ([], {'weights': 'None', 'include_top': '(True)', 'input_tensor': 'None', 'classes': '(2)'}), '(weights=None, include_top=True,\n input_tensor=None, classes=2)\n', (5290, 5356), False, 'from tensorflow import keras\n'), ((5605, 5656), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', ([], {'decay': '(1e-06)', 'amsgrad': '(True)'}), '(decay=1e-06, amsgrad=True)\n', (5629, 5656), True, 'import tensorflow as tf\n'), ((6147, 6223), 'tensorflow.keras.callbacks.EarlyStopping', 'keras.callbacks.EarlyStopping', ([], {'monitor': '"""val_loss"""', 'patience': 'overshoot_epochs'}), "(monitor='val_loss', patience=overshoot_epochs)\n", (6176, 6223), False, 'from tensorflow import keras\n'), ((2096, 2167), 'tensorflow.reshape', 'tf.reshape', (['image', "[example['image/height'], example['image/width'], 3]"], {}), "(image, [example['image/height'], example['image/width'], 3])\n", (2106, 2167), True, 'import tensorflow as tf\n'), ((2400, 2426), 'tensorflow.cast', 'tf.cast', (['image', 'tf.float32'], {}), '(image, tf.float32)\n', (2407, 2426), True, 'import tensorflow as tf\n'), ((5713, 5740), 'tensorflow.keras.optimizers.Nadam', 'tf.keras.optimizers.Nadam', ([], {}), '()\n', (5738, 5740), True, 'import tensorflow as tf\n'), ((5976, 6012), 'tensorflow.keras.losses.BinaryCrossentropy', 'tf.keras.losses.BinaryCrossentropy', ([], {}), '()\n', (6010, 6012), True, 'import tensorflow as tf\n'), ((2762, 2805), 'tensorflow.keras.backend.eval', 'keras.backend.eval', (['self.model.optimizer.lr'], {}), '(self.model.optimizer.lr)\n', (2780, 2805), False, 'from tensorflow import keras\n'), ((3600, 3629), 'firecam.lib.goog_helper.getParentParser', 'goog_helper.getParentParser', ([], {}), '()\n', (3627, 3629), False, 'from firecam.lib import goog_helper\n'), ((5799, 5839), 'tensorflow.keras.optimizers.RMSprop', 'tf.keras.optimizers.RMSprop', ([], {'decay': '(1e-06)'}), '(decay=1e-06)\n', (5826, 5839), True, 'import tensorflow as tf\n'), ((5858, 5910), 'logging.error', 'logging.error', (['"""Unsupported algo %s"""', 'args.algorithm'], {}), "('Unsupported algo %s', args.algorithm)\n", (5871, 5910), False, 'import logging\n'), ((6079, 6102), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6100, 6102), False, 'import datetime\n'), ((6283, 6328), 'os.path.join', 'os.path.join', (['args.outputDir', '"""model_{epoch}"""'], {}), "(args.outputDir, 'model_{epoch}')\n", (6295, 6328), False, 'import os\n')] |
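A hypothetical writer-side sketch (not part of the source) that produces records in the exact layout _parse_function above expects; the feature keys mirror feature_description:

import tensorflow as tf

def make_example(jpeg_bytes: bytes, label: int, height: int, width: int) -> bytes:
    """Serialize one image into the TFRecord layout read by _parse_function (illustrative)."""
    feature = {
        'image/class/label': tf.train.Feature(int64_list=tf.train.Int64List(value=[label])),
        'image/encoded': tf.train.Feature(bytes_list=tf.train.BytesList(value=[jpeg_bytes])),
        'image/format': tf.train.Feature(bytes_list=tf.train.BytesList(value=[b'jpeg'])),
        'image/height': tf.train.Feature(int64_list=tf.train.Int64List(value=[height])),
        'image/width': tf.train.Feature(int64_list=tf.train.Int64List(value=[width])),
    }
    return tf.train.Example(features=tf.train.Features(feature=feature)).SerializeToString()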
# Generated by Django 3.0.9 on 2020-08-23 12:27
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('home', '0007_auto_20200823_1340'),
]
operations = [
migrations.DeleteModel(
name='addPost',
),
]
| [
"django.db.migrations.DeleteModel"
] | [((224, 262), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""addPost"""'}), "(name='addPost')\n", (246, 262), False, 'from django.db import migrations\n')] |
SI = lambda : input()
from collections import Counter
n = int(input())
a = SI()
b = SI()
def solve(n,a,b):
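    # A solution exists iff every character occurs an even number of times
    # across the two strings combined.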
d = Counter(a)+Counter(b)
for i in d:
if(d[i]&1):
print(-1)
return
newa = []
newb = []
for i in range(n):
if(a[i]!=b[i]):
newa.append((a[i],i))
newb.append((b[i],i))
a,b = newa,newb
aux = len(a)
if(aux==0):
print(0)
return
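    # If the count of mismatched positions holding 'a' on the left side is odd,
    # one extra swap of a[i] with b[i] at the first mismatch makes it even,
    # so the remaining positions can be paired up two at a time.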
canta = 0
for i in a:
if(i[0]=='a'):
canta+=1
if(canta&1):
print(len(a)//2+1)
print(a[0][1]+1,a[0][1]+1)
a[0],b[0] = b[0],a[0]
else:
print(len(a)//2)
lastA,lastB = -1,-1
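    # Pair equal left-side characters: swapping two 'a's (or two 'b's) that sit
    # at positions i and j across the strings fixes both mismatches in one move.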
for i in range(aux):
if(a[i][0]=='a'):
if(lastA==-1):
lastA=a[i][1]
else:
print(lastA+1,a[i][1]+1)
lastA=-1
else:
if(lastB==-1):
lastB=a[i][1]
else:
print(lastB+1,a[i][1]+1)
lastB=-1
solve(n,a,b)
| [
"collections.Counter"
] | [((114, 124), 'collections.Counter', 'Counter', (['a'], {}), '(a)\n', (121, 124), False, 'from collections import Counter\n'), ((125, 135), 'collections.Counter', 'Counter', (['b'], {}), '(b)\n', (132, 135), False, 'from collections import Counter\n')] |
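A worked illustration (input chosen by me, not from the source):

solve(2, 'ab', 'ba')
# prints:
# 2
# 1 1   (swap a[0] with b[0]: a="bb", b="aa")
# 1 2   (swap a[0] with b[1]: a="ab", b="ab" -- the strings are now equal)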
import datetime
import discord
import pytz
from necrobot.botbase import server, discordutil
from necrobot.database import matchdb, racedb
from necrobot.util import console, timestr, writechannel, strutil, rtmputil
from necrobot.botbase.necrobot import Necrobot
from necrobot.gsheet.matchgsheetinfo import MatchGSheetInfo
from necrobot.match.match import Match
from necrobot.match.matchinfo import MatchInfo
from necrobot.match.matchroom import MatchRoom
from necrobot.race.raceinfo import RaceInfo
from necrobot.user.necrouser import NecroUser
from necrobot.config import Config
match_library = {}
# noinspection PyIncorrectDocstring
async def make_match(*args, register=False, **kwargs) -> Match:
"""Create a Match object. There should be no need to call this directly; use matchutil.make_match instead,
since this needs to interact with the database.
Parameters
----------
racer_1_id: int
The DB user ID of the first racer.
racer_2_id: int
The DB user ID of the second racer.
max_races: int
The maximum number of races this match can be. (If is_best_of is True, then the match is a best of
max_races; otherwise, the match is just repeating max_races.)
match_id: int
The DB unique ID of this match.
suggested_time: datetime.datetime
The time the match is suggested for. If no tzinfo, UTC is assumed.
r1_confirmed: bool
Whether the first racer has confirmed the match time.
r2_confirmed: bool
Whether the second racer has confirmed the match time.
r1_unconfirmed: bool
Whether the first racer wishes to unconfirm the match time.
r2_unconfirmed: bool
Whether the second racer wishes to unconfirm the match time.
match_info: MatchInfo
The types of races to be run in this match.
cawmentator_id: int
The DB unique ID of the cawmentator for this match.
sheet_id: int
The sheetID of the worksheet the match was created from, if any.
register: bool
Whether to register the match in the database.
Returns
---------
Match
The created match.
"""
if 'match_id' in kwargs and kwargs['match_id'] in match_library:
return match_library[kwargs['match_id']]
match = Match(*args, commit_fn=matchdb.write_match, **kwargs)
await match.initialize()
if register:
await match.commit()
match_library[match.match_id] = match
return match
async def get_match_from_id(match_id: int) -> Match or None:
"""Get a match object from its DB unique ID.
Parameters
----------
match_id: int
The databse ID of the match.
Returns
-------
Optional[Match]
The match found, if any.
"""
if match_id is None:
return None
if match_id in match_library:
return match_library[match_id]
raw_data = await matchdb.get_raw_match_data(match_id)
if raw_data is not None:
return await make_match_from_raw_db_data(raw_data)
else:
return None
def get_matchroom_name(match: Match) -> str:
"""Get a new unique channel name corresponding to the match.
Parameters
----------
match: Match
The match whose info determines the name.
Returns
-------
str
The name of the channel.
"""
name_prefix = match.matchroom_name
cut_length = len(name_prefix) + 1
largest_postfix = 1
found = False
for channel in server.server.channels:
if channel.name.startswith(name_prefix):
found = True
try:
val = int(channel.name[cut_length:])
largest_postfix = max(largest_postfix, val)
except ValueError:
pass
return name_prefix if not found else '{0}-{1}'.format(name_prefix, largest_postfix + 1)
async def get_upcoming_and_current() -> list:
"""
Returns
-------
list[Match]
A list of all upcoming and ongoing matches, in order.
"""
matches = []
for row in await matchdb.get_channeled_matches_raw_data(must_be_scheduled=True, order_by_time=True):
channel_id = int(row[13]) if row[13] is not None else None
if channel_id is not None:
channel = server.find_channel(channel_id=channel_id)
if channel is not None:
match = await make_match_from_raw_db_data(row=row)
if match.suggested_time is None:
console.warning('Found match object {} has no suggested time.'.format(repr(match)))
continue
if match.suggested_time > pytz.utc.localize(datetime.datetime.utcnow()):
matches.append(match)
else:
match_room = Necrobot().get_bot_channel(channel)
if match_room is not None and await match_room.during_races():
matches.append(match)
return matches
async def get_matches_with_channels(racer: NecroUser = None) -> list:
"""
Parameters
----------
racer: NecroUser
The racer to find channels for. If None, finds all channeled matches.
Returns
-------
list[Match]
A list of all Matches that have associated channels on the server featuring the specified racer.
"""
matches = []
if racer is not None:
raw_data = await matchdb.get_channeled_matches_raw_data(
must_be_scheduled=False, order_by_time=False, racer_id=racer.user_id
)
else:
raw_data = await matchdb.get_channeled_matches_raw_data(must_be_scheduled=False, order_by_time=False)
for row in raw_data:
channel_id = int(row[13])
channel = server.find_channel(channel_id=channel_id)
if channel is not None:
match = await make_match_from_raw_db_data(row=row)
matches.append(match)
else:
console.warning('Found Match with channel {0}, but couldn\'t find this channel.'.format(channel_id))
return matches
async def delete_all_match_channels(log=False, completed_only=False) -> None:
"""Delete all match channels from the server.
Parameters
----------
log: bool
If True, the channel text will be written to a log file before deletion.
completed_only: bool
If True, will only find completed matches.
"""
for row in await matchdb.get_channeled_matches_raw_data():
match_id = int(row[0])
channel_id = int(row[13])
channel = server.find_channel(channel_id=channel_id)
delete_this = True
if channel is not None:
if completed_only:
match_room = Necrobot().get_bot_channel(channel)
if match_room is None or not match_room.played_all_races:
delete_this = False
if delete_this:
if log:
await writechannel.write_channel(
client=server.client,
channel=channel,
outfile_name='{0}-{1}'.format(match_id, channel.name)
)
await server.client.delete_channel(channel)
if delete_this:
await matchdb.register_match_channel(match_id, None)
async def make_match_room(match: Match, register=False) -> MatchRoom or None:
"""Create a discord.Channel and a corresponding MatchRoom for the given Match.
Parameters
----------
match: Match
The Match to create a room for.
register: bool
If True, will register the Match in the database.
Returns
-------
Optional[MatchRoom]
The created room object.
"""
# Check to see the match is registered
if not match.is_registered:
if register:
await match.commit()
else:
console.warning('Tried to make a MatchRoom for an unregistered Match ({0}).'.format(match.matchroom_name))
return None
# Check to see if we already have the match channel
channel_id = match.channel_id
match_channel = server.find_channel(channel_id=channel_id) if channel_id is not None else None
# If we couldn't find the channel or it didn't exist, make a new one
if match_channel is None:
# Create permissions
deny_read = discord.PermissionOverwrite(read_messages=False)
permit_read = discord.PermissionOverwrite(read_messages=True)
racer_permissions = []
for racer in match.racers:
if racer.member is not None:
racer_permissions.append(discord.ChannelPermissions(target=racer.member, overwrite=permit_read))
# Make a channel for the room
# noinspection PyUnresolvedReferences
match_channel = await server.client.create_channel(
server.server,
get_matchroom_name(match),
discord.ChannelPermissions(target=server.server.default_role, overwrite=deny_read),
discord.ChannelPermissions(target=server.server.me, overwrite=permit_read),
*racer_permissions,
type=discord.ChannelType.text)
if match_channel is None:
console.warning('Failed to make a match channel.')
return None
# Put the match channel in the matches category
match_channel_category = server.find_channel(channel_name=Config.MATCH_CHANNEL_CATEGORY_NAME)
if match_channel_category is not None:
await discordutil.set_channel_category(channel=match_channel, category=match_channel_category)
# Make the actual RaceRoom and initialize it
match.set_channel_id(int(match_channel.id))
new_room = MatchRoom(match_discord_channel=match_channel, match=match)
Necrobot().register_bot_channel(match_channel, new_room)
await new_room.initialize()
return new_room
async def close_match_room(match: Match) -> None:
"""Close the discord.Channel corresponding to the Match, if any.
Parameters
----------
match: Match
The Match to close the channel for.
"""
if not match.is_registered:
console.warning('Trying to close the room for an unregistered match.')
return
channel_id = match.channel_id
channel = server.find_channel(channel_id=channel_id)
if channel is None:
        console.warning('Couldn\'t find channel with id {0} in close_match_room '
                        '(match_id={1}).'.format(channel_id, match.match_id))
return
await Necrobot().unregister_bot_channel(channel)
await server.client.delete_channel(channel)
match.set_channel_id(None)
async def get_nextrace_displaytext(match_list: list) -> str:
utcnow = pytz.utc.localize(datetime.datetime.utcnow())
if len(match_list) > 1:
display_text = 'Upcoming matches: \n'
else:
display_text = 'Next match: \n'
for match in match_list:
# noinspection PyUnresolvedReferences
display_text += '\N{BULLET} **{0}** - **{1}**'.format(
match.racer_1.display_name,
match.racer_2.display_name)
if match.suggested_time is None:
display_text += '\n'
continue
display_text += ': {0} \n'.format(timestr.timedelta_to_str(match.suggested_time - utcnow, punctuate=True))
match_cawmentator = await match.get_cawmentator()
if match_cawmentator is not None:
display_text += ' Cawmentary: <http://www.twitch.tv/{0}> \n'.format(match_cawmentator.twitch_name)
elif match.racer_1.twitch_name is not None and match.racer_2.twitch_name is not None:
display_text += ' Kadgar: {} \n'.format(
rtmputil.kadgar_link(match.racer_1.twitch_name, match.racer_2.twitch_name)
)
# display_text += ' RTMP: {} \n'.format(
# rtmputil.rtmp_link(match.racer_1.rtmp_name, match.racer_2.rtmp_name)
# )
display_text += '\nFull schedule: <https://condor.host/schedule>'
return display_text
async def delete_match(match_id: int) -> None:
await matchdb.delete_match(match_id=match_id)
if match_id in match_library:
del match_library[match_id]
async def make_match_from_raw_db_data(row: list) -> Match:
match_id = int(row[0])
if match_id in match_library:
return match_library[match_id]
match_info = MatchInfo(
race_info=await racedb.get_race_info_from_type_id(int(row[1])) if row[1] is not None else RaceInfo(),
ranked=bool(row[9]),
is_best_of=bool(row[10]),
max_races=int(row[11])
)
sheet_info = MatchGSheetInfo()
sheet_info.wks_id = row[14]
sheet_info.row = row[15]
new_match = Match(
commit_fn=matchdb.write_match,
match_id=match_id,
match_info=match_info,
racer_1_id=int(row[2]),
racer_2_id=int(row[3]),
suggested_time=row[4],
finish_time=row[16],
r1_confirmed=bool(row[5]),
r2_confirmed=bool(row[6]),
r1_unconfirmed=bool(row[7]),
r2_unconfirmed=bool(row[8]),
cawmentator_id=row[12],
channel_id=int(row[13]) if row[13] is not None else None,
gsheet_info=sheet_info
)
await new_match.initialize()
match_library[new_match.match_id] = new_match
return new_match
async def get_schedule_infotext():
utcnow = pytz.utc.localize(datetime.datetime.utcnow())
matches = await get_upcoming_and_current()
max_r1_len = 0
max_r2_len = 0
for match in matches:
max_r1_len = max(max_r1_len, len(strutil.tickless(match.racer_1.display_name)))
max_r2_len = max(max_r2_len, len(strutil.tickless(match.racer_2.display_name)))
schedule_text = '``` \nUpcoming matches: \n'
for match in matches:
if len(schedule_text) > 1800:
break
schedule_text += '{r1:>{w1}} v {r2:<{w2}} : '.format(
r1=strutil.tickless(match.racer_1.display_name),
w1=max_r1_len,
r2=strutil.tickless(match.racer_2.display_name),
w2=max_r2_len
)
if match.suggested_time - utcnow < datetime.timedelta(minutes=0):
schedule_text += 'Right now!'
else:
schedule_text += timestr.str_full_24h(match.suggested_time)
schedule_text += '\n'
schedule_text += '```'
return schedule_text
async def get_race_data(match: Match):
return await matchdb.get_match_race_data(match.match_id)
| [
"discord.ChannelPermissions",
"necrobot.database.matchdb.delete_match",
"necrobot.match.match.Match",
"necrobot.util.rtmputil.kadgar_link",
"datetime.datetime.utcnow",
"necrobot.database.matchdb.get_raw_match_data",
"necrobot.race.raceinfo.RaceInfo",
"necrobot.botbase.server.client.delete_channel",
"datetime.timedelta",
"discord.PermissionOverwrite",
"necrobot.util.timestr.str_full_24h",
"necrobot.database.matchdb.register_match_channel",
"necrobot.database.matchdb.get_channeled_matches_raw_data",
"necrobot.util.timestr.timedelta_to_str",
"necrobot.database.matchdb.get_match_race_data",
"necrobot.util.strutil.tickless",
"necrobot.util.console.warning",
"necrobot.botbase.server.find_channel",
"necrobot.match.matchroom.MatchRoom",
"necrobot.gsheet.matchgsheetinfo.MatchGSheetInfo",
"necrobot.botbase.discordutil.set_channel_category",
"necrobot.botbase.necrobot.Necrobot"
] | [((2289, 2342), 'necrobot.match.match.Match', 'Match', (['*args'], {'commit_fn': 'matchdb.write_match'}), '(*args, commit_fn=matchdb.write_match, **kwargs)\n', (2294, 2342), False, 'from necrobot.match.match import Match\n'), ((9727, 9786), 'necrobot.match.matchroom.MatchRoom', 'MatchRoom', ([], {'match_discord_channel': 'match_channel', 'match': 'match'}), '(match_discord_channel=match_channel, match=match)\n', (9736, 9786), False, 'from necrobot.match.matchroom import MatchRoom\n'), ((10301, 10343), 'necrobot.botbase.server.find_channel', 'server.find_channel', ([], {'channel_id': 'channel_id'}), '(channel_id=channel_id)\n', (10320, 10343), False, 'from necrobot.botbase import server, discordutil\n'), ((12662, 12679), 'necrobot.gsheet.matchgsheetinfo.MatchGSheetInfo', 'MatchGSheetInfo', ([], {}), '()\n', (12677, 12679), False, 'from necrobot.gsheet.matchgsheetinfo import MatchGSheetInfo\n'), ((2910, 2946), 'necrobot.database.matchdb.get_raw_match_data', 'matchdb.get_raw_match_data', (['match_id'], {}), '(match_id)\n', (2936, 2946), False, 'from necrobot.database import matchdb, racedb\n'), ((4082, 4168), 'necrobot.database.matchdb.get_channeled_matches_raw_data', 'matchdb.get_channeled_matches_raw_data', ([], {'must_be_scheduled': '(True)', 'order_by_time': '(True)'}), '(must_be_scheduled=True,\n order_by_time=True)\n', (4120, 4168), False, 'from necrobot.database import matchdb, racedb\n'), ((5753, 5795), 'necrobot.botbase.server.find_channel', 'server.find_channel', ([], {'channel_id': 'channel_id'}), '(channel_id=channel_id)\n', (5772, 5795), False, 'from necrobot.botbase import server, discordutil\n'), ((6437, 6477), 'necrobot.database.matchdb.get_channeled_matches_raw_data', 'matchdb.get_channeled_matches_raw_data', ([], {}), '()\n', (6475, 6477), False, 'from necrobot.database import matchdb, racedb\n'), ((6562, 6604), 'necrobot.botbase.server.find_channel', 'server.find_channel', ([], {'channel_id': 'channel_id'}), '(channel_id=channel_id)\n', (6581, 6604), False, 'from necrobot.botbase import server, discordutil\n'), ((8138, 8180), 'necrobot.botbase.server.find_channel', 'server.find_channel', ([], {'channel_id': 'channel_id'}), '(channel_id=channel_id)\n', (8157, 8180), False, 'from necrobot.botbase import server, discordutil\n'), ((8370, 8418), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'read_messages': '(False)'}), '(read_messages=False)\n', (8397, 8418), False, 'import discord\n'), ((8441, 8488), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'read_messages': '(True)'}), '(read_messages=True)\n', (8468, 8488), False, 'import discord\n'), ((9391, 9459), 'necrobot.botbase.server.find_channel', 'server.find_channel', ([], {'channel_name': 'Config.MATCH_CHANNEL_CATEGORY_NAME'}), '(channel_name=Config.MATCH_CHANNEL_CATEGORY_NAME)\n', (9410, 9459), False, 'from necrobot.botbase import server, discordutil\n'), ((10166, 10236), 'necrobot.util.console.warning', 'console.warning', (['"""Trying to close the room for an unregistered match."""'], {}), "('Trying to close the room for an unregistered match.')\n", (10181, 10236), False, 'from necrobot.util import console, timestr, writechannel, strutil, rtmputil\n'), ((10606, 10643), 'necrobot.botbase.server.client.delete_channel', 'server.client.delete_channel', (['channel'], {}), '(channel)\n', (10634, 10643), False, 'from necrobot.botbase import server, discordutil\n'), ((10769, 10795), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (10793, 10795), False, 'import 
datetime\n'), ((12134, 12173), 'necrobot.database.matchdb.delete_match', 'matchdb.delete_match', ([], {'match_id': 'match_id'}), '(match_id=match_id)\n', (12154, 12173), False, 'from necrobot.database import matchdb, racedb\n'), ((13438, 13464), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (13462, 13464), False, 'import datetime\n'), ((14476, 14519), 'necrobot.database.matchdb.get_match_race_data', 'matchdb.get_match_race_data', (['match.match_id'], {}), '(match.match_id)\n', (14503, 14519), False, 'from necrobot.database import matchdb, racedb\n'), ((4290, 4332), 'necrobot.botbase.server.find_channel', 'server.find_channel', ([], {'channel_id': 'channel_id'}), '(channel_id=channel_id)\n', (4309, 4332), False, 'from necrobot.botbase import server, discordutil\n'), ((5424, 5536), 'necrobot.database.matchdb.get_channeled_matches_raw_data', 'matchdb.get_channeled_matches_raw_data', ([], {'must_be_scheduled': '(False)', 'order_by_time': '(False)', 'racer_id': 'racer.user_id'}), '(must_be_scheduled=False,\n order_by_time=False, racer_id=racer.user_id)\n', (5462, 5536), False, 'from necrobot.database import matchdb, racedb\n'), ((5590, 5678), 'necrobot.database.matchdb.get_channeled_matches_raw_data', 'matchdb.get_channeled_matches_raw_data', ([], {'must_be_scheduled': '(False)', 'order_by_time': '(False)'}), '(must_be_scheduled=False,\n order_by_time=False)\n', (5628, 5678), False, 'from necrobot.database import matchdb, racedb\n'), ((9226, 9276), 'necrobot.util.console.warning', 'console.warning', (['"""Failed to make a match channel."""'], {}), "('Failed to make a match channel.')\n", (9241, 9276), False, 'from necrobot.util import console, timestr, writechannel, strutil, rtmputil\n'), ((9791, 9801), 'necrobot.botbase.necrobot.Necrobot', 'Necrobot', ([], {}), '()\n', (9799, 9801), False, 'from necrobot.botbase.necrobot import Necrobot\n'), ((11278, 11349), 'necrobot.util.timestr.timedelta_to_str', 'timestr.timedelta_to_str', (['(match.suggested_time - utcnow)'], {'punctuate': '(True)'}), '(match.suggested_time - utcnow, punctuate=True)\n', (11302, 11349), False, 'from necrobot.util import console, timestr, writechannel, strutil, rtmputil\n'), ((14176, 14205), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(0)'}), '(minutes=0)\n', (14194, 14205), False, 'import datetime\n'), ((14292, 14334), 'necrobot.util.timestr.str_full_24h', 'timestr.str_full_24h', (['match.suggested_time'], {}), '(match.suggested_time)\n', (14312, 14334), False, 'from necrobot.util import console, timestr, writechannel, strutil, rtmputil\n'), ((7271, 7317), 'necrobot.database.matchdb.register_match_channel', 'matchdb.register_match_channel', (['match_id', 'None'], {}), '(match_id, None)\n', (7301, 7317), False, 'from necrobot.database import matchdb, racedb\n'), ((8932, 9019), 'discord.ChannelPermissions', 'discord.ChannelPermissions', ([], {'target': 'server.server.default_role', 'overwrite': 'deny_read'}), '(target=server.server.default_role, overwrite=\n deny_read)\n', (8958, 9019), False, 'import discord\n'), ((9028, 9102), 'discord.ChannelPermissions', 'discord.ChannelPermissions', ([], {'target': 'server.server.me', 'overwrite': 'permit_read'}), '(target=server.server.me, overwrite=permit_read)\n', (9054, 9102), False, 'import discord\n'), ((9525, 9618), 'necrobot.botbase.discordutil.set_channel_category', 'discordutil.set_channel_category', ([], {'channel': 'match_channel', 'category': 'match_channel_category'}), '(channel=match_channel, category=\n 
match_channel_category)\n', (9557, 9618), False, 'from necrobot.botbase import server, discordutil\n'), ((10553, 10563), 'necrobot.botbase.necrobot.Necrobot', 'Necrobot', ([], {}), '()\n', (10561, 10563), False, 'from necrobot.botbase.necrobot import Necrobot\n'), ((12532, 12542), 'necrobot.race.raceinfo.RaceInfo', 'RaceInfo', ([], {}), '()\n', (12540, 12542), False, 'from necrobot.race.raceinfo import RaceInfo\n'), ((13619, 13663), 'necrobot.util.strutil.tickless', 'strutil.tickless', (['match.racer_1.display_name'], {}), '(match.racer_1.display_name)\n', (13635, 13663), False, 'from necrobot.util import console, timestr, writechannel, strutil, rtmputil\n'), ((13707, 13751), 'necrobot.util.strutil.tickless', 'strutil.tickless', (['match.racer_2.display_name'], {}), '(match.racer_2.display_name)\n', (13723, 13751), False, 'from necrobot.util import console, timestr, writechannel, strutil, rtmputil\n'), ((13963, 14007), 'necrobot.util.strutil.tickless', 'strutil.tickless', (['match.racer_1.display_name'], {}), '(match.racer_1.display_name)\n', (13979, 14007), False, 'from necrobot.util import console, timestr, writechannel, strutil, rtmputil\n'), ((14051, 14095), 'necrobot.util.strutil.tickless', 'strutil.tickless', (['match.racer_2.display_name'], {}), '(match.racer_2.display_name)\n', (14067, 14095), False, 'from necrobot.util import console, timestr, writechannel, strutil, rtmputil\n'), ((7190, 7227), 'necrobot.botbase.server.client.delete_channel', 'server.client.delete_channel', (['channel'], {}), '(channel)\n', (7218, 7227), False, 'from necrobot.botbase import server, discordutil\n'), ((8637, 8707), 'discord.ChannelPermissions', 'discord.ChannelPermissions', ([], {'target': 'racer.member', 'overwrite': 'permit_read'}), '(target=racer.member, overwrite=permit_read)\n', (8663, 8707), False, 'import discord\n'), ((11731, 11805), 'necrobot.util.rtmputil.kadgar_link', 'rtmputil.kadgar_link', (['match.racer_1.twitch_name', 'match.racer_2.twitch_name'], {}), '(match.racer_1.twitch_name, match.racer_2.twitch_name)\n', (11751, 11805), False, 'from necrobot.util import console, timestr, writechannel, strutil, rtmputil\n'), ((4678, 4704), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4702, 4704), False, 'import datetime\n'), ((6724, 6734), 'necrobot.botbase.necrobot.Necrobot', 'Necrobot', ([], {}), '()\n', (6732, 6734), False, 'from necrobot.botbase.necrobot import Necrobot\n'), ((4804, 4814), 'necrobot.botbase.necrobot.Necrobot', 'Necrobot', ([], {}), '()\n', (4812, 4814), False, 'from necrobot.botbase.necrobot import Necrobot\n')] |
import os
import os.path
from keras.layers import Dense, Flatten, Conv1D, Reshape
from keras.optimizers import Nadam
from keras.models import Sequential
from keras.models import load_model
from keras.regularizers import l2
from keras import backend as K
from keras.losses import mean_squared_error
from sljassbot.player.rl_player.input_handler import InputHandler
def huber_loss(a, b, in_keras=True):
error = a - b
quadratic_term = error * error / 2
linear_term = abs(error) - 1 / 2
use_linear_term = (abs(error) > 1.0)
if in_keras:
# Keras won't let us multiply floats by booleans, so we explicitly cast the booleans to floats
use_linear_term = K.cast(use_linear_term, 'float32')
return use_linear_term * linear_term + (1 - use_linear_term) * quadratic_term
'''
def build_model(model_path, learning_rate=0.01):
if os.path.exists(model_path):
# model = load_model(model_path, custom_objects={'huber_loss': huber_loss})
model = load_model(model_path)
print('Load existing model.')
else:
model = Sequential()
model.add(Dense(InputHandler.input_size * 2, input_shape=(InputHandler.input_size,), activation='relu',W_regularizer=l2(0.01)))
model.add(Reshape((InputHandler.input_size * 2, 1,), input_shape=(InputHandler.input_size * 2,)))
#model.add(Dense(InputHandler.input_size, input_shape=(InputHandler.input_size,), activation='relu',W_regularizer=l2(0.01)))
model.add(Conv1D(filters=50, kernel_size=18, strides=18, padding='same', activation='relu'))
model.add(Conv1D(filters=25, kernel_size=9, strides=9, padding='same', activation='relu'))
model.add(Flatten())
model.add(Dense(InputHandler.input_size * 2, activation='relu', W_regularizer=l2(0.01)))
model.add(Dense(InputHandler.output_size, activation='linear'))
# optimizer = RMSprop(lr=0.00025, rho=0.95, epsilon=0.01)
optimizer = Nadam(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=None, schedule_decay=0.004)
# model.compile(loss=huber_loss, optimizer=optimizer)
model.compile(loss=mean_squared_error, optimizer=optimizer)
print('Create new model.')
return model
'''
# TODO: first 2 Conv1D then 2 Fully
def build_model(model_path, learning_rate=0.01):  # learning_rate is currently unused; Nadam below hard-codes lr=0.002
if os.path.exists(model_path):
# model = load_model(model_path, custom_objects={'huber_loss': huber_loss})
model = load_model(model_path)
print('Load existing model.')
else:
model = Sequential()
        model.add(Dense(InputHandler.input_size * 2, input_shape=(InputHandler.input_size,), activation='relu', kernel_regularizer=l2(0.01)))
model.add(Reshape((InputHandler.input_size * 2, 1,), input_shape=(InputHandler.input_size * 2,)))
#model.add(Dense(InputHandler.input_size, input_shape=(InputHandler.input_size,), activation='relu',W_regularizer=l2(0.01)))
model.add(Conv1D(filters=50, kernel_size=9, strides=9, padding='same', activation='relu'))
model.add(Conv1D(filters=50, kernel_size=18, strides=9, padding='same', activation='relu'))
model.add(Conv1D(filters=50, kernel_size=36, strides=9, padding='same', activation='relu'))
model.add(Conv1D(filters=25, kernel_size=9, strides=9, padding='same', activation='relu'))
model.add(Flatten())
        model.add(Dense(InputHandler.input_size * 2, activation='relu', kernel_regularizer=l2(0.01)))
model.add(Dense(InputHandler.output_size, activation='linear'))
# optimizer = RMSprop(lr=0.00025, rho=0.95, epsilon=0.01)
optimizer = Nadam(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=None, schedule_decay=0.004)
# model.compile(loss=huber_loss, optimizer=optimizer)
model.compile(loss=mean_squared_error, optimizer=optimizer)
print('Create new model.')
return model
| [
"keras.models.load_model",
"keras.regularizers.l2",
"os.path.exists",
"keras.layers.Flatten",
"keras.layers.Conv1D",
"keras.optimizers.Nadam",
"keras.layers.Dense",
"keras.models.Sequential",
"keras.layers.Reshape",
"keras.backend.cast"
] | [((2311, 2337), 'os.path.exists', 'os.path.exists', (['model_path'], {}), '(model_path)\n', (2325, 2337), False, 'import os\n'), ((686, 720), 'keras.backend.cast', 'K.cast', (['use_linear_term', '"""float32"""'], {}), "(use_linear_term, 'float32')\n", (692, 720), True, 'from keras import backend as K\n'), ((2439, 2461), 'keras.models.load_model', 'load_model', (['model_path'], {}), '(model_path)\n', (2449, 2461), False, 'from keras.models import load_model\n'), ((2526, 2538), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (2536, 2538), False, 'from keras.models import Sequential\n'), ((3596, 3673), 'keras.optimizers.Nadam', 'Nadam', ([], {'lr': '(0.002)', 'beta_1': '(0.9)', 'beta_2': '(0.999)', 'epsilon': 'None', 'schedule_decay': '(0.004)'}), '(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=None, schedule_decay=0.004)\n', (3601, 3673), False, 'from keras.optimizers import Nadam\n'), ((2693, 2783), 'keras.layers.Reshape', 'Reshape', (['(InputHandler.input_size * 2, 1)'], {'input_shape': '(InputHandler.input_size * 2,)'}), '((InputHandler.input_size * 2, 1), input_shape=(InputHandler.\n input_size * 2,))\n', (2700, 2783), False, 'from keras.layers import Dense, Flatten, Conv1D, Reshape\n'), ((2932, 3011), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(50)', 'kernel_size': '(9)', 'strides': '(9)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=50, kernel_size=9, strides=9, padding='same', activation='relu')\n", (2938, 3011), False, 'from keras.layers import Dense, Flatten, Conv1D, Reshape\n'), ((3031, 3116), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(50)', 'kernel_size': '(18)', 'strides': '(9)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=50, kernel_size=18, strides=9, padding='same', activation='relu'\n )\n", (3037, 3116), False, 'from keras.layers import Dense, Flatten, Conv1D, Reshape\n'), ((3131, 3216), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(50)', 'kernel_size': '(36)', 'strides': '(9)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=50, kernel_size=36, strides=9, padding='same', activation='relu'\n )\n", (3137, 3216), False, 'from keras.layers import Dense, Flatten, Conv1D, Reshape\n'), ((3231, 3310), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(25)', 'kernel_size': '(9)', 'strides': '(9)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=25, kernel_size=9, strides=9, padding='same', activation='relu')\n", (3237, 3310), False, 'from keras.layers import Dense, Flatten, Conv1D, Reshape\n'), ((3330, 3339), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (3337, 3339), False, 'from keras.layers import Dense, Flatten, Conv1D, Reshape\n'), ((3456, 3508), 'keras.layers.Dense', 'Dense', (['InputHandler.output_size'], {'activation': '"""linear"""'}), "(InputHandler.output_size, activation='linear')\n", (3461, 3508), False, 'from keras.layers import Dense, Flatten, Conv1D, Reshape\n'), ((2664, 2672), 'keras.regularizers.l2', 'l2', (['(0.01)'], {}), '(0.01)\n', (2666, 2672), False, 'from keras.regularizers import l2\n'), ((3427, 3435), 'keras.regularizers.l2', 'l2', (['(0.01)'], {}), '(0.01)\n', (3429, 3435), False, 'from keras.regularizers import l2\n')] |
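A quick numeric sanity check (my addition) of huber_loss via the in_keras=False path, where plain floats bypass the Keras backend:

assert huber_loss(0.5, 0.0, in_keras=False) == 0.125  # |error| <= 1: quadratic term e**2 / 2
assert huber_loss(3.0, 0.0, in_keras=False) == 2.5    # |error| > 1: linear term |e| - 1/2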
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import formats
from django.utils import timezone
# Create your models here.
class ConventionManager(models.Manager):
def next(self):
""" The upcoming event """
next_convention = self.exclude(end_time__lt=timezone.now()).order_by('start_time').first()
return next_convention
class Convention(models.Model):
""" A con, festival or event """
name = models.CharField(max_length=100)
description = models.TextField()
mail_signature = models.TextField()
    # logo
    # TODO: logo as a sorl-thumbnail field
location = models.CharField(max_length=500)
start_time = models.DateTimeField()
end_time = models.DateTimeField()
ticket_sales_opens = models.DateTimeField()
ticket_sales_closes = models.DateTimeField()
program_signup_opens = models.DateTimeField()
program_signup_closes = models.DateTimeField()
objects = ConventionManager()
class Meta:
verbose_name = _('Convention')
verbose_name_plural = _('Conventions')
def __str__(self):
return self.name
def dates(self):
days = (self.end_time.date() - self.start_time.date()).days + 1
dates = [self.start_time.replace(hour=0, minute=0) + timezone.timedelta(days=x) for x in range(days)]
return dates
def ticket_sales_has_started(self):
return timezone.now() > self.ticket_sales_opens
def ticket_sales_has_ended(self):
return timezone.now() > self.ticket_sales_closes
def full_description(self):
return '{name}\n{description}\n{start} to {end}'.format(
name=self.name,
description=self.description,
start=formats.date_format(self.start_time, 'SHORT_DATE_FORMAT'),
end=formats.date_format(self.end_time, 'SHORT_DATE_FORMAT'),
)
| [
"django.db.models.TextField",
"django.db.models.CharField",
"django.utils.timezone.now",
"django.utils.formats.date_format",
"django.utils.timezone.timedelta",
"django.db.models.DateTimeField",
"django.utils.translation.ugettext_lazy"
] | [((493, 525), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (509, 525), False, 'from django.db import models\n'), ((544, 562), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (560, 562), False, 'from django.db import models\n'), ((584, 602), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (600, 602), False, 'from django.db import models\n'), ((661, 693), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)'}), '(max_length=500)\n', (677, 693), False, 'from django.db import models\n'), ((711, 733), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (731, 733), False, 'from django.db import models\n'), ((749, 771), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (769, 771), False, 'from django.db import models\n'), ((797, 819), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (817, 819), False, 'from django.db import models\n'), ((846, 868), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (866, 868), False, 'from django.db import models\n'), ((896, 918), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (916, 918), False, 'from django.db import models\n'), ((947, 969), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (967, 969), False, 'from django.db import models\n'), ((1045, 1060), 'django.utils.translation.ugettext_lazy', '_', (['"""Convention"""'], {}), "('Convention')\n", (1046, 1060), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1091, 1107), 'django.utils.translation.ugettext_lazy', '_', (['"""Conventions"""'], {}), "('Conventions')\n", (1092, 1107), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1438, 1452), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1450, 1452), False, 'from django.utils import timezone\n'), ((1533, 1547), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1545, 1547), False, 'from django.utils import timezone\n'), ((1312, 1338), 'django.utils.timezone.timedelta', 'timezone.timedelta', ([], {'days': 'x'}), '(days=x)\n', (1330, 1338), False, 'from django.utils import timezone\n'), ((1761, 1818), 'django.utils.formats.date_format', 'formats.date_format', (['self.start_time', '"""SHORT_DATE_FORMAT"""'], {}), "(self.start_time, 'SHORT_DATE_FORMAT')\n", (1780, 1818), False, 'from django.utils import formats\n'), ((1836, 1891), 'django.utils.formats.date_format', 'formats.date_format', (['self.end_time', '"""SHORT_DATE_FORMAT"""'], {}), "(self.end_time, 'SHORT_DATE_FORMAT')\n", (1855, 1891), False, 'from django.utils import formats\n'), ((331, 345), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (343, 345), False, 'from django.utils import timezone\n')] |
#!/usr/bin/env python
import time
from random import choice
from string import ascii_lowercase
from amqp import connect_get_channel_declare_exchange_and_return_channel, EXCHANGE_NAME
APPS = ["foo", "bar", "infrastructure"]
LEVELS = ["debug", "info", "warn", "error"]
def publish_cyclically():
channel = connect_get_channel_declare_exchange_and_return_channel()
for counter in range(1, 1000):
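        # Dotted routing key such as "bar.warn" (<app>.<level>) – presumably
        # matched by topic-exchange bindings declared in the imported helper.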
routing_key = "%s.%s" % (choice(APPS), choice(LEVELS))
body = "%03d Some random text: %s " % (
counter,
''.join(choice(ascii_lowercase) for _ in range(16))
)
channel.basic_publish(
exchange=EXCHANGE_NAME,
routing_key=routing_key,
body=body
)
print("Published '%s' to '%s' with routing-key '%s'." % (body, EXCHANGE_NAME, routing_key))
time.sleep(1)
if __name__ == "__main__":
try:
publish_cyclically()
except KeyboardInterrupt:
pass
| [
"random.choice",
"amqp.connect_get_channel_declare_exchange_and_return_channel",
"time.sleep"
] | [((311, 368), 'amqp.connect_get_channel_declare_exchange_and_return_channel', 'connect_get_channel_declare_exchange_and_return_channel', ([], {}), '()\n', (366, 368), False, 'from amqp import connect_get_channel_declare_exchange_and_return_channel, EXCHANGE_NAME\n'), ((858, 871), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (868, 871), False, 'import time\n'), ((438, 450), 'random.choice', 'choice', (['APPS'], {}), '(APPS)\n', (444, 450), False, 'from random import choice\n'), ((452, 466), 'random.choice', 'choice', (['LEVELS'], {}), '(LEVELS)\n', (458, 466), False, 'from random import choice\n'), ((558, 581), 'random.choice', 'choice', (['ascii_lowercase'], {}), '(ascii_lowercase)\n', (564, 581), False, 'from random import choice\n')] |
import discord
from discord.ext import commands
import asyncio
class help(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command()
async def help(self, ctx):
embed = discord.Embed(
title="KEKW Bot Help",
description="_ _\nThank you for inviting KEKW bot!\nCheck out our other bot, [Essentials](https://essentialsbot.xyz)\n\n[Setup the bot](https://github.com/Fxcilities/KEKWBot/blob/main/README.md)",
color=discord.Color.dark_gold()
)
embed.add_field(name="Main commands:", value="**```kekw!start (amount, defaults to 50)```**\n**```kekw!emojis```**", inline=False)
embed.set_footer(text="Requested by: " + str(ctx.author), icon_url=str(ctx.author.avatar_url))
await ctx.message.delete()
await ctx.send(embed=embed, delete_after=30)
def setup(bot):
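    # Extension entry point: discord.py calls this when the file is loaded,
    # e.g. via bot.load_extension("help") (module path assumed).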
bot.add_cog(help(bot))
| [
"discord.Color.dark_gold",
"discord.ext.commands.command"
] | [((209, 227), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (225, 227), False, 'from discord.ext import commands\n'), ((553, 578), 'discord.Color.dark_gold', 'discord.Color.dark_gold', ([], {}), '()\n', (576, 578), False, 'import discord\n')] |
# @author: <NAME>
# email: <EMAIL>
# -2
# Fetch stock quotes from a Kafka topic, compute per-symbol average prices
# with Spark Streaming, and publish the results back to Kafka under another
# topic.
import argparse
import json
import logging
import atexit
from pyspark import SparkContext
from pyspark.streaming import StreamingContext
from pyspark.streaming.kafka import KafkaUtils
from kafka import KafkaProducer
from kafka.errors import KafkaError
class spark_streaming():
def __init__(self, topic, target_topic, kafka_broker):
self.topic = topic
self.kafka_broker = kafka_broker
self.target_topic = target_topic
        self.kafka_producer = KafkaProducer(bootstrap_servers=kafka_broker)
sc = SparkContext("local[2]", "AveragePrice")
sc.setLogLevel("INFO")
self.ssc = StreamingContext(sc, 5)
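        # 5-second micro-batch interval for the streaming context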
logging.basicConfig()
self.logger = logging.getLogger()
self.logger.setLevel(logging.INFO)
def process(self, timeobj, rdd):
def group(record):
data = json.loads(record[1].decode('utf-8'))[0]
return data.get("StockSymbol"), (float(data.get("LastTradePrice")), 1)
        # (symbol, (price_sum, count)) -> (symbol, average_price); a two-argument
        # lambda cannot unpack the (key, value) pair in Python 3, so index into it.
        newRDD = rdd.map(group).reduceByKey(lambda x, y: (x[0] + y[0], x[1] + y[1]))\
            .map(lambda pair: (pair[0], pair[1][0] / pair[1][1]))
results = newRDD.collect()
for res in results:
msg = {"StockSymbol": res[0],
"AveragePrice": res[1]}
try:
                self.kafka_producer.send(self.target_topic, value=json.dumps(msg).encode('utf-8'))
self.logger.info("Successfully send processed data to {}, {}".format(self.target_topic, msg))
except KafkaError as KE:
                self.logger.warning("Failed to send {}, the error is {}".format(msg, KE))
def createStream(self):
        # create a direct Kafka input stream (DStream) for the source topic
directKafkaStream = KafkaUtils.createDirectStream(self.ssc, [self.topic],
{"metadata.broker.list" : self.kafka_broker})
return directKafkaStream
def run(self):
        directKafkaStream = self.createStream()
        directKafkaStream.foreachRDD(self.process) # transformation with action
self.ssc.start()
self.ssc.awaitTermination()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("topic", help="this is the topic to receive data from kafka producer")
parser.add_argument("target_topic", help="this is the topic to send processed data to kafka broker")
parser.add_argument("kafka_broker", help="this is the kafka broker")
args = parser.parse_args()
topic = args.topic
target_topic = args.target_topic
kafka_broker = args.kafka_broker
KafkaSpark = spark_streaming(topic, target_topic, kafka_broker)
KafkaSpark.run()
| [
"argparse.ArgumentParser",
"logging.basicConfig",
"pyspark.SparkContext",
"pyspark.streaming.kafka.KafkaUtils.createDirectStream",
"kafka.KafkaProducer",
"json.dumps",
"pyspark.streaming.StreamingContext",
"logging.getLogger"
] | [((2318, 2343), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2341, 2343), False, 'import argparse\n'), ((643, 686), 'kafka.KafkaProducer', 'KafkaProducer', ([], {'bootrap_servers': 'kafka_broker'}), '(bootrap_servers=kafka_broker)\n', (656, 686), False, 'from kafka import KafkaProducer\n'), ((700, 740), 'pyspark.SparkContext', 'SparkContext', (['"""local[2]"""', '"""AveragePrice"""'], {}), "('local[2]', 'AveragePrice')\n", (712, 740), False, 'from pyspark import SparkContext\n'), ((791, 814), 'pyspark.streaming.StreamingContext', 'StreamingContext', (['sc', '(5)'], {}), '(sc, 5)\n', (807, 814), False, 'from pyspark.streaming import StreamingContext\n'), ((824, 845), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (843, 845), False, 'import logging\n'), ((868, 887), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (885, 887), False, 'import logging\n'), ((1876, 1979), 'pyspark.streaming.kafka.KafkaUtils.createDirectStream', 'KafkaUtils.createDirectStream', (['self.ssc', '[self.topic]', "{'metadata.broker.list': self.kafka_broker}"], {}), "(self.ssc, [self.topic], {\n 'metadata.broker.list': self.kafka_broker})\n", (1905, 1979), False, 'from pyspark.streaming.kafka import KafkaUtils\n'), ((1523, 1538), 'json.dumps', 'json.dumps', (['msg'], {}), '(msg)\n', (1533, 1538), False, 'import json\n')] |
import scrapy
from news_spider.items import WallstreetcnItem
from news_spider.utils.common import get_category_by_name
class WallstreetcnSpider(scrapy.Spider):
name = 'wallstreetcn'
    allowed_domains = ['api.wallstcn.com']
start_urls = ['https://api.wallstcn.com/apiv1/content/information-flow?channel=global&accept=article&limit=20&action=upglide']
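    # response.json() (used below) requires Scrapy >= 2.2; on older versions
    # use json.loads(response.text) instead.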
category_id = get_category_by_name(name)
def parse(self, response):
json_data = response.json()
res_list = json_data["data"]["items"]
for res in res_list:
item = WallstreetcnItem()
resource = res["resource"]
title = resource["title"]
display_time = resource["display_time"]
url = resource["uri"]
hot_val = resource["author"]["display_name"]
item["title"] = title
item["url"] = url
item["hot_val"] = hot_val
item["rank"] = display_time
item["category_id"] = self.category_id
yield item
| [
"news_spider.items.WallstreetcnItem",
"news_spider.utils.common.get_category_by_name"
] | [((389, 415), 'news_spider.utils.common.get_category_by_name', 'get_category_by_name', (['name'], {}), '(name)\n', (409, 415), False, 'from news_spider.utils.common import get_category_by_name\n'), ((578, 596), 'news_spider.items.WallstreetcnItem', 'WallstreetcnItem', ([], {}), '()\n', (594, 596), False, 'from news_spider.items import WallstreetcnItem\n')] |
from django.http import HttpResponse
from django.template import loader
from .models import *
from django.conf import settings
from django.shortcuts import redirect
from utils import dataLayerPDF
from utils import dprint
from utils import modelUtils
import pandas as pd
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django.utils.dateparse import parse_date
import datetime
from dateutil.parser import *
def homePage(request):
template = loader.get_template('base_intro.html')
context = {
}
return HttpResponse(template.render(context, request))
def loginForm(request):
context = {
'errors': "",
}
template = loader.get_template('registration/login.html')
return HttpResponse(template.render(context, request))
def loginUser(request):
username = request.POST['username']
password = request.POST['password']
user = authenticate(request, username=username, password=password)
#print(user)
if user is not None:
login(request, user)
return redirect('systemForms')
else:
return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
@login_required
def viewSystemForms(request):
pdfforms=PDFForm.objects.all()
context = {
'systemforms':pdfforms ,
}
template = loader.get_template('formsList.html')
return HttpResponse(template.render(context, request))
@login_required
def addFormToProfile(request,form_id):
#return HttpResponse(str(form_id))
errorCondition=False
loggedUserID=request.user.id
UserObject=request.user
PDFormObject=PDFForm.objects.get(id=form_id)
isFormPresent=GeneratedPDF.objects.filter(user=UserObject, pdf=PDFormObject).count()
if(isFormPresent==0):
addform=GeneratedPDF(user=UserObject, pdf=PDFormObject)
addform.save()
modelUtils.addFieldsToProfile(UserObject, PDFormObject)
#get all fields in PDF related to PDFID
fieldsinPDF=PDFFormField.objects.filter(pdf=form_id).values_list(
"field",
"field__field_display",
"field__field_question",
"field__field_state",
"field__field_description",
named=True
)
#get all fields Related to User in UserProfile and that match the fields in the PDFForm
userFields=UserProfile.objects.filter(user=loggedUserID).values_list(
"field",
"field_text",
"data_index",
named=True
)
#print(userFields)
#print(fieldsinPDF)
#Set the column as index on which the join is to be made in pandas
userFieldDF=pd.DataFrame(list(userFields)).set_index('field')
PDFFieldsDF=pd.DataFrame(list(fieldsinPDF)).set_index('field')
#dprint.dprint(userFieldDF)
#dprint.dprint(PDFFieldsDF)
#Make the Join
combinedDF=PDFFieldsDF.join(userFieldDF, on='field',lsuffix='_left', rsuffix='_right')
#remove rows with NA Values. Will happen when the number of rows in the above datasets differ in count.
#combinedDF.dropna(0,inplace=True)
#sort the Dataframe by Field Page Number, then convert it to a list of dictionaries
#dataSet=combinedDF.sort_values(by=['field_page_number']).to_dict('records')
#dprint.dprint(combinedDF)
    missingQuestionsList = combinedDF[combinedDF["field__field_state"] == 'DYNAMIC'].copy()
    missingQuestionsList.fillna(value='', inplace=True)
missingQuestionsList.reset_index(inplace=True)
#missingQuestionsList['field_str']=missingQuestionsList['field'].astype(str)
missingQuestionTuples=list(missingQuestionsList.itertuples())
#print(type(missingQuestionTuples))
fieldIDStr=""
for question in missingQuestionTuples:
fieldIDStr=fieldIDStr+" " +str(question.field)
fieldIDStr=fieldIDStr.strip().replace(" ", ",")
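    # e.g. " 3 7 12" becomes "3,7,12" (illustrative field IDs)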
#print(fieldIDStr)
numberOfMissingQuestions=len(missingQuestionTuples)
context = {
'formObject':PDFormObject,
"missingQuestions":missingQuestionTuples,
'questionCount':numberOfMissingQuestions,
'form_id':form_id,
'fieldIDS':fieldIDStr
}
#dprint.dprint(missingQuestionsList)
#print(context)
template = loader.get_template('process_form.html')
return HttpResponse(template.render(context, request))
@login_required
@require_http_methods(["POST"])
def saveDynamicFieldData(request,pdfid):
recievedDateFormat=""
fieldIDs=request.POST["fieldIDs"]
fieldIDList=[]
fieldData=[]
if(fieldIDs is not None):
fieldIDList=fieldIDs.split(",")
for fieldID in fieldIDList:
fieldDict={}
fieldDict["ID"]=fieldID
fieldDict["userValue"]=request.POST[fieldID]
fieldData.append(fieldDict)
#print(fieldData)
modelUtils.saveUserProfileFields(fieldData, request.user)
return redirect('/editPDF/'+str(pdfid))
def logoutUser(request):
logout(request)
return redirect('login')
@login_required
def fillForm(request, pdfid):
dataSet, formData=modelUtils.getUserFormData(request, pdfid)
#print(dataSet)
#Use the dataset as input to generate the pdf, recieve a buffer as reponse
pdfData=dataLayerPDF.addText(dataSet,formData)
# #output=dataLayerPDF.mergePDFs()
timestamp=datetime.datetime.now().strftime("%d-%m-%Y-%I-%M-%S")
filename=formData.pdf_name +"-"+request.user.first_name+"-" + str(timestamp) +".pdf"
metaData = {
'/Title': filename,
}
pdfData.addMetadata(metaData)
#Set the httpresponse to download a pdf
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'inline; filename= "%s"' % filename
#response.write(PDFBytes)
#write the pdfdata to the responseobject
pdfData.write(response)
#response.write(pdfData)
#return the response
return response
@login_required
def profile(request):
# userForms=GeneratedPDF.objects.filter(user=request.user).values("pdf__pdf_name",
# "pdf__pdf_description",
# "pdf__image",
# )
userForms=GeneratedPDF.objects.filter(user=request.user).prefetch_related("pdf")
#print(userForms)
userData=UserProfile.objects.filter(user=request.user).prefetch_related("field").order_by(
"field__category",
"field__category_order",
"field__field_description")
formsCount=userForms.count()
#print(userData)
template = loader.get_template('base_view_profile.html')
context = {
"systemforms":userForms,
"userData":userData,
"formcount":formsCount
}
#print(context)
return HttpResponse(template.render(context, request))
@login_required
def editPDFLive(request, pdfid):
userFormsCount=GeneratedPDF.objects.filter(user=request.user, pdf=pdfid).count()
if(userFormsCount==0):
return HttpResponse("Not found");
dataSet, formData=modelUtils.getUserFormData(request, pdfid, False)
#dprint.dprint(fieldsinPDF)
context = {
"userFormDataSet":dataSet,
"formData": formData,
'formID':pdfid
}
#print(formData)
template = loader.get_template('editPDF.html')
return HttpResponse(template.render(context, request))
@login_required
def arrangeFormQuestions(request, pdfid):
superUser=request.user.is_superuser
if(not superUser):
return HttpResponse(status=404)
FormFieldQueryset=PDFFormField.objects.filter(pdf=pdfid).prefetch_related('field')
context={
"formFields":FormFieldQueryset,
'formID':pdfid
}
#print(context)
template = loader.get_template('rearrangeformquestions.html')
return HttpResponse(template.render(context, request))
| [
"django.http.HttpResponse",
"django.shortcuts.redirect",
"utils.modelUtils.saveUserProfileFields",
"utils.dataLayerPDF.addText",
"datetime.datetime.now",
"django.contrib.auth.logout",
"django.contrib.auth.authenticate",
"utils.modelUtils.addFieldsToProfile",
"django.contrib.auth.login",
"utils.modelUtils.getUserFormData",
"django.views.decorators.http.require_http_methods",
"django.template.loader.get_template"
] | [((4341, 4371), 'django.views.decorators.http.require_http_methods', 'require_http_methods', (["['POST']"], {}), "(['POST'])\n", (4361, 4371), False, 'from django.views.decorators.http import require_http_methods\n'), ((589, 627), 'django.template.loader.get_template', 'loader.get_template', (['"""base_intro.html"""'], {}), "('base_intro.html')\n", (608, 627), False, 'from django.template import loader\n'), ((796, 842), 'django.template.loader.get_template', 'loader.get_template', (['"""registration/login.html"""'], {}), "('registration/login.html')\n", (815, 842), False, 'from django.template import loader\n'), ((1009, 1068), 'django.contrib.auth.authenticate', 'authenticate', (['request'], {'username': 'username', 'password': 'password'}), '(request, username=username, password=password)\n', (1021, 1068), False, 'from django.contrib.auth import authenticate, login, logout\n'), ((1375, 1412), 'django.template.loader.get_template', 'loader.get_template', (['"""formsList.html"""'], {}), "('formsList.html')\n", (1394, 1412), False, 'from django.template import loader\n'), ((1876, 1931), 'utils.modelUtils.addFieldsToProfile', 'modelUtils.addFieldsToProfile', (['UserObject', 'PDFormObject'], {}), '(UserObject, PDFormObject)\n', (1905, 1931), False, 'from utils import modelUtils\n'), ((4223, 4263), 'django.template.loader.get_template', 'loader.get_template', (['"""process_form.html"""'], {}), "('process_form.html')\n", (4242, 4263), False, 'from django.template import loader\n'), ((4735, 4792), 'utils.modelUtils.saveUserProfileFields', 'modelUtils.saveUserProfileFields', (['fieldData', 'request.user'], {}), '(fieldData, request.user)\n', (4767, 4792), False, 'from utils import modelUtils\n'), ((4869, 4884), 'django.contrib.auth.logout', 'logout', (['request'], {}), '(request)\n', (4875, 4884), False, 'from django.contrib.auth import authenticate, login, logout\n'), ((4893, 4910), 'django.shortcuts.redirect', 'redirect', (['"""login"""'], {}), "('login')\n", (4901, 4910), False, 'from django.shortcuts import redirect\n'), ((4978, 5020), 'utils.modelUtils.getUserFormData', 'modelUtils.getUserFormData', (['request', 'pdfid'], {}), '(request, pdfid)\n', (5004, 5020), False, 'from utils import modelUtils\n'), ((5127, 5166), 'utils.dataLayerPDF.addText', 'dataLayerPDF.addText', (['dataSet', 'formData'], {}), '(dataSet, formData)\n', (5147, 5166), False, 'from utils import dataLayerPDF\n'), ((5531, 5575), 'django.http.HttpResponse', 'HttpResponse', ([], {'content_type': '"""application/pdf"""'}), "(content_type='application/pdf')\n", (5543, 5575), False, 'from django.http import HttpResponse\n'), ((6438, 6483), 'django.template.loader.get_template', 'loader.get_template', (['"""base_view_profile.html"""'], {}), "('base_view_profile.html')\n", (6457, 6483), False, 'from django.template import loader\n'), ((6865, 6914), 'utils.modelUtils.getUserFormData', 'modelUtils.getUserFormData', (['request', 'pdfid', '(False)'], {}), '(request, pdfid, False)\n', (6891, 6914), False, 'from utils import modelUtils\n'), ((7069, 7104), 'django.template.loader.get_template', 'loader.get_template', (['"""editPDF.html"""'], {}), "('editPDF.html')\n", (7088, 7104), False, 'from django.template import loader\n'), ((7493, 7543), 'django.template.loader.get_template', 'loader.get_template', (['"""rearrangeformquestions.html"""'], {}), "('rearrangeformquestions.html')\n", (7512, 7543), False, 'from django.template import loader\n'), ((1107, 1127), 'django.contrib.auth.login', 'login', (['request', 'user'], {}), 
'(request, user)\n', (1112, 1127), False, 'from django.contrib.auth import authenticate, login, logout\n'), ((1137, 1160), 'django.shortcuts.redirect', 'redirect', (['"""systemForms"""'], {}), "('systemForms')\n", (1145, 1160), False, 'from django.shortcuts import redirect\n'), ((1180, 1239), 'django.shortcuts.redirect', 'redirect', (["('%s?next=%s' % (settings.LOGIN_URL, request.path))"], {}), "('%s?next=%s' % (settings.LOGIN_URL, request.path))\n", (1188, 1239), False, 'from django.shortcuts import redirect\n'), ((6818, 6843), 'django.http.HttpResponse', 'HttpResponse', (['"""Not found"""'], {}), "('Not found')\n", (6830, 6843), False, 'from django.http import HttpResponse\n'), ((7286, 7310), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(404)'}), '(status=404)\n', (7298, 7310), False, 'from django.http import HttpResponse\n'), ((5214, 5237), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5235, 5237), False, 'import datetime\n')] |
#!/usr/bin/python
# Copyright (C) 2012, <NAME> <<EMAIL>>
# http://aravindavk.in
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import fontforge
if __name__ == "__main__":
font = fontforge.open("../Gubbi.sfd")
# Remove all GSUB lookups
for lookup in font.gsub_lookups:
font.removeLookup(lookup)
# Merge the new featurefile
font.mergeFeature("gsub.fea")
font.save()
font.close()
| [
"fontforge.open"
] | [((805, 835), 'fontforge.open', 'fontforge.open', (['"""../Gubbi.sfd"""'], {}), "('../Gubbi.sfd')\n", (819, 835), False, 'import fontforge\n')] |
import typing
import random
import uuid
from pydantic import BaseModel, Field
class URL(BaseModel):
"""
    FastAPI uses pydantic models to validate and serialize data.
    The field types and Field(...) constraints below drive that validation.
"""
id: int = Field(..., title="ID of URL")
full_url: str = Field(..., title="Full URL")
short_url_code: str = Field(..., title="Redirection code of URL")
class Database:
"""
Fake db
When using with real -- all CRUD should be awaited
"""
def __init__(self):
self._items: typing.Dict[int, URL] = {}
async def get_random(self) -> int:
"""
        random.choice needs an indexable sequence; dict_keys is not one, so build a list first.
"""
ids = list(self._items.keys())
return random.choice(ids)
async def get_all(self) -> typing.List[URL]:
"""
        For large collections, prefer generators that yield one item at a time.
        Combined with asyncio this enables `async for`; with a real DB each read
        would be awaited.
"""
for url in self._items.values():
yield url
async def get(self, id: typing.Optional[int] = None,
full_url: typing.Optional[str] = None,
short_url_code: typing.Optional[str] = None) -> typing.Optional[URL]:
"""
        Simulate a DB get, similar to SQLAlchemy's .get() lookup by primary key.
"""
if id:
return self._items.get(id)
try:
return next(item for item in self._items.values()
if item.full_url == full_url or item.short_url_code == short_url_code)
except StopIteration:
return None
async def add(self, url: str) -> URL:
"""
DB create simulation.
        In production the generated 'code' should be checked for duplicates;
        that is skipped here because this is an example project.
"""
id = len(self._items) + 1
code = uuid.uuid4().hex[:8]
new_url = URL(id=id, full_url=url, short_url_code=code)
self._items[id] = new_url
return new_url
async def delete(self, id: int) -> typing.Union[typing.NoReturn, None]:
"""
typing.NoReturn means that method raises an error
else it returns None as any other method/function with no 'return' specified
same as typing.Optional[typing.NoReturn]
"""
if id in self._items:
del self._items[id]
else:
raise ValueError("URL doesn`t exist")
| [
"uuid.uuid4",
"pydantic.Field",
"random.choice"
] | [((221, 250), 'pydantic.Field', 'Field', (['...'], {'title': '"""ID of URL"""'}), "(..., title='ID of URL')\n", (226, 250), False, 'from pydantic import BaseModel, Field\n'), ((271, 299), 'pydantic.Field', 'Field', (['...'], {'title': '"""Full URL"""'}), "(..., title='Full URL')\n", (276, 299), False, 'from pydantic import BaseModel, Field\n'), ((326, 369), 'pydantic.Field', 'Field', (['...'], {'title': '"""Redirection code of URL"""'}), "(..., title='Redirection code of URL')\n", (331, 369), False, 'from pydantic import BaseModel, Field\n'), ((743, 761), 'random.choice', 'random.choice', (['ids'], {}), '(ids)\n', (756, 761), False, 'import random\n'), ((1921, 1933), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1931, 1933), False, 'import uuid\n')] |
#!/usr/bin/env python3
################################################################
# <NAME> Personality Type Tweets Natural Language Processing
# By <NAME>
# Project can be found at:
# https://www.inertia7.com/projects/109 &
# https://www.inertia7.com/projects/110
################################################################
##################
# Import packages
##################
import sys, os
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
# Confirm the correct directory; break script and prompt user to move to correct directory otherwise
filepath = os.getcwd()
if not filepath.endswith('myersBriggsNLPAnalysis'):
    print('\nYou do not appear to be in the correct directory; you must be in '
          'the \'myersBriggsNLPAnalysis\' directory in order to run these '
          'scripts. Type \'pwd\' in the command line if you are unsure of '
          'your location in the terminal.')
sys.exit(1)
raw_data = 'data/mbti_1.csv'
token_data = 'data/mbti_tokenized.csv'
clean_data = 'data/mbti_cleaned.csv'
columns = np.array(['type', 'posts'])
##################################################
# Make different versions of our data for analysis
##################################################
'''
Explanation
-----------
Now we will have various versions of our data:
- Raw, unfiltered data
- Tokenized data with hashtags, mentions, retweets, etc.
- Cleaned tokenized data with stopwords removed
We will now subset the data into various parts to be used in the other scripts
'''
# First check if the data has been generated
# If not prompt user to make it
token_file_exists = os.path.isfile(token_data)
clean_file_exists = os.path.isfile(clean_data)
if not token_file_exists or not clean_file_exists:
print('It looks like no processed data has been generated.\n',
'Please run the \'data_generation.py\' file and follow the prompts.')
sys.exit(1)
# Declare different processed and unprocessed objects for further analysis
raw_df = pd.read_csv(raw_data, header = 0)
raw_type = raw_df['type']
raw_posts = raw_df['posts']
token_df = pd.read_csv(token_data, header = 0)
token_type = token_df['type']
token_posts = token_df['posts']
clean_df = pd.read_csv(clean_data, header = 0)
clean_type = clean_df['type']
clean_posts = clean_df['posts']
# Split up data into training and testing datasets
# To evaluate effectiveness of model training
X_train_token, X_test_token, y_train_token, y_test_token = train_test_split(
token_posts, token_type, test_size = 0.30, random_state = 42)
X_train_clean, X_test_clean, y_train_clean, y_test_clean = train_test_split(
clean_posts, clean_type, test_size = 0.30, random_state = 42)
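# 70/30 train/test split; the fixed random_state keeps both splits reproducible.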
| [
"os.getcwd",
"sklearn.model_selection.train_test_split",
"pandas.read_csv",
"os.path.isfile",
"numpy.array",
"sys.exit"
] | [((615, 626), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (624, 626), False, 'import sys, os\n'), ((1048, 1075), 'numpy.array', 'np.array', (["['type', 'posts']"], {}), "(['type', 'posts'])\n", (1056, 1075), True, 'import numpy as np\n'), ((1616, 1642), 'os.path.isfile', 'os.path.isfile', (['token_data'], {}), '(token_data)\n', (1630, 1642), False, 'import sys, os\n'), ((1663, 1689), 'os.path.isfile', 'os.path.isfile', (['clean_data'], {}), '(clean_data)\n', (1677, 1689), False, 'import sys, os\n'), ((1988, 2019), 'pandas.read_csv', 'pd.read_csv', (['raw_data'], {'header': '(0)'}), '(raw_data, header=0)\n', (1999, 2019), True, 'import pandas as pd\n'), ((2088, 2121), 'pandas.read_csv', 'pd.read_csv', (['token_data'], {'header': '(0)'}), '(token_data, header=0)\n', (2099, 2121), True, 'import pandas as pd\n'), ((2198, 2231), 'pandas.read_csv', 'pd.read_csv', (['clean_data'], {'header': '(0)'}), '(clean_data, header=0)\n', (2209, 2231), True, 'import pandas as pd\n'), ((2453, 2526), 'sklearn.model_selection.train_test_split', 'train_test_split', (['token_posts', 'token_type'], {'test_size': '(0.3)', 'random_state': '(42)'}), '(token_posts, token_type, test_size=0.3, random_state=42)\n', (2469, 2526), False, 'from sklearn.model_selection import train_test_split\n'), ((2597, 2670), 'sklearn.model_selection.train_test_split', 'train_test_split', (['clean_posts', 'clean_type'], {'test_size': '(0.3)', 'random_state': '(42)'}), '(clean_posts, clean_type, test_size=0.3, random_state=42)\n', (2613, 2670), False, 'from sklearn.model_selection import train_test_split\n'), ((920, 931), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (928, 931), False, 'import sys, os\n'), ((1891, 1902), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1899, 1902), False, 'import sys, os\n')] |
# -*- coding: utf-8 -*-
import os
import time
import tensorflow as tf
from config import FLAGS
from model import build_graph
from preprocess import train_data_iterator, test_data_helper
def train():
with tf.Session() as sess:
# initialization
graph = build_graph(top_k=1)
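        # top_k=1: evaluation presumably counts only exact top-1 predictions as correct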
saver = tf.train.Saver()
sess.run(tf.global_variables_initializer())
# multi thread
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
# log writer
train_writer = tf.summary.FileWriter(FLAGS.log_dir + '/train', sess.graph)
test_writer = tf.summary.FileWriter(FLAGS.log_dir + '/val')
# restore model
if FLAGS.restore:
ckpt = tf.train.latest_checkpoint(FLAGS.checkpoint_dir)
if ckpt:
saver.restore(sess, ckpt)
print("restore from the checkpoint {}".format(ckpt))
# training begins
try:
while not coord.should_stop():
for step, (x_batch, y_batch) in enumerate(train_data_iterator()):
start_time = time.time()
feed_dict = {graph['images']: x_batch,
graph['labels']: y_batch,
graph['keep_prob']: 0.8,
graph['is_training']: True}
train_opts = [graph['train_op'], graph['loss'], graph['merged_summary_op'], graph['global_step']]
_, loss_val, train_summary, step = sess.run(train_opts, feed_dict=feed_dict)
train_writer.add_summary(train_summary, step)
end_time = time.time()
print("the step {0} takes {1} loss {2}".format(step, end_time - start_time, loss_val))
# eval stage
if step % FLAGS.eval_steps == 0:
x_batch_test, y_batch_test = test_data_helper(128)
feed_dict = {graph['images']: x_batch_test,
graph['labels']: y_batch_test,
graph['keep_prob']: 1.0,
graph['is_training']: False}
test_opts = [graph['accuracy'], graph['merged_summary_op']]
accuracy_test, test_summary = sess.run(test_opts, feed_dict=feed_dict)
test_writer.add_summary(test_summary, step)
print('===============Eval a batch=======================')
print('the step {0} test accuracy: {1}'.format(step, accuracy_test))
print('===============Eval a batch=======================')
# save stage
if step % FLAGS.save_steps == 0 and step > FLAGS.min_save_steps:
saver.save(sess, os.path.join(FLAGS.checkpoint_dir, FLAGS.model_name), global_step=graph['global_step'])
except tf.errors.OutOfRangeError:
print('==================Train Finished================')
saver.save(sess, os.path.join(FLAGS.checkpoint_dir, FLAGS.model_name), global_step=graph['global_step'])
finally:
coord.request_stop()
coord.join(threads)
if __name__ == '__main__':
train() | [
"preprocess.train_data_iterator",
"tensorflow.train.Coordinator",
"tensorflow.train.Saver",
"model.build_graph",
"tensorflow.global_variables_initializer",
"tensorflow.Session",
"time.time",
"tensorflow.train.start_queue_runners",
"tensorflow.summary.FileWriter",
"tensorflow.train.latest_checkpoint",
"preprocess.test_data_helper",
"os.path.join"
] | [((212, 224), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (222, 224), True, 'import tensorflow as tf\n'), ((275, 295), 'model.build_graph', 'build_graph', ([], {'top_k': '(1)'}), '(top_k=1)\n', (286, 295), False, 'from model import build_graph\n'), ((312, 328), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (326, 328), True, 'import tensorflow as tf\n'), ((421, 443), 'tensorflow.train.Coordinator', 'tf.train.Coordinator', ([], {}), '()\n', (441, 443), True, 'import tensorflow as tf\n'), ((462, 514), 'tensorflow.train.start_queue_runners', 'tf.train.start_queue_runners', ([], {'sess': 'sess', 'coord': 'coord'}), '(sess=sess, coord=coord)\n', (490, 514), True, 'import tensorflow as tf\n'), ((560, 619), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', (["(FLAGS.log_dir + '/train')", 'sess.graph'], {}), "(FLAGS.log_dir + '/train', sess.graph)\n", (581, 619), True, 'import tensorflow as tf\n'), ((642, 687), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', (["(FLAGS.log_dir + '/val')"], {}), "(FLAGS.log_dir + '/val')\n", (663, 687), True, 'import tensorflow as tf\n'), ((346, 379), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (377, 379), True, 'import tensorflow as tf\n'), ((758, 806), 'tensorflow.train.latest_checkpoint', 'tf.train.latest_checkpoint', (['FLAGS.checkpoint_dir'], {}), '(FLAGS.checkpoint_dir)\n', (784, 806), True, 'import tensorflow as tf\n'), ((1080, 1101), 'preprocess.train_data_iterator', 'train_data_iterator', ([], {}), '()\n', (1099, 1101), False, 'from preprocess import train_data_iterator, test_data_helper\n'), ((1137, 1148), 'time.time', 'time.time', ([], {}), '()\n', (1146, 1148), False, 'import time\n'), ((1699, 1710), 'time.time', 'time.time', ([], {}), '()\n', (1708, 1710), False, 'import time\n'), ((3140, 3192), 'os.path.join', 'os.path.join', (['FLAGS.checkpoint_dir', 'FLAGS.model_name'], {}), '(FLAGS.checkpoint_dir, FLAGS.model_name)\n', (3152, 3192), False, 'import os\n'), ((1957, 1978), 'preprocess.test_data_helper', 'test_data_helper', (['(128)'], {}), '(128)\n', (1973, 1978), False, 'from preprocess import train_data_iterator, test_data_helper\n'), ((2910, 2962), 'os.path.join', 'os.path.join', (['FLAGS.checkpoint_dir', 'FLAGS.model_name'], {}), '(FLAGS.checkpoint_dir, FLAGS.model_name)\n', (2922, 2962), False, 'import os\n')] |
#!/usr/bin/env python3
import bitstring
from compression import huffman
simple = b"122333"
simple_codes = {
"1": "11",
"2": "10",
"3": "0"
}
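# "3" is the most frequent symbol in b"122333" (3 of 6), so it gets the 1-bit code.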
simple_tree = bitstring.Bits("0b00100110001100110010100110011")
simple_compressed = bitstring.Bits("0x498cca67d0")
lorem = (b"Lorem ipsum dolor sit amet, consectetur adipisicing "
b"elit, sed do eiusmod tempor incididunt ut labore et dolore magna "
b"aliqua. Ut enim ad minim veniam, quis nostrud exercitation "
b"ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis "
b"aute irure dolor in reprehenderit in voluptate velit esse cillum "
b"dolore eu fugiat nulla pariatur. Excepteur sint occaecat "
b"cupidatat non proident, sunt in culpa qui officia deserunt "
b"mollit anim id est laborum.")
lorem_codes = {
" ": "001",
",": "1001000",
".": "111111",
"D": "100101101",
"E": "100101100",
"L": "11111011",
"U": "11111010",
"a": "0111",
"b": "1111100",
"c": "01001",
"d": "00011",
"e": "0000",
"f": "1001101",
"g": "1001100",
"h": "10010111",
"i": "110",
"l": "1110",
"m": "01000",
"n": "1010",
"o": "0110",
"p": "11110",
"q": "100111",
"r": "1011",
"s": "00010",
"t": "0101",
"u": "1000",
"v": "1001010",
"x": "1001001"
}
lorem_tree = bitstring.Bits("0x025c532ab62b85b2d25cadc2e2b359c5a144a2dd97"
"8965d4586deba2c76d480b25cec, 0b101")
lorem_compressed = bitstring.Bits("0x0204b8a6556c570b65a4b95b85c566b38b42"
"8945bb2f12cba8b0dbd7458eda90164b9d97edac10778508236e6b22ca5d00b20a5a8"
"4095058b2e3dec2c9d530876590440323621a04861950479acea4e1e1c529853cff1a"
"c08291b7358143cca72fda787fcfd290ac82e328d59065056744833c611a612dc0c84"
"90b4e575cd463b9d0963cff1af08d61630a5fb4f1bc42490729642186c52d4209e1d7"
"f32d8c22f0a075c5811b7359d46c3d612e1430bca75194dd1e57503283b2901080a77"
"74208db9ac08419b1333a9a8ee73e7bceb17f996494879422c8b52964a5c12ea531ec"
"3757534d47d6d8614b208a294ea298ef399e3169b37273918082e294a1bbb297ac838"
"6404a79fe35c23f")
def test_tree_from_data():
tree = huffman.Node.from_data(simple)
codes = {chr(symbol): code.unpack("bin")[0]
for symbol, code in tree.codes().items()}
assert(codes == simple_codes)
tree = huffman.Node.from_data(lorem)
codes = {chr(symbol): code.unpack("bin")[0]
for symbol, code in tree.codes().items()}
assert(codes == lorem_codes)
def test_tree_from_binary():
tree = huffman.Node.from_binary(simple_tree)
codes = {chr(symbol): code.unpack("bin")[0]
for symbol, code in tree.codes().items()}
assert(codes == simple_codes)
tree = huffman.Node.from_binary(lorem_tree)
codes = {chr(symbol): code.unpack("bin")[0]
for symbol, code in tree.codes().items()}
assert(codes == lorem_codes)
def test_compression():
compressed = huffman.compress(simple)
assert(bitstring.Bits(compressed) == simple_compressed)
compressed = huffman.compress(lorem)
assert(bitstring.Bits(compressed) == lorem_compressed)
def test_decompression():
data = huffman.decompress(simple_compressed)
assert(data == simple)
data = huffman.decompress(lorem_compressed)
assert(data == lorem)
def test_both():
compressed = huffman.compress(simple)
data = huffman.decompress(compressed)
assert(data == simple)
compressed = huffman.compress(lorem)
data = huffman.decompress(compressed)
assert(data == lorem)
| [
"compression.huffman.Node.from_data",
"compression.huffman.decompress",
"compression.huffman.Node.from_binary",
"bitstring.Bits",
"compression.huffman.compress"
] | [((170, 219), 'bitstring.Bits', 'bitstring.Bits', (['"""0b00100110001100110010100110011"""'], {}), "('0b00100110001100110010100110011')\n", (184, 219), False, 'import bitstring\n'), ((240, 270), 'bitstring.Bits', 'bitstring.Bits', (['"""0x498cca67d0"""'], {}), "('0x498cca67d0')\n", (254, 270), False, 'import bitstring\n'), ((1341, 1447), 'bitstring.Bits', 'bitstring.Bits', (['"""0x025c532ab62b85b2d25cadc2e2b359c5a144a2dd978965d4586deba2c76d480b25cec, 0b101"""'], {}), "(\n '0x025c532ab62b85b2d25cadc2e2b359c5a144a2dd978965d4586deba2c76d480b25cec, 0b101'\n )\n", (1355, 1447), False, 'import bitstring\n'), ((1464, 2028), 'bitstring.Bits', 'bitstring.Bits', (['"""0x0204b8a6556c570b65a4b95b85c566b38b428945bb2f12cba8b0dbd7458eda90164b9d97edac10778508236e6b22ca5d00b20a5a84095058b2e3dec2c9d530876590440323621a04861950479acea4e1e1c529853cff1ac08291b7358143cca72fda787fcfd290ac82e328d59065056744833c611a612dc0c8490b4e575cd463b9d0963cff1af08d61630a5fb4f1bc42490729642186c52d4209e1d7f32d8c22f0a075c5811b7359d46c3d612e1430bca75194dd1e57503283b2901080a7774208db9ac08419b1333a9a8ee73e7bceb17f996494879422c8b52964a5c12ea531ec3757534d47d6d8614b208a294ea298ef399e3169b37273918082e294a1bbb297ac8386404a79fe35c23f"""'], {}), "(\n '0x0204b8a6556c570b65a4b95b85c566b38b428945bb2f12cba8b0dbd7458eda90164b9d97edac10778508236e6b22ca5d00b20a5a84095058b2e3dec2c9d530876590440323621a04861950479acea4e1e1c529853cff1ac08291b7358143cca72fda787fcfd290ac82e328d59065056744833c611a612dc0c8490b4e575cd463b9d0963cff1af08d61630a5fb4f1bc42490729642186c52d4209e1d7f32d8c22f0a075c5811b7359d46c3d612e1430bca75194dd1e57503283b2901080a7774208db9ac08419b1333a9a8ee73e7bceb17f996494879422c8b52964a5c12ea531ec3757534d47d6d8614b208a294ea298ef399e3169b37273918082e294a1bbb297ac8386404a79fe35c23f'\n )\n", (1478, 2028), False, 'import bitstring\n'), ((2115, 2145), 'compression.huffman.Node.from_data', 'huffman.Node.from_data', (['simple'], {}), '(simple)\n', (2137, 2145), False, 'from compression import huffman\n'), ((2295, 2324), 'compression.huffman.Node.from_data', 'huffman.Node.from_data', (['lorem'], {}), '(lorem)\n', (2317, 2324), False, 'from compression import huffman\n'), ((2502, 2539), 'compression.huffman.Node.from_binary', 'huffman.Node.from_binary', (['simple_tree'], {}), '(simple_tree)\n', (2526, 2539), False, 'from compression import huffman\n'), ((2689, 2725), 'compression.huffman.Node.from_binary', 'huffman.Node.from_binary', (['lorem_tree'], {}), '(lorem_tree)\n', (2713, 2725), False, 'from compression import huffman\n'), ((2904, 2928), 'compression.huffman.compress', 'huffman.compress', (['simple'], {}), '(simple)\n', (2920, 2928), False, 'from compression import huffman\n'), ((3007, 3030), 'compression.huffman.compress', 'huffman.compress', (['lorem'], {}), '(lorem)\n', (3023, 3030), False, 'from compression import huffman\n'), ((3128, 3165), 'compression.huffman.decompress', 'huffman.decompress', (['simple_compressed'], {}), '(simple_compressed)\n', (3146, 3165), False, 'from compression import huffman\n'), ((3205, 3241), 'compression.huffman.decompress', 'huffman.decompress', (['lorem_compressed'], {}), '(lorem_compressed)\n', (3223, 3241), False, 'from compression import huffman\n'), ((3303, 3327), 'compression.huffman.compress', 'huffman.compress', (['simple'], {}), '(simple)\n', (3319, 3327), False, 'from compression import huffman\n'), ((3339, 3369), 'compression.huffman.decompress', 'huffman.decompress', (['compressed'], {}), '(compressed)\n', (3357, 3369), False, 'from compression import huffman\n'), ((3415, 3438), 
'compression.huffman.compress', 'huffman.compress', (['lorem'], {}), '(lorem)\n', (3431, 3438), False, 'from compression import huffman\n'), ((3450, 3480), 'compression.huffman.decompress', 'huffman.decompress', (['compressed'], {}), '(compressed)\n', (3468, 3480), False, 'from compression import huffman\n'), ((2940, 2966), 'bitstring.Bits', 'bitstring.Bits', (['compressed'], {}), '(compressed)\n', (2954, 2966), False, 'import bitstring\n'), ((3042, 3068), 'bitstring.Bits', 'bitstring.Bits', (['compressed'], {}), '(compressed)\n', (3056, 3068), False, 'import bitstring\n')] |
from deep_rl import register_trainer
from deep_rl.core import AbstractTrainer
from deep_rl.core import MetricContext
from deep_rl.configuration import configuration
from environments.gym_house.goal import GoalImageCache
import os
import torch
import numpy as np
from torch.utils.data import Dataset,DataLoader
from models import AuxiliaryBigGoalHouseModel as Model
from torch.optim import Adam
import torch.nn.functional as F
from experiments.ai2_auxiliary.trainer import compute_auxiliary_target
class HouseDataset(Dataset):
def __init__(self, deconv_cell_size, transform = None):
self.image_cache = GoalImageCache((174,174), configuration.get('house3d.dataset_path'))
self.images = list(self.image_cache.all_image_paths(['rgb','depth','segmentation']))
self.transform = transform
self.deconv_cell_size = deconv_cell_size
def __len__(self):
return len(self.images)
def __getitem__(self, index):
image, depth, segmentation = self.images[index]
image = self.image_cache.read_image(image)
depth = self.image_cache.read_image(depth)
segmentation = self.image_cache.read_image(segmentation)
image = torch.from_numpy(np.transpose(image.astype(np.float32), [2,0,1]) / 255.0).unsqueeze(0)
depth = torch.from_numpy(np.transpose(depth[:,:,:1].astype(np.float32), [2,0,1]) / 255.0).unsqueeze(0)
segmentation = torch.from_numpy(np.transpose(segmentation.astype(np.float32), [2,0,1]) / 255.0).unsqueeze(0)
segmentation = compute_auxiliary_target(segmentation.unsqueeze(0), self.deconv_cell_size, (42, 42)).squeeze(0)
depth = compute_auxiliary_target(depth.unsqueeze(0), self.deconv_cell_size, (42, 42)).squeeze(0)
ret = (image, depth, segmentation)
if self.transform:
ret = self.transform(ret)
return ret
@register_trainer(save = True, saving_period = 1)
class SupervisedTrained(AbstractTrainer):
def __init__(self, name, **kwargs):
super().__init__(dict(), dict())
self.name = name
self.batch_size = 32
self.main_device = torch.device('cuda')
def optimize(self, image, depth, segmentation):
image = image.to(self.main_device)
depth = depth.to(self.main_device)
segmentation = segmentation.to(self.main_device)
zeros1 = torch.rand((image.size()[0], 1, 3,174,174), dtype = torch.float32, device = self.main_device)
zeros2 = torch.rand((image.size()[0], 1, 3,174,174), dtype = torch.float32, device = self.main_device)
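        # Despite the "zeros" names these are uniform-random tensors, used as
        # dummy inputs for whichever observation/goal channel is unused per pass.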
(r_depth, r_segmentation, _), _ = self.model.forward_deconv(((image, zeros1), None), None, None)
(_, _, r_goal_segmentation), _ = self.model.forward_deconv(((zeros2, image), None,), None, None)
loss = F.mse_loss(r_depth, depth) + F.mse_loss(r_segmentation, segmentation) + F.mse_loss(r_goal_segmentation, segmentation)
loss = loss / 3.0
self.optimizer.zero_grad()
loss.backward()
self.optimizer.step()
return loss.item()
def save(self, path):
super().save(path)
torch.save(self.model.state_dict(), os.path.join(path, 'weights.pth'))
print('Saving to %s' % os.path.join(path, 'weights.pth'))
def process(self, mode = 'train', **kwargs):
assert mode == 'train'
# Single epoch
metric_context = MetricContext()
dataloader = DataLoader(self.dataset, batch_size=self.batch_size,shuffle=True, num_workers=4)
total_loss = 0
total_updates = 0
for i, item in enumerate(dataloader):
loss = self.optimize(*item)
print('loss is %s' % loss)
total_loss += loss
total_updates += 1
print('Epoch done with loss=%s' % (total_loss / total_updates))
return (1, (1, 1), metric_context)
def create_dataset(self, deconv_cell_size):
return HouseDataset(deconv_cell_size)
def _initialize(self):
model = Model(3, 6).to(self.main_device)
model_path = os.path.join(configuration.get('models_path'),'chouse-auxiliary4-supervised', 'weights.pth')
if os.path.isfile(model_path):
print('Loading %s' % model_path)
model.load_state_dict(torch.load(model_path))
self.dataset = self.create_dataset(model.deconv_cell_size)
self.optimizer = Adam(model.parameters())
return model
def run(self, process, **kwargs):
self.model = self._initialize()
for i in range(30):
print('Starting epoch %s' % (i + 1))
process()
def default_args():
return dict() | [
"torch.utils.data.DataLoader",
"torch.load",
"torch.nn.functional.mse_loss",
"models.AuxiliaryBigGoalHouseModel",
"deep_rl.configuration.configuration.get",
"os.path.isfile",
"deep_rl.core.MetricContext",
"torch.device",
"deep_rl.register_trainer",
"os.path.join"
] | [((1861, 1905), 'deep_rl.register_trainer', 'register_trainer', ([], {'save': '(True)', 'saving_period': '(1)'}), '(save=True, saving_period=1)\n', (1877, 1905), False, 'from deep_rl import register_trainer\n'), ((2114, 2134), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (2126, 2134), False, 'import torch\n'), ((3368, 3383), 'deep_rl.core.MetricContext', 'MetricContext', ([], {}), '()\n', (3381, 3383), False, 'from deep_rl.core import MetricContext\n'), ((3405, 3490), 'torch.utils.data.DataLoader', 'DataLoader', (['self.dataset'], {'batch_size': 'self.batch_size', 'shuffle': '(True)', 'num_workers': '(4)'}), '(self.dataset, batch_size=self.batch_size, shuffle=True,\n num_workers=4)\n', (3415, 3490), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((4152, 4178), 'os.path.isfile', 'os.path.isfile', (['model_path'], {}), '(model_path)\n', (4166, 4178), False, 'import os\n'), ((640, 681), 'deep_rl.configuration.configuration.get', 'configuration.get', (['"""house3d.dataset_path"""'], {}), "('house3d.dataset_path')\n", (657, 681), False, 'from deep_rl.configuration import configuration\n'), ((2850, 2895), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['r_goal_segmentation', 'segmentation'], {}), '(r_goal_segmentation, segmentation)\n', (2860, 2895), True, 'import torch.nn.functional as F\n'), ((3137, 3170), 'os.path.join', 'os.path.join', (['path', '"""weights.pth"""'], {}), "(path, 'weights.pth')\n", (3149, 3170), False, 'import os\n'), ((4052, 4084), 'deep_rl.configuration.configuration.get', 'configuration.get', (['"""models_path"""'], {}), "('models_path')\n", (4069, 4084), False, 'from deep_rl.configuration import configuration\n'), ((2778, 2804), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['r_depth', 'depth'], {}), '(r_depth, depth)\n', (2788, 2804), True, 'import torch.nn.functional as F\n'), ((2807, 2847), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['r_segmentation', 'segmentation'], {}), '(r_segmentation, segmentation)\n', (2817, 2847), True, 'import torch.nn.functional as F\n'), ((3203, 3236), 'os.path.join', 'os.path.join', (['path', '"""weights.pth"""'], {}), "(path, 'weights.pth')\n", (3215, 3236), False, 'import os\n'), ((3985, 3996), 'models.AuxiliaryBigGoalHouseModel', 'Model', (['(3)', '(6)'], {}), '(3, 6)\n', (3990, 3996), True, 'from models import AuxiliaryBigGoalHouseModel as Model\n'), ((4259, 4281), 'torch.load', 'torch.load', (['model_path'], {}), '(model_path)\n', (4269, 4281), False, 'import torch\n')] |
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash_extensions import Download
def create_actions_tab(projname, vername):
return [
dbc.Row(
dbc.Col(html.H2("Actions")),
),
dbc.Row(
dbc.Col(html.H4("Project: - Version: - "),
id='actions_projver'),
),
dbc.Row(
[
dbc.Col(
dbc.Card(
[
dbc.CardHeader("Export SPDX JSON file",
style={'classname': 'card-title'},
id='spdxtitle'),
dbc.CardBody(
[
dcc.Interval(
id='spdx_interval',
disabled=True,
interval=1 * 6000, # in milliseconds
n_intervals=0,
max_intervals=400
),
dbc.Form(
[
dbc.FormGroup(
[
dbc.Label("Filename", className="mr-2"),
dbc.Input(type="text",
id="spdx_file",
placeholder="Enter output SPDX file"),
],
className="mr-3",
),
dbc.FormGroup(
[
dbc.Checklist(
id="spdx_recursive",
options=[
{"label": "Recursive (Projects in Projects)",
"value": 1},
],
value=[],
switch=True,
)
],
className="mr-3",
),
dbc.Button("Export SPDX",
id="buttons_export_spdx",
color="primary"),
],
# inline=True,
),
html.Div('', id='spdx_status'),
dbc.Collapse(
[
dbc.Button("Download SPDX",
id="button_download_spdx",
color="primary"),
Download(id="download_spdx"),
],
id="spdx_collapse",
is_open=False,
),
],
),
# dbc.CardFooter(dbc.CardLink('Project Version link', href=projlink)),
], id="spdxcard",
),
width=4,
),
dbc.Col(
dbc.Card(
[
dbc.CardHeader("Ignore CVEs with BDSA Mismatch",
style={'classname': 'card-title'},
id='fixcvestitle'),
dbc.CardBody(
[
dcc.Interval(
id='fixcves_interval',
disabled=True,
interval=1 * 6000, # in milliseconds
n_intervals=0,
max_intervals=400
),
dbc.Form(
[
dbc.Button("Ignore CVEs with Mismatched BDSA Versions",
id="buttons_fixcves",
color="primary"),
],
# inline=True,
),
html.Div('', id='fixcves_status'),
],
),
# dbc.CardFooter(dbc.CardLink('Project Version link', href=projlink)),
], id="fixcvescard",
),
width=4,
),
],
)
]
def patch_cves(bd, projverurl, vuln_list, vulns):
# vulnerable_components_url = hub.get_link(version, "vulnerable-components") + "?limit=9999"
# custom_headers = {'Accept':'application/vnd.blackducksoftware.bill-of-materials-6+json'}
# response = hub.execute_get(vulnerable_components_url, custom_headers=custom_headers)
# vulnerable_bom_components = response.json().get('items', [])
active_statuses = ["NEW", "NEEDS_REVIEW", "REMEDIATION_REQUIRED"]
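    # Only vulnerabilities still in one of these open states will be transitioned.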
status = "IGNORED"
comment = "Ignored as linked BDSA has component version as fixed"
print("Processing vulnerabilities ...")
ignoredcount = 0
alreadyignoredcount = 0
try:
for vuln in vulns:
vuln_name = vuln['vulnerabilityWithRemediation']['vulnerabilityName']
if vuln_name in vuln_list:
if vuln['vulnerabilityWithRemediation']['remediationStatus'] in active_statuses:
vuln['remediationStatus'] = status
vuln['remediationComment'] = comment
# result = hub.execute_put(vuln['_meta']['href'], data=vuln)
r = bd.session.put(vuln['_meta']['href'], json=vuln)
if r.status_code == 202:
ignoredcount += 1
print("{}: marked ignored".format(vuln_name))
else:
print("{}: Unable to change status".format(vuln_name))
else:
print(vuln_name + ": has BDSA which disgrees on version applicability but not active - no action")
alreadyignoredcount += 1
else:
print(vuln_name + ": No action")
except Exception as e:
print("ERROR: Unable to update vulnerabilities via API\n" + str(e))
return 0
print("- {} CVEs already inactive".format(alreadyignoredcount))
print("- {} CVEs newly marked as ignored".format(ignoredcount))
return ignoredcount
def check_cves(bd, projverurl, comps, vulns):
cve_list = []
num = 0
total = 0
for comp in comps:
# print(comp)
if 'componentVersionName' not in comp:
continue
print("- " + comp['componentName'] + '/' + comp['componentVersionName'])
for x in comp['_meta']['links']:
if x['rel'] == 'vulnerabilities':
# custom_headers = {'Accept': 'application/vnd.blackducksoftware.vulnerability-4+json'}
# response = hub.execute_get(x['href'] + "?limit=9999", custom_headers=custom_headers)
# vulns = response.json().get('items', [])
cvulns = bd.get_json(x['href'] + "?limit=3000")
for vuln in cvulns['items']:
total += 1
if vuln['source'] == 'NVD':
for y in vuln['_meta']['links']:
if y['rel'] == 'related-vulnerabilities':
if y['label'] == 'BDSA':
# print("{} has BDSA which disagrees with component version - potential false
# positive".format(vuln['name']))
if vuln['name'] not in cve_list:
cve_list.append(vuln['name'])
num += 1
print("Found {} total vulnerabilities".format(total))
print("Found {} CVEs with associated BDSAs but which do not agree on affected component version\n".format(num))
ret = patch_cves(bd, projverurl, cve_list, vulns)
return ret
| [
"dash_bootstrap_components.Label",
"dash_bootstrap_components.Input",
"dash_html_components.H2",
"dash_extensions.Download",
"dash_html_components.Div",
"dash_bootstrap_components.CardHeader",
"dash_bootstrap_components.Button",
"dash_html_components.H4",
"dash_bootstrap_components.Checklist",
"dash_core_components.Interval"
] | [((243, 261), 'dash_html_components.H2', 'html.H2', (['"""Actions"""'], {}), "('Actions')\n", (250, 261), True, 'import dash_html_components as html\n'), ((312, 345), 'dash_html_components.H4', 'html.H4', (['"""Project: - Version: - """'], {}), "('Project: - Version: - ')\n", (319, 345), True, 'import dash_html_components as html\n'), ((541, 635), 'dash_bootstrap_components.CardHeader', 'dbc.CardHeader', (['"""Export SPDX JSON file"""'], {'style': "{'classname': 'card-title'}", 'id': '"""spdxtitle"""'}), "('Export SPDX JSON file', style={'classname': 'card-title'},\n id='spdxtitle')\n", (555, 635), True, 'import dash_bootstrap_components as dbc\n'), ((4215, 4321), 'dash_bootstrap_components.CardHeader', 'dbc.CardHeader', (['"""Ignore CVEs with BDSA Mismatch"""'], {'style': "{'classname': 'card-title'}", 'id': '"""fixcvestitle"""'}), "('Ignore CVEs with BDSA Mismatch', style={'classname':\n 'card-title'}, id='fixcvestitle')\n", (4229, 4321), True, 'import dash_bootstrap_components as dbc\n'), ((831, 935), 'dash_core_components.Interval', 'dcc.Interval', ([], {'id': '"""spdx_interval"""', 'disabled': '(True)', 'interval': '(1 * 6000)', 'n_intervals': '(0)', 'max_intervals': '(400)'}), "(id='spdx_interval', disabled=True, interval=1 * 6000,\n n_intervals=0, max_intervals=400)\n", (843, 935), True, 'import dash_core_components as dcc\n'), ((3206, 3236), 'dash_html_components.Div', 'html.Div', (['""""""'], {'id': '"""spdx_status"""'}), "('', id='spdx_status')\n", (3214, 3236), True, 'import dash_html_components as html\n'), ((4517, 4624), 'dash_core_components.Interval', 'dcc.Interval', ([], {'id': '"""fixcves_interval"""', 'disabled': '(True)', 'interval': '(1 * 6000)', 'n_intervals': '(0)', 'max_intervals': '(400)'}), "(id='fixcves_interval', disabled=True, interval=1 * 6000,\n n_intervals=0, max_intervals=400)\n", (4529, 4624), True, 'import dash_core_components as dcc\n'), ((5390, 5423), 'dash_html_components.Div', 'html.Div', (['""""""'], {'id': '"""fixcves_status"""'}), "('', id='fixcves_status')\n", (5398, 5423), True, 'import dash_html_components as html\n'), ((2853, 2921), 'dash_bootstrap_components.Button', 'dbc.Button', (['"""Export SPDX"""'], {'id': '"""buttons_export_spdx"""', 'color': '"""primary"""'}), "('Export SPDX', id='buttons_export_spdx', color='primary')\n", (2863, 2921), True, 'import dash_bootstrap_components as dbc\n'), ((3374, 3445), 'dash_bootstrap_components.Button', 'dbc.Button', (['"""Download SPDX"""'], {'id': '"""button_download_spdx"""', 'color': '"""primary"""'}), "('Download SPDX', id='button_download_spdx', color='primary')\n", (3384, 3445), True, 'import dash_bootstrap_components as dbc\n'), ((3601, 3629), 'dash_extensions.Download', 'Download', ([], {'id': '"""download_spdx"""'}), "(id='download_spdx')\n", (3609, 3629), False, 'from dash_extensions import Download\n'), ((5011, 5110), 'dash_bootstrap_components.Button', 'dbc.Button', (['"""Ignore CVEs with Mismatched BDSA Versions"""'], {'id': '"""buttons_fixcves"""', 'color': '"""primary"""'}), "('Ignore CVEs with Mismatched BDSA Versions', id=\n 'buttons_fixcves', color='primary')\n", (5021, 5110), True, 'import dash_bootstrap_components as dbc\n'), ((1439, 1478), 'dash_bootstrap_components.Label', 'dbc.Label', (['"""Filename"""'], {'className': '"""mr-2"""'}), "('Filename', className='mr-2')\n", (1448, 1478), True, 'import dash_bootstrap_components as dbc\n'), ((1532, 1608), 'dash_bootstrap_components.Input', 'dbc.Input', ([], {'type': '"""text"""', 'id': '"""spdx_file"""', 'placeholder': '"""Enter 
output SPDX file"""'}), "(type='text', id='spdx_file', placeholder='Enter output SPDX file')\n", (1541, 1608), True, 'import dash_bootstrap_components as dbc\n'), ((2059, 2189), 'dash_bootstrap_components.Checklist', 'dbc.Checklist', ([], {'id': '"""spdx_recursive"""', 'options': "[{'label': 'Recursive (Projects in Projects)', 'value': 1}]", 'value': '[]', 'switch': '(True)'}), "(id='spdx_recursive', options=[{'label':\n 'Recursive (Projects in Projects)', 'value': 1}], value=[], switch=True)\n", (2072, 2189), True, 'import dash_bootstrap_components as dbc\n')] |
from view.runnable.Runnable import Runnable
class showMultiPlayerBoard(Runnable):
    def __init__(self, vue, sec):
        Runnable.__init__(self, vue)
self.sec = sec
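    # A minimal usage sketch (the `vue` object and its API are assumptions from
    # context, not shown in this file):
    #   showMultiPlayerBoard(vue, sec=30).run()  # delegates to vue.showMultiPlayerBoard(30)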
def run(self):
self.vue.showMultiPlayerBoard(self.sec) | [
"view.runnable.Runnable.Runnable.__init__"
] | [((125, 153), 'view.runnable.Runnable.Runnable.__init__', 'Runnable.__init__', (['self', 'vue'], {}), '(self, vue)\n', (142, 153), False, 'from view.runnable.Runnable import Runnable\n')] |
# coding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import requests
import time
import warnings
from .auth import HMACAuth
from .compat import imap
from .compat import quote
from .compat import urljoin
from .compat import urlencode
from .error import build_api_error
from .model import APIObject
from .model import new_api_object
from .socket import Socket
from .util import check_uri_security
from .util import encode_params
class Client(object):
BASE_API_URI = 'https://api.cryptomkt.com/'
API_VERSION = 'v2'
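    # A minimal usage sketch (hypothetical credentials; real calls hit the network):
    #   client = Client('my_key', 'my_secret')
    #   markets = client.get_markets()
    #   ticker = client.get_ticker(market='ETHCLP')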
def __init__(self, api_key, api_secret, base_api_uri=None, api_version=None, debug=False):
if not api_key:
raise ValueError('Missing `api_key`.')
if not api_secret:
raise ValueError('Missing `api_secret`.')
# Allow passing in a different API base.
self.BASE_API_URI = check_uri_security(base_api_uri or self.BASE_API_URI)
self.API_VERSION = api_version or self.API_VERSION
self.socket = None
# Set up a requests session for interacting with the API.
self.session = self._build_session(HMACAuth, api_key, api_secret, self.API_VERSION)
# a container for the socket if needed.
self.socket = None
def _build_session(self, auth_class, *args, **kwargs):
"""Internal helper for creating a requests `session` with the correct
authentication handling."""
session = requests.session()
session.auth = auth_class(*args, **kwargs)
# session.headers.update({'Content-type': 'application/json'})
return session
def _create_api_uri(self, *parts, **kwargs):
"""Internal helper for creating fully qualified endpoint URIs."""
params = kwargs.get("params", None)
if params and isinstance(params, dict):
url = urljoin(self.BASE_API_URI, '/'.join(imap(quote, parts)) + '?%s' % urlencode(params))
else:
url = urljoin(self.BASE_API_URI, '/'.join(imap(quote, parts)))
return url
def _request(self, method, *relative_path_parts, **kwargs):
"""Internal helper for creating HTTP requests to the CryptoMarket API.
Raises an APIError if the response is not 20X. Otherwise, returns the response object. Not intended for direct use by API consumers.
"""
uri = self._create_api_uri(*relative_path_parts, **kwargs)
data = kwargs.get("data", None)
if data and isinstance(data, dict):
kwargs['data'] = data
response = getattr(self.session, method)(uri, **kwargs)
return self._handle_response(response)
def _handle_response(self, response):
"""Internal helper for handling API responses from the CryptoMarket server.
Raises the appropriate exceptions when necessary; otherwise, returns the
response.
"""
if not str(response.status_code).startswith('2'):
raise build_api_error(response)
return response
def _get(self, *args, **kwargs):
return self._request('get', *args, **kwargs)
def _post(self, *args, **kwargs):
return self._request('post', *args, **kwargs)
def _make_api_object(self, response, model_type=None):
blob = response.json()
data = blob.get('data', None)
# All valid responses have a "data" key.
if data is None:
raise build_api_error(response, blob)
# Warn the user about each warning that was returned.
warnings_data = blob.get('warnings', None)
for warning_blob in warnings_data or []:
message = "%s (%s)" % (
warning_blob.get('message', ''),
warning_blob.get('url', ''))
warnings.warn(message, UserWarning)
pagination = blob.get('pagination', None)
kwargs = {
'response': response,
'pagination': pagination and new_api_object(None, pagination, APIObject),
'warnings': warnings_data and new_api_object(None, warnings_data, APIObject),
}
if isinstance(data, dict):
obj = new_api_object(self, data, model_type, **kwargs)
else:
obj = APIObject(self, **kwargs)
obj.data = new_api_object(self, data, model_type)
return obj
# Public API
# -----------------------------------------------------------
def get_markets(self):
"""Returns a list of the marketpairs as strings available in Cryptomkt
as the "data" member of a dict.
"""
response = self._get(self.API_VERSION, 'market')
return self._make_api_object(response, APIObject)
def get_ticker(self, market=None):
"""Returns a general view of the market state as a dict.
Shows the actual bid and ask, the volume and price, and the low and high of the market.
Stored in the "data" member of a dict.
        Does not require authentication.
Optional Arguments:
market: A market pair as string, if no market pair is provided,
the market state of all the market pairs are returned.
            e.g: 'ETHARS'.
"""
params = {}
if market:
params['market'] = market
response = self._get(self.API_VERSION, 'ticker', params=params)
return self._make_api_object(response, APIObject)
def get_book(self, market, side, page=None, limit=None):
"""Returns a list of active orders of a given side in a specified
market pair. stored in the "data" member of a dict.
Required Arguments:
market: A market pair as a string. Is the specified market to get
the book from.
e.g: 'ETHEUR'.
side: 'buy' or 'sell'.
Optional Arguments:
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
market=market,
side=side
)
if page is not None and isinstance(page, int):
params['page'] = page
if limit is not None and isinstance(limit, int):
params['limit'] = limit
response = self._get(self.API_VERSION, 'book', params=params)
return self._make_api_object(response, APIObject)
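    # Example (illustrative values; `client` as in the sketch near the top of
    # the class): first page of up to 50 active buy orders
    #   book = client.get_book('ETHCLP', 'buy', page=0, limit=50)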
def get_trades(self, market, start=None, end=None, page=None, limit=None):
"""returns a list of all trades (executed orders) of a market between
the start date, until the end date. the earlier trades first, and the
older last. stored in the "data" member of a dict
If no start date is given, returns trades since 2020-02-17.
If no end date is given, returns trades until the present moment.
Required Arguments:
market: A market pair as a string. Is the specified market to get
            the trades from.
e.g: 'ETHCLP'.
Optional Arguments:
            start: The earliest date to get trades from, inclusive.
            end: The latest date to get trades until, exclusive.
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
market=market
)
if start is not None:
params['start'] = start
if end is not None:
params['end'] = end
if page is not None:
params['page'] = page
if limit is not None:
params['limit'] = limit
response = self._get(self.API_VERSION, 'trades', params=params)
return self._make_api_object(response, APIObject)
def get_prices(self, market, timeframe, page = None, limit = None):
"""returns a list of the prices of a market (candles on the market
        prices graph), given a timeframe, the earliest prices first and the
        most recent last. The list is stored in the "data" member of a dict.
Required Arguments:
market: A market pair as a string. Is the specified market to get
            the prices from.
e.g: 'ETHCLP'.
timeframe: timelapse between every candle in minutes.
accepted values are 1, 5, 15, 60, 240, 1440 and 10080.
Optional Arguments:
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
market = market,
timeframe = timeframe
)
if page is not None:
params["page"] = page
if limit is not None:
params["limit"] = limit
response = self._get(self.API_VERSION,"prices", params = params)
return self._make_api_object(response, APIObject)
# Authenticated endpoints
#-------------------------------------------------------------------
# account
def get_account(self):
"""returns the account information of the user. Name, email, rate
and bank accounts.
"""
response = self._get(self.API_VERSION,"account")
return self._make_api_object(response,APIObject)
# orders
def get_active_orders(self, market, page=None, limit=None):
"""returns a list of the active orders of the user in a given market.
Required Arguments:
market: A market pair as a string. Is the specified market to get
            the orders from.
e.g: 'ETHCLP'.
Optional Arguments:
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
market=market
)
if page is not None:
params['page'] = page
if limit is not None:
params['limit'] = limit
response = self._get(self.API_VERSION, 'orders', 'active', params=params)
return self._make_api_object(response, APIObject)
def get_executed_orders(self, market, page=None, limit=None):
"""returns the list of the executed orders of the user on a given market.
Required Arguments:
market: A market pair as a string. Is the specified market to get
            the orders from.
e.g: 'ETHCLP'.
Optional Arguments:
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
market=market
)
if page is not None:
params['page'] = page
if limit is not None:
params['limit'] = limit
response = self._get(self.API_VERSION, 'orders', 'executed', params=params)
return self._make_api_object(response, APIObject)
def create_order(self, market, amount, price, side, type):
"""creates an orders from the specified argument.
Required Arguments:
            amount: The amount of crypto to be bought or sold.
market: A market pair as a string. Is the specified market to place the order in
e.g: 'ETHCLP'.
price: The price to ask or bid for one unit of crypto
side: 'buy' or 'sell' the crypto
type: one of the keywords 'market', 'limit', 'stop_limit'
"""
params = dict(
amount=amount,
market=market,
price=price,
side=side,
type=type,
)
response = self._post(self.API_VERSION, 'orders', 'create', data=params)
return self._make_api_object(response, APIObject)
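    # Example (illustrative values; a limit order bidding 1000000 CLP per ETH):
    #   order = client.create_order('ETHCLP', amount=0.1, price=1000000,
    #                               side='buy', type='limit')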
def create_multi_orders(self, order_list):
for order in order_list:
if ('market' not in order
or 'type' not in order
or 'side' not in order
or 'amount' not in order):
return None
params = dict(
orders=json.dumps(order_list, sort_keys=True, separators=(',',':')),
)
response = self._post(self.API_VERSION, 'orders', 'create', 'bulk', data=params)
return self._make_api_object(response, APIObject)
def get_order_status(self, id):
"""returns the present status of an order, given the order id.
Required Arguments:
id: The identification of the order.
"""
params = dict(
id=id
)
response = self._get(self.API_VERSION, 'orders', 'status', params=params)
return self._make_api_object(response, APIObject)
def cancel_order(self, id):
"""Cancel an order given its id.
Required Arguments:
id: The identification of the order.
"""
params = dict(
id=id
)
response = self._post(self.API_VERSION, 'orders', 'cancel', data=params)
return self._make_api_object(response, APIObject)
def cancel_multi_orders(self, order_list):
for order in order_list:
if 'id' not in order:
return None
params = dict(
ids=json.dumps(order_list, sort_keys=True, separators=(',',':')),
)
response = self._post(self.API_VERSION, 'orders', 'cancel', 'bulk', data=params)
return self._make_api_object(response, APIObject)
def get_instant(self,market, side, amount):
"""If side is sell, returns an estimate of the amount of fiat obtained and the amount of crypto required to obatin it.
If side is buy, returns an estimate of the amount ofOrder crypto obtained and the amount of fiat required to obtain it.
Required Arguments:
market: The market to get the estimate of the transaction.
side: 'buy' or 'sell'
amount: Is the amount of crypto to 'buy' or 'sell'
"""
rest = float(amount)
book_side = 'sell' if side == 'buy' else 'buy'
amount_required = 0.0
amount_obtained = 0.0
page = 0
n_entries = 100
while True:
book_page = self.get_book(market, book_side, page=page, limit=n_entries)
for entry in book_page['data']:
price = float(entry['price'])
amount = float(entry['amount'])
if rest < amount:
amount_obtained += rest * price
amount_required += rest
rest = 0
break
else:
amount_obtained += amount * price
amount_required += amount
rest -= amount
if rest == 0 or len(book_page['data']) < n_entries:
break
else: time.sleep(3)
page = page + 1
if book_side == 'sell':
temp = amount_required
amount_required = amount_obtained
amount_obtained = temp
instant = dict(obtained=amount_obtained, required=amount_required)
return instant
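    # Note on the loop above: it walks the opposite side of the order book in
    # pages of 100 entries, accumulating price*amount until the requested
    # amount is filled, and pauses 3 s between page fetches (the rate-limit
    # rationale for the pause is an assumption).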
#Wallet
def get_balance(self):
"""returns the balance of the user.
"""
response = self._get(self.API_VERSION, 'balance')
return self._make_api_object(response, APIObject)
def get_transactions(self, currency, page = None, limit = None):
"""return all the transactions of a currency of the user.
Required Arguments:
currency: The currency to get all the user transactions.
Optional Arguments:
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
currency = currency
)
if page is not None:
params["page"] = page
if limit is not None:
params["limit"] = limit
response = self._get(self.API_VERSION, "transactions", params=params)
return self._make_api_object(response, APIObject)
def notify_deposit(self,amount,bank_account, date= None, tracking_code = None, voucher = None):
"""Notifies a deposit from your bank account to your wallet (fiat).
Required Arguments:
amount: The amount deposited to your wallet.
bank_account: The address (id) of the bank account from which you deposited.
Extra Arguments required for Brazil and the European Union:
voucher: a file.
Extra Arguments required for Mexico:
date: The date of the deposit, in format dd/mm/yyyy.
tracking_code: The tracking code of the deposit.
voucher: a file.
"""
params = dict(
amount = amount,
bank_account = bank_account
)
if date is not None:
params["date"] = date
if tracking_code is not None:
params["tracking_code"] = tracking_code
if voucher is not None:
params["voucher"] = voucher
response = self._post(self.API_VERSION, "deposit", data = params)
return self._make_api_object(response,APIObject)
def notify_withdrawal(self, amount, bank_account):
"""Notifies a withdrawal from fiat wallet to your bank account.
Required Arguments:
amount: the amount you need to withdraw to your bank account.
bank_account: The address(id) of the bank account.
"""
params = dict(
amount = amount,
bank_account = bank_account
)
response = self._post(self.API_VERSION, "withdrawal", data = params)
return self._make_api_object(response, APIObject)
def transfer(self,address, amount, currency, memo = None):
"""transfer money between wallets.
Required Arguments:
            address: The address of the wallet to transfer money to.
amount: The amount of money to transfer into the wallet.
currency: The wallet from which to take the money.
e.g. 'ETH'
memo (optional): memo of the wallet to transfer money.
"""
params = dict(
address = address,
amount = amount,
currency = currency
)
if memo is not None:
params["memo"] = memo
response = self._post(self.API_VERSION, "transfer", data = params)
return self._make_api_object(response, APIObject)
def get_auth_socket(self):
"""returns the userid and the socket ids to permit a socket connection with cryptomkt.
"""
response = self._get("v2", "socket/auth")
return self._make_api_object(response, APIObject)
def get_socket(self, debug=False):
"""returns a socket connection with cryptomkt.
"""
if self.socket is None:
auth = self.get_auth_socket()
del auth['verify']
self.socket = Socket(auth, debug=debug)
return self.socket | [
"requests.session",
"warnings.warn",
"json.dumps",
"time.sleep"
] | [((1553, 1571), 'requests.session', 'requests.session', ([], {}), '()\n', (1569, 1571), False, 'import requests\n'), ((3852, 3887), 'warnings.warn', 'warnings.warn', (['message', 'UserWarning'], {}), '(message, UserWarning)\n', (3865, 3887), False, 'import warnings\n'), ((12120, 12181), 'json.dumps', 'json.dumps', (['order_list'], {'sort_keys': '(True)', 'separators': "(',', ':')"}), "(order_list, sort_keys=True, separators=(',', ':'))\n", (12130, 12181), False, 'import json\n'), ((13294, 13355), 'json.dumps', 'json.dumps', (['order_list'], {'sort_keys': '(True)', 'separators': "(',', ':')"}), "(order_list, sort_keys=True, separators=(',', ':'))\n", (13304, 13355), False, 'import json\n'), ((14907, 14920), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (14917, 14920), False, 'import time\n')] |
from setuptools import setup, find_packages, Extension
setup(name='aquila_stlfr',
version='1.1',
description='assembly and variant calling for stlfr and hybrid assembler for linked-reads',
author='XinZhou',
author_email='<EMAIL>',
packages=['bin',],
entry_points={'console_scripts':['Aquila_stLFR_step1=bin.Aquila_stLFR_step1:main','Aquila_step1_hybrid=bin.Aquila_step1_hybrid:main','Aquila_stLFR_step2=bin.Aquila_stLFR_step2:main','Aquila_stLFR_assembly_based_variants_call=bin.Aquila_stLFR_assembly_based_variants_call:main','Aquila_stLFR_phasing_all_variants=bin.Aquila_stLFR_phasing_all_variants:main','Aquila_stLFR_clean=bin.Aquila_stLFR_clean:main','Aquila_step0_sortbam_hybrid=bin.Aquila_step0_sortbam_hybrid:main','Aquila_stLFR_fastq_preprocess=bin.Aquila_stLFR_fastq_preprocess:main']},
zip_safe=False)
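# After installation (e.g. `pip install .`), each console_scripts entry above
# becomes a command-line tool; for instance `Aquila_stLFR_step1` maps to
# bin/Aquila_stLFR_step1.py:main (invocation shown for illustration only).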
| [
"setuptools.setup"
] | [((56, 877), 'setuptools.setup', 'setup', ([], {'name': '"""aquila_stlfr"""', 'version': '"""1.1"""', 'description': '"""assembly and variant calling for stlfr and hybrid assembler for linked-reads"""', 'author': '"""XinZhou"""', 'author_email': '"""<EMAIL>"""', 'packages': "['bin']", 'entry_points': "{'console_scripts': ['Aquila_stLFR_step1=bin.Aquila_stLFR_step1:main',\n 'Aquila_step1_hybrid=bin.Aquila_step1_hybrid:main',\n 'Aquila_stLFR_step2=bin.Aquila_stLFR_step2:main',\n 'Aquila_stLFR_assembly_based_variants_call=bin.Aquila_stLFR_assembly_based_variants_call:main'\n ,\n 'Aquila_stLFR_phasing_all_variants=bin.Aquila_stLFR_phasing_all_variants:main'\n , 'Aquila_stLFR_clean=bin.Aquila_stLFR_clean:main',\n 'Aquila_step0_sortbam_hybrid=bin.Aquila_step0_sortbam_hybrid:main',\n 'Aquila_stLFR_fastq_preprocess=bin.Aquila_stLFR_fastq_preprocess:main']}", 'zip_safe': '(False)'}), "(name='aquila_stlfr', version='1.1', description=\n 'assembly and variant calling for stlfr and hybrid assembler for linked-reads'\n , author='XinZhou', author_email='<EMAIL>', packages=['bin'],\n entry_points={'console_scripts': [\n 'Aquila_stLFR_step1=bin.Aquila_stLFR_step1:main',\n 'Aquila_step1_hybrid=bin.Aquila_step1_hybrid:main',\n 'Aquila_stLFR_step2=bin.Aquila_stLFR_step2:main',\n 'Aquila_stLFR_assembly_based_variants_call=bin.Aquila_stLFR_assembly_based_variants_call:main'\n ,\n 'Aquila_stLFR_phasing_all_variants=bin.Aquila_stLFR_phasing_all_variants:main'\n , 'Aquila_stLFR_clean=bin.Aquila_stLFR_clean:main',\n 'Aquila_step0_sortbam_hybrid=bin.Aquila_step0_sortbam_hybrid:main',\n 'Aquila_stLFR_fastq_preprocess=bin.Aquila_stLFR_fastq_preprocess:main']\n }, zip_safe=False)\n", (61, 877), False, 'from setuptools import setup, find_packages, Extension\n')] |
from typing import Callable, Union
import rx
from rx.core import Observable, typing
from rx.disposable import SingleAssignmentDisposable, SerialDisposable
from rx.internal.utils import is_future
def catch_handler(source: Observable, handler: Callable[[Exception, Observable], Observable]) -> Observable:
def subscribe(observer, scheduler=None):
d1 = SingleAssignmentDisposable()
subscription = SerialDisposable()
subscription.disposable = d1
def on_error(exception):
try:
result = handler(exception, source)
except Exception as ex: # By design. pylint: disable=W0703
observer.on_error(ex)
return
result = rx.from_future(result) if is_future(result) else result
d = SingleAssignmentDisposable()
subscription.disposable = d
d.disposable = result.subscribe(observer, scheduler=scheduler)
d1.disposable = source.subscribe_(
observer.on_next,
on_error,
observer.on_completed,
scheduler
)
return subscription
return Observable(subscribe)
def _catch(handler: Union[Observable, Callable[[Exception, Observable], Observable]]
) -> Callable[[Observable], Observable]:
def catch(source: Observable) -> Observable:
"""Continues an observable sequence that is terminated by an
exception with the next observable sequence.
Examples:
>>> op = catch(ys)
>>> op = catch(lambda ex, src: ys(ex))
Args:
handler: Second observable sequence used to produce
results when an error occurred in the first sequence, or an
exception handler function that returns an observable sequence
given the error and source observable that occurred in the
first sequence.
Returns:
An observable sequence containing the first sequence's
elements, followed by the elements of the handler sequence
in case an exception occurred.
"""
if callable(handler):
return catch_handler(source, handler)
elif isinstance(handler, typing.Observable):
return rx.catch(source, handler)
else:
            raise TypeError('catch operator takes either an Observable or a callable handler as argument.')
return catch
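# A sketch of typical use through the public RxPY 3 operator (the source
# observables below are illustrative, not from this module):
#   import rx
#   from rx import operators as ops
#   rx.throw(ValueError('boom')).pipe(
#       ops.catch(rx.of(1, 2, 3))
#   ).subscribe(print)  # emits 1, 2, 3 once the error is caught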
| [
"rx.disposable.SingleAssignmentDisposable",
"rx.catch",
"rx.from_future",
"rx.core.Observable",
"rx.disposable.SerialDisposable",
"rx.internal.utils.is_future"
] | [((1150, 1171), 'rx.core.Observable', 'Observable', (['subscribe'], {}), '(subscribe)\n', (1160, 1171), False, 'from rx.core import Observable, typing\n'), ((365, 393), 'rx.disposable.SingleAssignmentDisposable', 'SingleAssignmentDisposable', ([], {}), '()\n', (391, 393), False, 'from rx.disposable import SingleAssignmentDisposable, SerialDisposable\n'), ((417, 435), 'rx.disposable.SerialDisposable', 'SerialDisposable', ([], {}), '()\n', (433, 435), False, 'from rx.disposable import SingleAssignmentDisposable, SerialDisposable\n'), ((804, 832), 'rx.disposable.SingleAssignmentDisposable', 'SingleAssignmentDisposable', ([], {}), '()\n', (830, 832), False, 'from rx.disposable import SingleAssignmentDisposable, SerialDisposable\n'), ((758, 775), 'rx.internal.utils.is_future', 'is_future', (['result'], {}), '(result)\n', (767, 775), False, 'from rx.internal.utils import is_future\n'), ((732, 754), 'rx.from_future', 'rx.from_future', (['result'], {}), '(result)\n', (746, 754), False, 'import rx\n'), ((2286, 2311), 'rx.catch', 'rx.catch', (['source', 'handler'], {}), '(source, handler)\n', (2294, 2311), False, 'import rx\n')] |
# -*- coding: utf-8 -*-
'''
This file is part of PyMbs.
PyMbs is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
PyMbs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with PyMbs.
If not, see <http://www.gnu.org/licenses/>.
Copyright 2011, 2012 <NAME>, <NAME>,
<NAME>, <NAME>
'''
'''
Created on 13.05.2011
@author: <NAME>
Adjust the visualisation file paths as needed!
'''
#################################
# import PyMbs & Lib. #
#################################
from PyMbs.Input import *
from PyMbs.Symbolics import Matrix,cos,sin
pi = 3.1415926535897932384626433832795
#################################
# set up inertial frame #
#################################
world=MbsSystem([0,0,-1])
#################################
# Parameters #
#################################
# Length of the cylinder rods and housings
hoehe = 0.01
R_AP=0.3
R_BP=0.5
R_Zyl_stange=0.02
R_Zyl_geh=0.04
l_zyl=0.6
m_z_geh = 0.1
m_z_st = 0.1
c=world.addParam('c',10)
c1=world.addParam('c1',5)
m1=world.addParam('m1', 1.0)
R1=world.addParam('R1', R_BP)
m2=world.addParam('m2', 50)
R2=world.addParam('R2', R_AP)
H2=world.addParam('H2',hoehe)
I2x=world.addParam( 'I2x', (m2*H2**2)/12) # inertia of a solid cylinder about the x-axis
I2y=world.addParam( 'I2y', (m2*H2**2)/12) # inertia of a solid cylinder about the y-axis
I2z=world.addParam( 'I2z', (m2*R2**2)/2) # inertia of a solid cylinder about the z-axis
################################################
m_Zyl_Geh=world.addParam('m_Zyl_Geh', 18.6)
l_Zyl_Geh=world.addParam('l_Zyl_Geh',0.74)
cg_Zyl_Geh_x=world.addParam('cg_Zyl_Geh_x',0.353)
I_Zyl_Geh_x=world.addParam( 'I_Zyl_Geh_x', 0.027)
I_Zyl_Geh_y=world.addParam( 'I_Zyl_Geh_y', 1.061)
I_Zyl_Geh_z=world.addParam( 'I_Zyl_Geh_z', 1.061)
m_Zyl_Stange=world.addParam('m_Zyl_Stange', 8.4)
l_Zyl_Stange=world.addParam('l_Zyl_Stange',0.66)
cg_Zyl_Stange_x=world.addParam('cg_Zyl_Stange_x',-0.347)
I_Zyl_Stange_x=world.addParam('I_Zyl_Stange_x', 0.003)
I_Zyl_Stange_y=world.addParam('I_Zyl_Stange_y', 0.433)
I_Zyl_Stange_z=world.addParam('I_Zyl_Stange_z', 0.432)
###############
# Arrangements #
###############
phi_BP_1 = pi/2-pi/18
phi_BP_2 = phi_BP_1 + pi/9
phi_BP_3 = phi_BP_1 + 2*pi/3
phi_BP_4 = phi_BP_2 + 2*pi/3
phi_BP_5 = phi_BP_3 + 2*pi/3
phi_BP_6 = phi_BP_4 + 2*pi/3
phi_AP_1 = pi/6+pi/18
phi_AP_2 = phi_AP_1 + 2*pi/3-pi/9
phi_AP_3 = phi_AP_1 + 2*pi/3
phi_AP_4 = phi_AP_3 + 2*pi/3-pi/9
phi_AP_5 = phi_AP_3 + 2*pi/3
phi_AP_6 = phi_AP_4 + 2*pi/3
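# The six attachment angles above form three pairs spaced 2*pi/3 (120 deg)
# apart; the pi/9 offset within each pair sets the separation of the two
# joints of a pair, giving the staggered hexapod attachment pattern (an
# interpretation of the constants, not stated in the source).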
################
# Hexapod #
################
#################################
# Bodies & KS #
#################################
Ground = world.addBody(name='Ground',mass=1)
Ground.addFrame(name='KS_1',p=[0,0,0])
BP = Ground.KS_1
BP.addFrame(name='BP_visual', p=[0,0,0],R=rotMat(pi/2,'x'))
BP.addFrame(name='BP_Anlenkpunkt_1', p=[R1*cos(phi_BP_1),R1*sin(phi_BP_1),0])
BP.addFrame(name='BP_Anlenkpunkt_2', p=[R1*cos(phi_BP_2),R1*sin(phi_BP_2),0])
BP.addFrame(name='BP_Anlenkpunkt_3', p=[R1*cos(phi_BP_3),R1*sin(phi_BP_3),0])
BP.addFrame(name='BP_Anlenkpunkt_4', p=[R1*cos(phi_BP_4),R1*sin(phi_BP_4),0])
BP.addFrame(name='BP_Anlenkpunkt_5', p=[R1*cos(phi_BP_5),R1*sin(phi_BP_5),0])
BP.addFrame(name='BP_Anlenkpunkt_6', p=[R1*cos(phi_BP_6),R1*sin(phi_BP_6),0])
BP.addFrame(name='BP_Feder',p=[0,0,1.1])
################################################################################
AP = world.addBody(name='Arbeitsplattform', mass=m2,inertia=diag([I2x,I2y,I2z]))
AP.addFrame(name='AP_visual', p=[0,0,0],R=rotMat(pi/2,'x'))
AP.addFrame(name='AP_Anlenkpunkt_1', p=[R2*cos(phi_AP_1),R2*sin(phi_AP_1),0])
AP.addFrame(name='AP_Anlenkpunkt_2', p=[R2*cos(phi_AP_2),R2*sin(phi_AP_2),0])
AP.addFrame(name='AP_Anlenkpunkt_3', p=[R2*cos(phi_AP_3),R2*sin(phi_AP_3),0])
AP.addFrame(name='AP_Anlenkpunkt_4', p=[R2*cos(phi_AP_4),R2*sin(phi_AP_4),0])
AP.addFrame(name='AP_Anlenkpunkt_5', p=[R2*cos(phi_AP_5),R2*sin(phi_AP_5),0])
AP.addFrame(name='AP_Anlenkpunkt_6', p=[R2*cos(phi_AP_6),R2*sin(phi_AP_6),0])
################################################################################
'''
# For visualisation in Dymola
Zyl_geh_1 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_1')
Zyl_geh_1.addFrame('Zyl_geh_1_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_1.addFrame('Zyl_geh_1_cs', p=[0,0,0])
Zyl_geh_1.addFrame('Zyl_geh_1_cs_2', p=[0,0,0])
Zyl_geh_2 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_2')
Zyl_geh_2.addFrame('Zyl_geh_2_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_2.addFrame('Zyl_geh_2_cs', p=[0,0,0])
Zyl_geh_2.addFrame('Zyl_geh_2_cs_2', p=[0,0,0])
Zyl_geh_3 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_3')
Zyl_geh_3.addFrame('Zyl_geh_3_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_3.addFrame('Zyl_geh_3_cs', p=[0,0,0])
Zyl_geh_3.addFrame('Zyl_geh_3_cs_2', p=[0,0,0])
Zyl_geh_4 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_4')
Zyl_geh_4.addFrame('Zyl_geh_4_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_4.addFrame('Zyl_geh_4_cs', p=[0,0,0])
Zyl_geh_4.addFrame('Zyl_geh_4_cs_2', p=[0,0,0])
Zyl_geh_5 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_5')
Zyl_geh_5.addFrame('Zyl_geh_5_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_5.addFrame('Zyl_geh_5_cs', p=[0,0,0])
Zyl_geh_5.addFrame('Zyl_geh_5_cs_2', p=[0,0,0])
Zyl_geh_6 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_6')
Zyl_geh_6.addFrame('Zyl_geh_6_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_6.addFrame('Zyl_geh_6_cs', p=[0,0,0])
Zyl_geh_6.addFrame('Zyl_geh_6_cs_2', p=[0,0,0])
################################################################################
Zyl_stange_1 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_1')
Zyl_stange_1.addFrame('Zyl_stange_1_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_1.addFrame('Zyl_stange_1_cs', p=[0,0,0])
Zyl_stange_1.addFrame('Zyl_stange_1_cs_2', p=[0,0,0])
Zyl_stange_2 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_2')
Zyl_stange_2.addFrame('Zyl_stange_2_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_2.addFrame('Zyl_stange_2_cs', p=[0,0,0])
Zyl_stange_2.addFrame('Zyl_stange_2_cs_2', p=[0,0,0])
Zyl_stange_3 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_3')
Zyl_stange_3.addFrame('Zyl_stange_3_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_3.addFrame('Zyl_stange_3_cs', p=[0,0,0])
Zyl_stange_3.addFrame('Zyl_stange_3_cs_2', p=[0,0,0])
Zyl_stange_4 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_4')
Zyl_stange_4.addFrame('Zyl_stange_4_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_4.addFrame('Zyl_stange_4_cs', p=[0,0,0])
Zyl_stange_4.addFrame('Zyl_stange_4_cs_2', p=[0,0,0])
Zyl_stange_5 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_5')
Zyl_stange_5.addFrame('Zyl_stange_5_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_5.addFrame('Zyl_stange_5_cs', p=[0,0,0])
Zyl_stange_5.addFrame('Zyl_stange_5_cs_2', p=[0,0,0])
Zyl_stange_6 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_6')
Zyl_stange_6.addFrame('Zyl_stange_6_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_6.addFrame('Zyl_stange_6_cs', p=[0,0,0])
Zyl_stange_6.addFrame('Zyl_stange_6_cs_2', p=[0,0,0])
'''
# For visualisation in PyMbs
Zyl_geh_1 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_1')
Zyl_geh_1.addFrame('Zyl_geh_1_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_1.addFrame('Zyl_geh_1_cs', p=[0,0,0])
Zyl_geh_1.addFrame('Zyl_geh_1_cs_2', p=[0,0,0])
Zyl_geh_2 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_2')
Zyl_geh_2.addFrame('Zyl_geh_2_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_2.addFrame('Zyl_geh_2_cs', p=[0,0,0])
Zyl_geh_2.addFrame('Zyl_geh_2_cs_2', p=[0,0,0])
Zyl_geh_3 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_3')
Zyl_geh_3.addFrame('Zyl_geh_3_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_3.addFrame('Zyl_geh_3_cs', p=[0,0,0])
Zyl_geh_3.addFrame('Zyl_geh_3_cs_2', p=[0,0,0])
Zyl_geh_4 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_4')
Zyl_geh_4.addFrame('Zyl_geh_4_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_4.addFrame('Zyl_geh_4_cs', p=[0,0,0])
Zyl_geh_4.addFrame('Zyl_geh_4_cs_2', p=[0,0,0])
Zyl_geh_5 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_5')
Zyl_geh_5.addFrame('Zyl_geh_5_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_5.addFrame('Zyl_geh_5_cs', p=[0,0,0])
Zyl_geh_5.addFrame('Zyl_geh_5_cs_2', p=[0,0,0])
Zyl_geh_6 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_6')
Zyl_geh_6.addFrame('Zyl_geh_6_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_6.addFrame('Zyl_geh_6_cs', p=[0,0,0])
Zyl_geh_6.addFrame('Zyl_geh_6_cs_2', p=[0,0,0])
################################################################################
Zyl_stange_1 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_1')
Zyl_stange_1.addFrame('Zyl_stange_1_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_1.addFrame('Zyl_stange_1_cs', p=[0,0,0])
Zyl_stange_1.addFrame('Zyl_stange_1_cs_2', p=[0,0,0])
Zyl_stange_2 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_2')
Zyl_stange_2.addFrame('Zyl_stange_2_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_2.addFrame('Zyl_stange_2_cs', p=[0,0,0])
Zyl_stange_2.addFrame('Zyl_stange_2_cs_2', p=[0,0,0])
Zyl_stange_3 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_3')
Zyl_stange_3.addFrame('Zyl_stange_3_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_3.addFrame('Zyl_stange_3_cs', p=[0,0,0])
Zyl_stange_3.addFrame('Zyl_stange_3_cs_2', p=[0,0,0])
Zyl_stange_4 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_4')
Zyl_stange_4.addFrame('Zyl_stange_4_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_4.addFrame('Zyl_stange_4_cs', p=[0,0,0])
Zyl_stange_4.addFrame('Zyl_stange_4_cs_2', p=[0,0,0])
Zyl_stange_5 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_5')
Zyl_stange_5.addFrame('Zyl_stange_5_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_5.addFrame('Zyl_stange_5_cs', p=[0,0,0])
Zyl_stange_5.addFrame('Zyl_stange_5_cs_2', p=[0,0,0])
Zyl_stange_6 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_6')
Zyl_stange_6.addFrame('Zyl_stange_6_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_6.addFrame('Zyl_stange_6_cs', p=[0,0,0])
Zyl_stange_6.addFrame('Zyl_stange_6_cs_2', p=[0,0,0])
#################################
# Joints #
#################################
#world.addJoint('fix_BP', world, BP)
world.addJoint( world, Ground, name='fix_BP')
jAP=world.addJoint(world, AP,['Tx', 'Ty', 'Tz','Rx', 'Ry', 'Rz'],[0,0,1,0,0,0],name='free_AP')
world.addJoint(BP.BP_Anlenkpunkt_1,Zyl_geh_1.Zyl_geh_1_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_1_an_BP_1')
world.addJoint(BP.BP_Anlenkpunkt_2,Zyl_geh_2.Zyl_geh_2_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_2_an_BP_2')
world.addJoint(BP.BP_Anlenkpunkt_3,Zyl_geh_3.Zyl_geh_3_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_3_an_BP_3')
world.addJoint(BP.BP_Anlenkpunkt_4,Zyl_geh_4.Zyl_geh_4_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_4_an_BP_4')
world.addJoint(BP.BP_Anlenkpunkt_5,Zyl_geh_5.Zyl_geh_5_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_5_an_BP_5')
world.addJoint(BP.BP_Anlenkpunkt_6,Zyl_geh_6.Zyl_geh_6_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_6_an_BP_6')
world.addJoint(Zyl_geh_1.Zyl_geh_1_cs,Zyl_stange_1.Zyl_stange_1_cs_2,'Tz',0,name='Zyl_stange_1_an_Zyl_geh_1')
world.addJoint(Zyl_geh_2.Zyl_geh_2_cs,Zyl_stange_2.Zyl_stange_2_cs_2,'Tz',0,name='Zyl_stange_2_an_Zyl_geh_2')
world.addJoint(Zyl_geh_3.Zyl_geh_3_cs,Zyl_stange_3.Zyl_stange_3_cs_2,'Tz',0,name='Zyl_stange_3_an_Zyl_geh_3')
world.addJoint(Zyl_geh_4.Zyl_geh_4_cs,Zyl_stange_4.Zyl_stange_4_cs_2,'Tz',0,name='Zyl_stange_4_an_Zyl_geh_4')
world.addJoint(Zyl_geh_5.Zyl_geh_5_cs,Zyl_stange_5.Zyl_stange_5_cs_2,'Tz',0,name='Zyl_stange_5_an_Zyl_geh_5')
world.addJoint(Zyl_geh_6.Zyl_geh_6_cs,Zyl_stange_6.Zyl_stange_6_cs_2,'Tz',0,name='Zyl_stange_6_an_Zyl_geh_6')
########################
# Constraints or Loops #
########################
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_1, Zyl_stange_1.Zyl_stange_1_cs, 'Verbindung_1')
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_2, Zyl_stange_2.Zyl_stange_2_cs, 'Verbindung_2')
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_3, Zyl_stange_3.Zyl_stange_3_cs, 'Verbindung_3')
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_4, Zyl_stange_4.Zyl_stange_4_cs, 'Verbindung_4')
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_5, Zyl_stange_5.Zyl_stange_5_cs, 'Verbindung_5')
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_6, Zyl_stange_6.Zyl_stange_6_cs, 'Verbindung_6')
#####################
# add visualisation #
#####################
world.addVisualisation.Cylinder(BP.BP_visual,R_BP, hoehe)
world.addVisualisation.Cylinder(AP.AP_visual,R_AP, hoehe)
'''
# For visualisation in Dymola
world.addVisualisation.File(Zyl_geh_1.Zyl_geh_1_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_1')
world.addVisualisation.File(Zyl_geh_2.Zyl_geh_2_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_2')
world.addVisualisation.File(Zyl_geh_3.Zyl_geh_3_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_3')
world.addVisualisation.File(Zyl_geh_4.Zyl_geh_4_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_4')
world.addVisualisation.File(Zyl_geh_5.Zyl_geh_5_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_5')
world.addVisualisation.File(Zyl_geh_6.Zyl_geh_6_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_6')
world.addVisualisation.File(Zyl_stange_1.Zyl_stange_1_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_1')
world.addVisualisation.File(Zyl_stange_2.Zyl_stange_2_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_2')
world.addVisualisation.File(Zyl_stange_3.Zyl_stange_3_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_3')
world.addVisualisation.File(Zyl_stange_4.Zyl_stange_4_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_4')
world.addVisualisation.File(Zyl_stange_5.Zyl_stange_5_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_5')
world.addVisualisation.File(Zyl_stange_6.Zyl_stange_6_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_6')
'''
# For visualisation in PyMbs (plain cylinders in place of the STL meshes above)
world.addVisualisation.Cylinder(Zyl_geh_1.Zyl_geh_1_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_geh_2.Zyl_geh_2_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_geh_3.Zyl_geh_3_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_geh_4.Zyl_geh_4_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_geh_5.Zyl_geh_5_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_geh_6.Zyl_geh_6_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_1.Zyl_stange_1_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_2.Zyl_stange_2_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_3.Zyl_stange_3_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_4.Zyl_stange_4_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_5.Zyl_stange_5_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_6.Zyl_stange_6_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Frame(AP,0.4)
#world.addVisualisation.Frame(BP.BP_Feder,1)
world.addVisualisation.Frame(Ground,0.6)
#################################
#       model assembled         #
#################################
print("System has been assembled")
#################################
# add Sensors #
#################################
#world.addSensor.Position(world,AP.AP_Anlenkpunkt_1,"P_AP_1")
#world.addSensor.Energy(AP,'E_AP')
#####################
# add Imput & Load #
#####################
#l = world.addSensor.Distance(AP,BP.BP_Feder, 'l', 'DistanceSensor')
#lz = world.addSensor.Distance(BP,AP, 'lz', 'DistanceSensor_Cylinder')
#c=50
#F_c = world.addExpression('SpringForce', 'F_c', -c*l[0])
#world.addLoad.PtPForce(AP,BP.BP_Feder, F_c, name='Spring')
#################################
# generate equations & sim Code #
#################################
world.genEquations.Recursive()
#world.genCode.Modelica('hexapod_z_kpl','.\HP_Output',inputsAsInputs=True, debugMode=False)
world.show('hexapod_z_kpl')
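# world.show(...) launches PyMbs's built-in scene viewer; the commented
# genCode line above would export Modelica source instead (its output path is
# machine-specific).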
| [
"PyMbs.Symbolics.cos",
"PyMbs.Symbolics.sin"
] | [((3328, 3341), 'PyMbs.Symbolics.cos', 'cos', (['phi_BP_1'], {}), '(phi_BP_1)\n', (3331, 3341), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((3345, 3358), 'PyMbs.Symbolics.sin', 'sin', (['phi_BP_1'], {}), '(phi_BP_1)\n', (3348, 3358), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((3406, 3419), 'PyMbs.Symbolics.cos', 'cos', (['phi_BP_2'], {}), '(phi_BP_2)\n', (3409, 3419), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((3423, 3436), 'PyMbs.Symbolics.sin', 'sin', (['phi_BP_2'], {}), '(phi_BP_2)\n', (3426, 3436), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((3484, 3497), 'PyMbs.Symbolics.cos', 'cos', (['phi_BP_3'], {}), '(phi_BP_3)\n', (3487, 3497), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((3501, 3514), 'PyMbs.Symbolics.sin', 'sin', (['phi_BP_3'], {}), '(phi_BP_3)\n', (3504, 3514), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((3562, 3575), 'PyMbs.Symbolics.cos', 'cos', (['phi_BP_4'], {}), '(phi_BP_4)\n', (3565, 3575), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((3579, 3592), 'PyMbs.Symbolics.sin', 'sin', (['phi_BP_4'], {}), '(phi_BP_4)\n', (3582, 3592), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((3640, 3653), 'PyMbs.Symbolics.cos', 'cos', (['phi_BP_5'], {}), '(phi_BP_5)\n', (3643, 3653), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((3657, 3670), 'PyMbs.Symbolics.sin', 'sin', (['phi_BP_5'], {}), '(phi_BP_5)\n', (3660, 3670), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((3718, 3731), 'PyMbs.Symbolics.cos', 'cos', (['phi_BP_6'], {}), '(phi_BP_6)\n', (3721, 3731), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((3735, 3748), 'PyMbs.Symbolics.sin', 'sin', (['phi_BP_6'], {}), '(phi_BP_6)\n', (3738, 3748), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4065, 4078), 'PyMbs.Symbolics.cos', 'cos', (['phi_AP_1'], {}), '(phi_AP_1)\n', (4068, 4078), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4082, 4095), 'PyMbs.Symbolics.sin', 'sin', (['phi_AP_1'], {}), '(phi_AP_1)\n', (4085, 4095), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4143, 4156), 'PyMbs.Symbolics.cos', 'cos', (['phi_AP_2'], {}), '(phi_AP_2)\n', (4146, 4156), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4160, 4173), 'PyMbs.Symbolics.sin', 'sin', (['phi_AP_2'], {}), '(phi_AP_2)\n', (4163, 4173), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4221, 4234), 'PyMbs.Symbolics.cos', 'cos', (['phi_AP_3'], {}), '(phi_AP_3)\n', (4224, 4234), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4238, 4251), 'PyMbs.Symbolics.sin', 'sin', (['phi_AP_3'], {}), '(phi_AP_3)\n', (4241, 4251), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4299, 4312), 'PyMbs.Symbolics.cos', 'cos', (['phi_AP_4'], {}), '(phi_AP_4)\n', (4302, 4312), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4316, 4329), 'PyMbs.Symbolics.sin', 'sin', (['phi_AP_4'], {}), '(phi_AP_4)\n', (4319, 4329), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4377, 4390), 'PyMbs.Symbolics.cos', 'cos', (['phi_AP_5'], {}), '(phi_AP_5)\n', (4380, 4390), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4394, 4407), 'PyMbs.Symbolics.sin', 'sin', (['phi_AP_5'], {}), '(phi_AP_5)\n', (4397, 4407), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4455, 4468), 'PyMbs.Symbolics.cos', 'cos', (['phi_AP_6'], {}), '(phi_AP_6)\n', (4458, 4468), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n'), ((4472, 4485), 
'PyMbs.Symbolics.sin', 'sin', (['phi_AP_6'], {}), '(phi_AP_6)\n', (4475, 4485), False, 'from PyMbs.Symbolics import Matrix, cos, sin\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
""" <NAME>
ISIR - CNRS / Sorbonne Université
02/2018
This file allows worlds to be built.
TODO : replace this file by a .json loader and code worlds in .json
"""
# WARNING : don't do (from round_bot_py import round_bot_model) here to avoid mutual imports !
import os
def _texture_path(texture_bricks_name):
"""
Parameter
---------
texture_bricks_name : str
name of the world main texture
Return
------
texture_path: (str) path corresponding to the texture_bricks_name
Raises
------
    ValueError : raised if texture_bricks_name is unknown
"""
if texture_bricks_name == 'minecraft':
return '/textures/texture_minecraft.png'
elif texture_bricks_name == 'graffiti':
return '/textures/texture_graffiti.png'
elif texture_bricks_name == 'colours':
return '/textures/texture_colours.png'
else :
raise ValueError('Unknown texture name '+ texture_bricks_name + ' in loading world')
def _build_square_default_world(model, texture_bricks_name, width=45, depth=45, hwalls=4, dwalls=1,
texture_robot='/textures/robot.png',
texture_visualisation='/textures/visualisation.png',
texture_distractors='/textures/texture_distractors.png',
wall_reward=-1,
distractors=False,
distractors_speed=0.1,
sandboxes=False,
trigger_button=False,
):
"""
Builds a simple rectangle planar world with walls around
Parameters
----------
- model : (round_bot_model.Model) model to load world in
- texture_bricks_name : (str) name of the texture for the bricks
- width : (int) width of the world
- depth : (int) depth of the world
    - hwalls : (int) height of walls
- dwalls: (int) depth of walls
- texture_bricks, texture_robot, texture_visualisation : (string)
paths for texture image of bricks, robot and visualisation
- wall_reward : (float) reward for wall collision
- distractors (Bool) : add visual distractors on walls and ground
- distractors_speed (float) : speed of visual distractors displacement
    - sandboxes (Bool) : add sandboxes on the ground (slowing down the robot when crossed)
- trigger_button (Bool) : add a trigger button that will trigger a change in the environment (change to be defined)
Returns
-------
world information
"""
texture_bricks = _texture_path(texture_bricks_name)
# TODO : better import would be global and without "from" but doesn't work for the moment
from gym_round_bot.envs import round_bot_model
# create textures coordinates
GRASS = round_bot_model.Block.tex_coords((1, 0), (0, 1), (0, 0))
SAND = round_bot_model.Block.tex_coords((1, 1), (1, 1), (1, 1))
BRICK = round_bot_model.Block.tex_coords((2, 0), (2, 0), (2, 0))
BRICK2 = round_bot_model.Block.tex_coords((0, 2), (0, 2), (0, 2))
STONE = round_bot_model.Block.tex_coords((2, 1), (2, 1), (2, 1))
STONE2 = round_bot_model.Block.tex_coords((1, 2), (1, 2), (1, 2))
BUTTON = round_bot_model.Block.tex_coords((2, 2), (2, 2), (2, 2))
DISTRACTORS = [ round_bot_model.Block.tex_coords(t,t,t) for t in [(0,0),(1,0),(2,0),(0,1),(1,1),(2,1),(2,0)] ]
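    # Each tex_coords((cx, cy), ...) call above selects the (top, bottom, side)
    # tiles of a block from a texture atlas by column/row index (a reading
    # inferred from usage here, not from the helper's source).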
nw = width/2.0 # 1/2 width of this world
nd = depth/2.0 # 1/2 depth of this world
wr = width/3.0 # wr width of reward area
wwalls = width
# get texture paths in current directory
brick_texture_path = os.path.dirname(__file__) + texture_bricks
robot_texture_path = os.path.dirname(__file__) + texture_robot
visualisation_texture_path = os.path.dirname(__file__) + texture_visualisation
distractors_texture_path = os.path.dirname(__file__) + texture_distractors
texture_paths = {'brick':brick_texture_path,
'robot':robot_texture_path,
'visualisation':visualisation_texture_path,
'distractors':distractors_texture_path,
}
    # Build ground block
ground_block = model.add_block( (0, -3, 0, 2*nd, 6, 2*nw, 0.0, 0.0, 0.0), GRASS, block_type='brick')
# Build wall blocks with negative reward on collision
    # back wall
back_wall_block = model.add_block( (0, hwalls/2, -nw, depth, hwalls, dwalls, 0.0, 0.0, 0.0),
texture=BRICK, block_type='brick', collision_reward = wall_reward)
    # front wall
front_wall_block = model.add_block( (0, hwalls/2, nw, depth, hwalls, dwalls, 0.0, 0.0, 0.0),
texture=STONE2, block_type='brick', collision_reward = wall_reward)
#left wall
left_wall_block = model.add_block( (-nd, hwalls/2, 0, dwalls, hwalls, wwalls, 0.0, 0.0, 0.0),
texture=STONE, block_type='brick', collision_reward = wall_reward)
#right wall
right_wall_block = model.add_block( (nd, hwalls/2, 0, dwalls, hwalls, wwalls, 0.0, 0.0, 0.0),
texture=BRICK2, block_type='brick', collision_reward = wall_reward)
if distractors:
# add visual distractors on the groud and inner faces of walls if asked
# distractor ground block
size_ground_distractor = n = min(nw,nd)
ground_bb = round_bot_model.BoundingBoxBlock( (0, 0.1, 0), (2*n, 0, 2*n), (0.0, 0.0, 0.0), linked_block=ground_block)
model.add_block( components=(0, 0, 0, size_ground_distractor, 0.0, size_ground_distractor, 0.0, 0.0, 0.0),
texture=DISTRACTORS[0], block_type='flat_distractor', boundingBox = ground_bb, speed=distractors_speed)
model.add_block( components=(0, 0, 0, size_ground_distractor, 0.0, size_ground_distractor, 0.0, 0.0, 0.0),
texture=DISTRACTORS[0], block_type='flat_distractor', boundingBox = ground_bb, speed=distractors_speed)
# wall distractors :
width_wall_distractors = wwalls/2
height_wall_distractors = hwalls*2/3
# distractor back_wall inner face block
back_wall_bb = round_bot_model.BoundingBoxBlock( (0, hwalls/2, -nw+dwalls/2+0.1), (wwalls, height_wall_distractors, 0.0), (0.0, 0.0, 0.0), linked_block=ground_block)
model.add_block( components=(0, 0, 0, width_wall_distractors, height_wall_distractors, 0.0, 0.0, 0.0, 0.0),
texture=DISTRACTORS[1], block_type='flat_distractor', boundingBox = back_wall_bb, speed=distractors_speed)
# distractor front_wall inner face block
front_wall_bb = round_bot_model.BoundingBoxBlock(( 0, hwalls/2, nw-dwalls/2-0.1), (wwalls, height_wall_distractors, 0.0), (0.0, 0.0, 0.0), linked_block=ground_block)
model.add_block( components=(0, 0, 0, width_wall_distractors, height_wall_distractors, 0.0, 0.0, 0.0, 0.0),
texture=DISTRACTORS[2], block_type='flat_distractor', boundingBox = front_wall_bb, speed=distractors_speed)
# distractor left_wall inner face block
left_wall_bb = round_bot_model.BoundingBoxBlock( (-nd+dwalls/2+0.1, hwalls/2, 0), (0.0, height_wall_distractors, wwalls), (0.0, 0.0, 0.0), linked_block=ground_block)
model.add_block( components=(0, 0, 0, 0.0, height_wall_distractors, width_wall_distractors, 0.0, 0.0, 0.0),
texture=DISTRACTORS[3], block_type='flat_distractor', boundingBox = left_wall_bb, speed=distractors_speed)
# distractor right_wall inner face block
right_wall_bb = round_bot_model.BoundingBoxBlock(( nd-dwalls/2-0.1, hwalls/2, 0), (0.0, height_wall_distractors, wwalls), (0.0, 0.0, 0.0), linked_block=ground_block)
model.add_block( components=(0, 0, 0, 0.0, height_wall_distractors, width_wall_distractors, 0.0, 0.0, 0.0),
texture=DISTRACTORS[4], block_type='flat_distractor', boundingBox = right_wall_bb, speed=distractors_speed)
if sandboxes :
        # add sandboxes on the ground if asked (slowing down the robot when crossed)
model.add_block( (0, 0.3, 0, nd/2, 0, nw/2, 0.0, 0.0, 0.0), SAND, block_type='sandbox')
if trigger_button :
# add a trigger button that will trigger a change in the world when crossed ON / OFF
#TRIGGER = round_bot_model.Block.tex_coords((1, 0), (1, 0), (1, 0))
model.add_block( (0, 0.3, 0, nw/3, 0.2, nw/3, 0.0, 0.0, 0.0), BUTTON, block_type='trigger_button')
world_info = { 'width' : 2*nw,
'depth' : 2*nd,
}
return texture_paths, world_info
def build_square_world(model, texture, robot_diameter=2 ,width=45, depth=45, hwalls=4, dwalls=1, wall_reward=-1, goal_reward=10, distractors=False,
distractors_speed=0.1, sandboxes=False, trigger_button=False, visible_reward=False):
"""
Builds the square world
"""
## first build default world
texture_paths, world_info = _build_square_default_world(model, texture, width=width, depth=depth,
hwalls=hwalls, dwalls=dwalls,
wall_reward=wall_reward, distractors=distractors,
distractors_speed=distractors_speed,
sandboxes=sandboxes, trigger_button=trigger_button,)
## then add specs
from gym_round_bot.envs import round_bot_model
BOT = round_bot_model.Block.tex_coords((0, 0), (0, 1), (0, 1))
START = round_bot_model.Block.tex_coords((0, 0), (0, 0), (0, 0))
REWARD = round_bot_model.Block.tex_coords((0, 1), (0, 1), (0, 1))
nw = width/2.0 # 1/2 width of this world
nd = depth/2.0 # 1/2 depth of this world
wwalls = width # width of walls
wr = width/4.0 # wr width of reward area
# set robot specifications
bot_radius = robot_diameter/2.0
bot_height = bot_radius
# Build reward block in the corner
rew = model.add_block( (nd-(wr/2+dwalls/2), bot_height/2.0, -nw+(wr/2+dwalls/2), wr, bot_height/3.0, wr, 0.0, 0.0, 0.0),
texture=REWARD, block_type='reward', collision_reward = goal_reward, visible=visible_reward)
    # Build robot block, set initial height to bot_height/2 + small offset to avoid ground collision
model.add_block( (0, bot_height/2.0+0.1, 0, 2*bot_radius, bot_height, 2*bot_radius, 0.0, 0.0, 0.0),
texture=BOT, block_type='robot')
    # add starting areas (the height=0 of the block does not matter here, only the (2*nd-2*dwalls) x (2*nw-2*dwalls) footprint)
model.add_block( (0, bot_height/2.0+0.1, 0, 2*nd-2*dwalls, 0.1, 2*nw-2*dwalls, 0.0, 0.0, 0.0),
texture=START, block_type='start')
return texture_paths, world_info
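# A minimal usage sketch (the Model constructor signature is not shown in
# this file, so the call below is illustrative only):
#   from gym_round_bot.envs import round_bot_model
#   model = round_bot_model.Model(...)
#   texture_paths, world_info = build_square_world(model, 'minecraft',
#                                                  width=20, depth=20)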
def build_square_1wall_world(model, texture, robot_diameter=2, width=45, depth=45, hwalls=2, dwalls=2, wall_reward=-1, goal_reward=10, distractors=False,
distractors_speed=0.1, sandboxes=False, trigger_button=False, visible_reward=False):
"""
Builds a simple rectangle planar world with walls around, and 1 wall in the middle
"""
## first build default world
texture_paths, world_info = _build_square_default_world(model, texture, width=width, depth=depth,
hwalls=hwalls, dwalls=dwalls,
wall_reward=wall_reward, distractors=distractors,
distractors_speed=distractors_speed,
sandboxes=sandboxes, trigger_button=trigger_button,)
## then add specs
from gym_round_bot.envs import round_bot_model
BOT = round_bot_model.Block.tex_coords((0, 0), (0, 1), (0, 1))
START = round_bot_model.Block.tex_coords((0, 0), (0, 0), (0, 0))
REWARD = round_bot_model.Block.tex_coords((0, 1), (0, 1), (0, 1))
SAND = round_bot_model.Block.tex_coords((1, 1), (1, 1), (1, 1))
n = width/2.0 # 1/2 width and depth of world
wwalls = 2*n # width of walls
wr = width/4.0 # wr width of reward area
# set robot specifications
bot_radius = robot_diameter/2.0
bot_height = bot_radius
# middle wall
model.add_block( (n/2, hwalls/2, -n/4, wwalls/2, hwalls, dwalls, 0.0, 0.0, 0.0), SAND, block_type='brick', collision_reward = -1)
# Build reward block in the corner
model.add_block( (n-(wr/2+dwalls/2), bot_height/2.0, -n+(wr/2+dwalls/2), wr, bot_height/3.0, wr, 0.0, 0.0, 0.0),
                texture=REWARD, block_type='reward', collision_reward = goal_reward, visible=visible_reward)
    # Build robot block, set initial height to bot_height/2 + small offset to avoid ground collision
model.add_block( (0, bot_height/2.0+0.1, 0, 2*bot_radius, bot_height, 2*bot_radius, 0.0, 0.0, 0.0),
texture=BOT, block_type='robot')
    # add starting areas (the height=0 of the block does not matter here, only the (wwalls-2*dwalls)-based footprints)
model.add_block( (0, bot_height/2.0+0.1, (wwalls-2*dwalls)/4, wwalls-2*dwalls, 0.1, (wwalls-2*dwalls)/2, 0.0, 0.0, 0.0),
texture=START, block_type='start')
model.add_block( ( -(wwalls-2*dwalls)/4, bot_height/2.0+0.1, -(wwalls-2*dwalls)/4, (wwalls-2*dwalls)/2, 0.1, (wwalls-2*dwalls)/2, 0.0, 0.0, 0.0),
texture=START, block_type='start')
return texture_paths, world_info
| [
"gym_round_bot.envs.round_bot_model.Block.tex_coords",
"os.path.dirname",
"gym_round_bot.envs.round_bot_model.BoundingBoxBlock"
] | [((2911, 2967), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(1, 0)', '(0, 1)', '(0, 0)'], {}), '((1, 0), (0, 1), (0, 0))\n', (2943, 2967), False, 'from gym_round_bot.envs import round_bot_model\n'), ((2979, 3035), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(1, 1)', '(1, 1)', '(1, 1)'], {}), '((1, 1), (1, 1), (1, 1))\n', (3011, 3035), False, 'from gym_round_bot.envs import round_bot_model\n'), ((3048, 3104), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(2, 0)', '(2, 0)', '(2, 0)'], {}), '((2, 0), (2, 0), (2, 0))\n', (3080, 3104), False, 'from gym_round_bot.envs import round_bot_model\n'), ((3118, 3174), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(0, 2)', '(0, 2)', '(0, 2)'], {}), '((0, 2), (0, 2), (0, 2))\n', (3150, 3174), False, 'from gym_round_bot.envs import round_bot_model\n'), ((3187, 3243), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(2, 1)', '(2, 1)', '(2, 1)'], {}), '((2, 1), (2, 1), (2, 1))\n', (3219, 3243), False, 'from gym_round_bot.envs import round_bot_model\n'), ((3257, 3313), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(1, 2)', '(1, 2)', '(1, 2)'], {}), '((1, 2), (1, 2), (1, 2))\n', (3289, 3313), False, 'from gym_round_bot.envs import round_bot_model\n'), ((3327, 3383), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(2, 2)', '(2, 2)', '(2, 2)'], {}), '((2, 2), (2, 2), (2, 2))\n', (3359, 3383), False, 'from gym_round_bot.envs import round_bot_model\n'), ((9588, 9644), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(0, 0)', '(0, 1)', '(0, 1)'], {}), '((0, 0), (0, 1), (0, 1))\n', (9620, 9644), False, 'from gym_round_bot.envs import round_bot_model\n'), ((9657, 9713), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(0, 0)', '(0, 0)', '(0, 0)'], {}), '((0, 0), (0, 0), (0, 0))\n', (9689, 9713), False, 'from gym_round_bot.envs import round_bot_model\n'), ((9727, 9783), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(0, 1)', '(0, 1)', '(0, 1)'], {}), '((0, 1), (0, 1), (0, 1))\n', (9759, 9783), False, 'from gym_round_bot.envs import round_bot_model\n'), ((11875, 11931), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(0, 0)', '(0, 1)', '(0, 1)'], {}), '((0, 0), (0, 1), (0, 1))\n', (11907, 11931), False, 'from gym_round_bot.envs import round_bot_model\n'), ((11944, 12000), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(0, 0)', '(0, 0)', '(0, 0)'], {}), '((0, 0), (0, 0), (0, 0))\n', (11976, 12000), False, 'from gym_round_bot.envs import round_bot_model\n'), ((12014, 12070), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(0, 1)', '(0, 1)', '(0, 1)'], {}), '((0, 1), (0, 1), (0, 1))\n', (12046, 12070), False, 'from gym_round_bot.envs import round_bot_model\n'), ((12082, 12138), 'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['(1, 1)', '(1, 1)', '(1, 1)'], {}), '((1, 1), (1, 1), (1, 1))\n', (12114, 12138), False, 'from gym_round_bot.envs import round_bot_model\n'), ((3404, 3445), 
'gym_round_bot.envs.round_bot_model.Block.tex_coords', 'round_bot_model.Block.tex_coords', (['t', 't', 't'], {}), '(t, t, t)\n', (3436, 3445), False, 'from gym_round_bot.envs import round_bot_model\n'), ((3727, 3752), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3742, 3752), False, 'import os\n'), ((3795, 3820), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3810, 3820), False, 'import os\n'), ((3870, 3895), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3885, 3895), False, 'import os\n'), ((3951, 3976), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3966, 3976), False, 'import os\n'), ((5455, 5567), 'gym_round_bot.envs.round_bot_model.BoundingBoxBlock', 'round_bot_model.BoundingBoxBlock', (['(0, 0.1, 0)', '(2 * n, 0, 2 * n)', '(0.0, 0.0, 0.0)'], {'linked_block': 'ground_block'}), '((0, 0.1, 0), (2 * n, 0, 2 * n), (0.0, 0.0,\n 0.0), linked_block=ground_block)\n', (5487, 5567), False, 'from gym_round_bot.envs import round_bot_model\n'), ((6236, 6403), 'gym_round_bot.envs.round_bot_model.BoundingBoxBlock', 'round_bot_model.BoundingBoxBlock', (['(0, hwalls / 2, -nw + dwalls / 2 + 0.1)', '(wwalls, height_wall_distractors, 0.0)', '(0.0, 0.0, 0.0)'], {'linked_block': 'ground_block'}), '((0, hwalls / 2, -nw + dwalls / 2 + 0.1), (\n wwalls, height_wall_distractors, 0.0), (0.0, 0.0, 0.0), linked_block=\n ground_block)\n', (6268, 6403), False, 'from gym_round_bot.envs import round_bot_model\n'), ((6708, 6874), 'gym_round_bot.envs.round_bot_model.BoundingBoxBlock', 'round_bot_model.BoundingBoxBlock', (['(0, hwalls / 2, nw - dwalls / 2 - 0.1)', '(wwalls, height_wall_distractors, 0.0)', '(0.0, 0.0, 0.0)'], {'linked_block': 'ground_block'}), '((0, hwalls / 2, nw - dwalls / 2 - 0.1), (\n wwalls, height_wall_distractors, 0.0), (0.0, 0.0, 0.0), linked_block=\n ground_block)\n', (6740, 6874), False, 'from gym_round_bot.envs import round_bot_model\n'), ((7178, 7345), 'gym_round_bot.envs.round_bot_model.BoundingBoxBlock', 'round_bot_model.BoundingBoxBlock', (['(-nd + dwalls / 2 + 0.1, hwalls / 2, 0)', '(0.0, height_wall_distractors, wwalls)', '(0.0, 0.0, 0.0)'], {'linked_block': 'ground_block'}), '((-nd + dwalls / 2 + 0.1, hwalls / 2, 0), (\n 0.0, height_wall_distractors, wwalls), (0.0, 0.0, 0.0), linked_block=\n ground_block)\n', (7210, 7345), False, 'from gym_round_bot.envs import round_bot_model\n'), ((7650, 7816), 'gym_round_bot.envs.round_bot_model.BoundingBoxBlock', 'round_bot_model.BoundingBoxBlock', (['(nd - dwalls / 2 - 0.1, hwalls / 2, 0)', '(0.0, height_wall_distractors, wwalls)', '(0.0, 0.0, 0.0)'], {'linked_block': 'ground_block'}), '((nd - dwalls / 2 - 0.1, hwalls / 2, 0), (\n 0.0, height_wall_distractors, wwalls), (0.0, 0.0, 0.0), linked_block=\n ground_block)\n', (7682, 7816), False, 'from gym_round_bot.envs import round_bot_model\n')] |
import os, sys
sys.path.append(os.getcwd())
from PyQt5.QtWidgets import QApplication
from src.view.widgets.buttons_cch import ButtonsCCHView
from src.controller.base import Base
class ButtonsCCHController:
def __init__(self, force_show=False):
print('C botoes')
self.view = ButtonsCCHView()
self.view.bt_confirmar.clicked.connect(self.on_bt_confirmare)
if force_show:
self.view.show()
def show(self):
return self.view
def on_bt_confirmare(self):
print(10)
def validate_user(self):
if True:
self.auth = True
if __name__ == '__main__':
app = QApplication(sys.argv)
w = ButtonsCCHController(True)
sys.exit(app.exec_()) | [
"os.getcwd",
"PyQt5.QtWidgets.QApplication",
"src.view.widgets.buttons_cch.ButtonsCCHView"
] | [((31, 42), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (40, 42), False, 'import os, sys\n'), ((576, 598), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (588, 598), False, 'from PyQt5.QtWidgets import QApplication\n'), ((282, 298), 'src.view.widgets.buttons_cch.ButtonsCCHView', 'ButtonsCCHView', ([], {}), '()\n', (296, 298), False, 'from src.view.widgets.buttons_cch import ButtonsCCHView\n')] |
#!/usr/bin/python3
from sys import argv
import os
import math
import urllib.request
import random
import os.path
import sqlite3
URL_TEMPLATE = "https://c.tile.openstreetmap.org/%d/%d/%d.png"
BBOX = None # [lon_min, lat_min, lon_max, lat_max] or None for whole world
ZOOM_MAX = 7
LAYERTYPE = "baselayer" # "baselayer" or "overlay"
LAYERNAME = "OSM Low Detail"
TILE_FORMAT = "png"
def deg2num(lat_deg, lon_deg, zoom):
lat_rad = math.radians(lat_deg)
n = 2.0 ** zoom
xtile = int((lon_deg + 180.0) / 360.0 * n)
ytile = int((1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi) / 2.0 * n)
return (xtile, ytile)
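# Worked example of the slippy-map formula above: deg2num(0.0, 0.0, 1) gives
# xtile = int((0 + 180) / 360 * 2) = 1 and, since tan(0) + 1/cos(0) = 1 and
# log(1) = 0, ytile = int((1 - 0) / 2 * 2) = 1 -- i.e. the (1, 1) tile just
# south-east of the equator/prime-meridian crossing at zoom 1.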
def download_url(zoom, xtile, ytile, cursor):
    subdomain = random.randint(1, 4)  # unused with the current single-host URL_TEMPLATE
url = URL_TEMPLATE % (zoom, xtile, ytile)
ymax = 1 << zoom
yinverted = ymax - ytile - 1
existing = cursor.execute('SELECT count(*) FROM tiles WHERE zoom_level=? AND tile_column=? AND tile_row=?', (zoom, xtile, yinverted)).fetchall()
if existing[0][0] > 0:
print('Skipping ' + url)
return
print("downloading %r" % url)
request = urllib.request.Request(
url, data=None,
headers={
'User-Agent': 'Low-Zoom Downloader'
}
)
source = urllib.request.urlopen(request)
content = source.read()
source.close()
cursor.execute('INSERT INTO tiles(zoom_level, tile_column, tile_row, tile_data) VALUES(?, ?, ?, ?)', (zoom, xtile, yinverted, content))
def main(argv):
db = argv[1] if len(argv) > 1 else 'osm.mbtiles'
conn = sqlite3.connect(db)
cur = conn.cursor()
bboxStr = "-180,-85,180,85" if BBOX is None else ",".join(map(str, BBOX))
cur.executescript('''
CREATE TABLE IF NOT EXISTS tiles (
zoom_level integer,
tile_column integer,
tile_row integer,
tile_data blob);
CREATE TABLE IF NOT EXISTS metadata(name text, value text);
CREATE UNIQUE INDEX IF NOT EXISTS metadata_name on metadata (name);
CREATE UNIQUE INDEX IF NOT EXISTS tile_index on tiles(zoom_level, tile_column, tile_row);
INSERT OR REPLACE INTO metadata VALUES('minzoom', '1');
INSERT OR REPLACE INTO metadata VALUES('maxzoom', '{0}');
INSERT OR REPLACE INTO metadata VALUES('name', '{1}');
INSERT OR REPLACE INTO metadata VALUES('type', '{2}');
INSERT OR REPLACE INTO metadata VALUES('format', '{3}');
INSERT OR REPLACE INTO metadata VALUES('bounds', '{4}');
'''.format(ZOOM_MAX, LAYERNAME, LAYERTYPE, TILE_FORMAT, bboxStr))
# from 0 to 6 download all
for zoom in range(0, ZOOM_MAX+1):
xstart = 0
ystart = 0
xend = 2**zoom-1
yend = 2**zoom-1
if BBOX is not None:
xstart, yend = deg2num(BBOX[1], BBOX[0], zoom)
xend, ystart = deg2num(BBOX[3], BBOX[2], zoom)
for x in range(xstart, xend+1):
for y in range(ystart, yend+1):
download_url(zoom, x, y, cur)
conn.commit()
cur.close()
conn.close()
main(argv)
| [
"random.randint",
"math.radians",
"math.tan",
"sqlite3.connect",
"math.cos"
] | [((434, 455), 'math.radians', 'math.radians', (['lat_deg'], {}), '(lat_deg)\n', (446, 455), False, 'import math\n'), ((711, 731), 'random.randint', 'random.randint', (['(1)', '(4)'], {}), '(1, 4)\n', (725, 731), False, 'import random\n'), ((1562, 1581), 'sqlite3.connect', 'sqlite3.connect', (['db'], {}), '(db)\n', (1577, 1581), False, 'import sqlite3\n'), ((555, 572), 'math.tan', 'math.tan', (['lat_rad'], {}), '(lat_rad)\n', (563, 572), False, 'import math\n'), ((580, 597), 'math.cos', 'math.cos', (['lat_rad'], {}), '(lat_rad)\n', (588, 597), False, 'import math\n')] |
text_dir='VCTK-Corpus/txt/'
wav_dir='VCTK-Corpus/wav48/'
train_txt='VCTK-Corpus/training.txt'
val_txt='VCTK-Corpus/validation.txt'
eval_txt='VCTK-Corpus/evaluation.txt'
import os
all_set=[]
train_set=[]
val_set=[]
eval_set=[]
spks=os.listdir(text_dir)
spks.sort()
for spk in spks:
if spk in ['p360', 'p361', 'p362', 'p363']:
continue
#import pdb;pdb.set_trace()
spk_dir=os.path.join(text_dir, spk)
txts=os.listdir(spk_dir)
txts.sort()
for txt in txts[:-20]:
iid=os.path.basename(txt).split('.')[0]
txt=os.path.join(spk_dir, txt)
with open(txt) as f:
text=f.readline().strip()
train_set.append((iid, text))
for txt in txts[-20:-10]:
iid=os.path.basename(txt).split('.')[0]
txt=os.path.join(spk_dir, txt)
with open(txt) as f:
text=f.readline().strip()
val_set.append((iid, text))
for txt in txts[-10:]:
iid=os.path.basename(txt).split('.')[0]
txt=os.path.join(spk_dir, txt)
with open(txt) as f:
text=f.readline().strip()
eval_set.append((iid, text))
with open(train_txt, 'w') as f:
for iid, text in train_set:
f.write(f'{iid}|{text}\n')
with open(val_txt, 'w') as f:
for iid, text in val_set:
f.write(f'{iid}|{text}\n')
with open(eval_txt, 'w') as f:
for iid, text in eval_set:
f.write(f'{iid}|{text}\n')
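# Each line written above has the form "<utterance id>|<transcript>". Assuming
# the standard VCTK layout (txt/p225/p225_001.txt etc.), a typical line is:
#   p225_001|Please call Stella.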
| [
"os.path.join",
"os.listdir",
"os.path.basename"
] | [((236, 256), 'os.listdir', 'os.listdir', (['text_dir'], {}), '(text_dir)\n', (246, 256), False, 'import os\n'), ((395, 422), 'os.path.join', 'os.path.join', (['text_dir', 'spk'], {}), '(text_dir, spk)\n', (407, 422), False, 'import os\n'), ((432, 451), 'os.listdir', 'os.listdir', (['spk_dir'], {}), '(spk_dir)\n', (442, 451), False, 'import os\n'), ((555, 581), 'os.path.join', 'os.path.join', (['spk_dir', 'txt'], {}), '(spk_dir, txt)\n', (567, 581), False, 'import os\n'), ((777, 803), 'os.path.join', 'os.path.join', (['spk_dir', 'txt'], {}), '(spk_dir, txt)\n', (789, 803), False, 'import os\n'), ((994, 1020), 'os.path.join', 'os.path.join', (['spk_dir', 'txt'], {}), '(spk_dir, txt)\n', (1006, 1020), False, 'import os\n'), ((507, 528), 'os.path.basename', 'os.path.basename', (['txt'], {}), '(txt)\n', (523, 528), False, 'import os\n'), ((729, 750), 'os.path.basename', 'os.path.basename', (['txt'], {}), '(txt)\n', (745, 750), False, 'import os\n'), ((946, 967), 'os.path.basename', 'os.path.basename', (['txt'], {}), '(txt)\n', (962, 967), False, 'import os\n')] |
import unittest
from aviation_weather import Pressure
from aviation_weather.exceptions import PressureDecodeError
class TestPressure(unittest.TestCase):
"""Unit tests for aviation_weather.components.pressure.Pressure"""
def _test_valid(self, raw, indicator, value):
p = Pressure(raw)
self.assertEqual(raw, p.raw)
self.assertEqual(indicator, p.indicator)
self.assertEqual(value, p.value)
def test_valid_altimeter(self):
self._test_valid("A2992", "A", 29.92)
def test_valid_QNH(self):
self._test_valid("Q1013", "Q", 1013)
def test_invalid(self):
with self.assertRaises(PressureDecodeError):
Pressure("3000") # no unit indicator; more likely visibility
| [
"aviation_weather.Pressure"
] | [((289, 302), 'aviation_weather.Pressure', 'Pressure', (['raw'], {}), '(raw)\n', (297, 302), False, 'from aviation_weather import Pressure\n'), ((683, 699), 'aviation_weather.Pressure', 'Pressure', (['"""3000"""'], {}), "('3000')\n", (691, 699), False, 'from aviation_weather import Pressure\n')] |
from itertools import cycle
import random
import sys
import pygame
from pygame.locals import *
FPS = 30
SCREENWIDTH = 512
SCREENHEIGHT = 512
PIPEGAPSIZE = 100 # gap between upper and lower parts of a pipe
PIPEHEIGHT = 300
PIPEWIDTH = 50
BASEY = SCREENHEIGHT * 0.79
BASEX = 0
try:
xrange
except NameError:
xrange = range
class Player:
def __init__(self):
self.x = int(SCREENWIDTH * 0.2)
self.width = 20
self.height = 20
maxValue = int((SCREENHEIGHT - self.height) / SCREENHEIGHT * 100)
minValue = int(self.height / SCREENHEIGHT * 100)
self.y = int((SCREENHEIGHT - self.height) * random.randint(minValue, maxValue) / 100 )
# player velocity, max velocity, downward accleration, accleration on flap
self.velY = -9 # player's velocity along Y, default same as playerFlapped
self.maxVelY = 10 # max vel along Y, max descend speed
self.accY = 1 # players downward accleration
self.flapAcc = -9 # players speed on flapping
self.flapped = False # True when player flaps
self.score = 0
def update(self, event):
if event.type == KEYDOWN and (event.key == K_SPACE or event.key == K_UP):
if self.y > -2 * self.height:
self.velY = self.flapAcc
self.flapped = True
def main():
global SCREEN, FPSCLOCK, myfont
pygame.init()
FPSCLOCK = pygame.time.Clock()
SCREEN = pygame.display.set_mode((SCREENWIDTH, SCREENHEIGHT))
pygame.display.set_caption('Flappy Bird')
myfont = pygame.font.SysFont("Comic Sans MS", 30)
while True:
crashInfo = mainGame()
showGameOverScreen(crashInfo)
def mainGame():
players = []
for i in range(0,1):
players.append(Player())
# get 2 new pipes to add to upperPipes lowerPipes list
newPipe1 = getRandomPipe()
# newPipe2 = getRandomPipe()
# list of upper pipes
upperPipes = [
newPipe1[0],
# newPipe2[0],
]
# list of lowerpipe
lowerPipes = [
newPipe1[1],
# newPipe2[1],
]
pipeVelX = -4
while True:
        playerEvent = type('', (object,),{ 'type': 0, 'key': 0})  # sentinel "no event" stand-in exposing type/key attributes
for event in pygame.event.get():
if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
pygame.quit()
sys.exit()
if event.type == KEYDOWN and (event.key == K_SPACE or event.key == K_UP):
playerEvent = event
# move pipes to left
for uPipe, lPipe in zip(upperPipes, lowerPipes):
uPipe['x'] += pipeVelX
lPipe['x'] += pipeVelX
# add new pipe when first pipe is about to touch left of screen
if 0 < upperPipes[0]['x'] < 5:
newPipe = getRandomPipe()
upperPipes.append(newPipe[0])
lowerPipes.append(newPipe[1])
# remove first pipe if its out of the screen
if upperPipes[0]['x'] < -PIPEWIDTH:
upperPipes.pop(0)
lowerPipes.pop(0)
# draw sprites
SCREEN.fill((0,0,0))
for uPipe, lPipe in zip(upperPipes, lowerPipes):
pygame.draw.rect(SCREEN,(255,255,255), (uPipe['x'], uPipe['y'],PIPEWIDTH,PIPEHEIGHT))
pygame.draw.rect(SCREEN,(255,255,255), (lPipe['x'], lPipe['y'],PIPEWIDTH,PIPEHEIGHT))
pygame.draw.rect(SCREEN,(255,255,255), (BASEX, BASEY,SCREENWIDTH,BASEY))
for player in players:
player.update(playerEvent)
# check for crash here
crashTest = checkCrash(player,
upperPipes, lowerPipes)
if crashTest[0]:
players.remove(player)
if len(players) ==0:
return {
'player': player,
'upperPipes': upperPipes,
'lowerPipes': lowerPipes,
}
# check for score
playerMidPos = player.x + player.width / 2
for pipe in upperPipes:
pipeMidPos = pipe['x'] + PIPEWIDTH / 2
if pipeMidPos <= playerMidPos < pipeMidPos + 4:
player.score += 1
# player's movement
if player.velY < player.maxVelY and not player.flapped:
player.velY += player.accY
if player.flapped:
player.flapped = False
player.y += min(player.velY, BASEY - player.y - player.height)
# print score so player overlaps the score
showScore(player.score)
pygame.draw.ellipse(SCREEN, (255,255,255,200), (player.x, player.y, player.width, player.width), 0)
pygame.display.update()
FPSCLOCK.tick(FPS)
def showGameOverScreen(crashInfo):
"""crashes the player down ans shows gameover image"""
player = crashInfo['player']
upperPipes, lowerPipes = crashInfo['upperPipes'], crashInfo['lowerPipes']
while True:
for event in pygame.event.get():
if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
pygame.quit()
sys.exit()
if event.type == KEYDOWN and (event.key == K_SPACE or event.key == K_UP):
return
# draw sprites
SCREEN.fill((0,0,0))
for uPipe, lPipe in zip(upperPipes, lowerPipes):
pygame.draw.rect(SCREEN,(255,255,255), (uPipe['x'], uPipe['y'],PIPEWIDTH, PIPEHEIGHT))
pygame.draw.rect(SCREEN,(255,255,255), (lPipe['x'], lPipe['y'],PIPEWIDTH, PIPEHEIGHT))
pygame.draw.rect(SCREEN,(255,255,255), (BASEX, BASEY,SCREENWIDTH,BASEY))
showScore(player.score)
pygame.draw.ellipse(SCREEN, (255,255,255,200), (player.x, player.y, player.width, player.width), 0)
FPSCLOCK.tick(FPS)
pygame.display.update()
def getRandomPipe():
"""returns a randomly generated pipe"""
# y of gap between upper and lower pipe
gapY = random.randrange(0, int(BASEY * 0.6 - PIPEGAPSIZE))
gapY += int(BASEY * 0.2)
pipeX = SCREENWIDTH + 10
return [
{'x': pipeX, 'y': gapY - PIPEHEIGHT}, # upper pipe
{'x': pipeX, 'y': gapY + PIPEGAPSIZE}, # lower pipe
]
def showScore(score):
"""displays score in center of screen"""
label = myfont.render(str(score), 1, (255,255,255))
SCREEN.blit(label, (10, 10))
def checkCrash(player, upperPipes, lowerPipes):
"""returns True if player collders with base or pipes."""
# if player crashes into ground
if player.y + player.height >= BASEY - 1:
return [True, True]
else:
playerRect = pygame.Rect(player.x, player.y,
player.width, player.height)
for uPipe, lPipe in zip(upperPipes, lowerPipes):
# upper and lower pipe rects
uPipeRect = pygame.Rect(uPipe['x'], uPipe['y'], PIPEWIDTH, PIPEHEIGHT)
lPipeRect = pygame.Rect(lPipe['x'], lPipe['y'], PIPEWIDTH, PIPEHEIGHT)
# if bird collided with upipe or lpipe
uCollide = pixelCollision(playerRect, uPipeRect)
lCollide = pixelCollision(playerRect, lPipeRect)
if uCollide or lCollide:
return [True, False]
return [False, False]
def pixelCollision(rect1, rect2):
"""Checks if two objects collide and not just their rects"""
rect = rect1.clip(rect2)
if rect.width == 0 or rect.height == 0:
return False
return True
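# pygame.Rect.clip returns the intersection of two rects, and a zero-size Rect
# when they do not overlap, which is exactly what the width/height check above
# relies on. For example:
#   pygame.Rect(0, 0, 10, 10).clip(pygame.Rect(5, 5, 10, 10))   # -> <rect(5, 5, 5, 5)>
#   pygame.Rect(0, 0, 10, 10).clip(pygame.Rect(20, 20, 5, 5))   # -> zero-size rect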
if __name__ == '__main__':
main()
| [
"pygame.draw.ellipse",
"pygame.quit",
"random.randint",
"pygame.font.SysFont",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.draw.rect",
"pygame.Rect",
"pygame.init",
"pygame.display.update",
"pygame.display.set_caption",
"pygame.time.Clock",
"sys.exit"
] | [((1471, 1484), 'pygame.init', 'pygame.init', ([], {}), '()\n', (1482, 1484), False, 'import pygame\n'), ((1500, 1519), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (1517, 1519), False, 'import pygame\n'), ((1533, 1585), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(SCREENWIDTH, SCREENHEIGHT)'], {}), '((SCREENWIDTH, SCREENHEIGHT))\n', (1556, 1585), False, 'import pygame\n'), ((1590, 1631), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Flappy Bird"""'], {}), "('Flappy Bird')\n", (1616, 1631), False, 'import pygame\n'), ((1645, 1685), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""Comic Sans MS"""', '(30)'], {}), "('Comic Sans MS', 30)\n", (1664, 1685), False, 'import pygame\n'), ((2304, 2322), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (2320, 2322), False, 'import pygame\n'), ((3456, 3533), 'pygame.draw.rect', 'pygame.draw.rect', (['SCREEN', '(255, 255, 255)', '(BASEX, BASEY, SCREENWIDTH, BASEY)'], {}), '(SCREEN, (255, 255, 255), (BASEX, BASEY, SCREENWIDTH, BASEY))\n', (3472, 3533), False, 'import pygame\n'), ((4826, 4849), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (4847, 4849), False, 'import pygame\n'), ((5123, 5141), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (5139, 5141), False, 'import pygame\n'), ((5715, 5792), 'pygame.draw.rect', 'pygame.draw.rect', (['SCREEN', '(255, 255, 255)', '(BASEX, BASEY, SCREENWIDTH, BASEY)'], {}), '(SCREEN, (255, 255, 255), (BASEX, BASEY, SCREENWIDTH, BASEY))\n', (5731, 5792), False, 'import pygame\n'), ((5829, 5935), 'pygame.draw.ellipse', 'pygame.draw.ellipse', (['SCREEN', '(255, 255, 255, 200)', '(player.x, player.y, player.width, player.width)', '(0)'], {}), '(SCREEN, (255, 255, 255, 200), (player.x, player.y,\n player.width, player.width), 0)\n', (5848, 5935), False, 'import pygame\n'), ((5965, 5988), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (5986, 5988), False, 'import pygame\n'), ((6772, 6832), 'pygame.Rect', 'pygame.Rect', (['player.x', 'player.y', 'player.width', 'player.height'], {}), '(player.x, player.y, player.width, player.height)\n', (6783, 6832), False, 'import pygame\n'), ((3263, 3357), 'pygame.draw.rect', 'pygame.draw.rect', (['SCREEN', '(255, 255, 255)', "(uPipe['x'], uPipe['y'], PIPEWIDTH, PIPEHEIGHT)"], {}), "(SCREEN, (255, 255, 255), (uPipe['x'], uPipe['y'],\n PIPEWIDTH, PIPEHEIGHT))\n", (3279, 3357), False, 'import pygame\n'), ((3361, 3455), 'pygame.draw.rect', 'pygame.draw.rect', (['SCREEN', '(255, 255, 255)', "(lPipe['x'], lPipe['y'], PIPEWIDTH, PIPEHEIGHT)"], {}), "(SCREEN, (255, 255, 255), (lPipe['x'], lPipe['y'],\n PIPEWIDTH, PIPEHEIGHT))\n", (3377, 3455), False, 'import pygame\n'), ((4717, 4823), 'pygame.draw.ellipse', 'pygame.draw.ellipse', (['SCREEN', '(255, 255, 255, 200)', '(player.x, player.y, player.width, player.width)', '(0)'], {}), '(SCREEN, (255, 255, 255, 200), (player.x, player.y,\n player.width, player.width), 0)\n', (4736, 4823), False, 'import pygame\n'), ((5520, 5614), 'pygame.draw.rect', 'pygame.draw.rect', (['SCREEN', '(255, 255, 255)', "(uPipe['x'], uPipe['y'], PIPEWIDTH, PIPEHEIGHT)"], {}), "(SCREEN, (255, 255, 255), (uPipe['x'], uPipe['y'],\n PIPEWIDTH, PIPEHEIGHT))\n", (5536, 5614), False, 'import pygame\n'), ((5619, 5713), 'pygame.draw.rect', 'pygame.draw.rect', (['SCREEN', '(255, 255, 255)', "(lPipe['x'], lPipe['y'], PIPEWIDTH, PIPEHEIGHT)"], {}), "(SCREEN, (255, 255, 255), (lPipe['x'], lPipe['y'],\n PIPEWIDTH, PIPEHEIGHT))\n", (5635, 5713), False, 'import pygame\n'), 
((6978, 7036), 'pygame.Rect', 'pygame.Rect', (["uPipe['x']", "uPipe['y']", 'PIPEWIDTH', 'PIPEHEIGHT'], {}), "(uPipe['x'], uPipe['y'], PIPEWIDTH, PIPEHEIGHT)\n", (6989, 7036), False, 'import pygame\n'), ((7061, 7119), 'pygame.Rect', 'pygame.Rect', (["lPipe['x']", "lPipe['y']", 'PIPEWIDTH', 'PIPEHEIGHT'], {}), "(lPipe['x'], lPipe['y'], PIPEWIDTH, PIPEHEIGHT)\n", (7072, 7119), False, 'import pygame\n'), ((2428, 2441), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (2439, 2441), False, 'import pygame\n'), ((2458, 2468), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2466, 2468), False, 'import sys\n'), ((5247, 5260), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (5258, 5260), False, 'import pygame\n'), ((5277, 5287), 'sys.exit', 'sys.exit', ([], {}), '()\n', (5285, 5287), False, 'import sys\n'), ((710, 744), 'random.randint', 'random.randint', (['minValue', 'maxValue'], {}), '(minValue, maxValue)\n', (724, 744), False, 'import random\n')] |
from django.shortcuts import render, get_object_or_404
from .models import About,Appointment,Doctor,Report,Service,History
from django.http import HttpResponse,HttpResponseRedirect
from django.shortcuts import redirect
from django.contrib.auth.decorators import login_required
# Create your views here.
def home(request):
return render(request, "index.html", {})
def about(request):
context = {
'about': About.objects.all()
}
return render(request, "about.html", context)
def doctor_list(request):
context = {
'doctors': Doctor.objects.all()
}
return render(request, "doctor_list_booking.html", context)
def services(request):
context = {
'service': Service.objects.all()
}
return render(request, "services.html", context)
@login_required
def appointment_delete(request, appointment_id):
appointment = get_object_or_404(Appointment, pk=appointment_id)
if request.method == 'POST':
appointment.delete()
return redirect('appointments_list')
return redirect('appointments_list')
@login_required
def lab_report_delete(request, report_id):
lab_report = get_object_or_404(Report, pk=report_id)
if request.method == 'POST':
lab_report.delete()
return redirect('lab_reports')
return redirect('lab_reports')
@login_required
def lab_reports(request):
reports = Report.objects.filter(user = request.user)
if reports:
return render(request, "lab_reports_list.html", {'reports': reports})
else:
message = "No records Found"
return render(request, "lab_reports_list.html", {'message': message})
@login_required
def appointments(request):
appointments = Appointment.objects.filter(user = request.user)
if appointments:
return render(request, "appointments_list.html", {'appointments': appointments})
else:
message = "No records Found"
return render(request, "appointments_list.html", {'message': message})
def single_report(request, report_id):
report = get_object_or_404(Report, pk=report_id)
return render(request, "single_report.html", {'report': report})
def single_service(request, service_id):
service = get_object_or_404(Service, pk=service_id)
return render(request, 'single_service.html', {'service': service, 'services_info' : Service.objects.all()})
def single_doctor(request,doctor_id):
doctor = get_object_or_404(Doctor, pk=doctor_id)
return render(request, "single_doctor_booking.html", {'doctor': doctor})
@login_required
def profile(request):
history = History.objects.filter(user = request.user)
if history:
return render(request, "profile.html", {'history': history})
else:
message = "No records Found"
return render(request, "profile.html", {'message': message})
@login_required
def booking(request, doctor_id):
if request.method == 'POST' and request.POST.get('Appointment Date') and request.POST.get('Appointment Time'):
disease = request.POST.get('issue')
date=request.POST.get('Appointment Date')
time=request.POST.get('Appointment Time')
doctor = doctor_id
user = request.user
appointment = Appointment.objects.create(date=date, time=time, user=user, disease_option=disease, doctor = doctor)
appointment.save()
appointments = Appointment.objects.filter(user = request.user)
return render(request, 'appointments_list.html', {'appointments': appointments})
else:
appointments = Appointment.objects.filter(user = request.user)
return render(request, 'appointments_list.html', {'appointments': appointments})
def contactus(request):
pass
def patient_info(request):
pass
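# A minimal URLconf sketch for the views above. The route names come from the
# redirect() calls in this file; the path strings themselves are assumptions:
#
# from django.urls import path
# from . import views
#
# urlpatterns = [
#     path('', views.home, name='home'),
#     path('appointments/', views.appointments, name='appointments_list'),
#     path('reports/', views.lab_reports, name='lab_reports'),
#     path('booking/<int:doctor_id>/', views.booking, name='booking'),
# ]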
| [
"django.shortcuts.render",
"django.shortcuts.get_object_or_404",
"django.shortcuts.redirect"
] | [((334, 367), 'django.shortcuts.render', 'render', (['request', '"""index.html"""', '{}'], {}), "(request, 'index.html', {})\n", (340, 367), False, 'from django.shortcuts import render, get_object_or_404\n'), ((454, 492), 'django.shortcuts.render', 'render', (['request', '"""about.html"""', 'context'], {}), "(request, 'about.html', context)\n", (460, 492), False, 'from django.shortcuts import render, get_object_or_404\n'), ((589, 641), 'django.shortcuts.render', 'render', (['request', '"""doctor_list_booking.html"""', 'context'], {}), "(request, 'doctor_list_booking.html', context)\n", (595, 641), False, 'from django.shortcuts import render, get_object_or_404\n'), ((736, 777), 'django.shortcuts.render', 'render', (['request', '"""services.html"""', 'context'], {}), "(request, 'services.html', context)\n", (742, 777), False, 'from django.shortcuts import render, get_object_or_404\n'), ((862, 911), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Appointment'], {'pk': 'appointment_id'}), '(Appointment, pk=appointment_id)\n', (879, 911), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1030, 1059), 'django.shortcuts.redirect', 'redirect', (['"""appointments_list"""'], {}), "('appointments_list')\n", (1038, 1059), False, 'from django.shortcuts import redirect\n'), ((1137, 1176), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Report'], {'pk': 'report_id'}), '(Report, pk=report_id)\n', (1154, 1176), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1288, 1311), 'django.shortcuts.redirect', 'redirect', (['"""lab_reports"""'], {}), "('lab_reports')\n", (1296, 1311), False, 'from django.shortcuts import redirect\n'), ((2031, 2070), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Report'], {'pk': 'report_id'}), '(Report, pk=report_id)\n', (2048, 2070), False, 'from django.shortcuts import render, get_object_or_404\n'), ((2082, 2139), 'django.shortcuts.render', 'render', (['request', '"""single_report.html"""', "{'report': report}"], {}), "(request, 'single_report.html', {'report': report})\n", (2088, 2139), False, 'from django.shortcuts import render, get_object_or_404\n'), ((2196, 2237), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Service'], {'pk': 'service_id'}), '(Service, pk=service_id)\n', (2213, 2237), False, 'from django.shortcuts import render, get_object_or_404\n'), ((2403, 2442), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Doctor'], {'pk': 'doctor_id'}), '(Doctor, pk=doctor_id)\n', (2420, 2442), False, 'from django.shortcuts import render, get_object_or_404\n'), ((2454, 2519), 'django.shortcuts.render', 'render', (['request', '"""single_doctor_booking.html"""', "{'doctor': doctor}"], {}), "(request, 'single_doctor_booking.html', {'doctor': doctor})\n", (2460, 2519), False, 'from django.shortcuts import render, get_object_or_404\n'), ((989, 1018), 'django.shortcuts.redirect', 'redirect', (['"""appointments_list"""'], {}), "('appointments_list')\n", (997, 1018), False, 'from django.shortcuts import redirect\n'), ((1253, 1276), 'django.shortcuts.redirect', 'redirect', (['"""lab_reports"""'], {}), "('lab_reports')\n", (1261, 1276), False, 'from django.shortcuts import redirect\n'), ((1443, 1505), 'django.shortcuts.render', 'render', (['request', '"""lab_reports_list.html"""', "{'reports': reports}"], {}), "(request, 'lab_reports_list.html', {'reports': reports})\n", (1449, 1505), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1568, 1630), 
'django.shortcuts.render', 'render', (['request', '"""lab_reports_list.html"""', "{'message': message}"], {}), "(request, 'lab_reports_list.html', {'message': message})\n", (1574, 1630), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1778, 1851), 'django.shortcuts.render', 'render', (['request', '"""appointments_list.html"""', "{'appointments': appointments}"], {}), "(request, 'appointments_list.html', {'appointments': appointments})\n", (1784, 1851), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1914, 1977), 'django.shortcuts.render', 'render', (['request', '"""appointments_list.html"""', "{'message': message}"], {}), "(request, 'appointments_list.html', {'message': message})\n", (1920, 1977), False, 'from django.shortcuts import render, get_object_or_404\n'), ((2648, 2701), 'django.shortcuts.render', 'render', (['request', '"""profile.html"""', "{'history': history}"], {}), "(request, 'profile.html', {'history': history})\n", (2654, 2701), False, 'from django.shortcuts import render, get_object_or_404\n'), ((2764, 2817), 'django.shortcuts.render', 'render', (['request', '"""profile.html"""', "{'message': message}"], {}), "(request, 'profile.html', {'message': message})\n", (2770, 2817), False, 'from django.shortcuts import render, get_object_or_404\n'), ((3418, 3491), 'django.shortcuts.render', 'render', (['request', '"""appointments_list.html"""', "{'appointments': appointments}"], {}), "(request, 'appointments_list.html', {'appointments': appointments})\n", (3424, 3491), False, 'from django.shortcuts import render, get_object_or_404\n'), ((3588, 3661), 'django.shortcuts.render', 'render', (['request', '"""appointments_list.html"""', "{'appointments': appointments}"], {}), "(request, 'appointments_list.html', {'appointments': appointments})\n", (3594, 3661), False, 'from django.shortcuts import render, get_object_or_404\n')] |
"""Basic test configuration"""
from unittest.mock import MagicMock
from pytest import fixture
from elasticsearch_dsl.connections import add_connection
@fixture
def mock_client(dummy_response):
"""Returns elasticsearch mock client"""
client = MagicMock()
client.search.return_value = dummy_response
add_connection("mock", client)
yield client
@fixture(name="dummy_response")
def fixture_dummy_response():
"""Returns the dictionary for comparison in tests"""
return {
"_shards": {
"failed": 0,
"successful": 10,
"total": 10
},
"hits": {
"hits": [
{
"_index": "blog",
"_type": "_doc",
"_id": "1",
"_score": "10.114",
"_source": {
"title": "Test elasticsearch",
"body": """
Litwo! Ojczyzno moja! Ty jesteś jak zdrowie. Ile cię stracił.
Dziś człowieka nie policzę.
Opuszczali rodziców i jeszcze dobrze
na kozłach niemczysko chude na Ojczyzny łono.
Tymczasem na nim się zdawał małpą lub ławę przeskoczyć.
Zręcznie między dwie strony: Uciszcie się! woła.
Marząc i krwi tonęła,
gdy przysięgał na krzaki fijołkowe skłonił oczyma ciekawymi
po kryjomu kazał stoły z Paryża a czuł choroby zaród.
Krzyczano na waszych polowaniach łowił?
Piękna byłaby sława, ażeby nie było gorąca).
wachlarz pozłocist powiewając rozlewał deszcz iskier rzęsisty.
Głowa do Twych świątyń progi iść
za zającami nie został pośmiewiska celem i niesrogi.
Odgadnęła sąsiadka powód jego lata
wleką w kota się nagle, stronnicy Sokół na kształt deski.
Nogi miał głos miły: Witaj nam,
że spod ramion wytknął palce i Asesor, razem, jakoby zlewa.
I też co się przyciągnąć do dworu
uprawne dobrze zachowana sklepienie całe wesoło, lecz w rozmowę lecz lekki.
odgadniesz, że jacyś Francuzi wymowny
zrobili wynalazek: iż ludzie są architektury.
Choć Sędzia jego bok usiadła
owa piękność zda się Gorecki, Pac i opisuję,
bo tak nas reformować cywilizować
będzie wojna u nas starych więcej godni
Wojewody względów doszli potem się teraz
wzrostem dorodniejsza bo tak pan Wojski na nim ją w ulicę się tajemnie,
Ścigany od płaczącej matki pod
Turka czy wstydzić, czy na lewo,
on rodaków zbiera się w domu dostatek mieszka i panien
nie w nieczynności! a Suwarów w posiadłość.
""",
"published_from": "2013-02-10T10:31:07.851688",
"tags": ["g1", "g2"],
"lines": "1",
},
"highlight": {
"title": ["<em>Test</em> elasticsearch"]
},
},
{
"_index": "blog",
"_type": "_doc",
"_id": "2",
"_score": "12.0",
"_source": {
"title": "Test elasticsearch numer 2",
"body": """
Litwo! Ojczyzno moja! Ty jesteś jak zdrowie. Ile cię stracił.
Dziś człowieka nie policzę.
Opuszczali rodziców i jeszcze dobrze
na kozłach niemczysko chude na Ojczyzny łono.
Tymczasem na nim się zdawał małpą lub ławę przeskoczyć.
Zręcznie między dwie strony: Uciszcie się! woła.
Marząc i krwi tonęła,
gdy przysięgał na krzaki fijołkowe skłonił oczyma ciekawymi
po kryjomu kazał stoły z Paryża a czuł choroby zaród.
Krzyczano na waszych polowaniach łowił?
Piękna byłaby sława, ażeby nie było gorąca).
wachlarz pozłocist powiewając rozlewał deszcz iskier rzęsisty.
Głowa do Twych świątyń progi iść
za zającami nie został pośmiewiska celem i niesrogi.
Odgadnęła sąsiadka powód jego lata
wleką w kota się nagle, stronnicy Sokół na kształt deski.
Nogi miał głos miły: Witaj nam,
że spod ramion wytknął palce i Asesor, razem, jakoby zlewa.
I też co się przyciągnąć do dworu
uprawne dobrze zachowana sklepienie całe wesoło, lecz w rozmowę lecz lekki.
odgadniesz, że jacyś Francuzi wymowny
zrobili wynalazek: iż ludzie są architektury.
Choć Sędzia jego bok usiadła
owa piękność zda się Gorecki, Pac i opisuję,
bo tak nas reformować cywilizować
będzie wojna u nas starych więcej godni
Wojewody względów doszli potem się teraz
wzrostem dorodniejsza bo tak pan Wojski na nim ją w ulicę się tajemnie,
Ścigany od płaczącej matki pod
Turka czy wstydzić, czy na lewo,
on rodaków zbiera się w domu dostatek mieszka i panien
nie w nieczynności! a Suwarów w posiadłość.
""",
"published_from": "2014-02-10T10:31:07.851688",
"tags": ["g1", "g2"],
"lines": "1",
},
"highlight": {
"title": ["<em>Test</em> elasticsearch numer 2"]
},
},
]
},
"timed_out": False,
"took": 123,
}
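# A sketch of how these fixtures might be used, assuming elasticsearch-dsl's
# Search resolves the "mock" alias registered above (test name is hypothetical):
#
# from elasticsearch_dsl import Search
#
# def test_search_is_mocked(mock_client):
#     response = Search(using="mock").execute()
#     assert len(response.hits) == 2
#     mock_client.search.assert_called_once()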
| [
"pytest.fixture",
"unittest.mock.MagicMock",
"elasticsearch_dsl.connections.add_connection"
] | [((369, 399), 'pytest.fixture', 'fixture', ([], {'name': '"""dummy_response"""'}), "(name='dummy_response')\n", (376, 399), False, 'from pytest import fixture\n'), ((254, 265), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (263, 265), False, 'from unittest.mock import MagicMock\n'), ((318, 348), 'elasticsearch_dsl.connections.add_connection', 'add_connection', (['"""mock"""', 'client'], {}), "('mock', client)\n", (332, 348), False, 'from elasticsearch_dsl.connections import add_connection\n')] |
from __future__ import print_function
import csv
import numpy as np
import re
import Spectrum
#import matplotlib.pyplot as plt
def ReadCSVRef(filename):
with open(filename) as csvfile:
reader = csv.reader(csvfile, delimiter=',')
headers = list(filter(None, next(reader)))
data = []
for row in reader:
data.append(row[:-1])
data = np.array(data)
data[data == ''] = np.nan
data = data.astype(float)
dataDict = {}
i = 0
columns_per_data = int(np.shape(data[0])[0]/np.shape(headers)[0])
print(columns_per_data)
for hh in headers:
label = tuple(map(int, re.findall(r'\d+', hh)))
dataDict[label] = data[:, i:i+columns_per_data]
i+= columns_per_data
return dataDict
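# Example of the header parsing above: a header cell whose text contains two
# numbers, e.g. "1250 / 2000" (exact wording assumed), becomes the dict key
# (1250, 2000) -- matching lookups like dataDict[grating, center_wavelength] below.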
# Add error-checking for entering a non-existent grating/wavelength pair
class SystemCorrectionFactor(object):
def __init__(self, grating, center_wavelength, wavelengths = None):
self.grating = grating
self.center_wavelength = center_wavelength
if grating >= 1000:
self.correction_spectrum = self.ImportIR()
elif wavelengths is not None:
self.correction_spectrum = self.ImportVis(wavelengths)
else:
print('No valid reference for system correction!')
def ImportIR(self):
filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/CorrectionFactorSCAlIRCamera_2015_02_26.csv'
dataDict = ReadCSVRef(filename)
d = dataDict[self.grating, self.center_wavelength]
correction_spectrum = Spectrum.CLSpectrum(d[:,1], d[:,0])
return correction_spectrum
def ImportVis(self, wavelengths):
filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/SystemResponseVISInterpolated_20150717.csv'
dataDict = ReadCSVRef(filename)
d = dataDict[(self.grating,)]
spectrum_interp = np.interp(wavelengths, d[:, 0], d[:, 1])
correction_spectrum = Spectrum.CLSpectrum(spectrum_interp, wavelengths)
return correction_spectrum
class WavelengthCorrectionFactor(object):
def __init__(self, grating, center_wavelength):
self.grating = grating
self.center_wavelength = center_wavelength
if self.grating in (1250, 1600, 2000):
self.wavelength = self.importIRwavelengths()
elif self.grating in (500, 800):
self.wavelength = self.importVISwavelengths()
else:
print('No valid reference for wavelength correction!')
def importIRwavelengths(self):
filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsIR20150428.csv'
dataDict = ReadCSVRef(filename)
correction_spectrum = dataDict[self.grating, self.center_wavelength]
return correction_spectrum
def importVISwavelengths(self):
filename = '/home/isobel/Documents/McMaster/CL/SystemResponseFcns/WinspecCorrWavelengthsVis20150309.csv'
dataDict = ReadCSVRef(filename)
wavelengths = dataDict[self.grating, self.center_wavelength]
return wavelengths
#wvls = np.linspace(400, 980)
#p = SystemCorrectionFactor(800, 750, wvls)
#print(np.shape(p.correction_spectrum.SpectrumRange))
#plt.plot(p.correction_spectrum.SpectrumRange, p.correction_spectrum.intensity)
#plt.show()
| [
"csv.reader",
"numpy.shape",
"re.findall",
"numpy.array",
"numpy.interp",
"Spectrum.CLSpectrum"
] | [((383, 397), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (391, 397), True, 'import numpy as np\n'), ((207, 241), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (217, 241), False, 'import csv\n'), ((1620, 1657), 'Spectrum.CLSpectrum', 'Spectrum.CLSpectrum', (['d[:, 1]', 'd[:, 0]'], {}), '(d[:, 1], d[:, 0])\n', (1639, 1657), False, 'import Spectrum\n'), ((1960, 2000), 'numpy.interp', 'np.interp', (['wavelengths', 'd[:, 0]', 'd[:, 1]'], {}), '(wavelengths, d[:, 0], d[:, 1])\n', (1969, 2000), True, 'import numpy as np\n'), ((2031, 2080), 'Spectrum.CLSpectrum', 'Spectrum.CLSpectrum', (['spectrum_interp', 'wavelengths'], {}), '(spectrum_interp, wavelengths)\n', (2050, 2080), False, 'import Spectrum\n'), ((513, 530), 'numpy.shape', 'np.shape', (['data[0]'], {}), '(data[0])\n', (521, 530), True, 'import numpy as np\n'), ((534, 551), 'numpy.shape', 'np.shape', (['headers'], {}), '(headers)\n', (542, 551), True, 'import numpy as np\n'), ((638, 660), 're.findall', 're.findall', (['"""\\\\d+"""', 'hh'], {}), "('\\\\d+', hh)\n", (648, 660), False, 'import re\n')] |
"""Class containing the generic markdown engine used by evenniq_wiki."""
from bs4 import BeautifulSoup
from markdown import Markdown
class MarkdownEngine(Markdown):
"""A special markdown engine for the evennia_wiki.
This pre-loads some common extensions and allows some inner processing.
"""
def __init__(self):
super(MarkdownEngine, self).__init__(extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.footnotes',
'markdown.extensions.tables',
'markdown.extensions.toc',
])
def convert(self, text):
"""Convert the text to HTML, changing some classes.
1. Table elements will have classes table table-responsive table-striped
2. Table headers will have the class thead-inverse
3. Links elements will be re-mapped if absolute (beginning by /)
"""
html = super(MarkdownEngine, self).convert(text)
soup = BeautifulSoup(html, 'html.parser')
# Add classes to tables
for tag in soup.find_all("table"):
tag["class"] = "table table-responsive table-striped"
# Add classes to table headers
for tag in soup.find_all("thead"):
tag["class"] = "thead-inverse"
        # Remap links that point to /*; we assume an absolute
        # URL (starting with /) means a wiki page.
for tag in soup.find_all("a"):
href = tag.get("href")
if href and href.startswith("/"):
tag["href"] = "/wiki" + href
return str(soup)
ENGINE = MarkdownEngine()
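# Illustrative use of the engine, following the rules documented in convert():
#   ENGINE.convert("[Combat](/Combat)")
#   # -> '<p><a href="/wiki/Combat">Combat</a></p>'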
| [
"bs4.BeautifulSoup"
] | [((981, 1015), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (994, 1015), False, 'from bs4 import BeautifulSoup\n')] |
# AUTOGENERATED! DO NOT EDIT! File to edit: 00_roster.ipynb (unless otherwise specified).
__all__ = ['ShiftProperties', 'Shift', 'Roster']
# Cell
from dataclasses import dataclass
from datetime import datetime, timedelta, date, time
from ics import Calendar, Event
import re
from typing import Optional
from zoneinfo import ZoneInfo
@dataclass
class ShiftProperties:
name: str
starting_hour: timedelta
duration: timedelta
@dataclass
class Shift:
properties: ShiftProperties
date: datetime
def __post_init__(self):
self.beginning: datetime = self.date + self.properties.starting_hour
# Cell
@dataclass
class Roster:
shifts: list[Shift]
name: str = "<NAME>"
_year: int = 2022
_month: int = 3 # TODO: Read from Excel
_dayp = re.compile(r"MO|DI|MI|DO|FR|SA|SO")
_datep = re.compile(r"\d{2}")
@classmethod
def from_dict(
cls, input: dict[str, str], mapper: Optional[dict] = None
) -> "Roster":
shifts = []
# TODO: This whole continue stuff is just horrible. Change it future me!
for date_str, abbr in input.items():
if abbr == "(/)":
continue
try:
props = mapper[abbr]
if not props:
print(f"No properties for shift abbrevation: {abbr}")
continue
except KeyError:
print(f"Shift abbrevation not found in mapper: {abbr}")
continue
date = datetime(
year=cls._year,
month=cls._month,
day=int(cls._datep.search(date_str).group()),
tzinfo=ZoneInfo("Europe/Berlin"),
)
shift = Shift(props, date=date)
shifts.append(shift)
return cls(shifts=shifts)
def to_ics(self):
c = Calendar()
for shift in self.shifts:
e = Event()
e.name = shift.properties.name
e.begin = shift.beginning
e.duration = shift.properties.duration
c.events.add(e)
return c
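# Hypothetical usage (abbreviation, name, hours and duration are assumptions):
#   mapper = {"F": ShiftProperties(name="Early shift",
#                                  starting_hour=timedelta(hours=6),
#                                  duration=timedelta(hours=8))}
#   roster = Roster.from_dict({"DI 01": "F", "MI 02": "(/)"}, mapper=mapper)
#   roster.to_ics()  # Calendar with one event on 2022-03-01 starting 06:00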
| [
"ics.Event",
"zoneinfo.ZoneInfo",
"ics.Calendar",
"re.compile"
] | [((783, 817), 're.compile', 're.compile', (['"""MO|DI|MI|DO|FR|SA|SO"""'], {}), "('MO|DI|MI|DO|FR|SA|SO')\n", (793, 817), False, 'import re\n'), ((832, 852), 're.compile', 're.compile', (['"""\\\\d{2}"""'], {}), "('\\\\d{2}')\n", (842, 852), False, 'import re\n'), ((1858, 1868), 'ics.Calendar', 'Calendar', ([], {}), '()\n', (1866, 1868), False, 'from ics import Calendar, Event\n'), ((1919, 1926), 'ics.Event', 'Event', ([], {}), '()\n', (1924, 1926), False, 'from ics import Calendar, Event\n'), ((1669, 1694), 'zoneinfo.ZoneInfo', 'ZoneInfo', (['"""Europe/Berlin"""'], {}), "('Europe/Berlin')\n", (1677, 1694), False, 'from zoneinfo import ZoneInfo\n')] |
import sys
import discord
from discord.ext import tasks
import base.command_base as base
import base.DiscordSend as Sendtool
import base.ColorPrint as CPrint
import base.time_check as CTime
import os
import collections as cl
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
import json
import command.voice_log.Config_Main as CSetting
import command.voice_log.chart as Chart
import pandas as pd
class command(base.command_base) :
def __init__(self) :
super().__init__()
self.test_task: tasks.Loop = None
self.now_filepath = CSetting.baseLogFolder + CSetting.JSONPATH_row + CSetting.JSONPATH_now
    # JSON append helper
    ## adapted from https://qiita.com/KEINOS/items/ea4bda15506bbd3e6913
def append_json_to_file(self, _dict, path_file):
try :
            with open(path_file, 'ab+') as f: # open the file
                f.seek(0,2) # move to the end of the file (whence=2, offset 0)
                if f.tell() == 0 : # check whether the file is empty
                    f.write(json.dumps([_dict],indent=4,ensure_ascii=False).encode()) # if empty, write a JSON array
                else :
                    f.seek(-1,2) # step back one byte from the end of the file
                    f.truncate() # drop the last character to reopen the JSON array (removes ']')
                    f.write(' , '.encode()) # write the array separator
                    f.write(json.dumps(_dict,indent=4,ensure_ascii=False).encode()) # dump the dict as JSON
                    f.write(']'.encode()) # close the JSON array
except OSError as e:
            CPrint.error_print( path_file + " does not exist")
print(os.getcwd())
print(e)
        return f.close() # for repeated appends it may be better not to open/close every time
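    # Worked example of the append logic above: after the first call the file
    # holds '[{...}]'; each subsequent call seeks one byte back from EOF,
    # truncates the closing ']', writes ' , ' plus the new dict, and rewrites
    # ']', keeping the file a valid JSON array throughout.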
    # JSON output (monthly schedule, rotates the raw log file)
async def MonthOutput(self, client: discord.Client):
today = datetime.today()
filetime = today - relativedelta(months=1)
        # Decide the filename to rename the current raw log to
m_month = datetime.strftime(filetime,'%m')
m_year = datetime.strftime(filetime,'%Y')
month_filename = '{0}{1}'.format(m_year, m_month)
mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + ".json"
        if not os.path.exists(self.now_filepath):
            # TODO: report an error here
            return None
# Rename
        os.rename(self.now_filepath, mv_filename )
        # recreate the "now" raw log file, empty
        with open( self.now_filepath ,"w"):pass
        # Build the processed data
timeData = await Chart.makeTimeList(client, mv_filename , CSetting.OneMonthOutput_RoleID , mode="NAME")
        # Save the processed data as CSV
if timeData is not None :
send_fileName = CSetting.baseLogFolder + CSetting.JSONPATH_analysis + month_filename + ".csv"
timeData.to_csv( send_fileName )
return send_fileName
else :
return None
#async def on_message(self, config, client: discord.Client, message: discord.Message) :
#sendfile = await self.MonthOutput(client=client)
#if sendfile is None :
# await Sendtool.Send_Member(Data=message, message="ログファイルがありませんでした。", filename=None)
#else :
# await Sendtool.Send_Member(Data=message, message="MonthOutput!", filename=sendfile)
#pass
    ## Voice-channel join/leave monitoring
async def on_voice_state_update(self, config, client: discord.Client, member: discord.Member, before: discord.VoiceState , after: discord.VoiceState):
data = cl.OrderedDict()
        if before.channel is None:
            ## on join
            print(datetime.now().strftime("%Y/%m/%d %H:%M:%S"), ":", member.name, "#", member.discriminator, "entered", after.channel.name)
data["Flag"] = "entry"
data["before.channel.name"] = "NULL"
data["before.channel.id"] = "NULL"
data["after.channel.name"] = after.channel.name
data["after.channel.id"] = after.channel.id
data["member.name"] = member.name
data["member.discriminator"] = member.discriminator
data["member.id"] = member.id
data["time"] = datetime.now().strftime("%Y/%m/%d %H:%M:%S")
        elif after.channel is None:
            ## on leave
            print(datetime.now().strftime("%Y/%m/%d %H:%M:%S"), ":", member.name, "#", member.discriminator, "left", before.channel.name)
data["Flag"] = "exit"
data["before.channel.name"] = before.channel.name
data["before.channel.id"] = before.channel.id
data["after.channel.name"] = "NULL"
data["after.channel.id"] = "NULL"
data["member.name"] = member.name
data["member.discriminator"] = member.discriminator
data["member.id"] = member.id
data["time"] = datetime.now().strftime("%Y/%m/%d %H:%M:%S")
        elif after.channel.id != before.channel.id :
            print(datetime.now().strftime("%Y/%m/%d %H:%M:%S"), ":", member.name, "#", member.discriminator, "moved from", before.channel.name, "to", after.channel.name)
data["Flag"] = "move"
data["before.channel.name"] = before.channel.name
data["before.channel.id"] = before.channel.id
data["after.channel.name"] = after.channel.name
data["after.channel.id"] = after.channel.id
data["member.name"] = member.name
data["member.discriminator"] = member.discriminator
data["member.id"] = member.id
data["time"] = datetime.now().strftime("%Y/%m/%d %H:%M:%S")
        else :
            # nothing to do
            pass
self.append_json_to_file( data, self.now_filepath )
pass
    # Scheduled task (monthly)
async def voice_outputlog(self, config, client: discord.Client):
channellist = []
if config.get("on_task") is not None :
if config["on_task"].get(sys._getframe().f_code.co_name) is not None :
channellist = config["on_task"][sys._getframe().f_code.co_name].get("message-channelID")
if channellist is None :
return
#await Sendtool.Send_ChannelID(client=client, channelID=channellist , message="TASKCheck! - voice_outputlog")
        ## --------
        flag = False
        # Decide when to run.
        # Note: the task cannot fire at an exact scheduled moment, so this check substitutes.
        TestFlag = False # --- keep this False except when testing the scheduled task ---
        if not TestFlag : # run on the 1st of the month at 00h
            flag = CTime.check('%d %H', '01 00')
        else : # run on the hour (test mode)
            flag = CTime.check('%M', '00')
        # -- output --
        if flag :
            sendfile = await self.MonthOutput(client=client)
            today = datetime.today()  # bug fix: 'today' was previously undefined in this scope
            filetime = today - relativedelta(months=1)
m_month = datetime.strftime(filetime,'%m')
m_year = datetime.strftime(filetime,'%Y')
month_filename = '{0}{1}'.format(m_year, m_month)
mv_filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + month_filename + ".json"
            if sendfile is None :
                text = "[Monthly report] No voice channel logins in " + m_year + "/" + m_month
                await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=None)
            else :
                text = "[Monthly report] Raw voice channel login data for " + m_year + "/" + m_month
                await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=mv_filename)
                text = "[Monthly report] Processed voice channel login data for " + m_year + "/" + m_month
                await Sendtool.Send_ChannelID(client=client, channelID=CSetting.OneMonthOutput_ChannelID, message=text, filename=sendfile)
pass
pass | [
"datetime.datetime.strftime",
"datetime.datetime.today",
"command.voice_log.chart.makeTimeList",
"base.ColorPrint.error_print",
"os.getcwd",
"base.DiscordSend.Send_ChannelID",
"os.rename",
"os.path.exists",
"dateutil.relativedelta.relativedelta",
"sys._getframe",
"json.dumps",
"base.time_check.check",
"collections.OrderedDict",
"datetime.datetime.now"
] | [((1703, 1719), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1717, 1719), False, 'from datetime import datetime, timedelta\n'), ((1805, 1838), 'datetime.datetime.strftime', 'datetime.strftime', (['filetime', '"""%m"""'], {}), "(filetime, '%m')\n", (1822, 1838), False, 'from datetime import datetime, timedelta\n'), ((1849, 1882), 'datetime.datetime.strftime', 'datetime.strftime', (['filetime', '"""%Y"""'], {}), "(filetime, '%Y')\n", (1866, 1882), False, 'from datetime import datetime, timedelta\n'), ((2122, 2163), 'os.rename', 'os.rename', (['self.now_filepath', 'mv_filename'], {}), '(self.now_filepath, mv_filename)\n', (2131, 2163), False, 'import os\n'), ((3118, 3134), 'collections.OrderedDict', 'cl.OrderedDict', ([], {}), '()\n', (3132, 3134), True, 'import collections as cl\n'), ((1741, 1764), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(1)'}), '(months=1)\n', (1754, 1764), False, 'from dateutil.relativedelta import relativedelta\n'), ((2030, 2063), 'os.path.exists', 'os.path.exists', (['self.now_filepath'], {}), '(self.now_filepath)\n', (2044, 2063), False, 'import os\n'), ((2268, 2356), 'command.voice_log.chart.makeTimeList', 'Chart.makeTimeList', (['client', 'mv_filename', 'CSetting.OneMonthOutput_RoleID'], {'mode': '"""NAME"""'}), "(client, mv_filename, CSetting.OneMonthOutput_RoleID,\n mode='NAME')\n", (2286, 2356), True, 'import command.voice_log.chart as Chart\n'), ((5646, 5675), 'base.time_check.check', 'CTime.check', (['"""%d %H"""', '"""01 00"""'], {}), "('%d %H', '01 00')\n", (5657, 5675), True, 'import base.time_check as CTime\n'), ((5722, 5745), 'base.time_check.check', 'CTime.check', (['"""%M"""', '"""00"""'], {}), "('%M', '00')\n", (5733, 5745), True, 'import base.time_check as CTime\n'), ((5890, 5923), 'datetime.datetime.strftime', 'datetime.strftime', (['filetime', '"""%m"""'], {}), "(filetime, '%m')\n", (5907, 5923), False, 'from datetime import datetime, timedelta\n'), ((5935, 5968), 'datetime.datetime.strftime', 'datetime.strftime', (['filetime', '"""%Y"""'], {}), "(filetime, '%Y')\n", (5952, 5968), False, 'from datetime import datetime, timedelta\n'), ((1470, 1515), 'base.ColorPrint.error_print', 'CPrint.error_print', (["(path_file + 'が、存在しませんでした')"], {}), "(path_file + 'が、存在しませんでした')\n", (1488, 1515), True, 'import base.ColorPrint as CPrint\n'), ((5853, 5876), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(1)'}), '(months=1)\n', (5866, 5876), False, 'from dateutil.relativedelta import relativedelta\n'), ((1526, 1537), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1535, 1537), False, 'import os\n'), ((3667, 3681), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3679, 3681), False, 'from datetime import datetime, timedelta\n'), ((6224, 6341), 'base.DiscordSend.Send_ChannelID', 'Sendtool.Send_ChannelID', ([], {'client': 'client', 'channelID': 'CSetting.OneMonthOutput_ChannelID', 'message': 'text', 'filename': 'None'}), '(client=client, channelID=CSetting.\n OneMonthOutput_ChannelID, message=text, filename=None)\n', (6247, 6341), True, 'import base.DiscordSend as Sendtool\n'), ((6424, 6548), 'base.DiscordSend.Send_ChannelID', 'Sendtool.Send_ChannelID', ([], {'client': 'client', 'channelID': 'CSetting.OneMonthOutput_ChannelID', 'message': 'text', 'filename': 'mv_filename'}), '(client=client, channelID=CSetting.\n OneMonthOutput_ChannelID, message=text, filename=mv_filename)\n', (6447, 6548), True, 'import base.DiscordSend as Sendtool\n'), ((6622, 6743), 
'base.DiscordSend.Send_ChannelID', 'Sendtool.Send_ChannelID', ([], {'client': 'client', 'channelID': 'CSetting.OneMonthOutput_ChannelID', 'message': 'text', 'filename': 'sendfile'}), '(client=client, channelID=CSetting.\n OneMonthOutput_ChannelID, message=text, filename=sendfile)\n', (6645, 6743), True, 'import base.DiscordSend as Sendtool\n'), ((3188, 3202), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3200, 3202), False, 'from datetime import datetime, timedelta\n'), ((4246, 4260), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4258, 4260), False, 'from datetime import datetime, timedelta\n'), ((3765, 3779), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3777, 3779), False, 'from datetime import datetime, timedelta\n'), ((4852, 4866), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4864, 4866), False, 'from datetime import datetime, timedelta\n'), ((5159, 5174), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (5172, 5174), False, 'import sys\n'), ((996, 1045), 'json.dumps', 'json.dumps', (['[_dict]'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '([_dict], indent=4, ensure_ascii=False)\n', (1006, 1045), False, 'import json\n'), ((1302, 1349), 'json.dumps', 'json.dumps', (['_dict'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '(_dict, indent=4, ensure_ascii=False)\n', (1312, 1349), False, 'import json\n'), ((4348, 4362), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4360, 4362), False, 'from datetime import datetime, timedelta\n'), ((5241, 5256), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (5254, 5256), False, 'import sys\n')] |
"""
This module exports the 'Hand' class, its 'PlayerHand' and 'DealerHand' subclasses, and related methods.
"""
import time
draw_delay = 1 # The pause in seconds between drawn card actions
twenty_one = 21 # Ideal score value for both players
class Hand:
"""
A class defining the properties and methods of a hand object.
A hand object is a collection of cards associated with either the dealer or a player (each having their own
respective subclasses with specialised methods and attributes). Within a round of blackjack, cards are added to a
hand when the associated player chooses to 'hit'. The outcome of each round is determined by the relative values
of the player's and dealer's hands.
"""
def __init__(self, holder_name="Player"):
"""
Initialises an empty hand object for a given participant.
Parameters
----------
holder_name : str
            Defines the owner, or 'holder', of the hand object being created: either 'Player' or 'Dealer'.
Defaults to 'Player' for this base hand class.
"""
self._live_hand = (
[]
) # A list of card objects making up the hand; initialised as an empty list
self._active = True # The active status communicates whether the hand is still active in the current round
self._bust = False # The bust status communicates whether the hand is bust (value > 21) in the current round
self._natural = False # The natural status communicates whether the hand is a natural (value = 21 with 2 cards)
self._holder_name = holder_name
def __iter__(self):
"""
Allows hand objects to be iterated over, yielding constituent card objects in the order they were added.
Yields
------
card : blackjack.card.Card
The next card in the hand (within the hand object's '_live_hand' attribute).
"""
for card in self._live_hand:
yield card
def __repr__(self):
"""
Entering the reference for a hand object in the terminal triggers this method, printing all hand details.
Returns
-------
Output of 'print_hand' method : str
Prints the hand's owner followed by shorthand details of all cards currently within the hand.
"""
return self.print_hand()
def __len__(self):
"""Allows len() to be used on hand objects, returning the number of cards in the hand as the object 'length'."""
return len(self._live_hand)
def hand_value(self, bypass_face_down=False):
"""
Returns the total value(s) of the target hand by summing the values of all constituent card objects.
Parameters
----------
bypass_face_down : bool
Tells method whether to include face-down cards in calculating the value(s) of the hand. Defaults to False.
Returns
-------
hand_value_list : list of int / str
A list containing all possible values the hand's combination of cards can take with no duplicates. For a
hand with all cards face-up: returns a list of integers. For hands with any cards face-down: returns a
list of strings.
"""
ace_count = 0
ace_values = None
face_down_count = 0
non_ace_sum = 0
# Loop: counts number of face-down cards in the hand; counts face-up aces; sums face-up cards that aren't an ace
for card in self:
# Try statement catches AssertionErrors thrown when 'is_ace' method encounters a face-down card
try:
if card.is_ace(bypass_face_down):
ace_count += 1
ace_values = card.card_value(bypass_face_down)
else:
non_ace_sum += card.card_value(bypass_face_down)
except AssertionError:
face_down_count += 1
# This if-else block defines a list of possible values associated with all face-up cards in the hand
if ace_count > 0:
ace_sum_possibilities = self._calculate_ace_values(ace_count, ace_values)
ace_sum = [
possibility + non_ace_sum for possibility in ace_sum_possibilities
]
hand_value_list = ace_sum
else:
hand_value_list = [non_ace_sum]
# Where the hand contains face-down cards, this block adds the consistent face-down string to the face-up values
if face_down_count > 0:
hand_value_list = [
str(value) + " + *-*" * face_down_count for value in hand_value_list
]
return hand_value_list
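    # Worked example (hypothetical two-card hand, not from the original source):
    # an Ace plus a face-up King gives hand_value() == [11, 21]; with the King
    # face-down instead, hand_value() == ['1 + *-*', '11 + *-*'].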
def best_hand_value(self):
"""
Returns the best possible value of the hand as an integer. If hand value is bust (> 21), returns None.
Returns
-------
best_value : int or None
The best possible total value of the hand's constituent cards. If no hand value <= 21, 'best_value' = None.
"""
max_best_value = 21
all_hand_values = self.hand_value(bypass_face_down=True)
try:
best_value = max([val for val in all_hand_values if val <= max_best_value])
except ValueError:
best_value = None
return best_value
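    # Worked example (hypothetical): for possible values [11, 21],
    # best_hand_value() returns 21; if every possible value exceeds 21
    # (e.g. [22]), it returns None, which _verify_hand_status reads as bust.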
def is_active(self):
"""
As a boolean, returns the active status of the hand in the current round (bust/stand = False; otherwise = True).
A hand is regarded as active in a round while cards can still be added to the hand. Once a player decides to
'stand' at their hand's current value, or if they go bust (> 21), the hands '_active' attribute is set to False
signalling that no further actions are required by the player holding the hand in the current round.
Returns
-------
bool
True when hand can still receive cards in the current round; otherwise False.
"""
return self._active
def is_bust(self):
"""
As a boolean, returns 'bust' status of hand in the current round (value > 21: returns True; otherwise False).
Returns
-------
bool
True when lowest possible hand value exceeds 21; otherwise False.
"""
return self._bust
def is_natural(self):
"""
As a boolean, returns 'natural' status of hand (2 cards in hand and value = 21: returns True; otherwise False).
Returns
-------
bool
True when card contains two cards with combined value of 21; otherwise False.
"""
return self._natural
def stand(self):
"""Updates hand status to inactive: triggered when player chooses to draw no more cards in the current round."""
self._active = False
def draw_card(self, deck_obj, face_dir="up"):
"""
Removes one card from the input deck and adds this card to the hand with orientation defined by 'face_dir'.
Calls the 'deal_card' method of an input deck object, the deck returns a single card object and deletes this
card from the deck. If the 'face_dir' input argument requires the hand to be dealt face-down, the freshly
        drawn card (face-up by default) calls its 'flip_card' method to ensure the card is correctly face-down before
        it is appended to the hand array. Finally, the method calls '_verify_hand_status', which checks whether the hand
is now bust and updates all hand statuses accordingly.
Parameters
----------
deck_obj : blackjack.deck.Deck
The game's 'live' deck object - a card will be removed from this deck and added to the current hand object.
face_dir : str
Defines whether card is added to the hand face-up or face-down. By default, the card will be added
face-up with face_dir = 'up'. Any value of face_dir not spelling 'up' (case-insensitive) will add the card
face-down.
Raises
------
AssertionError
Raised when the hand is inactive (can't accept further cards).
"""
assert (
self.is_active()
), "Cannot draw a card to this hand: it is marked as inactive in the current round."
drawn_card = deck_obj.deal_card()
if face_dir.lower() != "up":
drawn_card.flip_card()
self._live_hand.append(drawn_card)
self._verify_hand_status()
def print_hand(self, alt_text=None):
"""
Prints the hand's owner followed by shorthand details of all cards currently within the hand.
Parameters
----------
alt_text : str
This optional argument will be printed instead of the hand owner's name if provided.
Returns
-------
empty_string : str
An empty string, returned so that the 'print_hand' method can be called by the Hand class' __repr__
method which must return a string-like object.
"""
empty_string = ""
ends_with_s = self._holder_name[-1].lower() == "s"
if alt_text is not None:
print(alt_text)
elif ends_with_s:
print(f"\n{self._holder_name}' hand")
else:
print(f"\n{self._holder_name}'s hand")
for idx, single_card in enumerate(self):
print(f"Card {idx}: {single_card.short_card_details()}")
if (
self.is_active()
or self.is_bust()
or (self.best_hand_value() == twenty_one and alt_text is not None)
):
print(f"Value: {self.hand_value()}")
return empty_string
def _verify_hand_status(self):
"""Checks whether the hand is bust, has value equal to 21 or is a natural. Updates hand status accordingly."""
natural_length = 2
if self.best_hand_value() is None:
self._bust = True
self.stand()
elif self.best_hand_value() == twenty_one:
self.stand()
if len(self) == natural_length:
self._natural = True
@staticmethod
def _calculate_ace_values(ace_count, ace_values):
"""
Returns the possible values of a collection of ace cards as a sorted list.
Parameters
----------
ace_count : int
The number of ace cards to calculate possible summed values for.
ace_values : tuple
A two-element tuple containing the possible card values an ace can take e.g. (1, 11).
Returns
-------
ace_sum_possibilities : list of int
A list containing each value 'ace_count' number of aces can combine to make.
TODO: Refactor to allow any number of possible ace values (additional loop over keys of dict?)
"""
ace_sum_possibilities = [0]
for ace_idx in range(ace_count):
first_set = [
ace_values[0] + ace_sum_element
for ace_sum_element in ace_sum_possibilities
]
second_set = [
ace_values[1] + ace_sum_element
for ace_sum_element in ace_sum_possibilities
]
ace_sum_possibilities = list(set(first_set + second_set))
ace_sum_possibilities.sort()
return ace_sum_possibilities
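    # Worked example: _calculate_ace_values(2, (1, 11)) combines {1, 11} with
    # itself to give [2, 12, 22]; set() removes the duplicate 12 produced by
    # 1 + 11 and 11 + 1 before the list is sorted.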
class DealerHand(Hand):
"""
A subclass defining the properties and methods specific to a hand object held by the dealer.
    The dealer's hand is unique because: the second card dealt to the dealer will always be dealt face-down;
the dealer's turn in a single round must be resolved automatically.
"""
def __init__(self):
"""Calls the __init__ method of the base Hand class, initialising an empty hand object for the dealer."""
super().__init__("Dealer")
def draw_card(self, deck_obj, face_dir=None):
"""
Removes one card from the input deck and adds this card to the hand with orientation defined by 'face_dir'.
Parameters
----------
deck_obj : blackjack.deck.Deck
The game's 'live' deck object - a card will be removed from this deck and added to the dealer's hand object.
face_dir : None / str
Defines whether card is added to the hand face-up or face-down. By default, 'face_dir' is None when
method is called against a dealer's hand object. Where None, the orientation of the card is determined
by the number of cards currently in the dealer's hand. If the dealer currently has a single card in their
hand, the card is dealt face-down; otherwise face-up. If the method is called with face_dir specified, it
behaves identically to the equivalent method on the base Hand class.
"""
if face_dir:
super().draw_card(deck_obj, face_dir)
elif len(self) == 1:
face_dir = "down"
super().draw_card(deck_obj, face_dir)
else:
face_dir = "up"
super().draw_card(deck_obj, face_dir)
def resolve_hand(self, deck_obj, player_hand, player_score_message):
"""
        This method automatically resolves the dealer's hand: drawing cards until the hand value reaches at least
        seventeen. The method first checks the dealer's hand value: if its best value is 17 or more, the dealer stands;
        if below 17, the hand draws cards until its value reaches 17 or it goes bust. The dealer's final hand score is
        printed to the screen, or the player is informed that the dealer has gone bust.
Parameters
----------
deck_obj : blackjack.deck.Deck
The game's 'live' deck object - cards may be removed from this deck and added to the dealer's hand object.
player_hand : blackjack.hand.PlayerHand
A player's 'live' hand object. Allows the player's hand to be printed for comparison as the dealer's hand is
resolved.
player_score_message : str
            A string that communicates the player's score. As the dealer's hand is resolved, the player's score is
printed each time the dealer's hand is printed so the user can easily compare the relative scores.
"""
dealer_target = 17
print(player_score_message)
if player_hand.best_hand_value() == twenty_one:
print("You've got 21!")
time.sleep(draw_delay)
self._reveal_hand()
while self.is_active():
if self.best_hand_value() < dealer_target:
self.draw_card(deck_obj)
self.print_hand(alt_text="\nDealer hits:")
player_hand.print_hand()
print(player_score_message)
print("\n---")
time.sleep(draw_delay)
else:
self.stand()
self.print_hand(alt_text="\nDealer stands:")
print(f"Dealer's score = {self.best_hand_value()}")
player_hand.print_hand()
print(player_score_message)
break
if self.is_bust():
self.print_hand(alt_text="\nDealer has gone bust!")
player_hand.print_hand()
print(player_score_message)
print("\n---")
def _reveal_hand(self):
"""Turns all cards in the hand face-up and prints hand details to the screen."""
print("\n---------------")
for card in self:
if not card.is_face_up():
card.flip_card()
self.print_hand(alt_text="Dealer reveals hand:")
print("---------------")
time.sleep(draw_delay)
def settle_naturals(self, player_hand, player_obj):
"""
Method detects naturals and settles any bets as necessary; returns True if round is concluded, otherwise False.
A hand is a 'natural' if it contains two cards with a total value of 21. Players and dealers can get naturals
upon drawing their first two cards at the start of a round. If the dealer gets a natural, the round is over and
they collect the bet of any player who did not also get a natural. If a player gets a natural and the dealer did
not, they are immediately paid 1.5x the value of their bet.
Parameters
----------
player_hand : blackjack.hand.PlayerHand
A player's 'live' hand object. The 'natural' status of this hand is read and compared to the status of the
dealer's hand. Where a payout is required, the amount bet against the hand is also read into 'bet_amount'.
player_obj : blackjack.player.Player
The player object that owns the input 'player_hand'. Where a payout is required, this player's balance
will be updated accordingly.
Returns
-------
round_complete : bool
Returns True if no further actions are possible in the current round, following the settling of naturals;
otherwise False (and the round continues).
"""
if not any((self.is_natural(), player_hand.is_natural())):
round_complete = False
return round_complete
else:
round_complete = True
bet_amount = player_hand.get_bet()
if self.is_natural() and not player_hand.is_natural():
# No action, round ends and bet is collected (discarded) automatically with player's hand
self._reveal_hand()
print("Dealer has a natural!")
elif not self.is_natural() and player_hand.is_natural():
# Player wins 1.5x their original bet; multiplier is 2.5x so bet amount is also deposited back into balance
print(f"\n{player_obj.get_name()} has a natural (dealer does not)!")
payout_multiplier = 2.5
player_obj.update_balance(bet_amount * payout_multiplier)
elif all((self.is_natural(), player_hand.is_natural())):
# Stand-off between player and dealer: player's bet is deposited back into balance
print(f"\n{player_obj.get_name()} has a natural!")
self._reveal_hand()
print("\nSo does the dealer! It's a stand-off!")
payout_multiplier = 1
player_obj.update_balance(bet_amount * payout_multiplier)
return round_complete
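    # Illustrative payout arithmetic (hypothetical 10-credit bet): a player-only
    # natural deposits 10 * 2.5 = 25 back into the balance (1.5x winnings plus
    # the returned stake); a stand-off deposits 10 * 1 = 10 (stake only).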
def settle_bet(self, player_hand, player_obj):
"""
Method settles any bets at the end of the round; where the player loses, the method exits and their bet is lost.
        The values of the dealer's and player's hands are compared. If the player wins, their player object is paid the
        value of their bet, plus the original bet amount is returned. If it's a draw, the bet is returned to the player's
        balance but they receive no winnings. If the player loses, the method exits and their balance is unaffected.
The bet placed against their hand is lost when a new round starts and new hands are initialised.
Parameters
----------
player_hand : blackjack.hand.PlayerHand
A player's 'live' hand object. The value of this hand is read and compared to the value of the
dealer's hand. Where a payout is required, the amount bet against the hand is also read into 'bet_amount'.
player_obj : blackjack.player.Player
The player object that owns the input 'player_hand'. Where a payout is required, this player's balance
will be updated accordingly.
"""
assert not any(
(self.is_active(), player_hand.is_active())
), "Bets cannot be settled between the dealer and a player unless both participants have 'stood' or gone bust."
if player_hand.is_bust():
return
if self.is_bust():
dealer_score = 0
else:
dealer_score = self.best_hand_value()
if dealer_score > player_hand.best_hand_value():
return
else:
bet_amount = player_hand.get_bet()
if player_hand.best_hand_value() > dealer_score:
payout_multiplier = 2
player_obj.update_balance(bet_amount * payout_multiplier)
elif player_hand.best_hand_value() == dealer_score:
payout_multiplier = 1
player_obj.update_balance(bet_amount * payout_multiplier)
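    # Illustrative payout arithmetic (hypothetical 10-credit bet): a win
    # deposits 10 * 2 = 20 (stake plus equal winnings), a draw deposits
    # 10 * 1 = 10 (stake only), and a loss deposits nothing.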
class PlayerHand(Hand):
"""
A subclass defining the properties and methods specific to a hand object held by a player.
Players' hands are special because bets can be made against these hands.
"""
def __init__(self, player_obj):
"""
Calls the __init__ method of the base Hand class, initialising an empty hand object for the player.
Parameters
----------
player_obj : blackjack.player.Player
            The player object that owns the hand being initialised. The name of this player is queried and
used to define the '_holder_name' attribute on the base class. This name is then displayed when printing
hand details to screen.
"""
self._bet = float(
0
) # An attribute holding the amount bet by a player against this hand: initially zero
player_name = player_obj.get_name()
super().__init__(player_name)
def add_bet(self, amount):
"""
Adds a bet made by a player to the current hand object: at the end of a round, the dealer resolves this bet.
Parameters
----------
amount : float
The amount bet against the hand object. In typical game flow, this bet amount has already been verified
as positive and has already been removed from the player's balance.
"""
self._bet += amount
def get_bet(self):
"""Returns the amount bet against this player's hand as a float."""
return self._bet
| [
"time.sleep"
] | [((15691, 15713), 'time.sleep', 'time.sleep', (['draw_delay'], {}), '(draw_delay)\n', (15701, 15713), False, 'import time\n'), ((14469, 14491), 'time.sleep', 'time.sleep', (['draw_delay'], {}), '(draw_delay)\n', (14479, 14491), False, 'import time\n'), ((14841, 14863), 'time.sleep', 'time.sleep', (['draw_delay'], {}), '(draw_delay)\n', (14851, 14863), False, 'import time\n')] |
from utils import create_input_files
"""
To create files that contain all images stored in h5py format and captions stored in json files.
Minimum word frequencies to be used as cut-off for removing rare words to be specifiied here.
"""
if __name__ == '__main__':
create_input_files(dataset='coco',
karpathy_json_path='path_to___dataset_coco.json',
image_folder='path_to__mscoco_folder',
captions_per_image=5,
min_word_freq=5,
output_folder='folder_for_processed_data',
max_len=50)
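# Expected artefacts (an assumption based on the docstring above, not verified
# against the 'utils' module): an HDF5 file of images plus JSON files holding
# the encoded captions, caption lengths and word map, written to output_folder.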
| [
"utils.create_input_files"
] | [((269, 500), 'utils.create_input_files', 'create_input_files', ([], {'dataset': '"""coco"""', 'karpathy_json_path': '"""path_to___dataset_coco.json"""', 'image_folder': '"""path_to__mscoco_folder"""', 'captions_per_image': '(5)', 'min_word_freq': '(5)', 'output_folder': '"""folder_for_processed_data"""', 'max_len': '(50)'}), "(dataset='coco', karpathy_json_path=\n 'path_to___dataset_coco.json', image_folder='path_to__mscoco_folder',\n captions_per_image=5, min_word_freq=5, output_folder=\n 'folder_for_processed_data', max_len=50)\n", (287, 500), False, 'from utils import create_input_files\n')] |
import pandas as pd
class TableManager:
    def __init__(self, table_file):
        # Load the feature table once, ranked by information content (highest first).
        self.master_table = pd.read_csv(table_file).sort_values("Info", ascending=False)
    def get_filtered_table(self, low_freq, high_freq, delta_freq, target):
        low_freq = float(low_freq)
        high_freq = float(high_freq)
        delta_freq = float(delta_freq)
        # Keep only rows for the requested target class.
        table = self.master_table[self.master_table["Target"] == target]
        print(table.columns)  # Debug output: show the available columns
        # Keep feature pairs whose two frequencies both lie inside (low_freq, high_freq).
        table = table[table["Freq 1"] > low_freq]
        table = table[table["Freq 1"] < high_freq]
        table = table[table["Freq 2"] > low_freq]
        table = table[table["Freq 2"] < high_freq]
        # Keep pairs whose two frequencies differ by less than delta_freq.
        table = table[(table["Freq 1"] - table["Freq 2"]).abs() < delta_freq]
return table[["Feature","Info"]] | [
"pandas.read_csv"
] | [((102, 125), 'pandas.read_csv', 'pd.read_csv', (['table_file'], {}), '(table_file)\n', (113, 125), True, 'import pandas as pd\n')] |
# -*- coding: utf-8 -*-
# @Time : 2022/3/8 14:38
# @Author : jiaopaner
import sys
sys.path.insert(0, './')
import torch
from collections import OrderedDict
from craft import CRAFT
def copyStateDict(state_dict):
if list(state_dict.keys())[0].startswith("module"):
start_idx = 1
else:
start_idx = 0
new_state_dict = OrderedDict()
for k, v in state_dict.items():
name = ".".join(k.split(".")[start_idx:])
new_state_dict[name] = v
return new_state_dict
if __name__ == '__main__':
net = CRAFT()
net.load_state_dict(copyStateDict(torch.load("/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth", map_location="cpu")))
net.eval()
    # dynamic shape: height and width (axes 2 and 3) of the input are exported as dynamic
x = torch.randn((1, 3, 960, 960))
torch.onnx.export(net, x, './pd_model/model.onnx', opset_version=11, input_names=["input"],
output_names=["output"], dynamic_axes={'input': [2,3]})
# x2paddle --framework=onnx --model=./pd_model/model.onnx --save_dir=pd_model_dynamic | [
"torch.load",
"sys.path.insert",
"torch.randn",
"craft.CRAFT",
"collections.OrderedDict",
"torch.onnx.export"
] | [((86, 110), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""./"""'], {}), "(0, './')\n", (101, 110), False, 'import sys\n'), ((347, 360), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (358, 360), False, 'from collections import OrderedDict\n'), ((544, 551), 'craft.CRAFT', 'CRAFT', ([], {}), '()\n', (549, 551), False, 'from craft import CRAFT\n'), ((740, 769), 'torch.randn', 'torch.randn', (['(1, 3, 960, 960)'], {}), '((1, 3, 960, 960))\n', (751, 769), False, 'import torch\n'), ((775, 931), 'torch.onnx.export', 'torch.onnx.export', (['net', 'x', '"""./pd_model/model.onnx"""'], {'opset_version': '(11)', 'input_names': "['input']", 'output_names': "['output']", 'dynamic_axes': "{'input': [2, 3]}"}), "(net, x, './pd_model/model.onnx', opset_version=11,\n input_names=['input'], output_names=['output'], dynamic_axes={'input':\n [2, 3]})\n", (792, 931), False, 'import torch\n'), ((590, 702), 'torch.load', 'torch.load', (['"""/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth"""'], {'map_location': '"""cpu"""'}), "(\n '/Volumes/storage/resources/models/paddle-ocr-models/craft_mlt_25k.pth',\n map_location='cpu')\n", (600, 702), False, 'import torch\n')] |
from cloudinary.models import CloudinaryField
from django.contrib.auth import get_user_model
from django.db import models
# Create your models here.
from MetioTube.core.validators import validate_image
UserModel = get_user_model()
class Profile(models.Model):
username = models.CharField(
max_length=30
)
profile_picture = CloudinaryField(
resource_type='image',
blank=True,
validators=(validate_image,)
)
about = models.TextField(
blank=True
)
user = models.OneToOneField(
UserModel,
on_delete=models.CASCADE,
primary_key=True
)
subscribers = models.ManyToManyField(
UserModel,
related_name='subscribers',
blank=True,
)
def __str__(self):
return self.username
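# Hypothetical shell usage (assumes migrations have been applied; the names
# below are illustrative, not from the original source):
#   user = UserModel.objects.create_user(username="demo", password="...")
#   profile = Profile.objects.create(username="demo", user=user)
#   profile.subscribers.add(other_user)
#   profile.subscribers.count()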
| [
"django.db.models.TextField",
"django.db.models.OneToOneField",
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.contrib.auth.get_user_model",
"cloudinary.models.CloudinaryField"
] | [((216, 232), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (230, 232), False, 'from django.contrib.auth import get_user_model\n'), ((279, 310), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (295, 310), False, 'from django.db import models\n'), ((348, 433), 'cloudinary.models.CloudinaryField', 'CloudinaryField', ([], {'resource_type': '"""image"""', 'blank': '(True)', 'validators': '(validate_image,)'}), "(resource_type='image', blank=True, validators=(validate_image,)\n )\n", (363, 433), False, 'from cloudinary.models import CloudinaryField\n'), ((472, 500), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (488, 500), False, 'from django.db import models\n'), ((527, 602), 'django.db.models.OneToOneField', 'models.OneToOneField', (['UserModel'], {'on_delete': 'models.CASCADE', 'primary_key': '(True)'}), '(UserModel, on_delete=models.CASCADE, primary_key=True)\n', (547, 602), False, 'from django.db import models\n'), ((652, 725), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['UserModel'], {'related_name': '"""subscribers"""', 'blank': '(True)'}), "(UserModel, related_name='subscribers', blank=True)\n", (674, 725), False, 'from django.db import models\n')] |
import warnings
from datetime import datetime
import numpy as np
import xarray as xr
import pytest
import ecco_v4_py
from .test_common import all_mds_datadirs, get_test_ds
@pytest.mark.parametrize("mytype",['xda','nparr','list','single'])
def test_extract_dates(mytype):
dints = [[1991,8,9,13,10,15],[1992,10,20,8,30,5]]
dates = [datetime(year=x[0],month=x[1],day=x[2],
hour=x[3],minute=x[4],second=x[5]) for x in dints]
dates = np.array(dates,dtype='datetime64[s]')
dates = [np.datetime64(x) for x in dates]
if mytype=='xda':
dates = xr.DataArray(np.array(dates))
elif mytype=='nparr':
dates = np.array(dates)
elif mytype=='single':
dints=dints[0]
dates = dates[0]
test_out = ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64(dates)
for test,expected in zip(test_out,np.array(dints).T):
print('test: ',test)
print('exp: ',expected)
test = test.values if mytype=='xda' else test
assert np.all(test==expected)
def test_get_grid(get_test_ds):
"""make sure we can make a grid ... that's it"""
grid = ecco_v4_py.get_llc_grid(get_test_ds)
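# To run these tests (assumes pytest and the ecco_v4_py test data are
# available; the file name below is hypothetical):
#   pytest test_basics.py -k "extract_dates or get_grid"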
| [
"numpy.datetime64",
"ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64",
"datetime.datetime",
"ecco_v4_py.get_llc_grid",
"numpy.array",
"pytest.mark.parametrize",
"numpy.all"
] | [((176, 245), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""mytype"""', "['xda', 'nparr', 'list', 'single']"], {}), "('mytype', ['xda', 'nparr', 'list', 'single'])\n", (199, 245), False, 'import pytest\n'), ((469, 507), 'numpy.array', 'np.array', (['dates'], {'dtype': '"""datetime64[s]"""'}), "(dates, dtype='datetime64[s]')\n", (477, 507), True, 'import numpy as np\n'), ((770, 831), 'ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64', 'ecco_v4_py.extract_yyyy_mm_dd_hh_mm_ss_from_datetime64', (['dates'], {}), '(dates)\n', (824, 831), False, 'import ecco_v4_py\n'), ((1142, 1178), 'ecco_v4_py.get_llc_grid', 'ecco_v4_py.get_llc_grid', (['get_test_ds'], {}), '(get_test_ds)\n', (1165, 1178), False, 'import ecco_v4_py\n'), ((344, 422), 'datetime.datetime', 'datetime', ([], {'year': 'x[0]', 'month': 'x[1]', 'day': 'x[2]', 'hour': 'x[3]', 'minute': 'x[4]', 'second': 'x[5]'}), '(year=x[0], month=x[1], day=x[2], hour=x[3], minute=x[4], second=x[5])\n', (352, 422), False, 'from datetime import datetime\n'), ((520, 536), 'numpy.datetime64', 'np.datetime64', (['x'], {}), '(x)\n', (533, 536), True, 'import numpy as np\n'), ((1021, 1045), 'numpy.all', 'np.all', (['(test == expected)'], {}), '(test == expected)\n', (1027, 1045), True, 'import numpy as np\n'), ((604, 619), 'numpy.array', 'np.array', (['dates'], {}), '(dates)\n', (612, 619), True, 'import numpy as np\n'), ((663, 678), 'numpy.array', 'np.array', (['dates'], {}), '(dates)\n', (671, 678), True, 'import numpy as np\n'), ((870, 885), 'numpy.array', 'np.array', (['dints'], {}), '(dints)\n', (878, 885), True, 'import numpy as np\n')] |
"""The laundrify integration."""
from __future__ import annotations
from laundrify_aio import LaundrifyAPI
from laundrify_aio.exceptions import ApiConnectionException, UnauthorizedException
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DEFAULT_POLL_INTERVAL, DOMAIN
from .coordinator import LaundrifyUpdateCoordinator
PLATFORMS = [Platform.BINARY_SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up laundrify from a config entry."""
session = async_get_clientsession(hass)
api_client = LaundrifyAPI(entry.data[CONF_ACCESS_TOKEN], session)
try:
await api_client.validate_token()
except UnauthorizedException as err:
raise ConfigEntryAuthFailed("Invalid authentication") from err
except ApiConnectionException as err:
raise ConfigEntryNotReady("Cannot reach laundrify API") from err
coordinator = LaundrifyUpdateCoordinator(hass, api_client, DEFAULT_POLL_INTERVAL)
await coordinator.async_config_entry_first_refresh()
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {
"api": api_client,
"coordinator": coordinator,
}
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
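# Note: async_setup_entry stores the per-entry API client and coordinator under
# hass.data[DOMAIN][entry.entry_id]; async_unload_entry pops that same key so a
# reloaded entry starts from a clean slate.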
| [
"homeassistant.exceptions.ConfigEntryAuthFailed",
"homeassistant.exceptions.ConfigEntryNotReady",
"laundrify_aio.LaundrifyAPI",
"homeassistant.helpers.aiohttp_client.async_get_clientsession"
] | [((786, 815), 'homeassistant.helpers.aiohttp_client.async_get_clientsession', 'async_get_clientsession', (['hass'], {}), '(hass)\n', (809, 815), False, 'from homeassistant.helpers.aiohttp_client import async_get_clientsession\n'), ((833, 885), 'laundrify_aio.LaundrifyAPI', 'LaundrifyAPI', (['entry.data[CONF_ACCESS_TOKEN]', 'session'], {}), '(entry.data[CONF_ACCESS_TOKEN], session)\n', (845, 885), False, 'from laundrify_aio import LaundrifyAPI\n'), ((993, 1040), 'homeassistant.exceptions.ConfigEntryAuthFailed', 'ConfigEntryAuthFailed', (['"""Invalid authentication"""'], {}), "('Invalid authentication')\n", (1014, 1040), False, 'from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady\n'), ((1106, 1155), 'homeassistant.exceptions.ConfigEntryNotReady', 'ConfigEntryNotReady', (['"""Cannot reach laundrify API"""'], {}), "('Cannot reach laundrify API')\n", (1125, 1155), False, 'from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady\n')] |
# Copyright 2017 <NAME>
#
# Licensed under the MIT License. If the LICENSE file is missing, you
# can find the MIT license terms here: https://opensource.org/licenses/MIT
from flask import Flask, render_template
from config import config
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
# attach routes and custom error pages here
#
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
return app
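# Hypothetical usage (assumes `config` defines a matching key such as
# 'development'; not part of the original module):
#   app = create_app('development')
#   app.run(debug=True)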
| [
"flask.Flask"
] | [((279, 294), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (284, 294), False, 'from flask import Flask, render_template\n')] |