commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
17654378a6039203ead1c711b6bb8f7fb3ad8680 | add Ermine ELF dumper. | ccpgames/mumble-releng,ccpgames/mumble-releng | tools/dump-ermine-elfs.py | tools/dump-ermine-elfs.py | #!/usr/bin/env python
#
# Copyright (C) 2013 Mikkel Krautz <mikkel@krautz.dk>
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# - Neither the name of the Mumble Developers nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# `AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# dump-ermine-elfs.py is a simple script that dumps all embedded
# ELFs (executables and shared libraries) contained in an Ermine
# packed ELF binary.
import os
import sys
def usage():
print 'dump-ermine-elfs.py <fn>'
sys.exit(1)
def main():
if len(sys.argv) < 2:
usage()
fn = sys.argv[1]
f = open(fn, 'r')
all = f.read()
f.close()
elfMagic = '\x7fELF'
elfPairs = []
for i in range(0, len(all)):
if i == 0: # skip binary itself
continue
if all[i:i+len(elfMagic)] == elfMagic:
elfPairs.append(i)
elfPairs.append(len(all))
for i, ofs in enumerate(elfPairs):
if i == len(elfPairs)-1: # done?
break
end = elfPairs[i+1]
fn = 'dumped-%i.elf' % i
print 'dumping elf @ 0x%x to %s' % (ofs, fn)
f = open(fn, 'w')
f.write(all[ofs:end])
f.close()
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
|
4a1e46d279df1d0a7eaab2ba8175193cd67c1f63 | Add some template filters: sum, floatformat, addslashes, capfirst, stringformat (copied from django), dictsort, get, first, join, last, length, random, sort. Needed to write tests for all those filters | GrAndSE/lighty-template,GrAndSE/lighty | lighty/templates/templatefilters.py | lighty/templates/templatefilters.py | '''Package contains default template tags
'''
from decimal import Decimal, ROUND_DOWN
import random as random_module
from filter import filter_manager
# Numbers
def sum(*args):
'''Calculate the sum of all the values passed as args and
'''
return reduce(lambda x, y: x + float(y), args)
filter_manager.register(sum)
def floatformat(raw_value, format='0'):
'''Make pretty float representation
Lets:
a = '12.4'
Then:
>>> print floatformat(a)
12
>>> print floatformat(a, '2')
12.40
>>> print floatformat(a, '-2')
12.4
'''
# Parse arguments
try:
digits = abs(int(format))
except:
raise Exception('floatformat arguments error: format is not integer')
try:
value = Decimal(raw_value)
except:
raise Exception('floatformat supports only number values')
# Make formater
digit = 0
formatter = Decimal('1')
while digit < digits:
digit += 1
formatter *= Decimal('0.1')
result = value.copy_abs().quantize(formatter, rounding=ROUND_DOWN)
result = str(result.copy_sign(value))
if format[0] == '-':
return result.rstrip('0')
return result
# Strings
def addslashes(value):
'''Add a slashes to string
'''
return value.replace('\\', '\\\\').replace('"', '\\"').replace("'", "\\'")
filter_manager.register(addslashes)
def capfirst(value):
'''Capitalizes the first character in string
'''
return value and value[0].upper() + value[1:]
filter_manager.register(capfirst)
def stringformat(value, format):
"""Formats the variable according to the format, a string formatting
specifier.
This specifier uses Python string formating syntax, with the exception that
the leading "%" is dropped.
See http://docs.python.org/lib/typesseq-strings.html for documentation
of Python string formatting
"""
return (u"%" + str(format)) % value
filter_manager.register(stringformat)
# Lists, dicts, strings
def dictsort(value, key, order=''):
'''Sort dict
'''
return sorted(value, key=key, reverse=(order != ''))
filter_manager.register(dictsort)
def get(value, index):
'''Get item with specified index
'''
if issubclass(value.__class__, dict):
index = value[value.keys()[index]]
if index in value:
return value[index]
return ''
filter_manager.register(get)
def first(value):
'''Get first item from list
'''
return get(value, 0)
filter_manager.register(first)
def join(value, joiner):
'''Join list or items with joiner
>>> join([1, 2, 3], ' ')
'1 2 3'
'''
return joiner.join([str(item) for item in value])
filter_manager.register(join)
def last(value):
'''Get last item from list
'''
return get(value, len(value) - 1)
filter_manager.register(last)
def length(value):
'''Return's the length of the string, dict or list
'''
return len(value)
def random(value):
'''Get random item from list or dict
'''
return get(value, random_module.random(len(value)))
filter_manager.register(random)
def sort(value):
'''Sort list
'''
return sorted(value)
filter_manager.register(sort)
| bsd-3-clause | Python |
|
654e2bf70b4a47adb53d8a0b17f0257e84c7bdf8 | read in data, check things look sensible. Note: need to change unknowns in group col so we have a more usable data type in the pandas dataframe. | bourbonspecial/challenge | main.py | main.py | # Data modelling challenge.
__author__ = 'Remus Knowles <remknowles@gmail.com>'
import pandas as pd
F_DATA = r'data challenge test.csv'
def main():
df = pd.read_csv(F_DATA)
print df.head()
if __name__ == '__main__':
main() | mit | Python |
|
0046f5276c9572fbc40080cc2201a89ee37b96b2 | Create mwis.py | sbugrov/biutils_PY | mwis.py | mwis.py | weights = [int(l) for l in open('mwis.txt')][1:]
def mwis(weights):
n = len(weights)
weights = [0] + weights
maxsetweight = [0, weights[1]]
for i in range(2, n + 1):
maxsetweight.append(max(maxsetweight[i - 1], maxsetweight[i - 2] + weights[i] ))
i = n
maxset = []
while i > 1:
if maxsetweight[i-2] + weights[i] > maxsetweight[i-1]:
maxset.append(i)
i -= 2
if i == 1:
maxset.append(1)
break
else:
i -= 1
return (maxsetweight[n], maxset)
a, b = mwis(weights)
print "The weight of the maximum weight independent set of the graph is :", a
print "The vertices that constitute the maximum weight independent set of the path graph are :", b
| mit | Python |
|
837382f44d91a44c14884f87c580b969e5ef5a4a | add example for tensorboard | gundramleifert/exp_tf | models/toyexample_03_tensorboard.py | models/toyexample_03_tensorboard.py | import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True, reshape=False, validation_size=0)
K = 200
L = 100
M = 60
N = 30
# initialization
X = tf.placeholder(tf.float32, [None, 28, 28, 1])
# placeholder for correct answers
Y_ = tf.placeholder(tf.float32, [None, 10])
W1 = tf.Variable(tf.truncated_normal([784, K], stddev=0.1))
B1 = tf.Variable(tf.zeros([K]))
W2 = tf.Variable(tf.truncated_normal([K, L], stddev=0.1))
B2 = tf.Variable(tf.zeros([L]))
W3 = tf.Variable(tf.truncated_normal([L, M], stddev=0.1))
B3 = tf.Variable(tf.zeros([M]))
W4 = tf.Variable(tf.truncated_normal([M, N], stddev=0.1))
B4 = tf.Variable(tf.zeros([N]))
W5 = tf.Variable(tf.truncated_normal([N, 10], stddev=0.1))
B5 = tf.Variable(tf.zeros([10]))
# model
XX = tf.reshape(X, [-1, 28 * 28]) # Input Layer
# summary1 = tf.summary.tensor_summary("INPUT", XX, "input of model")
with tf.name_scope("Hidden_1"):
Y1 = tf.nn.sigmoid(tf.matmul(XX, W1) + B1) # Hidden Layer 1
with tf.name_scope("Hidden_2"):
Y2 = tf.nn.sigmoid(tf.matmul(Y1, W2) + B2) # Hidden Layer 2
with tf.name_scope("Hidden_3"):
Y3 = tf.nn.sigmoid(tf.matmul(Y2, W3) + B3) # Hidden Layer 3
with tf.name_scope("Hidden_4"):
Y4 = tf.nn.sigmoid(tf.matmul(Y3, W4) + B4) # Hidden Layer 4
Ylogits = tf.matmul(Y4, W5) + B5 # Output Layer
Y = tf.nn.softmax(Ylogits)
# summary2 = tf.summary.tensor_summary("OUTPUT", Y, "output of model")
# loss function
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(Ylogits, Y_)
cross_entropy = tf.reduce_mean(cross_entropy) * 100
cost_train = tf.scalar_summary("cost_train", cross_entropy)
cost_val = tf.scalar_summary("cost_val", cross_entropy)
# % of correct answers found in batch
correct_prediction = tf.equal(tf.argmax(Y, 1), tf.argmax(Y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
acc_train = tf.scalar_summary("acc_train", accuracy)
acc_val = tf.scalar_summary("acc_val", accuracy)
# training step
learning_rate = 0.003
optimizer = tf.train.AdamOptimizer(learning_rate)
train_step = optimizer.minimize(cross_entropy)
init = tf.initialize_all_variables()
summary = tf.merge_all_summaries()
sess = tf.Session()
#### BEGIN ####
# Create a summary writer
writer = tf.train.SummaryWriter("./private/", flush_secs=1)
# add the 'graph' to the event file.
writer.add_graph(sess.graph)
# writer.add_graph(tf.get_default_graph())
#### END ####
sess.run(init)
idx = 1;
for i in range(100001):
# Load batch of images and correct answers
batch_X, batch_Y = mnist.train.next_batch(100)
train_data = {X: batch_X, Y_: batch_Y}
# train
_, ct, at, = sess.run([train_step,cost_train,acc_train], feed_dict=train_data)
writer.add_summary(ct, i)
writer.add_summary(at, i)
# success? add code to print it
if i % 100 == 0:
# c, ct, at = sess.run([cross_entropy, cost_train, acc_train], feed_dict=train_data)
# print("Accuracy on train set (i = " + str(i) + "): " + str(a))
# success on test data?
test_data = {X: mnist.test.images, Y_: mnist.test.labels}
a, c, cv, av = sess.run([accuracy,cross_entropy, cost_val, acc_val], feed_dict=test_data)
writer.add_summary(cv, i)
writer.add_summary(av, i)
print("Accuracy on test set (i = " + str(i) + "): " + str(a))
| apache-2.0 | Python |
|
572a47ab8b05f8e93ec5e1b415cb56387d4279ca | add m_restart.py | sunqm/pyscf,gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf | pyscf/nao/m_restart.py | pyscf/nao/m_restart.py |
#An HDF5 file is a container for two kinds of objects: datasets (array-like collections of data), and groups (folder-like containers that hold datasets).
#Groups work like dictionaries, and datasets work like NumPy arrays
def read_rst_h5py (filename=None):
import h5py ,os
if filename is None:
path = os.getcwd()
filename =find('*.hdf5', path)
#filename= 'SCREENED_COULOMB.hdf5'
with h5py.File(filename, 'r') as f:
#print("Keys: %s" % f.keys())
a_group_key = list(f.keys())[0]
# Get the data
data = list(f[a_group_key])
msg = 'RESTART: Full matrix elements of screened interactions (W_c) was read from {}'.format(filename)
return data, msg
def write_rst_h5py(data, filename = None):
import h5py
if filename is None: filename= 'SCREENED_COULOMB.hdf5'
with h5py.File(filename, 'w') as data_file:
data_file.create_dataset('W_c', data=data)
data_file.close
msg = 'Full matrix elements of screened interactions (W_c) stored in {}'.format(filename)
return msg
def write_rst_yaml (data , filename=None):
import yaml
if filename is None: filename= 'SCREENED_COULOMB.yaml'
with open(filename, 'w+', encoding='utf8') as outfile:
yaml.dump(data, outfile, default_flow_style=False, allow_unicode=True)
msg = 'Full matrix elements of screened interactions stored in {}'.format(filename)
return msg
def read_rst_yaml (filename=None):
import yaml, os
if filename is None:
path = os.getcwd()
filename =find('*.yaml', path)
with open(filename, 'r') as stream:
try:
data = yaml.load(stream)
msg = 'RESTART: Full matrix elements of screened interactions (W_c) was read from {}'.format(filename)
return data, msg
except yaml.YAMLError as exc:
return exc
| apache-2.0 | Python |
|
a0c303e9c1f7ac75e078e6f3ae9586ba68a24f63 | add the solution | markshao/leetcode,markshao/leetcode | python/oj/mergeSort.py | python/oj/mergeSort.py | #!/usr/bin/python
# coding:utf8
'''
@author: shaoyuliang
@contact: mshao@splunk.com
@since: 7/16/14
'''
# https://oj.leetcode.com/problems/merge-sorted-array/
class Solution:
# @param A a list of integers
# @param m an integer, length of A
# @param B a list of integers
# @param n an integer, length of B
# @return nothing
def merge(self, A, m, B, n):
for i in range(n):
A.append(B[i])
a = 0
b = m
while a < b and b < m + n:
if A[a] < A[b]:
a += 1
continue
else:
c = A.pop(b)
b += 1
A.insert(a, c)
A = [1, 3, 5]
Solution().merge(A, 3, [2, 4], 2)
print A | apache-2.0 | Python |
|
863ec839e24f2f17ba9d1dfb1177592f34cfc5e3 | Create Transaction.py | ojosipeayo/pythonVogue | pyvogue/Transaction.py | pyvogue/Transaction.py |
import requests
import json
import urllib
class Transaction():
def getall(self,trx,res,decode_content=False):
"""
Gets all your transactions
args:
trx -- the transaction id to be fetched
res -- the response type expected : json or xml
"""
url = "https://voguepay.com/?v_transaction_id="+str(trx)+"&type="+str(res)
if ( decode_content ):
dec = self.__parse_json(requests.get(url).text)
return (dec)
else:
return requests.get(url).text
def paylink(self,param):
"""
Generate a one time payment link from params
args:
param -- a dictionary of payment params
e.g
params = {'v_merchant_id':'14307-23682',
'memo':'testing',
'total':'1200'
}
"""
urlg = "https://voguepay.com/?p=linkToken&"+urllib.urlencode(param)
return requests.get(urlg)
def __parse_json(self, response_obj):
"""
This function takes in json response sent back by the
server
Returns a python dictionary of status, email, amount,memo etc
"""
data = json.loads(response_obj)
return data
| mit | Python |
|
3ae0ea21cc6b1afadb0dd72e29016385d18167ab | Add FifoReader class to utils | xtaran/debian-devel-changes-bot,lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot,xtaran/debian-devel-changes-bot,sebastinas/debian-devel-changes-bot,lamby/debian-devel-changes-bot | DebianDevelChangesBot/utils/fiforeader.py | DebianDevelChangesBot/utils/fiforeader.py | # -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import fcntl
import select
import threading
import traceback
class FifoReader(object):
__shared_state = {}
read_lock = threading.Lock()
stop_lock = threading.Lock()
running = False
quitfds = None
def __init__(self):
self.__dict__ = self.__shared_state
print "lol"
def start(self, callback, fifo_loc):
self.callback = callback
self.fifo_loc = fifo_loc
threading.Thread(target=self.run).start()
def run(self):
self.read_lock.acquire()
try:
for fileobj in self.gen_messages():
try:
self.callback(fileobj)
except Exception, exc:
print "Uncaught exception caught inside fiforeader"
traceback.print_exc()
finally:
fileobj.close()
finally:
self.read_lock.release()
def gen_messages(self):
self.running = True
self.quitfds = os.pipe()
while self.running:
fifo = os.open(self.fifo_loc, os.O_RDONLY | os.O_NONBLOCK)
flags = fcntl.fcntl(fifo, fcntl.F_GETFL)
fcntl.fcntl(fifo, fcntl.F_SETFD, flags & ~os.O_NONBLOCK)
readfds, _, _ = select.select([fifo, self.quitfds[0]], [], [])
# If our anonymous descriptor was written to, exit loop
if not self.running or self.quitfds[0] in readfds:
os.close(fifo)
os.close(self.quitfds[0])
os.close(self.quitfds[1])
break
if fifo not in readfds:
continue
yield os.fdopen(fifo)
def stop(self):
self.stop_lock.acquire()
try:
if self.running:
self.running = False
os.write(self.quitfds[1], '1')
# Block until we have actually stopped
self.read_lock.acquire()
self.read_lock.release()
finally:
self.stop_lock.release()
| agpl-3.0 | Python |
|
5c3304ffbd78ee47b2c4d197165de08200e77632 | Fix the `week` behavour to match api2 | willkg/standup,rlr/standup,mozilla/standup,willkg/standup,mozilla/standup,safwanrahman/standup,mozilla/standup,rehandalal/standup,rlr/standup,rehandalal/standup,willkg/standup,rlr/standup,safwanrahman/standup,mozilla/standup,safwanrahman/standup,rehandalal/standup,safwanrahman/standup,willkg/standup | standup/apps/status/helpers.py | standup/apps/status/helpers.py | import re
from datetime import date, datetime, timedelta
from standup.database.helpers import paginate as _paginate
def paginate(statuses, page=1, startdate=None, enddate=None, per_page=20):
from standup.apps.status.models import Status
if startdate:
statuses = statuses.filter(Status.created >= startdate)
if enddate:
statuses = statuses.filter(Status.created <= enddate)
return _paginate(statuses, int(page), per_page=per_page)
def startdate(request):
dates = request.args.get('dates')
day = request.args.get('day')
week = request.args.get('week')
if dates == '7d':
return date.today() - timedelta(days=7)
elif dates == 'today':
return date.today()
elif isday(day):
return get_day(day)
elif isday(week):
return week_start(get_day(week))
return None
def enddate(request):
day = request.args.get('day')
week = request.args.get('week')
if isday(day):
return get_day(day) + timedelta(days=1)
elif isday(week):
return week_end(get_day(week))
return None
def isday(day):
return day and re.match('^\d{4}-\d{2}-\d{2}$', day)
def get_day(day):
return datetime.strptime(day, '%Y-%m-%d')
def get_weeks(num_weeks=10):
weeks = []
current = datetime.now()
for i in range(num_weeks):
weeks.append({"start_date": week_start(current), \
"end_date": week_end(current), \
"weeks_ago": i })
current = current - timedelta(7)
return weeks
def week_start(d):
"""Weeks start on the Monday on or before the given date"""
return d - timedelta(d.isoweekday() - 1)
def week_end(d):
"""Weeks start on the Sunday on or after the given date"""
return d + timedelta(7 - d.isoweekday())
| import re
from datetime import date, datetime, timedelta
from standup.database.helpers import paginate as _paginate
def paginate(statuses, page=1, startdate=None, enddate=None, per_page=20):
from standup.apps.status.models import Status
if startdate:
statuses = statuses.filter(Status.created >= startdate)
if enddate:
statuses = statuses.filter(Status.created <= enddate)
return _paginate(statuses, int(page), per_page=per_page)
def startdate(request):
dates = request.args.get('dates')
day = request.args.get('day')
week = request.args.get('week')
if dates == '7d':
return date.today() - timedelta(days=7)
elif dates == 'today':
return date.today()
elif isday(day):
return get_day(day)
elif isday(week):
return get_day(week)
return None
def enddate(request):
day = request.args.get('day')
week = request.args.get('week')
if isday(day):
return get_day(day) + timedelta(days=1)
elif isday(week):
return get_day(week) + timedelta(days=7)
return None
def isday(day):
return day and re.match('^\d{4}-\d{2}-\d{2}$', day)
def get_day(day):
return datetime.strptime(day, '%Y-%m-%d')
def get_weeks(num_weeks=10):
weeks = []
current = datetime.now()
for i in range(num_weeks):
weeks.append({"start_date": week_start(current), \
"end_date": week_end(current), \
"weeks_ago": i })
current = current - timedelta(7)
return weeks
def week_start(d):
"""Weeks start on the Monday on or before the given date"""
return d - timedelta(d.isoweekday() - 1)
def week_end(d):
"""Weeks start on the Sunday on or after the given date"""
return d + timedelta(7 - d.isoweekday())
| bsd-3-clause | Python |
6e096fc10c7eb580ec11fbee585dd2aa3210e2b3 | add settings example | colbypalmer/cp-blog,colbypalmer/cp-blog | blog/settings_example.py | blog/settings_example.py | SITE_URL = "http://project.com"
SITE_NAME = "Project Name"
COMMENTS_APP = 'threadedcomments' # for example
RECAPTCHA_PUBLIC_KEY = 'put-your-key-here'
RECAPTCHA_PRIVATE_KEY = 'put-your-key-here'
SOUTH_MIGRATION_MODULES = {
'taggit': 'taggit.south_migrations',
}
TAGGIT_TAGCLOUD_MIN = 1
TAGGIT_TAGCLOUD_MAX = 8
GRAPPELLI_ADMIN_TITLE = u'{} Administration'.format(SITE_NAME) | mit | Python |
|
edfba32b5dd24c0fe58da9bbbe84267e81754233 | add demo.py | maliubiao/chrome_remote_debugger | demo.py | demo.py | import pdb
import json
from pprint import pprint
from chrome_debugger import protocol
from chrome_debugger import interface
from chrome_debugger import websocket
context = protocol.connect("ws://localhost:9222/devtools/page/D08C4454-9122-6CC8-E492-93A22F9C9727")
header = websocket.parse_response(context["sock"].recv(4096))
interface.debugger_enable(context)
while True:
pprint(protocol.recv(context))
| mit | Python |
|
863ae7a76567913f60a758a9fb974a27e9bc58d2 | add 21 | ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler | p021.py | p021.py | from utils import divisors
def d(n):
return sum(divisors(n))
print sum(filter(lambda n: n != d(n) and n == d((d(n))), range(1, 10000))) | bsd-3-clause | Python |
|
a3a92435781300966ca59d5316693d0306abd600 | Create osrm_OD_matrix.py | jamaps/fun_with_gdal,jamaps/open_geo_scripts,jamaps/open_geo_scripts,jamaps/open_geo_scripts,jamaps/fun_with_gdal,jamaps/gdal_and_ogr_scripts,jamaps/gdal_and_ogr_scripts,jamaps/shell_scripts,jamaps/shell_scripts | osrm_OD_matrix.py | osrm_OD_matrix.py | # using osrm to create a big dirty OD matrix
import csv
import requests
import polyline
import time
import json
db_points = []
# grab points from csv file - just grab, x, y, and a unique ID
# the headers may be different depending on your data!
with open("db.csv", 'r') as csvfile:
reader = csv.DictReader(csvfile)
n = 0
q = 0
for row in reader:
# limiting number of points for testing, may do all eventually!
if n % 1 == 0:
q += 1
db_points.append([row['X'],row['Y'],row['dbuid']])
n += 1
# split up into managable size - 2000 destinations seems managable
point_count = len(db_points)
points_split_list = []
single_list = []
i = 1
for row in db_points:
single_list.append(row)
if i % 3000 == 0:
points_split_list.append(single_list)
single_list = []
if i == len(db_points):
points_split_list.append(single_list)
i += 1
# print lenghts of before and after
print len(db_points)
print len(points_split_list)
for x in points_split_list:
print len(x)
# make sure these total!
# list of ids
dbuids = []
for row in db_points:
dbuids.append(row[2])
print len(dbuids)
# set up that awesome marix were going to output!
the_matrix = []
# lets add in a header row!
the_matrix.append([''] + dbuids)
print len(the_matrix)
print len(the_matrix[0])
# the start time for time timing
start_time = time.time()
# loop over the origins
for origin in db_points:
# the output row!
out_row = [origin[2]]
for points in points_split_list:
polyline_list = []
polyline_list.append((float(origin[1]),float(origin[0])))
# grab x y for lists
for row in points:
dr_tuple = (float(row[1]),float(row[0]))
polyline_list.append(dr_tuple)
line = polyline.encode(polyline_list, 5)
# what to send
url = 'http://localhost:5000/table/v1/driving/polyline(' + line + ')?sources=0'
# sending and recieving
page = requests.get(url)
data = json.loads(page.content)
durs = data["durations"][0]
del durs[0] # deleting initial 0
out_row = out_row + durs
the_matrix.append(out_row)
# this break is for testing!
break
print time.time() - start_time
for row in the_matrix:
print len(row)
| mit | Python |
|
41a533ffddfebc3303a1e882bfaf1fcdd243828e | add api like test | dubirajara/django_my_ideas_wall,dubirajara/django_my_ideas_wall,dubirajara/django_my_ideas_wall,dubirajara/django_my_ideas_wall | myideas/core/tests/test_like_api.py | myideas/core/tests/test_like_api.py | from django.test import TestCase
from django.test.client import Client
from django.shortcuts import resolve_url as r
from django.contrib.auth.models import User
from myideas.core.models import Ideas
class LikeApiTest(TestCase):
def setUp(self):
self.client = Client()
self.username = 'diego'
self.email = 'test@djangoapp.com'
self.password = 'test'
user = User.objects.create_user(
self.username, self.email, self.password
)
self.idea = Ideas.objects.create(
user=user, title='test app'
)
def api_signin_and_get(self):
self.login = self.client.login(
username=self.username, password=self.password
)
self.response = self.client.get(r(self.idea.get_api_like_url()))
def test_get(self):
"""GET 'Ideas like api' must return status code 200"""
self.api_signin_and_get()
self.assertEqual(200, self.response.status_code)
def test_api_status(self):
self.api_signin_and_get()
self.assertTrue(self.response)
def test_api_likes_count(self):
self.api_signin_and_get()
self.assertEqual(1, self.idea.likes.count())
def test_access_forbidden(self):
"""GET page not logged in must return status code 403"""
self.response = self.client.get(r(self.idea.get_api_like_url()))
self.assertEqual(403, self.response.status_code)
| agpl-3.0 | Python |
|
10c19d0c7d7cdb2c823a698db8ca128134f32c5a | Add beam potential generation | ghallsimpsons/optical_tweezers | otz/Beam.py | otz/Beam.py | import pdb
import numpy as np
import scipy as sp
h = 6.626E-34
c = 3.0E8
def uniform(max_angle, intensity):
def profile(phi):
if (abs(phi) < max_angle):
return intensity
else:
return 0
return profile
def default_profile(angle):
return uniform(np.pi/8.0, 1)(angle)
class Bead:
def __init__(self, diameter, index=2, mass=1, r=0, z=None):
self.disable = diameter
self.radius = diameter/2.0
self.mass = mass
self.r = r
if z is None:
z = diameter
self.z = z
self.index = index
def set_position(self, r, z):
self.r = r
self.z = z
class Beam:
def __init__(self, wavelength, profile=default_profile):
self.profile = profile
self.wavelength = wavelength
def force(self, bead):
r = bead.r
z = bead.z
n = bead.index
R = bead.radius
d = np.sqrt(z**2+r**2)
phi_prime = np.arctan2(r,z)
def theta(phi):
return np.arctan2(R*np.sin(phi),d-R*np.cos(phi))
def theta_prime(phi):
return theta(phi-phi_prime)
def theta2(phi):
return np.arcsin(np.sin(phi+theta_prime(phi))/n)
def delta_theta(phi):
return 2*theta2(phi)
def p(phi):
return self.profile(phi)*h*c/self.wavelength
def dF_r(phi):
return -p(phi)*(np.sin(theta_prime(phi))-np.sin(theta_prime(phi)+delta_theta(phi)))
def dF_z(phi):
return -p(phi)*(np.cos(theta_prime(phi))-np.cos(theta_prime(phi)+delta_theta(phi)))
F_r = sp.integrate.quad(dF_r, -np.pi/2.0, np.pi/2.0)
F_z = sp.integrate.quad(dF_z, -np.pi/2.0, np.pi/2.0)
return (F_r, F_z)
def r_potential(self, bead, r_lim=None, z=None, dx = None):
if r_lim is None:
r_lim = 2*bead.radius
if z is not None:
bead.z = z
if dx is None:
dx = r_lim/1e4
r = np.arange(-r_lim, r_lim, dx)
def restoring_force(dist):
bead.r = dist
return self.force(bead)[0][0]
force_r = [restoring_force(dist) for dist in r]
V = sp.integrate.cumtrapz(force_r, r)
return (r[:-1],V)
| unlicense | Python |
|
ad5018c045a14f2e8360e8118d73d021df10baab | add solution for Course Schedule II | zhyu/leetcode,zhyu/leetcode | algorithms/courseScheduleII/courseScheduleII.py | algorithms/courseScheduleII/courseScheduleII.py | class Solution:
# @param {integer} numCourses
# @param {integer[][]} prerequisites
# @return {integer[]}
def findOrder(self, numCourses, prerequisites):
g = {v: [] for v in xrange(numCourses)}
deg = {v: 0 for v in xrange(numCourses)}
s = set(range(numCourses))
for u, v in prerequisites:
g[v].append(u)
deg[u] += 1
s.discard(u)
res = []
while s:
u = s.pop()
res.append(u)
for v in g[u]:
deg[v] -= 1
if deg[v] == 0:
s.add(v)
return [] if len(res) != numCourses else res
| mit | Python |
|
4557cce84ff91e830f1f1fd241223cff70ceb46e | add directions and a script for how I found duplicate functions | rhansen/rpstir,rhansen/rpstir,rhansen/rpstir,rhansen/rpstir,rhansen/rpstir | deprecated/utils/tags-to-dup-functions.py | deprecated/utils/tags-to-dup-functions.py | # Run the below command to generate the TAGS file, then run this script with TAGS as stdin to see duplicate function names
#
# find . -name \*.c -not -path ./deprecated/\* -print0 | xargs -0 etags --declarations -D --no-globals -I --no-members
import collections
import sys
src_file = None
got_section_header = 0
# function name => list of files
functions = collections.defaultdict(lambda: set())
for line in sys.stdin:
line = line.rstrip('\r\n')
if got_section_header == 0:
if line != "\x0c":
exit("invalid header first line: %s" % line)
got_section_header = 1
elif got_section_header == 1:
src_file, sep, tail = line.rpartition(',')
if sep != ',':
exit("invalid header second line: %s" % line)
got_section_header = 2
elif got_section_header == 2:
if line == "\x0c":
got_section_header = 1
else:
definition, sep, tail = line.rpartition('\x7f')
if sep != '\x7f':
exit("invalid definition line: %s" % line)
if definition[-1] == '(':
head, sep, function = definition.rpartition(' ')
if sep != ' ':
function = sep
function = function.rstrip('(')
function = function.lstrip('*')
functions[function].add(src_file)
else:
exit("unexpected value for got_section_header, %s" % got_section_header);
for k, v in functions.iteritems():
if len(v) > 1:
print k, len(v), ' '.join(v)
| bsd-3-clause | Python |
|
4331b380e43751a7223e0ee1dee6c1c45ad09a67 | add levy function | aaronkl/RoBO,numairmansur/RoBO,automl/RoBO,aaronkl/RoBO,automl/RoBO,numairmansur/RoBO,aaronkl/RoBO | robo/task/levy.py | robo/task/levy.py | '''
Created on 12.07.2015
@author: Aaron Klein
'''
import numpy as np
from robo.task.base_task import BaseTask
class Levy(BaseTask):
def __init__(self):
X_lower = np.array([-15])
X_upper = np.array([10])
opt = np.array([[1.0]])
fopt = 0.0
super(Levy, self).__init__(X_lower, X_upper, opt=opt, fopt=fopt)
def objective_function(self, x):
z = 1 + ((x - 1.) / 4.)
s = np.power((np.sin(np.pi * z)), 2)
y = (s + ((z - 1) ** 2) * (1 + np.power((np.sin(2 * np.pi * z)), 2)))
return y[:, np.newaxis]
def objective_function_test(self, x):
return self.objective_function(x)
| bsd-3-clause | Python |
|
f2c6e7cf6e60eac5222658d89baf28e1e7d12939 | Test minimal snoop2 | lab11/M-ulator,lab11/M-ulator,lab11/M-ulator,lab11/M-ulator,lab11/M-ulator,lab11/M-ulator,lab11/M-ulator | platforms/m3/programming/mbus_snoop_img2.py | platforms/m3/programming/mbus_snoop_img2.py | #!/usr/bin/python
import os
import sys
import logging
import csv
import time
import datetime
from datetime import datetime
import m3_common
#m3_common.configure_root_logger()
#logger = logging.getLogger(__name__)
from m3_logging import get_logger
logger = get_logger(__name__)
def Bpp_callback(address, data, cb0, cb1):
print(" Time: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + " ADDR: 0x" + address.encode('hex') + " DATA: 0x" + data.encode('hex') + " (ACK: " + str(not cb1) + ")")
m = m3_common.mbus_snooper(Bpp_callback)
m.hang_for_messages()
| apache-2.0 | Python |
|
a8c7c6f08571449b618fd57f298546da6ef80ee9 | Add a pyastro16.py file to use as an auto doc demo | cdeil/sphinx-tutorial | astrospam/pyastro16.py | astrospam/pyastro16.py | """
Python in Astronomy 2016 is the second iteration of the Python in Astronomy
conference series.
This is the docstring for the pyastro module, this gets included as the
description for the module.
"""
import numpy as np
def times(a, b):
"""
Multiply a by b.
Parameters
----------
a : `numpy.ndarray`
Array one.
b : `numpy.ndarray`
Array two
Returns
-------
result : `numpy.ndarray`
``a`` multiplied by ``b``
"""
return np.multipy(a, b)
class PyAstro(object):
"""
This is a class docstring, here you must describe the parameters for the
creation of the class, which is normally the signature of the ``__init__``
method.
Parameters
----------
awesomeness_level : `int`
How awesome is pyastro16??!
day : `int`
Day of the conference. Defaults to 1.
Attributes
----------
awesomeness_level: `int`
How awesome is this class attributes?! You can document attributes that
are not properties here.
"""
def __init__(self, awesomeness_level, day=1):
"""
This docstring is not used, because it is for a hidden method.
"""
self.awesomeness_level = awesomeness_level
self._day = day
@property
def day(self):
"""
Day of the conference.
Properties are automatically documented as attributes
"""
return self._day
| mit | Python |
|
9af2a8341b59098d0ebb88f1e71a3452c338b191 | Add a plotting example. | scipy/scipy,Srisai85/scipy,jakevdp/scipy,jamestwebber/scipy,trankmichael/scipy,sonnyhu/scipy,haudren/scipy,kalvdans/scipy,mortonjt/scipy,teoliphant/scipy,ortylp/scipy,ChanderG/scipy,WillieMaddox/scipy,ilayn/scipy,apbard/scipy,behzadnouri/scipy,gfyoung/scipy,kleskjr/scipy,woodscn/scipy,arokem/scipy,larsmans/scipy,dominicelse/scipy,petebachant/scipy,Eric89GXL/scipy,gertingold/scipy,jjhelmus/scipy,giorgiop/scipy,lukauskas/scipy,jseabold/scipy,chatcannon/scipy,dch312/scipy,pschella/scipy,jjhelmus/scipy,andyfaff/scipy,minhlongdo/scipy,zxsted/scipy,piyush0609/scipy,matthew-brett/scipy,Shaswat27/scipy,perimosocordiae/scipy,gef756/scipy,richardotis/scipy,giorgiop/scipy,pizzathief/scipy,vigna/scipy,dominicelse/scipy,gdooper/scipy,surhudm/scipy,sauliusl/scipy,scipy/scipy,sauliusl/scipy,vigna/scipy,raoulbq/scipy,minhlongdo/scipy,ndchorley/scipy,kalvdans/scipy,ndchorley/scipy,felipebetancur/scipy,endolith/scipy,grlee77/scipy,minhlongdo/scipy,jsilter/scipy,minhlongdo/scipy,mgaitan/scipy,maciejkula/scipy,andyfaff/scipy,perimosocordiae/scipy,maciejkula/scipy,WarrenWeckesser/scipy,surhudm/scipy,vhaasteren/scipy,anielsen001/scipy,e-q/scipy,pnedunuri/scipy,sauliusl/scipy,surhudm/scipy,richardotis/scipy,perimosocordiae/scipy,nmayorov/scipy,kleskjr/scipy,Eric89GXL/scipy,anielsen001/scipy,aman-iitj/scipy,niknow/scipy,fernand/scipy,argriffing/scipy,WarrenWeckesser/scipy,efiring/scipy,efiring/scipy,teoliphant/scipy,mortada/scipy,woodscn/scipy,fredrikw/scipy,woodscn/scipy,Dapid/scipy,njwilson23/scipy,dominicelse/scipy,fernand/scipy,scipy/scipy,Dapid/scipy,pbrod/scipy,sriki18/scipy,cpaulik/scipy,Newman101/scipy,mortada/scipy,sriki18/scipy,nonhermitian/scipy,perimosocordiae/scipy,mdhaber/scipy,grlee77/scipy,piyush0609/scipy,hainm/scipy,mtrbean/scipy,e-q/scipy,piyush0609/scipy,lhilt/scipy,maciejkula/scipy,zerothi/scipy,ilayn/scipy,jseabold/scipy,Gillu13/scipy,jor-/scipy,kleskjr/scipy,vberaudi/scipy,lukauskas/scipy,behzadnouri/scipy,nonhermitian/scipy,pizzathief/scipy,matthew-brett/scipy,fredrikw/scipy,ChanderG/scipy,maniteja123/scipy,FRidh/scipy,WarrenWeckesser/scipy,maniteja123/scipy,hainm/scipy,pyramania/scipy,nonhermitian/scipy,haudren/scipy,ndchorley/scipy,mtrbean/scipy,Stefan-Endres/scipy,grlee77/scipy,aarchiba/scipy,arokem/scipy,vanpact/scipy,ales-erjavec/scipy,behzadnouri/scipy,hainm/scipy,rgommers/scipy,mingwpy/scipy,nvoron23/scipy,andim/scipy,sonnyhu/scipy,anntzer/scipy,jor-/scipy,mingwpy/scipy,lukauskas/scipy,rmcgibbo/scipy,Kamp9/scipy,WarrenWeckesser/scipy,ChanderG/scipy,nmayorov/scipy,mgaitan/scipy,mdhaber/scipy,anielsen001/scipy,Stefan-Endres/scipy,mgaitan/scipy,mortada/scipy,andyfaff/scipy,vberaudi/scipy,newemailjdm/scipy,witcxc/scipy,andyfaff/scipy,rmcgibbo/scipy,sauliusl/scipy,raoulbq/scipy,WarrenWeckesser/scipy,jseabold/scipy,matthewalbani/scipy,aman-iitj/scipy,jakevdp/scipy,niknow/scipy,rmcgibbo/scipy,aman-iitj/scipy,arokem/scipy,anielsen001/scipy,pyramania/scipy,vigna/scipy,mdhaber/scipy,chatcannon/scipy,felipebetancur/scipy,dominicelse/scipy,fernand/scipy,gef756/scipy,Stefan-Endres/scipy,zerothi/scipy,chatcannon/scipy,Eric89GXL/scipy,sriki18/scipy,sonnyhu/scipy,newemailjdm/scipy,ortylp/scipy,njwilson23/scipy,ortylp/scipy,zerothi/scipy,WarrenWeckesser/scipy,vhaasteren/scipy,minhlongdo/scipy,ales-erjavec/scipy,befelix/scipy,newemailjdm/scipy,ndchorley/scipy,richardotis/scipy,zxsted/scipy,person142/scipy,vanpact/scipy,nmayorov/scipy,ogrisel/scipy,mhogg/scipy,gdooper/scipy,Dapid/scipy,sonnyhu/scipy,argriffing/scipy,petebachant/scipy,
ilayn/scipy,surhudm/scipy,aarchiba/scipy,Newman101/scipy,lhilt/scipy,njwilson23/scipy,sonnyhu/scipy,efiring/scipy,Eric89GXL/scipy,ogrisel/scipy,fredrikw/scipy,pschella/scipy,bkendzior/scipy,vberaudi/scipy,pnedunuri/scipy,behzadnouri/scipy,Shaswat27/scipy,argriffing/scipy,mgaitan/scipy,apbard/scipy,zerothi/scipy,zxsted/scipy,jjhelmus/scipy,josephcslater/scipy,larsmans/scipy,mortada/scipy,zxsted/scipy,aeklant/scipy,mdhaber/scipy,vigna/scipy,zaxliu/scipy,WillieMaddox/scipy,giorgiop/scipy,niknow/scipy,pyramania/scipy,lukauskas/scipy,matthewalbani/scipy,Gillu13/scipy,jakevdp/scipy,Kamp9/scipy,anntzer/scipy,futurulus/scipy,WillieMaddox/scipy,zaxliu/scipy,pnedunuri/scipy,pizzathief/scipy,jamestwebber/scipy,jsilter/scipy,gertingold/scipy,maniteja123/scipy,josephcslater/scipy,Srisai85/scipy,petebachant/scipy,ales-erjavec/scipy,teoliphant/scipy,jjhelmus/scipy,cpaulik/scipy,ilayn/scipy,efiring/scipy,sriki18/scipy,petebachant/scipy,witcxc/scipy,andim/scipy,zerothi/scipy,mikebenfield/scipy,ndchorley/scipy,gfyoung/scipy,mortonjt/scipy,josephcslater/scipy,jor-/scipy,Eric89GXL/scipy,anielsen001/scipy,maniteja123/scipy,kleskjr/scipy,vberaudi/scipy,gef756/scipy,lhilt/scipy,sriki18/scipy,scipy/scipy,mhogg/scipy,pnedunuri/scipy,vanpact/scipy,dch312/scipy,ortylp/scipy,rgommers/scipy,zaxliu/scipy,chatcannon/scipy,jseabold/scipy,jonycgn/scipy,kleskjr/scipy,ChanderG/scipy,gdooper/scipy,mikebenfield/scipy,Shaswat27/scipy,aman-iitj/scipy,mikebenfield/scipy,dominicelse/scipy,mortonjt/scipy,apbard/scipy,Srisai85/scipy,person142/scipy,Newman101/scipy,maciejkula/scipy,felipebetancur/scipy,sargas/scipy,jakevdp/scipy,niknow/scipy,newemailjdm/scipy,Gillu13/scipy,Newman101/scipy,dch312/scipy,pbrod/scipy,mhogg/scipy,rmcgibbo/scipy,jonycgn/scipy,lhilt/scipy,ogrisel/scipy,mingwpy/scipy,FRidh/scipy,jsilter/scipy,haudren/scipy,vanpact/scipy,WillieMaddox/scipy,mortonjt/scipy,sonnyhu/scipy,pbrod/scipy,mdhaber/scipy,larsmans/scipy,WillieMaddox/scipy,josephcslater/scipy,andim/scipy,mgaitan/scipy,mingwpy/scipy,larsmans/scipy,cpaulik/scipy,sargas/scipy,gef756/scipy,Kamp9/scipy,mdhaber/scipy,gef756/scipy,petebachant/scipy,apbard/scipy,kalvdans/scipy,ales-erjavec/scipy,argriffing/scipy,sargas/scipy,anntzer/scipy,pschella/scipy,mortonjt/scipy,befelix/scipy,Shaswat27/scipy,teoliphant/scipy,lhilt/scipy,tylerjereddy/scipy,haudren/scipy,zaxliu/scipy,ilayn/scipy,vhaasteren/scipy,rmcgibbo/scipy,josephcslater/scipy,fernand/scipy,Stefan-Endres/scipy,Gillu13/scipy,sriki18/scipy,Shaswat27/scipy,pschella/scipy,apbard/scipy,aarchiba/scipy,befelix/scipy,grlee77/scipy,aarchiba/scipy,ogrisel/scipy,maniteja123/scipy,teoliphant/scipy,futurulus/scipy,ndchorley/scipy,vberaudi/scipy,newemailjdm/scipy,scipy/scipy,hainm/scipy,lukauskas/scipy,jsilter/scipy,gfyoung/scipy,rgommers/scipy,kalvdans/scipy,woodscn/scipy,perimosocordiae/scipy,dch312/scipy,zerothi/scipy,cpaulik/scipy,giorgiop/scipy,futurulus/scipy,ortylp/scipy,woodscn/scipy,mikebenfield/scipy,mtrbean/scipy,felipebetancur/scipy,pbrod/scipy,gertingold/scipy,jonycgn/scipy,bkendzior/scipy,bkendzior/scipy,pbrod/scipy,fredrikw/scipy,ales-erjavec/scipy,dch312/scipy,anntzer/scipy,pnedunuri/scipy,anntzer/scipy,aeklant/scipy,Dapid/scipy,chatcannon/scipy,nvoron23/scipy,pyramania/scipy,pbrod/scipy,raoulbq/scipy,nvoron23/scipy,jamestwebber/scipy,matthew-brett/scipy,gertingold/scipy,sargas/scipy,vberaudi/scipy,pyramania/scipy,zaxliu/scipy,njwilson23/scipy,futurulus/scipy,zaxliu/scipy,jor-/scipy,mhogg/scipy,fernand/scipy,trankmichael/scipy,jor-/scipy,anntzer/scipy,Stefan-Endres/scipy,gdooper/scipy,juliantaylor/scipy,
FRidh/scipy,mortada/scipy,Srisai85/scipy,woodscn/scipy,larsmans/scipy,maciejkula/scipy,Kamp9/scipy,bkendzior/scipy,ChanderG/scipy,mtrbean/scipy,nonhermitian/scipy,newemailjdm/scipy,vhaasteren/scipy,Newman101/scipy,mhogg/scipy,mtrbean/scipy,niknow/scipy,jsilter/scipy,juliantaylor/scipy,sargas/scipy,argriffing/scipy,endolith/scipy,mgaitan/scipy,aeklant/scipy,juliantaylor/scipy,gef756/scipy,nvoron23/scipy,lukauskas/scipy,grlee77/scipy,piyush0609/scipy,mingwpy/scipy,argriffing/scipy,tylerjereddy/scipy,vhaasteren/scipy,fredrikw/scipy,nvoron23/scipy,haudren/scipy,richardotis/scipy,witcxc/scipy,piyush0609/scipy,ales-erjavec/scipy,jonycgn/scipy,nvoron23/scipy,larsmans/scipy,andim/scipy,Eric89GXL/scipy,person142/scipy,e-q/scipy,felipebetancur/scipy,jjhelmus/scipy,andim/scipy,kleskjr/scipy,matthewalbani/scipy,perimosocordiae/scipy,trankmichael/scipy,gfyoung/scipy,Kamp9/scipy,Newman101/scipy,pizzathief/scipy,mortada/scipy,mortonjt/scipy,chatcannon/scipy,jamestwebber/scipy,juliantaylor/scipy,vanpact/scipy,njwilson23/scipy,rgommers/scipy,jseabold/scipy,matthewalbani/scipy,felipebetancur/scipy,andyfaff/scipy,e-q/scipy,Srisai85/scipy,richardotis/scipy,futurulus/scipy,FRidh/scipy,aeklant/scipy,Gillu13/scipy,FRidh/scipy,WillieMaddox/scipy,efiring/scipy,jakevdp/scipy,pnedunuri/scipy,Srisai85/scipy,nmayorov/scipy,arokem/scipy,pizzathief/scipy,zxsted/scipy,e-q/scipy,rgommers/scipy,niknow/scipy,sauliusl/scipy,FRidh/scipy,kalvdans/scipy,nmayorov/scipy,richardotis/scipy,mingwpy/scipy,witcxc/scipy,tylerjereddy/scipy,trankmichael/scipy,vhaasteren/scipy,nonhermitian/scipy,pschella/scipy,petebachant/scipy,raoulbq/scipy,giorgiop/scipy,befelix/scipy,matthewalbani/scipy,aman-iitj/scipy,ogrisel/scipy,bkendzior/scipy,sauliusl/scipy,anielsen001/scipy,vigna/scipy,witcxc/scipy,haudren/scipy,surhudm/scipy,endolith/scipy,aman-iitj/scipy,jseabold/scipy,endolith/scipy,andyfaff/scipy,endolith/scipy,aeklant/scipy,aarchiba/scipy,Dapid/scipy,ilayn/scipy,matthew-brett/scipy,raoulbq/scipy,endolith/scipy,cpaulik/scipy,Kamp9/scipy,tylerjereddy/scipy,mhogg/scipy,giorgiop/scipy,mikebenfield/scipy,rmcgibbo/scipy,ChanderG/scipy,gdooper/scipy,hainm/scipy,fernand/scipy,minhlongdo/scipy,jonycgn/scipy,piyush0609/scipy,raoulbq/scipy,befelix/scipy,zxsted/scipy,Stefan-Endres/scipy,cpaulik/scipy,Shaswat27/scipy,Dapid/scipy,scipy/scipy,person142/scipy,surhudm/scipy,behzadnouri/scipy,trankmichael/scipy,matthew-brett/scipy,person142/scipy,maniteja123/scipy,andim/scipy,trankmichael/scipy,ortylp/scipy,arokem/scipy,efiring/scipy,fredrikw/scipy,juliantaylor/scipy,mtrbean/scipy,tylerjereddy/scipy,Gillu13/scipy,jonycgn/scipy,gfyoung/scipy,gertingold/scipy,hainm/scipy,behzadnouri/scipy,njwilson23/scipy,futurulus/scipy,vanpact/scipy,jamestwebber/scipy | Lib/sandbox/pyem/examples/plotexamples.py | Lib/sandbox/pyem/examples/plotexamples.py | #! /usr/bin/env python
# Last Change: Mon Jun 11 03:00 PM 2007 J
# This is a simple test to check whether plotting ellipsoides of confidence and
# isodensity contours match
import numpy as N
from numpy.testing import set_package_path, restore_path
import pylab as P
set_package_path()
import pyem
restore_path()
# Generate a simple mixture model, plot its confidence ellipses + isodensity
# curves for both diagonal and full covariance matrices
d = 3
k = 3
dim = [0, 2]
# diag model
w, mu, va = pyem.GM.gen_param(d, k)
dgm = pyem.GM.fromvalues(w, mu, va)
# full model
w, mu, va = pyem.GM.gen_param(d, k, 'full', spread = 1)
fgm = pyem.GM.fromvalues(w, mu, va)
def plot_model(gm, dim):
X, Y, Z, V = gm.density_on_grid(dim = dim)
h = gm.plot(dim = dim)
[i.set_linestyle('-.') for i in h]
P.contour(X, Y, Z, V)
data = gm.sample(200)
P.plot(data[:, dim[0]], data[:,dim[1]], '.')
# Plot the contours and the ellipsoids of confidence
P.subplot(2, 1, 1)
plot_model(dgm, dim)
P.subplot(2, 1, 2)
plot_model(fgm, dim)
P.show()
| bsd-3-clause | Python |
|
71ac93da2eed58bbd53bb13d4ade308404be18ad | Add auth0.v2.connection | auth0/auth0-python,auth0/auth0-python | auth0/v2/connection.py | auth0/v2/connection.py | from .rest import RestClient
class Connection(object):
"""Auth0 connection endpoints"""
def __init__(self, domain, jwt_token):
url = 'https://%s/api/v2/connections' % domain
self.client = RestClient(endpoint=url, jwt=jwt_token)
def all(self, strategy=None, fields=[], include_fields=True):
"""Retrieves all connections.
Args:
strategy (str, optional): Only retrieve connections of
this strategy type. (e.g: strategy='amazon')
fields (list of str, optional): A list of fields to include or
exclude from the result (depending on include_fields). Empty to
retrieve all fields.
include_fields (bool, optional): True if the fields specified are
to be include in the result, False otherwise.
Returns:
A list of connection objects.
"""
params = {'strategy': strategy or None,
'fields': ','.join(fields) or None,
'include_fields': str(include_fields).lower()}
return self.client.get(params=params)
def get(self, id, fields=[], include_fields=True):
"""Retrieve connection by id.
Args:
id (str): Id of the connection to get.
fields (list of str, optional): A list of fields to include or
exclude from the result (depending on include_fields). Empty to
retrieve all fields.
include_fields (bool, optional): True if the fields specified are
to be include in the result, False otherwise.
Returns:
A connection object.
"""
params = {'fields': ','.join(fields) or None,
'include_fields': str(include_fields).lower()}
return self.client.get(params=params, id=id)
def delete(self, id):
"""Deletes a connection and all its users.
Args:
id: Id of the connection to delete.
Returns:
An empty dict.
"""
return self.client.delete(id=id)
def update(self, id, body):
"""Modifies a connection.
Args:
id: Id of the connection.
body (dict): Specifies which fields are to be modified, and to what
values.
Returns:
The modified connection object.
"""
return self.client.patch(id=id, data=body)
def create(self, body):
"""Creates a new connection. """
return self.client.post(data=body)
| mit | Python |
|
7e600a791bec2f8639aae417a1ea052ca94cf7b9 | Add a largish auto-generated test for the aligned bundling feature, along with the script generating it. The test should never be modified manually. If anyone needs to change it, please change the script and re-run it. | lodyagin/bare_cxx,lodyagin/bare_cxx,lodyagin/bare_cxx,lodyagin/bare_cxx,lodyagin/bare_cxx | testgen/mc-bundling-x86-gen.py | testgen/mc-bundling-x86-gen.py | #!/usr/bin/python
# Auto-generates an exhaustive and repetitive test for correct bundle-locked
# alignment on x86.
# For every possible offset in an aligned bundle, a bundle-locked group of every
# size in the inclusive range [1, bundle_size] is inserted. An appropriate CHECK
# is added to verify that NOP padding occurred (or did not occur) as expected.
# This script runs with Python 2.6+ (including 3.x)
from __future__ import print_function
BUNDLE_SIZE_POW2 = 4
BUNDLE_SIZE = 2 ** BUNDLE_SIZE_POW2
PREAMBLE = '''
# RUN: llvm-mc -filetype=obj -triple i386-pc-linux-gnu %s -o - \\
# RUN: | llvm-objdump -triple i386 -disassemble -no-show-raw-insn - | FileCheck %s
# !!! This test is auto-generated from utils/testgen/mc-bundling-x86-gen.py !!!
# It tests that bundle-aligned grouping works correctly in MC. Read the
# source of the script for more details.
.text
.bundle_align_mode {0}
'''.format(BUNDLE_SIZE_POW2).lstrip()
ALIGNTO = ' .align {0}, 0x90'
NOPFILL = ' .fill {0}, 1, 0x90'
def print_bundle_locked_sequence(len):
print(' .bundle_lock')
print(' .rept {0}'.format(len))
print(' inc %eax')
print(' .endr')
print(' .bundle_unlock')
def generate():
print(PREAMBLE)
ntest = 0
for instlen in range(1, BUNDLE_SIZE + 1):
for offset in range(0, BUNDLE_SIZE):
# Spread out all the instructions to not worry about cross-bundle
# interference.
print(ALIGNTO.format(2 * BUNDLE_SIZE))
print('INSTRLEN_{0}_OFFSET_{1}:'.format(instlen, offset))
if offset > 0:
print(NOPFILL.format(offset))
print_bundle_locked_sequence(instlen)
# Now generate an appropriate CHECK line
base_offset = ntest * 2 * BUNDLE_SIZE
inst_orig_offset = base_offset + offset # had it not been padded...
if offset + instlen > BUNDLE_SIZE:
# Padding needed
print('# CHECK: {0:x}: nop'.format(inst_orig_offset))
aligned_offset = (inst_orig_offset + instlen) & ~(BUNDLE_SIZE - 1)
print('# CHECK: {0:x}: incl'.format(aligned_offset))
else:
# No padding needed
print('# CHECK: {0:x}: incl'.format(inst_orig_offset))
print()
ntest += 1
if __name__ == '__main__':
generate()
| bsd-3-clause | Python |
|
15150516e1915948b10abed70e964a5b6109013b | Add ExtractAttribute | maxalbert/tohu | tohu/derived_generators_NEW.py | tohu/derived_generators_NEW.py | import logging
from operator import attrgetter
from .base_NEW import TohuUltraBaseGenerator
__all__ = ['ExtractAttribute']
logger = logging.getLogger('tohu')
class ExtractAttribute(TohuUltraBaseGenerator):
"""
Generator which produces items that are attributes extracted from
the items produced by a different generator.
"""
def __init__(self, g, attr_name):
logger.debug(f"Extracting attribute '{attr_name}' from parent={g}")
self.parent = g
self.gen = g.clone()
self.attr_name = attr_name
self.attrgetter = attrgetter(attr_name)
def __repr__(self):
return f"<ExtractAttribute '{self.attr_name}' from {self.parent} >"
def spawn(self, dependency_mapping):
logger.warning(f'ExtractAttribute.spawn(): dependency_mapping={dependency_mapping}')
raise NotImplementedError()
def __next__(self):
return self.attrgetter(next(self.gen))
| mit | Python |
|
c9afc35d2be96adea47e79a4c0042235e4ffd594 | add ldap-filter-cut.py | bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile | python/python/openldap/ldap-filter-cut.py | python/python/openldap/ldap-filter-cut.py | #!/usr/bin/env python
'''
Copyright (C) 2011 Bryan Maupin <bmaupincode@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
'''
Takes an OpenLDAP log file, cuts the filters out of it, and writes the unique
filters to a new file.
'''
import re
import sys
def main():
filters = []
pattern = re.compile('filter="(.*)"')
# the input file is the first argument to this script
infile_name = sys.argv[1]
infile = open(infile_name)
for line in infile:
match = pattern.search(line)
if match:
filter = match.group(1)
if filter not in filters:
filters.append(filter)
infile.close()
print '%s filters found' % (len(filters))
# the output file is the second argument to this script
outfile_name = sys.argv[2]
outfile = open(outfile_name, 'w')
for filter in filters:
outfile.write('%s\n' % (filter))
outfile.close()
# calls the main() function when the script runs
if __name__ == '__main__':
main()
| mit | Python |
|
3a19187e8116e8dc20166786fb1ca4d14b527950 | Add missing IDL Visistor class | yitian134/chromium,adobe/chromium,gavinp/chromium,ropik/chromium,adobe/chromium,gavinp/chromium,yitian134/chromium,ropik/chromium,adobe/chromium,yitian134/chromium,gavinp/chromium,ropik/chromium,adobe/chromium,gavinp/chromium,yitian134/chromium,gavinp/chromium,adobe/chromium,adobe/chromium,adobe/chromium,adobe/chromium,yitian134/chromium,ropik/chromium,gavinp/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,yitian134/chromium,yitian134/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,adobe/chromium,ropik/chromium,yitian134/chromium,gavinp/chromium,ropik/chromium,ropik/chromium,ropik/chromium,ropik/chromium,gavinp/chromium | ppapi/generators/idl_visitor.py | ppapi/generators/idl_visitor.py | #!/usr/bin/python
#
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Visitor Object for traversing AST """
#
# IDLVisitor
#
# The IDLVisitor class will traverse an AST truncating portions of the tree
# that fail due to class or version filters. For each node, after the filter
# passes, the visitor will call the 'Arive' member passing in the node and
# and data passing in from the parent call. It will then Visit the children.
# When done processing children, the visitor will call the 'Depart' member
# before returning
#
class IDLVisitor(object):
def __init__(self):
self.depth = 0
# Return TRUE if the node should be visited
def VisitFilter(self, node, data):
return True
# Return TRUE if data should be added to the childdata list
def AgrigateFilter(self, data):
return data is not None
def Visit(self, node, data):
self.depth += 1
if not self.VisitFilter(node, data): return None
childdata = []
newdata = self.Arrive(node, data)
for child in node.GetChildren():
ret = self.Visit(child, newdata)
if self.AgrigateFilter(ret):
childdata.append(ret)
out = self.Depart(node, newdata, childdata)
self.depth -= 1
return out
def Arrive(self, node, data):
return data
def Depart(self, node, data, childdata):
return data
#
# IDLVersionVisitor
#
# The IDLVersionVisitor will only visit nodes with intervals that include the
# version. It will also optionally filter based on a class list
#
class IDLVersionVisitor(object):
def __init__(self, version, classList):
self.version = version
self.classes = classes
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
class IDLRangeVisitor(object):
def __init__(self, vmin, vmax, classList):
self.vmin = vmin
self.vmax = vmax
self.classList = classList
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
| bsd-3-clause | Python |
|
bbed7b813b6c809ee9615eabf2fcf4d3156b1c36 | Add script to convert release notes from Markdown | adafruit/micropython,adafruit/circuitpython,adafruit/micropython,adafruit/circuitpython,adafruit/circuitpython,adafruit/micropython,adafruit/circuitpython,adafruit/micropython,adafruit/circuitpython,adafruit/micropython,adafruit/circuitpython | tools/convert_release_notes.py | tools/convert_release_notes.py | import sys
import mistune
print(sys.argv[1])
with open(sys.argv[1], "r") as source_file:
source = source_file.read()
html = mistune.Markdown()
print()
print("HTML")
print("=====================================")
print("From the <a href=\"\">GitHub release page</a>:\n<blockquote>")
print(html(source))
print("</blockquote>")
class AdafruitBBCodeRenderer:
def __init__(self, **kwargs):
self.options = kwargs
def placeholder(self):
return ''
def paragraph(self, text):
return text + "\n\n"
def text(self, text):
return text
def link(self, link, title, text):
return "[url={}]{}[/url]".format(link, text)
def header(self, text, level, raw):
return "[b][size=150]{}[/size][/b]\n".format(text)
def codespan(self, text):
return "[color=#E74C3C][size=95]{}[/size][/color]".format(text)
def list_item(self, text):
return "[*]{}[/*]\n".format(text.strip())
def list(self, body, ordered=True):
ordered_indicator = "=" if ordered else ""
return "[list{}]\n{}[/list]".format(ordered_indicator, body)
def double_emphasis(self, text):
return "[b]{}[/b]".format(text)
bbcode = mistune.Markdown(renderer=AdafruitBBCodeRenderer())
print()
print("BBCode")
print("=====================================")
print("From the [url=]GitHub release page[/url]:\n[quote]")
print(bbcode(source))
print("[/quote]")
| mit | Python |
|
1db5cd0fddbbcc1d38a08bfe8ad6cfb8d0b5c550 | add migration to create new model fields | byteweaver/django-coupons,byteweaver/django-coupons | coupons/migrations/0004_auto_20151105_1456.py | coupons/migrations/0004_auto_20151105_1456.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('coupons', '0003_auto_20150416_0617'),
]
operations = [
migrations.CreateModel(
name='CouponUser',
fields=[
('id', models.AutoField(serialize=False, auto_created=True, primary_key=True, verbose_name='ID')),
('redeemed_at', models.DateTimeField(blank=True, verbose_name='Redeemed at', null=True)),
],
),
migrations.AddField(
model_name='coupon',
name='user_limit',
field=models.PositiveIntegerField(verbose_name='User limit', default=1),
),
migrations.AlterField(
model_name='coupon',
name='type',
field=models.CharField(choices=[('monetary', 'Money based coupon'), ('percentage', 'Percentage discount'), ('virtual_currency', 'Virtual currency')], verbose_name='Type', max_length=20),
),
migrations.AddField(
model_name='couponuser',
name='coupon',
field=models.ForeignKey(related_name='users', to='coupons.Coupon'),
),
migrations.AddField(
model_name='couponuser',
name='user',
field=models.ForeignKey(null=True, to=settings.AUTH_USER_MODEL, blank=True, verbose_name='User'),
),
]
| bsd-3-clause | Python |
|
3bd7c50acfc8044fc33002530a5fcaa0b5c2152e | add module 'job' for reset queue | roramirez/qpanel,roramirez/qpanel,roramirez/qpanel,skazancev/qpanel,skazancev/qpanel,skazancev/qpanel,skazancev/qpanel,roramirez/qpanel | libs/qpanel/job.py | libs/qpanel/job.py | import backend
import config
from redis import Redis
from rq_scheduler import Scheduler
import datetime
def reset_stats_queue(queuename, when, hour):
'''
    Reset the stats for a queue on the backend.

    queuename: name of the queue to reset.
    when, hour: passed through so exists_job_onqueue can verify the job
    is still configured before resetting.
'''
remove_jobs_not_config()
if not exists_job_onqueue(queuename, when, hour):
return False
b = backend.Backend()
return b.reset_stats(queuename)
def job_reset_stats_queue(queuename, when, hour):
scheduler = Scheduler(connection=Redis())
remove_jobs_not_config()
if not exists_job_onqueue(queuename, when, hour):
scheduler.schedule(
scheduled_time=datetime_from_config(when, hour),
func=reset_stats_queue,
args=[queuename, when, hour],
interval=seconds_from_config_interval(when)
)
def exists_job_onqueue(queuename, when, hour):
"""
    Check whether a reset job with these parameters is already scheduled.
"""
scheduler = Scheduler(connection=Redis())
jobs = scheduler.get_jobs()
for job in jobs:
if 'reset_stats_queue' in job.func_name:
args = job.args
if queuename == args[0] and when == args[1] and hour == args[2]:
return True
return False
def remove_jobs_not_config():
"""
    Remove scheduled jobs whose queue is no longer present in the config.

    This covers the case where a reset-stats job was scheduled for a queue
    whose config entry was later modified or deleted.
"""
scheduler = Scheduler(connection=Redis())
queue_for_reset = config.QPanelConfig().queues_for_reset_stats()
jobs = scheduler.get_jobs()
for job in jobs:
if 'reset_stats_queue' in job.func_name:
q = job.args[0]
if q not in queue_for_reset.keys():
job.delete()
def enqueue_reset_stats():
queues_for_reset = config.QPanelConfig().queues_for_reset_stats()
for queue, val in queues_for_reset.items():
job_reset_stats_queue(queue, val['when'], val['hour'])
def seconds_from_config_interval(val):
"""
    Return the repeat interval in seconds for a config 'when' value.
"""
val = val.lower()
day = 0
if val == 'daily':
day = 1
    elif val in ['weekly', 'sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']:
day = 7
elif val == 'monthly':
day = 30
return day * 24 * 60 * 60 # day * hour * minute * seconds
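# For example (given the mapping above):
#   seconds_from_config_interval('daily')  == 86400
#   seconds_from_config_interval('weekly') == 604800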
def datetime_from_config(when, hour):
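    # NOTE: 'when' and 'hour' are currently unused here; the schedule starts
    # from "now" and repeats every seconds_from_config_interval(when) seconds.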
return datetime.datetime.utcnow()
| mit | Python |
|
b0c3ed39916e25bed2900b653974672a39fcb254 | Use CHROME_HEADLESS to check if download_sdk_extras.py is running on a bot. | chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,ltilve/chromium,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,ltilve/chromium,Chilledheart/chromium,Chilledheart/chromium,Fireblend/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk | build/download_sdk_extras.py | build/download_sdk_extras.py | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script to download sdk/extras packages on the bots from google storage.
The script expects arguments that specify zips file in the google storage
bucket named: <dir in SDK extras>_<package name>_<version>.zip. The file will
be extracted in the android_tools/sdk/extras directory on the test bots. This
script will not do anything for developers.
"""
import json
import os
import shutil
import subprocess
import sys
import zipfile
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
sys.path.insert(0, os.path.join(SCRIPT_DIR, 'android'))
sys.path.insert(1, os.path.join(CHROME_SRC, 'tools'))
from pylib import constants
import find_depot_tools
DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
SDK_EXTRAS_BUCKET = 'gs://chrome-sdk-extras'
SDK_EXTRAS_PATH = os.path.join(constants.ANDROID_SDK_ROOT, 'extras')
SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
'android_sdk_extras.json')
def clean_and_extract(dir_name, package_name, zip_file):
local_dir = '%s/%s/%s' % (SDK_EXTRAS_PATH, dir_name, package_name)
if os.path.exists(local_dir):
shutil.rmtree(local_dir)
local_zip = '%s/%s' % (SDK_EXTRAS_PATH, zip_file)
with zipfile.ZipFile(local_zip) as z:
z.extractall(path=SDK_EXTRAS_PATH)
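# Hypothetical android_sdk_extras.json entry (names illustrative, not from
# the real file):
#   {"dir_name": "google", "package": "m2repository",
#    "zip": "google_m2repository_1.0.zip"}
# clean_and_extract() removes extras/google/m2repository and re-extracts the
# zip under extras/ (the zip is assumed to contain that directory layout).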
def main():
if not os.environ.get('CHROME_HEADLESS'):
# This is not a buildbot checkout.
return 0
# Update the android_sdk_extras.json file to update downloaded packages.
with open(SDK_EXTRAS_JSON_FILE) as json_file:
packages = json.load(json_file)
for package in packages:
local_zip = '%s/%s' % (SDK_EXTRAS_PATH, package['zip'])
if not os.path.exists(local_zip):
package_zip = '%s/%s' % (SDK_EXTRAS_BUCKET, package['zip'])
subprocess.check_call(['python', GSUTIL_PATH, '--force-version', '4.7',
'cp', package_zip, local_zip])
# Always clean dir and extract zip to ensure correct contents.
clean_and_extract(package['dir_name'], package['package'], package['zip'])
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script to download sdk/extras packages on the bots from google storage.
The script expects arguments that specify zips file in the google storage
bucket named: <dir in SDK extras>_<package name>_<version>.zip. The file will
be extracted in the android_tools/sdk/extras directory.
"""
import json
import os
import shutil
import subprocess
import sys
import zipfile
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'android'))
from pylib import constants
GSUTIL_PATH = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
os.pardir, os.pardir, os.pardir, os.pardir, 'depot_tools', 'gsutil.py')
SDK_EXTRAS_BUCKET = 'gs://chrome-sdk-extras'
SDK_EXTRAS_PATH = os.path.join(constants.ANDROID_SDK_ROOT, 'extras')
SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
'android_sdk_extras.json')
def clean_and_extract(dir_name, package_name, zip_file):
local_dir = '%s/%s/%s' % (SDK_EXTRAS_PATH, dir_name, package_name)
if os.path.exists(local_dir):
shutil.rmtree(local_dir)
local_zip = '%s/%s' % (SDK_EXTRAS_PATH, zip_file)
with zipfile.ZipFile(local_zip) as z:
z.extractall(path=SDK_EXTRAS_PATH)
def main():
if not os.path.exists(GSUTIL_PATH) or not os.path.exists(SDK_EXTRAS_PATH):
# This is not a buildbot checkout.
return 0
# Update the android_sdk_extras.json file to update downloaded packages.
with open(SDK_EXTRAS_JSON_FILE) as json_file:
packages = json.load(json_file)
for package in packages:
local_zip = '%s/%s' % (SDK_EXTRAS_PATH, package['zip'])
if not os.path.exists(local_zip):
package_zip = '%s/%s' % (SDK_EXTRAS_BUCKET, package['zip'])
subprocess.check_call([GSUTIL_PATH, '--force-version', '4.7', 'cp',
package_zip, local_zip])
# Always clean dir and extract zip to ensure correct contents.
clean_and_extract(package['dir_name'], package['package'], package['zip'])
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | Python |
80580b8667558e3a4034b31ac08773de70ef3b39 | Implement consumer for adjusting screen brightness. | ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display | display_control_consumer/run.py | display_control_consumer/run.py | from setproctitle import setproctitle
import json
import redis
import subprocess
import time
class DisplayControlConsumer(object):
STEP = 0.05
def __init__(self):
self.redis_instance = redis.StrictRedis()
self.env = {"DISPLAY": ":0"}
def get_brightness(self):
p = subprocess.Popen(["xrandr", "--verbose"], env=self.env, stdout=subprocess.PIPE)
(stdout, _) = p.communicate()
for line in stdout.split("\n"):
if "Brightness" in line:
return float(line.strip().split(": ")[1])
def set_brightness(self, brightness):
p = subprocess.Popen(["xrandr", "--q1", "--output", "HDMI-0", "--brightness", unicode(brightness)], env=self.env)
p.wait()
self.redis_instance.setex("display-control-brightness", 60, brightness)
def run(self):
while True:
time.sleep(1)
destination_brightness = self.redis_instance.get("display-control-destination-brightness")
if not destination_brightness:
continue
destination_brightness = float(destination_brightness)
current_brightness = self.redis_instance.get("display-control-brightness")
if current_brightness:
current_brightness = float(current_brightness)
else:
current_brightness = self.get_brightness()
self.redis_instance.setex("display-control-brightness", 60, current_brightness)
if current_brightness > destination_brightness:
# Decrease brightness. Current brightness is too large.
new_brightness = current_brightness - self.STEP
print "Decreasing brightness: %s (-> %s, currently at %s)" % (new_brightness, destination_brightness, current_brightness)
if new_brightness < destination_brightness:
# Wrapped around: new brightness is smaller than destination brightness.; no action
print "Brightness wrapped around"
self.redis_instance.delete("display-control-destination-brightness")
continue
elif current_brightness < destination_brightness:
# Increase brightness
new_brightness = current_brightness + self.STEP
print "Increasing brightness: %s (-> %s, currently at %s)" % (new_brightness, destination_brightness, current_brightness)
if new_brightness > destination_brightness:
# Wrapped around; no action
self.redis_instance.delete("display-control-destination-brightness")
continue
else:
# Already matches. No action.
self.redis_instance.delete("display-control-destination-brightness")
continue
print "Setting brightness to %s (destination: %s)" % (new_brightness, destination_brightness)
self.set_brightness(new_brightness)
self.redis_instance.publish("home:broadcast:generic", json.dumps({"key": "display_brightness", "content": new_brightness}))
def main():
setproctitle("display_control_consumer: run")
dcc = DisplayControlConsumer()
dcc.run()
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
|
5a376ef0d49193df46fc127323bfa50376e3c968 | add lqr sample | AtsushiSakai/PyAdvancedControl,AtsushiSakai/PyAdvancedControl | lqr_sample/main.py | lqr_sample/main.py | #! /usr/bin/python
# -*- coding: utf-8 -*-
u"""
Linear-Quadratic Regulator sample code
author Atsushi Sakai
"""
import matplotlib.pyplot as plt
import numpy as np
import scipy.linalg as la
simTime=3.0
dt=0.1
A=np.matrix([[1.1,2.0],[0,0.95]])
B=np.matrix([0.0,0.0787]).T
C=np.matrix([-2,1])
def Observation(x):
y=C*x
ry=float(y[0])
return (ry)
def Process(x,u):
x=A*x+B*u
return (x)
def dlqr(A,B,Q,R):
"""Solve the discrete time lqr controller.
x[k+1] = A x[k] + B u[k]
cost = sum x[k].T*Q*x[k] + u[k].T*R*u[k]
"""
#ref Bertsekas, p.151
#first, try to solve the ricatti equation
X = np.matrix(la.solve_discrete_are(A, B, Q, R))
#compute the LQR gain
K = np.matrix(la.inv(B.T*X*B+R)*(B.T*X*A))
eigVals, eigVecs = la.eig(A-B*K)
return K, X, eigVals
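# Sanity note: with the gain K returned above, the closed-loop dynamics are
# x[k+1] = (A - B*K) x[k], and eigVals are the closed-loop eigenvalues, which
# should all lie strictly inside the unit circle for a stabilising K.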
def LQRController(x,u):
K,X,ev=dlqr(A,B,C.T*np.eye(1)*C,np.eye(1))
u=-K*x
return u
def Main():
time=0.0
u_history=[]
y_history=[]
time_history=[]
x=np.matrix([3,1]).T
u=np.matrix([0,0,0])
while time<=simTime:
u=LQRController(x,u)
u0=float(u[0,0])
x=Process(x,u0)
y=Observation(x)
u_history.append(u0)
y_history.append(y)
time_history.append(time)
time+=dt
plt.plot(time_history,u_history,"-r",label="input")
plt.plot(time_history,y_history,"-b",label="output")
plt.grid(True)
plt.xlim([0,simTime])
plt.legend()
plt.show()
if __name__ == '__main__':
Main()
| mit | Python |
|
d83b18ec4faa513c7171a23af5ba46397141519e | add main __init__.py | helo9/wingstructure | wingstructure/__init__.py | wingstructure/__init__.py | from . import analysis
from . import data
from . import liftingline
from . import structure
| mit | Python |
|
81df43350fdcbde85780dfbf1101e47fff04dc6c | Add missing migration | genialis/resolwe,jberci/resolwe,genialis/resolwe,jberci/resolwe | resolwe/flow/migrations/0025_set_get_last_by.py | resolwe/flow/migrations/0025_set_get_last_by.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-15 12:42
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('flow', '0024_add_relations'),
]
operations = [
migrations.AlterModelOptions(
name='collection',
options={'default_permissions': (), 'get_latest_by': 'version', 'permissions': (('view_collection', 'Can view collection'), ('edit_collection', 'Can edit collection'), ('share_collection', 'Can share collection'), ('download_collection', 'Can download files from collection'), ('add_collection', 'Can add data objects to collection'), ('owner_collection', 'Is owner of the collection'))},
),
migrations.AlterModelOptions(
name='data',
options={'default_permissions': (), 'get_latest_by': 'version', 'permissions': (('view_data', 'Can view data'), ('edit_data', 'Can edit data'), ('share_data', 'Can share data'), ('download_data', 'Can download files from data'), ('owner_data', 'Is owner of the data'))},
),
migrations.AlterModelOptions(
name='descriptorschema',
options={'default_permissions': (), 'get_latest_by': 'version', 'permissions': (('view_descriptorschema', 'Can view descriptor schema'), ('edit_descriptorschema', 'Can edit descriptor schema'), ('share_descriptorschema', 'Can share descriptor schema'), ('owner_descriptorschema', 'Is owner of the description schema'))},
),
migrations.AlterModelOptions(
name='entity',
options={'default_permissions': (), 'get_latest_by': 'version', 'permissions': (('view_entity', 'Can view entity'), ('edit_entity', 'Can edit entity'), ('share_entity', 'Can share entity'), ('download_entity', 'Can download files from entity'), ('add_entity', 'Can add data objects to entity'), ('owner_entity', 'Is owner of the entity'))},
),
migrations.AlterModelOptions(
name='process',
options={'default_permissions': (), 'get_latest_by': 'version', 'permissions': (('view_process', 'Can view process'), ('share_process', 'Can share process'), ('owner_process', 'Is owner of the process'))},
),
migrations.AlterModelOptions(
name='storage',
options={'default_permissions': (), 'get_latest_by': 'version'},
),
]
| apache-2.0 | Python |
|
7b2e28f9604347ff396b220c8d2ab7bdfdc671c8 | test hbase TSocket | svebk/DeepSentiBank_memex,svebk/DeepSentiBank_memex,svebk/DeepSentiBank_memex,svebk/DeepSentiBank_memex | test/test_hbase_TSocker0Err32/test_hbase.py | test/test_hbase_TSocker0Err32/test_hbase.py | import happybase
# gives error
# TSocket read 0 bytes
# [Errno 32] Broken pipe
if __name__ == "__main__":
conn = happybase.Connection(host="10.1.94.57")
table_name = "escorts_images_sha1_infos_dev"
hbase_table = conn.table(table_name)
batch_list_queries = ["000421227D83DA48DB4A417FCEFCA68272398B8E"]
rows = hbase_table.rows(batch_list_queries)
print rows
| bsd-2-clause | Python |
|
6d8e47f0b1bc70de7464303d6ac3b7684588a7aa | Add mpmodel | yukirin/RegionTF | mpmodel/mpmodel.py | mpmodel/mpmodel.py | import tensorflow as tf
| mit | Python |
|
ad6e67d382df1018e4ae55ebdcb6fae1cca9bffe | Add merge migration | saradbowman/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,icereval/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,adlius/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,chennan47/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,sloria/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,binoculars/osf.io,baylee-d/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,aaxelb/osf.io,mattclark/osf.io,adlius/osf.io,baylee-d/osf.io,sloria/osf.io,felliott/osf.io,erinspace/osf.io,binoculars/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,sloria/osf.io,cslzchen/osf.io,felliott/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,mfraezz/osf.io,mattclark/osf.io,mfraezz/osf.io,cslzchen/osf.io,cslzchen/osf.io,adlius/osf.io,adlius/osf.io,erinspace/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,chennan47/osf.io,pattisdr/osf.io,saradbowman/osf.io,erinspace/osf.io,binoculars/osf.io,caseyrollins/osf.io,icereval/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io | osf/migrations/0081_merge_20180212_0949.py | osf/migrations/0081_merge_20180212_0949.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-02-12 15:49
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0080_ensure_schemas'),
('osf', '0079_merge_20180202_1206'),
]
operations = [
]
| apache-2.0 | Python |
|
ec6b65513baa4532af7cad1bd6c98e162b3db9ef | Add multiprocessing example | imitrichev/cantera,imitrichev/cantera,imitrichev/cantera,Heathckliff/cantera,Heathckliff/cantera,imitrichev/cantera,imitrichev/cantera,Heathckliff/cantera,Heathckliff/cantera,imitrichev/cantera,Heathckliff/cantera,Heathckliff/cantera | interfaces/cython/cantera/examples/transport/multiprocessing_viscosity.py | interfaces/cython/cantera/examples/transport/multiprocessing_viscosity.py | """
This example demonstrates how Cantera can be used with the 'multiprocessing'
module.
Because Cantera Python objects are built on top of C++ objects which cannot be
passed between Python processes, it is necessary to set up the computation so
that each process has its own copy of the relevant Cantera objects. One way to
do this is by storing the objects in (module) global variables, which are
initialized once per worker process.
"""
import multiprocessing
import numpy as np
import cantera as ct
import itertools
from time import time
# Global storage for Cantera Solution objects
gases = {}
def init_process(mech):
"""
This function is called once for each process in the Pool. We use it to
initialize any Cantera objects we need to use.
"""
gases[mech] = ct.Solution(mech)
gases[mech].transport_model = 'Multi'
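    # After Pool(..., initializer=init_process, initargs=(mech,)) runs this in
    # each worker, every process holds its own Solution object in gases[mech],
    # so no Cantera state ever crosses a process boundary.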
def get_thermal_conductivity(args):
# Pool.imap only permits a single argument, so we pack all of the needed
# arguments into the tuple 'args'
mech, T, P, X = args
gas = gases[mech]
gas.TPX = T, P, X
return gas.thermal_conductivity
def get_viscosity(args):
# Pool.imap only permits a single argument, so we pack all of the needed
# arguments into the tuple 'args'
mech, T, P, X = args
gas = gases[mech]
gas.TPX = T, P, X
    return gas.viscosity
def parallel(mech, predicate, nProcs, nTemps):
"""
Call the function ``predicate`` on ``nProcs`` processors for ``nTemps``
different temperatures.
"""
P = ct.one_atm
X = 'CH4:1.0, O2:1.0, N2:3.76'
pool = multiprocessing.Pool(processes=nProcs,
initializer=init_process,
initargs=(mech,))
y = pool.map(predicate,
zip(itertools.repeat(mech),
np.linspace(300, 900, nTemps),
itertools.repeat(P),
itertools.repeat(X)))
return y
def serial(mech, predicate, nTemps):
P = ct.one_atm
X = 'CH4:1.0, O2:1.0, N2:3.76'
init_process(mech)
y = map(predicate,
zip(itertools.repeat(mech),
np.linspace(300, 900, nTemps),
itertools.repeat(P),
itertools.repeat(X)))
return y
if __name__ == '__main__':
# For functions where the work done in each subprocess is substantial,
# significant speedup can be obtained using the multiprocessing module.
print('Thermal conductivity')
t1 = time()
parallel('gri30.xml', get_thermal_conductivity, 4, 1000)
t2 = time()
print('Parallel: {0:.3f} seconds'.format(t2-t1))
t1 = time()
serial('gri30.xml', get_thermal_conductivity, 1000)
t2 = time()
print('Serial: {0:.3f} seconds'.format(t2-t1))
# On the other hand, if the work done per call to the predicate function is
# small, there may be no advantage to using multiprocessing.
print('\nViscosity')
t1 = time()
parallel('gri30.xml', get_viscosity, 4, 1000)
t2 = time()
print('Parallel: {0:.3f} seconds'.format(t2-t1))
t1 = time()
serial('gri30.xml', get_viscosity, 1000)
t2 = time()
print('Serial: {0:.3f} seconds'.format(t2-t1))
| bsd-3-clause | Python |
|
3fbf2c29a54225e7d4dd882637e68cfe3a4d0101 | Add some tests for Message Queue | asteroide/immo_spider,asteroide/immo_spider,asteroide/immo_spider,asteroide/immo_spider | src/cobwebs/tests/test_mq.py | src/cobwebs/tests/test_mq.py | from cobwebs.mq.core import RPCLink, TopicsLink
from cobwebs.mq.backends.rabbitmq import driver
import pytest
import spider
import json
from unittest import mock
HOST = "127.0.0.1"
def test_driver_instance():
assert isinstance(driver.rpc, RPCLink)
assert isinstance(driver.topics, TopicsLink)
@mock.patch("cobwebs.mq.backends.rabbitmq")
def test_rpc(rabbitmq):
request = {"action": "list", "data": None}
result = rabbitmq.rpc.send("db_driver", json.dumps(request), HOST)
rabbitmq.rpc.send.assert_called_with("db_driver", json.dumps(request), HOST)
@mock.patch("cobwebs.mq.backends.rabbitmq")
def test_topic(rabbitmq):
result = rabbitmq.topic.emit(key="test", message="this is just a message")
rabbitmq.topic.emit.assert_called_with(key="test",
message="this is just a message")
| apache-2.0 | Python |
|
eca48495bdba121a0719bb442f5ec30b70233e74 | Add a snippet (Python OpenCV). | jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets | python/opencv/opencv_2/gui/opencv_trackbar.py | python/opencv/opencv_2/gui/opencv_trackbar.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Trackbar widget.
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_trackbar/py_trackbar.html#trackbar
"""
from __future__ import print_function
import cv2 as cv
import numpy as np
import argparse
def trackbar1_cb(x):
pass
def trackbar2_cb(x):
pass
def main():
# Parse the programm options (get the path of the image file to read) #####
parser = argparse.ArgumentParser(description='An opencv snippet.')
parser.add_argument("--cameraid", "-i", help="The camera ID number (default: 0)", type=int, default=0, metavar="INTEGER")
args = parser.parse_args()
device_number = args.cameraid
# OpenCV ##################################################################
video_capture = cv.VideoCapture(device_number)
# Create a window
window_name = "Threshold Bin"
cv.namedWindow(window_name)
# Create trackbars
trackbar1_name = "Threshold"
trackbar1_window_name = window_name
trackbar1_default_value = 127
trackbar1_maximum_value = 255
trackbar1_callback_function = trackbar1_cb # Executed everytime trackbar value changes
cv.createTrackbar(trackbar1_name, trackbar1_window_name, trackbar1_default_value, trackbar1_maximum_value, trackbar1_callback_function)
trackbar2_name = "Max value"
trackbar2_window_name = window_name
trackbar2_default_value = 255
trackbar2_maximum_value = 255
trackbar2_callback_function = trackbar2_cb # Executed everytime trackbar value changes
cv.createTrackbar(trackbar2_name, trackbar2_window_name, trackbar2_default_value, trackbar2_maximum_value, trackbar2_callback_function)
print("Press q to quit.")
while(True):
# Capture frame-by-frame.
# 'ret' is a boolean ('True' if frame is read correctly, 'False' otherwise).
        # 'img_bgr' is a NumPy array holding the captured frame.
ret, img_bgr = video_capture.read()
# IMAGE PROCESSING ################################
# Convert BGR color space to Grayscale
img_gray = cv.cvtColor(img_bgr, cv.COLOR_BGR2GRAY)
# Threshold the Grayscale image: dst_i = (src_i > threshold_value) ? max_val : 0
threshold_value = cv.getTrackbarPos(trackbar1_name, trackbar1_window_name)
max_val = cv.getTrackbarPos(trackbar2_name, trackbar2_window_name)
ret, img_threshold_bin = cv.threshold(img_gray, threshold_value, max_val, cv.THRESH_BINARY)
# DISPLAY IMAGES ##################################
# Display the resulting frame (BGR)
cv.imshow('BGR (orignal)', img_bgr)
# Display the resulting frames (Threshold)
cv.imshow(window_name, img_threshold_bin)
# KEYBOARD LISTENER ###############################
if cv.waitKey(1) & 0xFF == ord('q'):
break
video_capture.release()
cv.destroyAllWindows()
if __name__ == '__main__':
main()
| mit | Python |
|
9469bcf60a199b96d1fec778c44346df744a1d60 | add jieba | Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python | jieba/test_jieba.py | jieba/test_jieba.py | #!/usr/bin/env python
# encoding=utf-8
import jieba
seg_list = jieba.cut("我来到北京清华大学", cut_all=True)
print("Full Mode: " + "/ ".join(seg_list)) # 全模式
seg_list = jieba.cut("我来到北京清华大学", cut_all=False)
print("Default Mode: " + "/ ".join(seg_list)) # 精确模式
seg_list = jieba.cut("他来到了网易杭研大厦") # 默认是精确模式
print(", ".join(seg_list))
# search-engine mode
seg_list = jieba.cut_for_search("小明硕士毕业于中国科学院计算所,后在日本京都大学深造")
print(", ".join(seg_list))
| mit | Python |
|
291e7c8b2a69f26f6343269aaac2b9e3cd517220 | Add tests | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | readthedocs/proxito/tests/test_proxied_api.py | readthedocs/proxito/tests/test_proxied_api.py | from readthedocs.rtd_tests.tests.test_footer import TestFooterHTML
from django.test import override_settings
@override_settings(ROOT_URLCONF='readthedocs.proxito.urls')
class TestProxiedFooterHTML(TestFooterHTML):
def setUp(self):
super().setUp()
self.host = 'pip.readthedocs.io'
def render(self):
r = self.client.get(self.url, HTTP_HOST=self.host)
return r
| mit | Python |
|
081b5aabae205ad7c23c512be15ee26276dc8a29 | Check whether Azure CLI is in ARM mode | GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,meteorfox/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,meteorfox/PerfKitBenchmarker | perfkitbenchmarker/providers/azure/util.py | perfkitbenchmarker/providers/azure/util.py | # Copyright 2016 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Verify that Azure CLI is in arm mode."""
from perfkitbenchmarker import events
from perfkitbenchmarker import providers
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import azure
class BadAzureCLIModeError(Exception):
pass
def _CheckAzureCLIMode(sender):
assert sender == providers.AZURE, sender
stdout, _ = vm_util.IssueRetryableCommand(
[azure.AZURE_PATH, 'config'])
if 'Current Mode: arm' not in stdout:
raise BadAzureCLIModeError('Azure CLI may not be in ARM mode.')
events.provider_imported.connect(_CheckAzureCLIMode, providers.AZURE,
weak=False)
| apache-2.0 | Python |
|
0d3ca7371bcf4d0d7b7db75e7e9130deefd706cb | add visualize.py | YusukeSuzuki/castanea | castanea/layers/visualize.py | castanea/layers/visualize.py | import tensorflow as tf
palette_data = [0, 0, 0, 128, 0, 0, 0, 128, 0, 128, 128, 0,
0, 0, 128, 128, 0, 128, 0, 128, 128, 128, 128, 128,
64, 0, 0, 192, 0, 0, 64, 128, 0, 192, 128, 0,
64, 0, 128, 192, 0, 128, 64, 128, 128, 192, 128, 128,
0, 64, 0, 128, 64, 0, 0, 192, 0, 128, 192, 0,
0, 64, 128, 128, 64, 128, 0, 192, 128, 128, 192, 128,
64, 64, 0, 192, 64, 0, 64, 192, 0, 192, 192, 0,
64, 64, 128, 192, 64, 128, 64, 192, 128, 192, 192, 128,
0, 0, 64, 128, 0, 64, 0, 128, 64, 128, 128, 64,
0, 0, 192, 128, 0, 192, 0, 128, 192, 128, 128, 192,
64, 0, 64, 192, 0, 64, 64, 128, 64, 192, 128, 64,
64, 0, 192, 192, 0, 192, 64, 128, 192, 192, 128, 192,
0, 64, 64, 128, 64, 64, 0, 192, 64, 128, 192, 64,
0, 64, 192, 128, 64, 192, 0, 192, 192, 128, 192, 192,
64, 64, 64, 192, 64, 64, 64, 192, 64, 192, 192,
64, 64, 64, 192, 192, 64, 192, 64, 192, 192, 192, 192,
192, 32, 0, 0, 160, 0, 0, 32, 128, 0, 160, 128,
0, 32, 0, 128, 160, 0, 128, 32, 128, 128, 160, 128,
128, 96, 0, 0, 224, 0, 0, 96, 128, 0, 224, 128,
0, 96, 0, 128, 224, 0, 128, 96, 128, 128, 224, 128, 128,
32, 64, 0, 160, 64, 0, 32, 192, 0, 160, 192, 0, 32,
64, 128, 160, 64, 128, 32, 192, 128, 160, 192, 128, 96, 64,
0, 224, 64, 0, 96, 192, 0, 224, 192, 0, 96, 64, 128,
224, 64, 128, 96, 192, 128, 224, 192, 128, 32, 0, 64,
160, 0, 64, 32, 128, 64, 160, 128, 64, 32, 0, 192,
160, 0, 192, 32, 128, 192, 160, 128, 192, 96, 0, 64,
224, 0, 64, 96, 128, 64, 224, 128, 64, 96, 0, 192,
224, 0, 192, 96, 128, 192, 224, 128, 192, 32, 64, 64,
160, 64, 64, 32, 192, 64, 160, 192, 64, 32, 64, 192,
160, 64, 192, 32, 192, 192, 160, 192, 192, 96, 64, 64, 224,
64, 64, 96, 192, 64, 224, 192, 64, 96, 64, 192, 224, 64, 192,
96, 192, 192, 224, 192, 192, 0, 32, 0, 128, 32, 0, 0,
160, 0, 128, 160, 0, 0, 32, 128, 128, 32, 128, 0,
160, 128, 128, 160, 128, 64, 32, 0, 192, 32, 0, 64,
160, 0, 192, 160, 0, 64, 32, 128, 192, 32, 128, 64, 160,
128, 192, 160, 128, 0, 96, 0, 128, 96, 0, 0, 224,
0, 128, 224, 0, 0, 96, 128, 128, 96, 128, 0, 224, 128,
128, 224, 128, 64, 96, 0, 192, 96, 0, 64, 224, 0,
192, 224, 0, 64, 96, 128, 192, 96, 128, 64, 224, 128, 192,
224, 128, 0, 32, 64, 128, 32, 64, 0, 160, 64, 128,
160, 64, 0, 32, 192, 128, 32, 192, 0, 160, 192, 128,
160, 192, 64, 32, 64, 192, 32, 64, 64, 160, 64, 192,
160, 64, 64, 32, 192, 192, 32, 192, 64, 160, 192, 192,
160, 192, 0, 96, 64, 128, 96, 64, 0, 224, 64, 128,
224, 64, 0, 96, 192, 128, 96, 192, 0, 224, 192, 128,
224, 192, 64, 96, 64, 192, 96, 64, 64, 224, 64, 192,
224, 64, 64, 96, 192, 192, 96, 192, 64, 224, 192, 192,
224, 192, 32, 32, 0, 160, 32, 0, 32, 160, 0, 160,
160, 0, 32, 32, 128, 160, 32, 128, 32, 160, 128, 160,
160, 128, 96, 32, 0, 224, 32, 0, 96, 160, 0, 224,
160, 0, 96, 32, 128, 224, 32, 128, 96, 160, 128, 224,
160, 128, 32, 96, 0, 160, 96, 0, 32, 224, 0, 160,
224, 0, 32, 96, 128, 160, 96, 128, 32, 224, 128, 160,
224, 128, 96, 96, 0, 224, 96, 0, 96, 224, 0, 224,
224, 0, 96, 96, 128, 224, 96, 128, 96, 224, 128, 224,
224, 128, 32, 32, 64, 160, 32, 64, 32, 160, 64, 160,
160, 64, 32, 32, 192, 160, 32, 192, 32, 160, 192, 160,
160, 192, 96, 32, 64, 224, 32, 64, 96, 160, 64, 224,
160, 64, 96, 32, 192, 224, 32, 192, 96, 160, 192, 224,
160, 192, 32, 96, 64, 160, 96, 64, 32, 224, 64, 160,
224, 64, 32, 96, 192, 160, 96, 192, 32, 224, 192, 160,
224, 192, 96, 96, 64, 224, 96, 64, 96, 224, 64, 224,
224, 64, 96, 96, 192, 224, 96, 192, 96, 224, 192, 224,
224, 192]
def visualize_segmentation(segments, argmax=False):
if argmax:
segments = tf.argmax(segments, axis=3)
palette = tf.reshape(palette_data, [-1, 3])
shape = tf.shape(segments)
indices = tf.reshape(segments, [shape[0], -1, 1])
images = tf.gather_nd(params=palette, indices=indices)
images = tf.reshape(images, [shape[0], shape[1], shape[2], 3])
images = tf.cast(images, tf.uint8)
return images
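# Minimal usage sketch (shapes assumed): given per-pixel class logits of shape
# [batch, height, width, num_classes], argmax=True picks each pixel's class
# index and maps it to an RGB colour from palette_data:
#
#   logits = tf.zeros([1, 4, 4, 21])
#   images = visualize_segmentation(logits, argmax=True)  # uint8, [1, 4, 4, 3]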
| mit | Python |
|
59edefb410b932a648347f76ca9a96013b40a08e | Add solution 303 | byung-u/ProjectEuler | Problem_300_399/euler_303.py | Problem_300_399/euler_303.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Problem 303
For a positive integer n, define f(n) as the least positive multiple of n that, written in base 10, uses only digits ≤ 2.
Thus f(2)=2, f(3)=12, f(7)=21, f(42)=210, f(89)=1121222.
Also, sum(f(n)/n for n = 1..100) = 11363107.
Find sum(f(n)/n for n = 1..10000).
'''
from itertools import cycle, product
from functools import reduce
'''
mul = [ [1],
[1, 1, 8],
[1, 4, 1, 4],
[4, 3, 3],
[3, 2, 3, 2],
[2],
[2, 3, 2, 3],
[3, 3, 4],
[4, 1, 4, 1],
[8, 1, 1]]
def digit_012_check(n):
while n != 0:
d, m = divmod(n, 10)
if m > 2:
return False
n = d
return True
def fn(n):
if digit_012_check(n):
print([n], '-', 1, n)
return 1
#mul = [ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
# [0, 1, 2],
# [0, 1, 5, 6],
# [0, 4, 7],
# [0, 3, 5, 8],
# [0, 2, 4, 6, 8],
# [0, 2, 5, 7,],
# [0, 3, 6],
# [0, 4, 5, 9],
# [0, 8, 9]]
j = 0
for i in cycle(mul[n % 10]):
j += i
m = n * j
if digit_012_check(m):
print([n], i, j, m)
return j
'''
def p303(): # Answer: 1111981904675169, pretty awful though
L = 10000 + 1
check = [x for x in range(3, L)]
result = [0] * L
result[0] = 1
result[1] = 1
result[2] = 2
# run and check only 9990
# Found 111333555778 * 9990 = 1112222222222220
result[9990] = 1112222222222220
# by hand
# 9990 answer -> 111333555778
# attach [1] -> 1111333555778
# attach [3] -> 11113333555778
# attach [5] -> 111133335555778
# attach [7] -> 1111333355557778
# found -> 1111333355557778
result[9999] = 11112222222222222222
check.remove(9990)
check.remove(9999)
for i in product([0, 1, 2], repeat=30):
n = int(reduce(lambda x, y: str(x) + str(y), i))
temp = []
for c in check:
if n % c == 0:
if n == 0:
break
result[c] = n
temp.append(c)
# print([n], c, len(check), check)
for t in temp:
check.remove(t)
if 0 not in result:
break
total = 0
for i in range(1, len(result)):
# print([i], result[i])
total += result[i] // i
print(total)
p303()
| mit | Python |
|
6705e0e23d13a94726556714e11dfbb7a916877d | Add basic mechanism to override the default EntryAdmin | django-blog-zinnia/zinnia-wysiwyg-wymeditor,layar/zinnia-wysiwyg-wymeditor,django-blog-zinnia/zinnia-wysiwyg-wymeditor,layar/zinnia-wysiwyg-wymeditor,layar/zinnia-wysiwyg-wymeditor,django-blog-zinnia/zinnia-wysiwyg-wymeditor,layar/zinnia-wysiwyg-wymeditor,django-blog-zinnia/zinnia-wysiwyg-wymeditor | zinnia_wymeditor/admin.py | zinnia_wymeditor/admin.py | """EntryAdmin for zinnia-wymeditor"""
from django.contrib import admin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
class EntryAdminWYMEditorMixin(object):
"""
Mixin adding WYMeditor for editing Entry.content field.
"""
pass
class EntryAdminWYMEditor(EntryAdminWYMEditorMixin,
EntryAdmin):
"""
Enrich the default EntryAdmin with WYMEditor.
"""
pass
admin.site.unregister(Entry)
admin.site.register(Entry, EntryAdminWYMEditor)
| bsd-3-clause | Python |
|
6193786bb2307550ab9dfb9c218f6d8b3f407156 | Create is-graph-bipartite.py | kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015 | Python/is-graph-bipartite.py | Python/is-graph-bipartite.py | # Time: O(|V| + |E|)
# Space: O(|V|)
# Given a graph, return true if and only if it is bipartite.
#
# Recall that a graph is bipartite if we can split it's set of nodes into
# two independent subsets A and B such that every edge in the graph has
# one node in A and another node in B.
#
# The graph is given in the following form: graph[i] is a list of indexes j
# for which the edge between nodes i and j exists.
# Each node is an integer between 0 and graph.length - 1.
# There are no self edges or parallel edges: graph[i] does not contain i,
# and it doesn't contain any element twice.
#
# Example 1:
# Input: [[1,3], [0,2], [1,3], [0,2]]
# Output: true
# Explanation:
# The graph looks like this:
# 0----1
# | |
# | |
# 3----2
# We can divide the vertices into two groups: {0, 2} and {1, 3}.
#
# Example 2:
# Input: [[1,2,3], [0,2], [0,1,3], [0,2]]
# Output: false
# Explanation:
# The graph looks like this:
# 0----1
# | \ |
# | \ |
# 3----2
# We cannot find a way to divide the set of nodes into two independent subsets.
#
# Note:
# - graph will have length in range [1, 100].
# - graph[i] will contain integers in range [0, graph.length - 1].
# - graph[i] will not contain i or duplicate values.
class Solution(object):
def isBipartite(self, graph):
"""
:type graph: List[List[int]]
:rtype: bool
"""
color = {}
for node in xrange(len(graph)):
if node in color:
continue
stack = [node]
color[node] = 0
while stack:
curr = stack.pop()
for neighbor in graph[curr]:
if neighbor not in color:
stack.append(neighbor)
color[neighbor] = color[curr] ^ 1
elif color[neighbor] == color[curr]:
return False
return True
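# Worked check against the examples above:
#   Solution().isBipartite([[1,3], [0,2], [1,3], [0,2]])      -> True
#   Solution().isBipartite([[1,2,3], [0,2], [0,1,3], [0,2]])  -> False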
| mit | Python |
|
3204227799ce5f7a7d0df4cb6b480b42d6cdae1f | Add a snippet. | jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets | python/pyqt/pyqt5/widget_QPainter_OpenGL.py | python/pyqt/pyqt5/widget_QPainter_OpenGL.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# See https://doc.qt.io/archives/4.6/opengl-2dpainting.html
import sys
from PyQt5.QtWidgets import QApplication
from PyQt5.QtGui import QPainter, QBrush, QPen
from PyQt5.QtCore import Qt
from PyQt5.QtOpenGL import QGLWidget
class MyPaintWidget(QGLWidget):
def __init__(self):
super().__init__()
# Set window background color
self.setAutoFillBackground(True)
palette = self.palette()
palette.setColor(self.backgroundRole(), Qt.white)
self.setPalette(palette)
def paintEvent(self, event):
qp = QPainter(self)
qp.setPen(QPen(Qt.black, 5, Qt.SolidLine))
qp.setBrush(QBrush(Qt.red, Qt.SolidPattern))
qp.setRenderHint(QPainter.Antialiasing) # <- Set anti-aliasing See https://wiki.python.org/moin/PyQt/Painting%20and%20clipping%20demonstration
qp.drawEllipse(100, 15, 400, 200)
qp.setBrush(QBrush(Qt.red, Qt.DiagCrossPattern))
qp.drawEllipse(600, 15, 200, 200)
if __name__ == '__main__':
app = QApplication(sys.argv)
widget = MyPaintWidget()
widget.show()
# The mainloop of the application. The event handling starts from this point.
# The exec_() method has an underscore. It is because the exec is a Python keyword. And thus, exec_() was used instead.
exit_code = app.exec_()
# The sys.exit() method ensures a clean exit.
# The environment will be informed, how the application ended.
sys.exit(exit_code)
| mit | Python |
|
ae3bd406736f9235b442c52bf584a97d0760a588 | add api | tardyp/buildbot_travis,isotoma/buildbot_travis,tardyp/buildbot_travis,buildbot/buildbot_travis,tardyp/buildbot_travis,buildbot/buildbot_travis,isotoma/buildbot_travis,tardyp/buildbot_travis,buildbot/buildbot_travis | buildbot_travis/api.py | buildbot_travis/api.py | # Copyright 2012-2013 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from buildbot import config
from klein import Klein
from twisted.internet import defer
from twisted.internet import threads
import yaml
import json
from buildbot.util.eventual import eventually
def getDbConfigObjectId(master, name="config"):
return master.db.state.getObjectId(name, "DbConfig")
class Api(object):
app = Klein()
_yamlPath = None
_useDbConfig = False
_in_progress = False
def __init__(self, ep):
self.ep = ep
def setYamlPath(self, path):
self._yamlPath = path
def useDbConfig(self):
self._useDbConfig = True
def setCfg(self, cfg):
self._cfg = cfg
self._in_progress = False
@defer.inlineCallbacks
def saveCfg(self, cfg):
if self._yamlPath is not None:
cfg = yaml.safe_dump(cfg, default_flow_style=False, indent=4)
with open(self._yamlPath, "w") as f:
f.write(cfg)
if self._useDbConfig:
oid = yield getDbConfigObjectId(self.ep.master)
yield self.ep.master.db.state.setState(oid, "travis", cfg)
@app.route("/config", methods=['GET'])
def getConfig(self, request):
return json.dumps(self._cfg)
def thdCheckConfig(self):
# check the config in thread
try:
config.MasterConfig.loadConfig(self.ep.master.basedir, self.ep.master.configFileName)
except config.ConfigErrors, e:
return e.errors
return None
@app.route("/config", methods=['PUT'])
@defer.inlineCallbacks
def saveConfig(self, request):
"""I save the config, and run check_config, potencially returning errors"""
request.setHeader('Content-Type', 'application/json')
if self._in_progress:
defer.returnValue(json.dumps({'success': False, 'errors': ['reconfig already in progress']}))
self._in_progress = True
cfg = json.loads(request.content.read())
if cfg != self._cfg:
yield self.saveCfg(cfg)
try:
err = yield threads.deferToThread(self.thdCheckConfig)
except Exception as e:
err = [repr(e)]
if err is not None:
self._in_progress = False
yield self.saveCfg(self._cfg)
defer.returnValue(json.dumps({'success': False, 'errors': err}))
yield self.ep.master.reconfig()
defer.returnValue(json.dumps({'success': True}))
| unknown | Python |
|
d19ab50f2d3b259bd6c5cfb21b4087ca4d3ec248 | create theano 2 | wangwei7175878/tutorials | theanoTUT/theano2_install.py | theanoTUT/theano2_install.py | # View more python tutorials on my Youtube and Youku channel!!!
# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial
# 2 - Install theano
"""
requirements:
1. python 2 >=2.6 or python 3>=3.3
2. Numpy >= 1.7.1
3. Scipy >=0.11
If using CPU, no other requirement.
But if using GPU, you will need NVIDIA CUDA drivers and SDK.
The must easy way to install theano is to use pip install.
1. open your terminal (MacOS and Linux), or your command window (Windows)
2. type "pip install theano" (for python 2x); type "pip3 install theano" (for python 3x)
Note: to install theano on Windows machine may be a little bit stuggling. If you encounter any
problem, please refer to this web page:
http://deeplearning.net/software/theano/install_windows.html#install-windows
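
A quick post-install sanity check (a minimal sketch; assumes the pip install
above succeeded):

    >>> import theano
    >>> print(theano.__version__)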
""" | mit | Python |
|
389adca1fd52747814f370de2d066a1743544469 | Solve Game Time in python | deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground | solutions/beecrowd/1046/1046.py | solutions/beecrowd/1046/1046.py | start, end = map(int, input().split())
if start == end:
result = 24
elif end - start >= 0:
result = end - start
else:
result = 24 + end - start
print(f'O JOGO DUROU {result} HORA(S)')
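# Equivalent one-liner (a sketch of the same logic): the three branches above
# collapse to modular arithmetic, since the duration is 0 mod 24 exactly when
# the match lasts a full day:
#   result = (end - start) % 24 or 24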
| mit | Python |
|
3a9627f31846e06e04d7ae933712840d52616663 | Create main.py | jakovj/SuperHeroRush | main.py | main.py | import pygame
import game
file = 'music.mp3'
pygame.init()
pygame.mixer.init()
pygame.mixer.music.load(file)
pygame.mixer.music.play(loops=-1)
pygame.mixer.music.set_volume(0.5)
run = True
SuperHeroTower = game.Game()
while run:
run = SuperHeroTower.startScreen()
pygame.quit()
quit()
| mit | Python |
|
bc3f7e83bd35f1a6ae8add35932513c7da47076e | fix a typo. | uw-it-aca/uw-restclients,UWIT-IAM/uw-restclients,uw-it-aca/uw-restclients,uw-it-cte/uw-restclients,UWIT-IAM/uw-restclients,UWIT-IAM/uw-restclients,uw-it-cte/uw-restclients,uw-it-cte/uw-restclients | restclients/test/util/datetime_convertor.py | restclients/test/util/datetime_convertor.py | from django.test import TestCase
from datetime import date, datetime
from restclients.util.datetime_convertor import convert_to_begin_of_day,\
convert_to_end_of_day
class DatetimeConvertorTest(TestCase):
def test_convert_to_begin_of_day(self):
self.assertEquals(convert_to_begin_of_day(date(2013, 4, 9)),
datetime(2013, 4, 9, 0, 0, 0))
self.assertEquals(
convert_to_begin_of_day(datetime(2013, 4, 9, 10, 10, 10)),
datetime(2013, 4, 9, 0, 0, 0))
def test_convert_to_end_of_day(self):
self.assertEquals(convert_to_end_of_day(date(2012, 2, 28)),
datetime(2012, 2, 29, 0, 0, 0))
self.assertEquals(
convert_to_end_of_day(datetime(2012, 2, 28, 10, 10, 10)),
datetime(2012, 2, 29, 0, 0, 0))
| from django.test import TestCase
from datetime import date, datetime
from restclients.util.datetime_convertor import convert_to_begin_of_day,\
convert_to_end_of_day
class DatetimeConvertorTest(TestCase):
def test_convert_to_begin_of_day(self):
self.assertEquals(convert_to_begin_of_day(date(2013, 4, 9)),
datetime(2013, 4, 9, 0, 0, 0))
self.assertEquals(
convert_to_begin_of_day(datetime(2013, 4, 9, 10, 10, 10)),
datetime(2013, 4, 9, 0, 0, 0))
def test_convert_to_end_of_day(self):
self.assertEquals(convert_to_end_of_day(date(2012, 2, 28)),
datetime(2013, 2, 29, 0, 0, 0))
self.assertEquals(
convert_to_end_of_day(datetime(2012, 2, 28, 10, 10, 10)),
datetime(2012, 2, 29, 0, 0, 0))
| apache-2.0 | Python |
3b4f7b9792be0315aca7d71fafe1a972e5fd87f7 | Add Seh_bug_fuzzer.py | b09780978/SEH_Fuzzer | SEH_Fuzzer/Seh_bug_fuzzer.py | SEH_Fuzzer/Seh_bug_fuzzer.py | # -*- coding: utf-8 -*-
import time
import sys
import socket
import cPickle
import os
from pydbg import *
from pydbg.defines import *
from util import *
PICKLE_NAME = "fsws_phase1.pkl"
exe_path = "D:\\testPoc\\Easy File Sharing Web Server\\fsws.exe"
import threading
import time
host, port = "127.0.0.1", 80
global Running
global Crash
global lock
global chance
global MAX_OFFSET
global OFFSET
chance = 2
Running = True
Crash = False
lock = threading.Lock()
def check_access_validation(dbg):
global chance
global Running
global lock
with lock:
if dbg.dbg.u.Exception.dwFirstChance:
chance -= 1
            # Stop the monitor thread from testing the next size.
Running = False
if chance==0:
Running = False
seh, nseh = dbg.seh_unwind()[0]
seh_offset = pattern_find(seh, MAX_OFFSET)
print "[+] crash in %d words" % OFFSET
print "[+] seh offset %s." % seh_offset
with open(PICKLE_NAME, "wb") as phase_file:
cPickle.dump(OFFSET, phase_file)
cPickle.dump(seh_offset, phase_file)
cPickle.dump(seh, phase_file)
cPickle.dump(nseh, phase_file)
with open("crash.txt", "w") as f:
f.write("seh: 0x%08x\n" % seh)
f.write("nseh: 0x%08x\n" % nseh)
f.write(dbg.dump_context(stack_depth=1000))
dbg.terminate_process()
return DBG_EXCEPTION_NOT_HANDLED
else:
Running = True
return DBG_EXCEPTION_NOT_HANDLED
return DBG_EXCEPTION_NOT_HANDLED
class Fuzzer(object):
def __init__(self, exe_path, max_offset = 8000):
self.exe_path = exe_path
self.pid = None
self.dbg = None
global MAX_OFFSET
MAX_OFFSET = max_offset
# self.running = True
self.dbgThread = threading.Thread(target=self.start_debugger)
self.dbgThread.setDaemon(False)
self.dbgThread.start()
# Wait debugger start process
while self.pid is None:
time.sleep(1)
self.monitorThread = threading.Thread(target=self.monitor_debugger)
self.monitorThread.setDaemon(False)
self.monitorThread.start()
def monitor_debugger(self):
global Running
global OFFSET
test_words = 0
raw_input("[+] Please start the debugger...")
while Running and MAX_OFFSET>test_words:
with lock:
if not Running:
break
test_words += 100
OFFSET = test_words
print "[+] test %d words" % test_words
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
buffer = pattern_create(test_words)
httpreq = (
"GET /changeuser.ghp HTTP/1.1\r\n"
"User-Agent: Mozilla/4.0\r\n"
"Host:" + host + ":" + str(port) + "\r\n"
"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n"
"Accept-Language: en-us\r\n"
"Accept-Encoding: gzip, deflate\r\n"
"Referer: http://" + host + "/\r\n"
"Cookie: SESSIONID=6771; UserID=" + buffer + "; PassWD=;\r\n"
"Conection: Keep-Alive\r\n\r\n"
)
s.send(httpreq)
s.close()
# prevent execute to fast.
time.sleep(1)
if not os.path.isfile(PICKLE_NAME):
print "[+] No found bug."
Running = False
self.dbg.terminate_process()
else:
print "[+] Find bug."
'''
Try to start debugger and run it.
'''
def start_debugger(self):
try:
self.dbg = pydbg()
self.dbg.load(self.exe_path)
self.pid = self.dbg.pid
except pdx:
print "[+] Can't open file, please check file path"
sys.exit(1)
except Exception as e:
print "[+] Unknow error: ", str(e)
sys.exit(1)
self.dbg.set_callback(EXCEPTION_ACCESS_VIOLATION, check_access_validation)
self.dbg.run()
exe_path = "D:\\testPoc\\Easy File Sharing Web Server\\fsws.exe"
Fuzzer(exe_path) | mit | Python |
|
2199f4c5ed563200d555315b9a8575e00486e667 | Add a simple script to generate monthly confirmed / fixed counts | mysociety/fixmytransport,mysociety/fixmytransport,mysociety/fixmytransport,mysociety/fixmytransport,mysociety/fixmytransport,mysociety/fixmytransport | script/confirmed-fixed-monthly-breakdown.py | script/confirmed-fixed-monthly-breakdown.py | #!/usr/bin/python
# A script to draw graphs showing the number of confirmed reports
# created each month, and those of which that have been fixed. This
# script expects to find a file called 'problems.csv' in the current
# directory which should be generated by:
#
# DIR=`pwd` rake data:create_problem_spreadsheet
import csv
import datetime
from collections import defaultdict
import itertools
status_types = ('confirmed', 'fixed')
counts = {}
for status_type in status_types:
counts[status_type] = defaultdict(int)
today = datetime.date.today()
latest_month = earliest_month = (today.year, today.month)
maximum_count = -1
with open('problems.csv') as fp:
reader = csv.DictReader(fp, delimiter=',', quotechar='"')
for row in reader:
d = datetime.datetime.strptime(row['Created'],
'%H:%M %d %b %Y')
ym = (d.year, d.month)
earliest_month = min(earliest_month, ym)
if row['Status'] == 'confirmed':
counts['confirmed'][ym] += 1
elif row['Status'] == 'fixed':
counts['fixed'][ym] += 1
maximum_count = max(maximum_count, counts['fixed'][ym], counts['confirmed'][ym])
def months_between(earlier, later):
"""A generator for iterating over months represented as (year, month) tuples"""
year = earlier[0]
month = earlier[1]
while True:
yield (year, month)
if month == 12:
year = year + 1
month = 1
else:
month += 1
if (year, month) > later:
return
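# For example:
#   list(months_between((2012, 11), (2013, 2)))
#   == [(2012, 11), (2012, 12), (2013, 1), (2013, 2)]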
all_months = list(months_between(earliest_month, latest_month))
months = len(all_months)
# Make sure that there's at least a zero count for each month we're
# considering:
for d in counts.values():
for ym in all_months:
d[ym] += 0
with open('monthly-breakdown.csv', 'w') as fp:
writer = csv.writer(fp)
writer.writerow(['Month', 'Confirmed', 'Fixed'])
for ym in all_months:
writer.writerow(["%d-%02d" % (ym[0], ym[1]),
counts['confirmed'][ym],
counts['fixed'][ym]])
| agpl-3.0 | Python |
|
417f1832dbb6a1d0742b2f01d56429139f8885ef | add conversion script | idaholab/raven,idaholab/raven,idaholab/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,idaholab/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,idaholab/raven,joshua-cogliati-inl/raven,idaholab/raven,idaholab/raven | scripts/conversionScripts/toValidationPP.py | scripts/conversionScripts/toValidationPP.py | # Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import xml.etree.ElementTree as ET
import xml.dom.minidom as pxml
import os
def convert(tree,fileName=None):
"""
Converts input files to be compatible with merge request #1583
Restructure the Validation PostProcessor, use the subType to indicate the algorithm
    used by the Validation. Remove the specific node 'Probabilistic'.
@ In, tree, xml.etree.ElementTree.ElementTree object, the contents of a RAVEN input file
@ In, fileName, the name for the raven input file
@Out, tree, xml.etree.ElementTree.ElementTree object, the modified RAVEN input file
"""
simulation = tree.getroot()
models = simulation.find('Models')
updateTestInfo = False
if models is not None:
postProcessors = models.findall('PostProcessor')
for pp in postProcessors:
subType = pp.get('subType')
if subType == 'Validation':
prob = pp.find('Probabilistic')
if prob is not None:
pp.set('subType', prob.tag.strip())
pp.remove(prob)
updateTestInfo = True
if updateTestInfo:
TestInfo = simulation.find('TestInfo')
if TestInfo is not None:
revisions = TestInfo.find('revisions')
hasRev = True
if revisions is None:
revisions = ET.Element('revisions')
hasRev = False
rev = ET.Element('revision')
rev.attrib['author'] = 'wangc'
rev.attrib['date'] = '2021-09-28'
rev.text = 'Convert Validation PostProcessor: subType will be replaced with the Probabilistic node tag, and Probabilistic node is removed'
revisions.append(rev)
if not hasRev:
TestInfo.append(revisions)
return tree
if __name__=='__main__':
import convert_utils
import sys
convert_utils.standardMain(sys.argv,convert)
| apache-2.0 | Python |
|
bbae3e9fee30634a659276732f16a883500e8f45 | Create memcache.py | yangjiePro/cutout,jojoin/cutout,MrZhengliang/cutout | cutout/cache/memcache.py | cutout/cache/memcache.py | # -*- coding: utf-8 -*-
import os
import re
import tempfile
from time import time
from .basecache import BaseCache
from .posixemulation import rename, _items
try:
import cPickle as pickle
except ImportError:
import pickle
try:
from hashlib import md5
except ImportError:
from md5 import new as md5
class MemCache(BaseCache):
"""Simple memory cache for single process environments. This class exists
mainly for the development server and is not 100% thread safe. It tries
to use as many atomic operations as possible and no locks for simplicity
but it could happen under heavy load that keys are added multiple times.
:param threshold: the maximum number of items the cache stores before
it starts deleting some.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`.
"""
def __init__(self, threshold=500, default_timeout=300):
BaseCache.__init__(self, default_timeout)
self._cache = {}
self.clear = self._cache.clear
self._threshold = threshold
def _prune(self):
if len(self._cache) > self._threshold:
now = time()
for idx, (key, (expires, _)) in enumerate(self._cache.items()):
if expires <= now or idx % 3 == 0:
self._cache.pop(key, None)
def get(self, key):
now = time()
expires, value = self._cache.get(key, (0, None))
        if expires > now:
return pickle.loads(value)
def set(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
self._prune()
self._cache[key] = (time() + timeout, pickle.dumps(value,
pickle.HIGHEST_PROTOCOL))
def add(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
if len(self._cache) > self._threshold:
self._prune()
item = (time() + timeout, pickle.dumps(value,
pickle.HIGHEST_PROTOCOL))
self._cache.setdefault(key, item)
def delete(self, key):
self._cache.pop(key, None)
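
# Example usage (sketch; the timeout values are illustrative):
#   cache = MemCache(threshold=100, default_timeout=60)
#   cache.set('answer', 42)
#   cache.get('answer')     # -> 42 until the 60 s timeout expires
#   cache.delete('answer')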
| mit | Python |
|
bd2a70930ba67f3dd510b172fe4e00ddc2dc23c2 | Create voxelmodel.py | hi9hlander/odvm | odvm/voxelmodel.py | odvm/voxelmodel.py | from panda3d.core import *
from odvm.quads import Quads
class VoxelModel(Geom):
def __init__(self):
Geom.__init__( self, GeomVertexData( 'vertices', GeomVertexFormat.get_v3n3c4(), Geom.UH_static ) )
self.quads = Quads(self)
self.add_primitive(self.quads)
def add(self,p2s,i,j,k,c,p2i=0,p2j=0,p2k=0):
di = 1 << p2i
dj = 1 << p2j
dk = 1 << p2k
self.quads.add( 1<<p2s,i,j,k,
( ( ( 0, 0, 0, di, dj, 0 ), c ),
( ( 0, 0,-dk, 0, dj, 0 ), c ),
( ( di, 0, 0, di, dj,-dk ), c ),
( ( 0, 0,-dk, di, 0, 0 ), c ),
( ( 0, dj,-dk, di, 0,-dk ), c ),
( ( 0, dj, 0, di, dj,-dk ), c ) ) )
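
# Example usage (sketch; assumes a Panda3D scene graph and the Quads helper
# imported above):
#   model = VoxelModel()
#   model.add(0, 0, 0, 0, (1.0, 0.0, 0.0, 1.0))   # one red unit voxel at origin
#   node = GeomNode('voxels')
#   node.add_geom(model)
#   render.attach_new_node(node)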
| mit | Python |
|
b4d82c21995fb2b9e2afd93eea8849ded8b7d489 | Update next-greater-element-iii.py | yiwen-luo/LeetCode,yiwen-luo/LeetCode,jaredkoontz/leetcode,jaredkoontz/leetcode,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,yiwen-luo/LeetCode | Python/next-greater-element-iii.py | Python/next-greater-element-iii.py | # Time: O(logn) = O(1)
# Space: O(logn) = O(1)
# Given a positive 32-bit integer n, you need to find the smallest 32-bit integer
# which has exactly the same digits existing in the integer n and is greater in value than n.
# If no such positive 32-bit integer exists, you need to return -1.
#
# Example 1:
# Input: 12
# Output: 21
# Example 2:
# Input: 21
# Output: -1
class Solution(object):
def nextGreaterElement(self, n):
"""
:type n: int
:rtype: int
"""
digits = map(int, list(str(n)))
k, l = -1, 0
for i in xrange(len(digits) - 1):
if digits[i] < digits[i + 1]:
k = i
if k == -1:
digits.reverse()
return -1
for i in xrange(k + 1, len(digits)):
if digits[i] > digits[k]:
l = i
digits[k], digits[l] = digits[l], digits[k]
digits[k + 1:] = digits[:k:-1]
result = int("".join(map(str, digits)))
return -1 if result >= 0x7FFFFFFF else result
| # Time: O(logn)
# Space: O(logn)
# Given a positive 32-bit integer n, you need to find the smallest 32-bit integer
# which has exactly the same digits existing in the integer n and is greater in value than n.
# If no such positive 32-bit integer exists, you need to return -1.
#
# Example 1:
# Input: 12
# Output: 21
# Example 2:
# Input: 21
# Output: -1
class Solution(object):
def nextGreaterElement(self, n):
"""
:type n: int
:rtype: int
"""
digits = map(int, list(str(n)))
k, l = -1, 0
for i in xrange(len(digits) - 1):
if digits[i] < digits[i + 1]:
k = i
if k == -1:
digits.reverse()
return -1
for i in xrange(k + 1, len(digits)):
if digits[i] > digits[k]:
l = i
digits[k], digits[l] = digits[l], digits[k]
digits[k + 1:] = digits[:k:-1]
result = int("".join(map(str, digits)))
return -1 if result >= 0x7FFFFFFF else result
| mit | Python |
c6af2c9f11204dde361a9b1f8b14113e90a272b3 | add py prototype | vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam,vmiklos/vmexam | 1/hazi.py | 1/hazi.py | #!/usr/bin/env python
import sys, codecs
class Node:
def __init__(self, name, g, h):
self.name = name
self.f = g + h
self.g = g
self.h = h
def sort_node(a, b):
return cmp(a.f, b.f)
def name_in_list(y, l):
for i in l:
if y == i.name:
return True
return False
def node_from_list(y, l):
for i in l:
if y == i.name:
return i
def reconstruct_path(came_from,current_node):
if current_node in came_from.keys():
p = reconstruct_path(came_from,came_from[current_node])
return p + [current_node]
else:
return [current_node]
def a_star(start, end):
sock = codecs.open("output.txt", "w", "ISO-8859-2")
openlist = [Node(start, 0, hn[start])]
closedlist = []
count = 0
while len(openlist):
openlist.sort(cmp=sort_node)
sock.write("(:openlist %s" % count)
for i in openlist:
sock.write(" (%s %s)" % (i.f, i.name))
sock.write(")\n")
sock.write("(:closedlist %s" % count)
for i in closedlist:
sock.write(" (%s %s)" % (i.f, i.name))
sock.write(")\n")
x = openlist.pop(0)
if x.name == end:
sock.write("(:sol %s " % x.f)
sock.write(" ".join(reconstruct_path(came_from,end)))
sock.write(")\n")
return True
closedlist.append(x)
for y in gn[x.name].keys():
if name_in_list(y, closedlist):
continue
tentative_g_score = x.g + gn[x.name][y]
tentative_is_better = False
if not name_in_list(y, openlist):
openlist.append(Node(y, tentative_g_score, hn[y]))
tentative_is_better = True
elif tentative_g_score < node_from_list(y, openlist).g:
tentative_is_better = True
if tentative_is_better == True:
came_from[y] = x.name
count += 1
return False
inhn = False
ingn = False
start = None
end = None
hn = {}
gn = {}
came_from = {}
sock = codecs.open(sys.argv[1], "r", "ISO-8859-2")
for i in sock.readlines():
line = i.strip()
if line.startswith("(:start"):
start = line[8:-1]
elif line.startswith("(:end"):
end = line[6:-1]
elif line.startswith("(:hn"):
inhn = True
elif line.startswith("(:gn"):
ingn = True
elif line.startswith("("):
if inhn:
items = line[1:-1].split(' ')
hn[items[0]] = int(items[1])
elif ingn:
items = line[1:-1].split(' ')
if items[0] not in gn.keys():
gn[items[0]] = {}
gn[items[0]][items[1]] = int(items[2])
elif line.startswith(")"):
if inhn:
inhn = False
elif ingn:
ingn = False
sock.close()
a_star(start, end)
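
# Usage (sketch): python hazi.py input.txt
# Reads the Lisp-like problem description parsed above and writes the A*
# open/closed lists and the solution path to output.txt.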
| mit | Python |
|
d9ed78369e21b79e022e685ecb39babbb0c17315 | Create test_lcd.py | alienneo666/Rob_Bat | Raspberry_py/test_lcd.py | Raspberry_py/test_lcd.py | #!/usr/bin/python
#import
import RPi.GPIO as GPIO
import time
# Define GPIO to LCD mapping
LCD_RS = 7
LCD_E = 8
LCD_D4 = 25
LCD_D5 = 24
LCD_D6 = 23
LCD_D7 = 18
# Define some device constants
LCD_WIDTH = 16 # Maximum characters per line
LCD_CHR = True
LCD_CMD = False
LCD_LINE_1 = 0x80 # LCD RAM address for the 1st line
LCD_LINE_2 = 0xC0 # LCD RAM address for the 2nd line
# Timing constants
E_PULSE = 0.0005
E_DELAY = 0.0005
def main():
# Main program block
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM) # Use BCM GPIO numbers
GPIO.setup(LCD_E, GPIO.OUT) # E
GPIO.setup(LCD_RS, GPIO.OUT) # RS
GPIO.setup(LCD_D4, GPIO.OUT) # DB4
GPIO.setup(LCD_D5, GPIO.OUT) # DB5
GPIO.setup(LCD_D6, GPIO.OUT) # DB6
GPIO.setup(LCD_D7, GPIO.OUT) # DB7
# Initialise display
lcd_init()
while True:
# Send some test
lcd_string("................",LCD_LINE_1)
lcd_string("................",LCD_LINE_2)
time.sleep(4)
lcd_string(" Rob-Bat OK ",LCD_LINE_1)
time.sleep(3)
lcd_string(" Funcionando! ",LCD_LINE_2)
time.sleep(3)
lcd_string("Computando......",LCD_LINE_1)
lcd_string("................",LCD_LINE_2)
time.sleep(2)
lcd_string(" ESTOY VIVO !! ",LCD_LINE_1)
lcd_string("----------------",LCD_LINE_2)
    time.sleep(10)
def lcd_init():
# Initialise display
lcd_byte(0x33,LCD_CMD) # 110011 Initialise
lcd_byte(0x32,LCD_CMD) # 110010 Initialise
lcd_byte(0x06,LCD_CMD) # 000110 Cursor move direction
lcd_byte(0x0C,LCD_CMD) # 001100 Display On,Cursor Off, Blink Off
lcd_byte(0x28,LCD_CMD) # 101000 Data length, number of lines, font size
lcd_byte(0x01,LCD_CMD) # 000001 Clear display
time.sleep(E_DELAY)
def lcd_byte(bits, mode):
# Send byte to data pins
# bits = data
# mode = True for character
# False for command
GPIO.output(LCD_RS, mode) # RS
# High bits
GPIO.output(LCD_D4, False)
GPIO.output(LCD_D5, False)
GPIO.output(LCD_D6, False)
GPIO.output(LCD_D7, False)
if bits&0x10==0x10:
GPIO.output(LCD_D4, True)
if bits&0x20==0x20:
GPIO.output(LCD_D5, True)
if bits&0x40==0x40:
GPIO.output(LCD_D6, True)
if bits&0x80==0x80:
GPIO.output(LCD_D7, True)
# Toggle 'Enable' pin
lcd_toggle_enable()
# Low bits
GPIO.output(LCD_D4, False)
GPIO.output(LCD_D5, False)
GPIO.output(LCD_D6, False)
GPIO.output(LCD_D7, False)
if bits&0x01==0x01:
GPIO.output(LCD_D4, True)
if bits&0x02==0x02:
GPIO.output(LCD_D5, True)
if bits&0x04==0x04:
GPIO.output(LCD_D6, True)
if bits&0x08==0x08:
GPIO.output(LCD_D7, True)
# Toggle 'Enable' pin
lcd_toggle_enable()
def lcd_toggle_enable():
# Toggle enable
time.sleep(E_DELAY)
GPIO.output(LCD_E, True)
time.sleep(E_PULSE)
GPIO.output(LCD_E, False)
time.sleep(E_DELAY)
def lcd_string(message,line):
# Send string to display
message = message.ljust(LCD_WIDTH," ")
lcd_byte(line, LCD_CMD)
for i in range(LCD_WIDTH):
lcd_byte(ord(message[i]),LCD_CHR)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
finally:
lcd_byte(0x01, LCD_CMD)
lcd_string("Goodbye!",LCD_LINE_1)
GPIO.cleanup()
| mit | Python |
|
f30c542a9714574dbcee15ca7f7b4ca4cdb9d965 | add atexit01.py | devlights/try-python | trypython/stdlib/atexit01.py | trypython/stdlib/atexit01.py | # coding: utf-8
"""
Sample code demonstrating the atexit module.
"""
import atexit
import sys
from trypython.common.commoncls import SampleBase
from trypython.common.commonfunc import pr
class Sample(SampleBase):
def exec(self):
#
        # The atexit module lets you install shutdown hooks:
        # register a hook with register() and remove it with unregister().
        #
        # A function that takes no arguments can also be registered simply
        # by decorating it with @atexit.register.
#
atexit.register(Sample.exit_hook)
pr('script', 'end')
sys.exit(0)
@staticmethod
def exit_hook():
pr('exit_hook', 'called')
@staticmethod
@atexit.register
def exit_hook2():
pr('exit_hook2', 'called')
def go():
obj = Sample()
obj.exec()
if __name__ == '__main__':
go()
| mit | Python |
|
926fe25c4995b5ab1d2464159223e2c403b72570 | use python command line tool with tshark to parse pcap and convert to csv | econchick/tissue,econchick/tissue | pcap2csv.py | pcap2csv.py | import os
import csv
cmd = "tshark -n -r {0} -T fields -Eheader=y -e ip.addr > tmp.csv"
os.system(cmd.format("wireshark_sample.pcap"))
result = []
with open("tmp.csv", "r") as infile:
for line in infile:
if line == "\n":
continue
else:
result.append(line.strip().split(","))
with open('sample.csv', 'wb') as csvfile:
writer = csv.writer(csvfile, quoting=csv.QUOTE_ALL)
for line in result:
writer.writerow(line)
os.system("rm tmp.csv") | mit | Python |
|
81f4976645225b6cf4a422186a3419a06756bfc5 | add a set of test utils that will be useful for running tests | ocefpaf/ulmo,ocefpaf/ulmo,nathanhilbert/ulmo,nathanhilbert/ulmo,cameronbracken/ulmo,cameronbracken/ulmo | test/test_util.py | test/test_util.py | import contextlib
import os
import os.path
import mock
import requests
@contextlib.contextmanager
def mocked_requests(path):
"""mocks the requests library to return a given file's content"""
# if environment variable is set, then don't mock the tests just grab files
# over the network. Example:
# env ULMO_DONT_MOCK_TESTS=1 py.test
if os.environ.get('ULMO_DONT_MOCK_TESTS', False):
yield
else:
test_path = test_file_path(path)
with open(test_path, 'rb') as f:
mock_response = requests.Response()
mock_response.status_code = 200
mock_response.raw = f
with mock.patch('requests.get', return_value=mock_response):
yield
def test_file_path(file_path):
"""translates a file path to be relative to the test files directory"""
return os.path.join(os.path.dirname(__file__), 'files', file_path)
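
# Example (sketch; the fixture filename is hypothetical):
#   with mocked_requests('usgs/nwis/site_data.rdb'):
#       resp = requests.get('http://example.com/anything')
#       # resp.raw now streams the local fixture file instead of the network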
| bsd-3-clause | Python |
|
0827fce61013172fa7183ee294189275030c0faf | Create code_5.py | jnimish77/Cloud-Computing-and-Programming-using-various-tools,jnimish77/Cloud-Computing-and-Programming-using-various-tools,jnimish77/Cloud-Computing-and-Programming-using-various-tools | MPI_Practice_Examples/code_5.py | MPI_Practice_Examples/code_5.py | #dotProductParallel_1.py
#"to run" syntax example: mpiexec -n 4 python26 dotProductParallel_1.py 40000
from mpi4py import MPI
import numpy
import sys
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
#read from command line
n = int(sys.argv[1]) #length of vectors
#arbitrary example vectors, generated to be evenly divided by the number of
#processes for convenience
x = numpy.linspace(0,100,n) if comm.rank == 0 else None
y = numpy.linspace(20,300,n) if comm.rank == 0 else None
#initialize as numpy arrays
dot = numpy.array([0.])
local_n = numpy.array([0])
#test for conformability
if rank == 0:
if (n != y.size):
print "vector length mismatch"
comm.Abort()
#currently, our program cannot handle sizes that are not evenly divided by
#the number of processors
if(n % size != 0):
print "the number of processors must evenly divide n."
comm.Abort()
#length of each process's portion of the original vector
local_n = numpy.array([n/size])
#communicate local array size to all processes
comm.Bcast(local_n, root=0)
#initialize as numpy arrays
local_x = numpy.zeros(local_n)
local_y = numpy.zeros(local_n)
#divide up vectors
comm.Scatterv([x,(0,100,n),MPI.DOUBLE], local_x)
comm.Scatterv([y, (20,300,n), MPI.DOUBLE], local_y)
#local computation of dot product
local_dot = numpy.array([numpy.dot(local_x, local_y)])
#sum the local dot products into dot on the root process
comm.Reduce(local_dot, dot, op = MPI.SUM)
if (rank == 0):
print "The dot product is", dot[0], "computed in parallel"
print "and", numpy.dot(x,y), "computed serially"
| apache-2.0 | Python |
|
6454548da01dbc2b9f772a5c0ffb11a03dc933e7 | Add module capable of rendering a circle when ran | withtwoemms/pygame-explorations | draw_shape.py | draw_shape.py | import pygame
pygame.init()
#-- SCREEN CHARACTERISTICS ------------------------->>>
background_color = (255,255,255)
(width, height) = (300, 200)
#-- RENDER SCREEN ---------------------------------->>>
screen = pygame.display.set_mode((width, height))
screen.fill(background_color)
#pygame.draw.circle(canvas, color, position(x,y), radius, thickness)
pygame.draw.circle(screen, (255,0,0), (150, 100), 10, 1)
#-- RUN LOOP --------------------------------------->>>
pygame.display.flip()
running = True
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
| mit | Python |
|
469b28aec45c9832e4cfe658143316fb15e103d1 | Add server | dotoscat/Polytank-ASIR | server.py | server.py | print("Hola mundo")
| agpl-3.0 | Python |
|
6ac6f9f3f933a98af8722561ba181ca50c6ad1fe | Add performance test | tailhook/sortedsets | perftest.py | perftest.py | import resource
from time import clock
from sortedsets import SortedSet
def test(size):
tm = clock()
ss = SortedSet((str(i), i*10) for i in range(size))
create_time = clock() - tm
print("SORTED SET WITH", size, "ELEMENTS", ss._level, "LEVELS")
print("Memory usage", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
print("Creation time ", format(create_time, '10.2f'), "s")
num = 1000
step = size // (num + 2)
items = []
for i in range(step, size-step, step):
items.append((str(i), i*10))
tm = clock()
for k, v in items:
del ss[k]
del_time = num/(clock() - tm)
tm = clock()
for k, v in items:
ss[k] = v
ins_time = num/(clock() - tm)
print("Insertion speed", format(ins_time, '10.2f'), "ins/s")
print("Deletion speed ", format(del_time, '10.2f'), "del/s")
for size in (10000, 100000, 1000000, 10000000):
test(size)
| mit | Python |
|
a107d3c088e13c4bf1a600f0ebf2664321d6799f | add solution for Binary Tree Maximum Path Sum | zhyu/leetcode,zhyu/leetcode | src/binaryTreeMaximumPathSum.py | src/binaryTreeMaximumPathSum.py | # Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param root, a tree node
# @return an integer
def maxPathSum(self, root):
self.res = root.val if root else 0
self.dfs(root)
return self.res
def dfs(self, root):
if root is None:
return 0
l_max = max(0, self.dfs(root.left))
r_max = max(0, self.dfs(root.right))
self.res = max(self.res, root.val+l_max+r_max)
return root.val+max(l_max, r_max)
| mit | Python |
|
4fdef464be6eabee609ecc4327493c277693c0e0 | Make content text mandatory | stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten | content/migrations/0023_auto_20160614_1130.py | content/migrations/0023_auto_20160614_1130.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-14 09:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('content', '0022_auto_20160608_1407'),
]
operations = [
migrations.AlterField(
model_name='comment',
name='text',
field=models.TextField(verbose_name='Text'),
),
migrations.AlterField(
model_name='content',
name='text',
field=models.TextField(verbose_name='Text'),
),
]
| agpl-3.0 | Python |
|
5a857703de5fc1e67e958afb41a10db07b98bfa1 | Add migration script to fix valid users with date_confirmed==None | laurenrevere/osf.io,doublebits/osf.io,kch8qx/osf.io,mluo613/osf.io,petermalcolm/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,caneruguz/osf.io,njantrania/osf.io,asanfilippo7/osf.io,zkraime/osf.io,petermalcolm/osf.io,himanshuo/osf.io,hmoco/osf.io,GaryKriebel/osf.io,jmcarp/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,haoyuchen1992/osf.io,binoculars/osf.io,hmoco/osf.io,lyndsysimon/osf.io,baylee-d/osf.io,jinluyuan/osf.io,KAsante95/osf.io,laurenrevere/osf.io,samanehsan/osf.io,cslzchen/osf.io,jmcarp/osf.io,arpitar/osf.io,MerlinZhang/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,wearpants/osf.io,RomanZWang/osf.io,zkraime/osf.io,zachjanicki/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,njantrania/osf.io,TomHeatwole/osf.io,dplorimer/osf,HarryRybacki/osf.io,fabianvf/osf.io,petermalcolm/osf.io,acshi/osf.io,RomanZWang/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,doublebits/osf.io,ckc6cz/osf.io,jolene-esposito/osf.io,caneruguz/osf.io,felliott/osf.io,HarryRybacki/osf.io,cslzchen/osf.io,dplorimer/osf,abought/osf.io,adlius/osf.io,Ghalko/osf.io,jolene-esposito/osf.io,reinaH/osf.io,doublebits/osf.io,cslzchen/osf.io,kushG/osf.io,amyshi188/osf.io,caseyrollins/osf.io,jmcarp/osf.io,chrisseto/osf.io,samanehsan/osf.io,cosenal/osf.io,revanthkolli/osf.io,amyshi188/osf.io,caseyrygt/osf.io,kushG/osf.io,icereval/osf.io,KAsante95/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,erinspace/osf.io,brandonPurvis/osf.io,TomHeatwole/osf.io,GageGaskins/osf.io,DanielSBrown/osf.io,aaxelb/osf.io,RomanZWang/osf.io,HalcyonChimera/osf.io,ticklemepierce/osf.io,lyndsysimon/osf.io,GaryKriebel/osf.io,sbt9uc/osf.io,ticklemepierce/osf.io,pattisdr/osf.io,binoculars/osf.io,mluo613/osf.io,asanfilippo7/osf.io,caseyrollins/osf.io,TomHeatwole/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,pattisdr/osf.io,Ghalko/osf.io,bdyetton/prettychart,cldershem/osf.io,cldershem/osf.io,alexschiller/osf.io,jeffreyliu3230/osf.io,SSJohns/osf.io,danielneis/osf.io,zamattiac/osf.io,caseyrygt/osf.io,HarryRybacki/osf.io,aaxelb/osf.io,adlius/osf.io,baylee-d/osf.io,GageGaskins/osf.io,samchrisinger/osf.io,kushG/osf.io,alexschiller/osf.io,lyndsysimon/osf.io,wearpants/osf.io,jinluyuan/osf.io,CenterForOpenScience/osf.io,jeffreyliu3230/osf.io,ticklemepierce/osf.io,jolene-esposito/osf.io,RomanZWang/osf.io,samanehsan/osf.io,revanthkolli/osf.io,jinluyuan/osf.io,barbour-em/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,danielneis/osf.io,mluke93/osf.io,baylee-d/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,haoyuchen1992/osf.io,erinspace/osf.io,ticklemepierce/osf.io,zachjanicki/osf.io,mattclark/osf.io,crcresearch/osf.io,billyhunt/osf.io,jeffreyliu3230/osf.io,monikagrabowska/osf.io,billyhunt/osf.io,amyshi188/osf.io,mfraezz/osf.io,Ghalko/osf.io,felliott/osf.io,samchrisinger/osf.io,GaryKriebel/osf.io,jolene-esposito/osf.io,samchrisinger/osf.io,HalcyonChimera/osf.io,samanehsan/osf.io,reinaH/osf.io,mluke93/osf.io,barbour-em/osf.io,kushG/osf.io,caseyrollins/osf.io,cwisecarver/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,himanshuo/osf.io,asanfilippo7/osf.io,adlius/osf.io,SSJohns/osf.io,Ghalko/osf.io,jnayak1/osf.io,binoculars/osf.io,chennan47/osf.io,lyndsysimon/osf.io,TomBaxter/osf.io,abought/osf.io,fabianvf/osf.io,MerlinZhang/osf.io,rdhyee/osf.io,jnayak1/osf.io,sloria/osf.io,leb2dg/osf.io,leb2dg/osf.io,fabianvf/osf.io,zkraime/osf.io,kch8qx/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,emetsger/osf.io,icereval/osf.io,Nesiehr/osf.io,leb2dg/osf.io,Johnetor
doff/osf.io,cwisecarver/osf.io,RomanZWang/osf.io,felliott/osf.io,bdyetton/prettychart,rdhyee/osf.io,revanthkolli/osf.io,zkraime/osf.io,acshi/osf.io,alexschiller/osf.io,chennan47/osf.io,doublebits/osf.io,kwierman/osf.io,mluo613/osf.io,laurenrevere/osf.io,acshi/osf.io,chennan47/osf.io,mluo613/osf.io,wearpants/osf.io,himanshuo/osf.io,abought/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,Nesiehr/osf.io,bdyetton/prettychart,DanielSBrown/osf.io,billyhunt/osf.io,jmcarp/osf.io,kwierman/osf.io,brianjgeiger/osf.io,mattclark/osf.io,hmoco/osf.io,asanfilippo7/osf.io,crcresearch/osf.io,brandonPurvis/osf.io,doublebits/osf.io,TomBaxter/osf.io,zamattiac/osf.io,fabianvf/osf.io,ZobairAlijan/osf.io,adlius/osf.io,ckc6cz/osf.io,kwierman/osf.io,icereval/osf.io,GageGaskins/osf.io,Nesiehr/osf.io,SSJohns/osf.io,mluke93/osf.io,jnayak1/osf.io,jinluyuan/osf.io,lamdnhan/osf.io,ZobairAlijan/osf.io,rdhyee/osf.io,TomBaxter/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,barbour-em/osf.io,barbour-em/osf.io,cwisecarver/osf.io,dplorimer/osf,jnayak1/osf.io,zamattiac/osf.io,lamdnhan/osf.io,ZobairAlijan/osf.io,arpitar/osf.io,chrisseto/osf.io,haoyuchen1992/osf.io,njantrania/osf.io,ckc6cz/osf.io,felliott/osf.io,cldershem/osf.io,arpitar/osf.io,dplorimer/osf,CenterForOpenScience/osf.io,zachjanicki/osf.io,cosenal/osf.io,MerlinZhang/osf.io,kch8qx/osf.io,billyhunt/osf.io,erinspace/osf.io,brandonPurvis/osf.io,jeffreyliu3230/osf.io,KAsante95/osf.io,billyhunt/osf.io,brandonPurvis/osf.io,caneruguz/osf.io,mluo613/osf.io,aaxelb/osf.io,emetsger/osf.io,petermalcolm/osf.io,pattisdr/osf.io,njantrania/osf.io,cwisecarver/osf.io,GaryKriebel/osf.io,revanthkolli/osf.io,mluke93/osf.io,chrisseto/osf.io,KAsante95/osf.io,mattclark/osf.io,mfraezz/osf.io,alexschiller/osf.io,saradbowman/osf.io,sloria/osf.io,GageGaskins/osf.io,HarryRybacki/osf.io,hmoco/osf.io,cldershem/osf.io,zachjanicki/osf.io,chrisseto/osf.io,cosenal/osf.io,arpitar/osf.io,wearpants/osf.io,DanielSBrown/osf.io,HalcyonChimera/osf.io,emetsger/osf.io,acshi/osf.io,sloria/osf.io,MerlinZhang/osf.io,CenterForOpenScience/osf.io,himanshuo/osf.io,lamdnhan/osf.io,caseyrygt/osf.io,brianjgeiger/osf.io,TomHeatwole/osf.io,sbt9uc/osf.io,kwierman/osf.io,ckc6cz/osf.io,zamattiac/osf.io,danielneis/osf.io,alexschiller/osf.io,KAsante95/osf.io,lamdnhan/osf.io,bdyetton/prettychart,abought/osf.io,acshi/osf.io,DanielSBrown/osf.io,sbt9uc/osf.io,cosenal/osf.io,sbt9uc/osf.io,caneruguz/osf.io,reinaH/osf.io,leb2dg/osf.io,danielneis/osf.io,amyshi188/osf.io,reinaH/osf.io,emetsger/osf.io | scripts/migrate_unconfirmed_valid_users.py | scripts/migrate_unconfirmed_valid_users.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Script to migrate users with a valid date_last_login but no date_confirmed."""
import sys
import logging
from website.app import init_app
from website.models import User
from scripts import utils as script_utils
from tests.base import OsfTestCase
from tests.factories import UserFactory
from modularodm import Q
import datetime as dt
logger = logging.getLogger(__name__)
def do_migration(records):
for user in records:
user.date_confirmed = user.date_last_login
if not user.is_registered:
            user.is_registered = True
        user.save()
        logger.info('Finished migrating user {0}'.format(user._id))
def get_targets():
return User.find(Q('date_confirmed', 'eq', None) & Q('date_last_login', 'ne', None))
def main():
init_app(routes=False) # Sets the storage backends on all models
if 'dry' in sys.argv:
for user in get_targets():
print(user)
else:
do_migration(get_targets())
class TestMigrateUnconfirmedValidUsers(OsfTestCase):
def test_get_targets(self):
test = User.find(Q('date_confirmed', 'ne', None) & Q('date_last_login', 'ne', None))
assert test is not None
def test_do_migration(self):
today = dt.datetime.utcnow()
user1 = UserFactory.build(date_confirmed=None, date_last_login=today, is_registered=False)
user2 = UserFactory.build(date_confirmed=None, date_last_login=today, is_registered=True)
user1.save()
user2.save()
user_list = User.find(Q('_id', 'eq', user1._id) | Q('_id', 'eq', user2._id))
do_migration(user_list)
assert user1.date_confirmed is today
assert user1.is_registered
assert user2.date_confirmed is today
assert user2.is_registered
if __name__ == '__main__':
script_utils.add_file_logger(logger, __file__)
main()
| apache-2.0 | Python |
|
e12371408af1682904483341fd1f41ef6034a17f | add test | Jayin/ComputerScience,Jayin/ComputerScience,Jayin/ComputerScience,Jayin/ComputerScience,Jayin/ComputerScience | OperateSystem/Ex1/Test/SellTest.py | OperateSystem/Ex1/Test/SellTest.py | # -*- coding: utf-8 -*-
__author__ = 'jayin'
import requests
import threading
def buy_ticket():
res = requests.get('http://localhost:8000/buy1')
print threading.currentThread().getName() + u' buy ticket ' + res.content
def main():
for x in range(1, 40):
t = threading.Thread(target=buy_ticket, name=x)
t.start()
if __name__ == '__main__':
main() | mit | Python |
|
edeffbcbe8fb239553c73fa37e73c0188ffc2479 | Add unit test for retrieving credentials from environment variables | ueg1990/imgur-cli | tests/test_cli.py | tests/test_cli.py | import sys
import fixtures
import imgurpython
import testtools
import imgur_cli.cli as cli
FAKE_ENV = {'IMGUR_CLIENT_ID': 'client_id',
'IMGUR_CLIENT_SECRET': 'client_secret',
'IMGUR_ACCESS_TOKEN': 'access_token',
'IMGUR_REFRESH_TOKEN': 'refresh_token',
'IMGUR_MASHAPE_KEY': 'mashape_key'}
class TestImgurCli(testtools.TestCase):
def make_env(self, exclude=None):
if not exclude:
exclude = []
env = {key: value for key, value in FAKE_ENV.items() if key not in exclude}
self.useFixture(fixtures.MonkeyPatch('os.environ', env))
def test_imgur_credentials_env(self):
self.make_env()
expected = ('client_id', 'client_secret', 'access_token', 'refresh_token',
'mashape_key')
imgur_credentials = cli.imgur_credentials()
self.assertEqual(expected, imgur_credentials)
self.make_env(exclude=['IMGUR_MASHAPE_KEY'])
expected = ('client_id', 'client_secret', 'access_token', 'refresh_token',
None)
imgur_credentials = cli.imgur_credentials()
self.assertEqual(expected, imgur_credentials)
self.make_env(exclude=['IMGUR_CLIENT_ID'])
self.assertRaises(imgurpython.client.ImgurClientError,
cli.imgur_credentials)
self.make_env(exclude=['IMGUR_CLIENT_SECRET'])
self.assertRaises(imgurpython.client.ImgurClientError,
cli.imgur_credentials)
| mit | Python |
|
4c148281ee8071ea8f150362388a44cf5c0895bf | Add exception classes. | cwahbong/tgif-py | tgif/exception.py | tgif/exception.py | """ All exceptions go here.
"""
class Friday(Exception):
""" Base exception in Friday game.
"""
class GameOver(Friday):
    Indicates that the game is over.
"""
| mit | Python |
|
ff079da977990b7d6e71c6d92c5a9299fa92d123 | Add module listtools implementing class LazyList. | RKrahl/photo-tools | photo/listtools.py | photo/listtools.py | """Some useful list classes.
**Note**: This module might be useful independently of photo-tools.
It is included here because photo-tools uses it internally, but it is
not considered to be part of the API. Changes in this module are not
considered API changes of photo-tools. It may even be removed from
future versions of the photo-tools distribution without further
notice.
"""
from collections import MutableSequence
class LazyList(MutableSequence):
"""A list generated lazily from an iterable.
LazyList provides list access to the sequence of elements from the
iterable. Elements are taken out lazily. That means, the
elements are taken from the iterable not before they are actually
accessed. Once taken out, the elements are stored in a
conventional list in order to provide random access. The string
representation operator of LazyList only displays the elements
taken out of the iterable so far.
Note: if the list is accessed at the end using negative indices,
all elements are taken from the iterable before returning the
result. Some operations implicitly access the list at the end and
thus take all elements from the iterable. These operations
include `len()` and `append()`. Do not access the list at the end
using negativ indices or append to the list if you cannot afford
to take all elements out of the iterable.
>>> l = LazyList((0, 1, 2, 3, 4))
>>> l
[]
>>> l[1]
1
>>> l
[0, 1]
>>> del l[1]
>>> l
[0]
>>> l[8]
Traceback (most recent call last):
...
IndexError: list index out of range
>>> l
[0, 2, 3, 4]
>>> l = LazyList((0, 1, 2, 3, 4))
>>> l[-2]
3
>>> l
[0, 1, 2, 3, 4]
>>> l = LazyList((0, 1, 2, 3, 4))
>>> list(l) == [0, 1, 2, 3, 4]
True
>>> l
[0, 1, 2, 3, 4]
>>> l = LazyList((0, 1, 2, 3, 4))
>>> len(l)
5
>>> l
[0, 1, 2, 3, 4]
>>> l = LazyList((0, 1, 2, 3, 4))
>>> l.append(5)
>>> l
[0, 1, 2, 3, 4, 5]
>>> def naturals():
... n = 0
... while True:
... yield n
... n += 1
...
>>> l = LazyList(naturals())
>>> l[1]
1
>>> l
[0, 1]
>>> l[4:2:-1]
[4, 3]
>>> l
[0, 1, 2, 3, 4]
>>> l[8]
8
>>> l
[0, 1, 2, 3, 4, 5, 6, 7, 8]
>>> l[17:11]
[]
>>> l
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
"""
def __init__(self, iterable):
self.iterable = iter(iterable)
self.elements = []
def _access(self, index):
"""Try to take out the elements covered by index from the iterable.
The argument may be an int or a slice. Do not raise an error,
even if not enough elements can be delivered by the iterable.
"""
m = 0
if isinstance(index, int):
m = index + 1 if index >= 0 else -1
elif isinstance(index, slice):
if index.step is not None and index.step < 0:
m = index.start + 1 if index.start >= 0 else -1
else:
m = index.stop if index.stop >= 0 else -1
while len(self.elements) < m or m < 0:
try:
self.elements.append(next(self.iterable))
except StopIteration:
break
def __len__(self):
self._access(-1)
return len(self.elements)
def __getitem__(self, index):
self._access(index)
return self.elements.__getitem__(index)
def __setitem__(self, index, value):
self._access(index)
self.elements.__setitem__(index, value)
def __delitem__(self, index):
self._access(index)
self.elements.__delitem__(index)
def insert(self, index, value):
self._access(index)
self.elements.insert(index, value)
def append(self, value):
self._access(-1)
self.elements.append(value)
def __nonzero__(self):
self._access(0)
return len(self) > 0
def __str__(self):
return str(self.elements)
def __repr__(self):
return repr(self.elements)
| apache-2.0 | Python |
|
a6935d250dfdbc275ce450f813697b73ebc291e3 | Create addDigits.py | CptDemocracy/Python | Puzzles/leetcode/April-9th-2016/addDigits.py | Puzzles/leetcode/April-9th-2016/addDigits.py | /*
[ref.href] leetcode.com/problems/add-digits
"
Given a non-negative integer num, repeatedly add all its digits
until the result has only one digit.
For example:
Given num = 38, the process is like: 3 + 8 = 11, 1 + 1 = 2.
Since 2 has only one digit, return it.
Credits:
Special thanks to @jianchao.li.fighter for adding this problem
and creating all test cases.
"
"""
class Solution(object):
def addDigits(self, n):
"""
:type num: int
:rtype: int
"""
if n < 10:
return n
n = n % 10 + self.addDigits(n // 10)
return self.addDigits(n)
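
# Quick check (sketch):
#   Solution().addDigits(38)  # -> 2, since 3 + 8 = 11 and 1 + 1 = 2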
| mit | Python |
|
85142dd9f7413dcb7c214ec251d21c93517ce26c | add AcoraMatcher tool | AtmaHou/atma | AcoraMatcher.py | AcoraMatcher.py | # coding:utf-8
import collections

import acora
class AcoraMatcher:
def __init__(self, spec_set, min_count=1, min_len=1):
key_lst = []
if type(spec_set) == dict or type(spec_set) == collections.Counter:
for spec, cnt in spec_set.items():
if cnt >= min_count and len(spec) >= min_len:
key_lst.append(spec)
elif type(spec_set) == list:
key_lst = spec_set
else:
print 'ERROR: wrong value type:', type(spec_set)
exit(-1)
self.builder = acora.AcoraBuilder(key_lst)
self.ac = self.builder.build()
def match(self, des, whole_match=True):
ret = []
letters = set("!\"$%&'()*+,.:;<>?@[\]^_`{|}~ -")
wrong_spec = ['other', 'no', 'A', 'none']
for kw, pos in self.ac.findall(des):
# print des[pos - 1] == ' '
# print des[pos: pos + len(kw)]
# print pos+len(kw) == len(des), len(des), pos, len(kw), des[pos + len(kw) - 1] in letters
if kw in wrong_spec:
continue
if not whole_match:
ret.append((kw, pos))
# remove non whole match
elif (pos == 0 or des[pos-1] in letters) and (pos+len(kw) == len(des) or des[pos+len(kw)] in letters):
ret.append((kw, pos))
return ret # return value format: [(match_string, start_pos)], start_pos starts from 0
@staticmethod
def longest_match(matches):
ret = []
matches = sorted(matches, key=lambda (x, y): (y, len(x) * -1))
last_end = 0
for m in matches:
if len(m[0]) + m[1] > last_end:
ret.append(m)
last_end = len(m[0]) + m[1]
return ret
@staticmethod
def distribution_counter(count_dic, items):
for i in items:
key = i
if key not in count_dic:
count_dic[key] = 1
else:
count_dic[key] += 1
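
# Minimal usage sketch (requires the acora package; match order may vary):
if __name__ == '__main__':
    matcher = AcoraMatcher(['usb', 'usb 3.0'])
    print matcher.match('dual usb 3.0 ports')
    # e.g. [('usb', 5), ('usb 3.0', 5)]
    print AcoraMatcher.longest_match(matcher.match('dual usb 3.0 ports'))
    # -> [('usb 3.0', 5)]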
| mit | Python |
|
7a880376e098f60b1666833bb6b14b359b0ebda5 | add fitness_spider.py | bluedai180/PythonExercise,bluedai180/PythonExercise | Exercise/fitness_spider.py | Exercise/fitness_spider.py | from bs4 import BeautifulSoup
import requests
from selenium import webdriver
import time
import sqlite3
from selenium import webdriver
import json
driver = webdriver.PhantomJS()
class Fitness:
i = 0
url = "http://www.hiyd.com/dongzuo/"
headers = {
'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'
}
def get_info(self, url):
response = requests.get(url, headers=self.headers, timeout=5)
# driver.get(url)
soup = BeautifulSoup(response.text, "html.parser")
# soup = BeautifulSoup(driver.page_source, "html.parser")
text = str(soup.find_all("script")[-1])
# print(driver.page_source)
data_text = text.split("e.init(")[1].split(");")[0]
json_text = json.loads(data_text)
print(json_text)
if __name__ == "__main__":
spider = Fitness()
while spider.i < 1:
spider.i += 1
spider.get_info(spider.url + str(spider.i) + "/") | apache-2.0 | Python |
|
a8fd0bfa974ff818ec105a42c585bae48030a086 | Create notebooknetc.py | picklecai/OMOOC2py,picklecai/OMOOC2py | _src/om2py3w/3wex0/notebooknetc.py | _src/om2py3w/3wex0/notebooknetc.py | # _*_coding:utf-8_*_
# Client program
from socket import *
import time
import notebooknets
def main():
BUF_SIZE = 65565
ss_addr = ('127.0.0.1', 8800)
cs = socket(AF_INET, SOCK_DGRAM)
while True:
global data
data = raw_input('Please Input data>')
cs.sendto(data, ss_addr)
data, addr = cs.recvfrom(BUF_SIZE)
print "Data: ", data
notebooknets.history(data)
if __name__ == '__main__':
main()
| mit | Python |
|
68206c67739abf4f9f4d1ab8aa647a28649b5f5f | add figure one comparison between IME and random | WilmerLab/HTSOHM-dev,WilmerLab/HTSOHM-dev | analysis/figure_1_ime_vs_random.py | analysis/figure_1_ime_vs_random.py | #!/usr/bin/env python3
import os
import click
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rc
# from matplotlib import cm
import pandas as pd
font = {'family':'sans-serif',
'sans-serif':['Helvetica'],
'weight' : 'normal',
'size' : 8}
rc('font', **font)
@click.command()
def figure1_ime_vs_random():
print("loading data...")
def setup_plot(csv_paths):
df = pd.DataFrame(data=dict(num=range(1, 500001)))
ax.set_xlabel("Materials")
ax.set_yticks([0, 400, 800, 1200, 1600])
ax.set_xlim(0, 150000)
ax.set_ylim(0, 1600)
# ax.axes.yaxis.set_visible(False)
ax.set_xticks([0, 25000, 50000, 100000, 150000])
ax.axes.xaxis.set_ticklabels(["0", "25K", "50K", "450K", "500K"])
ax.grid(linestyle='-', color='0.8', zorder=0, axis="x")
# ax.axhline(1600, linestyle="--", lw=2, color="black", label=0)
for path in csv_paths:
df[path] = pd.read_csv(path, usecols=["unique_bins"])
return df
fig = plt.figure(figsize=(3.346, 3.346), tight_layout=True)
ax = fig.add_subplot(1, 1, 1)
ax.set_ylabel("Bins Explored")
legend_labels = ["IME", "Random"]
g1 = setup_plot(["reference.csv", "random16_500K.csv"])
ax.plot(g1.num, g1["reference.csv"], lw=1.5, color="black", zorder=10)
ax.plot(g1.num[0:50000], g1["random16_500K.csv"][0:50000], lw=1.5, color="orange", zorder=10)
ax.plot(g1.num[100000:150000], g1["random16_500K.csv"][450000:500000], lw=1.5, color="orange", zorder=10)
ax.plot([50000, 100000],[541, 730], lw=1.5, color="orange", linestyle="--", zorder=10)
# ax.legend(legend_labels, bbox_to_anchor=(1.05, 1), loc='upper left', borderaxespad=0., facecolor='white', framealpha=1)
ax.legend(legend_labels, loc='upper right', facecolor='white', framealpha=1)
ax.axhline(468, lw=1.0, linestyle="--", color="grey", zorder=1)
ax.axhline(732, lw=1.0, linestyle="--", color="grey", zorder=1)
ax.axhline(1062, lw=1.0, linestyle="--", color="grey", zorder=1)
arrow_args = dict(arrowstyle="->")
ax.annotate("732 bins @ 494956", xy=(494956 - 350000, 732), xycoords="data",
textcoords="offset points", xytext=(0, 10), horizontalalignment='right', verticalalignment='bottom',
arrowprops=arrow_args)
ax.annotate("732 bins @ 4283", xy=(4283, 732), xycoords="data",
textcoords="offset points", xytext=(13, 10), horizontalalignment='left', verticalalignment='bottom',
arrowprops=arrow_args)
ax.annotate("468 bins @ 25000", xy=(25000, 468), xycoords="data",
textcoords="offset points", xytext=(0, -10), horizontalalignment='left', verticalalignment='top',
arrowprops=arrow_args)
ax.annotate("468 bins @ 1786", xy=(1786, 468), xycoords="data",
textcoords="offset points", xytext=(10, 15), horizontalalignment='left', verticalalignment='bottom',
arrowprops=arrow_args)
ax.annotate("1062 bins @ 25000", xy=(25000, 1062), xycoords="data",
textcoords="offset points", xytext=(0, 10), horizontalalignment='left', verticalalignment='bottom',
arrowprops=arrow_args)
fig.savefig("figure1_ime_vs_random.png", dpi=1200)
plt.close(fig)
if __name__ == '__main__':
figure1_ime_vs_random()
| mit | Python |
|
13f495ddabd1997b7dfdc9e2933b82fd25ecd664 | Create LevelOrderTraversal.py from LeetCode | jcchuks/MiscCodes,jcchuks/Hackerrank,jcchuks/Hackerrank,jcchuks/MiscCodes,jcchuks/MiscCodes | LevelOrderTraversal.py | LevelOrderTraversal.py |
#https://leetcode.com/problems/binary-tree-level-order-traversal/#/description
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Node(object):
def __init__(self,node,level):
self.node = node
self.level = level
class Solution(object):
def __init__(self):
self.array = []
self.level_counter = 0
self.result = []
self.levelq = []
def queue(self,node):
self.array.append(node)
def isNotEmpty(self):
return self.array
def popValue(self):
value = self.array[0]
del self.array[0]
return value
def levelOrder(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
"""
if not root:
return self.result
self.queue(Node(root,0))
while self.isNotEmpty():
bigNode = self.popValue()
if bigNode.level > self.level_counter:
self.level_counter = bigNode.level
self.result.append(self.levelq[:])
self.levelq[:] = []
self.levelq.append(bigNode.node.val)
if bigNode.node.left :
self.queue(Node(bigNode.node.left, bigNode.level + 1))
if bigNode.node.right :
self.queue(Node(bigNode.node.right, bigNode.level + 1))
if self.levelq:
self.result.append(self.levelq[:])
return self.result
| mit | Python |
|
66c00d10ddc1f137deaf9208572a287bbad33de7 | Add migration script | privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea | migrations/versions/d756b34061ff_.py | migrations/versions/d756b34061ff_.py | """Store privacyIDEA node in eventcounter table
Revision ID: d756b34061ff
Revises: 3d7f8b29cbb1
Create Date: 2019-09-02 13:59:24.244529
"""
# revision identifiers, used by Alembic.
from sqlalchemy import orm
from sqlalchemy.sql.ddl import CreateSequence
from privacyidea.lib.config import get_privacyidea_node
revision = 'd756b34061ff'
down_revision = '3d7f8b29cbb1'
from alembic import op, context
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class OldEventCounter(Base):
__tablename__ = 'eventcounter'
counter_name = sa.Column(sa.Unicode(80), nullable=False, primary_key=True)
counter_value = sa.Column(sa.Integer, default=0)
__table_args__ = {'mysql_row_format': 'DYNAMIC'}
class NewEventCounter(Base):
__tablename__ = 'eventcounter_new'
id = sa.Column(sa.Integer, sa.Sequence("eventcounter_seq"), primary_key=True)
counter_name = sa.Column(sa.Unicode(80), nullable=False)
counter_value = sa.Column(sa.Integer, default=0)
node = sa.Column(sa.Unicode(255), nullable=False)
__table_args__ = (sa.UniqueConstraint('counter_name',
'node',
name='evctr_1'),
{'mysql_row_format': 'DYNAMIC'})
def dialect_supports_sequences():
migration_context = context.get_context()
return migration_context.dialect.supports_sequences
def create_seq(seq):
if dialect_supports_sequences():
op.execute(CreateSequence(seq))
def upgrade():
try:
# Step 1: Create sequence on Postgres
        seq = sa.Sequence('eventcounter_seq')
try:
create_seq(seq)
except Exception as _e:
pass
# Step 2: Create new eventcounter_new table
op.create_table('eventcounter_new',
sa.Column("id", sa.Integer, sa.Sequence("eventcounter_seq"), primary_key=True),
sa.Column("counter_name", sa.Unicode(80), nullable=False),
sa.Column("counter_value", sa.Integer, default=0),
sa.Column("node", sa.Unicode(255), nullable=False),
sa.UniqueConstraint('counter_name', 'node', name='evctr_1'),
mysql_row_format='DYNAMIC'
)
# Step 3: Migrate data from eventcounter to eventcounter_new
node = get_privacyidea_node()
bind = op.get_bind()
session = orm.Session(bind=bind)
for old_ctr in session.query(OldEventCounter).all():
new_ctr = NewEventCounter(counter_name=old_ctr.counter_name,
counter_value=old_ctr.counter_value,
node=node)
session.add(new_ctr)
print("Migrating counter {}={} on node={} ...".format(new_ctr.counter_name, new_ctr.counter_value, node))
session.commit()
# Step 4: Remove eventcounter
op.drop_table("eventcounter")
op.rename_table("eventcounter_new", "eventcounter")
except Exception as exx:
print("Could not migrate table 'eventcounter'")
print (exx)
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint('evctr_1', 'eventcounter', type_='unique')
op.drop_column('eventcounter', 'node')
op.drop_column('eventcounter', 'id')
# ### end Alembic commands ###
| agpl-3.0 | Python |
|
891dc05f36ae9084d8511bf3e26e0631eadecef7 | add medications urls | slogan621/tscharts,slogan621/tscharts,slogan621/tscharts | medications/urls.py | medications/urls.py |
from django.conf.urls import url
from medications.views import MedicationsView
urlpatterns = [
url(r'^$', MedicationsView.as_view()),
url(r'^([0-9]+)/$', MedicationsView.as_view()),
]
| apache-2.0 | Python |
|
aa4f1df448c6d01875ed667e37afe68c114892ed | Add initial verification endpoint. Add all balance endpoint | Nevtep/omniwallet,VukDukic/omniwallet,Nevtep/omniwallet,habibmasuro/omniwallet,habibmasuro/omniwallet,OmniLayer/omniwallet,OmniLayer/omniwallet,OmniLayer/omniwallet,Nevtep/omniwallet,VukDukic/omniwallet,habibmasuro/omniwallet,Nevtep/omniwallet,achamely/omniwallet,achamely/omniwallet,achamely/omniwallet,OmniLayer/omniwallet,achamely/omniwallet,VukDukic/omniwallet,habibmasuro/omniwallet | api/mastercoin_verify.py | api/mastercoin_verify.py | import os
import glob
from flask import Flask, request, jsonify, abort, json
data_dir_root = os.environ.get('DATADIR')
app = Flask(__name__)
app.debug = True
@app.route('/addresses')
def addresses():
currency_id = request.args.get('currency_id')
print currency_id
response = []
addr_glob = glob.glob(data_dir_root + '/addr/*.json')
for address_file in addr_glob:
with open(address_file, 'r') as f:
addr = json.load(f)
res = {
'address': addr['address']
}
if currency_id == '0':
btc_balance = [x['value'] for x in addr['balance'] if x['symbol'] == 'BTC'][0]
res['balance'] = float(btc_balance)
response.append(res)
else:
if currency_id == '1' or currency_id == '2':
msc_currency_id = str(int(currency_id) - 1) # Mastercoin-tools is off by one on currency id from the spec
if msc_currency_id in addr:
                        print addr[msc_currency_id]['balance']
res['balance'] = float(addr[msc_currency_id]['balance'])
response.append(res)
json_response = json.dumps(response)
return json_response
@app.route('/transactions/<address>')
def transactions(address=None):
return ""
| agpl-3.0 | Python |
|
23799c4a33b9d2da82ec0770f15e840459a940c6 | Add api comtrade | daniel1409/dataviva-api,DataViva/dataviva-api,jdmmiranda307/dataviva-api | app/apis/comtrade_api.py | app/apis/comtrade_api.py | from flask import Blueprint, jsonify, request
from sqlalchemy import func, distinct
from inflection import singularize
from app.models.comtrade import Comtrade as Model
from app import cache
from app.helpers.cache_helper import api_cache_key
blueprint = Blueprint('comtrade_api', __name__, url_prefix='/comtrade')
@blueprint.route('/<path:path>/')
@cache.cached(key_prefix=api_cache_key("comtrade"))
def api(path):
dimensions = map(singularize, path.split('/'))
if invalid_dimension(dimensions):
return 'Error', 403
filters = {k: v for k, v in request.args.to_dict().iteritems() if k in Model.dimensions()}
counts = [c for c in map(singularize, request.args.getlist('count')) if c in Model.dimensions()]
values = get_values(request)
group_columns = get_columns(dimensions)
count_columns = get_columns(counts)
aggregated_values = [Model.aggregate(v) for v in values]
headers = get_headers(group_columns) + get_headers(count_columns, '_count') + values
entities = group_columns + map(lambda x: func.count(distinct(x)), count_columns) + aggregated_values
query = Model.query.with_entities(*entities).filter_by(**filters).group_by(*group_columns)
return jsonify(data=query.all(), headers=headers)
def get_values(request):
values = [v for v in request.args.getlist('value') if v in Model.values()]
return values if len(values) else Model.values()
def get_headers(columns, suffix=''):
return map(lambda x: x.key + suffix, columns)
def get_columns(dimensions):
return [getattr(Model, dimension) for dimension in dimensions]
def invalid_dimension(dimensions):
return not set(dimensions).issubset(set(Model.dimensions()))
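
# Example request (sketch; actual dimension/value names come from the
# Comtrade model, so the ones below are illustrative):
#   GET /comtrade/country/year/?value=trade_value&count=product
#   -> data grouped by (country, year) with a product_count column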
| mit | Python |
|
670e5d017adb24c5adffb38fa59059fec5175c3c | Create hello.py | libennext/gluon-tutorials-zh,libennext/gluon-tutorials-zh,libennext/gluon-tutorials-zh | hello.py | hello.py | print('hello, world!')
| apache-2.0 | Python |
|
1692161ad43fdc6a0e2ce9eba0bacefc04c46b5c | Add form generator module. | ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website | src/epiweb/apps/survey/utils.py | src/epiweb/apps/survey/utils.py | from django import forms
from epiweb.apps.survey.data import Survey, Section, Question
_ = lambda x: x
def create_field(question):
if question.type == 'yes-no':
field = forms.ChoiceField(widget=forms.RadioSelect,
choices=[('yes', _('Yes')), ('no', _('No'))])
elif question.type == 'option-multiple':
field = forms.MultipleChoiceField(widget=forms.CheckboxSelectMultiple,
choices=zip(range(0, len(question.options)), question.options))
elif question.type == 'option-single':
field = forms.ChoiceField(widget=forms.RadioSelect,
choices=zip(range(0, len(question.options)), question.options))
elif question.type == 'date':
        field = forms.DateField(input_formats=['%m/%d/%y'])
else:
field = forms.CharField()
field.label = question.label
field.required = False
return field
def generate_form(section, values=None):
if values:
form = forms.Form(values)
else:
form = forms.Form()
for question in section.questions:
form.fields[question.id] = create_field(question)
return form
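
# Example (sketch; assumes a Section instance from epiweb.apps.survey.data):
#   form = generate_form(section)                  # unbound form
#   form = generate_form(section, request.POST)    # bound to submitted values
#   if form.is_valid():
#       answers = form.cleaned_data                # keyed by question ids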
| agpl-3.0 | Python |
|
72a5f0d301b2169367c8bcbc42bb53b71c1d635c | Create utils.py | ch4rliem4rbles/slack-five | utils.py | utils.py | from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import blobstore_handlers
from google.appengine.api import memcache
import jinja2
import logging
import json
import os
class BaseHandler(webapp.RequestHandler):
context = {}
def initialize(self, request, response):
"""docstring for __init__"""
self.populateContext()
super(BaseHandler, self).initialize(request, response)
def populateContext(self):
"""Load up the stuff that every web handler will need"""
user = users.get_current_user()
if user:
self.context['logged_in'] = True
self.context['is_admin'] = users.is_current_user_admin()
def render(self, template_name):
"""Rending a template in a base directory by passing the name of the template"""
env = jinja2.Environment(loader=jinja2.FileSystemLoader('views'))
template = env.get_template(template_name)
self.response.out.write(template.render(self.context))
| mit | Python |
|
26bc11340590b0b863527fa12da03cea528feb46 | Add initial stub of GerritClient class | morucci/pygerrit,sonyxperiadev/pygerrit,dpursehouse/pygerrit,benjiii/pygerrit,gferon/pygerrit2,markon/pygerrit2,dpursehouse/pygerrit2 | pygerrit/client.py | pygerrit/client.py | """ Gerrit client interface. """
from Queue import Queue, Empty, Full
from pygerrit.error import GerritError
from pygerrit.events import GerritEventFactory
class GerritClient(object):
""" Gerrit client interface. """
def __init__(self, host):
self._factory = GerritEventFactory()
self._host = host
self._events = Queue()
def get_event(self, block=True, timeout=None):
""" Get the next event from the queue.
Return a `GerritEvent` instance, or None if:
- `block` was False and there is no event available in the queue, or
- `block` was True and no event was available within the time
specified by `timeout`.
"""
try:
return self._events.get(block, timeout)
except Empty:
return None
def put_event(self, json_data):
""" Create event from `json_data` and add it to the queue.
Raise GerritError if the queue is full, or the factory could not
create the event.
"""
try:
event = self._factory.create(json_data)
self._events.put(event)
except Full:
raise GerritError("Unable to add event: queue is full")
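
# Example usage (sketch; the JSON payload shape depends on Gerrit's
# stream-events output and on GerritEventFactory):
#   client = GerritClient("review.example.org")
#   client.put_event('{"type": "comment-added"}')   # hypothetical event data
#   event = client.get_event(timeout=5)             # None if queue stays empty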
| mit | Python |
|
10f99acc11051b37595751b9b9b84e11dd133a64 | Add functions for getting available checksums for a channel from remote and disk. | mrpau/kolibri,mrpau/kolibri,learningequality/kolibri,indirectlylit/kolibri,learningequality/kolibri,indirectlylit/kolibri,mrpau/kolibri,indirectlylit/kolibri,mrpau/kolibri,indirectlylit/kolibri,learningequality/kolibri,learningequality/kolibri | kolibri/core/content/utils/file_availability.py | kolibri/core/content/utils/file_availability.py | import json
import os
import re
import requests
from django.core.cache import cache
from kolibri.core.content.models import LocalFile
from kolibri.core.content.utils.paths import get_content_storage_dir_path
from kolibri.core.content.utils.paths import get_file_checksums_url
checksum_regex = re.compile("^([a-f0-9]{32})$")
def get_available_checksums_from_remote(channel_id, baseurl):
CACHE_KEY = "PEER_AVAILABLE_CHECKSUMS_{baseurl}_{channel_id}".format(
baseurl=baseurl, channel_id=channel_id
)
if CACHE_KEY not in cache:
response = requests.get(get_file_checksums_url(channel_id, baseurl))
checksums = None
# Do something if we got a successful return
if response.status_code == 200:
try:
checksums = json.loads(response.content)
# Filter to avoid passing in bad checksums
checksums = [
checksum for checksum in checksums if checksum_regex.match(checksum)
]
cache.set(CACHE_KEY, checksums, 3600)
except (ValueError, TypeError):
# Bad JSON parsing will throw ValueError
# If the result of the json.loads is not iterable, a TypeError will be thrown
# If we end up here, just set checksums to None to allow us to cleanly continue
pass
return cache.get(CACHE_KEY)
def get_available_checksums_from_disk(channel_id, basepath):
PER_DISK_CACHE_KEY = "DISK_AVAILABLE_CHECKSUMS_{basepath}".format(basepath=basepath)
PER_DISK_PER_CHANNEL_CACHE_KEY = "DISK_AVAILABLE_CHECKSUMS_{basepath}_{channel_id}".format(
basepath=basepath, channel_id=channel_id
)
if PER_DISK_PER_CHANNEL_CACHE_KEY not in cache:
if PER_DISK_CACHE_KEY not in cache:
content_dir = get_content_storage_dir_path(datafolder=basepath)
disk_checksums = []
for _, _, files in os.walk(content_dir):
for name in files:
checksum = os.path.splitext(name)[0]
# Only add valid checksums formatted according to our standard filename
if checksum_regex.match(checksum):
disk_checksums.append(checksum)
# Cache is per device, so a relatively long lived one should
# be fine.
cache.set(PER_DISK_CACHE_KEY, disk_checksums, 3600)
disk_checksums = set(cache.get(PER_DISK_CACHE_KEY))
channel_checksums = set(
LocalFile.objects.filter(
files__contentnode__channel_id=channel_id
).values_list("id", flat=True)
)
cache.set(
PER_DISK_PER_CHANNEL_CACHE_KEY,
channel_checksums.intersection(disk_checksums),
3600,
)
return cache.get(PER_DISK_PER_CHANNEL_CACHE_KEY)
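
# Example (sketch; the base URL and path below are illustrative):
#   remote = get_available_checksums_from_remote(channel_id, "http://peer:8080")
#   local = get_available_checksums_from_disk(channel_id, "/mnt/KOLIBRI_DATA")
#   missing = set(remote or []) - set(local or [])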
| mit | Python |
|
61ec190ca29187cbf9ad7b721fbf1936d665e4f6 | Revert "rm client.py" | icsnju/nap-core,icsnju/nap-core | orchestration/containerAPI/client.py | orchestration/containerAPI/client.py | from docker import Client as docker_client
class Client(object):
'''
Docker engine client
'''
def __init__(self, hostURL, version):
self.client = docker_client(base_url=hostURL, version=version)
self.url = hostURL
self.version = version
    def get_url(self):
        return self.url

    def get_version(self):
        return self.version
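
# Example (sketch; the engine URL and API version are illustrative):
#   engine = Client('tcp://127.0.0.1:2375', '1.24')
#   engine.client.containers()   # calls through to the docker-py client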
| apache-2.0 | Python |
|
dcc08986d4e2f0e7940f485d0ece465b1325a711 | Add barebones FileBlob class | mrorii/github_lda,mrorii/github_lda,LanternYing/github_lda,LanternYing/github_lda,mrorii/github_lda,LanternYing/github_lda,LanternYing/github_lda,mrorii/github_lda | python/fileblob.py | python/fileblob.py | #!/usr/bin/env python
import os
MEGABYTE = 1024 * 1024
class FileBlob:
def __init__(self, path):
self.path = path
def data(self):
return open(self.path).read()
def size(self):
try:
return os.path.getsize(self.path)
except os.error:
return 0
def extname(self):
_, ext = os.path.splitext(self.path)
return ext
def _mime_type(self):
pass
def mime_type(self):
pass
def content_type(self):
pass
def encoding(self):
pass
def is_binary(self):
pass
def is_text(self):
pass
def is_image(self):
        return self.extname() in ['.png', '.jpg', '.jpeg', '.gif', '.tif', '.tiff']
def is_large(self):
        return self.size() > MEGABYTE
def is_safe_to_tokenize(self):
return not self.is_large() and self.is_text() and not self.high_ratio_of_long_lines()
def high_ratio_of_long_lines(self):
if self.loc() == 0:
            return False
        return self.size() / self.loc() > 5000
def loc(self):
return len(self.lines())
def lines(self):
pass
def is_viewable(self):
pass
def line_split_character(self):
pass
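
# Minimal usage sketch (several predicates above are still stubs):
if __name__ == '__main__':
    blob = FileBlob(__file__)
    print blob.size(), blob.extname(), blob.is_large()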
| mit | Python |
|
5c1e1744fa19bf900981d6a40c69195419861357 | Add snactor sanity-check command (#564) | leapp-to/prototype,leapp-to/prototype,leapp-to/prototype,leapp-to/prototype | leapp/snactor/commands/workflow/sanity_check.py | leapp/snactor/commands/workflow/sanity_check.py | from __future__ import print_function
import sys
from leapp.exceptions import LeappError, CommandError
from leapp.logger import configure_logger
from leapp.repository.scan import find_and_scan_repositories
from leapp.snactor.commands.workflow import workflow
from leapp.utils.clicmd import command_arg
from leapp.utils.repository import requires_repository, find_repository_basedir
_DESCRIPTION = 'The following messages are attempted to be consumed before they are produced: {}'
_LONG_DESCRIPTION = '''
Perform workflow sanity checks
- check whether there is a message in the given workflow which is attempted to be consumed before it was produced
For more information please consider reading the documentation at:
https://red.ht/leapp-docs
'''
@workflow.command('sanity-check', help='Perform workflow sanity checks', description=_LONG_DESCRIPTION)
@command_arg('name')
@requires_repository
def cli(params):
configure_logger()
repository = find_and_scan_repositories(find_repository_basedir('.'), include_locals=True)
try:
repository.load()
except LeappError as exc:
sys.stderr.write(exc.message)
sys.stderr.write('\n')
sys.exit(1)
wf = repository.lookup_workflow(params.name)
if not wf:
raise CommandError('Could not find any workflow named "{}"'.format(params.name))
instance = wf()
    produced_late = set(instance.initial).intersection(set(instance.produces))
    if produced_late:
        print(_DESCRIPTION.format(' '.join([m.__name__ for m in produced_late])), file=sys.stderr, end='\n')
        sys.exit(1)
| lgpl-2.1 | Python |
|
489004c5f81b8a5a2a639bc67f3ed5008f18960a | fix the naming error of the plotting script | berkeley-stat222/mousestyles,changsiyao/mousestyles,togawa28/mousestyles | doc/source/report/plots/plot_hc_dendrogram.py | doc/source/report/plots/plot_hc_dendrogram.py | from mousestyles import data
from mousestyles.classification import clustering
from mousestyles.visualization import plot_clustering

# load data
mouse_data = data.load_all_features()

# mouse individual
mouse_dayavgstd_rsl = clustering.prep_data(
    mouse_data, melted=False, std=True, rescale=True)

# optimal parameters
method, dist = clustering.get_optimal_hc_params(mouse_day=mouse_dayavgstd_rsl)

# fit hc
sils_hc, labels_hc = clustering.fit_hc(
    mouse_day_X=mouse_dayavgstd_rsl[:, 2:],
    method=method, dist=dist, num_clusters=range(2, 17))

# plot and get the distance matrix
Z = plot_clustering.plot_dendrogram(
    mouse_day=mouse_dayavgstd_rsl, method=method, dist=dist)
| bsd-2-clause | Python |
|
37d851bb34552edfc3b1abd4d1034d4fdf46408f | Implement --remote | mhinz/neovim-remote,mhinz/neovim-remote | nvim-remote.py | nvim-remote.py | #!/usr/bin/env python3
"""
Copyright (c) 2015 Marco Hinz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
import os
import subprocess
import argparse
from neovim import attach
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--remote', action='append', help='Edit <files> in a Vim server if possible')
    parser.add_argument('--remote-silent', help="Same, don't complain if there is no server")
    parser.add_argument('--remote-wait', help='As --remote but wait for files to have been edited')
    parser.add_argument('--remote-wait-silent', help="Same, don't complain if there is no server")
    parser.add_argument('--remote-tab', help='As --remote but use tab page per file')
    parser.add_argument('--remote-send', help='Send <keys> to a Vim server and exit')
    parser.add_argument('--remote-expr', help='Evaluate <expr> in a Vim server and print result ')
    args, unused = parser.parse_known_args()

    sockpath = os.environ.get('NVIM_LISTEN_ADDRESS')
    if sockpath is None:
        sockpath = '/tmp/nvimsocket'

    try:
        nvim = attach('socket', path='/tmp/nvimsocket')
    except FileNotFoundError:
        print("""Problem: Can't find unix socket: /tmp/nvimsocket
Solution: Start a new server: NVIM_LISTEN_ADDRESS=/tmp/nvimsocket nvim""")
        sys.exit(1)
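
    # --remote: open each listed file in the already running server instance.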
    if args.remote:
        for fname in args.remote:
            nvim.command('edit {}'.format(fname))
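
    # Arguments this wrapper does not understand are handed to a locally
    # started nvim binary (paths are hardcoded to the author's dev checkout).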
    if unused:
        os.putenv('VIMRUNTIME', '/data/repo/neovim/runtime')
        subprocess.Popen(['/data/repo/neovim/build/bin/nvim'] + unused)
if __name__ == '__main__':
    main()
| mit | Python |
|
af1bb3d1eb5fa0e827d0484a620d99adbaaf207e | Select GC candiates by color | AlexaVillaume/Archive,SAGES-UCSC/Photometry,SAGES-UCSC/Photometry,AlexaVillaume/Archive | makeColorCut.py | makeColorCut.py | import math
import numpy as np
import matplotlib.pyplot as plt
from pylab import *
'''
def makePlots():
x = [1, 2, 3, 4]
y = calcY(x, 0.48, -.23)
y1 = calcY(x, 0.48, -.43)
y2 = calcY(x, 0.48, -.03)
plt.plot(x, y, linestyle='-', linewidth=3)
plt.plot(x, y1, linestyle='-', linewidth=3)
plt.plot(x, y2, linestyle='-', linewidth=3)
plt.show()
'''
def calcY(x, m, b):
y = m*x + b
return y
def simpleComp(x0, x1, y0, y1, px, py):
is_in = 0
if px > x0 and px < x1 and py > y0 and py < y1:
is_in = 1
return is_in
def realComp(px, py, m, b, var):
is_in = 0
y_top = calcY(px, m, b+var)
y_bot = calcY(px, m, b-var)
if py > y_bot and py < y_top:
is_in = 1
return is_in
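
# The selection region is a parallelogram in color-color space: points lying
# between two parallel lines of slope m whose intercepts differ by +/- var.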
def main():
    # Have the slope, y-intercept, and endpoints from M87 data
    b = -0.086
    m = 0.50
    x0 = 1.5
    x1 = 3.0
    var = 0.3

    # Find bounding points
    y_11 = calcY(x1, m, b+var)
    y_01 = calcY(x0, m, b+var)
    y_00 = calcY(x0, m, b-var)
    y_10 = calcY(x1, m, b-var)

    u_z = []
    g_z = []
    color1 = []
    color2 = []  # For selection testing

    output = open("colorCutCatalog.txt", "w")
    with open('n4459_cfht_ugiz_auto.cat', 'r') as f:
        for object in (raw.strip().split() for raw in f):
            if object[0:1][0][0] != '#':
                px = float(object[3]) - float(object[9])
                py = float(object[5]) - float(object[9])
                color1.append(px)
                color2.append(py)  # For selection testing
                # Do an initial quick test of the points
                in_bounds = simpleComp(x0, x1, y_00, y_11, px, py)
                # If the point is in the bounding box test to see if it's
                # in the parallelogram
                if in_bounds == 1:
                    yes = realComp(px, py, m, b, var)
                    # Write to new catalog
                    if yes:
                        u_z.append(float(object[3]) - float(object[9]))
                        g_z.append(float(object[5]) - float(object[9]))  # For selection testing
                        output.write("%10s" % object[0] + "%15s" % object[1] + "%15s" % object[2]
                                     + "%15s" % object[3] + "%15s" % object[4] + "%15s" % object[5]
                                     + "%15s" % object[6] + "%15s" % object[7] + "%15s" % object[8]
                                     + "%15s" % object[9] + "%15s" % object[10] + "%15s" % object[11]
                                     + "%15s" % object[12] + "\n")

    # Check the selection
    x = [1.5, 2, 2.5, 3]
    y = [calcY(x[0], m, b), calcY(x[1], m, b), calcY(x[2], m, b), calcY(x[3], m, b)]
    yt = [calcY(x[0], m, b+var), calcY(x[1], m, b+var), calcY(x[2], m, b+var), calcY(x[3], m, b+var)]
    yb = [calcY(x[0], m, b-var), calcY(x[1], m, b-var), calcY(x[2], m, b-var), calcY(x[3], m, b-var)]

    plt.plot(color1, color2, linestyle='none', marker=',', alpha=0.1)
    plt.plot(u_z, g_z, linestyle='none', marker=',', alpha=0.7)
    plt.plot(x, y, linestyle='-', linewidth=1, color='r')
    plt.plot(x, yt, linestyle='-', linewidth=1, color='r')
    plt.plot(x, yb, linestyle='-', linewidth=1, color='r')
    plt.show()

if __name__ == "__main__":
    main()
| unlicense | Python |
|
49424b855f043ae2bbb3562481493b1fa83f5090 | add random selection wip code | aaronfang/personal_scripts | af_scripts/tmp/randSelect.py | af_scripts/tmp/randSelect.py | import random as rd
list = ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z"]
randList = list
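# NOTE: this is an alias, not a copy, so shuffling randList also shuffles list.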
#print randList
div = 3
listSize = len(list)
#print listSize
numForOnePart = listSize / div
#print numForOnePart

rd.shuffle(randList)
#print randList

print [randList[i::3] for i in range(3)]
print randList | mit | Python |
|
7d4281574a9ee2a8e7642f14402a452f82a807db | Create smarthome.py | kankiri/pabiana | demos/smarthome/smarthome.py | demos/smarthome/smarthome.py | import logging
from pabiana import area
from pabiana.area import autoloop, load_interfaces, pulse, register, scheduling, subscribe
from pabiana.node import create_publisher, run
NAME = 'smarthome'
publisher = None
# Triggers
@register
def increase_temp():
    area.context['temperature'] += 0.25
    autoloop(increase_temp)


@register
def lower_temp():
    area.context['temperature'] -= 0.25
    autoloop(lower_temp)


@register
def keep_temp():
    pass


@register
def window(open):
    area.context['window-open'] = open


# Reactions
@scheduling
def schedule():
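    # Resolve conflicting trigger demands: keep_temp overrides both temperature
    # triggers, and lower_temp takes precedence over increase_temp.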
    if keep_temp in area.demand:
        area.demand.pop(increase_temp, None)
        area.demand.pop(lower_temp, None)
    elif lower_temp in area.demand:
        area.demand.pop(increase_temp, None)
@pulse
def publish():
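    # Broadcast the current state on every 8th clock tick.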
    if area.clock % 8 == 0:
        publisher.send_json({
            'temperature': area.context['temperature'],
            'window-open': area.context['window-open']
        })
if __name__ == '__main__':
    logging.basicConfig(
        format='%(asctime)s %(levelname)s %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
        level=logging.DEBUG
    )
    load_interfaces('interfaces.json')
    subscribe([], 'pulse', '01')
    publisher = create_publisher(own_name=NAME, host='0.0.0.0')
    area.context['temperature'] = 18
    area.context['window-open'] = False
    run(own_name=NAME, host='0.0.0.0')
| mit | Python |
|
a0493ff48b96056709880804f61e794621886c61 | Add CoNLL reader tests | vene/comparison-pattern | compattern/dependency/tests/test_conll.py | compattern/dependency/tests/test_conll.py | # encoding: utf8
from compattern.dependency import conll
def test_read_french():
"""Test that conll.read understands French Bonsai output"""
line = (u"6\tchauffé\tchauffer\tV\tVPP\tg=m|m=part|n=s|t=past\t"
u"1100011\t5\tdep_coord\t_\t_")
sentence = conll.read([line, '\n'])[0]
assert len(sentence) == 1
token = sentence[0]
assert token.id == 6
assert token.lemma == "chauffer"
assert token.cpos == "V"
assert token.pos == "VPP"
assert token.feat[0].startswith("g=m") # morpho features
assert token.feat[1].startswith("110") # cluster path
assert token.head == 5
assert token.deprel == "dep_coord"
# Don't really care what happens with undefined phead and pdeprel
def test_read_turboparser():
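    """Test that conll.read understands TurboParser output"""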
line = "11\tvaccines\tvaccine\tNNS\tNNS\t_\t10\tPMOD"
sentence = conll.read([line, '\n'])[0]
assert len(sentence) == 1
token = sentence[0]
assert token.id == 11
assert token.form == "vaccines"
assert token.lemma == "vaccine"
assert token.cpos == "NNS"
assert token.pos == "NNS"
assert token.head == 10
assert token.deprel == "PMOD"
def test_read_wacky():
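    """Test that conll.read understands WaCky-style output"""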
line = "was\tbe\tVBD\t18\t11\tPRD"
sentence = conll.read([line, '\n'])[0]
assert len(sentence) == 1
token = sentence[0]
assert token.id == 18
assert token.form == "was"
assert token.lemma == "be"
assert token.pos == "VBD"
assert token.head == 11
assert token.deprel == "PRD"
| bsd-3-clause | Python |