commit (stringlengths 40-40) | subject (stringlengths 4-1.73k) | repos (stringlengths 5-127k) | old_file (stringlengths 2-751) | new_file (stringlengths 2-751) | new_contents (stringlengths 1-8.98k) | old_contents (stringlengths 0-6.59k) | license (stringclasses 13 values) | lang (stringclasses 23 values)
---|---|---|---|---|---|---|---|---|
a737126f8f8bcac1a00999f9e5c2a23bca9efd0d | Create hamming.py | mikejthomas/biote100_pset2 | hamming.py | hamming.py | #Python Problem 2
#hamming.py
#Introduction to Bioinformatics Assignment 2
#Purpose:Calculate Hamming Distance
#Your Name: Michael Thomas
#Date: 10/10/15
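#Background note: the Hamming distance dh(s,t) counts the positions at which
#two equal-length strings differ, e.g. dh("AGGA", "ACGA") = 1 (index 1 only).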
#stores 3 database sequences
seqList = ["AGGATACAGCGGCTTCTGCGCGACAAATAAGAGCTCCTTGTAAAGCGCCAAAAAAAGCCTCTCGGTCTGTGGCAGCAGCGTTGGCCCGGCCCCGGGAGCGGAGAGCGAGGGGAGGCAGATTCGGAGGAAGGTCTGAAAAG",
"AAAATACAGGGGGTTCTGCGCGACTTATGGGAGCTCCTTGTGCGGCGCCATTTTAAGCCTCACAGACTATGGCAGCAGCGTTGGCCCGGCAAAAGGAGCGGAGAGCGAGGGGAGGCGGAGACGGACGAAGGTCTGAGCAG",
"CCCATACAGCCGCTCCTCCGCGACTTATAAGAGCTCCTTGTGCGGCGCCATTTTAAGCCTCTCGGTCTGTGGCAGCAGCGTTGGCCCGCCCAAAACAGCGGAGAGCGAGGGGAGGCGGAGACGGAGGAAGGTCTGAGCAG"]
#your query sequence
s1 = "AGGATACAGCGGCTTCTGCGCGACTTATAAGAGCTCCTTGTGCGGCGCCATTTTAAGCCTCTCGGTCTGTGGCAGCAGCGTTGGCCCGGCCCCGGGAGCGGAGAGCGAGGGGAGGCGGAGACGGAGGAAGGTCTGAGGAG"
count=[0,0,0]
#outer loop to go through seqList[]
for i in range(len(seqList)):
    #save each string to iterate through on secondary loop
    seqi = seqList[i]
    #checks for non-matches between s1 and seqi and increments count
    for j in range(len(s1)):
        if s1[j] != seqi[j]:
            count[i] = count[i] + 1
#Results
#hamming distance for each sequence
print "The Hamming distance dh(s1,seqList[0]) =", count[0]
print "The Hamming distance dh(s1,seqList[1]) = ", count[1]
print "The Hamming distance dh(s1,seqList[2]) = ", count[2]
| mit | Python |
|
7b73c957ad52f9b846955b96b7cc6d0938587bb3 | Add 3rd order covariance | synergetics/spectrum | src/conventional/cum3est.py | src/conventional/cum3est.py | #!/usr/bin/env python
from __future__ import division
import numpy as np
from scipy.linalg import hankel
import scipy.io as sio
import matplotlib.pyplot as plt
from tools import *
def cum3est(y, maxlag, nsamp, overlap, flag, k1):
    """
    CUM3EST Third-order cumulants.
    Should be invoked via "CUMEST" for proper parameter checks.

    Parameters:
        y: input data vector (column)
        maxlag: maximum lag to be computed
        nsamp: samples per segment
        overlap: percentage overlap of segments
        flag: 'biased', biased estimates are computed [default]
              'unbiased', unbiased estimates are computed.
        k1: the fixed lag in c3(m,k1): see below

    Output:
        y_cum: estimated third-order cumulant,
               C3(m,k1)  -maxlag <= m <= maxlag
    """
    (n1, n2) = np.shape(y)
    N = n1*n2
    minlag = -maxlag
    overlap = np.fix(overlap/100 * nsamp)
    nrecord = int(np.fix((N - overlap)/(nsamp - overlap)))
    nadvance = nsamp - overlap

    y_cum = np.zeros([maxlag-minlag+1, 1])
    # indices of the current segment; the original left this uninitialised
    # (its unused 'nd' variable was almost certainly meant to seed it)
    ind = np.arange(nsamp)
    # 2*maxlag+1 lags, so that 'scale' matches the length of y_cum
    nlags = 2*maxlag + 1
    # with 0-based indexing the zero lag sits at index maxlag
    zlag = maxlag
    if flag == 'biased':
        scale = np.ones([nlags, 1])/nsamp
    else:
        lsamp = nsamp - abs(k1)
        # ranges extended by one entry so the result has 2*maxlag+1 elements
        scale = make_arr((range(lsamp-maxlag, lsamp+1),
                          range(lsamp-1, lsamp-maxlag-1, -1)), axis=1)
        (m2, n2) = scale.shape
        scale = np.ones([m2, n2]) / scale

    y = y.ravel(order='F')
    for i in xrange(nrecord):
        x = y[ind]
        x = x.ravel(order='F') - np.mean(x)
        cx = np.conj(x)
        z = x * 0

        # create the "IV" matrix: offset for second lag
        # (start index 0 mirrors the k1 <= 0 branch; 'q' was undefined)
        if k1 > 0:
            z[0:nsamp-k1] = x[0:nsamp-k1] * cx[k1:nsamp]
        else:
            z[-k1:nsamp] = x[-k1:nsamp] * cx[0:nsamp+k1]

        # compute third-order cumulants (dot products over the segment)
        y_cum[zlag] = y_cum[zlag] + np.dot(z, x)
        for k in xrange(1, maxlag+1):
            y_cum[zlag-k] = y_cum[zlag-k] + np.dot(z[k:nsamp], x[0:nsamp-k])
            y_cum[zlag+k] = y_cum[zlag+k] + np.dot(z[0:nsamp-k], x[k:nsamp])

        ind = ind + int(nadvance)

    y_cum = y_cum * scale/nrecord
    return y_cum
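# A minimal usage sketch (illustrative values only, not part of the module):
# estimate the third-order cumulant slice C3(m, 0) of a random column vector.
#
#   y = np.random.randn(1024).reshape(-1, 1)
#   c3 = cum3est(y, maxlag=10, nsamp=256, overlap=0, flag='biased', k1=0)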
| mit | Python |
|
ded893c34db0c6de521e6d735d6fce30f16f3a51 | Add WSGI file. | Osmose/noodleamp,Osmose/noodleamp | noodleamp.wsgi | noodleamp.wsgi | import os
import pwd
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
def path(*paths):
    return os.path.join(BASE_DIR, *paths)
os.environ['NOODLEAMP_CONFIG'] = path('settings_local.py')
# http://code.google.com/p/modwsgi/wiki/ApplicationIssues#User_HOME_Environment_Variable
os.environ['HOME'] = pwd.getpwuid(os.getuid()).pw_dir
activate_this = path('venv/bin/activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
BASE_DIR = os.path.join(os.path.dirname(__file__))
if BASE_DIR not in sys.path:
    sys.path.append(BASE_DIR)
from noodleamp.server import app as application
| mit | Python |
|
33393fcfcca30edafcf06df53550f4985033c459 | Add numba error module | stuartarchibald/numba,cpcloud/numba,pombredanne/numba,stuartarchibald/numba,pombredanne/numba,stonebig/numba,cpcloud/numba,sklam/numba,IntelLabs/numba,seibert/numba,pitrou/numba,stefanseefeld/numba,seibert/numba,ssarangi/numba,seibert/numba,gmarkall/numba,gmarkall/numba,stuartarchibald/numba,sklam/numba,stefanseefeld/numba,shiquanwang/numba,pitrou/numba,numba/numba,seibert/numba,ssarangi/numba,pitrou/numba,seibert/numba,sklam/numba,sklam/numba,ssarangi/numba,numba/numba,gmarkall/numba,stefanseefeld/numba,gdementen/numba,shiquanwang/numba,stonebig/numba,jriehl/numba,numba/numba,stuartarchibald/numba,GaZ3ll3/numba,stefanseefeld/numba,GaZ3ll3/numba,jriehl/numba,cpcloud/numba,GaZ3ll3/numba,cpcloud/numba,pombredanne/numba,jriehl/numba,IntelLabs/numba,stonebig/numba,ssarangi/numba,jriehl/numba,gdementen/numba,IntelLabs/numba,shiquanwang/numba,numba/numba,numba/numba,pombredanne/numba,gmarkall/numba,pitrou/numba,stonebig/numba,GaZ3ll3/numba,stefanseefeld/numba,pitrou/numba,stonebig/numba,IntelLabs/numba,stuartarchibald/numba,gmarkall/numba,cpcloud/numba,jriehl/numba,pombredanne/numba,ssarangi/numba,GaZ3ll3/numba,gdementen/numba,sklam/numba,gdementen/numba,IntelLabs/numba,gdementen/numba | numba/error.py | numba/error.py | class NumbaError(Exception):
"Some error happened during compilation" | bsd-2-clause | Python |
|
e8a5720c6959a3166c1c8a373ef00a390b89ac22 | Add rasp2geotiff script | TobiasLohner/rasp2geotiff | rasp2geotiff.py | rasp2geotiff.py | #!/usr/bin/env python
import xcsoar
import osr
import gdal
import numpy as np
import os, sys
import math
idx_min_x = idx_max_x = idx_min_y = idx_max_y = 0
spa_x = spa_y = 0
lat_0 = lat_1 = lon_0 = 0
lat_c = lon_c = 0
raster_data = None
def get_parameters(line):
    global idx_min_x, idx_max_x, idx_min_y, idx_max_y, spa_x, spa_y, lat_0, lat_1, lon_0, lat_c, lon_c
    splitted = line.split(' ')
    i = 0
    while splitted[i] != 'Indexs=':
        i += 1
    idx_min_x = int(splitted[i + 1])
    idx_max_x = int(splitted[i + 2])
    idx_min_y = int(splitted[i + 3])
    idx_max_y = int(splitted[i + 4])
    i = 0
    while splitted[i] != 'Proj=':
        i += 1
    if splitted[i + 1] != 'lambert':
        print "Error - no lambert projection found..."
        return
    spa_x = float(splitted[i + 2])
    spa_y = float(splitted[i + 3])
    lat_0 = float(splitted[i + 4])
    lat_1 = float(splitted[i + 5])
    lon_0 = float(splitted[i + 6])
    lat_c = float(splitted[i + 7])
    lon_c = float(splitted[i + 8])
def read_data(line, idx):
    splitted = line.split(' ')
    if len(splitted) != idx_max_x - idx_min_x + 1:
        print "Error - grid resolution wrong?!?"
        return
    for i in range(len(splitted)):
        raster_data[(idx_max_y - idx_min_y) - idx - 1, i] = float(splitted[i])
        #raster_data[idx, i] = float(splitted[i])
i = 0
for line in open(sys.argv[1]):
    i += 1
    if line == '---':
        continue
    if line.startswith('Model='):
        get_parameters(line)
        raster_data = np.zeros((idx_max_x - idx_min_x + 1, idx_max_y - idx_min_y + 1), dtype=np.float32)
    if i >= 5:
        read_data(line, i - 5)
lcc = osr.SpatialReference()
lcc.ImportFromProj4("+proj=lcc +lat_1=" + str(lat_1) + " +lat_0=" + str(lat_0) + " +lon_0=" + str(lon_0) + " +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs")
epsg4326 = osr.SpatialReference()
epsg4326.ImportFromEPSG(4326)
epsg4326_to_lcc = osr.CoordinateTransformation(epsg4326, lcc)
width = (idx_max_x - idx_min_x) + 1
height = (idx_max_y - idx_min_y) + 1
center_lcc = epsg4326_to_lcc.TransformPoint(lon_c, lat_c)
geotransform = [center_lcc[0] - width * spa_x / 2, spa_x, 0, center_lcc[1] + height * spa_y / 2, 0, -spa_y]
driver = gdal.GetDriverByName('GTiff')
dst_ds = driver.Create(sys.argv[1] + ".tiff", width, height, 1, gdal.GDT_Float32)
dst_ds.SetProjection(lcc.ExportToWkt())
dst_ds.SetGeoTransform(geotransform)
dst_ds.GetRasterBand(1).WriteArray(raster_data)
dst_ds = None
| unlicense | Python |
|
c1e801798d3b7e8d4c9ba8a11f79ffa92bf182f5 | Add test cases for the logger | thombashi/pingparsing,thombashi/pingparsing | test/test_logger.py | test/test_logger.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
from __future__ import unicode_literals
import logbook
from pingparsing import (
set_logger,
set_log_level,
)
import pytest
class Test_set_logger(object):

    @pytest.mark.parametrize(["value"], [
        [True],
        [False],
    ])
    def test_smoke(self, value):
        set_logger(value)


class Test_set_log_level(object):

    @pytest.mark.parametrize(["value"], [
        [logbook.CRITICAL],
        [logbook.ERROR],
        [logbook.WARNING],
        [logbook.NOTICE],
        [logbook.INFO],
        [logbook.DEBUG],
        [logbook.TRACE],
        [logbook.NOTSET],
    ])
    def test_smoke(self, value):
        set_log_level(value)

    @pytest.mark.parametrize(["value", "expected"], [
        [None, LookupError],
        ["unexpected", LookupError],
    ])
    def test_exception(self, value, expected):
        with pytest.raises(expected):
            set_log_level(value)
| mit | Python |
|
c559cdd34a2dc8f3129c1fed5235291f22329368 | install crontab | ralokt/ubuntu-cleanup-annoyifier | install_crontab.py | install_crontab.py | #!/usr/bin/python2
from crontab import CronTab
import sys
CRONTAB_TAG = "ubuntu-cleanup-annoifier"
def install_cron():
    my_cron = CronTab(user=True)
    # job = my_cron.new(command=executable_path(args))
    job = my_cron.new(command="dummy123")
    job.minute.on(0)
    job.hour.on(0)
    job.enable()
    job.set_comment(CRONTAB_TAG)
    my_cron.write_to_user(user=True)


def uninstall_cron():
    my_cron = CronTab(user=True)
    my_cron.remove_all(comment=CRONTAB_TAG)
    my_cron.write_to_user(user=True)


if __name__ == "__main__":
    if sys.argv[1] == "i":
        install_cron()
    elif sys.argv[1] == "u":
        uninstall_cron()
| mit | Python |
|
ca8a7320cbec1d4fa71ec5a7f909908b8765f573 | Allow underscores for release tags (#4976) | GoogleCloudPlatform/gcloud-python,tseaver/google-cloud-python,tseaver/gcloud-python,tswast/google-cloud-python,tseaver/google-cloud-python,GoogleCloudPlatform/gcloud-python,jonparrott/gcloud-python,jonparrott/google-cloud-python,dhermes/google-cloud-python,dhermes/gcloud-python,tseaver/gcloud-python,googleapis/google-cloud-python,dhermes/google-cloud-python,tswast/google-cloud-python,tseaver/google-cloud-python,dhermes/google-cloud-python,jonparrott/gcloud-python,tswast/google-cloud-python,googleapis/google-cloud-python,dhermes/gcloud-python,jonparrott/google-cloud-python | test_utils/scripts/circleci/get_tagged_package.py | test_utils/scripts/circleci/get_tagged_package.py | # Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper to determine package from tag.
Get the current package directory corresponding to the Circle Tag.
"""
from __future__ import print_function
import os
import re
import sys
TAG_RE = re.compile(r"""
    ^
    (?P<pkg>
        (([a-z]+)[_-])*)             # pkg-name-with-hyphens-or-underscores (empty allowed)
    ([0-9]+)\.([0-9]+)\.([0-9]+)     # Version x.y.z (x, y, z all ints)
    $
""", re.VERBOSE)
TAG_ENV = 'CIRCLE_TAG'
ERROR_MSG = '%s env. var. not set' % (TAG_ENV,)
BAD_TAG_MSG = 'Invalid tag name: %s. Expected pkg-name-x.y.z'
CIRCLE_CI_SCRIPTS_DIR = os.path.dirname(__file__)
ROOT_DIR = os.path.realpath(
os.path.join(CIRCLE_CI_SCRIPTS_DIR, '..', '..', '..'))
def main():
"""Get the current package directory.
Prints the package directory out so callers can consume it.
"""
if TAG_ENV not in os.environ:
print(ERROR_MSG, file=sys.stderr)
sys.exit(1)
tag_name = os.environ[TAG_ENV]
match = TAG_RE.match(tag_name)
if match is None:
print(BAD_TAG_MSG % (tag_name,), file=sys.stderr)
sys.exit(1)
pkg_name = match.group('pkg')
if pkg_name is None:
print(ROOT_DIR)
else:
pkg_dir = pkg_name.rstrip('-').replace('-', '_')
print(os.path.join(ROOT_DIR, pkg_dir))
if __name__ == '__main__':
main()
| # Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper to determine package from tag.
Get the current package directory corresponding to the Circle Tag.
"""
from __future__ import print_function
import os
import re
import sys
TAG_RE = re.compile(r"""
    ^
    (?P<pkg>
        (([a-z]+)-)*)                # pkg-name-with-hyphens- (empty allowed)
    ([0-9]+)\.([0-9]+)\.([0-9]+)     # Version x.y.z (x, y, z all ints)
    $
""", re.VERBOSE)
TAG_ENV = 'CIRCLE_TAG'
ERROR_MSG = '%s env. var. not set' % (TAG_ENV,)
BAD_TAG_MSG = 'Invalid tag name: %s. Expected pkg-name-x.y.z'
CIRCLE_CI_SCRIPTS_DIR = os.path.dirname(__file__)
ROOT_DIR = os.path.realpath(
os.path.join(CIRCLE_CI_SCRIPTS_DIR, '..', '..', '..'))
def main():
"""Get the current package directory.
Prints the package directory out so callers can consume it.
"""
if TAG_ENV not in os.environ:
print(ERROR_MSG, file=sys.stderr)
sys.exit(1)
tag_name = os.environ[TAG_ENV]
match = TAG_RE.match(tag_name)
if match is None:
print(BAD_TAG_MSG % (tag_name,), file=sys.stderr)
sys.exit(1)
pkg_name = match.group('pkg')
if pkg_name is None:
print(ROOT_DIR)
else:
pkg_dir = pkg_name.rstrip('-').replace('-', '_')
print(os.path.join(ROOT_DIR, pkg_dir))
if __name__ == '__main__':
main()
| apache-2.0 | Python |
ee679b745e955e3d555b49500ae2d09aa3336abb | Add a util function for SNMP | whowutwut/confluent,jjohnson42/confluent,jjohnson42/confluent,jjohnson42/confluent,whowutwut/confluent,whowutwut/confluent,xcat2/confluent,whowutwut/confluent,jjohnson42/confluent,xcat2/confluent,xcat2/confluent,xcat2/confluent,jjohnson42/confluent,xcat2/confluent | confluent_server/confluent/snmputil.py | confluent_server/confluent/snmputil.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2016 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This provides a simplified wrapper around snmp implementation roughly
# mapping to the net-snmp commands
# net-snmp-python was considered as the API is cleaner, but the ability to
# patch pysnmp to have it be eventlet friendly has caused it's selection
# This module simplifies the complex hlapi pysnmp interface
import confluent.exceptions as exc
import eventlet
from eventlet.support.greendns import getaddrinfo
import socket
snmp = eventlet.import_patched('pysnmp.hlapi')
def _get_transport(name):
    # Annoyingly, pysnmp does not automatically determine ipv6 v ipv4
    res = getaddrinfo(name, 161, 0, socket.SOCK_DGRAM)
    if res[0][0] == socket.AF_INET6:
        return snmp.Udp6TransportTarget(res[0][4])
    else:
        return snmp.UdpTransportTarget(res[0][4])
def walk(server, oid, secret, username=None, context=None):
"""Walk over children of a given OID
This is roughly equivalent to snmpwalk. It will automatically try to be
an snmpbulkwalk if possible. If username is not given, it is assumed that
the secret is a community string, and v2c is used. If a username given,
it'll assume SHA auth and DES privacy with the secret being the same for
both.
:param server: The network name/address to target
:param oid: The SNMP object identifier
:param secret: The community string or password
:param username: The username for SNMPv3
:param context: The SNMPv3 context or index for community string indexing
"""
# SNMP is a complicated mess of things. Will endeavor to shield caller
# from as much as possible, assuming reasonable defaults where possible.
# there may come a time where we add more parameters to override the
# automatic behavior (e.g. DES is weak, so it's a likely candidate to be
# overriden, but some devices only support DES)
tp = _get_transport(server)
ctx = snmp.ContextData(context)
if '::' in oid:
mib, field = oid.split('::')
obj = snmp.ObjectType(snmp.ObjectIdentity(mib, field))
else:
obj = snmp.ObjectType(snmp.ObjectIdentity(oid))
eng = snmp.SnmpEngine()
if username is None:
# SNMP v2c
authdata = snmp.CommunityData(secret, mpModel=1)
else:
authdata = snmp.UsmUserData(username, authKey=secret, privKey=secret)
walking = snmp.bulkCmd(eng, authdata, tp, ctx, 0, 10, obj,
lexicographicMode=False)
for rsp in walking:
errstr, errnum, erridx, answers = rsp
if errstr:
raise exc.TargetEndpointUnreachable(str(errstr))
elif errnum:
raise exc.ConfluentException(errnum.prettyPrint())
for ans in answers:
yield ans
if __name__ == '__main__':
    import sys
    for kp in walk(sys.argv[1], sys.argv[2], 'public'):
        print(str(kp[0]))
        print(str(kp[1]))
| apache-2.0 | Python |
|
74e24debf55b003f1d56d35f4b040d91a0698e0a | Add example for cluster centroids method | dvro/UnbalancedDataset,dvro/imbalanced-learn,fmfn/UnbalancedDataset,scikit-learn-contrib/imbalanced-learn,scikit-learn-contrib/imbalanced-learn,glemaitre/UnbalancedDataset,dvro/UnbalancedDataset,dvro/imbalanced-learn,fmfn/UnbalancedDataset,glemaitre/UnbalancedDataset | example/under-sampling/plot_cluster_centroids.py | example/under-sampling/plot_cluster_centroids.py | """
=================
Cluster centroids
=================
An illustration of the cluster centroids method.
"""
print(__doc__)
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
# Define some color for the plotting
almost_black = '#262626'
palette = sns.color_palette()
from sklearn.datasets import make_classification
from sklearn.decomposition import PCA
from unbalanced_dataset.under_sampling import ClusterCentroids
# Generate the dataset
X, y = make_classification(n_classes=2, class_sep=2, weights=[0.1, 0.9],
n_informative=3, n_redundant=1, flip_y=0,
n_features=20, n_clusters_per_class=1,
n_samples=5000, random_state=10)
# Instanciate a PCA object for the sake of easy visualisation
pca = PCA(n_components=2)
# Fit and transform x to visualise inside a 2D feature space
X_vis = pca.fit_transform(X)
# Apply the random under-sampling
cc = ClusterCentroids()
X_resampled, y_resampled = cc.fit_transform(X, y)
X_res_vis = pca.transform(X_resampled)
# Two subplots, unpack the axes array immediately
f, (ax1, ax2) = plt.subplots(1, 2)
ax1.scatter(X_vis[y == 0, 0], X_vis[y == 0, 1], label="Class #0", alpha=0.5,
edgecolor=almost_black, facecolor=palette[0], linewidth=0.15)
ax1.scatter(X_vis[y == 1, 0], X_vis[y == 1, 1], label="Class #1", alpha=0.5,
edgecolor=almost_black, facecolor=palette[2], linewidth=0.15)
ax1.set_title('Original set')
ax2.scatter(X_res_vis[y_resampled == 0, 0], X_res_vis[y_resampled == 0, 1],
label="Class #0", alpha=.5, edgecolor=almost_black,
facecolor=palette[0], linewidth=0.15)
ax2.scatter(X_res_vis[y_resampled == 1, 0], X_res_vis[y_resampled == 1, 1],
label="Class #1", alpha=.5, edgecolor=almost_black,
facecolor=palette[2], linewidth=0.15)
ax2.set_title('Cluster centroids')
plt.show()
| mit | Python |
|
25cd25dab4de9e6963ffa622474b3f0bdcdc1e48 | Create preprocessor.py | zahllang/zahl | interpreter/preprocessor.py | interpreter/preprocessor.py | mit | Python |
||
0c1ccd5180601d3ed3f5dc98b3330d40c014f7c0 | Add simul. (#3300) | krafczyk/spack,mfherbst/spack,EmreAtes/spack,skosukhin/spack,EmreAtes/spack,LLNL/spack,matthiasdiener/spack,krafczyk/spack,lgarren/spack,skosukhin/spack,lgarren/spack,krafczyk/spack,TheTimmy/spack,matthiasdiener/spack,iulian787/spack,iulian787/spack,tmerrick1/spack,skosukhin/spack,tmerrick1/spack,LLNL/spack,EmreAtes/spack,lgarren/spack,skosukhin/spack,matthiasdiener/spack,mfherbst/spack,LLNL/spack,TheTimmy/spack,mfherbst/spack,EmreAtes/spack,matthiasdiener/spack,LLNL/spack,lgarren/spack,lgarren/spack,tmerrick1/spack,iulian787/spack,mfherbst/spack,skosukhin/spack,mfherbst/spack,krafczyk/spack,krafczyk/spack,LLNL/spack,TheTimmy/spack,tmerrick1/spack,EmreAtes/spack,tmerrick1/spack,TheTimmy/spack,matthiasdiener/spack,TheTimmy/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/simul/package.py | var/spack/repos/builtin/packages/simul/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Simul(Package):
"""simul is an MPI coordinated test of parallel
filesystem system calls and library functions. """
homepage = "https://github.com/LLNL/simul"
url = "https://github.com/LLNL/simul/archive/1.16.tar.gz"
version('1.16', 'd616c1046a170c1e1b7956c402d23a95')
version('1.15', 'a5744673c094a87c05c6f0799d1f496f')
version('1.14', 'f8c14f0bac15741e2af354e3f9a0e30f')
version('1.13', '8a80a62d569557715d6c9c326e39a8ef')
depends_on('mpi')
def install(self, spec, prefix):
make('simul')
mkdirp(prefix.bin)
install('simul', prefix.bin)
| lgpl-2.1 | Python |
|
6cb953dc01a77bc549c53cc325a741d1952ed6b6 | Bump FIDO version to 1.3.12 | artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin | fpr/migrations/0025_update_fido_1312.py | fpr/migrations/0025_update_fido_1312.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def data_migration_up(apps, schema_editor):
"""
Update identification tool FIDO to 1.3.12, correcting a
character-spacing issue bug identified in PRONOM94 (again)
"""
idtool = apps.get_model('fpr', 'IDTool')
idcommand = apps.get_model('fpr', 'IDCommand')
# Update Fido tool
idtool.objects\
.filter(uuid='c33c9d4d-121f-4db1-aa31-3d248c705e44')\
.update(version='1.3.12', slug='fido-1312')
# Create new command using the new version of Fido
old_fido_command = idcommand.objects\
.get(uuid='e586f750-6230-42d7-8d12-1e24ca2aa658')
idcommand.objects.create(
uuid='213d1589-c255-474f-81ac-f0a618181e40',
description=u'Identify using Fido 1.3.12',
config=old_fido_command.config,
script=old_fido_command.script,
script_type=old_fido_command.script_type,
tool=idtool.objects.get(uuid='c33c9d4d-121f-4db1-aa31-3d248c705e44'),
enabled=True
)
old_fido_command.enabled = False
old_fido_command.save()
def data_migration_down(apps, schema_editor):
"""
Revert FIDO to previous version
"""
idtool = apps.get_model('fpr', 'IDTool')
idcommand = apps.get_model('fpr', 'IDCommand')
# Remove new ID Commands
idcommand.objects\
.filter(uuid='213d1589-c255-474f-81ac-f0a618181e40').delete()
# Revert Fido tool
idtool.objects\
.filter(uuid='c33c9d4d-121f-4db1-aa31-3d248c705e44')\
.update(version='1.3.10', slug='fido-1310')
# Restore Fido command
idcommand.objects\
.filter(uuid='e586f750-6230-42d7-8d12-1e24ca2aa658')\
.update(enabled=True)
class Migration(migrations.Migration):
dependencies = [
('fpr', '0024_update_fido'),
]
operations = [
migrations.RunPython(data_migration_up, data_migration_down),
]
| agpl-3.0 | Python |
|
8fa9a54c9a5ee683fc9e9d361a4eb7affe5e83ed | Add functions to paint game of life to screen | akud/stem-club-presentation,akud/stem-club-presentation,akud/stem-club-presentation | game_of_life.py | game_of_life.py | #!/usr/bin/env python
from curses import wrapper
from time import sleep
def enumerate_lines(matrix):
    on = '*'
    off = ' '
    for i, row in enumerate(matrix):
        yield i, ''.join(on if v else off for v in row)
def paint(stdscr, matrix):
    stdscr.clear()
    for i, line in enumerate_lines(matrix):
        stdscr.addstr(i, 0, line)
    stdscr.refresh()
size = 50
m1 = [
[i == j or i == size - j for j in xrange(0, size + 1)]
for i in xrange(0, size + 1)
]
m2 = [
[i == size / 2 or j == size / 2 for j in xrange(0, size + 1)]
for i in xrange(0, size + 1)
]
def main(stdscr):
    for i in xrange(0, 100):
        matrix = m1 if i % 2 else m2
        paint(stdscr, matrix)
        sleep(0.5)
    stdscr.getkey()
wrapper(main)
| mit | Python |
|
ff8cee4f98dde0533751dfd15308c5fdfdec3982 | test file for rapid iteration | ClimbsRocks/auto_ml | tests/quick_test.py | tests/quick_test.py | """
nosetests -sv --nologcapture tests/quick_test.py
"""
import datetime
import os
import random
import sys
sys.path = [os.path.abspath(os.path.dirname(__file__))] + sys.path
os.environ['is_test_suite'] = 'True'
os.environ['KERAS_BACKEND'] = 'theano'
from auto_ml import Predictor
from auto_ml.utils_models import load_ml_model
from nose.tools import assert_equal, assert_not_equal, with_setup
from sklearn.metrics import accuracy_score
import dill
import numpy as np
import utils_testing as utils
# def regression_test():
# # a random seed of 42 has ExtraTreesRegressor getting the best CV score, and that model doesn't generalize as well as GradientBoostingRegressor.
# np.random.seed(0)
# df_boston_train, df_boston_test = utils.get_boston_regression_dataset()
# column_descriptions = {
# 'MEDV': 'output'
# , 'CHAS': 'categorical'
# }
# ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions)
# ml_predictor.train(df_boston_train, model_names=['DeepLearningRegressor'])
# test_score = ml_predictor.score(df_boston_test, df_boston_test.MEDV)
# print('test_score')
# print(test_score)
# assert -3.35 < test_score < -2.8
def classification_test(model_name=None):
    np.random.seed(0)

    df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()

    column_descriptions = {
        'survived': 'output'
        , 'embarked': 'categorical'
        , 'pclass': 'categorical'
    }

    ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
    ml_predictor.train(df_titanic_train, model_names=['DeepLearningClassifier'])

    test_score = ml_predictor.score(df_titanic_test, df_titanic_test.survived)

    print('test_score')
    print(test_score)

    assert -0.215 < test_score < -0.17
| mit | Python |
|
c98a744f5f436ae2c6266a7bb5d32173cfd0e4a9 | Add a script that scrapes the Socrata catalog, just in case we need that in another format | opensmc/service-locator,opensmc/service-locator,opensmc/service-locator | scripts/socrata_scraper.py | scripts/socrata_scraper.py | #!/usr/bin/python3
"""
This is a basic script that downloads the catalog data from the smcgov.org
website and pulls out information about all the datasets.
This is in python3
There is an optional download_all argument that will allow you to download
all of the datasets individually and in their entirety. I have included this
as a demonstration, but it should not be commonly used because it takes a
while and beats up on the smcgov data portal, which you should avoid.
"""
import sys
import json
import argparse
import collections
import urllib.request
URL = "https://data.smcgov.org/api/catalog?limit=999999999&only=datasets"
def main(args):
    category_data = collections.defaultdict(list)
    domain_data = collections.defaultdict(list)
    data_downloads = []
    datasets_with_location = []

    with urllib.request.urlopen(URL) as raw_data:
        data = json.loads(raw_data.read().decode('utf-8'))
        for result in data['results']:
            categories = result['classification']['categories']
            domain = result['classification']['domain_category']
            if categories is None or categories == []:
                categories = ['NULL']
            permalink = result['permalink']
            data_downloads.append('{}.json'.format(permalink))
            domain_data[domain].append(permalink)
            for category in categories:
                category_data[category].append(permalink)

    if args.download_all:
        for download_url in data_downloads:
            with urllib.request.urlopen(download_url) as dataset_file:
                print('Downloading {}'.format(download_url))
                dataset = json.loads(dataset_file.read().decode('utf-8'))
                if len(dataset) < 1:
                    continue
                if 'location_1' in dataset[0].keys():
                    # Our best guess on which datasets have location info.
                    datasets_with_location.append(download_url)

    if args.download_all:
        print('Datasets with location_1 key')
        print(datasets_with_location)
        print('----------------------------------------------------')

    print('Number of Datasets by Category')
    for key, values in category_data.items():
        print(key, len(values))
    print('----------------------------------------------------')
    print('Number of Datasets by Domain')
    for key, values in domain_data.items():
        print(key, len(values))
if __name__=='__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--download_all', help='Download all datasets',
                        action='store_true')
    args = parser.parse_args()
    main(args=args)
| mit | Python |
|
0c11d2740e561586bb4f9d2b67bda2ccc87e146e | Add new command to notify New Relic of deployment | infoxchange/ixdjango | ixdjango/management/commands/newrelic_notify_deploy.py | ixdjango/management/commands/newrelic_notify_deploy.py | """
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import pwd
import os
from subprocess import Popen, PIPE
from urllib import urlencode
from httplib2 import Http
from django.conf import settings
from django.core.management.base import NoArgsCommand
import newrelic.agent
class Command(NoArgsCommand):
"""
Loads the fixtures contained inside IX_FIXTURES setting variable.
See http://redmine.office.infoxchange.net.au/issues/8376
"""
URL = 'https://rpm.newrelic.com/deployments.xml'
def handle_noargs(self, **options):
newrelic.agent.initialize(
settings.NEW_RELIC_CONFIG,
settings.NEW_RELIC_ENV
)
config = newrelic.agent.global_settings()
if not config.monitor_mode:
return
# get the current git version
git = Popen(('git', 'describe'), stdout=PIPE)
ver, _ = git.communicate()
ver = ver.strip()
# get the current user
user = pwd.getpwuid(os.getuid())
headers = {
'x-api-key': config.license_key
}
post = {
'deployment[app_name]': config.app_name,
'deployment[revision]': ver,
'deployment[user]': '%s (%s)' % (user.pw_gecos, user.pw_name),
}
print "Informing New Relic...",
# post this data
http = Http()
response, _ = http.request(self.URL, 'POST',
headers=headers,
body=urlencode(post))
print response['status']
| mit | Python |
|
c0ea919305bcedf080a2213f4c549c68fa4efa2d | test tools | gipi/parcel,gipi/parcel | tests/test_tools.py | tests/test_tools.py | import unittest2 as unittest
from fabric.api import run
import tempfile
from mixins import WebServerMixin
from parcel.tools import dl, rpull, rpush
def tempname():
    return tempfile.mkstemp()[1]
import zlib, os
def crc32(filename):
    CHUNKSIZE = 8192
    checksum = 0
    with open(filename, 'rb') as fh:
        bytes = fh.read(CHUNKSIZE)
        while bytes:
            checksum = zlib.crc32(bytes, checksum)
            bytes = fh.read(CHUNKSIZE)
    return checksum
class ToolsTestSuite(unittest.TestCase, WebServerMixin):
"""Tools test cases."""
def test_dl(self):
self.startWebServer()
filename = tempname()
dl("http://localhost:%s/tip.tar.gz"%self.port,filename)
# there should be no differences between the files
self.assertEquals(crc32(filename),crc32(os.path.join(self.webroot,'tip.tar.gz')))
# shutdown webserver
self.stopWebServer()
| isc | Python |
|
9de5728e5fdb0f7dc606681df685eb084477d8d0 | Add exercise | MindCookin/python-exercises | multiplyTwoNumbers.py | multiplyTwoNumbers.py | #!/usr/bin/env python
def main():
    a = input("Enter a number: ")
    b = input("Enter another number: ")
    print "The product of %d and %d is %d" % (a, b, a * b)
main()
| apache-2.0 | Python |
|
f883edc209928494c45693c5ecfd279bfbb09c97 | Add partfrac1 | mph-/lcapy | timing/partfrac1.py | timing/partfrac1.py | import time
from lcapy import *
funcs = [1 / s, 1 / s**2, 1 / (s + 3), 1 / (s + 3)**2, (s + 3) / (s + 4),
1 / (s + 3)**2 / (s + 4), 1 / (s + 3)**3 / (s + 4),
1 / (s + 3) / (s + 4) / (s + 5), (s + 6) / (s + 3) / (s + 4) / (s + 5),
1 / (s + 3)**2 / (s + 4)**2, 1 / (s + 3)**3 / (s + 4)**2,
s / (s + 3)**2 / (s + 4), s / (s + 3)**3 / (s + 4)]
Ntrials = 10
methods = ('ec', 'sub')
times = {}
for func in funcs:
    ans1 = func.partfrac(method='ec')
    ans2 = func.partfrac(method='sub')
    if ans1 != func:
        print('Wrong answer for ec: ', func)
    if ans2 != func:
        print('Wrong answer for sub: ', func)
for method in methods:
    times[method] = []
    for func in funcs:
        start = time.perf_counter()
        for i in range(Ntrials):
            func.partfrac(method=method)
        stop = time.perf_counter()
        times[method].append((stop - start) / Ntrials)
import numpy as np
from matplotlib.pyplot import subplots, style, savefig, show
index = np.arange(len(funcs))
fig, axes = subplots(1)
axes.bar(index, times['ec'], 0.35, label='ec')
axes.bar(index+0.35, times['sub'], 0.35, label='subs')
axes.legend()
axes.set_ylabel('Time (s)')
show()
| lgpl-2.1 | Python |
|
e71c232660a7480c2b56f6e76e83fad4c7e9da8a | Add ctm_test.py test for testing CRTC's CTM color matrix property. | tomba/kmsxx,tomba/kmsxx,tomba/kmsxx,tomba/kmsxx | py/tests/ctm_test.py | py/tests/ctm_test.py | #!/usr/bin/python3
import sys
import pykms
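# Each CTM matrix entry is handed to the kernel in DRM's sign-magnitude
# S31.32 fixed-point format: the low 32 bits carry the fraction, the high
# 32 bits carry the integer part, with bit 31 used as the sign bit.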
def ctm_to_blob(ctm, card):
    len = 9
    arr = bytearray(len * 8)
    view = memoryview(arr).cast("I")

    for x in range(len):
        i, d = divmod(ctm[x], 1)
        if i < 0:
            i = -i
            sign = 1 << 31
        else:
            sign = 0
        view[x * 2 + 0] = int(d * ((2 ** 32) - 1))
        view[x * 2 + 1] = int(i) | sign
        #print("%f = %08x.%08x" % (ctm[x], view[x * 2 + 1], view[x * 2 + 0]))

    return pykms.Blob(card, arr)
if len(sys.argv) > 1:
    conn_name = sys.argv[1]
else:
    conn_name = ""
card = pykms.Card()
res = pykms.ResourceManager(card)
conn = res.reserve_connector(conn_name)
crtc = res.reserve_crtc(conn)
mode = conn.get_default_mode()
fb = pykms.DumbFramebuffer(card, mode.hdisplay, mode.vdisplay, "XR24");
pykms.draw_test_pattern(fb);
crtc.set_mode(conn, fb, mode)
input("press enter to set normal ctm\n")
ctm = [ 1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0 ]
ctmb = ctm_to_blob(ctm, card)
crtc.set_prop("CTM", ctmb.id)
input("press enter to set new ctm\n")
ctm = [ 0.0, 1.0, 0.0,
0.0, 0.0, 1.0,
1.0, 0.0, 0.0 ]
ctmb = ctm_to_blob(ctm, card)
crtc.set_prop("CTM", ctmb.id)
print("r->b g->r b->g ctm active\n")
input("press enter to set new ctm\n")
ctm = [ 0.0, 0.0, 1.0,
1.0, 0.0, 0.0,
0.0, 1.0, 0.0 ]
ctmb = ctm_to_blob(ctm, card)
crtc.set_prop("CTM", ctmb.id)
input("r->g g->b b->r ctm active\n")
input("press enter to turn off the crtc\n")
crtc.disable_mode()
input("press enter to enable crtc again\n")
crtc.set_mode(conn, fb, mode)
input("press enter to remove ctm\n")
crtc.set_prop("CTM", 0)
input("press enter to exit\n")
| mpl-2.0 | Python |
|
04477b11bbe7efa1720829691b7d1c3fe2a7a492 | Add __init__ | Kriechi/hyper-h2,mhils/hyper-h2,Kriechi/hyper-h2,python-hyper/hyper-h2,vladmunteanu/hyper-h2,vladmunteanu/hyper-h2,bhavishyagopesh/hyper-h2,python-hyper/hyper-h2 | h2/__init__.py | h2/__init__.py | # -*- coding: utf-8 -*-
"""
h2
~~
A HTTP/2 implementation.
"""
__version__ = '0.1.0'
| mit | Python |
|
2e2ad49c7ada145b5a4a81bd8941cf5e72d2d81b | Test case for wordaxe bug | thomaspurchas/rst2pdf,thomaspurchas/rst2pdf | rst2pdf/tests/input/test_180.py | rst2pdf/tests/input/test_180.py | # -*- coding: utf-8 -*-
from reportlab.platypus import SimpleDocTemplate
from reportlab.platypus.paragraph import Paragraph
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.colors import Color
from reportlab.platypus.flowables import _listWrapOn, _FUZZ
from wordaxe.rl.NewParagraph import Paragraph
from wordaxe.rl.styles import ParagraphStyle, getSampleStyleSheet
def go():
    styles = getSampleStyleSheet()
    style = styles['Normal']
    p1 = Paragraph('This is a paragraph', style)
    print p1.wrap(500, 701)
    print p1._cache['avail']
    print len(p1.split(500, 701))
    print len(p1.split(500, 700))
go()
| mit | Python |
|
698eee3db238189ba066670c4fe4a1193e6a942a | add flask-login | dwisulfahnur/flask-login,dwisulfahnur/flask-login,dwisulfahnur/flask-login | app/user/loginmanager.py | app/user/loginmanager.py | from flask.ext.login import LoginManager
from models import User
login_manager = LoginManager()
@login_manager.user_loader
def user_loader(user_id):
    return User.query.get(user_id)
login_manager.login_view = '.login'
| apache-2.0 | Python |
|
d5d8e16b5ccbbb65398ce015f020db3839fac409 | add test_rotate.py | yuyu2172/chainercv,yuyu2172/chainercv,chainer/chainercv,pfnet/chainercv,chainer/chainercv | tests/transforms_tests/image_tests/test_rotate.py | tests/transforms_tests/image_tests/test_rotate.py | import random
import unittest
import numpy as np
from chainer import testing
from chainercv.transforms import flip
from chainercv.transforms import rotate
class TestRotate(unittest.TestCase):
    def test_rotate(self):
        img = np.random.uniform(size=(3, 32, 24))
        angle = random.uniform(0, 180)

        out = rotate(img, angle)
        expected = flip(img, x_flip=True)
        expected = rotate(expected, -1 * angle)
        expected = flip(expected, x_flip=True)

        np.testing.assert_almost_equal(out, expected, decimal=6)
testing.run_module(__name__, __file__)
| mit | Python |
|
de74c933b74d9066984fe040edf026b7d9f87711 | Split problem statement 2 | rahulbohra/Python-Basic | 69_split_problem_statement_2.py | 69_split_problem_statement_2.py | '''
Open the file sample.txt and read it line by line.
When you find a line that starts with 'From:' like the following line:
From stephen.marquard@uct.ac.za Sat Jan 5 09:14:16 2008
You will parse the From line using split() and print out the second word in the line
(i.e. the entire address of the person who sent the message).
Then print out a count at the end.
Hint: make sure not to include the lines that start with 'From:'.
'''
fileName = raw_input("Enter file name : ")
if len(fileName) < 1 : fileName = "sample.txt"
openFile = open(fileName)
count = 0
words = list()
for line in openFile:
if not line.startswith("From:"):
continue
count += 1
words = line.split()
print words[1]
print "There were", count, "lines in the file with 'From:' as the first word."
| mit | Python |
|
01029805a6fb3484cf803f0c0abd18232b4ad810 | Add database tools | openego/ego.io,openego/ego.io | egoio/tools/db.py | egoio/tools/db.py | def grant_db_access(conn, schema, table, role):
r"""Gives access to database users/ groups
Parameters
----------
conn : sqlalchemy connection object
A valid connection to a database
schema : str
The database schema
table : str
The database table
role : str
database role that access is granted to
"""
grant_str = """GRANT ALL ON TABLE {schema}.{table}
TO {role} WITH GRANT OPTION;""".format(schema=schema, table=table,
role=role)
conn.execute(grant_str)
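# Usage sketch (assumes an existing SQLAlchemy engine; the schema, table and
# role names below are illustrative only):
#
#   from sqlalchemy import create_engine
#   conn = create_engine('postgresql:///oedb').connect()
#   grant_db_access(conn, 'model_draft', 'demand_table', 'oeuser')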
def add_primary_key(conn, schema, table, pk_col):
r"""Adds primary key to database table
Parameters
----------
conn : sqlalchemy connection object
A valid connection to a database
schema : str
The database schema
table : str
The database table
pk_col : str
Column that primary key is applied to
"""
sql_str = """alter table {schema}.{table} add primary key ({col})""".format(
schema=schema, table=table, col=pk_col)
conn.execute(sql_str)
def change_owner_to(conn, schema, table, role):
r"""Changes table's ownership to role
Parameters
----------
conn : sqlalchemy connection object
A valid connection to a database
schema : str
The database schema
table : str
The database table
role : str
database role that access is granted to
"""
sql_str = """ALTER TABLE {schema}.{table}
OWNER TO {role};""".format(schema=schema,
table=table,
role=role)
conn.execute(sql_str) | agpl-3.0 | Python |
|
bc9072cee7ce880c30af83ee4c239ae9cf1ddbfe | Create NumberofIslandsII_001.py | cc13ny/Allin,Chasego/codirit,Chasego/cod,Chasego/codi,cc13ny/Allin,cc13ny/algo,Chasego/cod,cc13ny/Allin,Chasego/cod,cc13ny/Allin,Chasego/codirit,cc13ny/Allin,Chasego/codi,Chasego/codi,Chasego/codi,Chasego/codi,cc13ny/algo,Chasego/cod,Chasego/codirit,cc13ny/algo,Chasego/cod,cc13ny/algo,Chasego/codirit,cc13ny/algo,Chasego/codirit | lintcode/Number-of-Islands-II/NumberofIslandsII_001.py | lintcode/Number-of-Islands-II/NumberofIslandsII_001.py | # Definition for a point.
# class Point:
# def __init__(self, a=0, b=0):
# self.x = a
# self.y = b
class UnionFind:

    def __init__(self, n, m):
        self.fathers = {}
        self.nsets = 0
        self.grid = [[0 for _ in range(m)] for _ in range(n)]
        self.n = n
        self.m = m

    def build_island(self, i, j):
        self.grid[i][j] = 1
        self.fathers[i * self.m + j] = i * self.m + j
        self.nsets += 1

        nbrs = []
        nbrs.append([i, j - 1])
        nbrs.append([i, j + 1])
        nbrs.append([i - 1, j])
        nbrs.append([i + 1, j])

        for nbr in nbrs:
            if -1 < nbr[0] < self.n and -1 < nbr[1] < self.m:
                if self.grid[nbr[0]][nbr[1]] == 1:
                    idx1 = i * self.m + j
                    idx2 = nbr[0] * self.m + nbr[1]
                    self.union(idx1, idx2)

    def find(self, idx):
        return self.compressed_find(idx)

    def compressed_find(self, idx):
        fidx = self.fathers[idx]
        if fidx != idx:
            self.fathers[idx] = self.find(fidx)
        return self.fathers[idx]

    def union(self, i, j):
        fi = self.find(i)
        fj = self.find(j)
        if fi != fj:
            self.fathers[fi] = fj
            self.nsets -= 1

    def get_nsets(self):
        return self.nsets


class Solution:
    # @param {int} n an integer
    # @param {int} m an integer
    # @param {Point[]} operators an array of points
    # @return {int[]} an integer array
    def numIslands2(self, n, m, operators):
        # Write your code here
        if n == 0 or m == 0:
            return 0

        uf, res = UnionFind(n, m), []
        for oper in operators:
            i, j = oper.x, oper.y
            if -1 < i < n and -1 < j < m:
                uf.build_island(i, j)
            res.append(uf.get_nsets())
        return res
| mit | Python |
|
bc3f4575c7267db8f7841a82e8f6866c59d15237 | Add some example function tests that use gaeftest | rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son | tests/test_functional.py | tests/test_functional.py | #!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__authors__ = [
'"Matthew Wilkes" <matthew@matthewwilkes.co.uk>',
]
from gaeftest.test import FunctionalTestCase
from zope.testbrowser import browser
import os.path
class MelangeFunctionalTestCase(FunctionalTestCase):
"""A base class for all functional tests in Melange.
Tests MUST NOT be defined here, but the superclass requires a path
attribute that points to the app.yaml. Utility functions MAY be
declared here to be shared by all functional tests, but any
overridden unittest methods MUST call the superclass version.
"""
path = os.path.abspath(__file__+"/../../app/app.yaml")
class TestBranding(MelangeFunctionalTestCase):
"""Tests that ensure Melange properly displays attribution.
Other notices, as required by the project and/or law, are tested
here as well.
"""
def test_attribution(self):
"""Ensure that the front page asserts that it is a Melange app.
"""
tb = browser.Browser()
tb.open("http://127.0.0.1:8080/site/show/site")
self.assertTrue("Powered by Melange" in tb.contents)
class TestLogin(MelangeFunctionalTestCase):
"""Tests that check the login system is functioning correctly.
Also tests that users go through the correct registration workflow.
"""
def test_firstLogin(self):
"""Ensure that new users are prompted to create a profile.
Also test that only new users are prompted.
"""
tb = browser.Browser()
tb.open("http://127.0.0.1:8080")
tb.getLink("Sign in").click()
self.assertTrue("login" in tb.url)
# fill in dev_appserver login form
tb.getForm().getControl("Email").value = "newuser@example.com"
tb.getForm().getControl("Login").click()
self.assertTrue(tb.url.endswith("/show/site"))
self.assertTrue('Please create <a href="/user/create_profile">'
'User Profile</a> in order to view this page' in tb.contents)
tb.getLink("User Profile").click()
# fill in the user profile
cp = tb.getForm(action="create_profile")
cp.getControl(name="link_id").value = "exampleuser"
cp.getControl(name="name").value = "Example user"
cp.getControl("Save").click()
# if all is well, we go to the edit page
self.assertTrue("edit_profile" in tb.url)
tb.open("http://127.0.0.1:8080")
# call to action no longer on front page
self.assertFalse('Please create <a href="/user/create_profile">'
'User Profile</a> in order to view this page' in tb.contents)
| apache-2.0 | Python |
|
4856b426b380d4d46cccc2f5b8ab2212956a96c2 | test of time module. not terribly fancy, but it does touch every function and variable in the module, verifies a few return values and even tests a couple of known error conditions. | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Lib/test/test_time.py | Lib/test/test_time.py | import time
time.altzone
time.clock()
t = time.time()
time.asctime(time.gmtime(t))
if time.ctime(t) <> time.asctime(time.localtime(t)):
    print 'time.ctime(t) <> time.asctime(time.localtime(t))'
time.daylight
if int(time.mktime(time.localtime(t))) <> int(t):
    print 'time.mktime(time.localtime(t)) <> t'
time.sleep(1.2)
tt = time.gmtime(t)
for directive in ('a', 'A', 'b', 'B', 'c', 'd', 'E', 'H', 'I',
'j', 'm', 'M', 'n', 'N', 'o', 'p', 'S', 't',
'U', 'w', 'W', 'x', 'X', 'y', 'Y', 'Z', '%'):
    format = '%' + directive
    time.strftime(format, tt)
time.timezone
time.tzname
# expected errors
try:
    time.asctime(0)
except TypeError:
    pass
try:
    time.mktime((999999, 999999, 999999, 999999,
                 999999, 999999, 999999, 999999,
                 999999))
except OverflowError:
    pass
| mit | Python |
|
c851501cc8149685a9e9c023aa200b92c17a9078 | Add decoder ida fields name | goodwinxp/ATFGenerator,goodwinxp/ATFGenerator,goodwinxp/ATFGenerator | pida_fields.py | pida_fields.py | def decode_name_fields(ida_fields):
    i = -1
    stop = len(ida_fields)
    while True:
        i += 1
        if i == stop:
            break
        count = ord(ida_fields[i]) - 1
        if count == 0:
            continue
        i += 1
        yield ida_fields[i:i + count]
        i += count - 1
| mit | Python |
|
e869c7ef9e3d19da4c98cda57b5e22fb5a35cba5 | Add first basic unittests using py.test | Khan/wtforms | tests/test_validators.py | tests/test_validators.py | """
test_validators
~~~~~~~~~~~~~~
Unittests for bundled validators.
:copyright: 2007-2008 by James Crasta, Thomas Johansson.
:license: MIT, see LICENSE.txt for details.
"""
from py.test import raises
from wtforms.validators import ValidationError, length, url, not_empty, email, ip_address
class DummyForm(object):
pass
class DummyField(object):
def __init__(self, data):
self.data = data
form = DummyForm()
def test_email():
assert email(form, DummyField('foo@bar.dk')) == None
assert email(form, DummyField('123@bar.dk')) == None
assert email(form, DummyField('foo@456.dk')) == None
assert email(form, DummyField('foo@bar456.info')) == None
raises(ValidationError, email, form, DummyField('foo')) == None
raises(ValidationError, email, form, DummyField('bar.dk')) == None
raises(ValidationError, email, form, DummyField('foo@')) == None
raises(ValidationError, email, form, DummyField('@bar.dk')) == None
raises(ValidationError, email, form, DummyField('foo@bar')) == None
raises(ValidationError, email, form, DummyField('foo@bar.ab12')) == None
raises(ValidationError, email, form, DummyField('foo@bar.abcde')) == None
def test_length():
field = DummyField('foobar')
assert length(min=2, max=6)(form, field) == None
raises(ValidationError, length(min=7), form, field)
raises(ValidationError, length(max=5), form, field)
def test_url():
assert url()(form, DummyField('http://foobar.dk')) == None
assert url()(form, DummyField('http://foobar.dk/')) == None
assert url()(form, DummyField('http://foobar.dk/foobar')) == None
raises(ValidationError, url(), form, DummyField('http://foobar'))
raises(ValidationError, url(), form, DummyField('foobar.dk'))
raises(ValidationError, url(), form, DummyField('http://foobar.12'))
def test_not_empty():
assert not_empty()(form, DummyField('foobar')) == None
raises(ValidationError, not_empty(), form, DummyField(''))
raises(ValidationError, not_empty(), form, DummyField(' '))
def test_ip_address():
assert ip_address(form, DummyField('127.0.0.1')) == None
raises(ValidationError, ip_address, form, DummyField('abc.0.0.1'))
raises(ValidationError, ip_address, form, DummyField('1278.0.0.1'))
raises(ValidationError, ip_address, form, DummyField('127.0.0.abc'))
| bsd-3-clause | Python |
|
54c358a296733d2a5236a9a776830f1b78682b73 | Add lc040_combination_sum_ii.py | bowen0701/algorithms_data_structures | lc040_combination_sum_ii.py | lc040_combination_sum_ii.py | """Leetcode 40. Combination Sum II
Medium
URL: https://leetcode.com/problems/combination-sum-ii/
Given a collection of candidate numbers (candidates) and a target number (target),
find all unique combinations in candidates where the candidate numbers sums to target.
Each number in candidates may only be used once in the combination.
Note:
All numbers (including target) will be positive integers.
The solution set must not contain duplicate combinations.
Example 1:
Input: candidates = [10,1,2,7,6,1,5], target = 8,
A solution set is:
[
[1, 7],
[1, 2, 5],
[2, 6],
[1, 1, 6]
]
Example 2:
Input: candidates = [2,5,2,1,2], target = 5,
A solution set is:
[
[1,2,2],
[5]
]
"""
class Solution(object):
    def combinationSum2(self, candidates, target):
        """
        :type candidates: List[int]
        :type target: int
        :rtype: List[List[int]]
        """
        pass
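    # One common approach (a sketch, not the committed solution): sort the
    # candidates, then backtrack, skipping equal values at the same recursion
    # depth so duplicate combinations are never produced:
    #
    #   candidates.sort()
    #   result = []
    #   def backtrack(start, remain, path):
    #       if remain == 0:
    #           result.append(list(path))
    #           return
    #       for i in range(start, len(candidates)):
    #           if i > start and candidates[i] == candidates[i - 1]:
    #               continue  # duplicate value at this depth
    #           if candidates[i] > remain:
    #               break     # list is sorted; nothing later can fit
    #           path.append(candidates[i])
    #           backtrack(i + 1, remain - candidates[i], path)
    #           path.pop()
    #   backtrack(0, target, [])
    #   return result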
def main():
    pass
if __name__ == '__main__':
    main()
| bsd-2-clause | Python |
|
7fab8c2d014f013131bd4d6301f5f8e5268d6037 | add leetcode Pow(x, n) | Fity/2code,Fity/2code,Fity/2code,Fity/2code,Fity/2code,Fity/2code | leetcode/powx-n/solution.py | leetcode/powx-n/solution.py | # -*- coding:utf-8 -*-
class Solution:
    # @param x, a float
    # @param n, an integer
    # @return a float
    def pow(self, x, n):
        if n == 0:
            return 1
        if n < 0:
            neg_flag = True
            n = -n
        else:
            neg_flag = False
        ret = 1
        while n > 0:
            if n % 2 == 1:
                ret *= x
            x = x * x
            n //= 2
        if neg_flag:
            return 1 / ret
        return ret
| mit | Python |
|
736103ea495c89defcae9bf6ab72aa7b89768026 | add start of advisory module | sassoftware/mirrorball,sassoftware/mirrorball | updatebot/advise.py | updatebot/advise.py | #
# Copyright (c) 2008 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
"""
Module for managing/manipulating advisories.
"""
from updatebot.errors import *
class Advisor(object):
"""
Class for managing, manipulating, and distributing advisories.
"""
def __init__(self, cfg, rpmSource):
self._cfg = cfg
self._rpmSource = rpmSource
| apache-2.0 | Python |
|
27b9727926139ae2cfde6d3cdcdf5746ed28e03d | Add new package arbor (#11914) | iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/arbor/package.py | var/spack/repos/builtin/packages/arbor/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Arbor(CMakePackage):
"""Arbor is a high-performance library for computational neuroscience
simulations."""
homepage = "https://github.com/arbor-sim/arbor/"
url = "https://github.com/arbor-sim/arbor/archive/v0.2.tar.gz"
version('0.2', sha256='43c9181c03be5f3c9820b2b50592d7b41344f37e1200980119ad347eb7bcf4eb')
variant('vectorize', default=False,
description='Enable vectorization of computational kernels')
variant('gpu', default=False, description='Enable GPU support')
variant('mpi', default=False, description='Enable MPI support')
variant('python', default=False,
description='Enable Python frontend support')
variant('unwind', default=False,
description='Enable libunwind for pretty stack traces')
depends_on('cuda', when='+gpu')
depends_on('mpi', when='+mpi')
depends_on('libunwind', when='+unwind')
extends('python@3.6:', when='+python')
depends_on('py-mpi4py', when='+mpi+python', type=('build', 'run'))
depends_on('cmake@3.9:', type='build')
# mentioned in documentation but shouldn't be necessary when
# using the archive
# depends_on('git@2.0:', type='build')
# compiler dependencies
# depends_on(C++14)
# depends_on('gcc@6.1.0:', type='build')
# depends_on('llvm@4:', type='build')
# depends_on('clang-apple@9:', type='build')
# when building documentation, this could be an optional dependency
depends_on('py-sphinx', type='build')
def patch(self):
filter_file(
r'find_library\(_unwind_library_target unwind-\${libunwind_arch}',
r'find_library(_unwind_library_target unwind-${_libunwind_arch}',
'cmake/FindUnwind.cmake'
)
filter_file(
r'target_compile_definitions\(arbor-private-deps ARB_WITH_UNWIND\)', # noqa: E501
r'target_compile_definitions(arbor-private-deps INTERFACE WITH_UNWIND)', # noqa: E501
'CMakeLists.txt'
)
def cmake_args(self):
args = [
'-DARB_VECTORIZE=' + ('ON' if '+vectorize' in self.spec else 'OFF'), # noqa: E501
'-DARB_WITH_GPU=' + ('ON' if '+gpu' in self.spec else 'OFF'),
'-DARB_WITH_PYTHON=' + ('ON' if '+python' in self.spec else 'OFF'),
]
if '+unwind' in self.spec:
args.append('-DUnwind_ROOT_DIR={0}'.format(self.spec['libunwind'].prefix)) # noqa: E501
return args
| lgpl-2.1 | Python |
|
ac851c402952cf44b24dfdf5277765ff286dd994 | convert embeddingns to js-friendly format | brandonmburroughs/food2vec,altosaar/food2vec,altosaar/food2vec,brandonmburroughs/food2vec,altosaar/food2vec,brandonmburroughs/food2vec,brandonmburroughs/food2vec | src/convert_embeddings_to_js.py | src/convert_embeddings_to_js.py | import h5py
import json
import numpy as np
def load_embeddings(path):
    f = h5py.File(path, 'r')
    nemb = f['nemb'][:]
    f.close()
    return nemb


def load_vocab(path):
    vocab = []
    with open(path, 'rb') as f:
        for line in f.readlines():
            split = line.split(' ')
            vocab.append((split[0], int(split[1].rstrip())))
    # ignore UNK at position 0
    return vocab[1:]


def write_to_js(words, embeddings, path):
    word_vecs = {}
    for word, embedding in zip(words, embeddings):
        word_vecs[word] = embedding.tolist()
    with open(path, 'wb') as f:
        json.dump(word_vecs, f)
        f.write(';')


def main():
    nemb = load_embeddings(path='/tmp/embeddings.h5')
    vocab = load_vocab('/tmp/vocab.txt')
    words = [tup[0] for tup in vocab]
    # don't use UNK
    words = words[1:]
    nemb = nemb[1:]
    # lower precision, faster
    nemb = nemb.astype(np.float16)
    write_to_js(words, nemb[1:], path='../../word2vecjson/data/foodVecs.js')


if __name__ == '__main__':
    main()
| mit | Python |
|
11efa5583bbeeee7c7823264f6f73715ea81edc0 | Add trivial test for ECO fetching | RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline | luigi/tests/ontologies/eco_test.py | luigi/tests/ontologies/eco_test.py | # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from ontologies import eco
def test_can_load_all_eco_terms():
source = eco.TermSources(
quickgo_file='data/quickgo/rna.gpa'
)
assert len(list(eco.to_load(source))) == 6
| apache-2.0 | Python |
|
ec3b080b2f1922f4989b853db45475d185e314de | add all | pombredanne/pyjs,gpitel/pyjs,pyjs/pyjs,spaceone/pyjs,pombredanne/pyjs,spaceone/pyjs,anandology/pyjamas,anandology/pyjamas,minghuascode/pyj,Hasimir/pyjs,minghuascode/pyj,pyjs/pyjs,pyjs/pyjs,anandology/pyjamas,pombredanne/pyjs,Hasimir/pyjs,pyjs/pyjs,lancezlin/pyjs,minghuascode/pyj,minghuascode/pyj,gpitel/pyjs,gpitel/pyjs,pombredanne/pyjs,spaceone/pyjs,lancezlin/pyjs,Hasimir/pyjs,gpitel/pyjs,Hasimir/pyjs,lancezlin/pyjs,spaceone/pyjs,lancezlin/pyjs,anandology/pyjamas | examples/gcharttestapp/TestGChart00.py | examples/gcharttestapp/TestGChart00.py |
import GChartTestAppUtil

from pyjamas.chart.GChart import GChart

"""* Empty chart without anything on it except a title and footnotes """
class TestGChart00 (GChart):
    def __init__(self):
        GChart.__init__(self, 150, 150)
        self.setChartTitle(GChartTestAppUtil.getTitle(self))
        self.setChartFootnotes("Check: Consistent with a 'no data' chart (and it doesn't crash).")
| apache-2.0 | Python |
|
4fdba8a1a5a2123843cc9eefd8949fb8996f59b2 | Add a wrapper for ChromeOS to call into telemetry. | SummerLW/Perf-Insight-Report,SummerLW/Perf-Insight-Report,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult-csm,catapult-project/catapult,benschmaus/catapult,benschmaus/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,sahiljain/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,sahiljain/catapult,benschmaus/catapult,benschmaus/catapult,sahiljain/catapult,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult-csm,benschmaus/catapult | telemetry/telemetry/unittest/run_chromeos_tests.py | telemetry/telemetry/unittest/run_chromeos_tests.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import os
import sys

from telemetry.unittest import gtest_progress_reporter
from telemetry.unittest import run_tests
from telemetry.core import util


def RunTestsForChromeOS(browser_type, unit_tests, perf_tests):
    stream = _LoggingOutputStream()
    error_string = ''

    logging.info('Running telemetry unit tests with browser_type "%s".' %
                 browser_type)
    ret = _RunOneSetOfTests(browser_type, 'telemetry',
                            os.path.join('telemetry', 'telemetry'),
                            unit_tests, stream)
    if ret:
        error_string += 'The unit tests failed.\n'

    logging.info('Running telemetry perf tests with browser_type "%s".' %
                 browser_type)
    ret = _RunOneSetOfTests(browser_type, 'perf', 'perf', perf_tests, stream)
    if ret:
        error_string = 'The perf tests failed.\n'

    return error_string


def _RunOneSetOfTests(browser_type, root_dir, sub_dir, tests, stream):
    top_level_dir = os.path.join(util.GetChromiumSrcDir(), 'tools', root_dir)
    sub_dir = os.path.join(util.GetChromiumSrcDir(), 'tools', sub_dir)
    sys.path.append(top_level_dir)
    output_formatters = [gtest_progress_reporter.GTestProgressReporter(stream)]
    run_tests.config = run_tests.Config(top_level_dir, [sub_dir],
                                        output_formatters)
    return run_tests.RunTestsCommand.main(['--browser', browser_type] + tests)


class _LoggingOutputStream(object):

    def __init__(self):
        self._buffer = []

    def write(self, s):
        """Buffer a string write. Log it when we encounter a newline."""
        if '\n' in s:
            segments = s.split('\n')
            segments[0] = ''.join(self._buffer + [segments[0]])
            log_level = logging.getLogger().getEffectiveLevel()
            try:  # TODO(dtu): We need this because of crbug.com/394571
                logging.getLogger().setLevel(logging.INFO)
                for line in segments[:-1]:
                    logging.info(line)
            finally:
                logging.getLogger().setLevel(log_level)
            self._buffer = [segments[-1]]
        else:
            self._buffer.append(s)

    def flush(self):  # pylint: disable=W0612
        pass
| bsd-3-clause | Python |
|
fcce65daf40bb1c198be7ddadee8769bf6feea9b | Create k-order-test.py | wangxiaox10/projetlibre | k-order-test.py | k-order-test.py | # -*- coding: utf-8 -*-
"""
Created on Thu Mar 6 16:41:40 2014
@author: xiao
"""
from k_order import *
#number of items to recommand
p=2
fadress = "/home/xiao/ProjetLibre/matrix/matrixInfo"
readDataFromFile(fadress)
getDu()
recommendationListe = zeros((m,p))
############################################
#### We need to recommend top items ####
############################################
#k=1
#recommend top p items for user u
def recommendItems_u(u, p):
#initialize recommendation items to be -1: null
res = zeros(p)-1
D_bar_u = Omega_comp[u]
r = f_bar_d(D_bar_u, u)
indexOrder = argsort(r)
indexOrder = indexOrder[::-1]
if len(indexOrder) >= p:
res = indexOrder[:p]
else:
res[:len(indexOrder)] = indexOrder
return res
#recommend top p items for all m users
def recommendItems(p):
for u in range(m):
r = recommendItems_u(u, p)
recommendationListe[u,:] = r
def f_test(x):
return x**2 - 3*x
def test():
a = arange(5)
b = f_test(a)
c = argsort(b)
c = c[::-1]
return c
#show
def showRecomms():
for u in range(m):
print "u:", u, ",",recommendationListe[u,:]
k_os_AUC()
recommendItems(p)
showRecomms()
######################################################
#### We need to recommend most relavent users ####
######################################################
######################################################
#### test normal AUC ####
######################################################
######################################################
#### test normal WARP ####
######################################################
######################################################
#### test K-os AUC ####
######################################################
######################################################
#### test k-os WARP ####
######################################################
| bsd-3-clause | Python |
|
d73235dd994d3705178d0cff142293444977d764 | Remove bad imports | cowlicks/odo,quantopian/odo,quantopian/odo,ContinuumIO/odo,cowlicks/odo,cpcloud/odo,ContinuumIO/odo,cpcloud/odo,blaze/odo,blaze/odo | odo/backends/tests/conftest.py | odo/backends/tests/conftest.py | import os
import shutil

import pytest


@pytest.fixture(scope='session')
def sc():
    pyspark = pytest.importorskip('pyspark')
    return pyspark.SparkContext('local[*]', 'odo')


@pytest.yield_fixture(scope='session')
def sqlctx(sc):
    pyspark = pytest.importorskip('pyspark')
    try:
        yield pyspark.HiveContext(sc)
    finally:
        dbpath = 'metastore_db'
        logpath = 'derby.log'
        if os.path.exists(dbpath):
            assert os.path.isdir(dbpath)
            shutil.rmtree(dbpath)

        if os.path.exists(logpath):
            assert os.path.isfile(logpath)
            os.remove(logpath)
| import os
import shutil

import pytest


@pytest.fixture(scope='session')
def sc():
    pytest.importorskip('pyspark')
    from pyspark import SparkContext
    return SparkContext('local[*]', 'odo')


@pytest.yield_fixture(scope='session')
def sqlctx(sc):
    pytest.importorskip('pyspark')
    from odo.backends.sparksql import HiveContext, SQLContext, SPARK_ONE_TWO
    try:
        yield HiveContext(sc) if not SPARK_ONE_TWO else SQLContext(sc)
    finally:
        dbpath = 'metastore_db'
        logpath = 'derby.log'
        if os.path.exists(dbpath):
            assert os.path.isdir(dbpath)
            shutil.rmtree(dbpath)

        if os.path.exists(logpath):
            assert os.path.isfile(logpath)
            os.remove(logpath)
| bsd-3-clause | Python |
4c5a8f018af4377ce3f9367b0c66a51a6cad671b | add __init__.py | esjeon/eatable | eatable/__init__.py | eatable/__init__.py |
from .table import Table
from .row import Row
| mit | Python |
|
2b15d2df8333db5f5cd6fcefaf56f5400baba95e | add test_results_table.py | daler/metaseq,daler/metaseq,daler/metaseq,agrimaldi/metaseq,mrGeen/metaseq,mrGeen/metaseq,agrimaldi/metaseq,agrimaldi/metaseq,mrGeen/metaseq | metaseq/test/test_results_table.py | metaseq/test/test_results_table.py | from metaseq import results_table
import metaseq
import numpy as np

fn = metaseq.example_filename('ex.deseq')
d = results_table.ResultsTable(fn)


def test_dataframe_access():
    # different ways of accessing get the same data in memory
    assert d.id is d.data.id
    assert d['id'] is d.data.id


def test_dataframe_subsetting():
    assert all(d[:10].data == d.data[:10])
    assert all(d.update(d.data[:10]).data == d.data[:10])


def test_copy():
    e = d.copy()
    e.id = 'a'
    assert e.id[0] == 'a'
    assert d.id[0] != 'a'


def smoke_tests():
    # smoke test for repr
    print repr(d)


def test_db():
    # should work
    d.attach_db(None)
    d.attach_db(metaseq.example_filename('dmel-all-r5.33-cleaned.gff.db'))
| mit | Python |
|
2382c1c9daf2b17799ceb03f42a6917966b3162c | add kattis/cold | mjenrungrot/competitive_programming,mjenrungrot/competitive_programming,mjenrungrot/competitive_programming,mjenrungrot/competitive_programming,mjenrungrot/algorithm | Kattis/cold.py | Kattis/cold.py | """
Problem: cold
Link: https://open.kattis.com/problems/cold
Source: Kattis
"""
N = int(input())
A = list(map(int, input().split()))
answer = 0
for i in range(len(A)):
    answer += (A[i] < 0)
print(answer)
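
# Illustration (editor's note, not part of the original submission):
# for N = 3 and readings "5 -2 -10", two values are below zero,
# so the program prints 2.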
| mit | Python |
|
dd93b450eb0cc92debd8b5cec82f3127c454d77f | put this back... | annayqho/TheCannon,annayqho/TheCannon | TheCannon/infer_labels.py | TheCannon/infer_labels.py | from __future__ import (absolute_import, division, print_function, unicode_literals)
from scipy import optimize as opt
import numpy as np

LARGE = 200.
SMALL = 1. / LARGE


def _get_lvec(labels):
    """
    Constructs a label vector for an arbitrary number of labels
    Assumes that our model is quadratic in the labels

    Parameters
    ----------
    labels: numpy ndarray
        pivoted label values for one star

    Returns
    -------
    lvec: numpy ndarray
        label vector
    """
    nlabels = len(labels)
    # specialized to second-order model
    linear_terms = labels
    quadratic_terms = np.outer(linear_terms,
                               linear_terms)[np.triu_indices(nlabels)]
    lvec = np.hstack((linear_terms, quadratic_terms))
    return lvec


def _func(coeffs, *labels):
    """ Takes the dot product of coefficients vec & labels vector

    Parameters
    ----------
    coeffs: numpy ndarray
        the coefficients on each element of the label vector
    *labels: numpy ndarray
        label vector

    Returns
    -------
    dot product of coeffs vec and labels vec
    """
    lvec = _get_lvec(list(labels))
    return np.dot(coeffs, lvec)


def _infer_labels(model, dataset):
    """
    Uses the model to solve for labels of the test set.

    Parameters
    ----------
    model: tuple
        Coeffs_all, covs, scatters, chis, chisqs, pivots
    dataset: Dataset
        Dataset that needs label inference

    Returns
    -------
    errs_all:
        Covariance matrix of the fit
    """
    print("Inferring Labels")
    coeffs_all = model.coeffs
    scatters = model.scatters
    chisqs = model.chisqs
    pivots = model.pivots
    nlabels = dataset.tr_label.shape[1]
    fluxes = dataset.test_flux
    ivars = dataset.test_ivar
    nstars = fluxes.shape[0]
    labels_all = np.zeros((nstars, nlabels))
    MCM_rotate_all = np.zeros((nstars, coeffs_all.shape[1] - 1,
                               coeffs_all.shape[1] - 1))
    errs_all = np.zeros((nstars, nlabels))

    for jj in range(nstars):
        print(jj)
        flux = fluxes[jj, :]
        ivar = ivars[jj, :]
        flux_piv = flux - coeffs_all[:, 0] * 1.  # pivot around the leading term
        sig = np.sqrt(1. / ivar + scatters**2)
        coeffs = np.delete(coeffs_all, 0, axis=1)  # take pivot into account
        try:
            labels, covs = opt.curve_fit(_func, coeffs, flux_piv,
                                         p0=np.repeat(1, nlabels),
                                         sigma=sig, absolute_sigma=True)
        except TypeError:  # old scipy version
            labels, covs = opt.curve_fit(_func, coeffs, flux_piv,
                                         p0=np.repeat(1, nlabels), sigma=sig)

        # rescale covariance matrix
        chi = (flux_piv - _func(coeffs, *labels)) / sig
        chi2 = (chi**2).sum()
        # FIXME: dof does not seem to be right to me (MF)
        dof = len(flux_piv) - nlabels
        factor = (chi2 / dof)
        covs /= factor

        labels = labels + pivots
        labels_all[jj, :] = labels
        errs_all[jj, :] = covs.diagonal()

    dataset.set_test_label_vals(labels_all)
    return errs_all
| mit | Python |
|
652a03d96cbc5c06850fa62fa3507fb74ee3deab | Create python_ciphertext.py | agusmakmun/Some-Examples-of-Simple-Python-Script,agusmakmun/Some-Examples-of-Simple-Python-Script | Encryption/python_ciphertext.py | Encryption/python_ciphertext.py | #Simply how to make a ciphertext only with 1 line.
>>> #hex_encode = 'summonagus'.encode('hex')
>>> hex_encode = '73756d6d6f6e61677573'
>>> chip = ''.join([ str(int(a)*2) if a.isdigit() and int(a) == 3 else str(int(a)/2) if a.isdigit() and int(a) == 6 else a for a in hex_encode ])
>>>
>>> hex_encode
'73756d6d6f6e61677573'
>>> chip
'76753d3d3f3e31377576'
>>>
>>>
| agpl-3.0 | Python |
|
8add0d44139b527d40aaa9da43d023ddde52c410 | Add string python solution | byung-u/ProjectEuler | HackerRank/PYTHON/Strings/alphabet_rangoli.py | HackerRank/PYTHON/Strings/alphabet_rangoli.py | #!/usr/bin/env python3
import sys
from string import ascii_lowercase
def print_rangoli(size):
    width = size * 4 - 3
    alphabet = (ascii_lowercase[0:size])[::-1]
    res = []
    for i in range(size):
        s = ''
        for a in alphabet[0:i+1]:
            s = '%s-%s' % (s, a)
        temp = s + s[::-1][1:]
        if len(temp) == width + 2:
            temp = temp[1:-1]
            res.append(temp)
        else:
            res.append(temp.center(width, '-'))
    print('\n'.join(res))
    print('\n'.join(list(reversed(res[0:size - 1]))))


if __name__ == '__main__':
    n = int(input())
    print_rangoli(n)
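
# Illustration (editor's note, not part of the original solution):
# for n = 2 the printed rangoli is
#   --b--
#   b-a-b
#   --b--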
| mit | Python |
|
eac74d731b01f732d23ce21e8132fa0785aa1ab2 | Create visible_elements.py | kiryushah/test-selenium_first | visible_elements.py | visible_elements.py | # -*- coding: utf-8 -*-
import unittest

from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.by import By


class visible_elements(unittest.TestCase):
    def setUp(self):
        self.driver = webdriver.Chrome("C://chromedriver/chromedriver.exe")
        self.driver.maximize_window()
        wait = WebDriverWait(self.driver, 10)

    def test_clickelements(self):
        self.driver.get("http://localhost/litecart/en/")
        rows = self.driver.find_elements_by_xpath("//li[@class='product column shadow hover-light']")

        def are_elements_present(self, *args):
            return len(self.driver.find_elements(*args)) == 1

        are_elements_present(self, By.XPATH, "//div[@class='sticker sale']" and "//div[@class='sticker new']") in rows

    def tearDown(self):
        self.driver.close()


if __name__ == "__main__":
    unittest.main()
| apache-2.0 | Python |
|
5343c89686fd05cf251388e1f28bfd4343d4c277 | Add python-based CPU implementation | kwadraterry/GPGPU-LUT,kwadraterry/GPGPU-LUT,kwadraterry/GPGPU-LUT,kwadraterry/GPGPU-LUT,kwadraterry/GPGPU-LUT | src/CPU/color_histogram.py | src/CPU/color_histogram.py | from PIL import Image
from collections import defaultdict
import sys
im = Image.open(sys.argv[1])
colors = defaultdict(int)
for pixel in im.getdata():
    colors[pixel] += 1
print colors
| bsd-3-clause | Python |
|
f408465521484032631adfe9dced21119ad2bf82 | Revert "Delete old MultiServer implementation" | HubbeKing/Hubbot_Twisted | MultiServer.py | MultiServer.py | from multiprocessing import Process
import subprocess
import GlobalVars
def botInstance(server, channels):
    args = ["python", "hubbebot.py"]
    args.append(server)
    for chan in channels:
        args.append(chan)
    subprocess.call(args)


if __name__ == "__main__":
    for (server, channels) in GlobalVars.connections.items():
        p = Process(target=botInstance, args=(server, channels))
        p.start()
| mit | Python |
|
2ef9618e705bb293641674ca5e7cc1f14daf3483 | Set default branding for all organisations | alphagov/notifications-api,alphagov/notifications-api | migrations/versions/0285_default_org_branding.py | migrations/versions/0285_default_org_branding.py | """empty message
Revision ID: 0285_default_org_branding
Revises: 0284_0283_retry
Create Date: 2016-10-25 17:37:27.660723

"""

# revision identifiers, used by Alembic.
revision = '0285_default_org_branding'
down_revision = '0284_0283_retry'

from alembic import op
import sqlalchemy as sa

BRANDING_TABLES = ('email_branding', 'letter_branding')


def upgrade():
    for branding in BRANDING_TABLES:
        op.execute("""
            UPDATE
                organisation
            SET
                {branding}_id = {branding}.id
            FROM
                {branding}
            WHERE
                {branding}.domain in (
                    SELECT
                        domain
                    FROM
                        domain
                    WHERE
                        domain.organisation_id = organisation.id
                )
        """.format(branding=branding))


def downgrade():
    for branding in BRANDING_TABLES:
        op.execute("""
            UPDATE
                organisation
            SET
                {branding}_id = null
        """.format(branding=branding))
| mit | Python |
|
52b870d36370f46fdc33de2948504c2aec8db1a1 | fix field names in network object | wathsalav/xos,wathsalav/xos,wathsalav/xos,wathsalav/xos | planetstack/core/migrations/0002_network_field_case.py | planetstack/core/migrations/0002_network_field_case.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import timezones.fields


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.RenameField(
            model_name='networktemplate',
            old_name='controllerKind',
            new_name='controller_kind',
        ),
        migrations.RenameField(
            model_name='networktemplate',
            old_name='guaranteedBandwidth',
            new_name='guaranteed_bandwidth',
        ),
        migrations.RenameField(
            model_name='networktemplate',
            old_name='sharedNetworkId',
            new_name='shared_network_id',
        ),
        migrations.RenameField(
            model_name='networktemplate',
            old_name='sharedNetworkName',
            new_name='shared_network_name',
        ),
        migrations.RenameField(
            model_name='networktemplate',
            old_name='topologyKind',
            new_name='topology_kind',
        ),
    ]
| apache-2.0 | Python |
|
9b584c6d23ad93fd497fb2e71d2343a954cea4e5 | Create PaulFinalproject.py | kingmayonaise/Final-Project | PaulFinalproject.py | PaulFinalproject.py | mit | Python |
||
8462466f8a21f25f85b8a06076877361b2545a12 | Add initialize script | MaritimeRenewable/PyResis | PyResis/__init__.py | PyResis/__init__.py | __author__ = 'Yu Cao'
| mit | Python |
|
8fcc727f9a7fbd886bc900f9c24cf2711a0c5b99 | Create Record.py | Larz60p/Python-Record-Structure | Record.py | Record.py | """
The MIT License (MIT)

Copyright (c) <2016> <Larry McCaig (aka: Larz60+ aka: Larz60p)>

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from collections import namedtuple
import json


class Record(object):
    def __init__(self, filename=None):
        with open(filename, 'r') as f:
            self.j = f.read()
        self.record = json.loads(self.j, object_hook=lambda j:
                                 namedtuple('data', j.keys())(*j.values()))
        self.recindex = len(self.record)
        self.index = 0

    def __iter__(self):
        self.index = self.recindex
        return self

    def __next__(self):
        if self.index == 0:
            raise StopIteration
        self.index -= 1
        return self.record[self.index]
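
# --- Illustrative usage (editor's sketch; the filename and JSON shape are
# --- assumed, not part of the original commit). Given 'data.json' holding a
# --- JSON array of objects, the class iterates the records in reverse order:
#
#     for row in Record('data.json'):
#         print(row)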
| mit | Python |
|
1708eb17fb9c232414b0e162754ca31b6fd9366c | Add tests for plagiarism filter command | empirical-org/Empirical-Core,empirical-org/Empirical-Core,empirical-org/Empirical-Core,empirical-org/Empirical-Core,empirical-org/Empirical-Core,empirical-org/Empirical-Core,empirical-org/Empirical-Core | services/comprehension/main-api/comprehension/tests/management/commands/test_pre_filter_responses.py | services/comprehension/main-api/comprehension/tests/management/commands/test_pre_filter_responses.py | import csv
from io import StringIO
from unittest.mock import call, MagicMock, patch

from django.test import TestCase

from ....views.plagiarism import PlagiarismFeedbackView
from ....management.commands import pre_filter_responses

Command = pre_filter_responses.Command


class TestCommandBase(TestCase):
    def setUp(self):
        self.command = Command()


class TestPreFilterResponsesCommand(TestCommandBase):
    def test_add_arguments(self):
        mock_parser = MagicMock()

        self.command.add_arguments(mock_parser)

        self.assertEqual(mock_parser.add_argument.call_count, 2)
        mock_parser.assert_has_calls([
            call.add_argument('passage_source', metavar='PASSAGE_SOURCE',
                              help='The path to the file with the passage'),
            call.add_argument('csv_input', metavar='CSV_PATH',
                              help='The path to the input CSV file'),
        ])

    @patch.object(PlagiarismFeedbackView, '_check_is_plagiarism')
    @patch.object(Command, '_retrieve_passage')
    @patch.object(csv, 'reader')
    @patch.object(csv, 'writer')
    @patch(f'{pre_filter_responses.__name__}.open')
    def test_extract_create_feedback_kwargs(self, mock_open, mock_writer,
                                            mock_reader, mock_retrieve,
                                            mock_check_plagiarism):
        mock_csv_input = 'MOCK_CSV_INPUT'
        kwargs = {
            'passage_source': 'MOCK_PASSAGE_SOURCE',
            'csv_input': mock_csv_input,
        }
        file_name = 'FAKE FILE NAME'
        mock_handler = mock_open.return_value
        mock_file_content = StringIO('HEADER\nVALUE')
        mock_handler.__enter__.return_value = mock_file_content
        mock_reader_row = 'MOCK_ROW'
        mock_reader.next.return_value = mock_reader_row
        mock_check_plagiarism.return_value = False

        self.command.handle(**kwargs)

        mock_open.assert_has_calls([
            call(mock_csv_input, 'r'),
            call().__enter__(),
            call(f'filtered_{mock_csv_input}', 'w'),
            call().__enter__(),
            call().__exit__(None, None, None),
            call().__exit__(None, None, None),
        ])
        mock_retrieve.assert_called_with(kwargs['passage_source'])
        mock_writer.assert_called()
| agpl-3.0 | Python |
|
06ced5abe2226a234c2e2887fbf84f18dfa7ddc4 | Update timer for new label. Clean up a bit and use more pyglet 1.1 features. | xshotD/pyglet,mpasternak/pyglet-fix-issue-518-522,kmonsoor/pyglet,Austin503/pyglet,kmonsoor/pyglet,gdkar/pyglet,odyaka341/pyglet,google-code-export/pyglet,odyaka341/pyglet,cledio66/pyglet,mpasternak/pyglet-fix-issue-518-522,mpasternak/michaldtz-fix-552,Alwnikrotikz/pyglet,qbektrix/pyglet,google-code-export/pyglet,mpasternak/michaldtz-fixes-518-522,xshotD/pyglet,odyaka341/pyglet,kmonsoor/pyglet,Alwnikrotikz/pyglet,arifgursel/pyglet,shaileshgoogler/pyglet,mpasternak/michaldtz-fix-552,mpasternak/pyglet-fix-issue-552,Austin503/pyglet,qbektrix/pyglet,odyaka341/pyglet,cledio66/pyglet,mpasternak/michaldtz-fixes-518-522,Alwnikrotikz/pyglet,Austin503/pyglet,Austin503/pyglet,google-code-export/pyglet,qbektrix/pyglet,cledio66/pyglet,shaileshgoogler/pyglet,kmonsoor/pyglet,arifgursel/pyglet,gdkar/pyglet,xshotD/pyglet,mpasternak/michaldtz-fix-552,xshotD/pyglet,shaileshgoogler/pyglet,odyaka341/pyglet,mpasternak/pyglet-fix-issue-552,shaileshgoogler/pyglet,cledio66/pyglet,xshotD/pyglet,Austin503/pyglet,arifgursel/pyglet,cledio66/pyglet,shaileshgoogler/pyglet,mpasternak/michaldtz-fix-552,Alwnikrotikz/pyglet,google-code-export/pyglet,gdkar/pyglet,gdkar/pyglet,mpasternak/michaldtz-fixes-518-522,mpasternak/pyglet-fix-issue-518-522,arifgursel/pyglet,qbektrix/pyglet,qbektrix/pyglet,mpasternak/pyglet-fix-issue-518-522,Alwnikrotikz/pyglet,kmonsoor/pyglet,mpasternak/michaldtz-fixes-518-522,arifgursel/pyglet,mpasternak/pyglet-fix-issue-552,gdkar/pyglet,google-code-export/pyglet,mpasternak/pyglet-fix-issue-552 | examples/timer.py | examples/timer.py | '''A full-screen minute:second timer. Leave it in charge of your conference
lighting talks.

After 5 minutes, the timer goes red. This limit is easily adjustable by
hacking the source code.

Press spacebar to start, stop and reset the timer.
'''

import pyglet

window = pyglet.window.Window(fullscreen=True)


class Timer(object):
    def __init__(self):
        self.label = pyglet.text.Label('00:00', font_size=360,
                                       x=window.width//2, y=window.height//2,
                                       valign='center', halign='center')
        self.reset()

    def reset(self):
        self.time = 0
        self.running = False
        self.label.text = '00:00'
        self.label.color = (255, 255, 255, 255)

    def update(self, dt):
        if self.running:
            self.time += dt
            m, s = divmod(self.time, 60)
            self.label.text = '%02d:%02d' % (m, s)
            if m >= 5:
                self.label.color = (180, 0, 0, 255)


@window.event
def on_key_press(symbol, modifiers):
    if symbol == pyglet.window.key.SPACE:
        if timer.running:
            timer.running = False
        else:
            if timer.time > 0:
                timer.reset()
            else:
                timer.running = True
    elif symbol == pyglet.window.key.ESCAPE:
        window.close()


@window.event
def on_draw():
    window.clear()
    timer.label.draw()


timer = Timer()
pyglet.clock.schedule_interval(timer.update, 1)
pyglet.app.run()
| from pyglet import window
from pyglet import text
from pyglet import clock
from pyglet import font

w = window.Window(fullscreen=True)


class Timer(text.Label):
    def stop(self):
        self.__time = 0

    def reset(self):
        self.__time = 0
        self.__running = False
        self.text = '00:00'

    def animate(self, dt):
        if self.__running:
            self.__time += dt
            m, s = divmod(self.__time, 60)
            self.text = '%02d:%02d' % (m, s)

    def on_text(self, text):
        if text == ' ':
            self.__running = not self.__running
            return True
        return False


ft = font.load('', 360)
timer = Timer('00:00', ft, x=w.width//2, y=w.height//2,
              valign='center', halign='center')
timer.reset()
clock.schedule(timer.animate)

w.push_handlers(timer)
while not w.has_exit:
    w.dispatch_events()
    clock.tick()
    w.clear()
    timer.draw()
    w.flip()
| bsd-3-clause | Python |
064c1a5bd8790c9ea407f62de0428657354e979f | Create jcolor.py | JaredMHall/pulltag,zwparchman/pulltag | jcolor.py | jcolor.py | # colors
HEADER = '\033[95m'
FAIL = '\033[91m'
FGBLUE2 = '\033[94m'
FGGREEN2 = '\033[92m'
FGORANGE = '\033[93m'
FGGRAY = '\033[30m'
FGRED = '\033[31m'
FGGREEN = '\033[32m'
FGYELLOW = '\033[33m'
FGBLUE = '\033[34m'
FGMAG = '\033[35m'
FGCYAN = '\033[36m'
FGWHITE = '\033[37m'
# FGGRAY = '\033[61m'
BGBLACK = '\033[40m'
BGRED = '\033[41m'
BGGREEN = '\033[42m'
BGYELLOW = '\033[43m'
BGBLUE = '\033[44m'
BGMAG = '\033[45m'
BGCYAN = '\033[46m'
BGWHITE = '\033[47m'
# end color(s)
ENDC = '\033[0m'
# format settings
BOLDON = '\033[1m'
BOLDOFF = '\033[22m'
ITALON = '\033[3m'
ITALOFF = '\033[23m'
UNDLNON = '\033[4m'
UNDLNOFF = '\033[24m'
INVON = '\033[7m'
INVOFF = '\033[27m'
STRKTHRUON = '\033[9m'
STRKTHRUOFF = '\033[29m'
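
# --- Illustrative usage (editor's sketch, not part of the original file): ---
# the constants are plain ANSI escape sequences, so they concatenate with text:
#
#     print(FGGREEN + BOLDON + 'ok' + BOLDOFF + ENDC)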
| mit | Python |
|
040911e2343ec6753c767eff44be2cf54eb33ff8 | add file name to fasta sequence headers | fandemonium/code,chnops/code,fandemonium/code,chnops/code,fandemonium/code,chnops/code,fandemonium/code | add_file_name_to_reads.py | add_file_name_to_reads.py | import os
import sys
from Bio import SeqIO
out = open(sys.argv[2], 'w')
for records in SeqIO.parse(open(sys.argv[1], 'rU'), "fasta"):
    records.id = records.id.strip() + '%s' % sys.argv[1].split('.')[0]
    records.name = records.id
    records.description = records.id
    SeqIO.write(records, out, 'fasta')
| mit | Python |
|
c420f6bf996c53fa8958956626c136ac0e9e55f6 | Add sonos updater plugin. | jackwilsdon/beets,ibmibmibm/beets,beetbox/beets,sampsyo/beets,shamangeorge/beets,jackwilsdon/beets,sampsyo/beets,SusannaMaria/beets,jackwilsdon/beets,beetbox/beets,sampsyo/beets,jackwilsdon/beets,shamangeorge/beets,beetbox/beets,shamangeorge/beets,ibmibmibm/beets,SusannaMaria/beets,beetbox/beets,sampsyo/beets,SusannaMaria/beets,shamangeorge/beets,ibmibmibm/beets,ibmibmibm/beets,SusannaMaria/beets | beetsplug/sonosupdate.py | beetsplug/sonosupdate.py | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2018, Tobias Sauerwein.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.

"""Updates a Sonos library whenever the beets library is changed.
This is based on the Kodi Update plugin.

Put something like the following in your config.yaml to configure:
    kodi:
        host: localhost
        port: 8080
        user: user
        pwd: secret
"""
from __future__ import division, absolute_import, print_function

from beets import config
from beets.plugins import BeetsPlugin
import six
import soco


class SonosUpdate(BeetsPlugin):
    def __init__(self):
        super(SonosUpdate, self).__init__()
        self.register_listener('database_change', self.listen_for_db_change)

    def listen_for_db_change(self, lib, model):
        """Listens for beets db change and registers the update"""
        self.register_listener('cli_exit', self.update)

    def update(self, lib):
        """When the client exits, try to send a refresh request to a Sonos
        controller.
        """
        self._log.info(u'Requesting a Sonos library update...')

        # Try to send update request.
        try:
            device = soco.discovery.any_soco()
            device.music_library.start_library_update()
        except Exception:
            self._log.warning(u'Sonos update failed')
            return

        self._log.info(u'Sonos update triggered')
| mit | Python |
|
797114781ed4f31c265c58a76e39aa8ff6a16443 | Add missing file from last commit | ppwwyyxx/tensorpack,eyaler/tensorpack,eyaler/tensorpack,ppwwyyxx/tensorpack | tensorpack/utils/compatible_serialize.py | tensorpack/utils/compatible_serialize.py | #!/usr/bin/env python
import os

from .serialize import loads_msgpack, loads_pyarrow, dumps_msgpack, dumps_pyarrow

"""
Serialization that has compatibility guarantee (therefore is safe to store to disk).
"""

__all__ = ['loads', 'dumps']

# pyarrow has no compatibility guarantee
# use msgpack for persistent serialization, unless explicitly set from envvar
if os.environ.get('TENSORPACK_COMPATIBLE_SERIALIZE', 'msgpack') == 'msgpack':
    loads = loads_msgpack
    dumps = dumps_msgpack
else:
    loads = loads_pyarrow
    dumps = dumps_pyarrow
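
# --- Illustrative round trip (editor's sketch, not part of the original
# --- commit): a plain dict should survive dumps/loads unchanged.
#
#     blob = dumps({'step': 3, 'loss': 0.25})
#     assert loads(blob) == {'step': 3, 'loss': 0.25}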
| apache-2.0 | Python |
|
2f155e1dafd5302dfbf4607af81bfa979046be8e | add test file | parrt/msan501-starterkit | junk/t.py | junk/t.py | def f():
print "hi"
f() | bsd-2-clause | Python |
|
cfb39d7389d63a293dc075d420f80276a34df193 | Add minimal pygstc example to play a video | RidgeRun/gstd-1.x,RidgeRun/gstd-1.x,RidgeRun/gstd-1.x,RidgeRun/gstd-1.x | examples/pygstc/simple_pipeline.py | examples/pygstc/simple_pipeline.py | import time
import sys
from pygstc.gstc import *
from pygstc.logger import *

# Create a custom logger with loglevel=DEBUG
gstd_logger = CustomLogger('simple_pipeline', loglevel='DEBUG')
# Create the client with the logger
gstd_client = GstdClient(logger=gstd_logger)


def printError():
    print("To play run: python3 simple_pipeline.py play VIDEO_PATH")
    print("To stop run: python3 simple_pipeline.py stop")
    print("To reverse run: python3 simple_pipeline.py reverse")
    print("To play in slow motion run: python3 simple_pipeline.py slow_motion")


if(len(sys.argv) > 1):

    if(sys.argv[1] == "play"):
        FILE_SOURCE = sys.argv[2]
        # pipeline is the string with the pipeline description
        pipeline = "playbin uri=file:" + FILE_SOURCE
        # Following instructions create and play the pipeline
        gstd_client.pipeline_create("p0", pipeline)
        gstd_client.pipeline_play("p0")
        print("Playing")

    # Check this
    # reverse and slow motion restart the pipeline
    elif(sys.argv[1] == "reverse"):
        gstd_client.event_seek("p0", rate=-1.0, format=3, flags=1,
                               start_type=1, start=0, end_type=1, end=-1)
        print("Playing in reverse")

    elif(sys.argv[1] == "slow_motion"):
        gstd_client.event_seek("p0", rate=0.5, format=3, flags=1,
                               start_type=1, start=0, end_type=1, end=-1)
        print("Playing in slow motion")

    elif(sys.argv[1] == "stop"):
        # Following instructions stop and delete the pipeline
        gstd_client.pipeline_stop("p0")
        gstd_client.pipeline_delete("p0")
        print("Pipeline deleted")

    else:
        printError()
else:
    printError()
| lgpl-2.1 | Python |
|
f8d06f85e896c1098f58667c161d920f6d255d7b | Add utility for sent mail | Micronaet/micronaet-utility,Micronaet/micronaet-utility,Micronaet/micronaet-utility,Micronaet/micronaet-utility | sendmail/log_mail.py | sendmail/log_mail.py | # -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################

import os
import sys
from smtplib import SMTP
from datetime import datetime

# Parameter:
smtp_host = 'smtp.qboxmail.com'
smtp_port = 465
smtp_user = 'account@example.it'
smtp_password = 'password'
from_addr = 'from@example.it'
to_addr = 'dest@example.it'
subject = 'Subject'
body = 'body'

# Send mail:
smtp = SMTP()
smtp.set_debuglevel(0)
smtp.connect(smtp_host, smtp_port)
smtp.login(smtp_user, smtp_password)

date = datetime.now().strftime('%Y-%m-%d %H:%M')
smtp.sendmail(
    from_addr, to_addr,
    'From: %s\nTo: %s\nSubject: %s\nDate: %s\n\n%s' % (
        from_addr,
        to_addr,
        subject,
        date,
        body,
        ),
    )
smtp.quit()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
|
ddac657da2743c7435e8408677406d37eaea5836 | Add migration. | open-craft/opencraft,open-craft/opencraft,open-craft/opencraft,open-craft/opencraft,open-craft/opencraft | instance/migrations/0041_auto_20160420_1409.py | instance/migrations/0041_auto_20160420_1409.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('instance', '0040_auto_20160420_0754'),
    ]

    operations = [
        migrations.AlterField(
            model_name='openstackserver',
            name='status',
            field=models.CharField(choices=[('pending', 'Pending'), ('building', 'Building'), ('booting', 'Booting'), ('ready', 'Ready'), ('terminated', 'Terminated'), ('unknown', 'Unknown'), ('failed', 'BuildFailed')], max_length=20, db_index=True, default='pending'),
        ),
        migrations.RunSQL(
            [
                "UPDATE instance_openstackserver SET status = 'pending' WHERE status = 'new'",
                "UPDATE instance_openstackserver SET status = 'building' WHERE status = 'started'",
                "UPDATE instance_openstackserver SET status = 'booting' WHERE status = 'active' OR status = 'rebooting'",
                "UPDATE instance_openstackserver SET status = 'ready' WHERE status = 'booted' OR status = 'provisioning'",
            ],
        )
    ]
| agpl-3.0 | Python |
|
4c6964a6043c6c5bb3df7ad184e2c6a5537ca6da | Create __init__.py | pkug/intelmq,certtools/intelmq,aaronkaplan/intelmq,aaronkaplan/intelmq,pkug/intelmq,robcza/intelmq,aaronkaplan/intelmq,robcza/intelmq,pkug/intelmq,certtools/intelmq,robcza/intelmq,pkug/intelmq,robcza/intelmq,certtools/intelmq | intelmq/tests/bots/experts/fqdn2ip/__init__.py | intelmq/tests/bots/experts/fqdn2ip/__init__.py | agpl-3.0 | Python |
||
ce5ca3ac3268af331150f66865072a049869b3b2 | add abstraction magics | dekstop/ipython_extensions,NunoEdgarGub1/ipython_extensions,NunoEdgarGub1/ipython_extensions,danielballan/ipython_extensions,dekstop/ipython_extensions,minrk/ipython_extensions,NunoEdgarGub1/ipython_extensions,minrk/ipython_extensions,danielballan/ipython_extensions,danielballan/ipython_extensions,minrk/ipython_extensions,dekstop/ipython_extensions | abstraction.py | abstraction.py | """
abstraction magics

lets you turn a cell into a function

In [1]: plot(x, f(y))
   ...: xlabel('x')
   ...: ylabel('y')

In [2]: %functionize 1
"""

from IPython.utils.text import indent


def parse_ranges(s):
    blocks = s.split(',')
    ranges = []
    for block in blocks:
        if '-' in block:
            start, stop = [int(b) for b in block.split('-')]
            stop = stop + 1  # be inclusive?
        else:
            start = int(block)
            stop = start + 1
        ranges.append((start, stop))
    return ranges


def functionize(line):
    shell = get_ipython()
    splits = line.split(' ', 1)
    range_str = splits[0]
    args = splits[1] if len(splits) > 1 else ''
    ranges = parse_ranges(range_str)
    get_range = shell.history_manager.get_range

    blocks = ["def cell_function(%s):" % args]
    for start, stop in ranges:
        cursor = get_range(0, start, stop)
        for session_id, cell_id, code in cursor:
            blocks.append(indent(code))

    code = '\n'.join(blocks)
    shell.set_next_input(code)


def load_ipython_extension(ip):
    ip.magics_manager.register_function(functionize)
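
# --- Illustrative behaviour (editor's note, not part of the original file):
# parse_ranges turns the magic's argument into inclusive (start, stop) pairs,
# e.g. parse_ranges('1-3,5') == [(1, 4), (5, 6)].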
|
9af5c4e79234a47ac26e5d1890e70f741363b18a | Create factorise_test.py | nick599/PythonMathsAlgorithms | factorise_test.py | factorise_test.py | apache-2.0 | Python |
||
425a8e26d371038f6ebf7c80dd7faea0f1dd906e | Add base test for admin endpoints [WAL-883] | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | nodeconductor/core/tests/unittests/test_admin.py | nodeconductor/core/tests/unittests/test_admin.py | from django.contrib import admin
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse

User = get_user_model()


class TestAdminEndpoints(TestCase):
    def setUp(self):
        user, _ = User.objects.get_or_create(username='username', is_staff=True)
        self.client.force_login(user)
        self.admin_site_name = admin.site.name

    def _reverse_url(self, path):
        return reverse('%s:%s' % (self.admin_site_name, path))

    def test_app_list_urls_can_be_queried(self):
        app_list_urls = dict()
        for model in admin.site._registry:
            app_list_url = reverse('%s:%s' % (self.admin_site_name, 'app_list'), args=(model._meta.app_label,))
            app_list_urls.update({model._meta.app_label: app_list_url})

        for url in app_list_urls.values():
            response = self.client.get(url)
            self.assertEqual(response.status_code, 200)

    def test_base_admin_site_urls_can_be_queried(self):
        pages = ['index', 'login', 'logout', 'password_change', 'password_change_done', 'jsi18n']
        for name in pages:
            url = self._reverse_url(name)
            response = self.client.get(url)
            self.assertIn(response.status_code, [200, 302])

    def test_changelist_urls_can_be_queried(self):
        for model in admin.site._registry:
            url = self._reverse_url('%s_%s_changelist' % (model._meta.app_label, model._meta.model_name))
            response = self.client.get(url)
            self.assertEqual(response.status_code, 200)

    def test_add_urls_can_be_queried(self):
        for model in admin.site._registry:
            model_fullname = '%s_%s' % (model._meta.app_label, model._meta.model_name)
            url = self._reverse_url('%s_add' % model_fullname)
            response = self.client.get(url)
            self.assertIn(response.status_code, [200, 403])
| mit | Python |
|
90a30ae1b3165c03f6de5458c92f8ecb9d3f948a | Add homework min_three | lukin155/skola-programiranja | domaci-zadaci/05/test_min_three.py | domaci-zadaci/05/test_min_three.py | from solutions import min_three
import unittest
import random


class TestMinThree(unittest.TestCase):

    def test_1000_cases(self):
        for _ in range(1000):
            first = (random.random() - 0.5) * 2000
            second = (random.random() - 0.5) * 2000
            third = (random.random() - 0.5) * 2000
            expected = min(first, second, third)

            actual = min_three(first, second, third)
            self.assertEqual(expected, actual)

            actual = min_three(first, third, second)
            self.assertEqual(expected, actual)

            actual = min_three(second, first, third)
            self.assertEqual(expected, actual)

            actual = min_three(second, third, first)
            self.assertEqual(expected, actual)

            actual = min_three(third, first, second)
            self.assertEqual(expected, actual)

            actual = min_three(third, second, first)
            self.assertEqual(expected, actual)


if __name__ == '__main__':
    unittest.main()
| mit | Python |
|
aad51679cc2e4e719ed12e3983b54dcf15a2c06f | Update slack.py | fried-sausage/graphite-beacon,klen/graphite-beacon,klen/graphite-beacon,klen/graphite-beacon,fried-sausage/graphite-beacon,fried-sausage/graphite-beacon | graphite_beacon/handlers/slack.py | graphite_beacon/handlers/slack.py | import json
from tornado import gen, httpclient as hc

from graphite_beacon.handlers import AbstractHandler, LOGGER
from graphite_beacon.template import TEMPLATES


class SlackHandler(AbstractHandler):

    name = 'slack'

    # Default options
    defaults = {
        'webhook': None,
        'channel': None,
        'username': 'graphite-beacon',
    }

    emoji = {
        'critical': ':exclamation:',
        'warning': ':warning:',
        'normal': ':white_check_mark:',
    }

    def init_handler(self):
        self.webhook = self.options.get('webhook')
        assert self.webhook, 'Slack webhook is not defined.'

        self.channel = self.options.get('channel')
        if self.channel and not self.channel.startswith(('#', '@')):
            self.channel = '#' + self.channel
        self.username = self.options.get('username')
        self.client = hc.AsyncHTTPClient()

    def get_message(self, level, alert, value, target=None, ntype=None, rule=None):
        msg_type = 'slack' if ntype == 'graphite' else 'short'
        tmpl = TEMPLATES[ntype][msg_type]
        return tmpl.generate(
            level=level, reactor=self.reactor, alert=alert, value=value, target=target).strip()

    @gen.coroutine
    def notify(self, level, *args, **kwargs):
        LOGGER.debug("Handler (%s) %s", self.name, level)

        message = self.get_message(level, *args, **kwargs)
        data = dict()
        data['username'] = self.username
        data['text'] = message
        data['icon_emoji'] = self.emoji.get(level, ':warning:')
        if self.channel:
            data['channel'] = self.channel

        body = json.dumps(data)
        yield self.client.fetch(self.webhook, method='POST', body=body)
| import json

from tornado import gen, httpclient as hc

from graphite_beacon.handlers import AbstractHandler, LOGGER
from graphite_beacon.template import TEMPLATES


class SlackHandler(AbstractHandler):

    name = 'slack'

    # Default options
    defaults = {
        'webhook': None,
        'channel': None,
        'username': 'graphite-beacon',
    }

    emoji = {
        'critical': ':exclamation:',
        'warning': ':warning:',
        'normal': ':white_check_mark:',
    }

    def init_handler(self):
        self.webhook = self.options.get('webhook')
        assert self.webhook, 'Slack webhook is not defined.'

        self.channel = self.options.get('channel')
        if self.channel and not self.channel.startswith('#'):
            self.channel = '#' + self.channel
        self.username = self.options.get('username')
        self.client = hc.AsyncHTTPClient()

    def get_message(self, level, alert, value, target=None, ntype=None, rule=None):
        msg_type = 'slack' if ntype == 'graphite' else 'short'
        tmpl = TEMPLATES[ntype][msg_type]
        return tmpl.generate(
            level=level, reactor=self.reactor, alert=alert, value=value, target=target).strip()

    @gen.coroutine
    def notify(self, level, *args, **kwargs):
        LOGGER.debug("Handler (%s) %s", self.name, level)

        message = self.get_message(level, *args, **kwargs)
        data = dict()
        data['username'] = self.username
        data['text'] = message
        data['icon_emoji'] = self.emoji.get(level, ':warning:')
        if self.channel:
            data['channel'] = self.channel

        body = json.dumps(data)
        yield self.client.fetch(self.webhook, method='POST', body=body)
| mit | Python |
34908071bd11470806a84d9f76c630fd3fcc2d4b | test file :-) | g-weatherill/oq-hazardlib,gem/oq-engine,vup1120/oq-hazardlib,g-weatherill/oq-hazardlib,silviacanessa/oq-hazardlib,ROB-Seismology/oq-hazardlib,gem/oq-engine,mmpagani/oq-hazardlib,rcgee/oq-hazardlib,mmpagani/oq-hazardlib,gem/oq-hazardlib,larsbutler/oq-hazardlib,g-weatherill/oq-hazardlib,gem/oq-hazardlib,larsbutler/oq-hazardlib,vup1120/oq-hazardlib,larsbutler/oq-hazardlib,silviacanessa/oq-hazardlib,silviacanessa/oq-hazardlib,rcgee/oq-hazardlib,silviacanessa/oq-hazardlib,vup1120/oq-hazardlib,mmpagani/oq-hazardlib,gem/oq-engine,ROB-Seismology/oq-hazardlib,gem/oq-engine,gem/oq-hazardlib,ROB-Seismology/oq-hazardlib,g-weatherill/oq-hazardlib,gem/oq-engine,ROB-Seismology/oq-hazardlib | tests/gsim/abrahamson_silva_2008_test.py | tests/gsim/abrahamson_silva_2008_test.py | # nhlib: A New Hazard Library
# Copyright (C) 2012 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from nhlib.gsim.abrahamson_silva_2008 import AbrahamsonSilva2008

from tests.gsim.utils import BaseGSIMTestCase

# Test data have been generated from Fortran implementation
# of Dave Boore available at:
# http://www.daveboore.com/software_online.html
# Note that the Fortran implementation has been modified not
# to compute the 'Constant Displacement Model' term


class AbrahamsonSilva2008TestCase(BaseGSIMTestCase):
    GSIM_CLASS = AbrahamsonSilva2008

    def test_mean(self):
        self.check('AS08/AS08_MEAN.csv',
                   max_discrep_percentage=0.1)

    def test_std_inter(self):
        self.check('AS08/AS08_STD_INTER.csv',
                   max_discrep_percentage=0.1)

    def test_std_intra(self):
        self.check('AS08/AS08_STD_INTRA.csv',
                   max_discrep_percentage=0.1)

    def test_std_total(self):
        self.check('AS08/AS08_STD_TOTAL.csv',
                   max_discrep_percentage=0.1)
| agpl-3.0 | Python |
|
9fc373bbfa606aeb23c237df9c8d9143e14b60a1 | structure of preprocessing module for lea to fill in | vincentadam87/gatsby-hackathon-seizure,vincentadam87/gatsby-hackathon-seizure | code/python/seizures/preprocessing/preprocessing.py | code/python/seizures/preprocessing/preprocessing.py | import scipy.signal
def preprocess_multichannel_data(matrix):
    n_channel, m = matrix.shape
    for i in range(n_channel):
        preprocess_single_channel(matrix[i, :])


def preprocess_single_channel(x):
    x = remove_elec_noise(x)
    x = hp_filter(x)
    x = remove_dc(x)
    return x


def remove_dc(x):
    """
    Remove mean of signal
    :return:
    """
    pass


def remove_elec_noise(x):
    """
    Bandpass remove: 49-51Hz
    :return:
    """
    pass


def hp_filter(x):
    """
    Anti-aliasing
    :return:
    """
    pass
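
# --- Editor's sketch of one possible filling-in (assumptions: a sampling rate
# --- `fs` in Hz is known; a scipy.signal Butterworth band-stop filter is used;
# --- none of this is part of the original skeleton): ---
#
# def remove_elec_noise(x, fs=400.0):
#     # band-stop around 50 Hz mains hum (49-51 Hz)
#     b, a = scipy.signal.butter(4, [49.0 / (fs / 2), 51.0 / (fs / 2)],
#                                btype='bandstop')
#     return scipy.signal.lfilter(b, a, x)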
|
3e7429a36532e7c731d5d254b853dd72bdd94c82 | Create test.py | crap0101/fup,crap0101/fup,crap0101/fup | home/crap0101/test/500fup/test.py | home/crap0101/test/500fup/test.py | import operator
import re
import sys
import time
import urlparse

import fhp.api.five_hundred_px as _fh
import fhp.helpers.authentication as _a
from fhp.models.user import User

_TREG = re.compile('^(\d+)-(\d+)-(\d+).*?(\d+):(\d+):(\d+).*')
_URL = 'http://500px.com/'

_HTML_BEGIN = '''<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<HTML>
<HEAD>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<TITLE>following last updates</TITLE>
</HEAD>
<BODY>
'''
_HTML_END = '''</BODY>
</HTML>'''

FMT_TEXT = 'txt'
FMT_HTML = 'html'


def get_time(s):
    return time.strptime(' '.join(_TREG.match(s).groups()),
                         '%Y %m %d %H %M %S')


def get_last_upload_photo(user):
    return sorted(user.photos,
                  reverse=True, key=lambda p: get_time(p.created_at))[0]


def get_sorted_data(user):
    return sorted([(u, u.photos[0].created_at) for u in user.friends],
                  reverse=True, key=operator.itemgetter(1))


def _get_sorted_data(user):  # like get_sorted_data but slower :-D
    return sorted([(u, get_last_upload_photo(u).created_at) for u in user.friends],
                  reverse=True, key=operator.itemgetter(1))


def format_info_html(data):
    yield _HTML_BEGIN
    for user, date in data:
        yield '<a href="%s">%s</a> (%s)<p>' % (
            urlparse.urljoin(_URL, user.username), user.fullname.strip(), date)
        # time.strftime('%Y-%m-%d %H:%M:%S', get_time(date)))  # last for debug only
    yield _HTML_END


def format_info_txt(data):
    for user, date in data:
        yield '%s (%s, %s)' % (
            urlparse.urljoin(_URL, user.username), user.fullname.strip(), date)
        # time.strftime('%Y-%m-%d %H:%M:%S', get_time(date)))  # last for debug only


def print_info(data, fmt=FMT_HTML):
    if fmt == FMT_HTML:
        func = format_info_html
    elif fmt == FMT_TEXT:
        func = format_info_txt
    else:
        raise ValueError("unknown format <%s>" % fmt)
    for out in func(data):
        print out


if __name__ == '__main__':
    _f = _fh.FiveHundredPx(_a.get_consumer_key(),
                           _a.get_consumer_secret(),
                           _a.get_verify_url())
    username = sys.argv[1].encode('utf-8')
    me = User(username=username)
    sorted_uploads = get_sorted_data(me)
    print_info(sorted_uploads)

    ##############################################
    sys.exit()
    if 0:
        __t = []
        #class
        for i in range(10):
            __t.append(time.localtime())
            sleep(1)
    me.friends = 8
    """
    print type(me), dir(me), me.id
    print "------"
    print type(f), dir(f)
    for i in me.friends:
        print i.fullname, i.username, i.id, i.domain, dir(i)
        break
        if 0:
            for p in sorted((x.created_at for x in i.photos), reverse=True, key=lambda s:get_time(s)):
                print p
                break
        break
        for p in i.photos:
            print p, p.created_at, p.id
            break
    print list(f.get_user_friends(username))
    print type(me), len(me)
    print dir(f)
    """
| mit | Python |
|
42c82bc865d69b904ec688aa152caf3a247df1c6 | Create frontdoor.py | paulpoco/Raspi2GarageOpenerVideo,paulpoco/Raspi2GarageOpenerVideo,paulpoco/Raspi2GarageOpenerVideo,paulpoco/Raspi2GarageOpenerVideo | home/pi/PirFrontDoor/frontdoor.py | home/pi/PirFrontDoor/frontdoor.py | #!/usr/bin/python
import RPi.GPIO as GPIO
import time
import requests

GPIO.setmode(GPIO.BCM)

PIR_PIN = 22

GPIO.setup(PIR_PIN, GPIO.IN)


def MOTION(PIR_PIN):
    print "Motion Detected!"
    payload = {'value1': 'Someone at Front Door'}
    r = requests.post("https://maker.ifttt.com/trigger/{Event}/with/key/{secret key}", data=payload)
    print r.text


print "PIR Module Test (CTRL+C to exit)"
time.sleep(2)
print "Ready"

try:
    GPIO.add_event_detect(PIR_PIN, GPIO.RISING, callback=MOTION)
    while 1:
        time.sleep(120)
except KeyboardInterrupt:
    print "Quit"
    GPIO.cleanup()
| mit | Python |
|
1307070cfe27ca605bfcc279644b735ee941f627 | Add work for ex21.py. | jaredmanning/learning,jaredmanning/learning | lpthw/ex31.py | lpthw/ex31.py | print "You enter a dark room with two doors. Do you go through door #1 or #2?"
door = raw_input("> ")

if door == "1":
    print "There's a giant bear here eating a cheese cake. What do you do?"
    print "1. Take the cake."
    print "2. Scream at the bear."

    bear = raw_input("> ")

    if bear == "1":
        print "The bear eats your face off. Good job!"
    elif bear == "2":
        print "The bear eats your legs off. Good job!"
    else:
        print "Well, doing %s is probably better. Bear runs away." % bear

elif door == "2":
    print "You stare into the endless abyss at Cthulhu's retina."
    print "1. Blueberries."
    print "2. Yellow jacket clothespins."
    print "3. Understanding revolvers yelling melodies."

    insanity = raw_input("> ")

    if insanity == "1" or insanity == "2":
        print "Your body survives powered by a mind of jello. Good job!"
    else:
        print "The insanity rots your eyes into a pool of muck. Good job!"

else:
    print "You stumble around and fall on a knife and die. Good job!"
| mit | Python |
|
d3c6c91bc4b6214053b9a1d1d2291a402c164b86 | add file | jvc2688/cpm | GridPixelPlot.py | GridPixelPlot.py | import kplr
import numpy as np
import matplotlib.pyplot as plt

qua = 5

client = kplr.API()

# Find the target KOI.
koi = client.koi(282.02)
originStar = koi.star

# Find potential targets by Kepler magnitude
koisOver = client.kois(where="koi_kepmag between %f and %f" % (originStar.kic_kepmag, originStar.kic_kepmag + 0.1), sort=("koi_kepmag", 1))
koisUnder = client.kois(where="koi_kepmag between %f and %f" % (originStar.kic_kepmag - 0.1, originStar.kic_kepmag), sort=("koi_kepmag", 1))
koisUnder.reverse()

stars = []
stars.append(originStar.kepid)

# Find 16 stars that are closest to the origin star in terms of Kepler magnitude
i = 0
j = 0
while len(stars) < 17:
    while koisOver[i].kepid in stars:
        i += 1
    tmpOver = koisOver[i].star
    while koisUnder[j].kepid in stars:
        j += 1
    tmpUnder = koisUnder[j].star
    if tmpOver.kic_kepmag - originStar.kic_kepmag > originStar.kic_kepmag - tmpUnder.kic_kepmag:
        stars.append(tmpUnder.kepid)
        j += 1
    elif tmpOver.kic_kepmag - originStar.kic_kepmag < originStar.kic_kepmag - tmpUnder.kic_kepmag:
        stars.append(tmpOver.kepid)
        j += 1
    else:
        stars.append(tmpUnder.kepid)
        stars.append(tmpOver.kepid)
        i += 1
        j += 1

for tmp in stars:
    star = client.star(tmp)

    # Get a list of light curve datasets.
    tpfs = star.get_target_pixel_files(short_cadence=False)

    time, flux = [], []
    for tpf in tpfs:
        with tpf.open() as f:
            hdu_data = f[1].data
            time.append(hdu_data["time"])
            flux.append(hdu_data["flux"])

    t = time[qua]
    data = flux[qua]
    data = np.nan_to_num(data)
    data = np.ma.masked_equal(data, 0)
    shape = data.shape
    td = shape[0]
    x = shape[1]
    y = shape[2]

    # Plot the data
    f, axes = plt.subplots(x, y)
    for i in range(0, x):
        for j in range(0, y):
            axes[i, j].plot(t, data[0:td:1, i, j])
            plt.setp(axes[i, j].get_xticklabels(), visible=False)
            plt.setp(axes[i, j].get_yticklabels(), visible=False)

    plt.subplots_adjust(left=None, bottom=None, right=None, top=None,
                        wspace=0, hspace=0)
    plt.suptitle('Kepler %d Quarter %d\n Kepler magnitude %f' % (star.kepid, qua, star.kic_kepmag))
    plt.savefig('%d-%d.png' % (star.kepid, qua))
    plt.clf()
| mit | Python |
|
8515155d9d0df940eea758121124995320fce6bb | add experimental C/clang plugin | simonzack/SublimeLinter3,ikappas/SublimeLinter3,paulirish/SublimeLinter3,jasonrhaas/SublimeLinter3,SublimeLinter/SublimeLinter3,Raynos/SublimeLinter3-raynos,smanolloff/SublimeLinter3,paulirish/SublimeLinter3,lunixbochs/sublimelint,lunixbochs/sublimelint,zenlambda/SublimeLinter3,ikappas/SublimeLinter3,jasonrhaas/SublimeLinter3,SublimeLinter/SublimeLinter3,zenlambda/SublimeLinter3,simonzack/SublimeLinter3,smanolloff/SublimeLinter3 | languages/c.py | languages/c.py | import os
from lint.linter import Linter
from lint.util import find


class C(Linter):
    language = 'c'
    cmd = ('clang', '-xc', '-fsyntax-only', '-std=c99', '-Werror',
           '-pedantic')
    regex = (
        r'^<stdin>:(?P<line>\d+):(?P<col>\d+):'
        r'(?:(?P<ranges>[{}0-9:\-]+):)?\s+'
        r'(?P<error>.+)'
    )

    def communicate(self, cmd, code):
        includes = []
        if self.filename:
            parent = os.path.dirname(self.filename)
            includes.append('-I' + parent)

            inc = find(parent, 'include')
            if inc:
                includes.append('-I' + inc)

        cmd += ('-',) + tuple(includes)
        return super(C, self).communicate(cmd, code)
| mit | Python |
|
0026beea95ec26b8763feae270e79872f86de8a5 | Add run_sample_tests for executing sample tests in Travis | intracom-telecom-sdn/nstat,intracom-telecom-sdn/nstat | stress_test/sample_test_confs/run_sample_tests.py | stress_test/sample_test_confs/run_sample_tests.py | #! /usr/bin/env python3.4
# Copyright (c) 2015 Intracom S.A. Telecom Solutions. All rights reserved.
#
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License v1.0 which accompanies this distribution,
# and is available at http://www.eclipse.org/legal/epl-v10.html

"""
Runner for sample tests
"""


def run_tests():
    """
    Method for running sample tests
    """
    pass


if __name__ == '__main__':
    run_tests()
| epl-1.0 | Python |
|
352b17d8139fb0d269e4c17c01fe8ee488961c3a | Create HR_miniMaxSum.py | bluewitch/Code-Blue-Python | HR_miniMaxSum.py | HR_miniMaxSum.py | #!/bin/python3
import math
import os
import random
import re
import sys


# Complete the miniMaxSum function below.
def miniMaxSum(arr):
    maxx = max(arr)
    minn = min(arr)
    mini = arr.copy()
    mini.remove(maxx)
    maxi = arr.copy()
    maxi.remove(minn)
    sum_min = sum(mini)
    sum_max = sum(maxi)
    print(sum_min, sum_max)


if __name__ == '__main__':
    arr = list(map(int, input().rstrip().split()))
    miniMaxSum(arr)
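
# Illustration (editor's note, not part of the original solution): for the
# input "1 2 3 4 5" the program prints "10 14" -- the smallest and largest
# sums of four of the five integers.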
| mit | Python |
|
b8fe92674773c7470c3b47899a8832bbb94771b4 | Add path module | hundeboll/core,sknsean/core,sknsean/core,sknsean/core,diorcety/oe-lite-core,diorcety/oe-lite-core,hundeboll/core,sknsean/core,hundeboll/core,hundeboll/core,sknsean/core | lib/oelite/path.py | lib/oelite/path.py | import os
TOPDIR = os.getcwd()


def init(topdir):
    global TOPDIR
    TOPDIR = topdir


def relpath(path):
    """Return a relative version of paths compared to TOPDIR."""
    global TOPDIR
    if path.startswith(TOPDIR):
        return path[len(TOPDIR):].lstrip("/")
    return path


def which(path, filename, pathsep=os.pathsep):
    """Given a search path, find file."""
    if isinstance(path, basestring):
        path = path.split(pathsep)
    for p in path:
        f = os.path.join(p, filename)
        if os.path.exists(f):
            return os.path.abspath(f)
    return ''  # TODO: change to None, and fixup the breakage it causes
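
# --- Illustrative usage (editor's sketch; the paths are made up): ---
# init('/build/topdir')
# relpath('/build/topdir/recipes/foo.bb')       # -> 'recipes/foo.bb'
# which('/usr/local/bin:/usr/bin', 'python')    # -> e.g. '/usr/bin/python'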
| mit | Python |
|
8ce2da2ed2e445480ee2e10483a5fae1c7c677a0 | Include self contained method for output to a view | OdatNurd/OverrideAudit | lib/output_view.py | lib/output_view.py | import sublime
import sublime_plugin
###-----------------------------------------------------------------------------
def output_to_view(window,
title,
content,
reuse=True,
syntax=None,
clear=True,
settings=None):
if not isinstance(content, str):
content = "\n".join (content)
view = None
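    # With reuse enabled, adopt an existing view with a matching title instead of opening a new tab.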
if reuse:
for _view in window.views ():
if _view.name () == title:
view = _view
break
if view is None:
view = window.new_file ()
view.set_scratch (True)
view.set_name (title)
if syntax is not None:
view.assign_syntax (syntax)
else:
view.set_read_only (False)
if clear is True:
view.sel ().clear ()
view.sel ().add (sublime.Region (0, view.size ()))
view.run_command ("left_delete")
if window.active_view () != view:
window.focus_view (view)
if settings is not None:
for setting in settings:
view.settings ().set (setting, settings[setting])
    # Save current buffer size, selection information and view position
saved_size = view.size ()
saved_sel = list(view.sel ())
saved_position = view.viewport_position ()
# Single select, position cursor at end of file, insert the data
view.sel ().clear ()
view.sel ().add (sublime.Region (saved_size, saved_size))
view.run_command ("insert", {"characters": content})
# If the last selection was at the end of the buffer, replace that selection
# with the new end of the buffer so the relative position remains the same.
if sublime.Region (saved_size, saved_size) == saved_sel[-1]:
saved_sel[-1] = sublime.Region (view.size (), view.size ())
# Clear current selection and add original selection back
view.sel ().clear ()
for region in saved_sel:
view.sel ().add (region)
view.set_viewport_position (saved_position, False)
view.set_read_only (True)
###-----------------------------------------------------------------------------
| mit | Python |
|
0d35b502515a9775166e775c3462ca9300fe4517 | add examples | nschloe/maelstrom,nschloe/maelstrom | examples/helpers.py | examples/helpers.py | # -*- coding: utf-8 -*-
#
from dolfin import as_backend_type
import matplotlib.pyplot as plt
import scipy.linalg
def show_matrix(A):
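    # Unwrap the DOLFIN/PETSc matrix to a scipy sparse matrix and plot |A| entrywise.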
A = as_backend_type(A)
A_matrix = A.sparray()
# colormap
cmap = plt.cm.gray_r
A_dense = A_matrix.todense()
# A_r = A_dense[0::2][0::2]
# A_i = A_dense[1::2][0::2]
cmap.set_bad('r')
# im = plt.imshow(
# abs(A_dense), cmap=cmap, interpolation='nearest', norm=LogNorm()
# )
plt.imshow(abs(A_dense), cmap=cmap, interpolation='nearest')
plt.colorbar()
plt.show()
return
def get_eigenvalues(A):
A = as_backend_type(A)
A_matrix = A.sparray()
return scipy.linalg.eigvals(A_matrix.todense())
| mit | Python |
|
9f9955ff920d88cb0c9dd7ce4abeaac54a1c4977 | add tests for the migration command | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/motech/repeaters/tests/test_repeaters_migration.py | corehq/motech/repeaters/tests/test_repeaters_migration.py | from django.core.management import call_command
from django.test import TestCase
from corehq.motech.dhis2.repeaters import (
SQLDhis2EntityRepeater,
SQLDhis2Repeater,
)
from corehq.motech.fhir.repeaters import SQLFHIRRepeater
from corehq.motech.models import ConnectionSettings
from corehq.motech.repeaters.dbaccessors import delete_all_repeaters
from corehq.motech.repeaters.expression.repeaters import (
SQLCaseExpressionRepeater,
)
from corehq.motech.repeaters.models import (
Repeater,
SQLAppStructureRepeater,
SQLCaseRepeater,
SQLCreateCaseRepeater,
SQLDataRegistryCaseUpdateRepeater,
SQLFormRepeater,
SQLLocationRepeater,
SQLReferCaseRepeater,
SQLRepeater,
SQLShortFormRepeater,
SQLUpdateCaseRepeater,
SQLUserRepeater,
)
from corehq.motech.openmrs.repeaters import SQLOpenmrsRepeater
from .data.repeaters import repeater_test_data
class TestMigrationCommand(TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.conn = ConnectionSettings(id=1, url="http://url.com", domain='rtest')
cls.conn.save()
cls.couch_repeaters = []
for r in repeater_test_data:
r = Repeater.wrap(r)
r.save(sync_to_sql=False)
cls.couch_repeaters.append(r)
return super().setUpClass()
@classmethod
def tearDownClass(cls) -> None:
delete_all_repeaters()
return super().tearDownClass()
def test_case_repeater_docs_are_migrated(self):
call_command('migrate_caserepeater')
self._assert_repeaters_equality(SQLCaseRepeater, "CaseRepeater")
call_command('migrate_formrepeater')
self._assert_repeaters_equality(SQLFormRepeater, "FormRepeater")
call_command('migrate_shortformrepeater')
self._assert_repeaters_equality(SQLShortFormRepeater, "ShortFormRepeater")
call_command('migrate_createcaserepeater')
self._assert_repeaters_equality(SQLCreateCaseRepeater, "CreateCaseRepeater")
call_command('migrate_refercaserrepeater')
self._assert_repeaters_equality(SQLReferCaseRepeater, "ReferCaseRepeater")
call_command('migrate_dhis2repeater')
self._assert_repeaters_equality(SQLDhis2Repeater, "Dhis2Repeater")
call_command('migrate_userrepeater')
self._assert_repeaters_equality(SQLUserRepeater, "UserRepeater")
call_command('migrate_fhirrepeater')
self._assert_repeaters_equality(SQLFHIRRepeater, "FHIRRepeater")
call_command('migrate_appstructurerepeater')
self._assert_repeaters_equality(SQLAppStructureRepeater, "AppStructureRepeater")
call_command('migrate_caseexpressionrepeater')
self._assert_repeaters_equality(SQLCaseExpressionRepeater, "CaseExpressionRepeater")
call_command('migrate_dataregistrycaseupdaterepeater')
self._assert_repeaters_equality(SQLDataRegistryCaseUpdateRepeater, "DataRegistryCaseUpdateRepeater")
call_command('migrate_dhis2entityrepeater')
self._assert_repeaters_equality(SQLDhis2EntityRepeater, "Dhis2EntityRepeater")
call_command('migrate_openmrsrepeater')
self._assert_repeaters_equality(SQLOpenmrsRepeater, "OpenmrsRepeater")
call_command('migrate_locationrepeater')
self._assert_repeaters_equality(SQLLocationRepeater, "LocationRepeater")
call_command('migrate_updatecaserepeater')
self._assert_repeaters_equality(SQLUpdateCaseRepeater, "UpdateCaseRepeater")
# test for count
self.assertEqual(SQLRepeater.objects.count(), len(self.couch_repeaters))
def _assert_repeaters_equality(self, sql_class, doc_type):
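        # The fixture data holds exactly two docs per repeater type; couch ids
        # and migrated SQL repeater_ids must match one-to-one.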
sql_ids = set(sql_class.objects.all().values_list('repeater_id', flat=True))
couch_ids = {r._id for r in self._get_repeater_objects(doc_type)}
self.assertEqual(len(couch_ids), 2)
self.assertEqual(len(sql_ids), 2)
self.assertCountEqual(sql_ids, couch_ids)
self.assertEqual(sql_ids, couch_ids)
def _get_repeater_objects(self, repeater_type):
return [r for r in self.couch_repeaters if r.doc_type == repeater_type]
| bsd-3-clause | Python |
|
f4bf1c83f55013051037b4380f1b579375bad3d7 | Add test for ContextAwareForm | patrick91/pycon,patrick91/pycon | backend/tests/api/test_forms.py | backend/tests/api/test_forms.py | import pytest
from api.forms import ContextAwareForm
from users.models import User
def test_cannot_use_form_context_if_its_not_passed():
class TestModelForm(ContextAwareForm):
class Meta:
model = User
fields = ('id',)
form = TestModelForm()
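    # Accessing .context on a form built without one should raise a descriptive ValueError.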
with pytest.raises(ValueError) as e:
form.context
assert str(e.value) == 'Make sure you pass the context when instancing the Form'
| mit | Python |
|
3d40378e0e42f62615199daf97a48f24d5b9eb12 | add basic test for LIS | cjwfuller/algorithms | test_lis.py | test_lis.py | import unittest
import lis
class TestLis(unittest.TestCase):
def test_basic(self):
l = lis.Lis()
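        # This permutation has four longest increasing subsequences, each of length 6.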
answer = [[0, 4, 6, 9, 13, 15], [0, 2, 6, 9, 13, 15], [0, 4, 6, 9, 11, 15], [0, 2, 6, 9, 11, 15]]
self.assertEquals(answer, l.lis([0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]))
if __name__ == '__main__':
unittest.main() | mit | Python |
|
71675f81214ea510c377abf23fe2a11dfb113717 | create module | metadata1984/pyAhocorasick | pyAhocorasick/pyAhocorasick.py | pyAhocorasick/pyAhocorasick.py | #-*- encoding=utf-8 -*-
'''
Created on Mar 15, 2014
@author: tonyzhang
'''
| mit | Python |
|
963866e795df42121f972ee2170ddeb890f7e5b7 | Create pytest test file | daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various | python-practice/test_arrays.py | python-practice/test_arrays.py | import arrays
# Reverse an array in place
def test_reverse_array():
input = [1, 2, 3]
assert arrays.reverse_array(input) == [3, 2, 1]
# Search a sorted list
def test_binary_search_no_list():
input_array = []
target = 1
assert arrays.binary_search(input_array, target) == -1
def test_binary_search_short_list_found():
input_array = [1]
target = 1
assert arrays.binary_search(input_array, target) == 0
def test_binary_search_short_list_not_found():
input_array = [1]
target = 10
assert arrays.binary_search(input_array, target) == -1
def test_binary_search_even_list():
input_array = [1, 4, 8, 10]
target = 4
assert arrays.binary_search(input_array, target) == 1
def test_binary_search_odd_list():
input_array = [1, 5, 10]
target = 1
assert arrays.binary_search(input_array, target) == 0
def test_binary_search_last_in_list():
input_array = [1, 5, 10]
target = 10
assert arrays.binary_search(input_array, target) == 2
def test_binary_search_not_in_list_big():
input_array = [1, 5, 10]
target = 100
assert arrays.binary_search(input_array, target) == -1
def test_binary_search_not_in_list_small():
input_array = [1, 5, 10]
target = -100
assert arrays.binary_search(input_array, target) == -1
| mit | Python |
|
4932483b10876eddab39477063a9b8546e5e0f33 | Create a.py | Kim-Seonghyeon/youtube_8m | a.py | a.py | a
| apache-2.0 | Python |
|
051bbd588e7ad20dd9a00918c437a86d46ba8f7e | Create transfer.py | leewiny/ctransfer,leewiny/ctransfer | transfer.py | transfer.py | #! /usr/bin/env python
#-*-coding:utf-8-*-
import MySQLdb
import psutil
import urllib
import time
import sys
import os
#########################################################################################################################
## MySQLdb : make sure this python module is installed on the system before deployment
## psutil  : python module used here to manage system processes
#########################################################################################################################
(TRANSFERSERVERCOMMAND, TRANSFERCLIENTCOMMAND, CMSSERVER, TMPDIR) = ("./transferserver", "./transferclient", 'http://cms.legaldaily.dev', './tmp', )
def serverwatchdog ():
    """ Watchdog for the content-receiving server: keeps the server-side receiver running; checks whether the transfer listener process is alive and starts it if it is not """
checkdirectory()
while True:
found = False
for process in psutil.get_process_list():
processname = process.name()
if processname==TRANSFERSERVERCOMMAND:
found = True
break
if not found:
os.system ( TRANSFERSERVERCOMMAND+' &' )
time.sleep(5)
def clientwatchdog ():
""" 内容发送程序监控进程:保证内容发送程序的正常运行;启动内容发送程序并监控内容发送程序是否运行,若没有运行,启动发送程序 """
checkdirectory ()
while True:
transfertask ()
time.sleep(5)
def checkdirectory ():
"""启动服务器端监听任务进程,如果当前目录不在适当的目录下,则给出错误提示并退出程序"""
if not os.path.isfile ( TRANSFERSERVERCOMMAND ):
print "transfer server command not compiled in current directory or run command not in current directory"
exit(0)
def transfertask ():
"""在发送的客户端运行传输任务,将需要发送的文件保存在指定的远程机器的指定位置"""
try:
if not os.path.isdir (TMPDIR):
os.mkdir(TMPDIR)
conn = MySQLdb.connect ( host='168.0.0.98', user='username', passwd='password', port=3306 )
cur = conn.cursor()
conn.select_db('cms')
cur.execute(' SELECT `uuid` FROM `cms_content_publish` WHERE `status`=1 ')
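        # rows with status=1 are assumed to mark content queued for publishing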
cur.scroll ( 0, mode='absolute' )
results = cur.fetchall()
for r in results:
cur.execute (' SELECT * FROM `cms_content_publish` WHERE `uuid`= %s ' % r[0] )
cur.scroll ( 0, mode='absolute' )
publish = cur.fetchone()
localurl = CMSSERVER+publish[12]
publicurl = '/var/www/cms/public'+publish[11]
serverhost = publish[8]
serverport = publish[9]
serverpswd = publish[10]
filename = TMPDIR + '/' + os.path.basename ( publicurl )
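            # fetch the rendered page from the CMS and stage it under TMPDIR before pushing it out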
socket = urllib.urlopen(localurl)
fout = file ( filename, 'w' )
            fout.write ( socket.read() )
            fout.close () # flush the staged file before the transfer command reads it
transfercommand = "{0} {1} {2} {3} {4} {5}".format(TRANSFERCLIENTCOMMAND, filename, publicurl, serverhost, serverport, serverpswd)
deletecommand = 'DELETE FROM `cms_content_publish` WHERE `uuid`= {0} '.format ( r[0] )
# print transfercommand
# print deletecommand
            os.system ( transfercommand ) # transfer file to public server
#cur.execute ( deletecommand )
            os.unlink ( filename ) # delete the file fetched from the server
conn.commit()
cur.close()
conn.close()
except MySQLdb.Error,e:
print "Mysql Error %d: %s" % (e.args[0], e.args[1])
exit ()
if __name__=="__main__":
argc = len ( sys.argv )
if argc>1 :
        clientwatchdog () # client-side daemon
else :
        serverwatchdog () # server-side daemon
| mit | Python |
|
50e24b0445f259d975e5dd78dd34a8e760e4ed88 | Create SQLite database and table and insert data from CSV file | joykuotw/python-endpoints,joykuotw/python-endpoints,joykuotw/python-endpoints | DB.py | DB.py | # Create a database
import sqlite3
import csv
from datetime import datetime
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class createDB():
def readCSV(self, filename):
conn = sqlite3.connect('CIUK.db')
print 'DB Creation Successful!'
cur = conn.cursor()
# cur.execute('''DROP TABLE PRODUCTS;''')
cur.execute('''CREATE TABLE PRODUCTS
(ID INTEGER PRIMARY KEY AUTOINCREMENT,
TITLE TEXT NOT NULL,
DESCRIPTION TEXT NOT NULL,
PRICE INTEGER NOT NULL,
CREATED_AT TIMESTAMP,
UPDATED_AT TIMESTAMP);''')
print 'Table Creation Successful!'
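        # each CSV row supplies title/description/price; timestamps are set to the import time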
with open(filename) as f:
reader = csv.reader(f)
for row in reader:
cur.execute("INSERT INTO PRODUCTS VALUES (null, ?, ?, ?, ?, ?);", (unicode(row[0]), unicode(row[1]), unicode(row[2]), datetime.now(), datetime.now()))
print 'Successfully read data from CSV file!'
conn.commit()
conn.close()
c = createDB().readCSV('products.csv') | mit | Python |
|
874c01374397014e7c99afd67f5680ed32f1c5c6 | Build and revision number script | atifaziz/NCrontab,atifaziz/NCrontab | bn.py | bn.py | import sys
from time import gmtime
year, mon, mday, hour, min, sec, wday, yday, isdst = gmtime()
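# build = (months since Jan 2000) * 100 + day of month; revision = UTC time of day as HHMM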
bld = ((year - 2000) * 12 + mon - 1) * 100 + mday
rev = hour * 100 + min
print 'Your build and revision number for today is %d.%d.' % (bld, rev)
| apache-2.0 | Python |
|
480b0bd80f65646da52824403ade92880af1af2e | Add circle ci settings | magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3 | project/circleci_settings.py | project/circleci_settings.py | # -*- coding: utf-8 -*-
DEBUG = True
LOCAL_DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'circle_test',
'USER': 'circleci',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '5432',
}
}
LOCALLY_INSTALLED_APPS = [
]
ENABLE_EMAILS = False
LOCALLY_ALLOWED_HOSTS = [
]
ADMINS = []
| mit | Python |
|
780e4eb03420d75c18d0b21b5e616f2952aeda41 | Test sending headers with end stream. | python-hyper/hyper-h2,bhavishyagopesh/hyper-h2,Kriechi/hyper-h2,Kriechi/hyper-h2,mhils/hyper-h2,vladmunteanu/hyper-h2,vladmunteanu/hyper-h2,python-hyper/hyper-h2 | test/test_basic_logic.py | test/test_basic_logic.py | # -*- coding: utf-8 -*-
"""
test_basic_logic
~~~~~~~~~~~~~~~~
Test the basic logic of the h2 state machines.
"""
import h2.connection
from hyperframe import frame
class TestBasicConnection(object):
"""
Basic connection tests.
"""
example_request_headers = [
(':authority', 'example.com'),
(':path', '/'),
(':scheme', 'https'),
(':method', 'GET'),
]
def test_begin_connection(self):
c = h2.connection.H2Connection()
frames = c.send_headers_on_stream(1, self.example_request_headers)
assert len(frames) == 1
def test_sending_some_data(self):
c = h2.connection.H2Connection()
frames = c.send_headers_on_stream(1, self.example_request_headers)
frames.append(c.send_data_on_stream(1, b'test', end_stream=True))
assert len(frames) == 2
def test_receive_headers_frame(self):
f = frame.HeadersFrame(1)
f.data = b'fake headers'
f.flags = set(['END_STREAM', 'END_HEADERS'])
c = h2.connection.H2Connection()
assert c.receive_frame(f) is None
def test_send_headers_end_stream(self):
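        # end_stream=True must set END_STREAM alongside END_HEADERS on the emitted frame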
c = h2.connection.H2Connection()
frames = c.send_headers_on_stream(
1, self.example_request_headers, end_stream=True
)
assert len(frames) == 1
assert frames[-1].flags == set(['END_STREAM', 'END_HEADERS'])
| # -*- coding: utf-8 -*-
"""
test_basic_logic
~~~~~~~~~~~~~~~~
Test the basic logic of the h2 state machines.
"""
import h2.connection
from hyperframe import frame
class TestBasicConnection(object):
"""
Basic connection tests.
"""
example_request_headers = [
(':authority', 'example.com'),
(':path', '/'),
(':scheme', 'https'),
(':method', 'GET'),
]
def test_begin_connection(self):
c = h2.connection.H2Connection()
frames = c.send_headers_on_stream(1, self.example_request_headers)
assert len(frames) == 1
def test_sending_some_data(self):
c = h2.connection.H2Connection()
frames = c.send_headers_on_stream(1, self.example_request_headers)
frames.append(c.send_data_on_stream(1, b'test', end_stream=True))
assert len(frames) == 2
def test_receive_headers_frame(self):
f = frame.HeadersFrame(1)
f.data = b'fake headers'
f.flags = set(['END_STREAM', 'END_HEADERS'])
c = h2.connection.H2Connection()
assert c.receive_frame(f) is None
| mit | Python |
8adac46cd59c562ec494508ad735843253adc1f2 | add frequencies benchmark | pombredanne/toolz,llllllllll/toolz,jcrist/toolz,bartvm/toolz,berrytj/toolz,berrytj/toolz,simudream/toolz,whilo/toolz,llllllllll/toolz,karansag/toolz,whilo/toolz,machinelearningdeveloper/toolz,quantopian/toolz,jdmcbr/toolz,pombredanne/toolz,jcrist/toolz,quantopian/toolz,bartvm/toolz,cpcloud/toolz,karansag/toolz,jdmcbr/toolz,simudream/toolz,machinelearningdeveloper/toolz,cpcloud/toolz | bench/test_frequencies.py | bench/test_frequencies.py | from toolz import frequencies, identity
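# one million ints spanning 1000 distinct values, so the counting loop dominates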
data = range(1000)*1000
def test_frequencies():
frequencies(data)
| bsd-3-clause | Python |
|
892740ce17c2906de996089f07f005c7812270ef | add init back | Hawt-Lava/Charizard | src/__init__.py | src/__init__.py | """ Source Files, and a location for Global Imports """
| mit | Python |
|
94acf181f063808c2b6444dbc15ea40ee17bdee3 | print structure | JohanComparat/pyEmerge,JohanComparat/pyEmerge | bin/print_h5_structure.py | bin/print_h5_structure.py | import sys
file_name = sys.argv[1]
# python3 print_h5_structure.py filename
import glob
import os
import numpy as n
import h5py # HDF5 support
f0 = h5py.File(file_name, "r")
def print_attr(h5item):
for attr in h5item:
print(attr, h5item[attr])
def print_all_key(h5item):
for key in h5item.keys():
print('========================================')
print(key, h5item[key])
print('- - - - - - - - - - - - - - - - - - - - ')
print_attr(h5item[key])
def print_data_structure(h5item):
print('+ + + + + + + HEADER + + + + + + + + +')
print_attr(h5item.attrs)
print('\n')
print('+ + + + + + + DATA + + + + + + + + + +')
print_all_key(h5item)
print_data_structure(f0)
| unlicense | Python |
|
fea9e1e80d03b87c05eacd02b5440fc783eb456d | Fix buildfier | dlorenc/debian-docker,GoogleContainerTools/base-images-docker,dlorenc/debian-docker,GoogleContainerTools/base-images-docker | package_managers/apt_get/repos.bzl | package_managers/apt_get/repos.bzl | # Copyright 2017 Google Inc. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Rules that create additional apt-get repo files."""
load("@bazel_tools//tools/build_defs/pkg:pkg.bzl", "pkg_tar")
def _impl(ctx):
ctx.actions.write(ctx.outputs.out, content="%s\n" % ctx.attr.repo)
_generate_additional_repo = rule(
attrs = {
"repo": attr.string(doc = "Additional repo to add, in sources.list format"),
},
executable = False,
outputs = {
"out": "%{name}.list",
},
implementation = _impl,
)
def generate_additional_repos(name, repos):
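    # Write one <name>_<i>.list file per repo, then tar them all into /etc/apt/sources.list.d/.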
all_repo_files=[]
for i, repo in enumerate(repos):
repo_name = "%s_%s" % (name, i)
all_repo_files.append(repo_name)
_generate_additional_repo(
name=repo_name,
repo=repo
)
pkg_tar(
name=name,
srcs=all_repo_files,
package_dir="/etc/apt/sources.list.d/"
)
"""Generates /etc/apt/sources.list.d/ files with the specified repos.
Args:
repos: List of repos to add in sources.list format.
"""
| # Copyright 2017 Google Inc. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Rules that create additional apt-get repo files."""
load("@bazel_tools//tools/build_defs/pkg:pkg.bzl", "pkg_tar")
def _impl(ctx):
ctx.actions.write(ctx.outputs.out, content="%s\n" % ctx.attr.repo)
_generate_additional_repo = rule(
attrs = {
"repo": attr.string(doc = "Additional repo to add, in sources.list format"),
},
executable = False,
outputs = {
"out": "%{name}.list",
},
implementation = _impl,
)
def generate_additional_repos(name, repos):
all_repo_files=[]
for i, repo in enumerate(repos):
repo_name = "%s_%s" % (name, i)
all_repo_files.append(repo_name)
_generate_additional_repo(
name=repo_name,
repo=repo
)
pkg_tar(
name=name,
srcs=all_repo_files,
package_dir="/etc/apt/sources.list.d/"
)
"""Generates /etc/apt/sources.list.d/ files with the specified repos.
Args:
repos: List of repos to add in sources.list format.
"""
| apache-2.0 | Python |
f5a561494ece69c32d4bbd3e23c435a0fe74788a | Add local enum capability (needed for contentwrapper) | arobb/python-processrunner,arobb/python-processrunner | processrunner/enum.py | processrunner/enum.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
# For use with deployment statuses
# https://stackoverflow.com/a/1695250
def enum(*sequential, **named):
"""An implementation of the Enum data type
Usage
    myEnum = enum(
'Apple'
, 'Banana')
"""
enums = dict(zip(sequential, range(len(sequential))), **named)
reverse = dict((value, key) for key, value in list(enums.items()))
enums['reverse_mapping'] = reverse
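    # reverse_mapping lets callers translate a value back to its name, e.g. Enum.reverse_mapping[0] == 'Apple'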
return type(str('Enum'), (), enums) | mit | Python |