commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
b5972a5651ba1ace28ae54d5a1a4f31a07e97670 | add server_costs table | CottageLabs/finance,CottageLabs/finance,CottageLabs/finance,CottageLabs/finance | migrations/versions/1e27c434bb14_create_server_costs.py | migrations/versions/1e27c434bb14_create_server_costs.py | """create server_costs table
Revision ID: 1e27c434bb14
Revises: fa0f07475596
Create Date: 2016-03-14 15:57:19.945327
"""
# revision identifiers, used by Alembic.
revision = '1e27c434bb14'
down_revision = 'fa0f07475596'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    """Create the `server_costs` table (one cost record per project)."""
    op.create_table(
        'server_costs',
        # Foreign key to projects.url doubles as the primary key, so each
        # project has at most one server-cost row.
        sa.Column('project_url', sa.String(length=255), sa.ForeignKey('projects.url'), nullable=False, primary_key=True),
        # Monetary amount with two decimal places.
        sa.Column('value', sa.Numeric(precision=10, scale=2), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True)
    )
def downgrade():
    """Reverse upgrade() by dropping the table."""
    op.drop_table('server_costs')
| apache-2.0 | Python |
|
ff519b5145accbc10fcb7baa955bc1fe44774c27 | Add browser/websocket.py | jonathanverner/brython,molebot/brython,JohnDenker/brython,rubyinhell/brython,brython-dev/brython,JohnDenker/brython,Hasimir/brython,firmlyjin/brython,amrdraz/brython,Mozhuowen/brython,firmlyjin/brython,jonathanverner/brython,Hasimir/brython,JohnDenker/brython,rubyinhell/brython,brython-dev/brython,Hasimir/brython,Hasimir/brython,kevinmel2000/brython,amrdraz/brython,rubyinhell/brython,amrdraz/brython,Isendir/brython,olemis/brython,brython-dev/brython,Mozhuowen/brython,jonathanverner/brython,firmlyjin/brython,kevinmel2000/brython,Mozhuowen/brython,firmlyjin/brython,molebot/brython,Lh4cKg/brython,kikocorreoso/brython,Isendir/brython,Lh4cKg/brython,olemis/brython,kevinmel2000/brython,jonathanverner/brython,kikocorreoso/brython,rubyinhell/brython,kevinmel2000/brython,amrdraz/brython,molebot/brython,Isendir/brython,Isendir/brython,Mozhuowen/brython,olemis/brython,Lh4cKg/brython,olemis/brython,kikocorreoso/brython,molebot/brython,Lh4cKg/brython,firmlyjin/brython,JohnDenker/brython | src/Lib/browser/websocket.py | src/Lib/browser/websocket.py | from browser import window
import javascript
WebSocket = javascript.JSConstructor(window.WebSocket) | bsd-3-clause | Python |
|
126491288a532da08fb3923eae2635a84736798d | Add new package: libsamplerate (#16143) | LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/libsamplerate/package.py | var/spack/repos/builtin/packages/libsamplerate/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Libsamplerate(AutotoolsPackage):
    """libsamplerate (also known as Secret Rabbit Code) is a library for
    performing sample rate conversion of audio data."""
    homepage = "http://www.mega-nerd.com/libsamplerate/history.html"
    url = "http://www.mega-nerd.com/libsamplerate/libsamplerate-0.1.9.tar.gz"
    version('0.1.9', sha256='0a7eb168e2f21353fb6d84da152e4512126f7dc48ccb0be80578c565413444c1')
    version('0.1.8', sha256='93b54bdf46d5e6d2354b7034395fe329c222a966790de34520702bb9642f1c06')
    # Full autotools chain is needed because the build regenerates configure.
    depends_on('m4', type='build')
    depends_on('autoconf', type='build')
    depends_on('automake', type='build')
    depends_on('libtool', type='build')
| lgpl-2.1 | Python |
|
4e6a6e4f2758bd616f0c2c2703160cbb9c539b63 | add new package (#23843) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-kubernetes/package.py | var/spack/repos/builtin/packages/py-kubernetes/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyKubernetes(PythonPackage):
    """Official Python client library for kubernetes. """
    homepage = "https://kubernetes.io"
    git = "https://github.com/kubernetes-client/python.git"
    pypi = "kubernetes/kubernetes-17.17.0.tar.gz"
    maintainers = ['vvolkl']
    version('17.17.0', sha256='c69b318696ba797dcf63eb928a8d4370c52319f4140023c502d7dfdf2080eb79')
    version('12.0.1', sha256='ec52ea01d52e2ec3da255992f7e859f3a76f2bdb51cf65ba8cd71dfc309d8daa')
    version('12.0.0', sha256='72f095a1cd593401ff26b3b8d71749340394ca6d8413770ea28ce18efd5bcf4c')
    version('11.0.0', sha256='1a2472f8b01bc6aa87e3a34781f859bded5a5c8ff791a53d889a8bd6cc550430')
    version('10.1.0', sha256='85a767d04f17d6d317374b6c35e09eb168a6bfd9276f0b3177cc206376bad968')
    version('10.0.1', sha256='3770a496663396ad1def665eeadb947b3f45217a08b64b10c01a57e981ac8592')
    version('9.0.0', sha256='a8b0aed55ba946faea660712595a52ae53a8854df773d96f47a63fa0c9d4e3bf')
    # Runtime requirements mirror the upstream setup.py pins.
    depends_on('py-certifi@14.05.14:', type=('build', 'run'))
    depends_on('py-six@1.9.0:', type=('build', 'run'))
    depends_on('py-python-dateutil@2.5.3:', type=('build', 'run'))
    depends_on('py-setuptools@21.0.0:', type=('build'))
    depends_on('py-pyyaml@3.12:', type=('build', 'run'))
    depends_on('py-google-auth@1.0.1:', type=('build', 'run'))
    # Backport needed only on Python 2.
    depends_on('py-ipaddress@1.0.17:', when='^python@:2.8', type=('build', 'run'))
    depends_on('py-websocket-client@0.32:0.39,0.43:', type=('build', 'run'))
    depends_on('py-requests', type=('build', 'run'))
    depends_on('py-requests-oauthlib', type=('build', 'run'))
    depends_on('py-urllib3@1.24.2:', type=('build', 'run'))
| lgpl-2.1 | Python |
|
04d6fd6ceabf71f5f38fd7cf25cd4ac2bcb6b57f | Add simple web server to display measurements | rigtorp/rpi2-sensors | server.py | server.py | import sqlite3
from flask import Flask, g, render_template_string
app = Flask(__name__)
def get_db():
    """Return the request-scoped SQLite connection, opening it on first use.

    The handle is cached on flask.g so repeated calls within one request
    share a single connection.
    """
    cached = getattr(g, '_database', None)
    if cached is not None:
        return cached
    g._database = sqlite3.connect('sensors.db')
    return g._database
index_tmpl = """
<!doctype html>
<title>Sensors</title>
<body>
<h1>Senors</h1>
<dl>
<dt>Outside Temperature</dt>
<dd>{{ sensors['ot'] }}</dd>
<dt>Outside Humidity</dt>
<dd>{{ sensors['oh'] }}</dd>
<dt>Inside Temperature</dt>
<dd>{{ sensors['it'] }}</dd>
<dt>Inside Humidity</dt>
<dd>{{ sensors['ih'] }}</dd>
<dt>Barometric Pressure</dt>
<dd>{{ sensors['bp'] }}</dd>
<dt>Time</dt>
<dd>{{ ts }} UTC</dd>
</dl>
"""
@app.route('/')
def index():
    """Render the latest reading of every sensor as a simple HTML page."""
    db = get_db()
    # NOTE(review): selecting bare columns with GROUP BY lets SQLite pick an
    # arbitrary row per sensor (in practice usually the last inserted one) --
    # confirm that is the intended "current value" semantics.
    sensors = db.execute('SELECT * FROM measurements GROUP BY sensor')
    sensors = [x for x in sensors]
    # Row layout appears to be (timestamp, sensor, value); this raises
    # IndexError if the measurements table is empty.
    ts = sensors[0][0]
    sensors = dict([(x[1], x[2]) for x in sensors])
    return render_template_string(index_tmpl, sensors=sensors, ts=ts)
if __name__ == '__main__':
    app.debug = False
    # Bind to all interfaces so other hosts on the network can view the page.
    app.run(host='0.0.0.0')
| mit | Python |
|
acc23fe67231f8b556b2de7bd19f0050cbe379e6 | Add total calculation script | haozai309/hello_python | total_prices.py | total_prices.py | prices = {
"banana" : 4,
"apple" : 2,
"orange" : 1.5,
"pear" : 3
}
stock = {
    "banana" : 6,
    "apple" : 0,
    "orange" : 32,
    "pear" : 15,
}
# Python 2 script: print price/stock per item and accumulate the total
# inventory value (price * stock summed over all items).
total = 0
for key in prices:
    print key
    print "price: %s" % prices[key]
    print "stock: %s" % stock[key]
    print prices[key] * stock[key]
    total += prices[key] * stock[key]
print total | apache-2.0 | Python |
|
31af4f92e97c83c42baff4e902cddf8184d84e4d | allow to run tox as 'python -m tox', which is handy on Windoze | gaborbernat/tox,selimb/tox,loechel/tox,Avira/tox,tox-dev/tox | tox/__main__.py | tox/__main__.py | from tox._cmdline import main
main()
| mit | Python |
|
f86d07998a2a80fcf9e69cca9d89c2ca4d982e02 | Fix windows dist script | victorvde/rust,stepancheg/rust-ide-rust,zaeleus/rust,dinfuehr/rust,kimroen/rust,AerialX/rust-rt-minimal,servo/rust,dwillmer/rust,ktossell/rust,kwantam/rust,jroesch/rust,ejjeong/rust,ebfull/rust,robertg/rust,mahkoh/rust,mdinger/rust,miniupnp/rust,zubron/rust,defuz/rust,P1start/rust,ktossell/rust,KokaKiwi/rust,mdinger/rust,gifnksm/rust,jashank/rust,ruud-v-a/rust,omasanori/rust,defuz/rust,AerialX/rust-rt-minimal,GBGamer/rust,aidancully/rust,michaelballantyne/rust-gpu,carols10cents/rust,KokaKiwi/rust,richo/rust,ejjeong/rust,seanrivera/rust,kimroen/rust,sae-bom/rust,nwin/rust,AerialX/rust,miniupnp/rust,miniupnp/rust,vhbit/rust,ebfull/rust,pelmers/rust,aturon/rust,GBGamer/rust,miniupnp/rust,ktossell/rust,untitaker/rust,vhbit/rust,avdi/rust,barosl/rust,robertg/rust,omasanori/rust,victorvde/rust,0x73/rust,pythonesque/rust,aidancully/rust,michaelballantyne/rust-gpu,graydon/rust,graydon/rust,nham/rust,0x73/rust,erickt/rust,sae-bom/rust,vhbit/rust,aidancully/rust,pelmers/rust,AerialX/rust,andars/rust,reem/rust,rohitjoshi/rust,bluss/rand,pythonesque/rust,SiegeLord/rust,ebfull/rust,jroesch/rust,GBGamer/rust,pythonesque/rust,AerialX/rust-rt-minimal,P1start/rust,KokaKiwi/rust,philyoon/rust,Ryman/rust,ebfull/rust,aidancully/rust,aepsil0n/rust,richo/rust,rohitjoshi/rust,michaelballantyne/rust-gpu,Ryman/rust,pelmers/rust,zaeleus/rust,zaeleus/rust,vhbit/rust,kimroen/rust,SiegeLord/rust,mitsuhiko/rust,stepancheg/rust-ide-rust,gifnksm/rust,jashank/rust,jbclements/rust,kwantam/rust,zachwick/rust,andars/rust,AerialX/rust,dinfuehr/rust,michaelballantyne/rust-gpu,seanrivera/rust,bombless/rust,dwillmer/rust,rohitjoshi/rust,Ryman/rust,stepancheg/rust-ide-rust,omasanori/rust,bombless/rust,nham/rust,hauleth/rust,mitsuhiko/rust,mihneadb/rust,XMPPwocky/rust,aturon/rust,ebfull/rand,ktossell/rust,aidancully/rust,P1start/rust,mitsuhiko/rust,pelmers/rust,carols10cents/rust,GBGamer/rust,mahkoh/rust,jashank/rust,jashank/rust,nwin/rust
,defuz/rust,aepsil0n/rust,miniupnp/rust,krzysz00/rust,cllns/rust,bombless/rust-docs-chinese,pshc/rust,jbclements/rust,stepancheg/rust-ide-rust,TheNeikos/rust,jroesch/rust,sae-bom/rust,aturon/rust,erickt/rust,aepsil0n/rust,dinfuehr/rust,servo/rust,mitsuhiko/rust,untitaker/rust,zubron/rust,philyoon/rust,nham/rust,barosl/rust,rprichard/rust,GBGamer/rust,nham/rust,rprichard/rust,mahkoh/rust,dwillmer/rust,zaeleus/rust,rprichard/rust,andars/rust,philyoon/rust,Ryman/rust,nwin/rust,victorvde/rust,mihneadb/rust,carols10cents/rust,GBGamer/rust,seanrivera/rust,SiegeLord/rust,mvdnes/rust,ebfull/rust,ejjeong/rust,0x73/rust,bombless/rust,michaelballantyne/rust-gpu,pshc/rust,cllns/rust,mdinger/rust,aturon/rust,krzysz00/rust,pczarn/rust,zubron/rust,barosl/rust,jbclements/rust,jroesch/rust,jroesch/rust,aidancully/rust,michaelballantyne/rust-gpu,zaeleus/rust,aneeshusa/rust,kimroen/rust,pczarn/rust,servo/rust,dinfuehr/rust,mahkoh/rust,mitsuhiko/rust,defuz/rust,michaelballantyne/rust-gpu,jbclements/rust,aturon/rust,zubron/rust,mvdnes/rust,barosl/rust,rprichard/rust,untitaker/rust,gifnksm/rust,vhbit/rust,pczarn/rust,zubron/rust,aneeshusa/rust,kwantam/rust,aturon/rust,ejjeong/rust,sae-bom/rust,mitsuhiko/rust,mahkoh/rust,emk/rust,P1start/rust,andars/rust,hauleth/rust,krzysz00/rust,ejjeong/rust,krzysz00/rust,krzysz00/rust,pythonesque/rust,victorvde/rust,graydon/rust,vhbit/rust,andars/rust,XMPPwocky/rust,mvdnes/rust,stepancheg/rust-ide-rust,aneeshusa/rust,huonw/rand,P1start/rust,dinfuehr/rust,reem/rust,erickt/rust,emk/rust,nham/rust,jashank/rust,defuz/rust,mvdnes/rust,TheNeikos/rust,zubron/rust,pshc/rust,robertg/rust,rprichard/rust,bombless/rust,P1start/rust,KokaKiwi/rust,mitsuhiko/rust,carols10cents/rust,bhickey/rand,Ryman/rust,reem/rust,barosl/rust,cllns/rust,cllns/rust,KokaKiwi/rust,ruud-v-a/rust,dwillmer/rust,reem/rust,zachwick/rust,l0kod/rust,emk/rust,AerialX/rust-rt-minimal,kwantam/rust,aepsil0n/rust,ruud-v-a/rust,servo/rust,robertg/rust,pczarn/rust,carols10cents/rust,pshc/rust,GBGame
r/rust,mihneadb/rust,aepsil0n/rust,hauleth/rust,barosl/rust,0x73/rust,zachwick/rust,nwin/rust,stepancheg/rust-ide-rust,pczarn/rust,avdi/rust,SiegeLord/rust,untitaker/rust,emk/rust,nwin/rust,philyoon/rust,seanrivera/rust,nham/rust,jroesch/rust,TheNeikos/rust,dwillmer/rust,jbclements/rust,kimroen/rust,mvdnes/rust,quornian/rust,waynenilsen/rand,avdi/rust,0x73/rust,quornian/rust,pelmers/rust,KokaKiwi/rust,emk/rust,mdinger/rust,kimroen/rust,TheNeikos/rust,dwillmer/rust,XMPPwocky/rust,zaeleus/rust,mdinger/rust,victorvde/rust,0x73/rust,jashank/rust,graydon/rust,quornian/rust,barosl/rust,quornian/rust,jroesch/rust,jbclements/rust,gifnksm/rust,victorvde/rust,jbclements/rust,andars/rust,emk/rust,AerialX/rust,ejjeong/rust,aturon/rust,mahkoh/rust,zubron/rust,zubron/rust,hauleth/rust,pshc/rust,TheNeikos/rust,richo/rust,ebfull/rust,avdi/rust,dwillmer/rust,pshc/rust,servo/rust,arthurprs/rand,kwantam/rust,pczarn/rust,richo/rust,omasanori/rust,ktossell/rust,omasanori/rust,miniupnp/rust,rohitjoshi/rust,XMPPwocky/rust,bombless/rust,omasanori/rust,seanrivera/rust,AerialX/rust,Ryman/rust,mihneadb/rust,AerialX/rust-rt-minimal,untitaker/rust,pshc/rust,pczarn/rust,carols10cents/rust,l0kod/rust,l0kod/rust,pythonesque/rust,miniupnp/rust,servo/rust,quornian/rust,quornian/rust,philyoon/rust,mdinger/rust,aneeshusa/rust,sae-bom/rust,gifnksm/rust,SiegeLord/rust,l0kod/rust,jashank/rust,l0kod/rust,jbclements/rust,AerialX/rust,erickt/rust,achanda/rand,kimroen/rust,kwantam/rust,jbclements/rust,zachwick/rust,dinfuehr/rust,rohitjoshi/rust,nwin/rust,quornian/rust,ktossell/rust,nwin/rust,erickt/rust,seanrivera/rust,krzysz00/rust,AerialX/rust-rt-minimal,vhbit/rust,nham/rust,avdi/rust,erickt/rust,cllns/rust,P1start/rust,erickt/rust,aneeshusa/rust,ktossell/rust,richo/rust,l0kod/rust,hauleth/rust,cllns/rust,rohitjoshi/rust,GrahamDennis/rand,shepmaster/rand,sae-bom/rust,zachwick/rust,jashank/rust,TheNeikos/rust,miniupnp/rust,dwillmer/rust,defuz/rust,graydon/rust,emk/rust,mihneadb/rust,rprichard/rust,vhbit/rus
t,pythonesque/rust,ruud-v-a/rust,pelmers/rust,Ryman/rust,XMPPwocky/rust,zachwick/rust,bombless/rust,jroesch/rust,SiegeLord/rust,l0kod/rust,mvdnes/rust,robertg/rust,retep998/rand,l0kod/rust,aneeshusa/rust,ruud-v-a/rust,nwin/rust,stepancheg/rust-ide-rust,robertg/rust,reem/rust,graydon/rust,XMPPwocky/rust,pythonesque/rust,richo/rust,reem/rust,aepsil0n/rust,0x73/rust,SiegeLord/rust,philyoon/rust,avdi/rust,servo/rust,mihneadb/rust,pshc/rust,ruud-v-a/rust,untitaker/rust,gifnksm/rust,hauleth/rust,GBGamer/rust | src/etc/copy-runtime-deps.py | src/etc/copy-runtime-deps.py | #!/usr/bin/env python
# xfail-license
# Copies Rust runtime dependencies to the specified directory
import snapshot, sys, os, shutil
def copy_runtime_deps(dest_dir):
    """Copy the Windows (winnt) runtime DLLs and third-party license files
    into *dest_dir*."""
    for path in snapshot.get_winnt_runtime_deps():
        shutil.copy(path, dest_dir)
    lic_dest = os.path.join(dest_dir, "third-party")
    # Remove a stale copy first, because copytree() refuses to overwrite.
    if os.path.exists(lic_dest):
        shutil.rmtree(lic_dest) # copytree() won't overwrite existing files
    shutil.copytree(os.path.join(os.path.dirname(__file__), "third-party"), lic_dest)
copy_runtime_deps(sys.argv[1])
| #!/usr/bin/env python
# xfail-license
# Copies Rust runtime dependencies to the specified directory
import snapshot, sys, os, shutil
def copy_runtime_deps(dest_dir):
for path in snapshot.get_winnt_runtime_deps():
shutil.copy(path, dest_dir)
lic_dest = os.path.join(dest_dir, "third-party")
shutil.rmtree(lic_dest) # copytree() won't overwrite existing files
shutil.copytree(os.path.join(os.path.dirname(__file__), "third-party"), lic_dest)
copy_runtime_deps(sys.argv[1])
| apache-2.0 | Python |
d206b02e12cf7f5418cd02987313bd7ddd807901 | add geom_tile layer. | has2k1/plotnine,has2k1/plotnine | ggplot/geoms/geom_tile.py | ggplot/geoms/geom_tile.py | import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from geom import geom
class geom_tile(geom):
    """Heat-map layer: draws a grid of tiles colored by the 'fill' aesthetic.

    Python 2 era code (uses dict.iteritems()).
    """
    VALID_AES = ['x', 'y', 'fill']
    def plot_layer(self, layer):
        # Keep only aesthetics this geom understands, then apply manual overrides.
        layer = {k: v for k, v in layer.iteritems() if k in self.VALID_AES}
        layer.update(self.manual_aes)
        x = layer.pop('x')
        y = layer.pop('y')
        fill = layer.pop('fill')
        # Pivot to a 2-D grid: rows indexed by y, columns by x, cells = fill.
        X = pd.DataFrame({'x': x,
                          'y': y,
                          'fill': fill}).set_index(['x', 'y']).unstack(0)
        x_ticks = range(0, len(set(x)))
        y_ticks = range(0, len(set(y)))
        # Remaining aesthetics are forwarded to imshow as keyword arguments.
        plt.imshow(X, interpolation='nearest', **layer)
        # Deferred axis mutations for the caller to apply to the Axes object.
        return [
            {'function': 'set_xticklabels', 'args': [x]},
            {'function': 'set_xticks', 'args': [x_ticks]},
            {'function': 'set_yticklabels', 'args': [y]},
            {'function': 'set_yticks', 'args': [y_ticks]}
        ]
| mit | Python |
|
d940ce7cbd92c0e886139eaec3faa75aabbce16a | add test models | byteweaver/django-singleactiveobject | singleactiveobject/tests/models.py | singleactiveobject/tests/models.py | from singleactiveobject.models import SingleActiveObjectMixin
class SingleActiveObject(SingleActiveObjectMixin):
    """Minimal concrete model used by the test suite to exercise the mixin."""
    pass
| bsd-3-clause | Python |
|
71d3375c4ca1acb106f8825d2f39ca602fa47e94 | Test astroid trajectory implementation | bit0001/trajectory_tracking,bit0001/trajectory_tracking | src/test/trajectory/test_astroid_trajectory.py | src/test/trajectory/test_astroid_trajectory.py | #!/usr/bin/env python
import unittest
from geometry_msgs.msg import Point
from trajectory.astroid_trajectory import AstroidTrajectory
class AstroidTrajectoryTest(unittest.TestCase):
    """Unit tests for AstroidTrajectory with radius 5 and period 4 s."""
    def setUp(self):
        self.delta = 0.000001
        self.radius = 5
        self.period = 4
        self.expected_position = Point()
        self.trajectory = AstroidTrajectory(self.radius, self.period)
    def test_when_creating_trajectory_the_radius_and_period_are_set(self):
        self.assertEqual(self.radius, self.trajectory.radius)
        self.assertEqual(self.period, self.trajectory.period)
    def test_when_getting_position_after_1s_then_position_at_1s_is_returned(self):
        # At t = period/4 the astroid point lies on the positive y axis.
        self.expected_position.x = 0
        self.expected_position.y = self.radius
        # NOTE(review): assertAlmostEqual on whole Point objects depends on
        # Point equality semantics; comparing .x and .y separately with
        # self.delta would be more explicit -- confirm.
        self.assertAlmostEqual(self.expected_position, self.trajectory.get_position_at(1))
    def test_when_getting_position_after_2s_then_position_at_2s_is_returned(self):
        # At t = period/2 the point lies on the negative x axis.
        self.expected_position.x = -self.radius
        self.expected_position.y = 0
        self.assertAlmostEqual(self.expected_position, self.trajectory.get_position_at(2))
| mit | Python |
|
835b5f20061033b6fcf2a8b86203a42c5d4835ee | Add initial unit tests for parameter.py (List) | bees4ever/spotpy,bees4ever/spotpy,thouska/spotpy,thouska/spotpy,thouska/spotpy,bees4ever/spotpy | spotpy/unittests/test_parameter.py | spotpy/unittests/test_parameter.py | import unittest
try:
import spotpy
except ImportError:
import sys
sys.path.append(".")
import spotpy
from spotpy import parameter
import numpy as np
#https://docs.python.org/3/library/unittest.html
class TestListParameterDistribution(unittest.TestCase):
    """Behavioral tests for spotpy's List parameter distribution."""
    def setUp(self):
        self.values = [1, 2, 3, 4, 5]
        self.list_param = parameter.List('test', self.values)
        self.list_param_repeat = parameter.List('test2', self.values, repeat=True)
    def test_list_is_callable(self):
        self.assertTrue(callable(self.list_param), "List instance should be callable")
    def test_list_gives_throwaway_value_on_first_call(self):
        # The first call yields a throwaway value, not the first list element.
        v = self.list_param()
        self.assertNotEqual(self.values[0], v)
    def test_list_gives_1_value_when_size_is_not_specified(self):
        # Discard the throwaway, then expect the first configured value.
        throwaway = self.list_param()
        v = self.list_param()
        self.assertEqual(self.values[0], v)
    def test_list_gives_n_values_when_size_is_n(self):
        throwaway = self.list_param()
        v = self.list_param(len(self.values))
        self.assertEqual(self.values, list(v))
if __name__ == '__main__':
unittest.main()
| mit | Python |
|
e9a5a0c22de92f3b5eb5df567475736b72c5067c | Add pa300_calc_coord.py | wataash/Instr,wataash/Instr | pa300_calc_coord.py | pa300_calc_coord.py | # Std libs
from itertools import product
import sqlite3
# My libs
import constants as c
# Expand per-mask mesa/pad offsets to absolute wafer coordinates for every
# die position (X, Y) on a 20x20 grid and persist them in the `coord` table.
conn_params = sqlite3.connect(c.sql_params_dropbox)
cur_params = conn_params.cursor()
dats = cur_params.execute('''SELECT mask, mesa, xm_mesa, ym_mesa, xm_pad, ym_pad
                             FROM mesas''').fetchall()
for dat in dats:
    mask, mesa, xm_mesa, ym_mesa, xm_pad, ym_pad = dat
    # Die pitch (step between adjacent dies) for this mask.
    dX, dY = cur_params.execute('SELECT d_X, d_Y FROM masks WHERE mask=?',\
                                (mask,)).fetchone()
    for (X, Y) in product(range(1,21), range(1,21)):
        # Absolute position = (die index - 1) * pitch + per-mesa offset.
        x_mesa = (X-1)*dX + xm_mesa
        y_mesa = (Y-1)*dY + ym_mesa
        x_pad = (X-1)*dX + xm_pad
        y_pad = (Y-1)*dY + ym_pad
        print(mask, mesa, X, Y)
        cur_params.execute('''INSERT OR REPLACE INTO
            coord(mask, mesa, X, Y, xmesa, ymesa, xpad, ypad)
            VALUES(?, ?, ?, ?, ?, ?, ?, ?)''',
            (mask, mesa, X, Y,
             x_mesa, y_mesa, x_pad, y_pad,))
# Single commit after the full grid is generated.
conn_params.commit()
| mit | Python |
|
a52ae7a34b9ec1dd03653c6c735b3930033ac830 | add a sample of visitor pattern for resolving recursion limit problem using generator. | ptrsxu/snippetpy | patterns/visitor.py | patterns/visitor.py | """
from python cookbook 3rd edition. PY3 only.
Resolve the recursion limit problem.
"""
import types
class Node:
    """Base marker class for all expression-tree node types."""
    pass
class NodeVisitor:
    """Iterative (trampolined) visitor dispatcher.

    visit() drives an explicit stack instead of recursing, so arbitrarily
    deep trees can be processed without hitting the recursion limit.
    visit_* handlers may be plain functions or generators that `yield`
    child nodes back to the trampoline (see Evaluator2 below).
    """
    def visit(self, node):
        stack = [node]
        last_result = None
        while stack:
            try:
                last = stack[-1]
                if isinstance(last, types.GeneratorType):
                    # Feed the previous child's result back into the generator;
                    # whatever it yields next is pushed for evaluation.
                    stack.append(last.send(last_result))
                    last_result = None
                elif isinstance(last, Node):
                    # Replace the node with its handler's result
                    # (a value, or a generator to be driven).
                    stack.append(self._visit(stack.pop()))
                else:
                    # Plain value: the finished result of the subtree below.
                    last_result = stack.pop()
            except StopIteration:
                # The generator on top of the stack is exhausted.
                stack.pop()
        return last_result
    def _visit(self, node):
        # Dispatch to visit_<ClassName>, falling back to generic_visit.
        methname = 'visit_' + type(node).__name__
        meth = getattr(self, methname, None)
        if meth is None:
            meth = self.generic_visit
        return meth(node)
    def generic_visit(self, node):
        raise RuntimeError('No {} method'.format('visit_' +
                                                 type(node).__name__))
class UnaryOperator(Node):
    # Node with a single child expression.
    def __init__(self, operand):
        self.operand = operand
class BinaryOperator(Node):
    # Node with left and right child expressions.
    def __init__(self, left, right):
        self.left = left
        self.right = right
class Add(BinaryOperator):
    pass
class Sub(BinaryOperator):
    pass
class Mul(BinaryOperator):
    pass
class Div(BinaryOperator):
    pass
class Negate(UnaryOperator):
    pass
class Number(Node):
    # Leaf node wrapping a literal numeric value.
    def __init__(self, value):
        self.value = value
# Example visitor: computes the numeric value of an expression tree.
class Evaluator(NodeVisitor):
    """
    Example for calculating 1 + 2*(3-4) / 5 (=> 0.6)
    >>> t1 = Sub(Number(3), Number(4))
    >>> t2 = Mul(Number(2), t1)
    >>> t3 = Div(t2, Number(5))
    >>> t4 = Add(Number(1), t3)
    >>> e = Evaluator()
    >>> print(e.visit(t4))
    0.6
    """
    def visit_Number(self, node):
        # Leaves evaluate to their stored value.
        return node.value
    def visit_Add(self, node):
        lhs = self.visit(node.left)
        rhs = self.visit(node.right)
        return lhs + rhs
    def visit_Sub(self, node):
        lhs = self.visit(node.left)
        rhs = self.visit(node.right)
        return lhs - rhs
    def visit_Mul(self, node):
        lhs = self.visit(node.left)
        rhs = self.visit(node.right)
        return lhs * rhs
    def visit_Div(self, node):
        lhs = self.visit(node.left)
        rhs = self.visit(node.right)
        return lhs / rhs
    def visit_Negate(self, node):
        return -self.visit(node.operand)
class Evaluator2(NodeVisitor):
    """
    Resolve the problem of recursion limit.
    Example:
    >>> a = Number(0)
    >>> for n in range(1, 100000):
    ...     a = Add(a, Number(n))
    ...
    >>> e = Evaluator()
    >>> e.visit(a)
    Traceback (most recent call last):
    ...
    RuntimeError: maximum recursion depth exceeded in __instancecheck__
    >>> e = Evaluator2()
    >>> e.visit(a)
    4999950000
    """
    # Each visit_* method below is a generator: `yield <Node>` asks the
    # trampoline in NodeVisitor.visit() to evaluate that subtree and send the
    # result back in, so the Python call stack never grows with tree depth.
    def visit_Number(self, node):
        return node.value
    def visit_Add(self, node):
        yield (yield node.left) + (yield node.right)
    def visit_Sub(self, node):
        yield (yield node.left) - (yield node.right)
    def visit_Mul(self, node):
        yield (yield node.left) * (yield node.right)
    def visit_Div(self, node):
        yield (yield node.left) / (yield node.right)
    def visit_Negate(self, node):
        yield -(yield node.operand)
| mit | Python |
|
b3c89917895786bfab5d4fae9ce086767575a506 | Add a deployment script | slice/dogbot,slice/dogbot,sliceofcode/dogbot,slice/dogbot,sliceofcode/dogbot | deploy.py | deploy.py | """ This is a script that deploys Dogbot. """
import os
from pathlib import Path
from ruamel.yaml import YAML
import requests
# load the webhook url from the configuration
with open('config.yml') as f:
webhook_url = YAML(typ='safe').load(f)['monitoring']['health_webhook']
def info(text):
    """Print *text* to stdout with a green "[info]" prefix."""
    prefix = '\033[32m[info]\033[0m'
    print(prefix, text)
def post(content=None, *, embed=None, wait_for_server=True):
    """POST a message and/or embed to the configured Discord webhook.

    :param content: plain-text message body (optional)
    :param embed: a single embed dict (optional)
    :param wait_for_server: ask Discord to confirm delivery before returning
    """
    info('POSTing to {}: {}'.format(webhook_url, content or embed))
    # Only include the fields that were actually supplied; the original code
    # always sent 'content': None and 'embeds': [None] when omitted.
    payload = {}
    if content is not None:
        payload['content'] = content
    if embed is not None:
        payload['embeds'] = [embed]
    requests.post(webhook_url + ('?wait=true' if wait_for_server else ''), json=payload)
def deploy():
    """ Deploys Dogbot: announce start, run the Ansible playbook, announce the result. """
    # resolve path to playbook
    playbook = (Path.cwd() / 'deployment' / 'playbook.yml').resolve()
    info('Path to Ansible playbook: {}'.format(playbook))
    # post!
    post(embed={'title': 'Deployment starting.', 'description': 'This shouldn\'t take too long.', 'color': 0xe67e22})
    # run the playbook
    info('Running Ansible playbook.')
    # os.system() returns the shell exit status; nonzero means the playbook failed.
    exit_code = os.system('ansible-playbook {}'.format(playbook))
    if exit_code != 0:
        info('Deployment failed.')
        return
    info('Finished running playbook.')
    # post!
    post(embed={'title': 'Deployment finished.', 'description': 'The bot is restarting. This can take a bit.',
                'color': 0x2ecc71})
if __name__ == '__main__':
deploy()
| mit | Python |
|
10b3ae6ab5009fe0c43b744dc655bd6512cec041 | Include basic version of contract object | leaffan/pynhldb | db/contract.py | db/contract.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .common import Base, session_scope
class Contract(Base):
__tablename__ = 'contracts'
__autoload__ = True
def __init__(self, player_id, contract_data):
| mit | Python |
|
aef7c25964883bae893913524bc9ff3dc0bdcde3 | Add a docker helper script (#18) | woofwoofinc/cargo-sphinx,woofwoofinc/cargo-sphinx | docker.py | docker.py | #!/usr/bin/env python3
import argparse
import subprocess
IMAGE_NAME = 'cargo-sphinx'
def has_image(name):
    """Return True if a local Docker image named *name* exists."""
    # BUG FIX: the original line ended with a trailing comma, which made
    # `cmd` a 1-tuple instead of a string (it only worked by accident on
    # POSIX, where subprocess treats the first sequence element as the
    # shell command).
    cmd = "docker images | awk '{{print $1}}' | grep '^{name}$' > /dev/null".format(
        name=name)
    proc = subprocess.run(cmd, shell=True)
    # grep exits 0 only when the image name was found.
    return proc.returncode == 0
def main():
    """CLI entry point: ensure the image exists, then run the chosen action."""
    parser = argparse.ArgumentParser()
    parser.add_argument('action', nargs='?',
                        help="Either 'build', 'shell', or 'docs'")
    parser.add_argument('--nocache', action='store_true',
                        help="When building containers, don't use cached images.")
    args = parser.parse_args()
    action = args.action
    # (Re)build when the image is missing or a build was explicitly requested.
    if not has_image(IMAGE_NAME) or action == 'build':
        run_build(IMAGE_NAME, nocache=args.nocache)
    if action == 'build':
        return
    if action == 'shell':
        run_shell(IMAGE_NAME)
    elif action == 'docs':
        run_docs(IMAGE_NAME)
    else:
        # BUG FIX: the '{}' placeholder was never filled in.
        print("Unknown action '{}' specified.".format(action))
def run_build(image, nocache=False):
    """(Re)build the Docker image named *image*; optionally skip the layer cache."""
    if nocache:
        cache_flag = "--no-cache"
    else:
        cache_flag = ""
    build_command = "docker build --rm=true -t {name} {nocache} .".format(
        name=image, nocache=cache_flag)
    # check=True raises CalledProcessError if the build fails.
    subprocess.run(build_command, shell=True, check=True)
def run_shell(image):
    """Open an interactive bash shell inside the container, with the current
    working directory bind-mounted at /<image>."""
    cmd = """docker run -it \\
        -v "$(pwd):/{name}" \\
        --workdir=/{name} \\
        {name} \\
        /bin/bash""".format(name=image)
    subprocess.run(cmd, shell=True)
def run_docs(image):
    """Build the Sphinx HTML documentation inside the container
    (runs `make clean html` in /<image>/docs)."""
    cmd = """docker run -it \\
        -v "$(pwd):/{name}" \\
        --workdir=/{name}/docs \\
        {name} \\
        make clean html""".format(name=image)
    subprocess.run(cmd, shell=True)
if __name__ == "__main__":
main()
| apache-2.0 | Python |
|
647fd44f829a308dc16eb86a663dc1a3719476ab | add solution for Search a 2D Matrix II | zhyu/leetcode,zhyu/leetcode | algorithms/searchA2DMatrixII/searchA2DMatrixII.py | algorithms/searchA2DMatrixII/searchA2DMatrixII.py | class Solution:
# @param {integer[][]} matrix
# @param {integer} target
# @return {boolean}
def searchMatrix(self, matrix, target):
n = len(matrix)
m = len(matrix[0])
x = n-1
y = 0
while x >= 0 and y < m:
if matrix[x][y] == target:
return True
if matrix[x][y] > target:
x -= 1
else:
y += 1
return False
| mit | Python |
|
00e68a20d4691ae3172ae0bb11b8440387acc0d6 | Add the server based on oslo.service. | xgfone/pycom,xgfone/xutils | pycom/server.py | pycom/server.py | # encoding: utf-8
from __future__ import absolute_import, print_function, unicode_literals
import os
import socket
import logging
import functools
import greenlet
import eventlet
from oslo_service import service
LOG = logging.getLogger(__name__)
def listen_socket(host, port, backlog=1024, reuse=True):
    """Create, bind and start a TCP listening socket.

    :param host: interface address to bind to
    :param port: TCP port (0 lets the OS pick a free one)
    :param backlog: listen(2) queue length
    :param reuse: set SO_REUSEADDR before binding
    :return: the listening socket object
    """
    server_sock = socket.socket()
    if reuse:
        # Allow fast rebinds after a restart (avoid TIME_WAIT refusals).
        server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_sock.bind((host, port))
    server_sock.listen(backlog)
    return server_sock
def wrap_exc(f):
    """Decorator: call *f* and return None instead of propagating any Exception."""
    @functools.wraps(f)
    def safe_call(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception:
            return None
    return safe_call
class ServerBase(service.ServiceBase):
    """Green-thread server skeleton implementing the oslo.service lifecycle
    hooks (start/stop/wait/reset)."""
    def __init__(self, pool_size=None):
        # pool_size=None lets GreenPool fall back to its default capacity.
        self.pool_size = pool_size
        self._pool = eventlet.GreenPool(self.pool_size)
        self._server = None
    def serve(self, pool):
        # Subclasses implement the accept/dispatch loop here.
        raise NotImplementedError("The method of serve MUST be implemented")
    def _spawn(self, pool):
        # NOTE(review): appears unused within this class; presumably kept
        # for subclasses or backward compatibility -- confirm before removal.
        pid = os.getpid()
        try:
            self.serve(pool)
        finally:
            pool.waitall()
            LOG.info("[Process{0}] the server exited".format(pid))
    def start(self):
        # Run serve() in its own green thread so start() returns immediately.
        self._server = eventlet.spawn(self.serve, pool=self._pool)
    def stop(self):
        if self._server is not None:
            # let eventlet close socket
            self._pool.resize(0)
            self._server.kill()
    def wait(self):
        """Block until in-flight requests finish (oslo.service hook)."""
        try:
            if self._server is not None:
                num = self._pool.running()
                LOG.debug("Waiting server to finish %d requests.", num)
                self._pool.waitall()
        except greenlet.GreenletExit:
            LOG.info("Server has stopped.")
    def reset(self):
        # Restore pool capacity after stop() shrank it to zero.
        self._pool.resize(self.pool_size)
class SocketServer(ServerBase):
    """TCP server: accepts connections and runs `handler(conn, addr)` for
    each one on a green thread from the shared pool."""
    def __init__(self, handler, host, port, pool_size=None, backlog=1024, timeout=None):
        self.host = host
        self.port = port
        self.sock = listen_socket(self.host, self.port, backlog)
        LOG.info("Listen %s:%s" % (self.host, self.port))
        self.handler = handler
        # Per-connection socket timeout in seconds (None = blocking).
        self.timeout = timeout
        super(SocketServer, self).__init__(pool_size)
    def handle(self, conn, addr):
        """Run the user handler for one connection, always closing it after."""
        try:
            self.handler(conn, addr)
        except socket.timeout:
            LOG.info("socket from {0} time out".format(addr))
        finally:
            try:
                conn.close()
            except socket.error:
                pass
    def serve(self, pool):
        """Accept loop: spawn handle() for every incoming connection."""
        pid = os.getpid()
        try:
            while True:
                try:
                    conn, addr = self.sock.accept()
                    conn.settimeout(self.timeout)
                    LOG.debug("[Process{0}] accepted {1}".format(pid, addr))
                    pool.spawn_n(self.handle, conn, addr)
                except socket.error as e:
                    # NOTE(review): if accept() itself raises, `addr` is
                    # unbound here (or stale from the previous iteration),
                    # which would raise NameError inside this handler.
                    LOG.error("[Process{0}] can not handle the request from {1}: {2}".format(pid, addr, e))
                except (KeyboardInterrupt, SystemExit):
                    LOG.info("[Process{0}] the server is exiting".format(pid))
                    break
        finally:
            try:
                self.sock.close()
            except socket.error as e:
                pass
class TaskServer(ServerBase):
    """Run `task_fn(*args, **kwargs)` concurrently `task_num` times on the
    green-thread pool, then wait for all tasks to finish."""
    def __init__(self, task_fn, task_num=1, pool_size=None, *args, **kwargs):
        super(TaskServer, self).__init__(pool_size)
        self.task_fn = task_fn
        self.task_num = task_num
        self.args = args
        self.kwargs = kwargs
    def _wrap_exc(self):
        # Best-effort: one failing task must not take down the pool,
        # but the failure should not vanish silently either.
        try:
            self.task_fn(*self.args, **self.kwargs)
        except Exception:
            LOG.exception("task function raised")
    def serve(self, pool):
        # BUG FIX: this method was named `server`, so ServerBase.start()
        # dispatched to ServerBase.serve() and raised NotImplementedError.
        for i in range(self.task_num):
            pool.spawn_n(self._wrap_exc)
        pool.waitall()
| mit | Python |
|
ada6128817769886e2869944fac3a8cea0b5b109 | Add a missing module | drtconway/pykmer | pykmer/timer.py | pykmer/timer.py | """
This module provides a simple timer class for instrumenting code.
"""
import time
class timer(object):
    """Accumulating stopwatch with an event counter.

    Tracks elapsed wall-clock time across pause/resume cycles and counts
    "events" so a throughput (events per second) can be reported.
    """
    def __init__(self):
        self.start = time.time()   # timestamp of the current running segment
        self.sofar = 0.0           # seconds banked from finished segments
        self.paused = False
        self.events = 0
    def pause(self):
        """Bank the current segment and stop accumulating."""
        self.sofar += time.time() - self.start
        self.paused = True
    def resume(self):
        """Begin a new timing segment."""
        self.paused = False
        self.start = time.time()
    def stop(self):
        """Bank the running segment; no-op while paused."""
        if self.paused:
            return
        self.sofar += time.time() - self.start
    def tick(self, n = 1):
        """Record *n* events (default one)."""
        self.events += n
    def reset(self):
        """Discard all banked time and restart the clock (events are kept)."""
        self.paused = False
        self.sofar = 0
        self.start = time.time()
    def time(self):
        """Total elapsed seconds, including any currently running segment."""
        total = self.sofar
        if not self.paused:
            total += time.time() - self.start
        return total
    def rate(self, n = None):
        """Return *n* (default: recorded events) divided by elapsed seconds."""
        if n is None:
            n = self.events
        return n / self.time()
| apache-2.0 | Python |
|
6e767e8f5b219d9883fb1a16846830efabac7d5b | Add python | koverholt/hello-world,koverholt/hello-world,koverholt/hello-world,koverholt/hello-world,koverholt/hello-world,koverholt/hello-world | python/hello.py | python/hello.py | print("Hello, World!")
| bsd-3-clause | Python |
|
01472504fc42137a05a85ae5ad6d4b7956865680 | Add autosolver for regex. | hghwng/mooc-algs2,hghwng/mooc-algs2 | quiz/5-regex.py | quiz/5-regex.py | #!/usr/bin/env python3
def make_array(text):
    """Parse every "src->dst" pair in *text* into a list of (int, int) tuples.

    Pairs are returned in the order they appear; non-matching text is ignored.
    """
    import re
    # Raw string avoids the invalid-escape-sequence warning for \d that the
    # original plain string triggered on modern Python.
    pattern = re.compile(r'(\d+)->(\d+)')
    return [(int(src), int(dst)) for src, dst in pattern.findall(text)]
def make_transitive_closure(states, eps_trans):
    """Expand *states* in place with every state reachable via epsilon moves.

    Repeatedly follows `eps_trans[src]` edges from the current members of
    the set until a fixed point is reached, then returns the (mutated) set.
    """
    while True:
        changed = False
        # BUG FIX: iterate over the actual members of `states`.  The old
        # code scanned range(len(states)), which follows edges from states
        # 0..len-1 regardless of membership -- wrong whenever the set holds
        # a state index >= its own size (e.g. closure of {5}).
        for src in list(states):
            for dst in eps_trans[src]:
                if dst not in states:
                    states.add(dst)
                    changed = True
        if not changed:
            return states
def make_epsilon_transition(regex):
    """Build the epsilon-transition table of an NFA for *regex*.

    Returns a list `trans` where trans[i] is the set of states reachable
    from state i without consuming input.  State i corresponds to regex
    position i; state len(regex) is the accept state.
    NOTE(review): the grouping/star handling below looks tailored to the
    specific quiz inputs rather than a general Thompson construction --
    verify before reusing on arbitrary patterns.
    """
    trans = list(map(lambda x: set(), range(len(regex))))
    stack = []
    for i in range(len(regex)):
        c = regex[i]
        group_begin = i
        if c == '(':
            trans[i].add(i + 1)
            stack.append(i)
        elif c == '|':
            stack.append(i)
        elif c == ')':
            trans[i].add(i + 1)
            top = stack.pop()
            if regex[top] == '(':
                group_begin = top
            elif regex[top] == '|':
                # Alternation: skip edge over the '|' and into the ')'.
                group_begin = stack.pop()
                trans[group_begin].add(top + 1)
                trans[top].add(i)
        elif c == '*':
            trans[i].add(i + 1)
            if i + 1 < len(regex) and regex[i + 1] == '*':
                trans[group_begin].add(i + 1)
                trans[i + 1].add(group_begin)
    return trans
def solve_q1(regex, query):
    """Simulate the NFA for *regex* over *query* and print the reachable
    pattern positions left after consuming the whole query string."""
    eps_trans = make_epsilon_transition(regex)
    states = make_transitive_closure({0}, eps_trans)
    for symbol in query:
        advanced = set()
        for st in states:
            if st == len(regex):
                # Accept state: nothing to match past the pattern's end.
                continue
            if regex[st] == symbol:
                advanced.add(st + 1)
        states = make_transitive_closure(advanced, eps_trans)
    for st in states:
        print(st, end=' ')
    print()
def solve_q2(regex, queries):
    """For each (src, dst) pair print 'y' when a direct epsilon transition
    src -> dst exists in the NFA built for *regex*, else 'n'."""
    eps_trans = make_epsilon_transition(regex)
    for src, dst in queries:
        answer = 'y' if dst in eps_trans[src] else 'n'
        print(answer, end=' ')
    print()
# Quiz inputs: patterns and queries are written with spaces for
# readability and stripped right before use.
q1_regex = ' ( ( A | ( C * B ) ) * A ) '
q1_query = ' A B B A B C '
q2_regex = ' ( A ( ( C D * ) * | B ) ) '
# Question 2 lists epsilon-transition queries as "src->dst" pairs.
q2_query = '''
8->3
10->12
7->5
4->9
0->1
2->10
3->8
'''
solve_q1(q1_regex.replace(' ', ''),
         q1_query.replace(' ', ''))
solve_q2(q2_regex.replace(' ', ''), make_array(q2_query))
| mit | Python |
|
2c1b5aedc5f4503a738ef7e9ffa0a7f969fecfef | add argparse example | familug/FAMILUG,familug/FAMILUG,familug/FAMILUG,familug/FAMILUG,familug/FAMILUG,familug/FAMILUG,familug/FAMILUG,familug/FAMILUG | Python/calculator_argp.py | Python/calculator_argp.py | import argparse
def main(argv=None):
    """Command-line calculator: ``<first> <oper> <second>``.

    Parses two integers and an operator ('+', '-' or '*') and prints the
    result; any other operator prints a single "Not supported" line.

    :param argv: optional argument list for testing; ``None`` falls back
        to ``sys.argv`` exactly as before, so existing callers of
        ``main()`` are unaffected.

    Fixes: the original used Python-2-only ``print`` statements and, for
    an unsupported operator, printed an extra blank line after the
    "Not supported" message.
    """
    parser = argparse.ArgumentParser(description='Calculate two input numbers')
    parser.add_argument(
        'first', metavar='int', type=int,
        help='first number')
    parser.add_argument(
        'oper', metavar='oper', type=str,
        help='operator +, - or * ')
    parser.add_argument(
        'second', metavar='int', type=int,
        help='second number')
    args = parser.parse_args(argv)
    # argparse already converted both operands via type=int.
    operations = {
        '+': lambda a, b: a + b,
        '-': lambda a, b: a - b,
        '*': lambda a, b: a * b,
    }
    if args.oper in operations:
        print(operations[args.oper](args.first, args.second))
    else:
        print("Not supported")
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
| bsd-2-clause | Python |
|
ed5f68211e93df983a5e15c7f1ce812b810b49c0 | Add ANTs package (#7717) | LLNL/spack,krafczyk/spack,krafczyk/spack,iulian787/spack,tmerrick1/spack,iulian787/spack,LLNL/spack,iulian787/spack,krafczyk/spack,matthiasdiener/spack,mfherbst/spack,EmreAtes/spack,LLNL/spack,matthiasdiener/spack,matthiasdiener/spack,tmerrick1/spack,EmreAtes/spack,matthiasdiener/spack,LLNL/spack,iulian787/spack,EmreAtes/spack,krafczyk/spack,krafczyk/spack,tmerrick1/spack,mfherbst/spack,matthiasdiener/spack,mfherbst/spack,tmerrick1/spack,mfherbst/spack,EmreAtes/spack,tmerrick1/spack,iulian787/spack,mfherbst/spack,LLNL/spack,EmreAtes/spack | var/spack/repos/builtin/packages/ants/package.py | var/spack/repos/builtin/packages/ants/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Ants(CMakePackage):
    """ANTs extracts information from complex datasets that include imaging.
    Paired with ANTsR (answer), ANTs is useful for managing, interpreting
    and visualizing multidimensional data. ANTs is popularly considered a
    state-of-the-art medical image registration and segmentation toolkit.
    ANTs depends on the Insight ToolKit (ITK), a widely used medical image
    processing library to which ANTs developers contribute.
    """

    homepage = "http://stnava.github.io/ANTs/"
    url = "https://github.com/ANTsX/ANTs/archive/v2.2.0.tar.gz"

    version('2.2.0', '5661b949268100ac0f7baf6d2702b4dd')

    def install(self, spec, prefix):
        # The CMake build tree is placed under spack-build/ANTS-build, so
        # the install target has to run from that subdirectory.
        with working_dir(join_path('spack-build', 'ANTS-build'), create=False):
            make("install")
        # Helper scripts ship alongside the sources; copy them into bin/.
        install_tree('Scripts', prefix.bin)

    def setup_environment(self, spack_env, run_env):
        # The ANTs scripts locate their executables via ANTSPATH at runtime.
        run_env.set('ANTSPATH', self.prefix.bin)
| lgpl-2.1 | Python |
|
744cedc7e999f96aa0646bb43c039882991228ae | Add Asio package (#24485) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/asio/package.py | var/spack/repos/builtin/packages/asio/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os.path
class Asio(AutotoolsPackage):
    """C++ library for network and low-level I/O programming."""

    homepage = "http://think-async.com/Asio/"
    url = "https://github.com/chriskohlhoff/asio/archive/1.18.2.tar.gz"
    git = "https://github.com/chriskohlhoff/asio.git"
    maintainers = ["msimberg"]

    version(
        "1.18.2",
        sha256="8d67133b89e0f8b212e9f82fdcf1c7b21a978d453811e2cd941c680e72c2ca32",
    )

    depends_on("autoconf", type="build")
    depends_on("automake", type="build")
    depends_on("m4", type="build")
    depends_on("libtool", type="build")

    stds = ("11", "14", "17")
    variant(
        "cxxstd",
        default="11",
        values=stds,
        multi=False,
        description="Use the specified C++ standard when building.",
    )
    variant(
        "separate_compilation",
        default=False,
        description="Compile Asio sources separately",
    )

    variant(
        "boost_coroutine",
        default=False,
        description="Enable support for Boost.Coroutine.",
    )
    depends_on("boost +context +coroutine", when="+boost_coroutine")

    variant("boost_regex", default=False, description="Enable support for Boost.Regex.")
    depends_on("boost +regex", when="+boost_regex")

    # Keep Boost's C++ standard in lockstep with the one chosen for Asio.
    for std in stds:
        depends_on("boost cxxstd=" + std, when="cxxstd={0} ^boost".format(std))

    def configure_args(self):
        """Assemble ./configure flags from the selected variants."""
        variants = self.spec.variants

        args = [
            "CXXFLAGS=-std=c++{0}".format(variants["cxxstd"].value),
        ]

        if variants["separate_compilation"].value:
            args.append("--enable-separate-compilation")

        if variants["boost_coroutine"].value:
            args.append("--enable-boost-coroutine")

        if variants["boost_coroutine"].value or variants["boost_regex"].value:
            # Bug fix: the original literal was missing its f-prefix, so
            # the raw text "{self.spec['boost'].prefix}" was passed to
            # configure instead of the actual Boost installation path.
            args.append("--with-boost={0}".format(self.spec["boost"].prefix))

        return args

    def url_for_version(self, version):
        """Release tarballs use the dashed version (asio-1-18-2)."""
        return "https://github.com/chriskohlhoff/asio/archive/asio-{0}.tar.gz".format(
            version.dashed
        )

    @property
    def configure_directory(self):
        # The configure script lives in the asio/ subdirectory of the repo.
        return os.path.join(self.stage.source_path, "asio")
| lgpl-2.1 | Python |
|
b1feed0ced6d1328cc39bc9bba36331ec6da7803 | Add ban for pgp/gpg private key blocks | pre-commit/pre-commit-hooks,Harwood/pre-commit-hooks | pre_commit_hooks/detect_private_key.py | pre_commit_hooks/detect_private_key.py | from __future__ import print_function
import argparse
import sys
# Byte markers whose presence anywhere in a file's raw contents indicates
# an embedded private key; matched with a simple substring test.
BLACKLIST = [
    b'BEGIN RSA PRIVATE KEY',
    b'BEGIN DSA PRIVATE KEY',
    b'BEGIN EC PRIVATE KEY',
    b'BEGIN OPENSSH PRIVATE KEY',
    b'BEGIN PRIVATE KEY',
    b'PuTTY-User-Key-File-2',
    b'BEGIN SSH2 ENCRYPTED PRIVATE KEY',
    b'BEGIN PGP PRIVATE KEY BLOCK',
]
def detect_private_key(argv=None):
    """Scan the given files for private-key markers.

    Prints one "Private key found: <name>" line per offending file and
    returns 1 when any was found, 0 otherwise (pre-commit hook contract).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('filenames', nargs='*', help='Filenames to check')
    args = parser.parse_args(argv)
    offenders = []
    for filename in args.filenames:
        with open(filename, 'rb') as source:
            data = source.read()
        if any(marker in data for marker in BLACKLIST):
            offenders.append(filename)
    for offender in offenders:
        print('Private key found: {}'.format(offender))
    return 1 if offenders else 0
# Allow running the hook directly; exit status doubles as the result.
if __name__ == '__main__':
    sys.exit(detect_private_key())
| from __future__ import print_function
import argparse
import sys
# Byte markers indicating an embedded private key (legacy copy of this
# file, before the PGP block marker was added).
BLACKLIST = [
    b'BEGIN RSA PRIVATE KEY',
    b'BEGIN DSA PRIVATE KEY',
    b'BEGIN EC PRIVATE KEY',
    b'BEGIN OPENSSH PRIVATE KEY',
    b'BEGIN PRIVATE KEY',
    b'PuTTY-User-Key-File-2',
    b'BEGIN SSH2 ENCRYPTED PRIVATE KEY',
]
def detect_private_key(argv=None):
    """Scan the given files for private-key markers.

    Prints one line per offending file and returns 1 when any file
    contains a blacklisted marker, else 0 (pre-commit hook contract).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('filenames', nargs='*', help='Filenames to check')
    args = parser.parse_args(argv)
    private_key_files = []
    for filename in args.filenames:
        with open(filename, 'rb') as f:
            content = f.read()
            # Substring match of every marker against the raw bytes.
            if any(line in content for line in BLACKLIST):
                private_key_files.append(filename)
    if private_key_files:
        for private_key_file in private_key_files:
            print('Private key found: {}'.format(private_key_file))
        return 1
    else:
        return 0
# Allow running the hook directly; exit status doubles as the result.
if __name__ == '__main__':
    sys.exit(detect_private_key())
| mit | Python |
6b9b9642ca09f3b33bdf61bb5dacbaa7c29de8fc | Create __main__.py | 0xC70FF3/pyarchetype,0xC70FF3/pyarchetype | src/__main__.py | src/__main__.py | mit | Python |
||
a58a7f3206168ae98b952e804404c46b89e81640 | Add a snippet (Pillow). | jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets | python/pil/python3_pillow_fork/show.py | python/pil/python3_pillow_fork/show.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import PIL.Image as pil_img # PIL.Image is a module not a class...
def main():
    """Open the sample image and display it with the system viewer."""
    picture = pil_img.open("lenna.png")  # other formats (.jpg, ...) work too
    picture.show()
# Run only when executed as a script, not on import.
if __name__ == '__main__':
    main()
| mit | Python |
|
21f789eb05788fcaf0be1960b3c1171437d8a299 | Replace Dict by Mapping. | arpith/zulip,Diptanshu8/zulip,Diptanshu8/zulip,showell/zulip,sonali0901/zulip,mohsenSy/zulip,dhcrzf/zulip,showell/zulip,ryanbackman/zulip,joyhchen/zulip,AZtheAsian/zulip,amanharitsh123/zulip,mohsenSy/zulip,punchagan/zulip,eeshangarg/zulip,showell/zulip,jrowan/zulip,christi3k/zulip,Diptanshu8/zulip,joyhchen/zulip,verma-varsha/zulip,PhilSk/zulip,AZtheAsian/zulip,rht/zulip,hackerkid/zulip,synicalsyntax/zulip,zulip/zulip,punchagan/zulip,verma-varsha/zulip,jackrzhang/zulip,sup95/zulip,umkay/zulip,cosmicAsymmetry/zulip,paxapy/zulip,eeshangarg/zulip,jrowan/zulip,dawran6/zulip,ahmadassaf/zulip,vabs22/zulip,umkay/zulip,dattatreya303/zulip,JPJPJPOPOP/zulip,ahmadassaf/zulip,dattatreya303/zulip,isht3/zulip,krtkmj/zulip,souravbadami/zulip,AZtheAsian/zulip,JPJPJPOPOP/zulip,jainayush975/zulip,eeshangarg/zulip,grave-w-grave/zulip,KingxBanana/zulip,mahim97/zulip,ahmadassaf/zulip,rishig/zulip,Juanvulcano/zulip,hackerkid/zulip,samatdav/zulip,PhilSk/zulip,brainwane/zulip,Juanvulcano/zulip,peguin40/zulip,jrowan/zulip,grave-w-grave/zulip,sharmaeklavya2/zulip,tommyip/zulip,mahim97/zulip,vikas-parashar/zulip,vabs22/zulip,calvinleenyc/zulip,zacps/zulip,jrowan/zulip,Galexrt/zulip,JPJPJPOPOP/zulip,brockwhittaker/zulip,timabbott/zulip,arpith/zulip,dawran6/zulip,amyliu345/zulip,tommyip/zulip,calvinleenyc/zulip,jphilipsen05/zulip,souravbadami/zulip,blaze225/zulip,umkay/zulip,samatdav/zulip,brainwane/zulip,sharmaeklavya2/zulip,showell/zulip,niftynei/zulip,joyhchen/zulip,AZtheAsian/zulip,rht/zulip,timabbott/zulip,synicalsyntax/zulip,umkay/zulip,Jianchun1/zulip,amanharitsh123/zulip,jackrzhang/zulip,amyliu345/zulip,sharmaeklavya2/zulip,dhcrzf/zulip,blaze225/zulip,vaidap/zulip,niftynei/zulip,timabbott/zulip,shubhamdhama/zulip,paxapy/zulip,krtkmj/zulip,susansls/zulip,KingxBanana/zulip,umkay/zulip,rht/zulip,sup95/zulip,JPJPJPOPOP/zulip,isht3/zulip,SmartPeople/zulip,andersk/zulip,ahmadassaf/zulip,vikas-parashar/zulip,showell/zulip,zac
ps/zulip,christi3k/zulip,AZtheAsian/zulip,tommyip/zulip,brainwane/zulip,kou/zulip,shubhamdhama/zulip,samatdav/zulip,krtkmj/zulip,aakash-cr7/zulip,souravbadami/zulip,paxapy/zulip,brockwhittaker/zulip,dattatreya303/zulip,jackrzhang/zulip,samatdav/zulip,niftynei/zulip,jackrzhang/zulip,KingxBanana/zulip,rishig/zulip,jackrzhang/zulip,blaze225/zulip,dhcrzf/zulip,brockwhittaker/zulip,umkay/zulip,verma-varsha/zulip,reyha/zulip,showell/zulip,ryanbackman/zulip,souravbadami/zulip,vaidap/zulip,blaze225/zulip,peguin40/zulip,susansls/zulip,punchagan/zulip,umkay/zulip,dawran6/zulip,sonali0901/zulip,Galexrt/zulip,rht/zulip,brockwhittaker/zulip,souravbadami/zulip,KingxBanana/zulip,Juanvulcano/zulip,susansls/zulip,peguin40/zulip,dattatreya303/zulip,ahmadassaf/zulip,mohsenSy/zulip,kou/zulip,punchagan/zulip,aakash-cr7/zulip,shubhamdhama/zulip,jphilipsen05/zulip,hackerkid/zulip,jackrzhang/zulip,jphilipsen05/zulip,sup95/zulip,jrowan/zulip,PhilSk/zulip,amanharitsh123/zulip,vaidap/zulip,grave-w-grave/zulip,zulip/zulip,isht3/zulip,synicalsyntax/zulip,ryanbackman/zulip,hackerkid/zulip,jainayush975/zulip,eeshangarg/zulip,samatdav/zulip,Diptanshu8/zulip,sonali0901/zulip,jainayush975/zulip,christi3k/zulip,eeshangarg/zulip,dawran6/zulip,kou/zulip,rishig/zulip,jphilipsen05/zulip,synicalsyntax/zulip,reyha/zulip,andersk/zulip,zacps/zulip,cosmicAsymmetry/zulip,rishig/zulip,andersk/zulip,amanharitsh123/zulip,Jianchun1/zulip,rht/zulip,mohsenSy/zulip,vikas-parashar/zulip,isht3/zulip,rishig/zulip,christi3k/zulip,brockwhittaker/zulip,cosmicAsymmetry/zulip,SmartPeople/zulip,grave-w-grave/zulip,mohsenSy/zulip,timabbott/zulip,Jianchun1/zulip,cosmicAsymmetry/zulip,dattatreya303/zulip,dhcrzf/zulip,peguin40/zulip,reyha/zulip,reyha/zulip,rishig/zulip,shubhamdhama/zulip,TigorC/zulip,Galexrt/zulip,reyha/zulip,amyliu345/zulip,rht/zulip,jphilipsen05/zulip,aakash-cr7/zulip,shubhamdhama/zulip,isht3/zulip,ahmadassaf/zulip,zulip/zulip,TigorC/zulip,j831/zulip,tommyip/zulip,christi3k/zulip,vabs22/zulip,dhcrzf/zulip,PhilS
k/zulip,calvinleenyc/zulip,timabbott/zulip,vikas-parashar/zulip,brainwane/zulip,vabs22/zulip,tommyip/zulip,blaze225/zulip,susansls/zulip,KingxBanana/zulip,jackrzhang/zulip,shubhamdhama/zulip,Jianchun1/zulip,amyliu345/zulip,arpith/zulip,jainayush975/zulip,rht/zulip,dawran6/zulip,amanharitsh123/zulip,Juanvulcano/zulip,samatdav/zulip,Jianchun1/zulip,ryanbackman/zulip,rishig/zulip,Galexrt/zulip,kou/zulip,jainayush975/zulip,calvinleenyc/zulip,grave-w-grave/zulip,arpith/zulip,eeshangarg/zulip,zulip/zulip,niftynei/zulip,ryanbackman/zulip,aakash-cr7/zulip,showell/zulip,JPJPJPOPOP/zulip,j831/zulip,verma-varsha/zulip,kou/zulip,hackerkid/zulip,ahmadassaf/zulip,j831/zulip,timabbott/zulip,vikas-parashar/zulip,paxapy/zulip,sup95/zulip,mahim97/zulip,JPJPJPOPOP/zulip,brockwhittaker/zulip,sonali0901/zulip,vaidap/zulip,Galexrt/zulip,krtkmj/zulip,arpith/zulip,dattatreya303/zulip,vaidap/zulip,brainwane/zulip,zulip/zulip,tommyip/zulip,SmartPeople/zulip,Diptanshu8/zulip,tommyip/zulip,mahim97/zulip,mahim97/zulip,amanharitsh123/zulip,calvinleenyc/zulip,dhcrzf/zulip,j831/zulip,blaze225/zulip,ryanbackman/zulip,zulip/zulip,Diptanshu8/zulip,peguin40/zulip,mohsenSy/zulip,arpith/zulip,cosmicAsymmetry/zulip,andersk/zulip,joyhchen/zulip,punchagan/zulip,amyliu345/zulip,timabbott/zulip,synicalsyntax/zulip,vabs22/zulip,calvinleenyc/zulip,sup95/zulip,jphilipsen05/zulip,susansls/zulip,zacps/zulip,Jianchun1/zulip,andersk/zulip,christi3k/zulip,PhilSk/zulip,souravbadami/zulip,joyhchen/zulip,brainwane/zulip,dawran6/zulip,Galexrt/zulip,synicalsyntax/zulip,zacps/zulip,KingxBanana/zulip,shubhamdhama/zulip,aakash-cr7/zulip,vikas-parashar/zulip,krtkmj/zulip,punchagan/zulip,sonali0901/zulip,grave-w-grave/zulip,TigorC/zulip,joyhchen/zulip,verma-varsha/zulip,krtkmj/zulip,verma-varsha/zulip,j831/zulip,jainayush975/zulip,kou/zulip,SmartPeople/zulip,synicalsyntax/zulip,sup95/zulip,TigorC/zulip,SmartPeople/zulip,brainwane/zulip,sharmaeklavya2/zulip,isht3/zulip,TigorC/zulip,paxapy/zulip,hackerkid/zulip,zacps/zulip,kou/
zulip,paxapy/zulip,cosmicAsymmetry/zulip,mahim97/zulip,andersk/zulip,hackerkid/zulip,susansls/zulip,vabs22/zulip,vaidap/zulip,j831/zulip,jrowan/zulip,dhcrzf/zulip,Galexrt/zulip,amyliu345/zulip,TigorC/zulip,niftynei/zulip,punchagan/zulip,SmartPeople/zulip,Juanvulcano/zulip,sharmaeklavya2/zulip,andersk/zulip,peguin40/zulip,krtkmj/zulip,reyha/zulip,niftynei/zulip,eeshangarg/zulip,AZtheAsian/zulip,PhilSk/zulip,aakash-cr7/zulip,Juanvulcano/zulip,sonali0901/zulip,sharmaeklavya2/zulip,zulip/zulip | zerver/lib/session_user.py | zerver/lib/session_user.py | from __future__ import absolute_import
from django.contrib.auth import SESSION_KEY, get_user_model
from django.contrib.sessions.models import Session
from typing import Mapping, Optional
from six import text_type
def get_session_dict_user(session_dict):
    # type: (Mapping[text_type, int]) -> Optional[int]
    # Compare django.contrib.auth._get_user_session_key
    """Return the user id stored in a decoded session dict, or None when
    the session carries no authenticated user."""
    try:
        # SESSION_KEY maps to the user's primary key serialized as text;
        # the pk field's to_python converts it back to its real type.
        return get_user_model()._meta.pk.to_python(session_dict[SESSION_KEY])
    except KeyError:
        return None
def get_session_user(session):
    # type: (Session) -> int
    """Return the user id for a stored Session row.

    NOTE(review): despite the ``int`` annotation this can return None for
    sessions without an authenticated user (see get_session_dict_user).
    """
    return get_session_dict_user(session.get_decoded())
| from __future__ import absolute_import
from django.contrib.auth import SESSION_KEY, get_user_model
from django.contrib.sessions.models import Session
from typing import Dict, Optional
from six import text_type
def get_session_dict_user(session_dict):
    # type: (Dict[text_type, int]) -> Optional[int]
    # Compare django.contrib.auth._get_user_session_key
    """Return the user id stored in a decoded session dict, or None when
    the session carries no authenticated user."""
    try:
        return get_user_model()._meta.pk.to_python(session_dict[SESSION_KEY])
    except KeyError:
        return None
def get_session_user(session):
    # type: (Session) -> int
    # NOTE(review): may return None despite the int annotation.
    return get_session_dict_user(session.get_decoded())
| apache-2.0 | Python |
7037762247bd40455eb1944dc21684561c5f97ba | add a __init__ file | ManushiM/infoviz_refugee_project,ManushiM/infoviz_refugee_project,ManushiM/infoviz_refugee_project,Elixeus/infoviz_refugee_project,Elixeus/infoviz_refugee_project,Elixeus/infoviz_refugee_project,jgrundy/infoviz_refugee_project,jgrundy/infoviz_refugee_project,jgrundy/infoviz_refugee_project | dataScraping/__init__.py | dataScraping/__init__.py | #!/usr/bin/env python
| mit | Python |
|
2b4544820bf6549bc172f8d5b3532a9103190920 | add utility I used to generate random ph and temp readings | aquarimeter/aquarimeter,aquarimeter/aquarimeter | utils/generate_random_values.py | utils/generate_random_values.py | import random
ph = [random.uniform(0, 14) for x in range(30000)]
temp = [random.uniform(55, 90) for x in range(30000)]
temp_file = open('temp.csv', 'w+')
ph_file = open('ph.csv', 'w+')
for x in range(len(temp)):
temp_file.write("%.2f," % temp[x])
ph_file.write("%.2f," % ph[x])
temp_file.close()
ph_file.close()
| apache-2.0 | Python |
|
4883bd13c6e07a0568c29fd26a141888b52292b7 | Add retriever object for player draft information | leaffan/pynhldb | utils/player_draft_retriever.py | utils/player_draft_retriever.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import requests
from lxml import html
from db.team import Team
from db.player_draft import PlayerDraft
class PlayerDraftRetriever():
NHL_PLAYER_DRAFT_PREFIX = "https://www.nhl.com/player"
DRAFT_INFO_REGEX = re.compile(
"(\d{4})\s(.+),\s(\d+).+\srd,.+\((\d+).+\soverall\)")
def __init__(self):
pass
def retrieve_draft_information(self, player_id):
url = "/".join((self.NHL_PLAYER_DRAFT_PREFIX, str(player_id)))
r = requests.get(url)
doc = html.fromstring(r.text)
raw_draft_info = doc.xpath(
"//li[@class='player-bio__item']/span[text() = " +
"'Draft:']/parent::li/text()")
if not raw_draft_info:
print("No draft information found")
return
raw_draft_info = raw_draft_info.pop()
print(raw_draft_info)
match = re.search(self.DRAFT_INFO_REGEX, raw_draft_info)
if match:
year = int(match.group(1))
team = Team.find_by_orig_abbr(match.group(2))
round = int(match.group(3))
overall = int(match.group(4))
draft_info = PlayerDraft(
player_id, team.team_id, year, round, overall)
draft_info_db = PlayerDraft.find_by_player_id(player_id)
if draft_info_db:
if draft_info_db != draft_info:
draft_info_db.update(draft_info)
| mit | Python |
|
a5e18330ac84a93b9a3ffe7d8493c401d3ade11e | Create version.py | HarllanAndrye/nilmtk,pauldeng/nilmtk,jaduimstra/nilmtk,nilmtk/nilmtk,mmottahedi/nilmtk,AlexRobson/nilmtk,josemao/nilmtk,nilmtk/nilmtk | nilmtk/version.py | nilmtk/version.py | version = '0.1.0'
| apache-2.0 | Python |
|
776350aaaed8a8e3f00a492c1a1735c24f595d89 | add config_dialog.py | mynicolas/KeyCounter,mynicolas/KeyCounter,mynicolas/KeyCounter | dialogs/config_dialog.py | dialogs/config_dialog.py | #-*- coding: utf-8 -*-
from win32ui import IDD_SET_TABSTOPS
from win32ui import IDC_EDIT_TABS
from win32ui import IDC_PROMPT_TABS
from win32con import IDOK
from win32con import IDCANCEL
import win32ui
import win32con
from pywin.mfc import dialog
IDC_EDIT_USERNAME = 2000
IDC_EDIT_PASSWORD = 2001
def ConfigDialogTemplate():
style = win32con.DS_SETFONT | win32con.DS_MODALFRAME | win32con.DS_FIXEDSYS | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU
cs = win32con.WS_CHILD | win32con.WS_VISIBLE
listCs = cs | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL | win32con.WS_TABSTOP
dlg = [[u'输入用户名密码', (0, 0, 200, 75), style, None, (8, "MS Sans Serif")], ]
s = cs | win32con.CBS_DROPDOWN | win32con.WS_VSCROLL | win32con.WS_TABSTOP
dlg.append([130, u"账号:", -1, (30, 10, 50, 10), cs | win32con.SS_LEFT])
dlg.append(["EDIT", "", IDC_EDIT_USERNAME, (70, 8, 100, 12), cs])
dlg.append([130, u"密码:", -1, (30, 30, 50, 30), cs | win32con.SS_LEFT])
dlg.append(["EDIT", "", IDC_EDIT_PASSWORD, (70, 30, 100, 12), cs])
s = cs | win32con.WS_TABSTOP
dlg.append([128, u"确认", win32con.IDOK, (30, 50, 50, 15), s | win32con.BS_DEFPUSHBUTTON])
s = win32con.BS_PUSHBUTTON | s
dlg.append([128, u"取消", win32con.IDCANCEL, (120, 50, 50, 15), s])
return dlg
class ConfigDialog(dialog.Dialog):
def __init__(self):
dialog.Dialog.__init__(self, ConfigDialogTemplate())
self.DoModal()
def OnInitDialog(self):
self.username_control = self.GetDlgItem(IDC_EDIT_USERNAME)
self.password_control = self.GetDlgItem(IDC_EDIT_PASSWORD)
def OnDestroy(self, msg):
del self.username_control
del self.password_control
def OnOK(self):
if self.username_control.GetLine() and self.password_control.GetLine():
self.username = self.username_control.GetLine()
self.password = self.password_control.GetLine()
self._obj_.OnOK()
| mit | Python |
|
42faf76ffe421802e628dd2a79f518765d43284b | Create recordsCheck.py | abhishekg2389/youtube-8m-challenge | recordsCheck.py | recordsCheck.py | import tensorflow as tf
import glob as glob
import getopt
import sys
import cPickle as pkl
import numpy as np
import time
opts, _ = getopt.getopt(sys.argv[1:],"",["input_dir=", "input_file=", "output_file="])
input_dir = "/data/video_level_feat_v3/"
input_file = ""
output_file = ""
print(opts)
for opt, arg in opts:
if opt in ("--input_dir"):
input_dir = arg
if opt in ("--input_file"):
input_file = arg
if opt in ("--output_file"):
output_file = arg
f = open(input_dir, 'rb')
filepaths = pkl.load(f)
f.close()
filepaths = [input_dir+x for x in filepaths]
features_format = {}
feature_names = []
for x in ['q0', 'q1', 'q2', 'q3', 'q4', 'mean', 'stddv', 'skew', 'kurt', 'iqr', 'rng', 'coeffvar', 'efficiency']:
features_format[x + '_rgb_frame'] = tf.FixedLenFeature([1024], tf.float32)
features_format[x + '_audio_frame'] = tf.FixedLenFeature([128], tf.float32)
feature_names.append(str(x + '_rgb_frame'))
feature_names.append(str(x + '_audio_frame'))
features_format['video_id'] = tf.FixedLenFeature([], tf.string)
features_format['labels'] = tf.VarLenFeature(tf.int64)
features_format['video_length'] = tf.FixedLenFeature([], tf.float32)
start_time = time.time()
errors = []
counter = 0
for filepath in filepaths:
print(counter)
counter += 1
filepaths_queue = tf.train.string_input_producer([filepath], num_epochs=1)
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filepaths_queue)
features = tf.parse_single_example(serialized_example,features=features_format)
with tf.Session() as sess:
init_op = tf.group(tf.global_variables_initializer(),tf.local_variables_initializer())
sess.run(init_op)
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
try:
while True:
proc_features, = sess.run([features])
except tf.errors.OutOfRangeError, e:
coord.request_stop(e)
except:
print("ERROR : "+filepath)
errors.append(filepath)
finally:
print(time.time() - start_time)
coord.request_stop()
coord.join(threads)
f = open(output_file, 'wb')
pkl.dump(errors, f, protocol=pkl.HIGHEST_PROTOCOL)
pkl.dump(counter, f, protocol=pkl.HIGHEST_PROTOCOL)
f.close()
| mit | Python |
|
f4bd76b7ebe376a2a0cea0ac1a44be4d741ce5c5 | Create LeetCode-541.py | yolozhang/Python-route | LeetCode-541.py | LeetCode-541.py | import math
class Solution(object):
def ReverseStr(self, str, k):
ans=''
n = int (math.ceil(len(str) / (2.0*k) ))
for i in range(n):
ans += str[2*i*k:(2*i+1)*k][::-1] #reverse k str
print '1',ans
ans += str[(2*i+1)*k:(2*i+2)*k]
print '2',ans
return ans
rs=Solution()
print rs.ReverseStr('sjodfjoig',3)
s='sjodfjoig'
print s[0:1]
a=''
a += s[8:20]
print s[10] #why???
print s[10:12] #
print 'a=',a
| unlicense | Python |
|
ed1dd068e41138f1f3b18b028e20e542965d2c7f | add word_dictionary.py task from week7 | pepincho/HackBulgaria,pepincho/Python101-and-Algo1-Courses,pepincho/HackBulgaria,pepincho/Python101-and-Algo1-Courses | Algo-1/week7/1-Word-Dictionary/word_dictionary.py | Algo-1/week7/1-Word-Dictionary/word_dictionary.py | class WordDictionary:
class Node:
def __init__(self, char):
# char is a substring of the phone number
self.char = char
# 10 digits
self.children_nodes = [None for i in range(26)]
self.isTerminal = False
def get_char(self):
return self.char
def add_node(self, node):
index = ord(node.char[0]) - 97
self.children_nodes[index] = node
def get_node(self, char):
index = ord(char) - 97
return self.children_nodes[index]
def __repr__(self):
return self.char
def insert(self, string):
current_node = self.root
for index in range(len(string)):
char = string[index]
result_node = current_node.get_node(char)
if result_node is None:
new_node = WordDictionary.Node(string[index:])
if index == len(string) - 1:
new_node.isTerminal = True
current_node.add_node(new_node)
current_node = new_node
else:
current_node = result_node
return self.root
def contains(self, phone_number):
root = self.root
index = 1
phone_number = str(phone_number)
current_node = root.get_node(phone_number[index - 1])
while current_node is not None and index < len(phone_number):
current_node = current_node.get_node(phone_number[index])
index += 1
# print(current_node)
if current_node is not None:
return True
return False
def __init__(self):
self.root = WordDictionary.Node('')
def main():
w = WordDictionary()
# w.insert('alabala')
# w.insert('asdf')
# print(w.contains('alabala'))
# w.insert('aladin')
# print(w.contains('asdf'))
# print(w.contains('aladin'))
# w.insert('circle')
# print(w.contains('rectangle'))
# print(w.contains('square'))
N = int(input())
while N != 0:
c = input()
command = c.split()
if command[0] == 'insert':
w.insert(command[1])
elif command[0] == 'contains':
print(w.contains(command[1]))
else:
pass
N -= 1
if __name__ == '__main__':
main()
| mit | Python |
|
a71b50f4b6a3bc1e760e3796f8c14f6c3e865a34 | replace identity translators with None | sloria/modular-odm,CenterForOpenScience/modular-odm,icereval/modular-odm,chrisseto/modular-odm | modularodm/translators/__init__.py | modularodm/translators/__init__.py | from dateutil import parser as dateparser
from bson import ObjectId
class DefaultTranslator(object):
null_value = None
to_default = None
from_default = None
class JSONTranslator(DefaultTranslator):
def to_datetime(self, value):
return str(value)
def from_datetime(self, value):
return dateparser.parse(value)
def to_ObjectId(self, value):
return str(value)
def from_ObjectId(self, value):
return ObjectId(value)
class StringTranslator(JSONTranslator):
null_value = 'none'
def to_default(self, value):
return str(value)
def from_int(self, value):
return int(value)
def from_float(self, value):
return float(value)
def from_bool(self, value):
return bool(value) | from dateutil import parser as dateparser
from bson import ObjectId
class DefaultTranslator(object):
null_value = None
def to_default(self, value):
return value
def from_default(self, value):
return value
def to_ObjectId(self, value):
return str(value)
def from_ObjectId(self, value):
return ObjectId(value)
class JSONTranslator(DefaultTranslator):
def to_datetime(self, value):
return str(value)
def from_datetime(self, value):
return dateparser.parse(value)
class StringTranslator(JSONTranslator):
null_value = 'none'
def to_default(self, value):
return str(value)
def from_int(self, value):
return int(value)
def from_float(self, value):
return float(value)
def from_bool(self, value):
return bool(value) | apache-2.0 | Python |
4cdeb6987910d4d5b33d37486ddeaafcde54bb2f | add classify_neuralnet script | NickleDave/hybrid-vocal-classifier | hvc/classify_neuralnet.py | hvc/classify_neuralnet.py | #from standard library
import glob
import sys
import os
import shelve
#from third-party
import numpy as np
import scipy.io as scio # to load matlab files
import numpy as np
from scipy.io import wavfile
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import StratifiedKFold
from keras.utils.np_utils import to_categorical
from keras.callbacks import ModelCheckpoint, CSVLogger
#from hvc
import hvc.utils.utils
import hvc.neuralnet.models
from hvc.utils import sequences
from hvc.audio.evfuncs import load_cbin,load_notmat
# get command line arguments
args = sys.argv
if len(args) != 2: # (first element, args[0], is the name of this script)
raise ValueError('Script requires one command line arguments, TRAIN_DIR')
TRAIN_DIR = args[1]
os.chdir(TRAIN_DIR)
try:
classify_dict = scio.loadmat('.\\classify\\to_classify.mat')
except FileNotFoundError:
print("Did not find required files in the directory supplied as command-line
" argument.\nPlease double check directory name.")
classify_dirs = classify_dict['classify_dirs']
clf_file = classify_dict['clf_file'][0] #[0] because string stored in np array
extension_id = clf_file.find('.dat')
# need to get rid of '.dat' extension before calling shelve with filename
clf_file = clf_file[:extension_id]
clf_file = '.\\train\\svmrbf_knn_results\\' + clf_file
clf_type = classify_dict['clf_type']
#need to get full directory path
with shelve.open(clf_file, 'r') as shlv:
if clf_type=='knn':
clf = shlv['knn_clf']
scaler = shlv['knn_scaler']
elif clf_type=='svm':
clf = shlv['svm_clf']
scaler = shlv['svm_scaler']
# used in loop below, see there for explanation
SHOULD_BE_DOUBLE = ['Fs',
'min_dur',
'min_int',
'offsets',
'onsets',
'sm_win',
'threshold']
#loop through dirs
for classify_dir in classify_dirs:
os.chdir(classify_dir)
notmats = glob.glob('*.not.mat')
if type(clf)==neighbors.classification.KNeighborsClassifier:
ftr_files = glob.glob('*knn_ftr.to_classify*')
elif type(clf)==SVC:
ftr_files = glob.glob('*svm_ftr.to_classify*')
for ftr_file,notmat in zip(ftr_files,notmats):
if type(clf)==neighbors.classification.KNeighborsClassifier:
samples = load_from_mat(ftr_file,'knn','classify')
elif type(clf)==SVC:
samples = load_from_mat(ftr_file,'svm','classify')
samples_scaled = scaler.transform(samples)
pred_labels = clf.predict(samples_scaled)
#chr() to convert back to character from uint32
pred_labels = [chr(val) for val in pred_labels]
# convert into one long string, what evsonganalty expects
pred_labels = ''.join(pred_labels)
notmat_dict = scio.loadmat(notmat)
notmat_dict['predicted_labels'] = pred_labels
notmat_dict['classifier_type'] = clf_type
notmat_dict['classifier_file'] = clf_file
print('saving ' + notmat)
# evsonganaly/Matlab expects all vars as double
for key, val in notmat_dict.items():
if key in SHOULD_BE_DOUBLE:
notmat_dict[key] = val.astype('d')
scio.savemat(notmat,notmat_dict)
| bsd-3-clause | Python |
|
bb785321cbb9d372f2009a4577404ae75fbd889a | exclude TestApp from cppcheck script | al2950/mygui,fmwviormv/mygui,fmwviormv/mygui,fmwviormv/mygui,scrawl/mygui,Anomalous-Software/mygui,scrawl/mygui,al2950/mygui,al2950/mygui,scrawl/mygui,al2950/mygui,Anomalous-Software/mygui,Anomalous-Software/mygui,Anomalous-Software/mygui,fmwviormv/mygui,scrawl/mygui | Scripts/cppcheck/cppcheck.py | Scripts/cppcheck/cppcheck.py | # run from root sources directory: python Scripts/cppcheck/cppcheck.py
import os
# cppcheck warning lines that END with one of these strings are suppressed
ignoredEndings = ["is never used", "It is safe to deallocate a NULL pointer", "Throwing exception in destructor"]
# warning lines CONTAINING one of these substrings are suppressed as well
ignoredContent = ["MyGUI_UString"]
def isIgnoredWarning(warning):
    """Return True when the cppcheck warning line matches either ignore list."""
    if any(warning.endswith(ending) for ending in ignoredEndings):
        return True
    return any(content in warning for content in ignoredContent)
def parseOutput():
    # Echo every warning from temp.cppcheck that is not ignored.
    # NOTE: Python 2 code (print statement, manual file handling).
    file = open("temp.cppcheck", 'r')
    line = file.readline()
    while line != "":
        line = line[0:len(line)-1]  # drop the trailing newline
        if (not isIgnoredWarning(line)):
            print line
        line = file.readline()
    file.close ()
def checkFolderSources(folder) :
    # Run cppcheck over `folder` (stderr redirected to temp.cppcheck),
    # then print the filtered warnings.
    os.system("cppcheck --enable=all -I Scripts/cppcheck " + folder + " 2>temp.cppcheck")
    parseOutput()
# Check each component; folders that need extra -I paths call cppcheck directly.
#checkFolderSources('MyGUIEngine')
os.system("cppcheck --enable=all -I Scripts/cppcheck -I MyGUIEngine/include MyGUIEngine/src 2>temp.cppcheck")
parseOutput()
checkFolderSources('Demos')
checkFolderSources('Tools')
# glob limits the scan to UnitTest_* folders (excludes TestApp, per commit)
checkFolderSources('UnitTests/UnitTest_*')
checkFolderSources('Common')
#checkFolderSources('Platforms/OpenGL')
# include temporary disabled due to cppcheck bug
#os.system("cppcheck --enable=all -I Scripts/cppcheck -I Platforms/OpenGL/OpenGLPlatform/include Platforms/OpenGL/OpenGLPlatform/src 2>temp.cppcheck")
os.system("cppcheck --enable=all -I Scripts/cppcheck Platforms/OpenGL/OpenGLPlatform/src 2>temp.cppcheck")
parseOutput()
#checkFolderSources('Platforms/Ogre')
os.system("cppcheck --enable=all -I Scripts/cppcheck -I Platforms/Ogre/OgrePlatform/include Platforms/Ogre/OgrePlatform/src 2>temp.cppcheck")
parseOutput()
#checkFolderSources('Platforms/DirectX')
os.system("cppcheck --enable=all -I Scripts/cppcheck -I Platforms/DirectX/DirectXPlatform/include Platforms/DirectX/DirectXPlatform/src 2>temp.cppcheck")
parseOutput()
checkFolderSources('Plugins')
checkFolderSources('Wrapper')
| # run from root sources directory: python Scripts/cppcheck/cppcheck.py
import os
# NOTE(review): superseded copy of the script above (the dataset row's
# old_contents); the only functional difference is the UnitTests line.
ignoredEndings = ["is never used", "It is safe to deallocate a NULL pointer", "Throwing exception in destructor"]
ignoredContent = ["MyGUI_UString"]
def isIgnoredWarning(warning):
    # True when the warning line matches either ignore list.
    for ignore in ignoredEndings:
        if warning.endswith(ignore):
            return True
    for ignore in ignoredContent:
        if warning.find(ignore) != -1:
            return True
    return False
def parseOutput():
    # Print all non-ignored warnings collected in temp.cppcheck (Python 2).
    file = open("temp.cppcheck", 'r')
    line = file.readline()
    while line != "":
        line = line[0:len(line)-1]
        if (not isIgnoredWarning(line)):
            print line
        line = file.readline()
    file.close ()
def checkFolderSources(folder) :
    os.system("cppcheck --enable=all -I Scripts/cppcheck " + folder + " 2>temp.cppcheck")
    parseOutput()
#checkFolderSources('MyGUIEngine')
os.system("cppcheck --enable=all -I Scripts/cppcheck -I MyGUIEngine/include MyGUIEngine/src 2>temp.cppcheck")
parseOutput()
checkFolderSources('Demos')
checkFolderSources('Tools')
# pre-change line: scanned everything under UnitTests
checkFolderSources('UnitTests')
checkFolderSources('Common')
#checkFolderSources('Platforms/OpenGL')
# include temporary disabled due to cppcheck bug
#os.system("cppcheck --enable=all -I Scripts/cppcheck -I Platforms/OpenGL/OpenGLPlatform/include Platforms/OpenGL/OpenGLPlatform/src 2>temp.cppcheck")
os.system("cppcheck --enable=all -I Scripts/cppcheck Platforms/OpenGL/OpenGLPlatform/src 2>temp.cppcheck")
parseOutput()
#checkFolderSources('Platforms/Ogre')
os.system("cppcheck --enable=all -I Scripts/cppcheck -I Platforms/Ogre/OgrePlatform/include Platforms/Ogre/OgrePlatform/src 2>temp.cppcheck")
parseOutput()
#checkFolderSources('Platforms/DirectX')
os.system("cppcheck --enable=all -I Scripts/cppcheck -I Platforms/DirectX/DirectXPlatform/include Platforms/DirectX/DirectXPlatform/src 2>temp.cppcheck")
parseOutput()
checkFolderSources('Plugins')
checkFolderSources('Wrapper')
| mit | Python |
bc34d530f4a21b5f06228d626f446c617b9c8876 | Add example that mirrors defconfig and oldconfig. | ulfalizer/Kconfiglib,ulfalizer/Kconfiglib | examples/defconfig_oldconfig.py | examples/defconfig_oldconfig.py | # Produces exactly the same output as the following script:
#
# make defconfig
# echo CONFIG_ETHERNET=n >> .config
# make oldconfig
# echo CONFIG_ETHERNET=y >> .config
# yes n | make oldconfig
#
# This came up in https://github.com/ulfalizer/Kconfiglib/issues/15.
import kconfiglib
import sys
conf = kconfiglib.Config(sys.argv[1])
# Mirrors defconfig
conf.load_config("arch/x86/configs/x86_64_defconfig")
conf.write_config(".config")
# Mirrors the first oldconfig
conf.load_config(".config")
conf["ETHERNET"].set_user_value('n')
conf.write_config(".config")
# Mirrors the second oldconfig
conf.load_config(".config")
conf["ETHERNET"].set_user_value('y')
for s in conf:
if s.get_user_value() is None and 'n' in s.get_assignable_values():
s.set_user_value('n')
# Write the final configuration
conf.write_config(".config")
| isc | Python |
|
7c270e2fb5e3169f179e045cc58fdd4d58672859 | add fixCAs.py to master | django-rea/nrp,valnet/valuenetwork,valnet/valuenetwork,django-rea/nrp,FreedomCoop/valuenetwork,valnet/valuenetwork,django-rea/nrp,FreedomCoop/valuenetwork,django-rea/nrp,FreedomCoop/valuenetwork,valnet/valuenetwork,FreedomCoop/valuenetwork | fixCAs.py | fixCAs.py | import sys
from valuenetwork.valueaccounting.models import *
agents = EconomicAgent.objects.all()
#import pdb; pdb.set_trace()
count = 0
# Copy each agent-type's is_context flag down onto the agent itself
# (Python 2 one-shot data-fix script).
for agent in agents:
    agent.is_context = agent.agent_type.is_context
    try:
        agent.save()
        count = count + 1
    except:
        # NOTE(review): bare except keeps the batch going; only the
        # exception type is reported, the failing agent is not identified.
        print "Unexpected error:", sys.exc_info()[0]
print "count = " + str(count)
| agpl-3.0 | Python |
|
171d573082e528b1f103db7ea22022fdcb24d629 | Create count-depth_true_false_unknown.py | CAMI-challenge/evaluation,CAMI-challenge/evaluation | binning/count-depth_true_false_unknown.py | binning/count-depth_true_false_unknown.py | #!/usr/bin/env python
from sys import argv, stdout, stderr, exit
from numpy import mean
# simple dummy weight function counting each sequences as one
class oneweight:
    """Mapping stand-in that reports weight 1 for every key."""
    def __getitem__(self, key):
        return 1
def usage():
    # Print the command-line help to stderr (Python 2 print-chevron syntax).
    print >> stderr, 'Usage: ', argv[0], '--labels lab.racol --predictions pred.racol [--with-unknown-labels --weights sequences.weights --scale .001]'
if __name__ == "__main__":
    import getopt
    # parse command line options
    try:
        opts, args = getopt.getopt( argv[1:], 'hl:p:w:s:u', ['help', 'labels=','predictions=','weights=','scale=','with-unknown-labels'] )
    except getopt.GetoptError, err:
        print str( err ) # will print something like "option -a not recognized"
        usage()
        exit(2)
    # default parameters
    reffile = None
    predfile = None
    weightfile = None
    unknown_labels = False
    scale = 1
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            exit()
        elif o in ("-l", "--labels"):
            reffile = a
        elif o in ("-p", "--predictions"):
            predfile = a
        elif o in ("-w", "--weights"):
            weightfile = a
        elif o in ("-u", "--with-unknown-labels"):
            unknown_labels = True
        elif o in ("-s", "--scale"):
            scale = float( a )
        else:
            assert False, "unhandled option"
    # both a labels file and a predictions file are mandatory
    if not reffile:
        print >>stderr, "you must specify a file for taxonomic labels"
        usage()
        exit( 3 )
    if not predfile:
        print >>stderr, "you must specify a file for taxonomic predictions"
        usage()
        exit( 4 )
    # read ref assignments
    # (tab-separated: sequence name, then one taxon per rank; '#' starts a comment)
    ref={}
    with open( reffile, "r" ) as f:
        for line in f:
            if line[0] != "#":
                line = line.rstrip( "\n" ).split( "\t" )
                ref[line[0]] = line[1:]
    # read predictions
    pred={}
    with open( predfile, "r" ) as f:
        for line in f:
            if line[0] != "#":
                line = line.rstrip( "\n" ).split( "\t" )
                pred[line[0]] = line[1:]
    # read weights if given
    if weightfile:
        weight = {}
        with open( weightfile, "r" ) as f:
            for line in f:
                name, w = line.strip().split( "\t", 2 )[:2]
                weight[name] = int( w )
    else:
        # with no weights file every sequence counts as 1
        weight = oneweight()
    # output only false lines in modified format
    correct = {}
    incorrect = {}
    unknowns = {}
    depth = 0
    counter = 0
    # classify each labeled sequence as true/false/unknown at its effective
    # prediction depth; the first decisive rank wins (break)
    for seq, path in ref.items():
        try:
            counter += 1
            #print path, pred[seq]
            plen = min( len( path ), len( pred[seq]) )
            for otax, ptax in zip( path, pred[seq] ):
                if ptax == "":
                    # empty prediction at this rank: effective depth shrinks
                    plen -= 1
                elif unknown_labels and otax == "":
                    try:
                        unknowns[plen] += weight[seq]
                    except KeyError:
                        unknowns[plen] = weight[seq]
                    break
                elif ptax == otax:
                    try:
                        correct[plen] += weight[seq]
                    except KeyError:
                        correct[plen] = weight[seq]
                    break
                else:
                    try:
                        incorrect[plen] += weight[seq]
                    except KeyError:
                        incorrect[plen] = weight[seq]
                    break
            if not plen:
                # nothing left to compare: count as correct at depth 0 (root)
                try:
                    correct[plen] += weight[seq]
                except KeyError:
                    correct[plen] = weight[seq]
            depth = max( depth, plen )
            del pred[seq] #remove entry from predictions
        except KeyError: #handle no prediction as root assignment
            try:
                correct[0] += weight[seq]
            except KeyError:
                correct[0] = weight[seq]
            stderr.write( "%s not found in prediction file\n" % (seq) )
    # leftover predictions have no reference label -> counted as unknown
    for seq, path in pred.items():
        counter += 1
        plen = len( path )
        for tax in path:
            if tax != "":
                try:
                    unknowns[plen] += weight[seq]
                except KeyError:
                    unknowns[plen] = weight[seq]
                break
            else:
                plen -= 1
        if not plen:
            try:
                unknowns[0] += weight[seq]
            except KeyError:
                unknowns[0] = weight[seq]
        depth = max( depth, plen )
    # sanity check: every (weighted) sequence landed in exactly one bucket
    # (Python 2: dict.values() returns lists, so `+` concatenates them)
    if type( weight ) == dict:
        assert sum( correct.values() + incorrect.values() + unknowns.values() ) == sum( weight.values() )
    else:
        assert counter == sum( correct.values() + incorrect.values() + unknowns.values() )
    # emit one TSV row per depth, optionally rescaled by --scale
    print "depth\ttrue\tfalse\tunknown"
    if scale == 1:
        for l in range( depth + 1 ):
            print "%d\t%d\t%d\t%d" % (l, correct.get( l, 0 ), incorrect.get( l, 0 ), unknowns.get( l, 0 ))
    else:
        for l in range( depth + 1 ):
            print "%d\t%.2f\t%.2f\t%.2f" % (l, scale*correct.get( l, 0 ), scale*incorrect.get( l, 0 ), scale*unknowns.get( l, 0 ))
| mit | Python |
|
38f90c31f6a0f4459a8ba2f96205d80588b384c5 | Add CollectDict (incomplete dicts) | EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base | calvin/actorstore/systemactors/json/CollectDict.py | calvin/actorstore/systemactors/json/CollectDict.py | # -*- coding: utf-8 -*-
# Copyright (c) 2017 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, ActionResult, condition, manage
from calvin.utilities.calvinlogger import get_actor_logger
_log = get_actor_logger(__name__)
class CollectDict(Actor):
    """
    Collect tokens from token port, forming a dict according to mapping. May produce
    a partial dictionary.
    Inputs:
      token(routing="collect-any-tagged"): token
    Outputs:
      dict : Collected dictionary according to 'mapping'
    """
    @manage(['mapping'])
    def init(self, mapping):
        # port-tag -> dict-key mapping applied to collected tokens
        self.mapping = mapping
    def will_start(self):
        # configure the collect-port with the mapping before tokens flow
        self.inports['token'].set_config({'port-mapping':self.mapping})
    @condition(['token'], ['dict'])
    def collect_tokens(self, token):
        _log.info("token: %r" % (token,))
        return ActionResult(production=(token,))
    action_priority = (collect_tokens, )
    test_args = []
    # init() takes `mapping`; the previous value {'select': {}} did not match
    # the constructor's keyword and would break the framework's actor tests.
    test_kwargs = {'mapping': {}}
| apache-2.0 | Python |
|
625139f9d3e5c06f4e5b355eaa070389f9a81954 | Add utils module | aaxelb/osf.io,ZobairAlijan/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,HarryRybacki/osf.io,adlius/osf.io,jeffreyliu3230/osf.io,acshi/osf.io,dplorimer/osf,brandonPurvis/osf.io,DanielSBrown/osf.io,njantrania/osf.io,samanehsan/osf.io,petermalcolm/osf.io,mattclark/osf.io,caseyrollins/osf.io,kch8qx/osf.io,RomanZWang/osf.io,fabianvf/osf.io,GageGaskins/osf.io,zachjanicki/osf.io,doublebits/osf.io,caseyrollins/osf.io,chrisseto/osf.io,chennan47/osf.io,cslzchen/osf.io,acshi/osf.io,lamdnhan/osf.io,GageGaskins/osf.io,samchrisinger/osf.io,caseyrygt/osf.io,cwisecarver/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,hmoco/osf.io,revanthkolli/osf.io,petermalcolm/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,danielneis/osf.io,samchrisinger/osf.io,himanshuo/osf.io,Johnetordoff/osf.io,ckc6cz/osf.io,SSJohns/osf.io,mluo613/osf.io,alexschiller/osf.io,lyndsysimon/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,felliott/osf.io,GaryKriebel/osf.io,hmoco/osf.io,Ghalko/osf.io,emetsger/osf.io,mluke93/osf.io,aaxelb/osf.io,amyshi188/osf.io,aaxelb/osf.io,zamattiac/osf.io,abought/osf.io,emetsger/osf.io,lamdnhan/osf.io,saradbowman/osf.io,kushG/osf.io,cosenal/osf.io,binoculars/osf.io,monikagrabowska/osf.io,billyhunt/osf.io,hmoco/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,ckc6cz/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,icereval/osf.io,dplorimer/osf,asanfilippo7/osf.io,asanfilippo7/osf.io,petermalcolm/osf.io,arpitar/osf.io,chrisseto/osf.io,barbour-em/osf.io,sbt9uc/osf.io,HalcyonChimera/osf.io,ckc6cz/osf.io,binoculars/osf.io,samanehsan/osf.io,icereval/osf.io,pattisdr/osf.io,SSJohns/osf.io,SSJohns/osf.io,mluo613/osf.io,cosenal/osf.io,HarryRybacki/osf.io,DanielSBrown/osf.io,kwierman/osf.io,lyndsysimon/osf.io,adlius/osf.io,Nesiehr/osf.io,himanshuo/osf.io,kch8qx/osf.io,jinluyuan/osf.io,reinaH/osf.io,sbt9uc/osf.io,danielneis/osf.io,njantrania/osf.io,alexschiller/osf.io,zamattiac/osf.io,Nesiehr/osf.io,mluke93/osf.io,haoyuchen1992/osf.io,ch
ennan47/osf.io,cldershem/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,jolene-esposito/osf.io,Nesiehr/osf.io,aaxelb/osf.io,doublebits/osf.io,erinspace/osf.io,zamattiac/osf.io,GaryKriebel/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,AndrewSallans/osf.io,njantrania/osf.io,adlius/osf.io,jeffreyliu3230/osf.io,sloria/osf.io,rdhyee/osf.io,revanthkolli/osf.io,brandonPurvis/osf.io,emetsger/osf.io,cwisecarver/osf.io,jnayak1/osf.io,Nesiehr/osf.io,emetsger/osf.io,reinaH/osf.io,chennan47/osf.io,ckc6cz/osf.io,kushG/osf.io,baylee-d/osf.io,billyhunt/osf.io,crcresearch/osf.io,felliott/osf.io,GaryKriebel/osf.io,GageGaskins/osf.io,samanehsan/osf.io,Ghalko/osf.io,abought/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,jmcarp/osf.io,baylee-d/osf.io,asanfilippo7/osf.io,HarryRybacki/osf.io,chrisseto/osf.io,zkraime/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,fabianvf/osf.io,zachjanicki/osf.io,Ghalko/osf.io,billyhunt/osf.io,jmcarp/osf.io,mattclark/osf.io,TomHeatwole/osf.io,amyshi188/osf.io,arpitar/osf.io,jolene-esposito/osf.io,jinluyuan/osf.io,jmcarp/osf.io,leb2dg/osf.io,cwisecarver/osf.io,billyhunt/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,kushG/osf.io,KAsante95/osf.io,bdyetton/prettychart,Johnetordoff/osf.io,jolene-esposito/osf.io,RomanZWang/osf.io,caneruguz/osf.io,GageGaskins/osf.io,saradbowman/osf.io,samanehsan/osf.io,amyshi188/osf.io,alexschiller/osf.io,lamdnhan/osf.io,pattisdr/osf.io,samchrisinger/osf.io,abought/osf.io,icereval/osf.io,GaryKriebel/osf.io,MerlinZhang/osf.io,mluo613/osf.io,chrisseto/osf.io,mfraezz/osf.io,samchrisinger/osf.io,njantrania/osf.io,TomHeatwole/osf.io,fabianvf/osf.io,ticklemepierce/osf.io,alexschiller/osf.io,acshi/osf.io,danielneis/osf.io,zkraime/osf.io,RomanZWang/osf.io,jeffreyliu3230/osf.io,RomanZWang/osf.io,petermalcolm/osf.io,himanshuo/osf.io,mfraezz/osf.io,AndrewSallans/osf.io,KAsante95/osf.io,billyhunt/osf.io,kwierman/osf.io,CenterForOpenScience/osf.io,MerlinZhang/osf.io,monikagrabowska/osf.io,revanthkolli/osf.io,zkraime/osf.io,mluke93/osf.i
o,lyndsysimon/osf.io,doublebits/osf.io,caneruguz/osf.io,ZobairAlijan/osf.io,mluo613/osf.io,doublebits/osf.io,ZobairAlijan/osf.io,sloria/osf.io,baylee-d/osf.io,DanielSBrown/osf.io,GageGaskins/osf.io,leb2dg/osf.io,kch8qx/osf.io,wearpants/osf.io,brandonPurvis/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,MerlinZhang/osf.io,abought/osf.io,dplorimer/osf,mluo613/osf.io,mfraezz/osf.io,ZobairAlijan/osf.io,TomBaxter/osf.io,crcresearch/osf.io,acshi/osf.io,mluke93/osf.io,barbour-em/osf.io,barbour-em/osf.io,rdhyee/osf.io,crcresearch/osf.io,cldershem/osf.io,arpitar/osf.io,cosenal/osf.io,dplorimer/osf,barbour-em/osf.io,lyndsysimon/osf.io,jinluyuan/osf.io,doublebits/osf.io,KAsante95/osf.io,ticklemepierce/osf.io,laurenrevere/osf.io,mattclark/osf.io,caseyrygt/osf.io,cldershem/osf.io,caseyrygt/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,amyshi188/osf.io,SSJohns/osf.io,arpitar/osf.io,binoculars/osf.io,KAsante95/osf.io,jnayak1/osf.io,fabianvf/osf.io,erinspace/osf.io,sloria/osf.io,kwierman/osf.io,caneruguz/osf.io,revanthkolli/osf.io,zachjanicki/osf.io,zamattiac/osf.io,acshi/osf.io,kwierman/osf.io,jmcarp/osf.io,hmoco/osf.io,erinspace/osf.io,zkraime/osf.io,jnayak1/osf.io,wearpants/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,CenterForOpenScience/osf.io,lamdnhan/osf.io,sbt9uc/osf.io,jeffreyliu3230/osf.io,rdhyee/osf.io,jolene-esposito/osf.io,felliott/osf.io,MerlinZhang/osf.io,sbt9uc/osf.io,DanielSBrown/osf.io,Ghalko/osf.io,reinaH/osf.io,jinluyuan/osf.io,cldershem/osf.io,RomanZWang/osf.io,himanshuo/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,bdyetton/prettychart,bdyetton/prettychart,cwisecarver/osf.io,adlius/osf.io,brianjgeiger/osf.io,felliott/osf.io,bdyetton/prettychart,ticklemepierce/osf.io,brandonPurvis/osf.io,wearpants/osf.io,ticklemepierce/osf.io,haoyuchen1992/osf.io,laurenrevere/osf.io,caneruguz/osf.io,pattisdr/osf.io,danielneis/osf.io,cosenal/osf.io,KAsante95/osf.io,TomBaxter/osf.io,HarryRybacki/osf.io,zachjanicki/osf.io,caseyrollins/osf.io,kushG/osf.io,reinaH
/osf.io,jnayak1/osf.io,TomHeatwole/osf.io | website/addons/dropbox/utils.py | website/addons/dropbox/utils.py | # -*- coding: utf-8 -*-
import os
from website.project.utils import get_cache_content
from website.addons.dropbox.client import get_node_addon_client
def get_file_name(path):
return os.path.split(path.strip('/'))[1]
# TODO(sloria): TEST ME
def render_dropbox_file(file_obj, client=None):
# Filename for the cached MFR HTML file
cache_name = file_obj.get_cache_filename(client=client)
node_settings = file_obj.node.get_addon('dropbox')
rendered = get_cache_content(node_settings, cache_name)
if rendered is None: # not in MFR cache
dropbox_client = client or get_node_addon_client(node_settings)
file_response, metadata = dropbox_client.get_file_and_metadata(file_obj.path)
rendered = get_cache_content(
node_settings=node_settings,
cache_file=cache_name,
start_render=True,
file_path=get_file_name(file_obj.path),
file_content=file_response.read(),
download_path=file_obj.download_url
)
return rendered
| apache-2.0 | Python |
|
1cf4de645dd44269b01b7f57322a3edca8334fc8 | Add another example script for MIDI output: a minimal drum pattern sequencer | SpotlightKid/microbit-worldtour-monifa | mididrumbox.py | mididrumbox.py | from microbit import button_a, display
from microbit import uart
from microbit import running_time, sleep
NOTE_ON = 0x90
CONTROLLER_CHANGE = 0xB0
PROGRAM_CHANGE = 0xC0


class MidiOut:
    """Minimal MIDI output writing channel messages to a device.

    The device only needs a ``write(bytes)`` method (e.g. a UART object).
    """

    def __init__(self, device, channel=1):
        # MIDI channels are 1-based for humans, 0-based on the wire.
        if channel < 1 or 16 < channel:
            raise ValueError('channel must be an integer between 1..16.')
        self.channel = channel
        self.device = device

    def channel_message(self, command, *data, ch=None):
        """Send one channel message; data bytes are masked to 7 bits."""
        channel = (ch or self.channel) - 1
        status = (command & 0xf0) | (channel & 0xf)
        self.device.write(bytes([status] + [byte & 0x7f for byte in data]))

    def note_on(self, note, velocity=127, ch=None):
        self.channel_message(NOTE_ON, note, velocity, ch=ch)

    def control_change(self, control, value, lsb=False, ch=None):
        self.channel_message(CONTROLLER_CHANGE, control,
                             value >> 7 if lsb else value, ch=ch)
        # Controllers 0..19 have a matching LSB controller at offset +32.
        if lsb and control < 20:
            self.channel_message(CONTROLLER_CHANGE, control + 32, value, ch=ch)

    def program_change(self, program, ch=None):
        self.channel_message(PROGRAM_CHANGE, program, ch=ch)
class Pattern:
    """Drum pattern parsed from a text grid: one "<note> <steps>" row per line."""

    # step character -> velocity; None means "no event on this step"
    # (".": off, "+": ghost, "s": soft, "m": medium, "x": hard)
    velocities = {
        "-": None,
        ".": 0,
        "+": 10,
        "s": 60,
        "m": 100,
        "x": 120,
    }

    def __init__(self, src):
        self.step = 0
        self.instruments = []
        self._active_notes = {}
        for raw in src.split('\n'):
            row = raw.strip()
            # skip blank lines and comments
            if not row or row.startswith('#'):
                continue
            fields = row.split(" ", 2)
            # rows are "<note> <steps>" with an optional trailing description
            if len(fields) not in (2, 3):
                continue
            self.instruments.append((int(fields[0]), fields[1]))
        self.steps = max(len(hits) for _, hits in self.instruments)

    def playstep(self, midiout, channel=10):
        """Emit note on/off events for the current step, then advance."""
        for note, hits in self.instruments:
            velocity = self.velocities.get(hits[self.step])
            if velocity is None:
                continue
            # a still-sounding note is stopped before (possibly) retriggering
            if self._active_notes.get(note):
                midiout.note_on(note, 0, ch=channel)
                self._active_notes[note] = 0
            if velocity > 0:
                midiout.note_on(note, max(1, velocity), ch=channel)
                self._active_notes[note] = velocity
        self.step = (self.step + 1) % self.steps
class Sequencer:
    # Plays a Pattern in an endless loop at a fixed tempo over MIDI.
    def __init__(self, midiout, bpm=120, channel=10):
        self.midiout = midiout
        # 15000 = 60000 ms-per-minute / 4, i.e. milliseconds per 16th-note
        # step; bpm is clamped to the 20..400 range
        self.mpq = 15000. / max(20, min(bpm, 400))
        self.channel = channel
    def play(self, pattern, kit=None):
        # optionally select a drum kit (program change) before starting
        if kit:
            self.midiout.program_change(kit, ch=self.channel)
            # give MIDI instrument some time to load drumkit
            sleep(300)
        # NOTE: loops forever -- there is no stop condition
        while True:
            last_tick = running_time()
            pattern.playstep(self.midiout, self.channel)
            # sleep away whatever is left of this step's time slot
            timetowait = max(0, self.mpq - (running_time() - last_tick))
            if timetowait > 0:
                sleep(timetowait)
# 16-step pattern, one row per MIDI note number (on GM drum kits:
# 36=kick, 38=snare, 42=closed hat, 46=open hat).
FUNKYDRUMMER = """\
36 x.x.......x..x..
38 ....x..m.m.mx..m
42 xxxxx.x.xxxxx.xx
46 .....x.x.....x..
"""
# Blink LED (0, 0) until button A is pressed, then start playback.
while True:
    if button_a.is_pressed():
        display.set_pixel(0, 0, 0)
        break
    display.set_pixel(0, 0, 5)
    sleep(100)
    display.set_pixel(0, 0, 0)
    sleep(100)
# Initialize UART for MIDI
uart.init(baudrate=31250)
midi = MidiOut(uart)
seq = Sequencer(midi, bpm=90)
# kit=9 selects a drum kit via program change; play() never returns
seq.play(Pattern(FUNKYDRUMMER), kit=9)
| mit | Python |
|
821a3826110ecfc64ab431b7028af3aae8aa80db | Add 20150522 question. | fantuanmianshi/Daily,fantuanmianshi/Daily | LeetCode/house_robbers.py | LeetCode/house_robbers.py | """
You are a professional robber planning to rob houses along a street. Each house
has a certain amount of money stashed, the only constraint stopping you from
robbing each of them is that adjacent houses have security system connected and
it will automatically contact the police if two adjacent houses were broken
into on the same night.
Given a list of non-negative integers representing the amount of money of each
house, determine the maximum amount of money you can rob tonight without
alerting the police.
"""
class Solution:
    # @param {integer[]} nums
    # @return {integer}
    def rob(self, nums):
        """Return the maximum loot from non-adjacent houses (House Robber).

        O(n) time / O(1) space DP: `current` is the best total over the
        houses seen so far, `previous` the best total excluding the last
        house.  An empty list naturally yields 0, so no special case is
        needed; the unused `result` variable from the original is dropped.
        """
        current, previous = 0, 0
        for x in nums:
            # either skip house x (keep current) or rob it on top of previous
            current, previous = max(current, x + previous), current
        return current
| mit | Python |
|
06cd9e8e5006d68d7656b7f147442e54aaf9d7a1 | Add female Public Health College and Club | enjaz/enjaz,osamak/student-portal,osamak/student-portal,enjaz/enjaz,osamak/student-portal,osamak/student-portal,enjaz/enjaz,osamak/student-portal,enjaz/enjaz,enjaz/enjaz | clubs/migrations/0035_add_public_health_college.py | clubs/migrations/0035_add_public_health_college.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_college(apps, schema_editor):
    # Forward migration: create the female Public Health college record and
    # its student club under the 2015/2016 female presidency club.
    Club = apps.get_model('clubs', 'Club')
    College = apps.get_model('clubs', 'College')
    StudentClubYear = apps.get_model('core', 'StudentClubYear')
    year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
                                                 end_date__year=2016)
    female_presidency = Club.objects.get(english_name="Presidency (Riyadh/Female)",
                                         year=year_2015_2016)
    # city='R' (Riyadh), gender='F'; section/name appear to be short codes --
    # confirm against the College model's choices
    r_i_f = College.objects.create(city='R', section='NG', name='I',
                                   gender='F')
    Club.objects.create(name="كلية الصحة العامة والمعلوماتية الصحية",
                        english_name="College of Public Health and Health Informatics",
                        description="",
                        email="pending@ksau-hs.edu.sa",
                        parent=female_presidency,
                        gender="F",
                        year=year_2015_2016,
                        city="R",
                        college=r_i_f)
def remove_college(apps, schema_editor):
    """Reverse of add_college: delete the created club and college.

    The original code fetched the Club but never called .delete() on it,
    so the reverse migration left the club behind.  The club is deleted
    first so the college it references can be removed cleanly afterwards.
    """
    Club = apps.get_model('clubs', 'Club')
    College = apps.get_model('clubs', 'College')
    StudentClubYear = apps.get_model('core', 'StudentClubYear')
    year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
                                                 end_date__year=2016)
    Club.objects.get(english_name="College of Public Health and Health Informatics",
                     city='R', gender='F', year=year_2015_2016).delete()
    College.objects.get(city='R', section='NG', name='I',
                        gender='F').delete()
class Migration(migrations.Migration):
    # Data migration: runs add_college forward, remove_college on rollback.
    dependencies = [
        ('clubs', '0034_club_media_assessor'),
    ]
    operations = [
        migrations.RunPython(
            add_college,
            reverse_code=remove_college),
    ]
| agpl-3.0 | Python |
|
c2257a268662c4ea220c6c4d869d38c9f9ab55de | Create hcsr04.py | ric96/joypi3 | hcsr04.py | hcsr04.py | !/usr/bin/env python
#
# HC-SR04 interface code for the Raspberry Pi
#
# William Henning @ http://Mikronauts.com
#
# uses joan's excellent pigpio library
#
# Does not quite work in one pin mode, will be updated in the future
#
import time
import pigpio
import memcache
# measurements are published to a local memcached instance (keys d1..d6 below)
mc = memcache.Client(['127.0.0.1:11211'], debug=0)
def _echo1(gpio, level, tick):
    # pigpio rising-edge callback: remember when the echo pulse started
    global _high
    _high = tick
def _echo0(gpio, level, tick):
    # pigpio falling-edge callback: store the echo pulse width in _time
    # (tick difference; pigpio ticks are microseconds per its docs)
    global _done, _high, _time
    _time = tick - _high
    _done = True
def readDistance2(_trig, _echo):
    # Trigger one HC-SR04 measurement and poll (max ~50 ms) until the edge
    # callbacks report a result; returns the pulse width, or 99999 on timeout.
    global pi, _done, _time
    _done = False
    pi.set_mode(_trig, pigpio.OUTPUT)
    pi.gpio_trigger(_trig,50,1)
    pi.set_mode(_echo, pigpio.INPUT)
    time.sleep(0.0001)
    tim = 0
    while not _done:
        time.sleep(0.001)
        tim = tim+1
        if tim > 50:
            return 99999
    return _time
pi = pigpio.pi('localhost',1234)
if __name__ == "__main__":
    # Register rising/falling callbacks on each sensor's echo pin.
    # NOTE(review): the my_echo* names are overwritten each time; only the
    # side effect of registering the callback matters here.
    my_echo1 = pi.callback(10, pigpio.RISING_EDGE, _echo1)
    my_echo0 = pi.callback(10, pigpio.FALLING_EDGE, _echo0)
    my_echo1 = pi.callback(25, pigpio.RISING_EDGE, _echo1)
    my_echo0 = pi.callback(25, pigpio.FALLING_EDGE, _echo0)
    my_echo1 = pi.callback(8, pigpio.RISING_EDGE, _echo1)
    my_echo0 = pi.callback(8, pigpio.FALLING_EDGE, _echo0)
    my_echo1 = pi.callback(5, pigpio.RISING_EDGE, _echo1)
    my_echo0 = pi.callback(5, pigpio.FALLING_EDGE, _echo0)
    my_echo1 = pi.callback(12, pigpio.RISING_EDGE, _echo1)
    my_echo0 = pi.callback(12, pigpio.FALLING_EDGE, _echo0)
    my_echo1 = pi.callback(16, pigpio.RISING_EDGE, _echo1)
    my_echo0 = pi.callback(16, pigpio.FALLING_EDGE, _echo0)
    # publish all six distances forever, ~10 times a second
    while 1:
        #print "DISTANCE 1: ",(readDistance2(24,10)/58),"\tDISTANCE 2: ",(readDistance2(9,25)/58),"\tDI$
        #print "DISTANCE 2: ",(readDistance2(9,25)/58)
        #print "DISTANCE 3: ",(readDistance2(11,8)/58)
        #print "DISTANCE 4: ",(readDistance2(7,5)/58)
        #print "DISTANCE 5: ",(readDistance2(6,12)/58)
        #print "DISTANCE 6: ",(readDistance2(19,16)/58)
        # /58 converts the echo pulse width to centimetres (HC-SR04 datasheet)
        mc.set("d1",(readDistance2(24,10)/58))
        mc.set("d2",(readDistance2(9,25)/58))
        mc.set("d3",(readDistance2(11,8)/58))
        mc.set("d4",(readDistance2(7,5)/58))
        mc.set("d5",(readDistance2(6,12)/58))
        mc.set("d6",(readDistance2(19,16)/58))
        time.sleep(0.1)
# my_echo1.cancel()
# my_echo0.cancel()
| mit | Python |
|
f4357343df1d13f5828c233e84d14586a1f786d0 | add functools03.py | devlights/try-python | trypython/stdlib/functools03.py | trypython/stdlib/functools03.py | # coding: utf-8
"""
functoolsモジュールについて
singledispatch関数についてのサンプルです.
"""
import functools
import html
import numbers
from trypython.common.commoncls import SampleBase
from trypython.common.commonfunc import pr
# ---------------------------------------------
# singledispatch化したい関数に対して
# @functools.singledispatch デコレータを適用する
# 同じ呼び出しで呼び先を分岐させたい関数に対して
# @関数名.register(型) を付与すると登録される。
# ---------------------------------------------
@functools.singledispatch
def htmlescape(obj):
content = html.escape(repr(obj))
return f'<pre>{content}</pre>'
@htmlescape.register(str)
def _(text):
return f'<p>{text}</p>'
@htmlescape.register(numbers.Integral)
def _(n):
return f'<pre>0x{n}</pre>'
class Sample(SampleBase):
def exec(self):
pr('singledispatch(obj)', htmlescape((1, 2, 3)))
pr('singledispatch(str)', htmlescape('hello world'))
pr('singledispatch(int)', htmlescape(100))
def go():
obj = Sample()
obj.exec()
if __name__ == '__main__':
go()
| mit | Python |
|
17cdae7f50a7ed15c4e8a84cdb0000a32f824c5f | Add an oauth example script. | xrg/tweepy,raymondethan/tweepy,damchilly/tweepy,mlinsey/tweepy,ze-phyr-us/tweepy,obskyr/tweepy,markunsworth/tweepy,edsu/tweepy,kcompher/tweepy,yared-bezum/tweepy,tuxos/tweepy,alexhanna/tweepy,nickmalleson/tweepy,iamjakob/tweepy,cogniteev/tweepy,cinemapub/bright-response,nickmalleson/tweepy,conversocial/tweepy,cinemapub/bright-response,bconnelly/tweepy,alexhanna/tweepy,techieshark/tweepy,vivek8943/tweepy,abhishekgahlot/tweepy,nickmalleson/tweepy,takeshineshiro/tweepy,srimanthd/tweepy,thelostscientist/tweepy,abhishekgahlot/tweepy,sidewire/tweepy,vikasgorur/tweepy,kylemanna/tweepy,svven/tweepy,robbiewoods05/tweepy,aleczadikian/tweepy,vishnugonela/tweepy,atomicjets/tweepy,xrg/tweepy,markunsworth/tweepy,ze-phyr-us/tweepy,tweepy/tweepy,truekonrads/tweepy,wjt/tweepy,tsablic/tweepy,hackebrot/tweepy,jperecharla/tweepy,Choko256/tweepy,elijah513/tweepy,awangga/tweepy,arunxarun/tweepy,zhenv5/tweepy,nickmalleson/tweepy,kskk02/tweepy,IsaacHaze/tweepy,aganzha/tweepy,LikeABird/tweepy,arpithparikh/tweepy,sa8/tweepy,rudraksh125/tweepy | examples/outh/getaccesstoken.py | examples/outh/getaccesstoken.py | import webbrowser
import tweepy
"""
Query the user for their consumer key/secret
then attempt to fetch a valid access token.
"""
if __name__ == "__main__":
consumer_key = raw_input('Consumer key: ').strip()
consumer_secret = raw_input('Consumer secret: ').strip()
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
# Open authorization URL in browser
webbrowser.open(auth.get_authorization_url())
# Ask user for verifier pin
pin = raw_input('Verification pin number from twitter.com: ').strip()
# Get access token
token = auth.get_access_token(verifier=pin)
# Give user the access token
print 'Access token:'
print ' Key: %s' % token.key
print ' Secret: %s' % token.secret
| mit | Python |
|
62f44daaf325d94c7374836f3bb50fd5694c62c0 | Add utilities/extract_scores.py | nettrom/Wiki-Class | wikiclass/utilities/extract_scores.py | wikiclass/utilities/extract_scores.py |
r"""
Gathers the scores for a set of revisions and
prints a TSV to stdout of the format:
<page_id>\t<title>\n<rev_id>\t<prediction>\t<weighted_sum>
See https://phabricator.wikimedia.org/T135684 for more information.
Usage:
extract_scores -h | --help
extract_scores --dump=<dump-file>... --model=<model-file>
[--verbose]
[--rev-scores=<path>]
Options:
-h --help Prints out this documentation.
--dump=<dump-file> Path to dump file.
--model=<model-file> Path to the model file.
--verbose Prints dots and stuff to stderr
--rev-scores=<path> The location to write output to.
[default: <stdout>]
"""
from revscoring import ScorerModel
from revscoring.datasources import revision_oriented
from revscoring.dependencies import solve
import logging
import sys
import docopt
import mwxml
import mysqltsv
logger = logging.getLogger(__name__)
r_text = revision_oriented.revision.text
# ordinal weight per article-quality class, Stub (lowest) .. FA (highest)
CLASS_WEIGHTS = {
    'Stub': 0,
    'Start': 1,
    'C': 2,
    'B': 3,
    'GA': 4,
    'FA': 5
}
def main(argv=None):
    # Parse CLI arguments (docopt, see module docstring), load the scorer
    # model, open the output TSV writer, and hand off to run().
    args = docopt.docopt(__doc__, argv=argv)
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s %(levelname)s:%(name)s -- %(message)s'
    )
    dumps = args['--dump']
    with open(args['--model']) as f:
        model = ScorerModel.load(f)
    headers=["page_id", "title", "rev_id", "prediction", "weighted_sum"]
    # the documented default "<stdout>" sends the TSV to standard output
    if args['--rev-scores'] == "<stdout>":
        rev_scores = mysqltsv.Writer(sys.stdout, headers=headers)
    else:
        rev_scores = mysqltsv.Writer(open(args['--rev-scores'], "w"),
                                     headers=headers)
    verbose = args['--verbose']
    run(dumps, model, rev_scores, verbose=verbose)
def run(paths, model, rev_scores, verbose=False):
    """Score every main-namespace revision in the given XML dumps.

    :param paths: iterable of dump-file paths to process
    :param model: a loaded revscoring ScorerModel
    :param rev_scores: mysqltsv.Writer receiving one row per revision
    :param verbose: when True, echo each prediction's weight to stderr
    """
    def process_dump(dump, path):
        # Yield (rev_id, score, title, page_id) for main-namespace pages.
        for page in dump:
            if int(page.namespace) != 0:
                continue
            for revision in page:
                feature_values = list(solve(model.features,
                                            cache={r_text: revision.text}))
                yield (revision.id, model.score(feature_values),
                       page.title, page.id)

    for rev_id, score, title, page_id in mwxml.map(process_dump, paths):
        # Expected value of the class weight under the model's distribution.
        weighted_sum = sum(CLASS_WEIGHTS[cls] * score['probability'][cls]
                           for cls in score['probability'])
        rev_scores.write(
            [page_id,
             title,
             rev_id,
             CLASS_WEIGHTS[score['prediction']],
             weighted_sum]
        )
        if verbose:
            # BUG FIX: CLASS_WEIGHTS values are ints and stderr.write()
            # requires a str -- the original raised TypeError here.
            sys.stderr.write(str(CLASS_WEIGHTS[score['prediction']]))
            sys.stderr.flush()

    if verbose:
        sys.stderr.write("\n")
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Exit quietly on Ctrl-C instead of dumping a traceback.
        sys.stderr.write("\n^C Caught. Exiting...")
| mit | Python |
|
3898bec1a5470c79f93e7c69f6700a4af1801670 | Create love6.py | dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey | Python/CodingBat/love6.py | Python/CodingBat/love6.py | # http://codingbat.com/prob/p100958
def love6(a, b):
    """Return True if either value is 6, or their sum or absolute
    difference is 6."""
    candidates = (a, b, a + b, abs(a - b))
    return 6 in candidates
| mit | Python |
|
be095fdb2163575803020cefcfa0d86cff1d990f | Create new package (#6453) | krafczyk/spack,mfherbst/spack,mfherbst/spack,EmreAtes/spack,krafczyk/spack,krafczyk/spack,mfherbst/spack,iulian787/spack,EmreAtes/spack,EmreAtes/spack,LLNL/spack,matthiasdiener/spack,LLNL/spack,tmerrick1/spack,EmreAtes/spack,matthiasdiener/spack,matthiasdiener/spack,LLNL/spack,matthiasdiener/spack,tmerrick1/spack,mfherbst/spack,krafczyk/spack,mfherbst/spack,krafczyk/spack,iulian787/spack,tmerrick1/spack,iulian787/spack,iulian787/spack,tmerrick1/spack,EmreAtes/spack,matthiasdiener/spack,tmerrick1/spack,LLNL/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/r-lars/package.py | var/spack/repos/builtin/packages/r-lars/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RLars(RPackage):
"""Efficient procedures for fitting an entire lasso sequence with the cost
of a single least squares fit."""
homepage = "https://cran.r-project.org/web/packages/lars/index.html"
url = "https://cran.r-project.org/src/contrib/lars_1.2.tar.gz"
list_url = "https://cran.rstudio.com/src/contrib/Archive/lars"
depends_on('r@3.4.0:3.4.9')
version('1.2', '2571bae325f6cba1ad0202ea61695b8c')
version('1.1', 'e94f6902aade09b13ec25ba2381384e5')
version('0.9-8', 'e6f9fffab2d83898f6d3d811f04d177f')
| lgpl-2.1 | Python |
|
4acf6d76bf7ec982573331835f7bcddd8487b18b | Add package for unison | skosukhin/spack,mfherbst/spack,EmreAtes/spack,tmerrick1/spack,matthiasdiener/spack,TheTimmy/spack,LLNL/spack,krafczyk/spack,TheTimmy/spack,iulian787/spack,TheTimmy/spack,TheTimmy/spack,tmerrick1/spack,krafczyk/spack,LLNL/spack,matthiasdiener/spack,mfherbst/spack,TheTimmy/spack,skosukhin/spack,krafczyk/spack,lgarren/spack,krafczyk/spack,EmreAtes/spack,LLNL/spack,iulian787/spack,LLNL/spack,mfherbst/spack,skosukhin/spack,LLNL/spack,matthiasdiener/spack,tmerrick1/spack,skosukhin/spack,skosukhin/spack,lgarren/spack,lgarren/spack,mfherbst/spack,EmreAtes/spack,lgarren/spack,tmerrick1/spack,EmreAtes/spack,iulian787/spack,lgarren/spack,EmreAtes/spack,tmerrick1/spack,krafczyk/spack,mfherbst/spack,matthiasdiener/spack,iulian787/spack,iulian787/spack,matthiasdiener/spack | var/spack/repos/builtin/packages/unison/package.py | var/spack/repos/builtin/packages/unison/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Unison(Package):
    """Unison is a file-synchronization tool for OSX, Unix, and
    Windows. It allows two replicas of a collection of files and
    directories to be stored on different hosts (or different disks
    on the same host), modified separately, and then brought up to
    date by propagating the changes in each replica to the
    other."""
    homepage = "https://www.cis.upenn.edu/~bcpierce/unison/"
    url      = "https://www.seas.upenn.edu/~bcpierce/unison//download/releases/stable/unison-2.48.3.tar.gz"
    version('2.48.4', '5334b78c7e68169df7de95f4c6c4b60f')
    depends_on('ocaml', type='build')
    # Serial build -- presumably the OCaml build is not parallel-make safe.
    parallel = False
    def install(self, spec, prefix):
        # Regenerate version/build metadata before compiling.
        make('./mkProjectInfo')
        # Build only the text (command line) user interface.
        make('UISTYLE=text')
        mkdirp(prefix.bin)
        install('unison', prefix.bin)
        # Make sure the installed binary keeps its execute bit.
        set_executable(join_path(prefix.bin, 'unison'))
| lgpl-2.1 | Python |
|
77d3756d27758276c084cf20693202cfa645df3e | Add fptool.py that will replace flash_fp_mcu | coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec | util/fptool.py | util/fptool.py | #!/usr/bin/env python3
# Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A tool to manage the fingerprint system on Chrome OS."""
import argparse
import os
import shutil
import subprocess
import sys
def cmd_flash(args: argparse.Namespace) -> int:
    """
    Flash the entire firmware FPMCU using the native bootloader.

    This requires the Chromebook to be in dev mode with hardware write protect
    disabled.
    """
    flasher = 'flash_fp_mcu'
    if shutil.which(flasher) is None:
        print('Error - The flash_fp_mcu utility does not exist.')
        return 1

    cmd = [flasher]
    image = args.image
    if image:
        if not os.path.isfile(image):
            print(f'Error - image {image} is not a file.')
            return 1
        cmd.append(image)

    print(f'Running {" ".join(cmd)}.')
    sys.stdout.flush()
    return subprocess.run(cmd).returncode
def main(argv: list) -> int:
    """Dispatch to the subcommand named on the command line."""
    parser = argparse.ArgumentParser(description=__doc__)
    subcommands = parser.add_subparsers(dest='subcommand', title='subcommands')
    # This method of setting required is more compatible with older python.
    subcommands.required = True

    # Parser for "flash" subcommand.
    flash_parser = subcommands.add_parser('flash', help=cmd_flash.__doc__)
    flash_parser.add_argument(
        'image', nargs='?', help='Path to the firmware image')
    flash_parser.set_defaults(func=cmd_flash)

    opts = parser.parse_args(argv)
    return opts.func(opts)
if __name__ == '__main__':
    # Propagate the subcommand's return code as the process exit status.
    sys.exit(main(sys.argv[1:]))
| bsd-3-clause | Python |
|
bc32b2bccc82caecea0cf936e13c3ae70d0e9486 | Add script to remove broken images. | Lodour/Weibo-Album-Crawler | utils/check.py | utils/check.py | from pathlib import Path
from PIL import Image
from concurrent.futures import ProcessPoolExecutor
import os
import sys
def verify_or_delete(filename):
    """Return True when *filename* decodes as a complete image, else False.

    Despite the name, this function never deletes anything; the caller is
    responsible for removing files that fail verification.
    """
    try:
        image = Image.open(filename)
        image.load()
        return True
    except OSError:
        return False
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('Remove Broken Images\nUsage: python check.py <dir>')
        exit(-1)
    # BUG FIX: the argument list is sys.argv, not sys.args (AttributeError).
    filenames = list(Path(sys.argv[1]).rglob('*.*'))
    with ProcessPoolExecutor() as executor:
        broken, total = 0, len(filenames)
        # Image decoding is CPU bound, so verify in worker processes;
        # map() yields results in the same order as `filenames`.
        jobs = executor.map(verify_or_delete, filenames)
        for i, (filename, verified) in enumerate(zip(filenames, jobs)):
            if not verified:
                broken += 1
                # os.remove avoids spawning a shell and is immune to
                # quoting/injection problems in file names (the original
                # used os.system('rm "%s"')).
                os.remove(filename)
            print('Checking %d/%d, %d deleted...' %
                  (i + 1, total, broken), end='\r')
    print('\nDone.')
| mit | Python |
|
388bbd915a5e40a2e096eb22ab294ffcbd3db936 | Add a gmm, currently wrapping sklearn | bccp/bananaplots,bccp/bananaplots | bananas/model.py | bananas/model.py | import numpy
# FIXME: copy the functions here
from sklearn.mixture.gmm import log_multivariate_normal_density, logsumexp
class GMM(object):
    """A full-covariance Gaussian mixture model with fixed parameters.

    Parameters are supplied at construction; use :meth:`fit` to estimate
    them from data instead.
    """
    def __init__(self, weights, means, covs):
        self.weights = numpy.array(weights)
        self.means = numpy.array(means)
        self.covs = numpy.array(covs)

    def score(self, X, return_responsibilities=False):
        """Return per-sample log density under the mixture.

        :param X: (n_samples, n_features) array; a 1-d array is treated
            as a column of single-feature samples.
        :param return_responsibilities: when True, also return the
            (n_samples, n_components) posterior component probabilities.
        """
        X = numpy.array(X)
        if X.ndim == 1:
            X = X[:, None]
        if X.size == 0:
            # BUG FIX: the original always returned a tuple here, even
            # when responsibilities were not requested.
            logprob = numpy.array([])
            if return_responsibilities:
                return logprob, numpy.empty((0, len(self.weights)))
            return logprob
        if X.shape[1] != self.means.shape[1]:
            raise ValueError('The shape of X is not compatible with self')

        # log p(x, k) = log weight_k + log N(x | mean_k, cov_k)
        # BUG FIX: the original had an unbalanced ')' after numpy.log(...),
        # which was a SyntaxError.
        lpr = numpy.log(self.weights) + \
            log_multivariate_normal_density(X,
                                            self.means,
                                            self.covs, 'full')
        logprob = logsumexp(lpr, axis=1)

        if return_responsibilities:
            responsibilities = numpy.exp(lpr - logprob[:, None])
            return logprob, responsibilities
        return logprob

    @classmethod
    def fit(kls, nc, X):
        """Estimate an nc-component mixture from samples X via sklearn."""
        # FIXME: get rid of this and add weights support
        from sklearn import mixture
        model = mixture.GMM(nc, covariance_type='full', n_iter=100)
        model.fit(X)
        if not model.converged_:
            raise ValueError("Your data is strange. Gaussian mixture failed to converge")
        return kls(model.weights_, model.means_, model.covars_)
class Confidence(object):
    """Map raw model scores to confidence levels via an interpolation table."""

    def __init__(self, model, confidence_table):
        # BUG FIX: the original def line was missing its trailing ':'
        # (SyntaxError).
        self.model = model
        self.confidence_table = confidence_table

    def score(self, X):
        """Return the confidence level for each sample in X.

        Scores below the tabulated range map to 1.0, above it to 0.0.
        """
        x, y = self.confidence_table
        sc = self.model.score(X)
        return numpy.interp(sc, x, y, left=1., right=0.)

    @classmethod
    def fit(kls, model, X, vmin=-5, vmax=0, nb=100):
        """Build a confidence table by scoring X and tabulating percentiles."""
        sc = model.score(X)
        confidence_levels = 1 - numpy.logspace(vmin, vmax, num=nb)
        # FIXME: add weight support here
        sc_cl = numpy.percentile(sc, 100. - confidence_levels * 100.)
        confidence_table = numpy.array([sc_cl, confidence_levels])
        return kls(model, confidence_table)
| apache-2.0 | Python |
|
b1bea70df1f62e4c0447a406b77266b804eec5df | add new Package (#15894) | iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/nanomsg/package.py | var/spack/repos/builtin/packages/nanomsg/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Nanomsg(CMakePackage):
    """The nanomsg library is a simple high-performance
    implementation of several 'scalability protocols'"""
    homepage = "https://nanomsg.org/"
    url      = "https://github.com/nanomsg/nanomsg/archive/1.0.0.tar.gz"
    # sha256 checksums of the GitHub release tarballs.
    version('1.1.5', sha256='218b31ae1534ab897cb5c419973603de9ca1a5f54df2e724ab4a188eb416df5a')
    version('1.0.0', sha256='24afdeb71b2e362e8a003a7ecc906e1b84fd9f56ce15ec567481d1bb33132cc7')
| lgpl-2.1 | Python |
|
f069a3feda43ebc436e404dad66dfaa06055e35a | Add h5sh python package (#14001) | iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack | var/spack/repos/builtin/packages/py-h5sh/package.py | var/spack/repos/builtin/packages/py-h5sh/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyH5sh(PythonPackage):
    """Shell-like environment for HDF5."""
    homepage = "https://pypi.python.org/pypi/h5sh"
    url      = "https://github.com/sethrj/h5sh/archive/v0.1.1.tar.gz"
    maintainers = ['sethrj']
    version('0.1.1', sha256='111989d8200d1da8e150aee637a907e524ca0f98d5005a55587cba0d94d9c4a0')
    # Runtime dependencies (setuptools is also needed at build time).
    depends_on('py-setuptools', type=('build', 'run'))
    depends_on('py-h5py', type=('build', 'run'))
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-prompt-toolkit@2:', type=('build', 'run'))
    depends_on('py-pygments', type=('build', 'run'))
    depends_on('py-six', type=('build', 'run'))
    # Only needed to run the package's test suite.
    depends_on('py-pytest', type='test')
| lgpl-2.1 | Python |
|
5d3a774587bef551553a731d4f9dcfeefdb70ed3 | Add package for scalpel@0.5.3 (#5901) | skosukhin/spack,EmreAtes/spack,tmerrick1/spack,EmreAtes/spack,tmerrick1/spack,LLNL/spack,LLNL/spack,EmreAtes/spack,skosukhin/spack,matthiasdiener/spack,lgarren/spack,iulian787/spack,matthiasdiener/spack,skosukhin/spack,mfherbst/spack,tmerrick1/spack,lgarren/spack,EmreAtes/spack,matthiasdiener/spack,krafczyk/spack,skosukhin/spack,LLNL/spack,mfherbst/spack,EmreAtes/spack,skosukhin/spack,tmerrick1/spack,krafczyk/spack,lgarren/spack,mfherbst/spack,krafczyk/spack,mfherbst/spack,tmerrick1/spack,iulian787/spack,iulian787/spack,lgarren/spack,LLNL/spack,krafczyk/spack,matthiasdiener/spack,krafczyk/spack,matthiasdiener/spack,iulian787/spack,lgarren/spack,iulian787/spack,LLNL/spack,mfherbst/spack | var/spack/repos/builtin/packages/scalpel/package.py | var/spack/repos/builtin/packages/scalpel/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Scalpel(MakefilePackage):
    """Scalpel is a software package for detecting INDELs (INsertions and
    DELetions) mutations in a reference genome which has been sequenced
    with next-generation sequencing technology.
    """
    homepage = "http://scalpel.sourceforge.net/index.html"
    url      = "https://downloads.sourceforge.net/project/scalpel/scalpel-0.5.3.tar.gz"
    version('0.5.3', '682c9f1cd6ab2cb11c6866f303c673f0')
    depends_on('cmake')
    depends_on('perl@5.10.0:')
    # bamtools needs to build before the others.
    parallel = False
    @run_before('install')
    def filter_sbang(self):
        """Run before install so that the standard Spack sbang install hook
        can fix up the path to the perl|python binary.
        """
        with working_dir(self.stage.source_path):
            kwargs = {'ignore_absent': True, 'backup': False, 'string': False}
            # Rewrite '#!/usr/bin/env perl' shebangs to the Spack perl.
            match = '^#!/usr/bin/env perl'
            perl = self.spec['perl'].command
            substitute = "#!{perl}".format(perl=perl)
            files = ['FindDenovos.pl', 'scalpel-export',
                     'scalpel-discovery', 'FindVariants.pl',
                     'FindSomatic.pl']
            filter_file(match, substitute, *files, **kwargs)
    # Scalpel doesn't actually *have* an install step. The authors
    # expect you to unpack the tarball, build it in the resulting
    # directory, and add that directory to your PATH. The Perl
    # scripts use `FindBin` to discover the directory in which they
    # live and they run their own dedicated copies of {bam,sam}tools
    # and etc... by explicitly naming the executables in their directory.
    #
    # Rather than trying to fix their code I just copied the juicy
    # bits into prefix.bin. It's not normal, but....
    #
    def install(self, spec, prefix):
        destdir = prefix.bin  # see the note above....
        mkdirp(destdir)
        # Top-level driver scripts and Perl modules.
        files = ['FindSomatic.pl', 'HashesIO.pm', 'MLDBM.pm',
                 'scalpel-export', 'Utils.pm', 'FindDenovos.pl',
                 'FindVariants.pl', 'scalpel-discovery',
                 'SequenceIO.pm', 'Usage.pm']
        for f in files:
            install(f, destdir)
        # Bundled Perl library directories.
        dirs = ['Text', 'MLDBM', 'Parallel', ]
        for d in dirs:
            install_tree(d, join_path(destdir, d))
        # Bundled tool copies that the scripts invoke by relative path.
        install_tree('bamtools-2.3.0/bin',
                     join_path(destdir, 'bamtools-2.3.0', 'bin'))
        install_tree('bamtools-2.3.0/lib',
                     join_path(destdir, 'bamtools-2.3.0', 'lib'))
        mkdirp(join_path(destdir, 'bcftools-1.1'))
        install('bcftools-1.1/bcftools', join_path(destdir, 'bcftools-1.1'))
        mkdirp(join_path(destdir, 'Microassembler'))
        install('Microassembler/Microassembler',
                join_path(destdir, 'Microassembler'))
        mkdirp(join_path(destdir, 'samtools-1.1'))
        install('samtools-1.1/samtools', join_path(destdir, 'samtools-1.1'))
| lgpl-2.1 | Python |
|
1f731dcbfcff76ba63e4aea4fc05a15dd5021daa | Test for overriding sys.stdout. | pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython | tests/io/sys_stdio_override.py | tests/io/sys_stdio_override.py | try:
import uio as io
except ImportError:
try:
import io
except ImportError:
print("SKIP")
raise SystemExit
import sys
try:
sys.stdout = sys.stdout
except AttributeError:
print("SKIP")
raise SystemExit
buf = io.StringIO()
sys.stdout = buf
print(1, "test", 10 + 20)
| mit | Python |
|
855d10b768fbfec7772f8e5df4c181d971fe0dd4 | add tests. | hickford/primesieve-python | tests/test_primesieve_array.py | tests/test_primesieve_array.py | from primesieve.array import n_primes, primes
def assert_array_equal(have, want):
    """Assert that iterable *have*, coerced to a list, equals *want*."""
    assert want == list(have)
def test_primes_array():
    """primes() with one bound lists primes below it; with two, within them."""
    cases = (
        ((10,), [2, 3, 5, 7]),
        ((10, 20), [11, 13, 17, 19]),
    )
    for args, expected in cases:
        assert_array_equal(primes(*args), expected)
def test_n_primes_array():
    """n_primes() yields the first n primes, optionally from a start value."""
    cases = (
        ((7,), [2, 3, 5, 7, 11, 13, 17]),
        ((5, 100), [101, 103, 107, 109, 113]),
    )
    for args, expected in cases:
        assert_array_equal(n_primes(*args), expected)
| mit | Python |
|
9de475e8007b209d005ed222686cb46bddef053d | Integrate LLVM at llvm/llvm-project@9e37b1e5a0c1 | tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Int
el-tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "9e37b1e5a0c15f36c5642406d5aa02a657a0b19c"
LLVM_SHA256 = "e2cca91a76ee6b44a6af91874e429af582b248b96ccd139373fec69ed0b0215f"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = [
"//third_party/llvm:infer_type.patch", # TODO(b/231285230): remove once resolved
"//third_party/llvm:build.patch",
"//third_party/llvm:toolchains.patch",
],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "ab85996e475ceddfda82255c314229ac0c0f4994"
LLVM_SHA256 = "140b4198fa4f0ec1917a0e252feec5e19ccd9d7e96fc818c555b5551c796ec5b"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = [
"//third_party/llvm:infer_type.patch", # TODO(b/231285230): remove once resolved
"//third_party/llvm:build.patch",
"//third_party/llvm:toolchains.patch",
],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| apache-2.0 | Python |
c6b0b5a8cef752481d5ec6672313ec8829d4299f | Create saving.py | mauerflitza/Probieren2,mauerflitza/Probieren2,mauerflitza/Probieren2,mauerflitza/Probieren2 | Webpage/cgi-bin/saving.py | Webpage/cgi-bin/saving.py | #!/usr/bin/python3
import os
import os.path
import cgi, cgitb
import re
import pickle
#own packages
import dbcPattern
def dbc_main(): # NEW except for the call to processInput
    """CGI entry point: read the form fields and emit the regenerated page."""
    form = cgi.FieldStorage() # standard cgi script lines to here!
    # use format of next two lines with YOUR names and default data
    pagedata = form['webpage'].value
    SRate = form['SampleRates'].value
    StartVal = form['StartVal'].value
    if pagedata:
        contents = processInput(pagedata, SRate, StartVal) # process input into a page
        print(contents)
    # NOTE(review): always returns -1, even on success -- confirm callers
    # ignore this value.
    return -1
def processInput(pagedata, SRate, StartVal):
    """Persist the page body and its signal settings to ../saved.txt and
    return the regenerated HTML.

    :param pagedata: raw HTML of the page being saved
    :param SRate: comma-separated sample rates (may be empty)
    :param StartVal: comma-separated start values (may be empty)
    """
    # Default to empty lists so the section markers below are always
    # written -- the original left the lists undefined (NameError) when a
    # form field was empty.
    SRates_list = SRate.split(',') if SRate else []
    StartVal_list = StartVal.split(',') if StartVal else []
    i = 0
    file = open("../saved.txt", "w")
    file.write(pagedata)
    file.write("\nEndeHTML")
    # BUG FIX: the original used C-style 'i++' (a SyntaxError in Python)
    # and concatenated the int counter straight into the string.
    for rate in SRates_list:
        file.write('SampleRate ' + str(i) + ": " + rate)
        i += 1
    file.write("\nEndeRates")
    # BUG FIX: this loop wrote 'rate' (left over from the previous loop)
    # instead of the current start value.
    for value in StartVal_list:
        file.write('StartValue ' + str(i) + ": " + value)
        i += 1
    file.write("\nEndeValues")
    file.close()
    return createHTML()
def createHTML(sig_num=None, sig_list=None):
    """Rebuild the saved page: static header followed by the stored body.

    BUG FIX: the parameters now default to None because the only call site
    (processInput) invokes createHTML() with no arguments, which raised
    TypeError before.

    :param sig_num: currently unused; kept for interface compatibility
    :param sig_list: currently unused; kept for interface compatibility
    """
    with open("Header_Saved.html") as header_file:
        html_string = header_file.read()
    # NOTE(review): this re-reads Header_Saved.html, but the 'EndeHTML'
    # marker is only ever written to ../saved.txt by processInput -- this
    # probably should open "../saved.txt"; confirm before changing.
    with open("Header_Saved.html") as savings:
        for line in savings:
            if re.match("EndeHTML", line):
                break
            html_string += line
    return html_string
# Must later be copied into the main program.
try: # NEW
    cgitb.enable()
    print("Content-Type: text/html;charset:UTF-8") # say generating html
    print("\n\n")
    dbc_main()
except:
    # NOTE(review): bare except swallows everything (including SystemExit);
    # consider narrowing to Exception.
    cgi.print_exception() # catch and print errors
| mit | Python |
|
8c6335c7ba7ebb34566603eb2943752fd3f524db | Add Exercise 9.13. | jcrist/pydy,skidzo/pydy,jcrist/pydy,Shekharrajak/pydy,Shekharrajak/pydy,oliverlee/pydy,Shekharrajak/pydy,jcrist/pydy,oliverlee/pydy,jcrist/pydy,Shekharrajak/pydy,jcrist/pydy,skidzo/pydy,skidzo/pydy,jcrist/pydy,oliverlee/pydy,skidzo/pydy,jcrist/pydy | Kane1985/Chapter5/Ex9.13.py | Kane1985/Chapter5/Ex9.13.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 9.13 from Kane 1985."""
from __future__ import division
from sympy import expand, symbols
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy.physics.mechanics import dynamicsymbols
from util import msprint, partial_velocities
from util import function_from_partials, generalized_active_forces
# generalized coordinates, their time derivatives, and generalized speeds
q1, q2 = q = dynamicsymbols('q1:3')
q1d, q2d = qd = dynamicsymbols('q1:3', level=1)
u1, u2 = u = dynamicsymbols('u1:3')
# L' is the natural length of the springs
alpha, beta, L1, L2, k1, k2 = symbols('α β L1 L2 k1 k2',
                                      real=True, positive=True)
# reference frames
N = ReferenceFrame('N')
# define points
pO = Point('O') # point O is fixed on the wall
pB1 = pO.locatenew('B1', (L1 + q1)*N.x) # treat block 1 as a point mass
pB2 = pB1.locatenew('B2', (L2 + q2)*N.x) # treat block 2 as a point mass
pB1.set_vel(N, pB1.pos_from(pO).dt(N))
pB2.set_vel(N, pB2.pos_from(pO).dt(N))
# kinematic differential equations
kde_map = dict(zip(map(lambda x: x.diff(), q), u))
# forces
#spring_forces = [(pB1, -k1 * q1 * N.x),
#                 (pB1, k2 * q2 * N.x),
#                 (pB2, -k2 * q2 * N.x)]
dashpot_forces = [(pB1, beta * q2d * N.x),
                  (pB2, -beta * q2d * N.x),
                  (pB2, -alpha * (q1d + q2d) * N.x)]
#forces = spring_forces + dashpot_forces
# NOTE: zip(*...)[0] subscripts the zip result, so this script is
# Python 2 only (zip returns an iterator in Python 3).
partials_c = partial_velocities(zip(*dashpot_forces)[0], u, N, kde_map)
Fr_c, _ = generalized_active_forces(partials_c, dashpot_forces)
#print('generalized active forces due to dashpot forces')
#for i, fr in enumerate(Fr_c, 1):
#    print('(F{0})c = {1} = -∂ℱ/∂u{0}'.format(i, msprint(fr)))
# Recover the dissipation function whose negative u-partials reproduce
# the generalized active forces of the dashpots.
dissipation_function = function_from_partials(
    map(lambda x: -x.subs(kde_map), Fr_c), u, zero_constants=True)
print('ℱ = {0}'.format(msprint(dissipation_function)))
dissipation_function_expected = (alpha*u1**2 + 2*alpha*u1*u2 +
                                 (alpha + beta)*u2**2)/2
assert expand(dissipation_function - dissipation_function_expected) == 0
| bsd-3-clause | Python |
|
5a2f8967ac09b3aa1fc1cda21fd6dc5cf1d3f896 | Add gesture recognition prototype | aikikode/uspeak | gesture_recognition/__init__.py | gesture_recognition/__init__.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pygame
from pygame import camera
from pygame.constants import QUIT, K_ESCAPE, KEYDOWN
import numpy as np
class Capture(object):
    """Show a live frame-difference view of the first detected camera,
    as a prototype for motion/gesture detection."""
    def __init__(self):
        camera.init()
        # Capture/display resolution (width, height).
        self.size = (640, 480, )
        # create a display surface. standard pygame stuff
        self.display = pygame.display.set_mode(self.size, 0)
        # this is the same as what we saw before
        self.clist = pygame.camera.list_cameras()
        if not self.clist:
            raise ValueError("Sorry, no cameras detected.")
        self.cam = pygame.camera.Camera(self.clist[0], self.size)
        self.cam.start()
        # create a surface to capture to. for performance purposes
        # bit depth is the same as that of the display surface.
        self.snapshot = pygame.surface.Surface(self.size, 0, self.display)
        self.thresholded = pygame.surface.Surface(self.size, 0, self.display)
        # Previous frame's pixel array; None until the first frame arrives.
        self.previous_pixels = None
    def get_and_flip(self):
        """Grab a frame, replace it with its thresholded difference from
        the previous frame, and blit the result to the display."""
        # if you don't want to tie the framerate to the camera, you can check
        # if the camera has an image ready. note that while this works
        # on most cameras, some will never return true.
        if self.cam.query_image():
            self.snapshot = self.cam.get_image(self.snapshot)
            # NOTE(review): np.int is deprecated in modern NumPy; int works.
            pixels = pygame.surfarray.array3d(self.snapshot).astype(np.int) # np.int to make it signed
            if self.previous_pixels is not None:
                # Get image difference
                p = np.subtract(pixels, self.previous_pixels)
                # Reset all pixels below threshold
                threshold = 30
                bool_matrix = np.logical_and(p < threshold, p > -threshold)
                p[bool_matrix] = 0
                # p[np.invert(bool_matrix)] = 200
                # Show differential image
                self.snapshot = pygame.surfarray.make_surface(p)
            self.previous_pixels = pixels
        # blit it to the display surface. simple!
        self.display.blit(self.snapshot, (0,0))
        pygame.display.flip()
    def main(self):
        """Run the capture loop until the window is closed or Esc pressed."""
        going = True
        while going:
            events = pygame.event.get()
            for e in events:
                if e.type == QUIT or (e.type == KEYDOWN and e.key == K_ESCAPE):
                    # close the camera safely
                    self.cam.stop()
                    going = False
            self.get_and_flip()
# NOTE(review): runs at import time; consider guarding with
# `if __name__ == '__main__':` so importing the module stays side-effect free.
Capture().main()
| mit | Python |
|
41ee54414845c3d8c1592048fe2f7cee57153eee | Add Python Numpy and Pandas cheatsheet | HKuz/Test_Code | pythonCheatsheet.py | pythonCheatsheet.py | #!/usr/local/bin/python
# Python Numpy and Pandas Cheatsheet
| mit | Python |
|
a813d79ccd63c9ff40afaf3fda4e2c8c0a37ee25 | Add wsgi file | atsuyim/readthedocs.org,royalwang/readthedocs.org,asampat3090/readthedocs.org,atsuyim/readthedocs.org,sils1297/readthedocs.org,techtonik/readthedocs.org,mrshoki/readthedocs.org,michaelmcandrew/readthedocs.org,d0ugal/readthedocs.org,soulshake/readthedocs.org,emawind84/readthedocs.org,takluyver/readthedocs.org,LukasBoersma/readthedocs.org,titiushko/readthedocs.org,asampat3090/readthedocs.org,Carreau/readthedocs.org,jerel/readthedocs.org,safwanrahman/readthedocs.org,emawind84/readthedocs.org,pombredanne/readthedocs.org,agjohnson/readthedocs.org,sils1297/readthedocs.org,raven47git/readthedocs.org,clarkperkins/readthedocs.org,hach-que/readthedocs.org,wanghaven/readthedocs.org,soulshake/readthedocs.org,SteveViss/readthedocs.org,cgourlay/readthedocs.org,nikolas/readthedocs.org,VishvajitP/readthedocs.org,mhils/readthedocs.org,laplaceliu/readthedocs.org,sid-kap/readthedocs.org,espdev/readthedocs.org,kenwang76/readthedocs.org,soulshake/readthedocs.org,GovReady/readthedocs.org,hach-que/readthedocs.org,mrshoki/readthedocs.org,laplaceliu/readthedocs.org,techtonik/readthedocs.org,kdkeyser/readthedocs.org,KamranMackey/readthedocs.org,SteveViss/readthedocs.org,GovReady/readthedocs.org,Tazer/readthedocs.org,dirn/readthedocs.org,singingwolfboy/readthedocs.org,agjohnson/readthedocs.org,singingwolfboy/readthedocs.org,LukasBoersma/readthedocs.org,safwanrahman/readthedocs.org,nikolas/readthedocs.org,CedarLogic/readthedocs.org,GovReady/readthedocs.org,stevepiercy/readthedocs.org,techtonik/readthedocs.org,mhils/readthedocs.org,istresearch/readthedocs.org,clarkperkins/readthedocs.org,mhils/readthedocs.org,stevepiercy/readthedocs.org,sils1297/readthedocs.org,stevepiercy/readthedocs.org,jerel/readthedocs.org,kdkeyser/readthedocs.org,wijerasa/readthedocs.org,safwanrahman/readthedocs.org,hach-que/readthedocs.org,cgourlay/readthedocs.org,laplaceliu/readthedocs.org,kdkeyser/readthedocs.org,SteveViss/readthedocs.org,atsuyim/readthedocs.org
,jerel/readthedocs.org,davidfischer/readthedocs.org,davidfischer/readthedocs.org,royalwang/readthedocs.org,emawind84/readthedocs.org,kenwang76/readthedocs.org,sunnyzwh/readthedocs.org,CedarLogic/readthedocs.org,Carreau/readthedocs.org,fujita-shintaro/readthedocs.org,d0ugal/readthedocs.org,fujita-shintaro/readthedocs.org,fujita-shintaro/readthedocs.org,KamranMackey/readthedocs.org,takluyver/readthedocs.org,Carreau/readthedocs.org,tddv/readthedocs.org,d0ugal/readthedocs.org,titiushko/readthedocs.org,dirn/readthedocs.org,kenshinthebattosai/readthedocs.org,istresearch/readthedocs.org,KamranMackey/readthedocs.org,titiushko/readthedocs.org,Tazer/readthedocs.org,wanghaven/readthedocs.org,asampat3090/readthedocs.org,agjohnson/readthedocs.org,wijerasa/readthedocs.org,royalwang/readthedocs.org,espdev/readthedocs.org,sunnyzwh/readthedocs.org,sunnyzwh/readthedocs.org,attakei/readthedocs-oauth,pombredanne/readthedocs.org,attakei/readthedocs-oauth,davidfischer/readthedocs.org,tddv/readthedocs.org,wijerasa/readthedocs.org,mrshoki/readthedocs.org,tddv/readthedocs.org,davidfischer/readthedocs.org,mrshoki/readthedocs.org,kenwang76/readthedocs.org,rtfd/readthedocs.org,nikolas/readthedocs.org,raven47git/readthedocs.org,singingwolfboy/readthedocs.org,gjtorikian/readthedocs.org,attakei/readthedocs-oauth,istresearch/readthedocs.org,cgourlay/readthedocs.org,sid-kap/readthedocs.org,raven47git/readthedocs.org,pombredanne/readthedocs.org,emawind84/readthedocs.org,wijerasa/readthedocs.org,Carreau/readthedocs.org,michaelmcandrew/readthedocs.org,kenwang76/readthedocs.org,dirn/readthedocs.org,GovReady/readthedocs.org,agjohnson/readthedocs.org,fujita-shintaro/readthedocs.org,atsuyim/readthedocs.org,sid-kap/readthedocs.org,kenshinthebattosai/readthedocs.org,asampat3090/readthedocs.org,gjtorikian/readthedocs.org,rtfd/readthedocs.org,CedarLogic/readthedocs.org,sunnyzwh/readthedocs.org,CedarLogic/readthedocs.org,LukasBoersma/readthedocs.org,wanghaven/readthedocs.org,espdev/readthedocs.org,titiushko/re
adthedocs.org,gjtorikian/readthedocs.org,soulshake/readthedocs.org,istresearch/readthedocs.org,sid-kap/readthedocs.org,VishvajitP/readthedocs.org,clarkperkins/readthedocs.org,mhils/readthedocs.org,sils1297/readthedocs.org,Tazer/readthedocs.org,laplaceliu/readthedocs.org,VishvajitP/readthedocs.org,royalwang/readthedocs.org,cgourlay/readthedocs.org,espdev/readthedocs.org,takluyver/readthedocs.org,LukasBoersma/readthedocs.org,wanghaven/readthedocs.org,safwanrahman/readthedocs.org,raven47git/readthedocs.org,rtfd/readthedocs.org,takluyver/readthedocs.org,attakei/readthedocs-oauth,rtfd/readthedocs.org,gjtorikian/readthedocs.org,techtonik/readthedocs.org,michaelmcandrew/readthedocs.org,SteveViss/readthedocs.org,KamranMackey/readthedocs.org,dirn/readthedocs.org,Tazer/readthedocs.org,kenshinthebattosai/readthedocs.org,nikolas/readthedocs.org,hach-que/readthedocs.org,michaelmcandrew/readthedocs.org,kenshinthebattosai/readthedocs.org,VishvajitP/readthedocs.org,d0ugal/readthedocs.org,clarkperkins/readthedocs.org,stevepiercy/readthedocs.org,jerel/readthedocs.org,kdkeyser/readthedocs.org,singingwolfboy/readthedocs.org,espdev/readthedocs.org | readthedocs/wsgi.py | readthedocs/wsgi.py | import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| mit | Python |
|
7e9794dc98a268479f0f57128effc67f88586c8f | Add default message for list pages | nfletton/bvspca,nfletton/bvspca,nfletton/bvspca,nfletton/bvspca | bvspca/core/migrations/0025_auto_20180202_1214.py | bvspca/core/migrations/0025_auto_20180202_1214.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-02-02 19:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Give ContentIndexPage.empty_message a default so existing rows validate."""
    dependencies = [
        ('core', '0024_contentindexpage_empty_message'),
    ]
    operations = [
        # Adds default='Empty' to the field introduced in 0024.
        migrations.AlterField(
            model_name='contentindexpage',
            name='empty_message',
            field=models.CharField(default='Empty', max_length=200),
        ),
    ]
| mit | Python |
|
63db1dc6c23c4afd41bca5cf06207e383c982b51 | structure of the commandprocessor | omarayad1/cantkeepup,omarayad1/cantkeepup,omarayad1/cantkeepup | app/core/commandprocessor.py | app/core/commandprocessor.py | class CommandProcessor:
    def parseCommand(self):
        """Stub: parse a raw command string. Not implemented yet."""
        pass
    def constructUrl(self):
        """Stub: build the request URL for a parsed command. Not implemented yet."""
        pass
    def processCommand(self):
        """Stub: execute a parsed command. Not implemented yet."""
        pass
| mit | Python |
|
2c687118a9aa248d6e6f28259d8a81217ee9cb1d | add solution for Number of Digit One | zhyu/leetcode,zhyu/leetcode | algorithms/numberOfDigitOne/numberOfDigitOne.py | algorithms/numberOfDigitOne/numberOfDigitOne.py | class Solution:
# @param {integer} n
# @return {integer}
def countDigitOne(self, n):
res = prev = 0
x = 1
while n > 0: # n = 23[y]xxx
y = n % 10
n /= 10
if y > 1:
res += x # 23[2]xxx
elif y == 1:
res += prev + 1 # 23[1]xxx
res += n * x # 0[1]xxx ~ 22[1]xxx
prev += y * x
x *= 10
return res
| mit | Python |
|
67dfcd5abb73aff7fd416f665de0d8461ba3e8b4 | Create Subset.py | MariusWirtz/TM1py,OLAPLINE/TM1py | Tests/Subset.py | Tests/Subset.py | __author__ = 'Marius Wirtz'
from TM1py import TM1Queries, Subset
import uuid
import json
import unittest
class TestAnnotationMethods(unittest.TestCase):
    """Integration tests for TM1py Subset CRUD against a live TM1 server.

    NOTE(review): unittest runs methods in alphabetical order, so the actual
    order is create, delete, get, update -- the numbered comments below
    suggest create/get/update/delete was intended; confirm the fixtures
    survive that ordering.
    NOTE(review): credentials and port are hard-coded for a local test server.
    """
    q = TM1Queries(ip='', port=8008, user='admin', password='apple', ssl=True)
    # Random, filesystem-safe subset names so repeated runs do not collide.
    random_string1 = str(uuid.uuid4()).replace('-', '_')
    random_string2 = str(uuid.uuid4()).replace('-', '_')
    # 1. create subset
    def test_create_subset(self):
        """Create one static and one dynamic (MDX) subset; check echoed names."""
        s = Subset(dimension_name='plan_business_unit', subset_name=self.random_string1,
                   elements=['10110', '10300', '10210', '10000'])
        response = self.q.create_subset(s)
        print(response)
        response_as_dict = json.loads(response)
        name_in_response = response_as_dict['Name']
        self.assertEqual(self.random_string1, name_in_response)
        s = Subset(dimension_name='plan_business_unit', subset_name=self.random_string2,
                   expression='{ HIERARCHIZE( {TM1SUBSETALL( [plan_business_unit] )} ) }')
        response = self.q.create_subset(s)
        response_as_dict = json.loads(response)
        name_in_response = response_as_dict['Name']
        self.assertEqual(self.random_string2, name_in_response)
    # 2. get subset
    def test_get_subset(self):
        """Fetch pre-existing static and dynamic subsets; expect Subset objects."""
        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='static_subset_for_unit_test')
        self.assertIsInstance(s, Subset)
        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='dynamic_subset_for_unit_test')
        self.assertIsInstance(s, Subset)
    # 3. update subset
    def test_update_subset(self):
        """Round-trip an element addition and an expression change."""
        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='static_subset_for_unit_test')
        s.add_elements(['10110'])
        self.q.update_subset(s)
        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='dynamic_subset_for_unit_test')
        s.set_expression('{ HIERARCHIZE( {TM1SUBSETALL( [plan_business_unit] )} ) }')
        self.q.update_subset(s)
    # 4. delete subset
    def test_delete_subset(self):
        """Delete the two subsets created above; empty body means success."""
        response = self.q.delete_subset('plan_business_unit', self.random_string1)
        self.assertEqual(response, '')
        response = self.q.delete_subset('plan_business_unit', self.random_string2)
        self.assertEqual(response, '')
unittest.main()
| mit | Python |
|
93d6915c0e45d1873a48c298749d6956edbc337e | add remote ssh capability to fs-drift.py | parallel-fs-utils/fs-drift,bengland2/fsstress,bengland2/fsstress,parallel-fs-utils/fs-drift | ssh_thread.py | ssh_thread.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
ssh_thread.py -- manages parallel execution of shell commands on remote hosts
Copyright 2012 -- Ben England
Licensed under the Apache License at http://www.apache.org/licenses/LICENSE-2.0
See Appendix on this page for instructions pertaining to license.
'''
import threading
import os
# this class is just used to create a python thread
# for each remote host that we want to use as a workload generator
# the thread just executes an ssh command to run this program on a remote host
class ssh_thread(threading.Thread):
    """Worker thread that runs one shell command on a remote host via ssh
    and records the command's exit status in ``self.status``.
    """

    # -x: no X forwarding; StrictHostKeyChecking=no suits unattended batch runs.
    ssh_prefix = 'ssh -x -o StrictHostKeyChecking=no '

    def __init__(self, remote_host, remote_cmd_in):
        threading.Thread.__init__(self)
        self.remote_host = remote_host
        # Full local command line; the remote command is double-quoted so it
        # reaches ssh as a single argument.
        self.remote_cmd = '{0} {1} "{2}"'.format(
            self.ssh_prefix, self.remote_host, remote_cmd_in)
        self.status = None  # filled in by run()

    def __str__(self):
        return 'ssh-thread:{0}:{1}:{2}'.format(
            self.remote_host, str(self.status), self.remote_cmd)

    def run(self):
        # Blocks this thread until the remote command completes.
        self.status = os.system(self.remote_cmd)
| apache-2.0 | Python |
|
88e05bd1fe0f2e46e740a3d8d631d4a810c155a6 | Complete P8 | medifle/python_6.00.1x | Quiz/Problem8_satisfiesF.py | Quiz/Problem8_satisfiesF.py | def satisfiesF(L):
"""
Assumes L is a list of strings
Assume function f is already defined for you and it maps a string to a Boolean
Mutates L such that it contains all of the strings, s, originally in L such
that f(s) returns True, and no other elements
Returns the length of L after mutation
"""
Lclone = L[:]
for i in Lclone:
if not f(i):
L.remove(i)
return len(L)
#-----used for submitting-----
# run_satisfiesF(L, satisfiesF)
#-----test case-----
# def f(s):
# return 'a' in s
#
# L = ['a', 'b', 'bc', 'c', 'ab']
# print satisfiesF(L)
# print L | mit | Python |
|
820fe44762f0037eaacba9b7bf4129a29e25e799 | add migration | masschallenge/django-accelerator,masschallenge/django-accelerator | accelerator/migrations/0036_add_user_deferrable_modal.py | accelerator/migrations/0036_add_user_deferrable_modal.py | # Generated by Django 2.2.10 on 2021-03-03 17:08
from django.conf import settings
from django.db import (
migrations,
models,
)
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create UserDeferrableModal: tracks a user's deferral of a deferrable modal."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('accelerator', '0035_add_deferrable_modal_model'),
    ]
    operations = [
        migrations.CreateModel(
            name='UserDeferrableModal',
            fields=[
                ('id', models.AutoField(
                    auto_created=True,
                    primary_key=True,
                    serialize=False,
                    verbose_name='ID')),
                ('created_at', models.DateTimeField(
                    auto_now_add=True,
                    null=True)),
                ('updated_at', models.DateTimeField(
                    auto_now=True,
                    null=True)),
                ('is_deferred', models.BooleanField(default=False)),
                # Null deferred_to with is_deferred=True presumably means
                # "deferred indefinitely" -- confirm against application logic.
                ('deferred_to', models.DateTimeField(
                    blank=True,
                    null=True)),
                # Both FKs cascade: deleting the modal or the user removes the row.
                ('deferrable_modal', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    to=settings.ACCELERATOR_DEFERRABLEMODAL_MODEL)),
                ('user', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'User Deferrable Modal',
                'abstract': False,
                'managed': True,
                'swappable': None,
            },
        ),
    ]
| mit | Python |
|
a5cabf4b778d03cac472e22b0e62bc262796b5ff | Add tests for `cms.templatetags.pagination`. | jamesfoley/cms,danielsamuels/cms,danielsamuels/cms,dan-gamble/cms,jamesfoley/cms,lewiscollard/cms,lewiscollard/cms,jamesfoley/cms,jamesfoley/cms,dan-gamble/cms,lewiscollard/cms,danielsamuels/cms,dan-gamble/cms | cms/tests/test_templatetags_pagination.py | cms/tests/test_templatetags_pagination.py | from django.http import Http404
from django.test import RequestFactory, TestCase
from ..templatetags.pagination import paginate, pagination, pagination_url
class Object(object):
    """Minimal test double handed to pagination(); carries only ``paginator``."""
    paginator = None
class PaginationTest(TestCase):
    """Unit tests for the cms.templatetags.pagination template tags."""
    def setUp(self):
        # Every test starts from a plain GET / request.
        self.factory = RequestFactory()
        self.request = self.factory.get('/')
    def test_paginate(self):
        """An empty object list yields a single page; a page past the end is a 404."""
        paginate_response = paginate({'request': self.request}, [])
        self.assertEqual(repr(paginate_response), '<Page 1 of 1>')
        with self.assertRaises(Http404):
            self.request = self.factory.get('/?page=2')
            paginate({'request': self.request}, [])
    def test_pagination(self):
        """pagination() returns the context dict the inclusion template expects."""
        obj = Object()
        pagination_response = pagination({'request': self.request}, obj)
        self.assertDictEqual(pagination_response, {
            'paginator': None,
            'pagination_key': 'page',
            'page_obj': obj,
            'request': self.request,
        })
    def test_pagination_url(self):
        """Page 1 maps to the bare path; later pages gain a ?page= parameter."""
        self.assertEqual(pagination_url({'request': self.request}, 1), '/')
        self.assertEqual(pagination_url({'request': self.request}, 2), '/?page=2')
| bsd-3-clause | Python |
|
aa218407a9efdde9daa53d638fdfdacff873f14b | test change | dlresende/extreme-carpaccio,octo-technology/extreme-carpaccio,octo-technology/extreme-carpaccio,jak78/extreme-carpaccio,dlresende/extreme-carpaccio,dlresende/extreme-carpaccio,jak78/extreme-carpaccio,dlresende/extreme-carpaccio,jak78/extreme-carpaccio,dlresende/extreme-carpaccio,octo-technology/extreme-carpaccio,jak78/extreme-carpaccio,jak78/extreme-carpaccio,octo-technology/extreme-carpaccio,dlresende/extreme-carpaccio,dlresende/extreme-carpaccio,octo-technology/extreme-carpaccio,octo-technology/extreme-carpaccio,dlresende/extreme-carpaccio,jak78/extreme-carpaccio,dlresende/extreme-carpaccio,jak78/extreme-carpaccio,dlresende/extreme-carpaccio,octo-technology/extreme-carpaccio,jak78/extreme-carpaccio,dlresende/extreme-carpaccio,octo-technology/extreme-carpaccio,jak78/extreme-carpaccio,jak78/extreme-carpaccio,jak78/extreme-carpaccio,octo-technology/extreme-carpaccio,dlresende/extreme-carpaccio | clients/python/flask-server/tests/client_tests.py | clients/python/flask-server/tests/client_tests.py | """
integration test in python
!!!! NEED PYTHON 2.7.8
"""
import unittest
import urllib2
import urllib
import thread
import time
import json
from client import app
from multiprocessing import Process
class ServerHandlerTest(unittest.TestCase):
    """Integration tests: run the Flask app in a child process, hit it over HTTP.

    Requires Python 2 (urllib2); the served app comes from ``client.app``.
    """
    # One shared dev-server process for the whole test class.
    server = Process(target=app.run)
    @classmethod
    def setUpClass(cls):
        cls.server.start()
        time.sleep(1)  # give the dev server a moment to bind its port
    @classmethod
    def tearDownClass(cls):
        cls.server.terminate()
        cls.server.join()
    def assertContent(self, content, response):
        """Assert that some line of the response body equals ``content``.

        Fix: ``found`` is now initialised, so a missing line fails the
        assertion cleanly instead of raising NameError.
        """
        found = False
        for line in response.readlines():
            if line == content:
                found = True
                break
        self.assertTrue(found)
    def test_should_call_get(self):
        response = urllib2.urlopen("http://localhost:5000/")
        self.assertContent('hello world', response)
    def test_should_call_post_ping(self):
        data = urllib.urlencode({'q': 'Ping'})
        response = urllib2.urlopen("http://localhost:5000/ping", data)
        self.assertContent('pong', response)
    def test_should_call_post_order(self):
        # JSON body with explicit content type; expect a JSON total back.
        req = urllib2.Request('http://localhost:5000/order')
        req.add_header('Content-Type', 'application/json')
        response = urllib2.urlopen(req, json.dumps({'q': 'Path'}))
        self.assertEqual(json.loads(response.read()), {u'total' : 1000})
    @unittest.expectedFailure
    def test_should_call_post_unknown(self):
        # Unknown route: urlopen raising (HTTP error) is the expected outcome.
        data = urllib.urlencode({'answer': 'hello'})
        urllib2.urlopen("http://localhost:5000/unknown", data)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |
|
e2ed85ae1bb3f647095abb00b118cf06ae7549aa | add setup (even if not really needed) | kif/HPP,kif/HPP | 0_Python/setup.py | 0_Python/setup.py | #!/usr/bin/python
from distutils.core import setup
from Cython.Distutils import build_ext
from distutils.extension import Extension
# Single Cython extension compiled from inside_polygon.pyx.
cy_mod = Extension("inside_polygon",
                   sources= ["inside_polygon.pyx"])
# cython's build_ext handles the .pyx -> C -> extension pipeline.
setup(ext_modules=[cy_mod],
      cmdclass={'build_ext': build_ext})
| mit | Python |
|
cd1c88c519a7079b2cef752473e5da3ddb4224e3 | Add stress package (#3695) | iulian787/spack,LLNL/spack,skosukhin/spack,skosukhin/spack,matthiasdiener/spack,tmerrick1/spack,iulian787/spack,krafczyk/spack,LLNL/spack,TheTimmy/spack,iulian787/spack,lgarren/spack,mfherbst/spack,tmerrick1/spack,EmreAtes/spack,lgarren/spack,skosukhin/spack,EmreAtes/spack,krafczyk/spack,EmreAtes/spack,krafczyk/spack,EmreAtes/spack,tmerrick1/spack,LLNL/spack,TheTimmy/spack,LLNL/spack,tmerrick1/spack,lgarren/spack,matthiasdiener/spack,LLNL/spack,EmreAtes/spack,lgarren/spack,TheTimmy/spack,mfherbst/spack,tmerrick1/spack,skosukhin/spack,krafczyk/spack,TheTimmy/spack,matthiasdiener/spack,TheTimmy/spack,iulian787/spack,matthiasdiener/spack,matthiasdiener/spack,mfherbst/spack,lgarren/spack,mfherbst/spack,skosukhin/spack,iulian787/spack,mfherbst/spack,krafczyk/spack | var/spack/repos/builtin/packages/stress/package.py | var/spack/repos/builtin/packages/stress/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Stress(AutotoolsPackage):
    """stress is a deliberately simple workload generator for POSIX systems.
    It imposes a configurable amount of CPU, memory, I/O, and disk stress on
    the system. It is written in C, and is free software licensed under the
    GPLv2."""
    homepage = "https://people.seas.harvard.edu/~apw/stress/"
    url = "https://people.seas.harvard.edu/~apw/stress/stress-1.0.4.tar.gz"
    # Second argument is the tarball's md5 checksum.
    version('1.0.4', '890a4236dd1656792f3ef9a190cf99ef')
| lgpl-2.1 | Python |
|
9ce90bc43cfcc5a56be958671f304e7929eb0446 | Add missing migration step dua changes in model | nimbis/cmsplugin-collapse,nimbis/cmsplugin-collapse | cmsplugin_collapse/migrations/0002_auto_20160210_0651.py | cmsplugin_collapse/migrations/0002_auto_20160210_0651.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add a default and help text to AccordionHeader.show_first."""
    dependencies = [
        ('cmsplugin_collapse', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='accordionheader',
            name='show_first',
            field=models.BooleanField(default=True, help_text='If selected, the first collapsible will be displayed in the open state.'),
            preserve_default=True,
        ),
    ]
| bsd-3-clause | Python |
|
b0ea743fa320f0df6e35b4381e6bd778906a5532 | Add caching mechanism | gateway4labs/labmanager,labsland/labmanager,morelab/labmanager,labsland/labmanager,morelab/labmanager,porduna/labmanager,porduna/labmanager,labsland/labmanager,porduna/labmanager,porduna/labmanager,gateway4labs/labmanager,go-lab/labmanager,morelab/labmanager,morelab/labmanager,go-lab/labmanager,gateway4labs/labmanager,labsland/labmanager,go-lab/labmanager,gateway4labs/labmanager,go-lab/labmanager | labmanager/rlms/caches.py | labmanager/rlms/caches.py | import calendar
import time
from email.utils import formatdate, parsedate, parsedate_tz

import requests
from cachecontrol import CacheControl
from cachecontrol.caches import FileCache
from cachecontrol.heuristics import LastModified, TIME_FMT
class LastModifiedNoDate(LastModified):
    """ This takes the original LastModified implementation of
    cachecontrol, but defaults the date in case it is not provided.

    NOTE(review): this module calls time.time()/time.strftime() and
    requests.Session() but does not import ``time`` or ``requests`` at
    module scope -- confirm those imports exist, else these calls raise
    NameError at runtime.
    """
    def __init__(self, require_date = True, error_margin = None):
        # Default margin is stricter (0.1) when a Date header is required,
        # looser (0.2) when we are willing to fake the date ourselves.
        if error_margin is None:
            if require_date:
                self.error_margin = 0.1
            else:
                self.error_margin = 0.2
        else:
            self.error_margin = error_margin
        self.require_date = require_date
    def update_headers(self, resp):
        """Return extra headers ({'expires': ...}) making resp cacheable,
        or {} to leave the response alone."""
        headers = resp.headers
        # Server already controls caching explicitly -- do not override.
        if 'expires' in headers:
            return {}
        if 'cache-control' in headers and headers['cache-control'] != 'public':
            return {}
        if resp.status not in self.cacheable_by_default_statuses:
            return {}
        if 'last-modified' not in headers:
            return {}
        parsed_date = parsedate_tz(headers.get('date'))
        if self.require_date and parsed_date is None:
            return {}
        if parsed_date is None:
            # No Date header: substitute "now" and remember we faked it.
            date = time.time()
            faked_date = True
        else:
            date = calendar.timegm(parsed_date)
            faked_date = False
        last_modified = parsedate(headers['last-modified'])
        if last_modified is None:
            return {}
        now = time.time()
        current_age = max(0, now - date)
        # Heuristic freshness: a fraction of the Date/Last-Modified gap,
        # capped at 24 hours.
        delta = date - calendar.timegm(last_modified)
        freshness_lifetime = max(0, min(delta * self.error_margin, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}
        expires = date + freshness_lifetime
        new_headers = {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}
        if faked_date:
            # Expose the date we invented so downstream age math stays consistent.
            new_headers['date'] = time.strftime(TIME_FMT, time.gmtime(date))
        return new_headers
    def warning(self, resp):
        # Suppress the "113 - Heuristic Expiration" warning header.
        return None
def get_cached_session():
    """Return a requests session wrapped with file-backed HTTP caching.

    Cache files live under ./web_cache; freshness is decided by
    LastModifiedNoDate even when the server omits a Date header.
    NOTE(review): ``requests`` is not imported at module scope -- confirm.
    """
    CACHE_DIR = 'web_cache'
    return CacheControl(requests.Session(),
            cache=FileCache(CACHE_DIR), heuristic=LastModifiedNoDate(require_date=False))
| bsd-2-clause | Python |
|
b6ac6a73cf10372be3384dbeb99b82b137a9daa2 | Use chevrons instead of arrows in sortable links | sonofatailor/django-oscar,vovanbo/django-oscar,DrOctogon/unwash_ecom,Jannes123/django-oscar,ahmetdaglarbas/e-commerce,sonofatailor/django-oscar,amirrpp/django-oscar,ademuk/django-oscar,dongguangming/django-oscar,sonofatailor/django-oscar,eddiep1101/django-oscar,ka7eh/django-oscar,adamend/django-oscar,nickpack/django-oscar,WadeYuChen/django-oscar,nickpack/django-oscar,Bogh/django-oscar,django-oscar/django-oscar,lijoantony/django-oscar,jinnykoo/christmas,rocopartners/django-oscar,jmt4/django-oscar,Idematica/django-oscar,makielab/django-oscar,ka7eh/django-oscar,okfish/django-oscar,pasqualguerrero/django-oscar,jlmadurga/django-oscar,mexeniz/django-oscar,DrOctogon/unwash_ecom,ademuk/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,saadatqadri/django-oscar,jmt4/django-oscar,anentropic/django-oscar,taedori81/django-oscar,itbabu/django-oscar,bschuon/django-oscar,WillisXChen/django-oscar,dongguangming/django-oscar,QLGu/django-oscar,john-parton/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,adamend/django-oscar,kapt/django-oscar,jinnykoo/christmas,WillisXChen/django-oscar,michaelkuty/django-oscar,monikasulik/django-oscar,marcoantoniooliveira/labweb,pdonadeo/django-oscar,binarydud/django-oscar,jmt4/django-oscar,michaelkuty/django-oscar,manevant/django-oscar,marcoantoniooliveira/labweb,josesanch/django-oscar,nfletton/django-oscar,adamend/django-oscar,DrOctogon/unwash_ecom,WadeYuChen/django-oscar,jinnykoo/wuyisj,okfish/django-oscar,rocopartners/django-oscar,bnprk/django-oscar,MatthewWilkes/django-oscar,eddiep1101/django-oscar,anentropic/django-oscar,bnprk/django-oscar,ademuk/django-oscar,Jannes123/django-oscar,taedori81/django-oscar,spartonia/django-oscar,machtfit/django-oscar,nickpack/django-oscar,makielab/django-oscar,QLGu/django-oscar,makielab/django-oscar,machtfit/django-oscar,Bogh/django-oscar,dongguangming/django-oscar,MatthewWilkes/django-oscar,solarissmoke/django
-oscar,elliotthill/django-oscar,taedori81/django-oscar,kapari/django-oscar,Bogh/django-oscar,Jannes123/django-oscar,amirrpp/django-oscar,bschuon/django-oscar,taedori81/django-oscar,pasqualguerrero/django-oscar,ahmetdaglarbas/e-commerce,bschuon/django-oscar,sasha0/django-oscar,saadatqadri/django-oscar,amirrpp/django-oscar,mexeniz/django-oscar,faratro/django-oscar,kapari/django-oscar,thechampanurag/django-oscar,jinnykoo/christmas,dongguangming/django-oscar,josesanch/django-oscar,machtfit/django-oscar,spartonia/django-oscar,QLGu/django-oscar,okfish/django-oscar,Jannes123/django-oscar,mexeniz/django-oscar,jlmadurga/django-oscar,lijoantony/django-oscar,solarissmoke/django-oscar,WillisXChen/django-oscar,john-parton/django-oscar,saadatqadri/django-oscar,bnprk/django-oscar,kapari/django-oscar,binarydud/django-oscar,elliotthill/django-oscar,ka7eh/django-oscar,pdonadeo/django-oscar,manevant/django-oscar,MatthewWilkes/django-oscar,manevant/django-oscar,vovanbo/django-oscar,jinnykoo/wuyisj.com,okfish/django-oscar,jinnykoo/wuyisj.com,WillisXChen/django-oscar,jinnykoo/wuyisj,monikasulik/django-oscar,michaelkuty/django-oscar,Bogh/django-oscar,faratro/django-oscar,Idematica/django-oscar,solarissmoke/django-oscar,manevant/django-oscar,saadatqadri/django-oscar,thechampanurag/django-oscar,amirrpp/django-oscar,thechampanurag/django-oscar,pasqualguerrero/django-oscar,rocopartners/django-oscar,john-parton/django-oscar,sasha0/django-oscar,faratro/django-oscar,monikasulik/django-oscar,ahmetdaglarbas/e-commerce,elliotthill/django-oscar,marcoantoniooliveira/labweb,itbabu/django-oscar,solarissmoke/django-oscar,ademuk/django-oscar,WillisXChen/django-oscar,WillisXChen/django-oscar,eddiep1101/django-oscar,spartonia/django-oscar,monikasulik/django-oscar,lijoantony/django-oscar,lijoantony/django-oscar,nickpack/django-oscar,itbabu/django-oscar,makielab/django-oscar,anentropic/django-oscar,ahmetdaglarbas/e-commerce,kapari/django-oscar,django-oscar/django-oscar,WadeYuChen/django-oscar,thechampanurag/
django-oscar,eddiep1101/django-oscar,django-oscar/django-oscar,django-oscar/django-oscar,vovanbo/django-oscar,WadeYuChen/django-oscar,spartonia/django-oscar,mexeniz/django-oscar,binarydud/django-oscar,jlmadurga/django-oscar,pasqualguerrero/django-oscar,adamend/django-oscar,pdonadeo/django-oscar,ka7eh/django-oscar,john-parton/django-oscar,marcoantoniooliveira/labweb,jmt4/django-oscar,nfletton/django-oscar,nfletton/django-oscar,sasha0/django-oscar,pdonadeo/django-oscar,jlmadurga/django-oscar,QLGu/django-oscar,MatthewWilkes/django-oscar,faratro/django-oscar,vovanbo/django-oscar,jinnykoo/wuyisj.com,jinnykoo/wuyisj,kapt/django-oscar,binarydud/django-oscar,jinnykoo/wuyisj,nfletton/django-oscar,kapt/django-oscar,rocopartners/django-oscar,josesanch/django-oscar,Idematica/django-oscar,anentropic/django-oscar,bnprk/django-oscar,bschuon/django-oscar,michaelkuty/django-oscar | oscar/templatetags/sorting_tags.py | oscar/templatetags/sorting_tags.py | # This is a rewrite of django-sorting but with added support for i18n title
# strings.
# See https://github.com/directeur/django-sorting
from django import template
from django.conf import settings
register = template.Library()
DEFAULT_SORT_UP = getattr(
settings, 'DEFAULT_SORT_UP',
'<i class="icon-chevron-up"></i>')
DEFAULT_SORT_DOWN = getattr(
settings, 'DEFAULT_SORT_DOWN',
'<i class="icon-chevron-down"></i>')
sort_directions = {
'asc': {'icon': DEFAULT_SORT_UP, 'inverse': 'desc'},
'desc': {'icon': DEFAULT_SORT_DOWN, 'inverse': 'asc'},
'': {'icon': DEFAULT_SORT_DOWN, 'inverse': 'asc'},
}
def anchor(parser, token):
    """Compile the {% anchor field [title] %} tag.

    The title defaults to the capitalized field name when omitted.
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise template.TemplateSyntaxError(
            "anchor tag takes at least 1 argument")
    try:
        title = bits[2]
    except IndexError:
        title = bits[1].capitalize()
    return SortAnchorNode(bits[1].strip(), title.strip())
class SortAnchorNode(template.Node):
    """Renders an <a> that sorts the current listing by ``field``,
    toggling direction and showing a chevron icon on the active column."""
    def __init__(self, field, title):
        # Both are template variables so they can be literals or context names.
        self.field = template.Variable(field)
        self.title = template.Variable(title)
    def render(self, context):
        field = self.field.resolve(context)
        title = self.title.resolve(context)
        request = context['request']
        get_vars = request.GET.copy()
        sort_field = get_vars.pop('sort', [None])[0]
        icon = ''
        if sort_field == field:
            # We are already sorting on this field, so we set the inverse
            # direction within the GET params that get used within the href.
            # NOTE(review): a 'dir' value other than ''/'asc'/'desc' would
            # raise KeyError here -- confirm inputs are sanitised upstream.
            direction = get_vars.pop('dir', [''])[0]
            get_vars['dir'] = sort_directions[direction]['inverse']
            icon = sort_directions[direction]['icon']
        href = u'%s?sort=%s' % (request.path, field)
        if len(get_vars) > 0:
            # Preserve the remaining query-string parameters.
            href += "&%s" % get_vars.urlencode()
        if icon:
            title = u"%s %s" % (title, icon)
        return u'<a href="%s">%s</a>' % (href, title)
anchor = register.tag(anchor)
| # This is a rewrite of django-sorting but with added support for i18n title
# strings.
# See https://github.com/directeur/django-sorting
from django import template
from django.conf import settings
register = template.Library()
DEFAULT_SORT_UP = getattr(settings, 'DEFAULT_SORT_UP', '↑')
DEFAULT_SORT_DOWN = getattr(settings, 'DEFAULT_SORT_DOWN', '↓')
sort_directions = {
'asc': {'icon': DEFAULT_SORT_UP, 'inverse': 'desc'},
'desc': {'icon': DEFAULT_SORT_DOWN, 'inverse': 'asc'},
'': {'icon': DEFAULT_SORT_DOWN, 'inverse': 'asc'},
}
def anchor(parser, token):
    """Compile {% anchor field [title] %}; title defaults to the field name
    capitalized. (Older revision of this file retained in the dump.)"""
    bits = token.split_contents()
    if len(bits) < 2:
        raise template.TemplateSyntaxError(
            "anchor tag takes at least 1 argument")
    try:
        title = bits[2]
    except IndexError:
        title = bits[1].capitalize()
    return SortAnchorNode(bits[1].strip(), title.strip())
class SortAnchorNode(template.Node):
    """Renders a sorting <a> for ``field`` with an arrow marker on the
    active column. (Older revision of this file retained in the dump.)"""
    def __init__(self, field, title):
        self.field = template.Variable(field)
        self.title = template.Variable(title)
    def render(self, context):
        field = self.field.resolve(context)
        title = self.title.resolve(context)
        request = context['request']
        get_vars = request.GET.copy()
        sort_field = get_vars.pop('sort', [None])[0]
        icon = ''
        if sort_field == field:
            # We are already sorting on this field, so we set the inverse
            # direction within the GET params that get used within the href.
            direction = get_vars.pop('dir', [''])[0]
            get_vars['dir'] = sort_directions[direction]['inverse']
            icon = sort_directions[direction]['icon']
        href = u'%s?sort=%s' % (request.path, field)
        if len(get_vars) > 0:
            # Preserve the remaining query-string parameters.
            href += "&%s" % get_vars.urlencode()
        if icon:
            title = u"%s %s" % (title, icon)
        return u'<a href="%s">%s</a>' % (href, title)
| bsd-3-clause | Python |
38f28bd0e5d4ea5af69ac7ccc553403a85ac61be | add problem 053 | smrmkt/project_euler | problem_053.py | problem_053.py | #!/usr/bin/env python
#-*-coding:utf-8-*-
'''
There are exactly ten ways of selecting three from five, 12345:
123, 124, 125, 134, 135, 145, 234, 235, 245, and 345
In combinatorics, we use the notation, 5C3 = 10.
In general,
nCr =
n!
r!(n−r)!
,where r ≤ n, n! = n×(n−1)×...×3×2×1, and 0! = 1.
It is not until n = 23, that a value exceeds one-million: 23C10 = 1144066.
How many, not necessarily distinct, values of nCr,
for 1 ≤ n ≤ 100, are greater than one-million?
'''
from math import factorial
import timeit
def calc(nlim, lim):
    """Count values of nCr, for 1 <= n <= nlim and 0 <= r <= n, exceeding lim.

    Project Euler 53. nCr is computed exactly with integer arithmetic:
    r!(n-r)! always divides n!, so floor division loses nothing.
    """
    cnt = 0
    for n in range(nlim+1):
        for r in range(n+1):
            # '//' (not '/') matters on Python 3: true division would yield
            # a float and silently lose precision for factorials near 100!.
            ncr = factorial(n) // (factorial(r) * factorial(n-r))
            if ncr > lim:
                cnt += 1
    return cnt
if __name__ == '__main__':
print calc(100, 1000000)
print timeit.Timer('problem_053.calc(100, 1000000)', 'import problem_053').timeit(1)
| mit | Python |
|
ac2d5c10e7895515acd63e2ca91924e99ec17003 | add (Failing) test | salilab/rmf,salilab/rmf,salilab/rmf,salilab/rmf | test/test_writing.py | test/test_writing.py | import RMF
# Smoke test (Python 2): write a small RMF file, reopen it read-only, and
# check the particle data round-trips.
RMF.set_log_level("trace")
path = RMF._get_temporary_file_path("writing.rmf")
print path
fh = RMF.create_rmf_file(path)
fh.add_frame("frame", RMF.FRAME)
fn = fh.get_root_node().add_child("frag", RMF.REPRESENTATION)
pf = RMF.ParticleFactory(fh)
ff = RMF.FragmentFactory(fh)
pf.get(fn).set_radius(1.0)
pf.get(fn).set_mass(2.0)
pf.get(fn).set_coordinates([1,2,3])
ff.get(fn).set_indexes([1,2,3,4])
# Dropping the handle closes/flushes the file before reopening.
del fh
fh = RMF.open_rmf_file_read_only(path)
fh.set_current_frame(RMF.FrameID(0))
fn = fh.get_root_node().get_children()[0]
pf = RMF.ParticleFactory(fh)
# The reloaded node must still be recognised as a particle.
assert(pf.get_is(fn))
|
3c7b4f727f5f4a061e3e2d8bcabdc007175ab4db | Add cache structures from pycrest (as base work) | Kyria/EsiPy,a-tal/EsiPy | esipy/cache.py | esipy/cache.py | # -*- encoding: utf-8 -*-
import hashlib
import zlib
import os
try:
import pickle
except ImportError: # pragma: no cover
import cPickle as pickle
import logging
logger = logging.getLogger("esipy.cache")
class BaseCache(object):
    """Abstract base for EsiPy caches.

    Subclasses implement put/get/invalidate; ``_hash`` gives them a common,
    stable cache-key derivation.
    """
    def put(self, key, value):
        raise NotImplementedError
    def get(self, key):
        raise NotImplementedError
    def invalidate(self, key):
        raise NotImplementedError
    def _hash(self, data):
        digest = hashlib.md5(pickle.dumps(data)).hexdigest()
        # The 'pyc_' prefix lets multiple applications share one keyspace
        # without colliding.
        return 'pyc_' + digest
class FileCache(BaseCache):
    """Persistent cache: each entry is pickled, zlib-compressed and written
    to its own file under ``path``; a dict acts as an in-memory front layer.
    """
    def __init__(self, path):
        self._cache = {}
        self.path = path
        # Create the cache directory (owner-only) on first use.
        if not os.path.isdir(self.path):
            os.mkdir(self.path, 0o700)
    def _getpath(self, key):
        filename = self._hash(key) + '.cache'
        return os.path.join(self.path, filename)
    def put(self, key, value):
        payload = zlib.compress(
            pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
        with open(self._getpath(key), 'wb') as cache_file:
            cache_file.write(payload)
        self._cache[key] = value
    def get(self, key):
        # Serve from memory when possible, then fall back to disk.
        if key in self._cache:
            return self._cache[key]
        try:
            with open(self._getpath(key), 'rb') as cache_file:
                return pickle.loads(zlib.decompress(cache_file.read()))
        except IOError as ex:
            logger.debug('IOError: {0}'.format(ex))
            if ex.errno == 2:  # file does not exist (yet) -> cache miss
                return None
            raise
    def invalidate(self, key):
        self._cache.pop(key, None)
        try:
            os.unlink(self._getpath(key))
        except OSError as ex:
            if ex.errno == 2:  # already gone -- nothing to do
                return
            raise
class DictCache(BaseCache):
    """Volatile cache backed by a plain in-memory dict."""
    def __init__(self):
        self._dict = {}
    def put(self, key, value):
        self._dict[key] = value
    def get(self, key):
        # Missing keys read as None rather than raising.
        return self._dict.get(key, None)
    def invalidate(self, key):
        # pop with a default so invalidating an absent key is a no-op.
        self._dict.pop(key, None)
class DummyCache(BaseCache):
    """No-op cache: disables caching without changing call sites.

    get() always reports a miss; put()/invalidate() discard their arguments.
    Fix: the original allocated an unused ``self._dict`` per instance --
    removed, since nothing ever read it.
    """
    def get(self, key):
        # Always a miss.
        return None
    def put(self, key, value):
        pass
    def invalidate(self, key):
        pass
class MemcachedCache(BaseCache):
    """Cache backed by a memcached server via python-memcache."""
    def __init__(self, memcache_client):
        """Wrap an existing memcache.Client instance.

        The import is deferred so python-memcache is only required when this
        backend is actually used.
        """
        import memcache
        if not isinstance(memcache_client, memcache.Client):
            raise ValueError('cache must be an instance of memcache.Client')
        self._mc = memcache_client
    def get(self, key):
        # memcache.Client.get returns None on a miss.
        return self._mc.get(self._hash(key))
    def put(self, key, value):
        return self._mc.set(self._hash(key), value)
    def invalidate(self, key):
        return self._mc.delete(self._hash(key))
| bsd-3-clause | Python |
|
492d90e1197803f2dbce0b07417d12497c9031fe | Implement away-notify | Heufneutje/txircd,ElementalAlchemist/txircd | txircd/modules/ircv3/awaynotify.py | txircd/modules/ircv3/awaynotify.py | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class AwayNotify(ModuleData):
    """Implements the IRCv3 away-notify capability: pushes AWAY state changes
    to clients that negotiated the capability."""
    implements(IPlugin, IModuleData)
    name = "AwayNotify"
    def actions(self):
        # Hook user metadata changes (to broadcast) and CAP LS (to advertise).
        return [ ("usermetadataupdate", 10, self.sendAwayNotice),
            ("capabilitylist", 10, self.addCapability) ]
    def load(self):
        # If this is a reload, just clear the unload marker; otherwise
        # advertise the new capability to connected clients.
        if "unloading-away-notify" in self.ircd.dataCache:
            del self.ircd.dataCache["unloading-away-notify"]
            return
        if "cap-add" in self.ircd.functionCache:
            self.ircd.functionCache["cap-add"]("away-notify")
    def unload(self):
        # Mark a soft unload so a subsequent load() knows not to re-advertise.
        self.ircd.dataCache["unloading-away-notify"] = True
    def fullUnload(self):
        del self.ircd.dataCache["unloading-away-notify"]
        if "cap-del" in self.ircd.functionCache:
            self.ircd.functionCache["cap-del"]("away-notify")
    def addCapability(self, capList):
        capList.append("away-notify")
    def sendAwayNotice(self, user, key, oldValue, value, visibility, setByUser, fromServer):
        """Broadcast AWAY (with message) or un-AWAY to capable clients."""
        if key != "away":
            return
        if value:
            for noticeUser in self.ircd.users.itervalues():
                if "capabilities" in noticeUser.cache and "away-notify" in noticeUser.cache["capabilities"]:
                    noticeUser.sendMessage("AWAY", value, sourceuser=user)
        else:
            # No value means the user is back: AWAY with no parameter.
            for noticeUser in self.ircd.users.itervalues():
                if "capabilities" in noticeUser.cache and "away-notify" in noticeUser.cache["capabilities"]:
                    noticeUser.sendMessage("AWAY", sourceuser=user)
awayNotify = AwayNotify() | bsd-3-clause | Python |
|
a6ca9fdb71eacffe94fad476712650f82870bb2e | Add base code for choosing solver | fangohr/oommf-python,ryanpepper/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python | pyoommf/sim.py | pyoommf/sim.py | import os
from drivers.llg import LLG
import oommfmif as o
class Sim(object):
def __init__(self, mesh, Ms, name=None):
self.mesh = mesh
self.Ms = Ms
self.name = name
self.gamma = 2.21e5
self.energies = []
self.N_Sims_Run = 0
# Want some kind of persistent 'knowledge' of number of runs
# and the sequence these occur in for data analysis
# when we call a simulation multiple times to either
# advance time or change parameters. Will need to think carefully
# about situations such as changing H_applied - should we recreate this
# data from the output files?
# Advantage of this is recreating sim object if needed.
def add(self, energy):
self.energies.append(energy)
def set_solver(self, solver='rk4'):
"""
Available solvers in OOMMF:
rk2, rk2heun, rk4, rkf54, rkf54m, rkf54s
"""
def set_m(self, m_init):
self.m_init = m_init
def create_mif(self, overwrite=True):
if self.name is None:
self.name = 'unnamed'
self.mif_filename = self.name + '_iteration' + \
str(self.N_Sims_Run) + '.mif'
if os.path.isfile(self.mif_filename):
print("DEBUG: This simulation name already exists.")
print("DEBUG: Overwriting MIF.")
mif_file = open(self.mif_filename, 'w')
mif_file.write('# MIF 2.1\n\n')
mif_file.write(self.mesh.atlas_mif())
mif_file.write(self.mesh.mesh_mif())
for energy in self.energies:
mif_file.write(energy.get_mif())
mif_file.write(self.llg.get_mif())
mif_file.write('Destination mags mmArchive\n\n')
mif_file.write(
'Schedule Oxs_TimeDriver::Magnetization mags Stage 1\n\n')
mif_file.close()
def run_until(self, t, alpha=0.1, gamma=2.21e5):
self.llg = LLG(t, self.m_init, self.Ms, alpha, gamma, name=self.name)
self.create_mif()
self.execute_mif()
def execute_mif(self):
path = o.retrieve_oommf_path()
executable = o.retrieve_oommf_executable(path)
process = o.call_oommf('boxsi ' + self.mif_filename)
process.wait()
| import os
from drivers.llg import LLG
import oommfmif as o
class Sim(object):
def __init__(self, mesh, Ms, name=None):
self.mesh = mesh
self.Ms = Ms
self.name = name
self.gamma = 2.21e5
self.energies = []
self.N_Sims_Run = 0
# Want some kind of persistent 'knowledge' of number of runs
# and the sequence these occur in for data analysis
# when we call a simulation multiple times to either
# advance time or change parameters. Will need to think carefully
# about situations such as changing H_applied - should we recreate this
# data from the output files?
# Advantage of this is recreating sim object if needed.
def add(self, energy):
self.energies.append(energy)
def set_m(self, m_init):
self.m_init = m_init
def create_mif(self, overwrite=True):
if self.name is None:
self.name = 'unnamed'
self.mif_filename = self.name + '_iteration' + \
str(self.N_Sims_Run) + '.mif'
if os.path.isfile(self.mif_filename):
print("DEBUG: This simulation name already exists.")
print("DEBUG: Overwriting MIF.")
mif_file = open(self.mif_filename, 'w')
mif_file.write('# MIF 2.1\n\n')
mif_file.write(self.mesh.atlas_mif())
mif_file.write(self.mesh.mesh_mif())
for energy in self.energies:
mif_file.write(energy.get_mif())
mif_file.write(self.llg.get_mif())
mif_file.write('Destination mags mmArchive\n\n')
mif_file.write(
'Schedule Oxs_TimeDriver::Magnetization mags Stage 1\n\n')
mif_file.close()
def run_until(self, t, alpha=0.1, gamma=2.21e5):
self.llg = LLG(t, self.m_init, self.Ms, alpha, gamma, name=self.name)
self.create_mif()
self.execute_mif()
def execute_mif(self):
path = o.retrieve_oommf_path()
executable = o.retrieve_oommf_executable(path)
process = o.call_oommf('boxsi ' + self.mif_filename)
process.wait()
| bsd-2-clause | Python |
cc78dc401b16ff189b86466e3c0cb4609a72af0d | add tester | regardscitoyens/nosdeputes.fr,regardscitoyens/nosdeputes.fr,regardscitoyens/nosdeputes.fr,regardscitoyens/nosdeputes.fr,regardscitoyens/nosdeputes.fr | batch/depute/test.py | batch/depute/test.py | #!/usr/bin/env python
import os, sys, json
split = False
splitval = False
if len(sys.argv) > 1:
field = sys.argv[1]
if len(sys.argv) > 2:
split = True
if len(sys.argv) > 3:
splitval = int(sys.argv[3])
else:
field = "all"
values = {}
def add_value(val):
if split and ' / ' in val:
for i,v in enumerate(val.split(' / ')):
if type(splitval) != int or splitval == i:
add_value(v)
return
if val not in values:
values[val] = 0
values[val] += 1
MISSING = []
for dep in os.listdir('out'):
with open(os.path.join('out', dep)) as f:
data = json.load(f)
if field == "all":
for k in data:
if data[k] and (type(data[k]) != list or data[k] != [""]):
add_value(k)
continue
if field in data:
if type(data[field]) == list:
if data[field] == [""]:
MISSING.append(data["id_institution"])
for i in data[field]:
if i:
add_value(i)
else: add_value(data[field])
else: MISSING.append(data["id_institution"])
miss = len(MISSING)
if miss <= 3 and max(values.values()) == 1:
print "ALL UNIQUE FIELDS (", len(values), ")"
sys.exit(0)
if miss > 3:
print miss, "MISSING:", MISSING
order = sorted(values, key=lambda x: values[x])
order.reverse()
for k in order:
print k.encode('utf-8'), ":", values[k]
| agpl-3.0 | Python |
|
b674a3e4de86728139e97bb02fa90a62a7700c31 | add speech processing test | MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI | testing/miniBrain.py | testing/miniBrain.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import time
from os.path import dirname, abspath
sys.path.append(dirname(dirname(abspath(__file__))))
reload(sys)
sys.setdefaultencoding('utf-8')
from EmeraldAI.Pipelines.InputProcessing.ProcessInput import ProcessInput
from EmeraldAI.Pipelines.ScopeAnalyzer.AnalyzeScope import AnalyzeScope
from EmeraldAI.Pipelines.ResponseProcessing.ProcessResponse import ProcessResponse
from EmeraldAI.Pipelines.TextToSpeech.TTS import TTS
from EmeraldAI.Pipelines.Trainer.Trainer import Trainer
from EmeraldAI.Entities.User import User
from EmeraldAI.Entities.Context import Context
from EmeraldAI.Entities.PipelineArgs import PipelineArgs
from EmeraldAI.Config.Config import *
from EmeraldAI.Logic.Audio.SoundMixer import *
def ProcessSpeech(data):
print "ProcessSpeech - Go"
cancelSpeech = False
stopwordList = Config().GetList("Bot", "StoppwordList")
if(data in stopwordList):
cancelSpeech = True
SoundMixer().Stop()
print "ProcessSpeech - No Stopword"
pipelineArgs = PipelineArgs(data)
print "ProcessSpeech - Pipeline Args Created"
pipelineArgs = ProcessInput().ProcessAsync(pipelineArgs)
print "ProcessSpeech - Process Async completed"
pipelineArgs = AnalyzeScope().Process(pipelineArgs)
print "ProcessSpeech - Scope analyzed"
pipelineArgs = ProcessResponse().Process(pipelineArgs)
print "ProcessSpeech - Response processed"
if(not cancelSpeech):
if(pipelineArgs.Animation != None):
print "There should have been an animation", pipelineArgs.Animation
pipelineArgs = TTS().Process(pipelineArgs)
print "ProcessSpeech - TTS Triggered"
trainerResult = Trainer().Process(pipelineArgs)
print "ProcessSpeech - Trainer Done"
Context().History.append(pipelineArgs)
print "Pipeline Args", pipelineArgs.toJSON()
print "Main User", User().toJSON()
print "Trainer Result: ", trainerResult
print "Input: ", data
print "Response: ", pipelineArgs.Response
while SoundMixer().IsPlaying():
time.sleep(1)
print "Set user..."
User().SetUserByCVTag("Max")
print "Start Speech processing"
ProcessSpeech("Warmup")
#ProcessSpeech("Guten Abend")
#ProcessSpeech("Wer ist Angela Merkel")
ProcessSpeech("Wieviel ist 432 plus 68")
ProcessSpeech("Wieviel ist 4 + 32 / 6")
#ProcessSpeech("Bist du ein Mensch")
#ProcessSpeech("TRIGGER_FACEAPP_OFF")
#ProcessSpeech("Was ist eine Süßkartoffel")
exit()
ProcessSpeech("xxx")
ProcessSpeech("xxx")
ProcessSpeech("xxx")
| apache-2.0 | Python |
|
0942d6bcc1d15b16b4d3170a1574fc5218b2c53b | add python_tips.py | coderzh/CodeTips,coderzh/CodeTips,coderzh/CodeTips | python_tips.py | python_tips.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import argparse
import subprocess
# print 尽量用函数,不用语句
print('Hello World')
# do not use
print 'Hello World'
def subprocess_test():
# 执行子进程获得输出内容,尽量用 subprocess吧
text = os.popen('echo 123').read()
print(text)
# subprocess
# 父进程等待子进程完成
subprocess.call(['ls', '-l'])
return_code = subprocess.call('echo subprocess.call', shell=True)
print(return_code)
# Popen 不等待子进程完成,需调用 wait() 等待完成
child = subprocess.Popen(['ls', '-l'])
child.wait()
'''
child.poll() # 检查子进程状态
child.kill() # 终止子进程
child.send_signal() # 向子进程发送信号
child.terminate() # 终止子进程
'''
print('parent process')
# communicate
# 尽量不要用 shell=True
child1 = subprocess.Popen(['ls', '-l'], stdout=subprocess.PIPE)
child2 = subprocess.Popen(['wc'], stdin=child1.stdout, stdout=subprocess.PIPE)
out = child2.communicate()
print('wc:', out)
def os_file_demo():
# 遍历目录文件
root = '.'
for f in os.listdir(root):
path = os.path.join(root, f)
print(path)
# 分割扩展名 ext
print(os.path.splitext(os.path.basename('/a/b/c.txt')))
def main():
subprocess_test()
os_file_demo()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='desc')
# 参数
# parser.add_argument('wan', help='eth0')
# parser.add_argument('lan', help='wlan0')
# 选项
parser.add_argument('-v', dest='version', default=None, help='version')
parser.add_argument('-u', dest='uin', default=None, help='uin')
args = parser.parse_args()
#print(args.wan, args.lan, args.version, args.uin)
main()
| mit | Python |
|
842c796a223ee9cb78c69ccb59416a2afe0fcee0 | Add tests for Permission class. | Acidity/PyPermissions | tests/permissions.py | tests/permissions.py | import unittest
from permission import Permission, PERMISSION_DELIMITER
class BasicPermissionTests(unittest.TestCase):
def setUp(self):
self.p1 = Permission("test{0}1{0}hello".format(PERMISSION_DELIMITER))
self.p2 = Permission("test{0}2{0}hello".format(PERMISSION_DELIMITER))
self.p3 = Permission("test")
self.p4 = Permission("test{0}1{0}hello".format(PERMISSION_DELIMITER))
self.ps1 = {self.p1, self.p2}
self.ps2 = {self.p1, self.p4}
self.ps3 = {self.p1}
def test_equal(self):
self.assertEqual(self.p1, self.p4)
self.assertNotEqual(self.p1, self.p2)
self.assertNotEqual(self.p1, self.p3)
self.assertEqual(self.ps2, self.ps3)
def test_grants_permission(self):
self.assertTrue(self.p1.grants_permission(self.p1))
self.assertTrue(self.p1.grants_permission(self.p4))
self.assertFalse(self.p1.grants_permission(self.p2))
self.assertFalse(self.p1.grants_permission(self.p3))
self.assertFalse(self.p3.grants_permission(self.p1))
def test_grants_any_permission(self):
self.assertTrue(self.p1.grants_any_permission(self.ps1))
self.assertTrue(self.p2.grants_any_permission(self.ps1))
self.assertFalse(self.p3.grants_any_permission(self.ps1))
self.assertTrue(self.p4.grants_any_permission(self.ps1))
def test_segments(self):
self.assertEqual(self.p1.segments, ["test", "1", "hello"])
self.assertEqual(self.p2.segments, ["test", "2", "hello"])
self.assertEqual(self.p3.segments, ["test"])
self.assertEqual(self.p1.segments, self.p4.segments)
if __name__ == "__main__":
unittest.main()
| mit | Python |
|
cb7d205add1d6e114277e596b2023c755dd1ff19 | add an example unit test | cloudmesh/cloudmesh.docker,karvenka/cloudmesh.docker,cloudmesh/cloudmesh.docker,karvenka/cloudmesh.docker | tests/test_docker.py | tests/test_docker.py | """ run with
python setup.py install; pip install . ; nosetests -v --nocapture tests/docker/test_docker.py
python setup.py install; pip install . ; nosetests -v --nocapture tests/docker/test_docker.py:Test_docker.test_001
nosetests -v --nocapture tests/cm_basic/test_var.py
or
nosetests -v tests/cm_basic/test_var.py
"""
from cloudmesh_client.common.Shell import Shell
from cloudmesh_client.common.util import HEADING
from cloudmesh_client.var import Var
def run(command):
print(command)
parameter = command.split(" ")
shell_command = parameter[0]
args = parameter[1:]
result = Shell.execute(shell_command, args)
print(result)
return result
# noinspection PyMethodMayBeStatic,PyPep8Naming
class Test_docker(object):
"""
"""
def setup(self):
pass
def test_003(self):
HEADING("list docker images")
result = run("cms docker image list")
print(result)
assert "cms" in result # need to make real assertion
def test_004(self):
HEADING("list docker images")
result = run("cms docker container list")
print(result)
assert "cms" in result # need to make real assertion
def test_005(self):
HEADING("list docker images")
result = run("cms docker network list")
print(result)
assert "cms" in result # need to make real assertion
| apache-2.0 | Python |
|
4e0476fa83d0832c328abf00b5167887a0af3fe6 | Add tests for hashes | pydanny/webhooks | tests/test_hashes.py | tests/test_hashes.py | from webhooks.hashes import placebo_hash_function, basic_hash_function
def test_placebo():
assert placebo_hash_function() == ""
def test_basic():
hashes = set([basic_hash_function() for x in range(30)])
assert len(hashes) == 30
| bsd-3-clause | Python |
|
ef8ca51dbd9b93a801a4a87be3c04f2c56cdef5a | test for call to enqueue passing | algorithmic-music-exploration/amen-server,algorithmic-music-exploration/amen-server | tests/test_server.py | tests/test_server.py | import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
result = json.dumps({'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)})
# also need to check that we call upload
assert result == handle_post(q, files, get_url, upload) # wait,this seems to fail half the time, wtf?
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
| bsd-2-clause | Python |
|
b73e125fdcb12649e79aa2e108dcc019d9fffeb0 | add strtol test | f-prettyland/angr,angr/angr,angr/angr,schieb/angr,axt/angr,tyb0807/angr,schieb/angr,angr/angr,chubbymaggie/angr,tyb0807/angr,iamahuman/angr,axt/angr,chubbymaggie/angr,haylesr/angr,iamahuman/angr,haylesr/angr,schieb/angr,f-prettyland/angr,iamahuman/angr,tyb0807/angr,axt/angr,chubbymaggie/angr,f-prettyland/angr | tests/test_strtol.py | tests/test_strtol.py | import nose
import angr
import subprocess
import logging
l = logging.getLogger('angr.tests.strtol')
import os
test_location = str(os.path.dirname(os.path.realpath(__file__)))
def test_strtol():
b = angr.Project(os.path.join(test_location, "../../binaries/tests/x86_64/strtol_test"))
pg = b.factory.path_group(immutable=False)
# find the end of main
expected_outputs = {"base 8 worked\n", "base +8 worked\n", "0x worked\n", "+0x worked\n", "base +10 worked\n",
"base 10 worked\n", "base -8 worked\n", "-0x worked\n", "base -10 worked\n", "Nope\n"}
pg.explore(find=0x400804, num_find=len(expected_outputs))
# check the outputs
pipe = subprocess.PIPE
for f in pg.found:
test_input = f.state.posix.dumps(0)
test_output = f.state.posix.dumps(1)
expected_outputs.remove(test_output)
# check the output works as expected
p = subprocess.Popen("./test2", stdout=pipe, stderr=pipe, stdin=pipe)
ret = p.communicate(test_input)[0]
nose.tools.assert_equal(ret, test_output)
# check that all of the outputs were seen
nose.tools.assert_equal(len(expected_outputs), 0)
if __name__ == "__main__":
test_strtol()
| bsd-2-clause | Python |
|
f5d2b17371dbd974820b9b8ab1fcdb11ad8fa646 | Add in script to count duplicates. | materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org | backend/scripts/countdups.py | backend/scripts/countdups.py | #!/usr/bin/env python
import rethinkdb as r
conn = r.connect('localhost', 30815, db='materialscommons')
rql = r.table('datafiles').filter(r.row['usesid'].match("^[0-9a-f]")).pluck('size')
total_bytes = 0
total_files = 0
for doc in rql.run(conn):
total_bytes = total_bytes + doc['size']
total_files = total_files + 1
print "Total bytes = %s for %d dups" %(format(total_bytes, ",d"), total_files)
| mit | Python |
|
8488e7c5245758e4651e6d723f93d52f3ff54d73 | Add tool for submitting jobs to AreWeCompressedYet | luctrudeau/daala,kodabb/daala,vr000m/daala,nvoron23/daala,iankronquist/daala,kustom666/daala,kustom666/daala,jmvalin/daala,kbara/daala,xiph/daala,kbara/daala,xiph/daala,jmvalin/daala,ascent12/daala,tribouille/daala,tribouille/daala,xiph/daala,xiphmont/daala,ycho/daala,xiph/daala,HeadhunterXamd/daala,nvoron23/daala,HeadhunterXamd/daala,xiphmont/daala,kustom666/daala,vr000m/daala,iankronquist/daala,luctrudeau/daala,KyleSiefring/daala,luctrudeau/daala,ascent12/daala,iankronquist/daala,kodabb/daala,nvoron23/daala,nvoron23/daala,kbara/daala,KyleSiefring/daala,ycho/daala,tribouille/daala,felipebetancur/daala,ascent12/daala,KyleSiefring/daala,iankronquist/daala,iankronquist/daala,mbebenita/daala,vr000m/daala,felipebetancur/daala,xiph/daala,kodabb/daala,jmvalin/daala,kustom666/daala,luctrudeau/daala,jmvalin/daala,ascent12/daala,kustom666/daala,xiphmont/daala,felipebetancur/daala,ycho/daala,tribouille/daala,mbebenita/daala,vr000m/daala,xiphmont/daala,nvoron23/daala,felipebetancur/daala,ycho/daala,ascent12/daala,KyleSiefring/daala,tribouille/daala,HeadhunterXamd/daala,luctrudeau/daala,mbebenita/daala,mbebenita/daala,vr000m/daala,kbara/daala,ycho/daala,kodabb/daala,xiphmont/daala,HeadhunterXamd/daala,kodabb/daala,KyleSiefring/daala,jmvalin/daala,felipebetancur/daala,HeadhunterXamd/daala,mbebenita/daala | tools/submit_awcy.py | tools/submit_awcy.py | #!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
if 'DAALA_ROOT' not in os.environ:
print("Please specify the DAALA_ROOT environment variable to use this tool.")
sys.exit(1)
keyfile = open('secret_key','r')
key = keyfile.read().strip()
daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)
branch = subprocess.check_output('git symbolic-ref -q --short HEAD',shell=True).strip()
parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=branch)
args = parser.parse_args()
commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix
run_id = user+'-'+date_short+'-'+short
print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
| bsd-2-clause | Python |
|
0c145918d0f34bee1193eeaa0488eb369f0e843e | Use item_lookup_field for DELETE methods | sebest/eve,amagdas/eve,bcrochet/eve,elpoisterio/eve,EasonYi/eve,superdesk/eve,opticode/eve,matthieuprat/eve,pjs7678/eve,kynan/eve,julianhille/eve,jzorrof/eve,yanyanqin/eve,mugurrus/eve,hustlzp/eve,stratosgear/eve,kidaa/eve,kalbasit/eve,mcreenan/eve,eduardomb/eve | eve/methods/delete.py | eve/methods/delete.py | # -*- coding: utf-8 -*-
"""
eve.methods.delete
~~~~~~~~~~~~~~~~~~
This module imlements the DELETE method.
:copyright: (c) 2013 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from flask import current_app as app, abort
from eve.utils import config
from eve.auth import requires_auth
from eve.methods.common import get_document, ratelimit
@ratelimit()
@requires_auth('item')
def delete(resource, **lookup):
"""Deletes a resource item. Deletion will occur only if request ETag
matches the current representation of the item.
:param resource: name of the resource to which the item(s) belong.
:param **lookup: item lookup query.
.. versionchanged:: 0.0.7
Support for Rate-Limiting.
.. versionchanged:: 0.0.5
Pass current resource to ``parse_request``, allowing for proper
processing of new configuration settings: `filters`, `sorting`, `paging`.
.. versionchanged:: 0.0.4
Added the ``requires_auth`` decorator.
"""
original = get_document(resource, **lookup)
if not original:
abort(404)
app.data.remove(resource, original[config.ID_FIELD])
return {}, None, None, 200
@requires_auth('resource')
def delete_resource(resource):
"""Deletes all item of a resource (collection in MongoDB terms). Won't drop
indexes. Use with caution!
.. versionchanged:: 0.0.4
Added the ``requires_auth`` decorator.
.. versionadded:: 0.0.2
"""
app.data.remove(resource)
return {}, None, None, 200
| # -*- coding: utf-8 -*-
"""
eve.methods.delete
~~~~~~~~~~~~~~~~~~
This module imlements the DELETE method.
:copyright: (c) 2013 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from flask import current_app as app, abort
from eve.utils import config
from eve.auth import requires_auth
from eve.methods.common import get_document, ratelimit
@ratelimit()
@requires_auth('item')
def delete(resource, **lookup):
"""Deletes a resource item. Deletion will occur only if request ETag
matches the current representation of the item.
:param resource: name of the resource to which the item(s) belong.
:param **lookup: item lookup query.
.. versionchanged:: 0.0.7
Support for Rate-Limiting.
.. versionchanged:: 0.0.5
Pass current resource to ``parse_request``, allowing for proper
processing of new configuration settings: `filters`, `sorting`, `paging`.
.. versionchanged:: 0.0.4
Added the ``requires_auth`` decorator.
"""
original = get_document(resource, **lookup)
if not original:
abort(404)
app.data.remove(resource, lookup[config.ID_FIELD])
return {}, None, None, 200
@requires_auth('resource')
def delete_resource(resource):
"""Deletes all item of a resource (collection in MongoDB terms). Won't drop
indexes. Use with caution!
.. versionchanged:: 0.0.4
Added the ``requires_auth`` decorator.
.. versionadded:: 0.0.2
"""
app.data.remove(resource)
return {}, None, None, 200
| bsd-3-clause | Python |
837089f9195af984597522fffc8c2c6a02e73097 | Create config.example.py | trolleway/osmot | scripts/eurotram/config.example.py | scripts/eurotram/config.example.py | dbname='gis'
user='trolleway'
host='localhost'
password='admin'
ngw_url='http://trolleway.nextgis.com'
ngw_resource_id=
ngw_login = 'administrator'
ngw_password = 'admin'
| unlicense | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.