commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
bdb75567519914386da7f1d598c6c7aaf96d8e02 | Add sql solution for 561. Array Partition I | ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode | py/array-partition-i.py | py/array-partition-i.py | class Solution(object):
    def arrayPairSum(self, nums):
        """LeetCode 561 (Array Partition I): pair up the 2n numbers so that
        the sum of each pair's minimum is maximized. Sorting and summing
        every other element (index 0, 2, 4, ...) takes the smaller member of
        each adjacent pair, which is the optimal pairing.

        :type nums: List[int]
        :rtype: int
        """
        return sum(sorted(nums)[::2])
| apache-2.0 | Python |
|
7275a50343cba5073dc2fa77e2e964daec002c38 | move refactored OttTestCase to utils | OpenTransitTools/utils | ott/utils/tests/ott_test_case.py | ott/utils/tests/ott_test_case.py | import os
import sys
import unittest
import urllib
import contextlib
from ott.utils import config_util
from ott.utils import file_utils
class OttTestCase(unittest.TestCase):
    """Base TestCase for OTT web services.

    Builds service URLs from settings in development.ini and can append every
    generated URL to a log file (ott.test_urlfile) for later replay.
    """

    # defaults, overridden in setUp() from development.ini
    domain = "localhost"
    port = "33333"
    path = None
    url_file = None

    def get_url(self, svc_name, params=None, lang=None):
        """Return the full http URL for *svc_name*.

        Appends the query string *params* when given, then an optional
        ``_LOCALE_`` parameter for *lang*.  When ``self.url_file`` is open,
        the URL (with spaces encoded as '+') is also logged, one per line.
        """
        # import pdb; pdb.set_trace()
        if self.path:
            ret_val = "http://{}:{}/{}/{}".format(self.domain, self.port, self.path, svc_name)
        else:
            ret_val = "http://{}:{}/{}".format(self.domain, self.port, svc_name)
        if params:
            ret_val = "{0}?{1}".format(ret_val, params)
        if lang:
            # NOTE: '&' assumes params was supplied too; a lang without params
            # produces '...svc&_LOCALE_=xx' (no '?'), mirroring original behavior.
            ret_val = "{0}&_LOCALE_={1}".format(ret_val, lang)
        if self.url_file:
            url = ret_val.replace(" ", "+")
            self.url_file.write(url)
            self.url_file.write("\n")
        return ret_val

    def call_url(self, url):
        """GET *url* and return the raw response body (Python 2 urllib)."""
        ret_json = None
        with contextlib.closing(urllib.urlopen(url)) as f:
            ret_json = f.read()
        return ret_json

    def setUp(self):
        """Read server/test settings from development.ini at the project root."""
        dir = file_utils.get_project_root_dir()
        ini = config_util.ConfigUtil('development.ini', run_dir=dir)
        port = ini.get('ott.test_port', 'app:main')
        if not port:
            # fall back to the server port, then to the class default
            port = ini.get('ott.svr_port', 'app:main', self.port)
        self.port = port
        url_file = ini.get('ott.test_urlfile', 'app:main')
        if url_file:
            self.url_file = open(os.path.join(dir, url_file), "a+")
        test_domain = ini.get('ott.test_domain', 'app:main')
        if test_domain:
            self.domain = test_domain
        test_path = ini.get('ott.test_path', 'app:main')
        if test_path:
            self.path = test_path

    def tearDown(self):
        """Flush and close the URL log, if one was opened in setUp()."""
        # BUG FIX: previously called bare `url_file.flush()` / `url_file.close()`,
        # which raised NameError -- the open file handle lives on self.
        if self.url_file:
            self.url_file.flush()
            self.url_file.close()

    def call_url_match_list(self, url, list):
        """Fetch *url* and assert the body matches every regex in *list*."""
        u = self.call_url(url)
        for l in list:
            self.assertRegexpMatches(u, l)

    def call_url_match_string(self, url, str):
        """Fetch *url* and assert the body matches the single regex *str*."""
        u = self.call_url(url)
        self.assertRegexpMatches(u, str)
| mpl-2.0 | Python |
|
e07c699caf699852c98b3396150b343553a386c4 | Add tests for language api | ganemone/ontheside,ganemone/ontheside,ganemone/ontheside | server/tests/api/test_language_api.py | server/tests/api/test_language_api.py | import json
from server.tests.helpers import FlaskTestCase, fixtures
class TestLanguageAPI(FlaskTestCase):
    """Integration tests for the /api/languages REST endpoints.

    Each test seeds the database from a JSON fixture via the @fixtures
    decorator before issuing requests through self.api_request / self.app.
    """
    @fixtures('base.json')
    def test_get_empty_languages(self):
        """Test GET /api/languages endpoint with no data"""
        response, data = self.api_request('get', '/api/languages')
        assert data['num_results'] is 0
        assert response.status_code == 200
    @fixtures('single_language.json')
    def test_get_one_language(self):
        """Test GET /api/languages endpoint with a single language"""
        response, data = self.api_request('get', '/api/languages')
        assert data['num_results'] is 1
        assert response.status_code == 200
    @fixtures('many_languages.json')
    def test_get_multiple_languages(self):
        """Test GET /api/languages endpoint with multple languages"""
        response, data = self.api_request('get', '/api/languages')
        assert data['num_results'] > 0
        assert response.status_code == 200
    @fixtures('many_languages.json')
    def test_get_no_language_by_id(self):
        """Test GET /api/languages/(int:id) for missing language"""
        response, data = self.api_request('get', '/api/languages/1000')
        assert response.status_code == 404
    @fixtures('many_languages.json')
    def test_language_by_id(self):
        """Test GET /api/languages(int:id) for existing language"""
        response, data = self.api_request('get', '/api/languages/1')
        assert data['language'] == 'Python'
        assert response.status_code == 200
    @fixtures('single_user.json')
    def test_post_language(self):
        """Tests POST to /api/languages for an authorized user"""
        self.login()
        data = {
            'language': 'some_value'
        }
        response = self.app.post(
            '/api/languages',
            data=json.dumps(data)
        )
        assert response.status_code == 201
    @fixtures('base.json')
    def test_post_language_unauthorized(self):
        """Tests POST to /api/languages for an unauthorized user"""
        data = {
            'language': 'some_value'
        }
        response = self.app.post(
            '/api/languages',
            data=json.dumps(data)
        )
        assert response.status_code == 401
| mit | Python |
|
6e736a48f8c49b8257305125742d89cb7f729fbc | index Ansible source versions | johntellsall/shotglass,johntellsall/shotglass,johntellsall/shotglass | shotglass/make_ver_ansible.py | shotglass/make_ver_ansible.py | #!/usr/bin/env python
'''
make_versions -- index many versions of a project
ALPHA code, will need modification for general use.
'''
import re
import subprocess
import sys
import git
NAME = 'ansible'
# pre-release tags (rc/beta/alpha) are excluded from indexing
bad_tag_re = re.compile(r'(rc|beta|alpha)')
repos = git.Repo(NAME)
# keep only release tags of the form v<version>
tags = [tag.name for tag in repos.tags
        if tag.name.startswith('v') and not bad_tag_re.search(tag.name)]
checkout_cmd = 'cd {name} ; git checkout {tag}'
index_cmd = './manage.py make_index --project={name}-{tag} {name}'
# NOTE(review): [:2] only processes the first two tags -- presumably a
# debugging limit left in this ALPHA script; confirm before indexing all.
for tag in tags[:2]:
    cmd = checkout_cmd.format(name=NAME, tag=tag)
    print '>>>', cmd
    # NOTE(review): exits with status 0 even when the checkout FAILED -- verify
    if subprocess.call(cmd, shell=True):
        sys.exit(0)
    cmd = index_cmd.format(name=NAME, tag=tag)
    print '>>>', cmd
    out = subprocess.check_output(cmd, shell=True)
    print out
| mit | Python |
|
24e6a8a21ef61edbe00e6af8a1aea274394a23ed | Add a snippet (python/pygtk). | jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets | python/pygtk/minimal.py | python/pygtk/minimal.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pygtk
pygtk.require('2.0')
import gtk
def main():
    """Show a single empty top-level GTK window and enter the GTK main loop."""
    window = gtk.Window(gtk.WINDOW_TOPLEVEL)
    window.show()
    gtk.main()  # blocks until the GTK loop is quit
if __name__ == '__main__':
    main()
| mit | Python |
|
08e4f449f0e871f996e9a265fd23a967a0377078 | Add bfx example | doubleDragon/QuantBot | quant/example/ex_bfx.py | quant/example/ex_bfx.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import time
from quant import config
from quant.api.bitfinex import PrivateClient
# Authenticated Bitfinex client using the sub-account credentials.
client = PrivateClient(key=config.Bitfinex_SUB_API_KEY, secret=config.Bitfinex_SUB_SECRET_TOKEN)
# client = PrivateClient(key=config.Bitfinex_API_KEY, secret=config.Bitfinex_SECRET_TOKEN)
# print(client.ticker('eosbtc'))
# print(client.balances())
# Place a small limit buy, then cancel it after one second (API smoke test).
amount = '20.0'
price = '0.00015'
symbol = 'eosbtc'
r_id = client.buy(symbol=symbol, amount=amount, price=price)
print(r_id)
if r_id:
    # brief pause so the order is registered before cancelling
    time.sleep(1)
    client.cancel_order(r_id)
# print(client.cancel_all_orders())
| mit | Python |
|
800639fe381ec502e54a3fbd95241b460bd3e3c3 | add tests for shannon.py | dit/dit,dit/dit,dit/dit,chebee7i/dit,Autoplectic/dit,dit/dit,Autoplectic/dit,chebee7i/dit,chebee7i/dit,chebee7i/dit,Autoplectic/dit,Autoplectic/dit,dit/dit,Autoplectic/dit | dit/algorithms/tests/test_shannon.py | dit/algorithms/tests/test_shannon.py | from __future__ import division
from nose.tools import *
from dit import Distribution as D, ScalarDistribution as SD
from dit.algorithms import (entropy as H,
mutual_information as I,
conditional_entropy as CH)
# H = entropy of a fair coin is exactly 1 bit.
def test_H1():
    d = SD([1/2, 1/2])
    assert_almost_equal(H(d), 1.0)
# H accepts a bare probability as a Bernoulli parameter.
def test_H2():
    assert_almost_equal(H(1/2), 1.0)
# Marginal and joint entropies of two independent fair bits.
def test_H3():
    outcomes = ['00', '01', '10', '11']
    pmf = [1/4]*4
    d = D(outcomes, pmf)
    assert_almost_equal(H(d, [0]), 1.0)
    assert_almost_equal(H(d, [1]), 1.0)
    assert_almost_equal(H(d, [0,1]), 2.0)
    assert_almost_equal(H(d), 2.0)
# Uniform distribution over 10 outcomes has entropy 1 in base 10.
def test_H4():
    d = SD([1/10]*10)
    d.set_base(10)
    assert_almost_equal(H(d), 1.0)
# Independent bits share no mutual information.
def test_I1():
    outcomes = ['00', '01', '10', '11']
    pmf = [1/4]*4
    d = D(outcomes, pmf)
    assert_almost_equal(I(d, [0], [1]), 0.0)
# Perfectly correlated bits share 1 bit of mutual information.
def test_I2():
    outcomes = ['00', '11']
    pmf = [1/2]*2
    d = D(outcomes, pmf)
    assert_almost_equal(I(d, [0], [1]), 1.0)
# Overlapping variable groups on the parity (XOR) distribution.
def test_I3():
    outcomes = ['000', '011', '101', '110']
    pmf = [1/4]*4
    d = D(outcomes, pmf)
    assert_almost_equal(I(d, [0,1], [1,2]), 2.0)
# Conditional entropies on the parity distribution, incl. H(X|X) == 0.
def test_CH1():
    outcomes = ['000', '011', '101', '110']
    pmf = [1/4]*4
    d = D(outcomes, pmf)
    assert_almost_equal(CH(d, [0], [1,2]), 0.0)
    assert_almost_equal(CH(d, [0,1], [2]), 1.0)
    assert_almost_equal(CH(d, [0], [0]), 0.0)
|
8c1353537d0920d8137d5ea9d22843da67e41d9a | Add string_format pylint plugin. | thaim/ansible,thaim/ansible | test/sanity/pylint/plugins/string_format.py | test/sanity/pylint/plugins/string_format.py | # (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import six
import astroid
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers import utils
from pylint.checkers.utils import check_messages
from pylint.checkers.strings import parse_format_method_string
_PY3K = sys.version_info[:2] >= (3, 0)
MSGS = {
'E9305': ("Format string contains automatic field numbering "
"specification",
"ansible-format-automatic-specification",
"Used when a PEP 3101 format string contains automatic "
"field numbering (e.g. '{}').",
{'minversion': (2, 6)}),
'E9390': ("bytes object has no .format attribute",
"ansible-no-format-on-bytestring",
"Used when a bytestring was used as a PEP 3101 format string "
"as Python3 bytestrings do not have a .format attribute",
{'minversion': (3, 0)}),
}
class AnsibleStringFormatChecker(BaseChecker):
    """Checks string formatting operations to ensure that the format string
    is valid and the arguments match the format string.
    """

    __implements__ = (IAstroidChecker,)
    name = 'string'
    msgs = MSGS

    @check_messages(*(MSGS.keys()))
    def visit_call(self, node):
        # Only inspect calls that are `.format(...)` bound to a str-like
        # instance; everything else is left to pylint's builtin checkers.
        func = utils.safe_infer(node.func)
        if (isinstance(func, astroid.BoundMethod)
                and isinstance(func.bound, astroid.Instance)
                and func.bound.name in ('str', 'unicode', 'bytes')):
            if func.name == 'format':
                self._check_new_format(node, func)

    def _check_new_format(self, node, func):
        """ Check the new string formatting """
        # Skip attribute access on non-constant expressions (e.g. x.format)
        # where the format string cannot be statically determined.
        if (isinstance(node.func, astroid.Attribute)
                and not isinstance(node.func.expr, astroid.Const)):
            return
        try:
            strnode = next(func.bound.infer())
        except astroid.InferenceError:
            return
        if not isinstance(strnode, astroid.Const):
            return
        # On Python 3, bytes has no .format at all -- flag it outright.
        if _PY3K and isinstance(strnode.value, six.binary_type):
            self.add_message('ansible-no-format-on-bytestring', node=node)
            return
        if not isinstance(strnode.value, six.string_types):
            return
        # *args / **kwargs calls cannot be checked statically.
        if node.starargs or node.kwargs:
            return
        try:
            fields, num_args, manual_pos = parse_format_method_string(strnode.value)
        except utils.IncompleteFormatString:
            return
        # num_args > 0 means automatic field numbering ('{}') was used.
        if num_args:
            self.add_message('ansible-format-automatic-specification',
                             node=node)
            return
def register(linter):
    """required method to auto register this checker """
    linter.register_checker(AnsibleStringFormatChecker(linter))
| mit | Python |
|
d6d21f6e7b8d2a44ff3406ddc9a050cc17372da8 | Add analyze_nir_intensity tests module | danforthcenter/plantcv,danforthcenter/plantcv,danforthcenter/plantcv | tests/plantcv/test_analyze_nir_intensity.py | tests/plantcv/test_analyze_nir_intensity.py | import cv2
import numpy as np
from plantcv.plantcv import analyze_nir_intensity, outputs
def test_analyze_nir(test_data):
    """analyze_nir_intensity on an 8-bit gray image records the NIR median."""
    # Clear previous outputs
    outputs.clear()
    # Read in test data
    img = cv2.imread(test_data.small_gray_img, -1)
    mask = cv2.imread(test_data.small_bin_img, -1)
    _ = analyze_nir_intensity(gray_img=img, mask=mask, bins=256, histplot=True)
    assert int(outputs.observations['default']['nir_median']['value']) == 117
def test_analyze_nir_16bit(test_data):
    """Same check with the image cast to uint16; median must be unchanged."""
    # Clear previous outputs
    outputs.clear()
    # Read in test data
    img = cv2.imread(test_data.small_gray_img, -1)
    mask = cv2.imread(test_data.small_bin_img, -1)
    _ = analyze_nir_intensity(gray_img=np.uint16(img), mask=mask, bins=256, histplot=True)
    assert int(outputs.observations['default']['nir_median']['value']) == 117
| mit | Python |
|
9790fb109d59214ee016750307cd39b2f2780cf7 | solve increment counter | gsathya/dsalgo,gsathya/dsalgo | algo/incrementcounter.py | algo/incrementcounter.py | from datetime import datetime, timedelta
from time import sleep
second = timedelta(seconds=1)
day = timedelta(days=1)
class Increment:
    """Counts increment() calls within rolling one-second and one-day windows.

    A window restarts (count resets to 1) only when more than the window
    length has elapsed since the window's start timestamp.
    """
    def __init__(self):
        self.last_second_count = 0
        self.last_day_count = 0
        self.seconds_now = datetime.now()
        self.days_now = datetime.now()

    def increment(self):
        """Record one event, rolling each window forward when it has expired."""
        now = datetime.now()
        if now - self.seconds_now >= timedelta(seconds=1):
            # the second-window expired: start a fresh one containing this event
            self.seconds_now = now
            self.last_second_count = 1
        else:
            self.last_second_count += 1
        if now - self.days_now >= timedelta(days=1):
            # the day-window expired: start a fresh one containing this event
            self.days_now = now
            self.last_day_count = 1
        else:
            self.last_day_count += 1

    def get_events_last_second(self):
        """Return the event count of the current one-second window."""
        return self.last_second_count

    def get_events_last_day(self):
        """Return the event count of the current one-day window."""
        return self.last_day_count
# Demo: fire 100 events ~10ms apart and print both window counters
# (Python 2 print statements).
i = Increment()
for j in range(100):
    sleep(0.01)
    i.increment()
    print i.get_events_last_day()
    print i.get_events_last_second()
| mit | Python |
|
aa2b788c4d0b148ed9881da86de97965311b9cb4 | Add server.py | GraysonScherm/Distributed-Internet-Service-Delivery,GraysonScherm/Distributed-Internet-Service-Delivery | server.py | server.py | import socket, sys
import datetime
import time, random
TCP_IP = '72.36.65.116'
TCP_PORT = 5005
BUFFER_SIZE = 1024  # NOTE(review): unused -- nothing is received on this socket
# The server id is required as the first command-line argument.
if len(sys.argv) < 2:
    print ("Enter the server id")
    sys.exit(1)
# Every 5 seconds, open a fresh TCP connection and send one record:
# "<random 1-10>;<server id>;<local timestamp>".
while True:
    v = random.randint(1, 10)
    ts = time.time()
    MESSAGE = str(v) + ";" + sys.argv[1] + ";" + datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((TCP_IP, TCP_PORT))
    # NOTE(review): send() takes a str here -- Python 2 only; Python 3 would
    # require MESSAGE.encode().
    s.send(MESSAGE)
    s.close()
    print (MESSAGE)
    time.sleep(5)
| mit | Python |
|
dcca93fbb66e5cd8bf0e0500aca3f187922e8806 | Add in team id spider | danmoeller/ncaa-bball-attendance,danmoeller/ncaa-bball-attendance,danmoeller/ncaa-bball-attendance | scrapy_espn/scrapy_espn/spiders/team_spider.py | scrapy_espn/scrapy_espn/spiders/team_spider.py | import scrapy
class TeamSpider(scrapy.Spider):
    """Scrape ESPN's men's college basketball teams page, yielding one item
    per team with its name and ESPN numeric team id (parsed from the URL).
    """
    name = "team"
    start_urls = [
        'http://www.espn.com/mens-college-basketball/teams',
    ]
    def parse(self, response):
        # page layout: one <ul> per conference, one <li> per team
        for conf in response.css('ul'):
            for team in conf.css('li'):
                yield {
                    'team':team.css('h5 a::text').extract(),
                    # NOTE(review): extract()[0] raises IndexError on any <li>
                    # without an <h5><a href> -- assumes every li is a team entry
                    'id':team.css('h5 a::attr(href)').extract()[0].split('/')[7]
                }
|
4eb6c05df9b8faf4492b23db1ef0e2aee141d24b | test case for tpt | trendelkampschroer/PyEMMA,arokem/PyEMMA,trendelkampschroer/PyEMMA,arokem/PyEMMA | emma2/msm/analysis/api_test.py | emma2/msm/analysis/api_test.py | '''
Created on 18.10.2013
@author: marscher
'''
import unittest
import emma2.msm.analysis.api as api
import numpy as np
class Test(unittest.TestCase):
def testTPT(self):
A = np.ndarray([1, 2, 3], dtype=int)
B = np.ndarray([4, 2], dtype=int)
T = np.ndarray([[ 0.5, 0, 0.5, 0],
[0, 0.5, 0.5, 0],
[1 / 3., 1 / 3., 0, 1 / 3.],
[0, 0, 1, 0]], shape=(4,4), dtype=np.double)
itpt = api.tpt(T, A, B)
print "flux: ", itpt.getFlux()
print "net flux: ", itpt.getNetFlux()
print "total flux: ", itpt.getTotalFlux()
print "forward committor", itpt.getForwardCommittor()
print "backward committor", itpt.getBackwardCommitor()
# Run the test case directly when executed as a script.
if __name__ == "__main__":
    unittest.main()
| bsd-2-clause | Python |
|
458cf526a4ebb72b4fad84e8cd2b665e0f093c1b | Add functional test for cluster check recover | openstack/senlin,openstack/senlin,tengqm/senlin-container,openstack/senlin,stackforge/senlin,stackforge/senlin,tengqm/senlin-container | senlin/tests/functional/test_cluster_health.py | senlin/tests/functional/test_cluster_health.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.tests.functional import api as test_api
from senlin.tests.functional import base
from senlin.tests.functional.utils import test_utils
class TestClusterHealth(base.SenlinFunctionalTest):
    """Functional test: create a cluster, run check and recover actions on
    it (default and REBUILD operations), then delete it.
    """
    def setUp(self):
        super(TestClusterHealth, self).setUp()
        # Create profile
        self.profile = test_api.create_profile(
            self.client, test_utils.random_name('profile'),
            test_utils.spec_nova_server)
    def tearDown(self):
        # Delete profile
        test_api.delete_profile(self.client, self.profile['id'])
        super(TestClusterHealth, self).tearDown()
    def test_cluster_check_recover(self):
        # Create cluster
        desired_capacity = 3
        min_size = 2
        max_size = 5
        cluster = test_api.create_cluster(self.client,
                                          test_utils.random_name('cluster'),
                                          self.profile['id'], desired_capacity,
                                          min_size, max_size)
        cluster = test_utils.wait_for_status(test_api.get_cluster, self.client,
                                             cluster['id'], 'ACTIVE')
        # Check cluster health status
        action_id = test_api.action_cluster(self.client, cluster['id'],
                                            'check')
        test_utils.wait_for_status(test_api.get_action, self.client,
                                   action_id, 'SUCCEEDED')
        cluster = test_api.get_cluster(self.client, cluster['id'])
        self.assertEqual('ACTIVE', cluster['status'])
        # Perform cluster recovering operation
        action_id = test_api.action_cluster(self.client, cluster['id'],
                                            'recover')
        test_utils.wait_for_status(test_api.get_action, self.client,
                                   action_id, 'SUCCEEDED')
        # recover again with an explicit REBUILD operation
        action_id = test_api.action_cluster(self.client, cluster['id'],
                                            'recover',
                                            {'operation': 'REBUILD'})
        test_utils.wait_for_status(test_api.get_action, self.client,
                                   action_id, 'SUCCEEDED')
        # Delete cluster
        test_api.delete_cluster(self.client, cluster['id'])
        cluster = test_utils.wait_for_delete(test_api.get_cluster, self.client,
                                             cluster['id'])
| apache-2.0 | Python |
|
48c008b4ac08114e30f4bee7a208d5d3fb925296 | Add partial simple greedy algorithm (baseline). | karulont/combopt | problem1/steiner-simplegreedy.py | problem1/steiner-simplegreedy.py | import networkx as nx
from sys import argv
def main():
    """Partial simple-greedy Steiner tree construction (baseline, incomplete).

    Reads a GML graph with terminal flag 'T' and edge cost 'c', then greedily
    tries to connect the terminals with the cheapest candidate edges.
    """
    # G = nx.read_gml(argv[1])
    G = nx.read_gml("steiner-small.gml")
    T = [] # terminals
    for v,d in G.nodes_iter(data=True):
        if d['T'] == 1:
            T.append(v)
    U = T[:] # Steiner tree vertices
    F = [] # Steiner tree edges
    D = [] # candidate edge set
    # seed the candidate set with every edge incident to a terminal
    for u in T:
        u_incident = G.edges(u)
        for i in u_incident:
            D.append(i)
    UF = nx.Graph()
    UF.add_nodes_from(T)
    while not nx.is_connected(UF):
        if len(D) == 0:
            print("Not sufficiently connected")
            return None
        # pick the cheapest candidate edge f
        min_f = float("inf")
        for f_i in D:
            f_cost = G.edge[f_i[0]][f_i[1]]['c']
            if f_cost < min_f:
                min_f = f_cost
                f = f_i
        UF_f = UF.copy()
        UF_f.add_edge(f[0], f[1])
        # NOTE(review): nx.has_no_cycles does not exist in networkx (this
        # raises AttributeError); the acceptance step below is still a stub,
        # so neither UF nor D ever changes and the loop cannot terminate.
        if nx.has_no_cycles(UF_f):
            pass
        #F.append(f)
        #U.append(f[0])
        #U.append(f[1])
        #D.append(f.incident)
        #D.remove(f)
    return UF
if __name__ == '__main__':
    # NOTE(review): main() may return None ("Not sufficiently connected"),
    # which would make these calls raise AttributeError.
    UF = main()
    print("UF nodes:",UF.nodes())
    print("UF edges:",UF.edges())
| mit | Python |
|
fca390e7dd0d806cd87fa3570ce23ad132d8c852 | add new example | pignacio/python-nvd3,pignacio/python-nvd3,BibMartin/python-nvd3,mgx2/python-nvd3,oz123/python-nvd3,liang42hao/python-nvd3,oz123/python-nvd3,vdloo/python-nvd3,vdloo/python-nvd3,vdloo/python-nvd3,oz123/python-nvd3,Coxious/python-nvd3,liang42hao/python-nvd3,Coxious/python-nvd3,yelster/python-nvd3,mgx2/python-nvd3,mgx2/python-nvd3,BibMartin/python-nvd3,BibMartin/python-nvd3,yelster/python-nvd3,Coxious/python-nvd3,liang42hao/python-nvd3,pignacio/python-nvd3,yelster/python-nvd3 | examples/lineWithFocusChart.py | examples/lineWithFocusChart.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Examples for Python-nvd3 is a Python wrapper for NVD3 graph library.
NVD3 is an attempt to build re-usable charts and chart components
for d3.js without taking away the power that d3.js gives you.
Project location : https://github.com/areski/python-nvd3
"""
from nvd3 import lineWithFocusChart
import random
import datetime
import time
# x-axis origin: 2012-06-01 as a JavaScript-style millisecond timestamp
start_time = int(time.mktime(datetime.datetime(2012, 6, 1).timetuple()) * 1000)
nb_element = 100
#Open File for test
output_file = open('test_lineWithFocusChart.html', 'w')
#---------------------------------------
# NOTE(review): shadows the builtin `type` for the rest of the module
type = "lineWithFocusChart"
chart = lineWithFocusChart(name=type, color_category='category20b', date=True)
chart.set_containerheader("\n\n<h2>" + type + "</h2>\n\n")
# Python 2: map() returns a list here, as add_serie expects
xdata = range(nb_element)
xdata = map(lambda x: start_time + x * 1000000000, xdata)
ydata = [i + random.randint(-10, 10) for i in range(nb_element)]
ydata2 = map(lambda x: x * 2, ydata)
ydata3 = map(lambda x: x * 3, ydata)
ydata4 = map(lambda x: x * 4, ydata)
# tooltip prefix/suffix rendered around each y value
extra_serie = {"tooltip": {"y_start": "There is ", "y_end": " calls"}}
#extra_serie = None
chart.add_serie(name="serie 1", y=ydata, x=xdata, extra=extra_serie)
chart.add_serie(name="serie 2", y=ydata2, x=xdata, extra=extra_serie)
chart.add_serie(name="serie 3", y=ydata3, x=xdata, extra=extra_serie)
chart.add_serie(name="serie 4", y=ydata4, x=xdata, extra=extra_serie)
chart.buildhtml()
output_file.write(chart.htmlcontent)
#close Html file
output_file.close()
| mit | Python |
|
0114173d508298d6e9f72fd7f344d9123e4a7e59 | Create wtospark.py | jgarnier/iot-corner,jgarnier/iot-corner,jgarnier/iot-corner | sparkgw/wtospark.py | sparkgw/wtospark.py | from flask import Flask, request, abort
import json
import urllib2
app = Flask(__name__)
#Secret provided by
# fbabottemp99
# MmQ3YTA0MGUtNGI1Zi00MTI3LTlmZTMtMjQxNGJhYmRjMTI0MzI2ZDFlYWYtYzhh
# curl -X POST -H "X-Device-Secret: 12345" http://localhost:8080/report?temp=32
YOUR_DEVICE_SECRET = "12345"
YOUR_BOT_TOKEN = ""
YOUR_ROOM_ID = ""
@app.route('/report', methods =['POST'])
def inputArduino():
    """Accept a temperature report from a device and relay it to Spark.

    Expects ?temp=<value> and an X-Device-Secret header matching
    YOUR_DEVICE_SECRET; anything else is rejected with 401.
    """
    headers = request.headers
    temperature = request.args.get('temp')
    incoming_secret = headers.get('X-Device-Secret')
    if temperature is None:
        abort(401)
    if incoming_secret is None:
        abort(401)
    elif YOUR_DEVICE_SECRET == incoming_secret:
        # we dont use it but for illustration
        json_file = request.json
        toSpark('**Temperature:** '+temperature)
        return 'Ok'
    else:
        # secret present but wrong (Python 2 print statement)
        print "Spoofed Hook"
        abort(401)
# POST Function that sends the commits & comments in markdown to a Spark room
def toSpark(commits):
    """POST *commits* as markdown to the configured Cisco Spark room and
    return the raw response body (Python 2 urllib2)."""
    url = 'https://api.ciscospark.com/v1/messages'
    headers = {'accept':'application/json','Content-Type':'application/json','Authorization': 'Bearer ' + YOUR_BOT_TOKEN}
    values = {'roomId': YOUR_ROOM_ID, 'markdown': commits }
    data = json.dumps(values)
    req = urllib2.Request(url = url , data = data , headers = headers)
    response = urllib2.urlopen(req)
    the_page = response.read()
    return the_page
if __name__ == '__main__':
    app.run(host='0.0.0.0' , port=9090, debug=True)
| apache-2.0 | Python |
|
5432dd2ee2e1d20494d0b4cf8d816b298e70067c | Add test script. | nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome | protogeni/test/ma/lookup_keys.py | protogeni/test/ma/lookup_keys.py | #! /usr/bin/env python
#
# Copyright (c) 2012-2014 University of Utah and the Flux Group.
#
# {{{GENIPUBLIC-LICENSE
#
# GENI Public License
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#
# }}}
#
import sys
import pwd
import getopt
import os
import re
import xmlrpclib
from M2Crypto import X509
def Usage():
    """Print command-line usage (Python 2 print statements)."""
    print "usage: " + sys.argv[ 0 ] + " [option...] <public|private|identifying> <user_urn [...]>"
    print """Options:
    -d, --debug                         be verbose about XML methods invoked
    -h, --help                          show options and usage
    -r file, --read-commands=file       specify additional configuration file"""
# test-common.py supplies get_self_credential, do_method,
# geni_am_response_handler and Fatal used below (Python 2 execfile).
execfile( "test-common.py" )
authority = "geni-ma"
# lookup_keys arguments: [credential list, options dict]
callargs = [
    [{
        'geni_type': 'geni_sfa',
        'geni_version': '3',
        'geni_value': get_self_credential()}],
    {
    }
    ]
try:
    response = do_method(authority, "lookup_keys",
                         callargs,
                         response_handler=geni_am_response_handler)
    print response
except xmlrpclib.Fault, e:
    Fatal("Could not obtain keys: %s" % (str(e)))
| agpl-3.0 | Python |
|
d1edac38e3402ebe03f96597500c3d39e49f299d | add run_pylint.py | asah/pylint-setup,asah/pylint-setup | run_pylint.py | run_pylint.py | #!/usr/bin/python
#
# wrapper script for pylint which just shows the errors and changes the return value if there's problems
# (enforcing a minscore and/or maxerrors - defaults to perfection)
#
import sys, re, subprocess, os
MINSCORE = 10.0
MAXERRORS = 0
command = 'pylint --rcfile=pylintrc --disable=W0511,W9911,W9913 `find webui python_saml libs -name "*py"`'
# unbuffer *both* me and the pylint subprocess!
os.environ['PYTHONUNBUFFERED'] = '1'
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=True, universal_newlines=True)
num_errors = 0
score = 0
while True:
line = p.stdout.readline().strip()
if line is None:
break
match = re.search(r'^.+?:[0-9]+: \[.[0-9]+.+?\] ', line)
if match:
print line
num_errors += 1
continue
match = re.search(r'Your code has been rated at ([0-9.-]+)', line)
if match:
score = float(match.group(1))
break
if score < MINSCORE:
print "scored %.2f which is less than %.2f - aborting" % (score, MINSCORE)
sys.exit(3)
if num_errors < MAXERRORS:
print "%d errors which is more than %d - aborting" % (num_errors, MAXERRORS)
sys.exit(4)
| mit | Python |
|
a96c25cf46cd82716b397ba61c2b67acb8b7c2d7 | Add code reading. | thewizardplusplus/micro,thewizardplusplus/micro,thewizardplusplus/micro | micro.py | micro.py | #!/usr/bin/env python
from sys import argv
def get_code():
    """Return the program source passed as the first command-line argument."""
    return argv[1]
# Entry point: echo the source code given on the command line.
if __name__ == '__main__':
    code = get_code()
    print(code)
| mit | Python |
|
084ebff19703c42c50621eb94ac070c6a471e983 | Solve the most wanted letter problem. | edwardzhu/checkio-solution | Home/mostWantedLetter.py | Home/mostWantedLetter.py | def checkio(word):
word = word.lower()
arr = dict()
for i in range(len(word)):
char = word[i]
if not str.isalpha(char):
continue
if not arr.__contains__(char):
arr[char] = 0
arr[char] = arr[char] + 1
result = ""
counter = 0
for k, v in arr.items():
if counter < v or (ord(k) < ord(result) and counter == v):
result = k
counter = v
return result
# Self-checks from the CheckiO problem statement.
if __name__ == '__main__':
    assert checkio("Hello World!") == "l", "First"
    assert checkio("How do you do?") == "o", "Second"
    assert checkio("One") == "e", "Third"
    assert checkio("") == "", "Final"
    print('All ok')
|
d437f494db827c69da7aaec00a5acf1d133e16b2 | Add basic slash command example | rapptz/discord.py,Rapptz/discord.py | examples/app_commands/basic.py | examples/app_commands/basic.py | from typing import Optional
import discord
from discord import app_commands
MY_GUILD = discord.Object(id=0) # replace with your guild id
class MyClient(discord.Client):
    """Minimal client demonstrating slash commands via an app_commands tree."""
    def __init__(self, *, intents: discord.Intents, application_id: int):
        super().__init__(intents=intents, application_id=application_id)
        # A CommandTree is a special type that holds all the application command
        # state required to make it work. This is a separate class because it
        # allows all the extra state to be opt-in.
        # Whenever you want to work with application commands, your tree is used
        # to store it and work with it.
        # Note: When using commands.Bot instead of discord.Client, the bot will
        # maintain its own tree instead.
        self.tree = app_commands.CommandTree(self)
    # In this basic example, we just synchronize the app commands to one guild.
    # Instead of specifying a guild to every command, we copy over our global commands instead.
    # By doing so we don't have to wait up to an hour until they are shown to the end-user.
    async def setup_hook(self):
        # This copies the global commands over to your guild.
        self.tree.copy_global_to(guild=MY_GUILD)
        await self.tree.sync(guild=MY_GUILD)
intents = discord.Intents.default()
# In order to use a basic synchronization of the app commands in the setup_hook,
# you have replace the 0 with your bots application_id you find in the developer portal.
client = MyClient(intents=intents, application_id=0)
@client.event
async def on_ready():
    # fires once the gateway connection is ready
    print(f'Logged in as {client.user} (ID: {client.user.id})')
    print('------')
@client.tree.command()
async def hello(interaction: discord.Interaction):
    """Says hello!"""
    await interaction.response.send_message(f'Hi, {interaction.user.mention}')
@client.tree.command()
@app_commands.describe(
    first_value='The first value you want to add something to',
    second_value='The value you want to add to the first value',
)
async def add(interaction: discord.Interaction, first_value: int, second_value: int):
    """Adds two numbers together."""
    await interaction.response.send_message(f'{first_value} + {second_value} = {first_value + second_value}')
# To make an argument optional, you can either give it a supported default argument
# or you can mark it as Optional from the typing library. This example does both.
@client.tree.command()
@app_commands.describe(member='The member you want to get the joined date from, defaults to the user who uses the command')
async def joined(interaction: discord.Interaction, member: Optional[discord.Member] = None):
    """Says when a member joined."""
    # If no member is explicitly provided then we use the command user here
    member = member or interaction.user
    await interaction.response.send_message(f'{member} joined in {member.joined_at}')
| mit | Python |
|
8fb4df5367b5c03d2851532063f6fa781fe2f980 | Add Fibonacci Series Using Recursion | TheAlgorithms/Python | Maths/fibonacciSeries.py | Maths/fibonacciSeries.py | # Fibonacci Sequence Using Recursion
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in fionacci series:"))
if limit <= 0:
print("Plese enter a positive integer")
else:
print("Fibonacci series:")
for i in range(limit):
print(recur_fibo(i))
| mit | Python |
|
5f8e01f976d75eca651e29ebdd379c865aa5bda9 | update merge_two_binary_trees_617 | lanpong/LeetCode,lanpong/LeetCode | Python/merge_two_binary_trees_617.py | Python/merge_two_binary_trees_617.py | # Given two binary trees and imagine that when you put one of them to cover the other, some nodes of the two trees are overlapped while the others are not.
# You need to merge them into a new binary tree. The merge rule is that if two nodes overlap, then sum node values up as the new value of the merged node.
# Otherwise, the NOT null node will be used as the node of new tree.
# Example 1:
# Input:
# Tree 1 Tree 2
# 1 2
# / \ / \
# 3 2 1 3
# / \ \
# 5 4 7
# Output:
# Merged tree:
# 3
# / \
# 4 5
# / \ \
# 5 4 7
# Note: The merging process must start from the root nodes of both trees.
# 题解:
# 就是合并两个二叉树,有相同节点的,则相加起来,还放在那个节点,
# 如果一个二叉树的一个节点上有而另一个二叉树的相同节点上没有数据的话就在那个节点上保留有的即可,
# 如此遍历二叉树直至结束
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def mergeTrees(self, t1, t2):
"""
:type t1: TreeNode
:type t2: TreeNode
:rtype: TreeNode
"""
if t1 and t2:
root=TreeNode(t1.val + t2.val)
root.left=self.mergeTrees(t1.left, t2.left)
root.right=self.mergeTrees(t1.right, t2.right)
return root
else:
return t1 or t2
| mit | Python |
|
8c6983656e550ebaf32ff714a3c22be276ba842b | Add ScribdDownloader.py | aknuck/Scribd-Downloader | ScribdDownloader.py | ScribdDownloader.py | #Scribd Downloader
#Adam Knuckey September 2015
print ("Starting Scribd Downloader")
import os
import re
import urllib, urllib2
import threading
from time import sleep
def download(link,destination):
#print link
urllib.urlretrieve(link,destination)
print("Enter textbook link:")
website = raw_input(" > ")
request = urllib2.Request(website)
request.add_header('User-Agent','Mozilla/5.0 (X11; U; Linux i686) Gecko/20071127 Firefox/2.0.0.11')
opener = urllib2.build_opener()
html = opener.open(request).read()
regex = re.compile("<title>.+</title>")
for m in regex.finditer(html):
title = m.group().replace("<title>","").replace("</title>","")
print ("Download "+title+"?")
proceed = raw_input("(y/n) > ").lower()
if proceed == "y":
print ("Downloading textbook - "+title+"...")
index = html.index('pageParams.contentUrl = "https://html2-f.scribdassets.com/')+len('pageParams.contentUrl = "https://html2-f.scribdassets.com/')
ident = html[index:index+17]
if not os.path.exists(title):
os.makedirs(title)
page = 1
regex = re.compile(ident)
for m in regex.finditer(html):#
link = html[m.start()-len('https://html2-f.scribdassets.com/'):m.start()+23+len(str(page))+11].replace("pages","images")+".jpg"
t = threading.Thread(target=download,args=(link,title+"/"+str(page)+".jpg"))
t.daemon = True
t.start()
sleep(0.05)
#print link
#urllib.urlretrieve(link,title+"/"+str(page)+".jpg")
page+=1
print ("Downloaded "+str(page-1)+" pages")
else:
print ("Exiting...")
| mit | Python |
|
97ae80b08958646e0c937f65a1b396171bf61e72 | Add a proper unit test for xreload.py. | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Lib/test/test_xreload.py | Lib/test/test_xreload.py | """Doctests for module reloading.
>>> from xreload import xreload
>>> from test.test_xreload import make_mod
>>> make_mod()
>>> import x
>>> C = x.C
>>> Cfoo = C.foo
>>> Cbar = C.bar
>>> Cstomp = C.stomp
>>> b = C()
>>> bfoo = b.foo
>>> b.foo()
42
>>> bfoo()
42
>>> Cfoo(b)
42
>>> Cbar()
42 42
>>> Cstomp()
42 42 42
>>> make_mod(repl="42", subst="24")
>>> xreload(x)
<module 'x' (built-in)>
>>> b.foo()
24
>>> bfoo()
24
>>> Cfoo(b)
24
>>> Cbar()
24 24
>>> Cstomp()
24 24 24
"""
SAMPLE_CODE = """
class C:
def foo(self):
print(42)
@classmethod
def bar(cls):
print(42, 42)
@staticmethod
def stomp():
print (42, 42, 42)
"""
import os
import sys
import shutil
import doctest
import xreload
import tempfile
from test.test_support import run_unittest
tempdir = None
save_path = None
def setUp(unused=None):
global tempdir, save_path
tempdir = tempfile.mkdtemp()
save_path = list(sys.path)
sys.path.append(tempdir)
def tearDown(unused=None):
global tempdir, save_path
if save_path is not None:
sys.path = save_path
save_path = None
if tempdir is not None:
shutil.rmtree(tempdir)
tempdir = None
def make_mod(name="x", repl=None, subst=None):
if not tempdir:
setUp()
assert tempdir
fn = os.path.join(tempdir, name + ".py")
f = open(fn, "w")
sample = SAMPLE_CODE
if repl is not None and subst is not None:
sample = sample.replace(repl, subst)
try:
f.write(sample)
finally:
f.close()
def test_suite():
return doctest.DocTestSuite(setUp=setUp, tearDown=tearDown)
def test_main():
run_unittest(test_suite())
if __name__ == "__main__":
test_main()
| mit | Python |
|
53e851f68f106bff919a591a3516f26d5b07c375 | add unit test case for FedMsgContext.send_message | fedora-infra/fedmsg,cicku/fedmsg,vivekanand1101/fedmsg,pombredanne/fedmsg,mathstuf/fedmsg,vivekanand1101/fedmsg,maxamillion/fedmsg,fedora-infra/fedmsg,chaiku/fedmsg,vivekanand1101/fedmsg,cicku/fedmsg,pombredanne/fedmsg,chaiku/fedmsg,maxamillion/fedmsg,fedora-infra/fedmsg,mathstuf/fedmsg,pombredanne/fedmsg,mathstuf/fedmsg,maxamillion/fedmsg,cicku/fedmsg,chaiku/fedmsg | fedmsg/tests/test_core.py | fedmsg/tests/test_core.py | import unittest
import mock
import warnings
from fedmsg.core import FedMsgContext
from common import load_config
class TestCore(unittest.TestCase):
def setUp(self):
config = load_config()
config['io_threads'] = 1
self.ctx = FedMsgContext(**config)
def test_send_message(self):
"""send_message is deprecated
It tests
- deprecation warning showing up appropriately
- that we call publish method behind the scene
"""
fake_topic = "org.fedoraproject.prod.compose.rawhide.complete"
fake_msg = "{'arch'': 's390', 'branch': 'rawhide', 'log': 'done'}"
self.ctx.publish = mock.Mock(spec_set=FedMsgContext.publish)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
self.ctx.send_message(topic=fake_topic, msg=fake_msg)
assert len(w) == 1
assert str(w[0].message) == ".send_message is deprecated."
assert self.ctx.publish.called
topic, msg, modname = self.ctx.publish.call_args[0]
assert topic == fake_topic
assert msg == fake_msg
assert modname is None
| lgpl-2.1 | Python |
|
aa38c6604476f1181903c688c0444ed87c9d75a1 | Add engine tests. | simphony/simphony-common | simphony/engine/tests/test_engine_metadata.py | simphony/engine/tests/test_engine_metadata.py | """Tests regarding loading engine's metadata."""
import sys
import unittest
import simphony.engine as engine_api
from simphony.engine import ABCEngineExtension, EngineInterface
from simphony.engine.extension import EngineManager, EngineManagerException
from simphony.engine.extension import EngineFeatureMetadata, EngineMetadata
class _Example1(ABCEngineExtension):
def get_supported_engines(self):
example_engine = \
self.create_engine_metadata('EXAMPLE1',
None,
[EngineInterface.Internal,
EngineInterface.FileIO])
return [example_engine]
def create_wrapper(self, cuds, engine_name, engine_interface):
if engine_name == 'EXAMPLE1':
pass
else:
raise Exception('Only EXAMPLE1 engine is supported. '
'Unsupported eninge: %s', engine_name)
def get_example_engine_extension():
class _Example2(ABCEngineExtension):
def get_supported_engines(self):
example_engine = \
self.create_engine_metadata('EXAMPLE2',
None,
[EngineInterface.Internal,
EngineInterface.FileIO])
return [example_engine]
def create_wrapper(self, cuds, engine_name, engine_interface):
if engine_name == 'EXAMPLE2':
pass
else:
raise Exception('Only EXAMPLE2 engine is supported. '
'Unsupported eninge: %s', engine_name)
return _Example2
class TestEnginePublicAPI(unittest.TestCase):
"""Test everything engine metadata."""
def setUp(self):
self.manager = engine_api._ENGINE_MANAGER
def tearDown(self):
pass
def test_get_supported_engines(self):
supported = engine_api.get_supported_engines()
assert(isinstance(supported, list))
def test_create_wrapper(self):
pass
class TestEngineManager(unittest.TestCase):
"""Test everything engine metadata."""
def setUp(self):
self.manager = EngineManager()
self.manager.load_metadata(sys.modules[__name__])
def tearDown(self):
pass
def test_get_supported_engines(self):
supported = self.manager.get_supported_engines()
self.assertIn('EXAMPLE1', supported)
self.assertNotIn('LAMMPS', supported)
def test_engine_count(self):
supported = self.manager.get_supported_engines()
self.assertEqual(len(supported), 1)
def test_assert_duplicate_engine(self):
self.assertRaises(Exception, self.manager.load_metadata, sys.modules[__name__])
def test_add_extension(self):
cls = get_example_engine_extension()
self.manager.add_extension(cls())
supported = self.manager.get_supported_engines()
self.assertIn('EXAMPLE2', supported)
self.assertEqual(len(supported), 2)
def test_create_wrapper(self):
self.assertRaises(EngineManagerException, self.manager.create_wrapper, None, 'EXAMPLE2')
# Example is a dummpy engine. It does not have any wrapper.
self.assertEqual(self.manager.create_wrapper(None, 'EXAMPLE1'), None)
def test_non_module_load(self):
class MyClass:pass
self.assertRaises(EngineManagerException, self.manager.load_metadata, MyClass)
class TestEngineFeature(unittest.TestCase):
"""Test everything engine metadata."""
def setUp(self):
pass
def tearDown(self):
pass
def test_init(self):
self.assertRaises(EngineManagerException, EngineFeatureMetadata, None, None)
self.assertRaises(EngineManagerException, EngineFeatureMetadata, None, [])
class TestEngineMetadata(unittest.TestCase):
"""Test everything engine metadata."""
def setUp(self):
pass
def tearDown(self):
pass
def test_init(self):
m = EngineMetadata('myengine', None, EngineInterface.Internal)
self.assertEqual(m.name, 'myengine')
self.assertEqual(m.features, None)
self.assertEqual(m.interfaces, EngineInterface.Internal)
| bsd-2-clause | Python |
|
8483174f32801318d1cd8aa33abb04819b4a7810 | Create usonic.py | JayJanarthanan/RPI-GARAGE,JayJanarthanan/RPI-GARAGE,JayJanarthanan/RPI-GARAGE,JayJanarthanan/RPi-Garage-Opener,JayJanarthanan/RPI-GARAGE,JayJanarthanan/RPi-Garage-Opener | usonic.py | usonic.py |
#!/usr/bin/python
# remember to change the GPIO values below to match your sensors
# GPIO output = the pin that's connected to "Trig" on the sensor
# GPIO input = the pin that's connected to "Echo" on the sensor
def reading(sensor):
import time
import RPi.GPIO as GPIO
# Disable any warning message such as GPIO pins in use
GPIO.setwarnings(False)
# use the values of the GPIO pins, and not the actual pin number
# so if you connect to GPIO 25 which is on pin number 22, the
# reference in this code is 25, which is the number of the GPIO
# port and not the number of the physical pin
GPIO.setmode(GPIO.BCM)
varOUT = 23
varIN = 24
if sensor == 0:
# point the software to the GPIO pins the sensor is using
# change these values to the pins you are using
# GPIO output = the pin that's connected to "Trig" on the sensor
# GPIO input = the pin that's connected to "Echo" on the sensor
GPIO.setup(varOUT,GPIO.OUT)
GPIO.setup(varIN,GPIO.IN)
GPIO.output(varOUT, GPIO.LOW)
# found that the sensor can crash if there isn't a delay here
# no idea why. If you have odd crashing issues, increase delay
time.sleep(0.3)
# sensor manual says a pulse ength of 10Us will trigger the
# sensor to transmit 8 cycles of ultrasonic burst at 40kHz and
# wait for the reflected ultrasonic burst to be received
# to get a pulse length of 10Us we need to start the pulse, then
# wait for 10 microseconds, then stop the pulse. This will
# result in the pulse length being 10Us.
# start the pulse on the GPIO pin
# change this value to the pin you are using
# GPIO output = the pin that's connected to "Trig" on the sensor
GPIO.output(varOUT, True)
# wait 10 micro seconds (this is 0.00001 seconds) so the pulse
# length is 10Us as the sensor expects
time.sleep(0.00001)
# stop the pulse after the time above has passed
# change this value to the pin you are using
# GPIO output = the pin that's connected to "Trig" on the sensor
GPIO.output(varOUT, False)
# listen to the input pin. 0 means nothing is happening. Once a
# signal is received the value will be 1 so the while loop
# stops and has the last recorded time the signal was 0
# change this value to the pin you are using
# GPIO input = the pin that's connected to "Echo" on the sensor
while GPIO.input(varIN) == 0:
signaloff = time.time()
# listen to the input pin. Once a signal is received, record the
# time the signal came through
# change this value to the pin you are using
# GPIO input = the pin that's connected to "Echo" on the sensor
while GPIO.input(varIN) == 1:
signalon = time.time()
# work out the difference in the two recorded times above to
# calculate the distance of an object in front of the sensor
timepassed = signalon - signaloff
# we now have our distance but it's not in a useful unit of
# measurement. So now we convert this distance into centimetres
distance = timepassed * 17000
# return the distance of an object in front of the sensor in cm
return distance
# we're no longer using the GPIO, so tell software we're done
GPIO.cleanup()
else:
print "Incorrect usonic() function varible."
print reading(0)
| mit | Python |
|
7a7d597c771ba8100957b5ca00156d7147c695c5 | Add clear_db_es_contents tests | hms-dbmi/fourfront,hms-dbmi/fourfront,4dn-dcic/fourfront,hms-dbmi/fourfront,4dn-dcic/fourfront,4dn-dcic/fourfront,hms-dbmi/fourfront,4dn-dcic/fourfront,hms-dbmi/fourfront | src/encoded/tests/test_clear_db_es_contents.py | src/encoded/tests/test_clear_db_es_contents.py | import pytest
from encoded.commands.clear_db_es_contents import (
clear_db_tables,
run_clear_db_es
)
pytestmark = [pytest.mark.setone, pytest.mark.working]
def test_clear_db_tables(app, testapp):
# post an item and make sure it's there
post_res = testapp.post_json('/testing-post-put-patch/', {'required': 'abc'},
status=201)
testapp.get(post_res.location, status=200)
clear_db_tables(app)
# item should no longer be present
testapp.get(post_res.location, status=404)
def test_run_clear_db_envs(app):
# if True, then it cleared DB
assert run_clear_db_es(app, None, True) == True
prev_env = app.registry.settings.get('env.name')
# should never run on these envs
app.registry.settings['env.name'] = 'fourfront-webprod'
assert run_clear_db_es(app, None, True) == False
app.registry.settings['env.name'] = 'fourfront-webprod2'
assert run_clear_db_es(app, None, True) == False
# test if we are only running on specific envs
app.registry.settings['env.name'] = 'fourfront-test-env'
assert run_clear_db_es(app, 'fourfront-other-env', True) == False
assert run_clear_db_es(app, 'fourfront-test-env', True) == True
# reset settings after test
if prev_env is None:
del app.registry.settings['env.name']
else:
app.registry.settings['env.name'] = prev_env
| mit | Python |
|
28677132dbcacd7d348262007256b3e2a9e44da2 | add gate client module | ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study | server/Mars/Client/GateClient.py | server/Mars/Client/GateClient.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2016 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list ofconditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materialsprovided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import PathHelper as PH
PH.addPathes('../')
import sys
from bson import BSON
from msgpack.bMsgPack import msgPackExt, extHook
import msgpack
from MarsLog.LogManager import LogManager
from MarsRpc.ChannelClient import ChannelClient
from MarsRpc.Compressor import Compressor
from Utils.PyProto import ClientGate_pb2
from Utils.PyProto import Common_pb2
from Utils.EntityFactory import EntityFactory
from Utils.EntityManager import EntityManager
from Utils.IdCreator import IdCreator
from Utils.MessageCodec import Md5IndexDecoder, Md5IndexEncoder
from ServerProxy import ServerProxy
MARS_DEVICEID = str(IdCreator.genId())
class GateClient(ClientGate_pb2.SGate2Client):
ST_INIT = 0
ST_CONNECTING = 1
ST_RECONNECTING = 3
ST_CONNECT_FAILED = 4
ST_CONNECT_SUCCESSED = 5
ST_DISCONNECTED = 6
CB_ON_CONNECT_FAILED = 1
CB_ON_CONNECT_SUCCESSED = 2
CB_ON_DISCONNECTED = 3
CB_ON_CONNECT_REPLY = 4
CB_ON_RELIABLE_MSG_UNSENT = 5
def __init__(self, host, port, clientConf, proto='BSON'):
super(GateClient, self).__init__(self)
self.client = ChannelClient(host, port, self)
| bsd-2-clause | Python |
|
36fdfa89230fd08b6c28501f3f277bff642e36e3 | Create ipy_custom_action_button.py | satishgoda/learningqt,satishgoda/learningqt | pyside/pyside_basics/jamming/QAction/ipy_custom_action_button.py | pyside/pyside_basics/jamming/QAction/ipy_custom_action_button.py | from collections import OrderedDict
from functools import partial
from PySide import QtCore
from PySide import QtGui
##
class CustomAction(QtGui.QAction):
def __init__(self, message, *args, **kwargs):
super(CustomAction, self).__init__(*args, **kwargs)
self.message = message
self.triggered.connect(self.callback)
def callback(self):
print self.message, self.sender(), self.senderSignalIndex()
class CustomButton(QtGui.QPushButton):
def __init__(self, *args, **kwargs):
super(CustomButton, self).__init__(*args, **kwargs)
self.clicked.connect(self.callback)
def callback(self):
print self.text()
for action in self.actions():
action.activate(QtGui.QAction.ActionEvent.Trigger)
##
mw = QtGui.QMainWindow()
customAction1 = CustomAction("Action 1", mw)
customAction2 = CustomAction("Action 2", mw)
button = CustomButton("Click me")
print customAction1, button
button.show()
##
button.addAction(customAction1)
##
button.addAction(customAction2)
##
button.removeAction(customAction1)
##
button.removeAction(customAction2)
##
button.addActions([customAction1, customAction2])
| mit | Python |
|
18f63b98bf7eefe3022dc4681e81ada9969d5228 | Create guess-the-word.py | tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015 | Python/guess-the-word.py | Python/guess-the-word.py | # Time: O(n^2)
# Space: O(n)
# This problem is an interactive problem new to the LeetCode platform.
#
# We are given a word list of unique words, each word is 6 letters long,
# and one word in this list is chosen as secret.
#
# You may call master.guess(word) to guess a word.
# The guessed word should have type string and must be from the original
# list with 6 lowercase letters.
#
# This function returns an integer type,
# representing the number of exact matches (value and position)
# of your guess to the secret word.
# Also, if your guess is not in the given wordlist, it will return -1 instead.
#
# For each test case, you have 10 guesses to guess the word.
# At the end of any number of calls, if you have made 10 or
# less calls to master.guess
# and at least one of these guesses was the secret, you pass the testcase.
#
# Besides the example test case below,
# there will be 5 additional test cases, each with 100 words in the word list.
# The letters of each word in those testcases were chosen independently at
# random from 'a' to 'z',
# such that every word in the given word lists is unique.
#
# Example 1:
# Input: secret = "acckzz", wordlist = ["acckzz","ccbazz","eiowzz","abcczz"]
#
# Explanation:
#
# master.guess("aaaaaa") returns -1, because "aaaaaa" is not in wordlist.
# master.guess("acckzz") returns 6, because "acckzz" is secret
# and has all 6 matches.
# master.guess("ccbazz") returns 3, because "ccbazz" has 3 matches.
# master.guess("eiowzz") returns 2, because "eiowzz" has 2 matches.
# master.guess("abcczz") returns 4, because "abcczz" has 4 matches.
#
# We made 5 calls to master.guess and one of them was the secret,
# so we pass the test case.
# Note: Any solutions that attempt to circumvent the judge will result
# in disqualification.
#
# """
# This is Master's API interface.
# You should not implement it, or speculate about its implementation
# """
# class Master(object):
# def guess(self, word):
# """
# :type word: str
# :rtype int
# """
import collections
import itertools
try:
xrange # Python 2
except NameError:
xrange = range # Python 3
class Solution(object):
def findSecretWord(self, wordlist, master):
"""
:type wordlist: List[Str]
:type master: Master
:rtype: None
"""
def match(a, b):
matches = 0
for i in xrange(len(a)):
if a[i] == b[i]:
matches += 1
return matches
i, n = 0, 0
while i < 10 and n < 6:
count = collections.Counter(w1 for w1, w2 in
itertools.permutations(wordlist, 2)
if match(w1, w2) == 0)
guess = min(wordlist, key=lambda w: count[w])
n = master.guess(guess)
wordlist = [w for w in wordlist if match(w, guess) == n]
i += 1
| mit | Python |
|
803a2702a1330be1f51428f8d7533cfee27c3f90 | Add facebook_test_user support. | merwok-forks/facepy,buzzfeed/facepy,jwjohns/facepy,liorshahverdi/facepy,Spockuto/facepy,jgorset/facepy,jwjohns/facepy | facepy/test.py | facepy/test.py | import facepy
class FacebookTestUser(object):
def __init__(self, **kwargs):
fields = ('id', 'access_token', 'login_url', 'email', 'password')
for field in fields:
setattr(self, field, kwargs[field])
self.graph = facepy.GraphAPI(self.access_token)
class TestUserManager(object):
def __init__(self, app_id, app_secret):
access_token = facepy.get_application_access_token(app_id, app_secret)
self.graph = facepy.GraphAPI(access_token)
self.app_id = app_id
def create_user(self, **parameters):
""" creates facebook test user
Valid parameters (with default values):
installed = true
name = FULL_NAME
locale = en_US
permissions = read_stream
"""
url = "%s/accounts/test-users" % self.app_id
return FacebookTestUser(**self.graph.post(url, **parameters))
def delete_user(self, user):
self.graph.delete(str(user.id))
class TestUser(object):
def __init__(self, manager, **user_params):
self.manager = manager
self.user_params = user_params
def __enter__(self):
self._user = self.manager.create_user(**self.user_params)
return self._user
def __exit__(self, exc_type=None, exc_value=None, traceback=None):
self.manager.delete_user(self._user)
| mit | Python |
|
332f1fc67481432f6e8dd7cd9a35b02b12c9b6f6 | Create numpy.py | ticcky/code101,ticcky/code101 | numpy.py | numpy.py | # Best dimensions in each column of a matrix x.
for i in range(x.shape[0]):
dims = x[:,i].argsort()[-5:]
vals = x[dims,i]
print dims, vals
| apache-2.0 | Python |
|
4d4904e69e030be3f2b0e30c957507626d58a50e | Teste nas listas | M3nin0/supreme-broccoli,M3nin0/supreme-broccoli,M3nin0/supreme-broccoli,M3nin0/supreme-broccoli | _Listas/sherlock.py | _Listas/sherlock.py | # Quem é o culpado
perguntas = []
ct = 0
pt = 0
quest = input("Você telefonou a vitima: ")
perguntas.append(quest)
quest = input("Vocẽ esteve no local do crime: ")
perguntas.append(quest)
quest = input("Você mora perto da vitima? ")
perguntas.append(quest)
quest = input("Devia para a vitima? ")
perguntas.append(quest)
quest = input("Já trabalhou com a vitima? ")
perguntas.append(quest)
while ct <= len(perguntas) - 1:
if perguntas[ct] in "sim":
pt += 1
ct += 1
if pt >= 1 and pt <= 2:
print("Você é um suspeito")
elif pt >= 3 and pt <= 4:
print("Você é cumplice!")
if pt == 5:
print("CULPADO,CULPADO, VOCÊ SERÁ PRESO!!!")
| apache-2.0 | Python |
|
235bfc6db908b6701de77df11e00e89a307d738e | Create tinymongo.py | jjonesAtMoog/tinymongo,schapman1974/tinymongo | tinymongo/tinymongo.py | tinymongo/tinymongo.py | mit | Python |
||
b351e5106684b0af8b862bb6ba5375671c1f431d | include getcomments.py | jaredsohn/hacker-news-download-all-comments | getcomments.py | getcomments.py | import urllib2
import json
import datetime
import time
import pytz
import pandas as pd
from pandas import DataFrame
ts = str(int(time.time()))
df = DataFrame()
hitsPerPage = 1000
requested_keys = ["author", "comment_text", "created_at_i", "objectID", "points"]
i = 0
while True:
try:
url = 'https://hn.algolia.com/api/v1/search_by_date?tags=comment&hitsPerPage=%s&numericFilters=created_at_i<%s' % (hitsPerPage, ts)
req = urllib2.Request(url)
response = urllib2.urlopen(req)
data = json.loads(response.read())
last = data["nbHits"] < hitsPerPage
data = DataFrame(data["hits"])[requested_keys]
df = df.append(data,ignore_index=True)
ts = data.created_at_i.min()
print i
if (last):
break
time.sleep(3.6)
i += 1
except Exception, e:
print e
df["comment_text"] = df["comment_text"].map(lambda x: x.translate(dict.fromkeys([0x201c, 0x201d, 0x2011, 0x2013, 0x2014, 0x2018, 0x2019, 0x2026, 0x2032])).encode('utf-8').replace(',',''))
df["created_at"] = df["created_at_i"].map(lambda x: datetime.datetime.fromtimestamp(int(x), tz=pytz.timezone('America/New_York')).strftime('%Y-%m-%d %H:%M:%S'))
ordered_df = df[["comment_text","points","author","created_at","objectID"]]
ordered_df.to_csv("hacker_news_comments.csv",encoding='utf-8', index=False) | apache-2.0 | Python |
|
74ecac2dbca41d737f62325955fd4d0dc393ac16 | Rename flots.py to plots.py | whbrewer/spc,whbrewer/spc,whbrewer/spc,whbrewer/spc | plots.py | plots.py | import json
import re
class plot(object):
def get_data(self,fn,col1,col2):
y = ''
for line in open(fn, 'rU'):
# don't parse comments
if re.search(r'#',line): continue
x = line.split()
if not re.search(r'[A-Za-z]{2,}\s+[A-Za-z]{2,}',line):
y += '[ ' + x[col1] + ', ' + x[col2] + '], '
str = "[ %s ]" % y
return str
| mit | Python |
|
f13da24b8fb4cf6d8fff91e88afb1507528c2c2a | Add `.ycm_extra_conf.py` for https://github.com/Valloric/ycmd | exponent/exponent,exponentjs/exponent,exponent/exponent,exponent/exponent,exponentjs/exponent,exponent/exponent,exponent/exponent,exponentjs/exponent,exponent/exponent,exponentjs/exponent,exponentjs/exponent,exponent/exponent,exponentjs/exponent,exponent/exponent,exponentjs/exponent,exponent/exponent,exponentjs/exponent,exponentjs/exponent | android/.ycm_extra_conf.py | android/.ycm_extra_conf.py | import os
basePath = os.path.dirname(os.path.realpath(__file__))
def FlagsForFile(filename, **kwargs):
return {
'flags': [
'-std=c++11',
'-DFOLLY_NO_CONFIG=1',
'-DFOLLY_USE_LIBCPP',
'-I' + basePath + '/ReactAndroid/../ReactCommon/cxxreact/..',
'-I' + basePath + '/ReactAndroid/../ReactCommon/jschelpers/..',
'-I' + basePath + '/ReactAndroid/src/main/jni/first-party/fb/include',
'-I' + basePath + '/ReactAndroid/build/third-party-ndk/folly',
'-I' + basePath + '/ReactAndroid/build/third-party-ndk/jsc',
'-I' + basePath + '/ReactAndroid/build/third-party-ndk/glog/..',
'-I' + basePath + '/ReactAndroid/build/third-party-ndk/glog/glog-0.3.3/src/',
'-I' + basePath + '/ReactAndroid/build/third-party-ndk/boost/boost_1_63_0',
'-I' + basePath + '/ReactAndroid/build/third-party-ndk/double-conversion',
'-I' + basePath + '/ReactAndroid/../ReactCommon/cxxreact',
],
}
| bsd-3-clause | Python |
|
654f21b39a68aa461b6457199403e7d89781cc79 | add migration | d120/pyophase,d120/pyophase,d120/pyophase,d120/pyophase | students/migrations/0010_auto_20161010_1345.py | students/migrations/0010_auto_20161010_1345.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-10 11:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('students', '0009_auto_20161005_1820'),
]
operations = [
migrations.AlterField(
model_name='student',
name='want_exam',
field=models.BooleanField(default=False, help_text='Die Klausur ist eine Simulation einer Uniklausur, um die Unteschiede zwischen einer Schul- und einer Universitätsklausr zu zeigen. Abgefragt werden hauptsächlich Informationen aus der Ophase. Es ist nicht verpflichtend die Klausur zu bestehen.', verbose_name='Klausur mitschreiben?'),
),
]
| agpl-3.0 | Python |
|
b90b43ceefb78e1a94ba898ed23443567786cf25 | Add /monitoring/status handler | diyan/falcon_seed | app/monitoring/handlers.py | app/monitoring/handlers.py | from __future__ import unicode_literals, absolute_import, division
import json
from structlog import get_logger
class StatusHandler:
def __init__(self):
self.logger = get_logger()
def on_get(self, req, res):
"""
@type req: falcon.request.Request
@type res: falcon.response.Response
"""
rv = dict(
status='OK',
settings={}, # TODO pass some/all settings here
content_type=req.content_type,
url=req.url,
remote_addr='', # TODO Use falcon or wgsi API to get remote addr
headers=req.headers,
cookies=req.cookies,
context=req.context)
res.body = json.dumps(dict(result=rv))
class AppRoutesHandler:
def __init__(self):
self.logger = get_logger()
def on_get(self, req, res):
# TODO return result: routes?: [handler, url, methods]
pass | mit | Python |
|
95c71727bf340f55e17a15d475aba54438eb0b8e | add solution for Partition List | zhyu/leetcode,zhyu/leetcode | src/partitionList.py | src/partitionList.py | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
# @param head, a ListNode
# @param x, an integer
# @return a ListNode
def partition(self, head, x):
head1, head2 = ListNode(0), ListNode(0)
last1, last2 = head1, head2
while head:
if head.val < x:
last1.next = head
head = head.next
last1 = last1.next
last1.next = None
else:
last2.next = head
head = head.next
last2 = last2.next
last2.next = None
last1.next = head2.next
return head1.next
| mit | Python |
|
ef14676bd07cb53cecaaaa6cb3a0a56c248aa74d | clone for refactoring | igelbox/blender-ogf | io_scene_ogf/ogf_utils.py | io_scene_ogf/ogf_utils.py | #! /usr/bin/python
import io, struct
class Chunks:
OGF_HEADER = 0x1
OGF4_S_DESC = 0x12
OGF4_CHILDREN = 0x9
OGF4_TEXTURE = 0x2
OGF4_VERTICES = 0x3
OGF4_INDICES = 0x4
class rawr:
def __init__(self, data):
self.offs = 0
self.data = data
def read(sz=1):
v = data[self.offs:self.offs+sz]
self.offs+=sz
return v;
def unpack(self, fmt):
s = struct.calcsize(fmt)
self.offs += s
return struct.unpack_from(fmt, self.data, self.offs-s)
def unpack_asciiz(self):
zpos = self.data.find(0, self.offs);
if (zpos == -1):
zpos = len(self.data);
return self.unpack('={}sx'.format(zpos - self.offs))[0].decode('cp1251');
def ogfr(data):
MASK_COMPRESSED = 0x80000000
offs = 0
while (offs < len(data)):
i, s = struct.unpack_from('=II', data, offs);
if ((i & MASK_COMPRESSED) != 0):
raise Exception('compressed')
offs += 8 + s
yield (i & ~MASK_COMPRESSED, data[offs-s:offs])
def cfrs(tupl, expected):
if (tupl[0] != expected):
raise Exception('expected {}, but found: {}'.format(expected, tupl[0]))
return tupl[1]
def load_ogfX(h, ogr):
raise Exception('unsupported OGF format version: {}'.format(h[0]))
def load_ogf4_m05(ogr):
    """Load the chunks of a type-5 model: texture, vertex and index buffers.

    Returns (positions, triangles, normals, uvs, texture_name); positions
    and normals are float triples, triangles are uint16 index triples.
    """
    c = rawr(cfrs(next(ogr), Chunks.OGF4_TEXTURE))
    tex = c.unpack_asciiz()
    shd = c.unpack_asciiz()
    #~ print ('texture:{}, shader:{}'.format(tex, shd));
    c = rawr(cfrs(next(ogr), Chunks.OGF4_VERTICES))
    vf, vc = c.unpack('=II')
    vv = []; nn = []; tt = []
    if (vf == 0x12071980):#OGF4_VERTEXFORMAT_FVF_1L
        # Per vertex: position, normal, tangent, binormal, uv, then one
        # uint32 (presumably the single bone/link id -- TODO confirm).
        for _ in range(vc):
            v = c.unpack('=fff')
            vv.append(v)
            n = c.unpack('=fff')
            nn.append(n)
            c.unpack('=fff')#tangen
            c.unpack('=fff')#binorm
            tt.append(c.unpack('=ff'))
            f = c.unpack('=I')[0]
    elif (vf == 0x240e3300):#OGF4_VERTEXFORMAT_FVF_2L
        # Per vertex: two uint16s, position, normal, tangent, binormal,
        # one float, uv (presumably bone pair + blend weight -- TODO confirm).
        for _ in range(vc):
            bb = c.unpack('=HH')
            vv.append(c.unpack('=fff'))
            nn.append(c.unpack('=fff'))
            c.unpack('=fff')#tangen
            c.unpack('=fff')#binorm
            c.unpack('=f')
            tt.append(c.unpack('=ff'))
    else:
        raise Exception('unexpected vertex format: {:#x}'.format(vf))
    #~ print('vf:{:#x}, vc:{}'.format(vf, vc))
    c = rawr(cfrs(next(ogr), Chunks.OGF4_INDICES))
    ic = c.unpack('=I')[0]
    ii = []
    # ic counts individual indices; group them into triangles.
    for _ in range(ic//3):
        ii.append(c.unpack('=HHH'))
    #~ print('{},[],{}'.format(vv, ii))
    return (vv, ii, nn, tt, tex)
def load_ogf4_m10(ogr):
    """Load a type-10 model: a source-description chunk plus child models.

    Returns a list with one load_ogf() result per child chunk.
    """
    c = rawr(cfrs(next(ogr), Chunks.OGF4_S_DESC))
    src = c.unpack_asciiz()
    #~ print ('source:{}'.format(src));
    exptool = c.unpack_asciiz()
    exptime, crttime, modtime = c.unpack('=III')
    result = []
    # Each child payload is itself a complete OGF stream; parse recursively.
    for i, c in ogfr(cfrs(next(ogr), Chunks.OGF4_CHILDREN)):
        result.append(load_ogf(c))
    return result
def load_ogf4(h, ogr):
    """Parse a version-4 OGF: read the header fields, dispatch on model type.

    *h* is a rawr over the OGF_HEADER payload, positioned just past the
    version byte (see load_ogf); *ogr* yields the remaining chunks.
    """
    mt, shid = h.unpack('=BH')
    print ('modeltype:{}, shaderid:{}'.format(mt, shid))
    bbox = h.unpack('=ffffff')
    #~ print ('bbox:{}'.format(bbox))
    bsphere = h.unpack('=ffff')
    #~ print ('bsphere:{}'.format(bsphere))
    loader = {
        5: load_ogf4_m05,
        10: load_ogf4_m10
    }.get(mt)
    if loader is None:
        # The original called None for unknown types, surfacing as an opaque
        # "'NoneType' object is not callable"; fail with a usable message.
        raise Exception('unsupported OGF4 model type: {}'.format(mt))
    return loader(ogr)
def load_ogf(data):
    """Parse one OGF byte string: read the header version and dispatch.

    Only version 4 has a real handler; anything else goes to load_ogfX,
    which raises.
    """
    ogr = ogfr(data)
    cr = rawr(cfrs(next(ogr), Chunks.OGF_HEADER))
    ver = cr.unpack('=B')[0]
    #~ print ('version:{}'.format(ver))
    return {
        4: load_ogf4
    }.get(ver, load_ogfX)(cr, ogr)
def load(fname):
    """Read the file at *fname* and parse it as an OGF model."""
    with io.open(fname, mode = 'rb') as f:
        return load_ogf(f.read())
# Manual smoke test: parse a sample model from the working directory.
if __name__ == '__main__':
    print(load('test.ogf'))
| bsd-2-clause | Python |
|
5bff284204a1397dbc63e83363d865213a35efe6 | add a new test file test_begin_end.py | alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl | tests/unit/selection/modules/test_begin_end.py | tests/unit/selection/modules/test_begin_end.py | # Tai Sakuma <tai.sakuma@gmail.com>
import pytest
try:
import unittest.mock as mock
except ImportError:
import mock
from alphatwirl.selection.modules import Not, NotwCount
##__________________________________________________________________||
# The Not-style selection wrappers under test; ids give readable
# parametrized test names (one per class).
not_classes = [Not, NotwCount]
not_classe_ids = [c.__name__ for c in not_classes]
@pytest.mark.parametrize('NotClass', not_classes, ids=not_classe_ids)
def test_not_begin(NotClass):
    """begin() must be forwarded to the wrapped selection."""
    wrapped = mock.Mock()
    negation = NotClass(wrapped)
    event = mock.Mock()
    negation.begin(event)
    assert wrapped.begin.call_args_list == [mock.call(event)]
@pytest.mark.parametrize('NotClass', not_classes, ids=not_classe_ids)
def test_not_begin_absent(NotClass):
    """begin() must be a silent no-op when the wrapped selection lacks one."""
    wrapped = mock.Mock()
    del wrapped.begin
    negation = NotClass(wrapped)
    ev = mock.Mock()
    negation.begin(ev)
@pytest.mark.parametrize('NotClass', not_classes, ids=not_classe_ids)
def test_not_end(NotClass):
    """end() must be forwarded to the wrapped selection."""
    wrapped = mock.Mock()
    negation = NotClass(wrapped)
    negation.end()
    assert wrapped.end.call_args_list == [mock.call()]
@pytest.mark.parametrize('NotClass', not_classes, ids=not_classe_ids)
def test_not_end_absent(NotClass):
    """end() must be a silent no-op when the wrapped selection lacks one."""
    wrapped = mock.Mock()
    del wrapped.end
    negation = NotClass(wrapped)
    negation.end()
##__________________________________________________________________||
| bsd-3-clause | Python |
|
7655fe94decf2fc9c3a07104f8fa76cf39442ddb | implement rectified drive | FRC-1123/frc2017-1123,FRC-1123/frc2017-1123,FRC-1123/frc2017-1123,FRC-1123/frc2017-1123 | rectifieddrive.py | rectifieddrive.py | import navx
import subsystems
class RectifiedDrive:
    """
    Implements the rectifiedDrive function, which sets the motor outputs
    given a desired power and angular velocity using the NavX and a PID
    controller.
    """
    def __init__(self, kp, ki, kd, period=0.05):
        # PID gains plus the sampling period used by the discrete
        # derivative/integral approximations in calc_pid.
        self.kp = kp
        self.ki = ki
        self.kd = kd
        self.period = period
        self.prev_error = 0.0
    def rectifiedDrive(self, power, angular_vel):
        """Drive at *power*, steering toward the requested angular velocity."""
        correction = self.calc_pid(angular_vel - navx.ahrs.getRate())
        left = power - correction
        # Normalize each side independently if its magnitude exceeds 1.
        if abs(left) > 1.0:
            left /= abs(left)
        right = power + correction
        if abs(right) > 1.0:
            right /= abs(right)
        subsystems.motors.robot_drive.setLeftRightMotorOutputs(left, right)
    def calc_pid(self, error):
        """One PID step: backward-difference derivative, trapezoidal integral."""
        derivative = (error - self.prev_error) / self.period
        integral = (error + self.prev_error) / 2 * self.period
        self.prev_error = error
        return self.kp * error + self.kd * derivative + self.ki * integral
| mit | Python |
|
dd1d0893823561efec203cdfbb927b8edac7a72a | Add a coupld tests to create exception classes from error code names | darjus-amzn/boto,Asana/boto,vishnugonela/boto,podhmo/boto,weebygames/boto,SaranyaKarthikeyan/boto,clouddocx/boto,bleib1dj/boto,TiVoMaker/boto,tpodowd/boto,rayluo/boto,tpodowd/boto,disruptek/boto,stevenbrichards/boto,revmischa/boto,pfhayes/boto,ekalosak/boto,ryansb/boto,shaunbrady/boto,acourtney2015/boto,alfredodeza/boto,jotes/boto,zachmullen/boto,alex/boto,jindongh/boto,nexusz99/boto,felix-d/boto,janslow/boto,kouk/boto,ocadotechnology/boto,kouk/boto,j-carl/boto,campenberger/boto,disruptek/boto,alex/boto,zzzirk/boto,vijaylbais/boto,garnaat/boto,varunarya10/boto,appneta/boto,trademob/boto,s0enke/boto,dimdung/boto,drbild/boto,khagler/boto,nikhilraog/boto,elainexmas/boto,shipci/boto,nishigori/boto,rosmo/boto,drbild/boto,bryx-inc/boto,serviceagility/boto,israelbenatar/boto,lra/boto,weka-io/boto,ddzialak/boto,ramitsurana/boto,abridgett/boto,yangchaogit/boto,awatts/boto,appneta/boto,Pretio/boto | tests/unit/beanstalk/test_exception.py | tests/unit/beanstalk/test_exception.py | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.beanstalk.exception import simple
from boto.compat import unittest
class FakeError(object):
    """Minimal stand-in for a boto service error response object."""
    def __init__(self, code, status, reason, body):
        self.code, self.status = code, status
        self.reason, self.body = reason, body
class TestExceptions(unittest.TestCase):
    """Verify boto.beanstalk.exception.simple() derives exception classes
    from service error-code names, stripping any 'Exception' suffix."""
    def test_exception_class_names(self):
        # Create exception from class name
        error = FakeError('TooManyApplications', 400, 'foo', 'bar')
        exception = simple(error)
        self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
        # Create exception from class name + 'Exception' as seen from the
        # live service today
        error = FakeError('TooManyApplicationsException', 400, 'foo', 'bar')
        exception = simple(error)
        self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
        # Make sure message body is present
        # NOTE(review): .message is the Python-2-era exception attribute;
        # confirm boto still sets it under Python 3.
        self.assertEqual(exception.message, 'bar')
| mit | Python |
|
de38b3e7b3d8458920b913316b06bb10b886df9f | Implement ArgumentSelector for overload disambiguation | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | thinglang/symbols/argument_selector.py | thinglang/symbols/argument_selector.py | import collections
import copy
from thinglang.compiler.errors import NoMatchingOverload
from thinglang.lexer.values.identifier import Identifier
# One overload candidate: the symbol itself plus the argument types it still
# expects, consumed front-to-back as call arguments are observed.
SymbolOption = collections.namedtuple('SymbolOption', ['symbol', 'remaining_arguments'])
class ArgumentSelector(object):
    """
    Aids in disambiguating overloaded method symbols contained in MergedSymbol
    objects. Tracks the argument types observed so far, filtering out overload
    candidates as each argument is processed. If exactly one matching overload
    survives it is returned; otherwise NoMatchingOverload is raised.
    """
    def __init__(self, symbols):
        self.symbols = symbols
        self.collected_arguments = []
        # Deep-copy each overload's argument list so pops do not mutate the
        # underlying symbol.
        self.options = [
            SymbolOption(symbol, copy.deepcopy(symbol.arguments))
            for symbol in symbols
        ]
    def constraint(self, resolved):
        """
        Drops every option group that does not expect the resolved type as
        its next argument.
        """
        self.collected_arguments.append(resolved)
        survivors = []
        for candidate in self.options:
            if not candidate.remaining_arguments:
                continue
            if self.type_match(resolved, candidate.remaining_arguments.pop(0)):
                survivors.append(candidate)
        self.options = survivors
        if not survivors:
            raise NoMatchingOverload(self.symbols, self.collected_arguments)
    def disambiguate(self):
        """
        Selects the single overload whose argument list was fully consumed.
        """
        exhausted = [opt for opt in self.options if not opt.remaining_arguments]
        if len(exhausted) != 1:
            raise NoMatchingOverload(self.symbols, self.collected_arguments)
        return exhausted[0].symbol
    @staticmethod
    def type_match(resolved, expected_type):
        """
        Checks if two types match (TODO: take inheritance chains into account)
        """
        return expected_type == Identifier('object') or resolved.type == expected_type
| mit | Python |
|
609bd2a0712ee488dd76bb3619aef70343adb304 | add test__doctests.py | lindenlab/eventlet,tempbottle/eventlet,tempbottle/eventlet,collinstocks/eventlet,lindenlab/eventlet,collinstocks/eventlet,lindenlab/eventlet | greentest/test__doctests.py | greentest/test__doctests.py | import os
import re
import doctest
import unittest
import eventlet
# Collect every module in the installed eventlet package tree as
# (dotted_name, file_path) pairs.
base = os.path.dirname(eventlet.__file__)
modules = set()
for path, dirs, files in os.walk(base):
    # Map the filesystem path back to a dotted package name.
    # NOTE(review): assumes '/' as the path separator (POSIX only) --
    # confirm before running on Windows.
    package = 'eventlet' + path.replace(base, '').replace('/', '.')
    modules.add((package, os.path.join(path, '__init__.py')))
    for f in files:
        module = None
        if f.endswith('.py'):
            module = f[:-3]
        if module:
            modules.add((package + '.' + module, os.path.join(path, f)))
# Build one doctest suite per module whose source contains '>>> ' prompts,
# then run everything with the verbose text runner.
suite = unittest.TestSuite()
tests_count = 0
modules_count = 0
for m, path in modules:
    if re.search('^\s*>>> ', open(path).read(), re.M):
        s = doctest.DocTestSuite(m)
        print '%s (from %s): %s tests' % (m, path, len(s._tests))
        suite.addTest(s)
        modules_count += 1
        tests_count += len(s._tests)
print 'Total: %s tests in %s modules' % (tests_count, modules_count)
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite)
| mit | Python |
|
2885adb781ba5179e0dcc7645644bcb182e7bfe7 | Create hacks/eKoomerce/__init__.py | priyamanibhat/eKoomerce,priyamanibhat/eKoomerce,priyamanibhat/eKoomerce | hacks/eKoomerce/__init__.py | hacks/eKoomerce/__init__.py | import bs4
| mit | Python |
|
d9cce6f06503f1527d56d40d3037f46344c517d4 | Add PerUserData utility. | rhertzog/librement,rhertzog/librement,rhertzog/librement | src/librement/utils/user_data.py | src/librement/utils/user_data.py | from django.db import models
from django.db.models.signals import post_save, pre_delete
from django.contrib.auth.models import User
def PerUserData(related_name=None):
    """
    Class factory that returns an abstract model attached to a ``User`` object
    that creates and destroys concrete child instances where required.
    Example usage::
        class ToppingPreferences(PerUserData('toppings')):
            pepperoni = models.BooleanField(default=True)
            anchovies = models.BooleanField(default=False)
    >>> u = User.objects.create_user('test', 'example@example.com')
    >>> u.toppings # ToppingPreferences created automatically
    <ToppingPreferences: user=test>
    >>> u.toppings.anchovies
    False
    """
    class UserDataBase(models.base.ModelBase):
        # Metaclass hook: when a concrete subclass is defined, wire up User
        # signals so its rows are created/deleted alongside User rows.
        def __new__(cls, name, bases, attrs):
            model = super(UserDataBase, cls).__new__(cls, name, bases, attrs)
            if model._meta.abstract:
                # The abstract base itself gets no signal handlers.
                return model
            def on_create(sender, instance, created, *args, **kwargs):
                if created:
                    model.objects.create(user=instance)
            def on_delete(sender, instance, *args, **kwargs):
                model.objects.filter(pk=instance).delete()
            # weak=False keeps these closures alive for the process lifetime.
            post_save.connect(on_create, sender=User, weak=False)
            pre_delete.connect(on_delete, sender=User, weak=False)
            return model
    class UserData(models.Model):
        user = models.OneToOneField(
            'auth.User',
            primary_key=True,
            related_name=related_name,
        )
        __metaclass__ = UserDataBase
        class Meta:
            abstract = True
        def __unicode__(self):
            return 'user=%s' % self.user.username
    return UserData
| agpl-3.0 | Python |
|
e58d30a64ae2ce2962dbaaf119e5e4c4ee33e4e7 | Create pub.py | Python-IoT/Smart-IoT-Planting-System,Python-IoT/Smart-IoT-Planting-System | cloud/mqtt_server/pub.py | cloud/mqtt_server/pub.py | #!/usr/bin/env python
import asyncio

from hbmqtt.client import MQTTClient, ConnectException
from hbmqtt.mqtt.constants import QOS_0, QOS_1, QOS_2
async def publish_test():
    """Publish one test message per QoS level to the local broker, then
    disconnect; on connection failure, report it and stop the event loop."""
    try:
        C = MQTTClient()
        await C.connect('mqtt://192.168.0.4:1883/')
        # One message per supported QoS level; the return values are unused.
        await C.publish('server', 'MESSAGE-QOS_0'.encode(), qos=QOS_0)
        await C.publish('server', 'MESSAGE-QOS_1'.encode(), qos=QOS_1)
        await C.publish('gateway', 'MESSAGE-QOS_2'.encode(), qos=QOS_2)
        print("messages published")
        await C.disconnect()
    except ConnectException as ce:
        # Bug fix: ConnectException was never imported, so this handler
        # itself raised NameError; it is now imported from hbmqtt.client.
        print("Connection failed: %s" % ce)
        asyncio.get_event_loop().stop()
# Run the one-shot publish when executed as a script.
if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(publish_test())
| mit | Python |
|
8551c56a9fea5d21ea9dc6761eff8e93d451f6b3 | Add pip setup.py | google/gin-config,google/gin-config | setup.py | setup.py | """Setup script for gin-config.
See:
https://github.com/google/gin-config
"""
import codecs
from os import path
from setuptools import find_packages
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with codecs.open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
setup(
    name='gin-config',
    version='0.1',
    include_package_data=True,
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),  # Required
    extras_require={  # Optional
        'tf': ['tensorflow'],
        'test': ['coverage'],
    },
    description='Gin-config: a lightweight configuration library for Python',
    long_description=long_description,
    # Without this, PyPI renders the Markdown README as plain text.
    long_description_content_type='text/markdown',
    url='https://github.com/google/gin-config',  # Optional
    author='The Gin-Config Team',  # Optional
    classifiers=[  # Optional
        'Development Status :: 3 - Alpha',
        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        # NOTE(review): not an official trove classifier -- confirm intent.
        'Topic :: Software Development :: ML Tools',
        # Official trove name for the Apache license classifier.
        'License :: OSI Approved :: Apache Software License',
        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    project_urls={  # Optional
        'Bug Reports': 'https://github.com/google/gin-config/issues',
        'Source': 'https://github.com/google/gin-config',
    },
)
| apache-2.0 | Python |
|
379c5e73d767753142a62ba57f5928acf754b508 | Add simple setup.py for ease of system-installing | lahwran/crow2 | setup.py | setup.py | from setuptools import setup, find_packages
# Minimal packaging metadata for the crow2 daemon; the 'crow2' entry
# script is installed from bin/.
setup(
    name="crow2",
    version="0.1.dev0",
    packages=find_packages(),
    scripts=["bin/crow2"],
    install_requires=["twisted", "zope.interface"]
)
| mit | Python |
|
c2d14b8c3beaee3cff498fc02106751fce8e8e1c | Add setup.py | bridgewell/pb2df,jason2506/pb2df | setup.py | setup.py | import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import pb2df
class PyTest(TestCommand):
    """``python setup.py test`` command that delegates to py.test."""
    user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.pytest_args = []
    def finalize_options(self):
        TestCommand.finalize_options(self)
        # No explicit args/suite: py.test does its own test discovery.
        self.test_args = []
        self.test_suite = True
    def run_tests(self):
        # Import here, because outside this command the test eggs are not
        # loaded yet.
        import pytest
        errno = pytest.main(self.pytest_args)
        # Propagate py.test's exit status to the calling shell.
        sys.exit(errno)
# Packaging metadata; version/author/license are read from the pb2df
# package itself, and the 'test' command is routed through PyTest above.
setup(
    name='pb2df',
    version=pb2df.__version__,
    author=pb2df.__author__,
    author_email='',
    description='Convert ProtoBuf objects to Spark DataFrame.',
    long_description=__doc__,
    url='https://github.com/jason2506/pb2df',
    license=pb2df.__license__,
    packages=find_packages(),
    zip_safe=False,
    platforms='any',
    install_requires=['protobuf'],
    tests_require=['pytest'],
    cmdclass={'test': PyTest},
)
| bsd-3-clause | Python |
|
a7bf54f417576bfc355e1851258e711dadd73ad3 | Add python trove classifiers | orbitvu/django-taggit,cimani/django-taggit,izquierdo/django-taggit,doselect/django-taggit,kaedroho/django-taggit,Maplecroft/django-taggit,vhf/django-taggit,7kfpun/django-taggit,nealtodd/django-taggit,laanlabs/django-taggit,kminkov/django-taggit,Eksmo/django-taggit,IRI-Research/django-taggit,guoqiao/django-taggit,tamarmot/django-taggit,adrian-sgn/django-taggit,gem/django-taggit,twig/django-taggit,eugena/django-taggit,benjaminrigaud/django-taggit | setup.py | setup.py | from setuptools import setup, find_packages
from taggit import VERSION
# Read the long description shown on PyPI; a context manager guarantees the
# handle is closed (the original relied on an explicit f.close()).
with open('README.rst') as f:
    readme = f.read()
setup(
    name='django-taggit',
    version=".".join(map(str, VERSION)),
    description='django-taggit is a reusable Django application for simple tagging.',
    long_description=readme,
    author='Alex Gaynor',
    author_email='alex.gaynor@gmail.com',
    url='http://github.com/alex/django-taggit/tree/master',
    packages=find_packages(),
    zip_safe=False,
    package_data = {
        'taggit': [
            'locale/*/LC_MESSAGES/*',
        ],
    },
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        # The bare classifier appeared twice in the original; once suffices.
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Framework :: Django',
    ],
    test_suite='runtests.runtests',
)
| from setuptools import setup, find_packages
from taggit import VERSION
f = open('README.rst')
readme = f.read()
f.close()
setup(
name='django-taggit',
version=".".join(map(str, VERSION)),
description='django-taggit is a reusable Django application for simple tagging.',
long_description=readme,
author='Alex Gaynor',
author_email='alex.gaynor@gmail.com',
url='http://github.com/alex/django-taggit/tree/master',
packages=find_packages(),
zip_safe=False,
package_data = {
'taggit': [
'locale/*/LC_MESSAGES/*',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
test_suite='runtests.runtests',
)
| bsd-3-clause | Python |
733289636661f3c0034a66eaa8763058ef43796d | update setup.py | arcturusannamalai/open-tamil,tuxnani/open-telugu,Ezhil-Language-Foundation/open-tamil,tuxnani/open-telugu,tshrinivasan/open-tamil,tuxnani/open-telugu,tshrinivasan/open-tamil,atvKumar/open-tamil,tshrinivasan/open-tamil,tshrinivasan/open-tamil,atvKumar/open-tamil,atvKumar/open-tamil,Ezhil-Language-Foundation/open-tamil,arcturusannamalai/open-tamil,atvKumar/open-tamil,tuxnani/open-telugu,atvKumar/open-tamil,Ezhil-Language-Foundation/open-tamil,arcturusannamalai/open-tamil,Ezhil-Language-Foundation/open-tamil,atvKumar/open-tamil,arcturusannamalai/open-tamil,arcturusannamalai/open-tamil,Ezhil-Language-Foundation/open-tamil,Ezhil-Language-Foundation/open-tamil,arcturusannamalai/open-tamil,Ezhil-Language-Foundation/open-tamil,tshrinivasan/open-tamil,tshrinivasan/open-tamil,tuxnani/open-telugu,atvKumar/open-tamil,Ezhil-Language-Foundation/open-tamil,Ezhil-Language-Foundation/open-tamil,arcturusannamalai/open-tamil,tshrinivasan/open-tamil | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
from codecs import open
setup(name='Open-Tamil',
version='0.1-dev',
description='Tamil language text processing tools',
author='Muthiah Annamalai',
author_email='ezhillang@gmail.com',
url='https://github.com/arcturusannamalai/open-tamil',
packages=['tamil'],
license='GPLv3',
platforms='PC,Linux,Mac',
classifiers='Natural Language :: Tamil',
long_description=open('README.md','r','UTF-8').read(),
download_url='https://github.com/arcturusannamalai/open-tamil/archive/latest.zip',#pip
)
| #!/usr/bin/env python
from distutils.core import setup
from codecs import open
setup(name='Open Tamil',
version='0.1-dev',
description='Tamil language text processing tools',
author='Muthiah Annamalai',
author_email='ezhillang@gmail.com',
url='https://github.com/arcturusannamalai/open-tamil',
packages=['tamil'],
license='GPLv3',
platforms='PC,Linux,Mac',
classifiers='Natural Language :: Tamil',
long_description=open('README.md','r','UTF-8').read(),
download_url='https://github.com/arcturusannamalai/open-tamil/archive/latest.zip',#pip
)
| mit | Python |
231d050fe611adb201cd7ae55f52212d0b84caa1 | Check for pandoc. add pyandoc to setup_requires | djipko/sparts,facebook/sparts,djipko/sparts,fmoo/sparts,facebook/sparts,pshuff/sparts,fmoo/sparts,bboozzoo/sparts,bboozzoo/sparts,pshuff/sparts | setup.py | setup.py | from setuptools import setup, find_packages, Command
from setuptools.command.build_py import build_py as _build_py
from distutils.spawn import find_executable
import os.path
import imp
import pandoc.core
# Locate the pandoc binary used to convert README.md to reST.
pandoc.core.PANDOC_PATH = find_executable('pandoc')
# Fail fast with a clear message; an explicit check (rather than ``assert``)
# still fires when Python runs with -O.
if pandoc.core.PANDOC_PATH is None:
    raise SystemExit(
        "'pandoc' is a required system binary to generate documentation.\n"
        "Please install it somewhere in your PATH to run this command.")
ROOT = os.path.abspath(os.path.dirname(__file__))
def read(fname):
    """Return the text of *fname*, resolved relative to the repo root."""
    # Use a context manager so the handle is closed promptly; the original
    # leaked it until garbage collection.
    with open(os.path.join(ROOT, fname)) as f:
        return f.read()
def read_md_as_rest(fname):
    """Convert a Markdown file (relative to the repo root) to reST via pandoc."""
    doc = pandoc.Document()
    doc.markdown = read(fname)
    return doc.rst
def version():
    """Import the sparts package from this checkout and return __version__."""
    # NOTE(review): ``imp`` is deprecated on modern Python; fine for the
    # vintage of this script, but confirm before upgrading the interpreter.
    file, pathname, description = imp.find_module('sparts', [ROOT])
    return imp.load_module('sparts', file, pathname, description).__version__
class gen_thrift(Command):
    """setup.py command: generate Python sources from the thrift/ IDL files."""
    user_options=[]
    def initialize_options(self):
        pass
    def finalize_options(self):
        pass
    def run(self):
        # Output goes under sparts/gen; one thrift invocation per IDL file.
        self.mkpath(os.path.join(ROOT, 'sparts', 'gen'))
        for f in os.listdir(os.path.join(ROOT, 'thrift')):
            self.spawn(['thrift', '-out', os.path.join(ROOT, 'sparts', 'gen'),
                        '-v', '--gen', 'py:new_style',
                        os.path.join(ROOT, 'thrift', f)])
class build_py(_build_py):
    """build_py that first regenerates the thrift bindings."""
    def run(self):
        self.run_command('gen_thrift')
        _build_py.run(self)
# Packaging metadata; thrift bindings are regenerated during build_py.
setup(
    name="sparts",
    version=version(),
    packages=find_packages(),
    description="Build services in python with as little code as possible",
    long_description=read_md_as_rest("README.md"),
    install_requires=[],
    setup_requires=['pyandoc'],
    author='Peter Ruibal',
    author_email='ruibalp@gmail.com',
    license='ISC',
    # Typo fix: 'boostrap' -> 'bootstrap' so keyword search finds the package.
    keywords='service bootstrap daemon thrift tornado',
    url='http://github.com/fmoo/sparts',
    test_suite="tests",
    cmdclass={'gen_thrift': gen_thrift,
              'build_py': build_py},
)
| from setuptools import setup, find_packages, Command
from setuptools.command.build_py import build_py as _build_py
from distutils.spawn import find_executable
import os.path
import imp
import pandoc.core
pandoc.core.PANDOC_PATH = find_executable('pandoc')
ROOT = os.path.abspath(os.path.dirname(__file__))
def read(fname):
return open(os.path.join(ROOT, fname)).read()
def read_md_as_rest(fname):
doc = pandoc.Document()
doc.markdown = read(fname)
return doc.rst
def version():
file, pathname, description = imp.find_module('sparts', [ROOT])
return imp.load_module('sparts', file, pathname, description).__version__
class gen_thrift(Command):
user_options=[]
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
self.mkpath(os.path.join(ROOT, 'sparts', 'gen'))
for f in os.listdir(os.path.join(ROOT, 'thrift')):
self.spawn(['thrift', '-out', os.path.join(ROOT, 'sparts', 'gen'),
'-v', '--gen', 'py:new_style',
os.path.join(ROOT, 'thrift', f)])
class build_py(_build_py):
def run(self):
self.run_command('gen_thrift')
_build_py.run(self)
setup(
name="sparts",
version=version(),
packages=find_packages(),
description="Build services in python with as little code as possible",
long_description=read_md_as_rest("README.md"),
install_requires=[],
author='Peter Ruibal',
author_email='ruibalp@gmail.com',
license='ISC',
keywords='service boostrap daemon thrift tornado',
url='http://github.com/fmoo/sparts',
test_suite="tests",
cmdclass={'gen_thrift': gen_thrift,
'build_py': build_py},
)
| bsd-3-clause | Python |
5a16ada916d719a0499d75bc5c82aaa5228dec15 | Split off IP/hostname munging to addr_util | catap/namebench,jimmsta/namebench-1 | libnamebench/addr_util.py | libnamebench/addr_util.py | # Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions related to IP Addresses & Hostnames."""
# TODO(tstromberg): Investigate replacement with ipaddr library
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import math
import re
import util
import os.path
import socket
import sys
import traceback
import zlib
# TODO(tstromberg): Find a way to combine the following two regexps.
# Used to decide whether or not to benchmark a name
INTERNAL_RE = re.compile('^0|\.pro[md]z*\.|\.corp|\.bor|\.hot$|internal|dmz|\._[ut][dc]p\.|intra|\.\w$|\.\w{5,}$', re.IGNORECASE)
# Used to decide if a hostname should be censored later.
PRIVATE_RE = re.compile('^\w+dc\.|^\w+ds\.|^\w+sv\.|^\w+nt\.|\.corp|internal|intranet|\.local', re.IGNORECASE)
# ^.*[\w-]+\.[\w-]+\.[\w-]+\.[a-zA-Z]+\.$|^[\w-]+\.[\w-]{3,}\.[a-zA-Z]+\.$
FQDN_RE = re.compile('^.*\..*\..*\..*\.$|^.*\.[\w-]*\.\w{3,4}\.$|^[\w-]+\.[\w-]{4,}\.\w+\.')
IP_RE = re.compile('^[0-9.]+$')
def ExtractIPsFromString(ip_string):
  """Return a list of the IP address strings found in ip_string.

  IPv6 candidates are listed first, then IPv4 (dotted-quad) matches.
  """
  # IPV6: if this regexp is too loose, see Regexp-IPv6 in CPAN for inspiration.
  found = re.findall('[\dabcdef:]+:[\dabcdef:]+', ip_string, re.IGNORECASE)
  found += re.findall('\d+\.\d+\.\d+\.+\d+', ip_string)
  return found
def ExtractIPTuplesFromString(ip_string):
  """Return an (ip, ip) pair for every address found in ip_string."""
  return [(ip, ip) for ip in ExtractIPsFromString(ip_string)]
def IsPrivateHostname(hostname):
  """Basic matching to determine if the hostname is likely to be 'internal'."""
  return bool(PRIVATE_RE.search(hostname))
def IsLoopbackIP(ip):
  """Return True if ip is an IPv4 (127.x) or IPv6 (::1) loopback address.

  (The original docstring was copy-pasted from IsPrivateIP and wrong.)
  """
  return ip.startswith('127.') or ip == '::1'
def IsPrivateIP(ip):
  """Check an IP against the RFC 1918 private ranges.

  Returns: the number of leading octets to preserve when masking
  (see MaskIPBits), or None for a public address.
  """
  _PRIVATE_BLOCKS = (
      ('^10\.', 1),
      ('^192\.168', 2),
      ('^172\.(1[6-9]|2[0-9]|3[0-1])\.', 1),
  )
  for pattern, keep_octets in _PRIVATE_BLOCKS:
    if re.match(pattern, ip):
      return keep_octets
  return None
def MaskIPBits(ip, use_bits):
  """Mask an IP, but still keep a meaningful checksum.

  Keeps the first use_bits dotted octets verbatim and replaces the rest with
  'x-NNNN', the trailing characters of a CRC32 over the dropped octets, so
  identical source addresses always mask to the same string.
  """
  ip_parts = ip.split('.')
  # NOTE(review): zlib.crc32 over a str is Python-2 behavior; Python 3
  # requires bytes here -- confirm the interpreter target before porting.
  checksum = zlib.crc32(''.join(ip_parts[use_bits:]))
  masked_ip = '.'.join(ip_parts[0:use_bits])
  return masked_ip + ".x-" + str(checksum)[-4:]
def MaskPrivateHost(ip, hostname, name):
  """Mask unnamed private IP's.

  Args:
    ip: IP address string.
    hostname: hostname associated with the IP.
    name: display name; 'SYS-<ip>'-style names count as unnamed.

  Returns:
    (ip, hostname, name) tuple, masked when the entry looks internal.
  """
  # If we have a name not listed as SYS-x.x.x.x, then we're clear.
  if name and ip not in name:
    return (ip, hostname, name)
  use_bits = IsPrivateIP(ip)
  if use_bits:
    ip = MaskIPBits(ip, use_bits)
    hostname = 'internal.ip'
  elif IsPrivateHostname(hostname):
    ip = MaskIPBits(ip, 2)
    hostname = 'internal.name'
  # Keep a masked SYS- placeholder; drop any other (private) name.
  if 'SYS-' in name:
    name = "SYS-%s" % ip
  else:
    name = ''
  return (ip, hostname, name)
| apache-2.0 | Python |
|
e458733b0aa1cbb142fc6818ae1f7cf84bef6518 | Add setup | xiaohaiguicc/friendly-computing-machine | setup.py | setup.py | import setuptools
# Run packaging only when executed as a script (not on import).
if __name__ == "__main__":
    setuptools.setup(
        name='friendly_computing_machine',
        version="0.1.1",
        description='A starting template for Python programs',
        author='CHENXI CAI',
        author_email='ccai28@emory.edu',
        url="https://github.com/xiaohaiguicc/friendly-computing-machine",
        license='BSD-3C',
        packages=setuptools.find_packages(),
        install_requires=[
            'numpy>=1.7',
        ],
        # Optional extras: docs tooling and the test toolchain.
        extras_require={
            'docs': [
                'sphinx==1.2.3', # autodoc was broken in 1.3.1
                'sphinxcontrib-napoleon',
                'sphinx_rtd_theme',
                'numpydoc',
            ],
            'tests': [
                'pytest',
                'pytest-cov',
                'pytest-pep8',
                'tox',
            ],
        },
        tests_require=[
            'pytest',
            'pytest-cov',
            'pytest-pep8',
            'tox',
        ],
        classifiers=[
            'Development Status :: 4 - Beta',
            'Intended Audience :: Science/Research',
            'Programming Language :: Python :: 2.7',
            'Programming Language :: Python :: 3',
        ],
        zip_safe=True,
    )
| bsd-3-clause | Python |
|
bc17ea522b0120ec7308ba0309d87b18ba9163d9 | Add setup.py | joelverhagen/PingdomBackup | setup.py | setup.py | import sys
from setuptools import setup
# Packaging metadata for the pingdombackup distribution.
setup(
    name='pingdombackup',
    version="0.1.0",
    description='Backup Pingdom logs',
    long_description='Backup Pingdom result logs to a SQLite database.',
    author='Joel Verhagen',
    author_email='joel.verhagen@gmail.com',
    install_requires=['requests>=2.1.0'],
    url='https://github.com/joelverhagen/PingdomBackup',
    packages=['pingdombackup'],
    license='MIT',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Topic :: System :: Monitoring'
    ]
)
| mit | Python |
|
1dd8a34cba565f70a30a6c8ab4604a489377e752 | Add template remove script | tiffanyj41/hermes,tiffanyj41/hermes,tiffanyj41/hermes,tiffanyj41/hermes | src/utils/remove_templates.py | src/utils/remove_templates.py | def remove_templates(text):
"""Remove all text contained between '{{' and '}}', even in the case of
nested templates.
Args:
text (str): Full text of a Wikipedia article as a single string.
Returns:
str: The full text with all templates removed.
"""
start_char = 0
while '{{' in text:
depth = 0
prev_char = None
open_pos = None
close_pos = None
for pos in xrange(start_char, len(text)):
char = text[pos]
# Open Marker
if char == '{' and prev_char == '{':
if depth == 0:
open_pos = pos-1
# When we scan the string again after removing the chunk
# that starts here, we know all text before is template
# free, so we mark this position for the next while
# iteration
start_char = open_pos
depth += 1
# Close Marker
elif char == '}' and prev_char == '}':
depth -= 1
if depth == 0:
close_pos = pos
# Remove all text between the open and close markers
text = text[:open_pos] + text[close_pos+1:]
break
prev_char = char
return text
| apache-2.0 | Python |
|
ad714cbf92d2984c9cc855e99e31bf622c38a220 | add setup file | sigurdga/samklang-menu,sigurdga/samklang-menu | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
# Early development snapshot: only name/version/packages are declared.
setup(
    name = 's7n-menu',
    version = "1a1",
    packages = ['s7n', 's7n.menu'],
)
| agpl-3.0 | Python |
|
ff147838ce320c97c34e00be4dafb63b6d0603fc | Add setup.py | seguri/python-oneliner | setup.py | setup.py | """A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
import sys
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='oneliner',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='0.0.1',
description='Module for practical Python one-liners',
long_description=long_description,
# The project's main homepage.
url='https://github.com/seguri/python-oneliner',
# Author details
author='Marco Seguri',
author_email='marco@seguri.name',
# Choose your license
license='Apache 2.0',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: Apache 2.0 License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3',
],
# What does your project relate to?
keywords='oneliner repl',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
#packages=find_packages(exclude=['contrib', 'docs', 'tests']),
#packages=['oneliner'],
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
py_modules=["oneliner"],
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
#install_requires=['peppercorn'],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
#extras_require={
# 'dev': ['check-manifest'],
# 'test': ['coverage'],
#},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
#package_data={
# 'sample': ['package_data.dat'],
#},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
#data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'pyl-{0.major}.{0.minor}=oneliner:main'.format(sys.version_info),
],
},
)
| apache-2.0 | Python |
|
73d9b80d6fa1cf75dba73e396d1f5d3bd4963df6 | Create setup.py | sevenbigcat/wthen | setup.py | setup.py | from distutils.core import setup
setup(
name = 'wthen',
packages = ['wthen'], # this must be the same as the name above
version = '0.1',
description = 'A simple rule engine with YAML format',
author = 'Alex Yu',
author_email = 'mltest2000@aliyun.com',
url = 'https://github.com/sevenbigcat/wthen', # use the URL to the github repo
download_url = 'https://github.com/sevenbigcat/wtehn/archive/0.1.tar.gz', # I'll explain this in a second
keywords = ['rule engine', 'ECA', 'YAML'], # arbitrary keywords
classifiers = [],
)
| mit | Python |
|
62e126908e08544f8595be368d300b0abaca82d3 | support old setuptools versions | python-hyper/hyper-h2,vladmunteanu/hyper-h2,python-hyper/hyper-h2,bhavishyagopesh/hyper-h2,Kriechi/hyper-h2,vladmunteanu/hyper-h2,Kriechi/hyper-h2 | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Get the version
version_regex = r'__version__ = ["\']([^"\']*)["\']'
with open('h2/__init__.py', 'r') as f:
text = f.read()
match = re.search(version_regex, text)
if match:
version = match.group(1)
else:
raise RuntimeError("No version number found!")
# Stealing this from Kenneth Reitz
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
packages = [
'h2',
]
setup(
name='h2',
version=version,
description='HTTP/2 State-Machine based protocol implementation',
long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
author='Cory Benfield',
author_email='cory@lukasa.co.uk',
url='http://hyper.rtfd.org',
packages=packages,
package_data={'': ['LICENSE', 'README.rst', 'CONTRIBUTORS.rst', 'HISTORY.rst', 'NOTICES']},
package_dir={'h2': 'h2'},
include_package_data=True,
license='MIT License',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
install_requires=[
'hyperframe>=3.1, <4',
'hpack>=2.0, <3',
],
extras_require={
':python_version == "2.7" or python_version == "3.3"': ['enum34>=1.0.4, <1.1'],
}
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Get the version
version_regex = r'__version__ = ["\']([^"\']*)["\']'
with open('h2/__init__.py', 'r') as f:
text = f.read()
match = re.search(version_regex, text)
if match:
version = match.group(1)
else:
raise RuntimeError("No version number found!")
# Stealing this from Kenneth Reitz
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
packages = [
'h2',
]
setup(
name='h2',
version=version,
description='HTTP/2 State-Machine based protocol implementation',
long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
author='Cory Benfield',
author_email='cory@lukasa.co.uk',
url='http://hyper.rtfd.org',
packages=packages,
package_data={'': ['LICENSE', 'README.rst', 'CONTRIBUTORS.rst', 'HISTORY.rst', 'NOTICES']},
package_dir={'h2': 'h2'},
include_package_data=True,
license='MIT License',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
install_requires=[
'hyperframe~=3.1',
'hpack~=2.0',
],
extras_require={
':python_version<"3.4"': ['enum34~=1.0.4'],
}
)
| mit | Python |
4d16ae6d1ad8b308c14c23e802349001b81ae461 | Add Python-based opcode enum parser | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | thinglang/compiler/opcodes.py | thinglang/compiler/opcodes.py | import os
import re
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ENUM_PARSER = re.compile(r'(.*)\s*?=\s*?(\d+)')
def read_opcodes():
    """Yield (name, value) pairs parsed from the C++ ``enum class Opcode``
    declaration in thingc/execution/Opcode.h."""
    with open(os.path.join(BASE_DIR, '..', '..', 'thingc', 'execution', 'Opcode.h')) as f:
        # Advance the file iterator to the enum header line.
        for line in f:
            if 'enum class Opcode' in line:
                break

        # Parse 'NAME = N' entries until the closing brace of the enum.
        for decl in f:
            decl = decl.strip()

            if not decl:
                continue

            if '}' in decl:
                break

            groups = ENUM_PARSER.search(decl).groups()
            yield (groups[0].strip(), int(groups[1]))


OPCODES = dict(read_opcodes())

# Sanity check: opcode values must be dense, exactly 0..len(OPCODES)-1.
assert set(range(len(OPCODES))) == set(OPCODES.values())
| mit | Python |
|
ac823e61fd214f9818bb7a893a8ed52a3bfa3af4 | Add utils for graph visualization. | cerrno/neurokernel | neurokernel/conn_utils.py | neurokernel/conn_utils.py | #!/usr/bin/env python
import itertools
import os
import tempfile
import conn
import matplotlib.pyplot as plt
import networkx as nx
def imdisp(f):
"""
Display the specified image file using matplotlib.
"""
im = plt.imread(f)
plt.imshow(im)
plt.axis('off')
plt.draw()
return im
def show_pydot(g):
"""
Display a networkx graph using pydot.
"""
fd = tempfile.NamedTemporaryFile()
fd.close()
p = nx.to_pydot(g)
p.write_jpg(fd.name)
imdisp(fd.name)
os.remove(fd.name)
def show_pygraphviz(g, prog='dot', graph_attr={}, node_attr={}, edge_attr={}):
"""
Display a networkx graph using pygraphviz.
"""
fd = tempfile.NamedTemporaryFile(suffix='.jpg')
fd.close()
p = nx.to_agraph(g)
p.graph_attr.update(graph_attr)
p.node_attr.update(node_attr)
p.edge_attr.update(edge_attr)
p.draw(fd.name, prog=prog)
imdisp(fd.name)
os.remove(fd.name)
def conn_to_bipartite(c):
"""
Convert a Connectivity object into a bipartite NetworkX multigraph.
"""
g = nx.MultiDiGraph()
src_nodes = ['src_%i' % i for i in xrange(c.N_src)]
dest_nodes = ['dest_%i' % i for i in xrange(c.N_dest)]
g.add_nodes_from(src_nodes)
g.add_nodes_from(dest_nodes)
for key in c._data.keys():
syn, dir, name = key.split('/')
syn = int(syn)
if name == 'conn':
if dir == '+':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('src_%i' % src, 'dest_%i' % dest)
elif dir == '-':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('dest_%i' % dest, 'src_%i' % src)
else:
raise ValueError('invalid direction')
return g
| bsd-3-clause | Python |
|
e663394d1dc4de7b8e3a877f0c9870a804e804f2 | Make tests runnable from lifelines.tests | nerdless/lifelines,CamDavidsonPilon/lifelines,jstoxrocky/lifelines,wavelets/lifelines | lifelines/tests/__main__.py | lifelines/tests/__main__.py | import unittest
from . import test_suite
if __name__ == '__main__':
unittest.main(module=test_suite)
| mit | Python |
|
525a8438bd601592c4f878ca5d42d3dab8943be0 | Test that specific Failures are caught before parent Failures | 0xPoly/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe | ooni/tests/test_errors.py | ooni/tests/test_errors.py | from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after it's parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
| bsd-2-clause | Python |
|
90d079928eaf48e370d21417e4d6e649ec0f5f6f | Update tasks and evaluate viewports on saving | phha/taskwiki,Spirotot/taskwiki | taskwiki/taskwiki.py | taskwiki/taskwiki.py | import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Task is identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.update_tasks()
cache.save_tasks()
cache.update_buffer()
cache.evaluate_viewports()
if __name__ == '__main__':
update_from_tw()
| import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Task is identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.save_tasks()
cache.update_buffer()
if __name__ == '__main__':
update_from_tw()
| mit | Python |
f2e9f2adbc81a37847bbe27401dd852317243486 | add a test for the session tables | sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint | test/sessionstest.py | test/sessionstest.py | #!/usr/bin/python2.4
#
# Copyright (c) 2004-2005 rpath, Inc.
#
import time
import testsuite
testsuite.setup()
import sqlite3
import rephelp
from mint_rephelp import MintRepositoryHelper
from mint import dbversion
from mint import sessiondb
class SessionTest(MintRepositoryHelper):
    """Exercises sessiondb.SessionsTable: save, load, and expiry cleanup."""

    def testSessions(self):
        st = sessiondb.SessionsTable(self.db)
        # create a session that is already expired: last accessed 20s
        # ago with a 10s timeout
        st.save("abcdefg123456", {'_data': 'data',
                                  '_accessed': time.time() - 20,
                                  '_timeout': 10}
                )
        # load and check data
        d = st.load("abcdefg123456")
        assert(d['_data'] == 'data')
        # clean up expired sessions
        st.cleanup()
        # confirm that expired session went away
        d = st.load("abcdefg123456")
        assert(not d)

    def setUp(self):
        # BUGFIX: 'os' was never imported at module level; the bare
        # "except: pass" below used to swallow the resulting NameError,
        # so a stale db file was silently never removed.
        import os
        rephelp.RepositoryHelper.setUp(self)
        try:
            os.unlink(self.reposDir + "/db")
        except OSError:
            # The db file legitimately may not exist yet.
            pass
        self.db = sqlite3.connect(self.reposDir + "/db")
        self.versionTable = dbversion.VersionTable(self.db)
        self.db.commit()
if __name__ == "__main__":
testsuite.main()
| apache-2.0 | Python |
|
ddbfc403034c1ed98590088889687ff23f222aab | add package | EmreAtes/spack,mfherbst/spack,matthiasdiener/spack,mfherbst/spack,skosukhin/spack,krafczyk/spack,EmreAtes/spack,lgarren/spack,krafczyk/spack,matthiasdiener/spack,TheTimmy/spack,iulian787/spack,krafczyk/spack,TheTimmy/spack,iulian787/spack,skosukhin/spack,skosukhin/spack,matthiasdiener/spack,TheTimmy/spack,EmreAtes/spack,EmreAtes/spack,mfherbst/spack,LLNL/spack,TheTimmy/spack,tmerrick1/spack,EmreAtes/spack,mfherbst/spack,skosukhin/spack,mfherbst/spack,skosukhin/spack,iulian787/spack,lgarren/spack,matthiasdiener/spack,tmerrick1/spack,krafczyk/spack,krafczyk/spack,iulian787/spack,matthiasdiener/spack,tmerrick1/spack,lgarren/spack,tmerrick1/spack,TheTimmy/spack,lgarren/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,tmerrick1/spack,LLNL/spack,lgarren/spack | var/spack/packages/paraview/package.py | var/spack/packages/paraview/package.py | from spack import *
class Paraview(Package):
    """Spack package for ParaView, the VTK-based data analysis and
    visualization application."""

    homepage = 'http://www.paraview.org'
    url = 'http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz'

    version('4.4.0', 'fa1569857dd680ebb4d7ff89c2227378', url='http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz')

    variant('python', default=False, description='Enable Python support')
    variant('matplotlib', default=False, description='Enable Matplotlib support')
    variant('numpy', default=False, description='Enable NumPy support')
    variant('tcl', default=False, description='Enable TCL support')
    variant('mpi', default=False, description='Enable MPI support')
    variant('osmesa', default=False, description='Enable OSMesa support')
    variant('qt', default=False, description='Enable Qt support')
    # NOTE(review): install() below also tests '+opengl2', which is not
    # declared as a variant here -- confirm whether it should be added.

    depends_on('python', when='+python')
    depends_on('py-numpy', when='+python+numpy')
    depends_on('py-matplotlib', when='+python+matplotlib')
    depends_on('tcl', when='+tcl')
    depends_on('mpi', when='+mpi')
    depends_on('qt', when='+qt')

    depends_on('bzip2')
    depends_on('freetype')
    depends_on('hdf5')  # drags in mpi
    depends_on('jpeg')
    depends_on('libpng')
    depends_on('libtiff')
    #depends_on('libxml2') # drags in python
    depends_on('netcdf')
    #depends_on('protobuf') # version mismatches?
    #depends_on('sqlite') # external version not supported
    depends_on('zlib')

    def install(self, spec, prefix):
        with working_dir('spack-build', create=True):
            def feature_to_bool(feature, on='ON', off='OFF'):
                # Map presence of a spec feature (e.g. '+qt') to a CMake bool.
                if feature in spec:
                    return on
                return off

            def nfeature_to_bool(feature):
                # Negated form of feature_to_bool.
                return feature_to_bool(feature, on='OFF', off='ON')

            # BUGFIX: feature_args previously started as a *copy* of
            # std_cmake_args and was then extended with std_cmake_args
            # again below, so every standard argument was passed to
            # cmake twice.  Start empty and append once at the end.
            feature_args = []
            feature_args.append('-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % feature_to_bool('+qt'))
            feature_args.append('-DPARAVIEW_ENABLE_PYTHON:BOOL=%s' % feature_to_bool('+python'))
            feature_args.append('-DPARAVIEW_USE_MPI:BOOL=%s' % feature_to_bool('+mpi'))
            feature_args.append('-DVTK_ENABLE_TCL_WRAPPING:BOOL=%s' % feature_to_bool('+tcl'))
            feature_args.append('-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' % feature_to_bool('+osmesa'))
            feature_args.append('-DVTK_USE_X:BOOL=%s' % nfeature_to_bool('+osmesa'))
            feature_args.append('-DVTK_RENDERING_BACKEND:STRING=%s' % feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL'))
            feature_args.extend(std_cmake_args)

            # NOTE(review): the '-DVTK_USER_SYSTEM_*' flags below look like
            # typos for VTK's 'VTK_USE_SYSTEM_<pkg>' options -- verify
            # against the ParaView/VTK CMake documentation before changing.
            cmake('..',
                '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix,
                '-DBUILD_TESTING:BOOL=OFF',
                '-DVTK_USER_SYSTEM_FREETYPE:BOOL=ON',
                '-DVTK_USER_SYSTEM_HDF5:BOOL=ON',
                '-DVTK_USER_SYSTEM_JPEG:BOOL=ON',
                #'-DVTK_USER_SYSTEM_LIBXML2:BOOL=ON',
                '-DVTK_USER_SYSTEM_NETCDF:BOOL=ON',
                '-DVTK_USER_SYSTEM_TIFF:BOOL=ON',
                '-DVTK_USER_SYSTEM_ZLIB:BOOL=ON',
                *feature_args)

            make()
            make('install')
| lgpl-2.1 | Python |
|
eb71a3d3319480b3f99cb44f934a51bfb1b5bd67 | Add abstract class for HAP channels | postlund/pyatv,postlund/pyatv | pyatv/auth/hap_channel.py | pyatv/auth/hap_channel.py | """Base class for HAP based channels (connections)."""
from abc import ABC, abstractmethod
import asyncio
import logging
from typing import Callable, Tuple, cast
from pyatv.auth.hap_pairing import PairVerifyProcedure
from pyatv.auth.hap_session import HAPSession
from pyatv.support import log_binary
_LOGGER = logging.getLogger(__name__)
class AbstractHAPChannel(ABC, asyncio.Protocol):
"""Abstract base class for connections using HAP encryption and segmenting."""
def __init__(self, output_key: bytes, input_key: bytes) -> None:
"""Initialize a new AbstractHAPChannel instance."""
self.buffer = b""
self.transport = None
self.session: HAPSession = HAPSession()
self.session.enable(output_key, input_key)
def connection_made(self, transport) -> None:
"""Device connection was made."""
sock = transport.get_extra_info("socket")
dstaddr, dstport = sock.getpeername()
_LOGGER.debug("Connected to %s:%d", dstaddr, dstport)
self.transport = transport
def data_received(self, data: bytes) -> None:
"""Message was received from device."""
assert self.transport is not None
decrypt = self.session.decrypt(data)
log_binary(_LOGGER, "Received data", Data=data)
self.buffer += decrypt
self.handle_received()
@abstractmethod
def handle_received(self) -> None:
"""Handle received data that was put in buffer."""
def send(self, data: bytes) -> None:
"""Send message to device."""
assert self.transport is not None
encrypted = self.session.encrypt(data)
log_binary(_LOGGER, "Sending data", Encrypted=encrypted)
self.transport.write(encrypted)
def connection_lost(self, exc) -> None:
"""Device connection was dropped."""
_LOGGER.debug("Connection was lost to remote")
async def setup_channel(
factory: Callable[[bytes, bytes], AbstractHAPChannel],
verifier: PairVerifyProcedure,
address: str,
port: int,
salt: str,
output_info: str,
input_info: str,
) -> Tuple[asyncio.BaseTransport, AbstractHAPChannel]:
"""Set up a new HAP channel and enable encryption."""
out_key, in_key = verifier.encryption_keys(salt, output_info, input_info)
loop = asyncio.get_event_loop()
transport, protocol = await loop.create_connection(
lambda: factory(out_key, in_key),
address,
port,
)
return transport, cast(AbstractHAPChannel, protocol)
| mit | Python |
|
5fa7514d9cf6bed319adb5f63b07c29feb5e29ea | add hex.cmdline.py3.py | TristanCavelier/notesntools,TristanCavelier/notesntools,TristanCavelier/notesntools,TristanCavelier/notesntools,TristanCavelier/notesntools | python/hex.cmdline.py3.py | python/hex.cmdline.py3.py | #!/usr/bin/env python3
# Copyright (c) 2014 Tristan Cavelier <t.cavelier@free.fr>
# This program is free software. It comes without any warranty, to
# the extent permitted by applicable law. You can redistribute it
# and/or modify it under the terms of the Do What The Fuck You Want
# To Public License, Version 2, as published by Sam Hocevar. See
# http://www.wtfpl.net/ for more details.
"""usage: hex [-h] [-d] [-i] [-w COLS] [FILE]
Hex encode or decode FILE, or standard input, to standard output.
positional arguments:
FILE
optional arguments:
-h, --help show this help message and exit
-d, --decode decode data
-i, --ignore-garbage when decoding, ignore non-hex digits
-w COLS, --wrap COLS wrap encoded lines after COLS character (default 76).
Use 0 to disable line wrapping
"""
import sys, os
import argparse
import binascii
def pipe_encode_no_wrap():
    """Hex-encode stdin to stdout as one continuous unwrapped line.

    Returns 0 at EOF.
    """
    stdin_fd = sys.stdin.fileno()
    while True:
        chunk = os.read(stdin_fd, 1024)
        if not chunk:
            return 0
        # bytes.hex() formats the whole chunk in one C-level call instead
        # of one "%02X" interpolation per byte; .upper() keeps the
        # original upper-case output.
        sys.stdout.write(chunk.hex().upper())
def pipe_encode_wrap(wrap):
    """Hex-encode stdin to stdout, one line per *wrap* output characters.

    *wrap* must be even (two hex digits per input byte); main() only
    dispatches even widths here.  Returns 0 at EOF.
    """
    # Floor division avoids the float round-trip of the original
    # int(wrap / 2).
    byte_length = wrap // 2
    stdin_fd = sys.stdin.fileno()
    while True:
        chunk = os.read(stdin_fd, byte_length)
        if not chunk:
            return 0
        sys.stdout.write(chunk.hex().upper() + "\n")
def pipe_decode_ignore_garbage():
    """Decode hex from stdin to stdout, silently skipping every byte that
    is not a hex digit.

    Returns 0 on success, or 1 (with a message on stderr) when the input
    ends with an unpaired hex digit.
    """
    hex_digits = b"0123456789ABCDEFabcdef"
    out_fd = sys.stdout.fileno()
    high = None  # first digit of the pair currently being assembled
    while True:
        block = os.read(sys.stdin.fileno(), 1024)
        if not block:
            break
        for byte in block:
            if byte not in hex_digits:
                continue  # garbage byte: ignore it
            if high is None:
                high = byte
            else:
                os.write(out_fd, binascii.unhexlify(chr(high) + chr(byte)))
                high = None
    if high is not None:
        sys.stderr.write("hex: invalid input\n")
        return 1
    return 0
def pipe_decode():
    """Decode hex from stdin to stdout.

    CR/LF bytes are tolerated anywhere between digits; any other non-hex
    byte makes binascii.unhexlify raise.  Returns 0 on success, 1 when
    the input ends in the middle of a digit pair.
    """
    high = None  # first digit of the pair currently being assembled
    in_fd = sys.stdin.fileno()
    out_fd = sys.stdout.fileno()
    while True:
        block = os.read(in_fd, 1024)
        if not block:
            break
        for byte in block:
            if byte == 0x0D or byte == 0x0A:
                continue  # skip line endings from wrapped input
            if high is None:
                high = byte
            else:
                os.write(out_fd, binascii.unhexlify(chr(high) + chr(byte)))
                high = None
    if high is None:
        return 0
    sys.stderr.write("hex: invalid input\n")
    return 1
def main():
    """Command-line entry point: parse options, then dispatch to the
    appropriate encode/decode pipe function.

    Returns the process exit status (0 on success).
    """
    parser = argparse.ArgumentParser(description='Hex encode or decode FILE, or standard input, to standard output.')
    parser.add_argument("-d", "--decode", dest="decode", default=False, action="store_true", help="decode data")
    parser.add_argument("-i", "--ignore-garbage", dest="ignore_garbage", default=False, action="store_true", help="when decoding, ignore non-hex digits")
    parser.add_argument("-w", "--wrap", metavar="COLS", dest="wrap", default=76, type=int, help="wrap encoded lines after COLS character (default 76). Use 0 to disable line wrapping")
    parser.add_argument("file", metavar="FILE", nargs="?", default=None)
    args = parser.parse_args()
    if args.file is not None:
        sys.stdin = open(args.file, "rb")
    if args.decode:
        if args.ignore_garbage:
            return pipe_decode_ignore_garbage()
        return pipe_decode()
    if args.wrap == 0:
        return pipe_encode_no_wrap()
    if args.wrap % 2 == 0:
        return pipe_encode_wrap(args.wrap)
    # BUGFIX: odd column counts used to call pipe_encode_clever_wrap(),
    # which was never defined anywhere -> NameError at runtime.
    return _pipe_encode_odd_wrap(args.wrap)


def _pipe_encode_odd_wrap(cols):
    """Hex-encode stdin to stdout wrapping at an odd column count, so a
    byte's two digits may straddle a line break.  Returns 0 at EOF."""
    pending = ""
    while True:
        chunk = os.read(sys.stdin.fileno(), 1024)
        if not chunk:
            break
        pending += chunk.hex().upper()
        while len(pending) >= cols:
            sys.stdout.write(pending[:cols] + "\n")
            pending = pending[cols:]
    if pending:
        sys.stdout.write(pending + "\n")
    return 0
sys.exit(main())
| mit | Python |
|
0089de0eccae27bf4cd5a2f9166e8418d64171c3 | Create XOR.py | jenniferwx/Programming_Practice,jenniferwx/Programming_Practice,jenniferwx/Programming_Practice | XOR.py | XOR.py | '''
Implement XOR operation
'''
def XOR(a, b):
    """Compute the bitwise XOR of two non-negative integers without
    using the ^ operator.

    Works digit-by-digit in base 2: a result bit is 1 exactly when the
    corresponding bits of a and b differ.

    Args:
        a (int): first operand, must be >= 0.
        b (int): second operand, must be >= 0.

    Returns:
        int: a XOR b.

    Raises:
        ValueError: if either operand is negative (the original loop
            never terminated for negative inputs).
    """
    if a < 0 or b < 0:
        raise ValueError("XOR is only defined here for non-negative integers")
    result = 0
    power = 1
    # Peel one bit per iteration.  Floor division keeps the operands
    # integral on Python 3 as well (plain '/' would produce floats and
    # break the loop condition).
    while a > 0 or b > 0:
        if (a % 2) + (b % 2) == 1:
            result += power
        power *= 2
        a //= 2
        b //= 2
    return result
if __name__ == '__main__':
    # Demo: 123 XOR 230 == 157.  The parenthesised print form produces
    # the same output on both Python 2 (prints the parenthesised
    # expression) and Python 3, unlike the original 'print x' statement.
    a = 123
    b = 230
    print(XOR(a, b))
| bsd-3-clause | Python |
|
bd01797f18012927202b87872dc33caf685306c0 | Add GDB plugin for printing ABC values | klkblake/abcc,klkblake/abcc,klkblake/abcc,klkblake/abcc | gdb.py | gdb.py | deadbeef = 0xdeadbeefdeadbeef
abc_any = gdb.lookup_type("union any")
def color(s, c):
    """Wrap *s* in an ANSI SGR escape sequence; *c* may be an int code
    or a string such as "1;31"."""
    return "\x1b[{0}m{1}\x1b[0m".format(c, s)
def gray(s):
    # SGR code 90 = bright black, rendered as gray.
    return color(s, 90)
def red(s):
    # SGR "1;31" = bold red, used for error markers.
    return color(s, "1;31")
def p(indent, tag, value):
    """Print "tag: value" indented by *indent* spaces."""
    print("{0}{1}: {2}".format(" " * indent, tag, value))
def print_abc(i, v):
    """Recursively pretty-print the ABC runtime value *v* (a gdb.Value),
    indented by *i* spaces, one node per line."""
    v = v.cast(abc_any)
    vt = v['as_tagged']
    # Sentinel bit patterns first.
    if vt == 0xdeadf00ddeadf00d:
        p(i, "Unit", "Unit")
    elif vt == deadbeef:
        p(i, "Dead", "Beef")
    elif vt == 0:
        p(i, red("!!!NULL POINTER!!!"), "This should never happen")
    elif (vt & 0xfff0000000000000) != 0:
        # High bits set: a boxed number, stored bit-inverted (~vt).
        p(i, "Number", (~vt).cast(abc_any)['as_num'])
    elif vt < 0x00007f0000000000: # FIXME should get actual mappings -- don't know how to.
        # Low addresses are treated as code: try to resolve a symbol name.
        block = gdb.block_for_pc(int(vt))
        if block == None:
            name = str(v['as_indirect'])
        else:
            name = str(block.function)
        p(i, "Block", name)
    else:
        # Heap pointer: the low 2 bits tag the cell kind.
        tag = vt & 0x3
        ptr = vt & ~0x3
        hexptr = gray(hex(int(ptr)))
        v = ptr.cast(abc_any)
        try:
            if tag == 0:
                # Pair cell; snd == deadbeef marks a sum-type Left.
                pair = v['as_pair'].dereference()
                if pair['snd']['as_tagged'] == deadbeef:
                    p(i, "Left", hexptr)
                    print_abc(i+4, pair['fst'])
                else:
                    p(i, "Pair", hexptr)
                    print_abc(i+4, pair['fst'])
                    print_abc(i+4, pair['snd'])
            elif tag == 1:
                # Composed-block cell; yz == deadbeef marks a Right.
                pair = v['as_comp_block'].dereference()
                if pair['yz']['as_tagged'] == deadbeef:
                    p(i, "Right", hexptr)
                    print_abc(i+4, pair['xy'])
                else:
                    p(i, "Composed", hexptr)
                    print_abc(i+4, pair['xy'])
                    print_abc(i+4, pair['yz'])
            elif tag == 2:
                p(i, "Quoted", hexptr)
                print_abc(i+4, v['as_indirect'].dereference())
            else:
                p(i, "INVALID TAG", hexptr)
        except gdb.MemoryError:
            # Dereference of a bogus pointer: report it instead of aborting.
            p(i, red("!!!INVALID POINTER!!!"), hexptr)
class PrintABCValue(gdb.Command):
    """GDB command 'print-abc-value EXPR' that pretty-prints the ABC
    value EXPR evaluates to, via print_abc()."""
    def __init__(self):
        super(PrintABCValue, self).__init__('print-abc-value', gdb.COMMAND_DATA, gdb.COMPLETE_SYMBOL)

    def invoke(self, arg, tty):
        # Evaluate the user's expression in the inferior and dump the tree.
        print_abc(0, gdb.parse_and_eval(arg))
| bsd-3-clause | Python |
|
2d320058c96f88348d8226fa4a827a6c2c973237 | Add Classical multidimensional scaling algorithm. | ntduong/ML | mds.py | mds.py | """
Simple implementation of classical MDS.
See http://www.stat.cmu.edu/~ryantibs/datamining/lectures/09-dim3-marked.pdf for more details.
"""
import numpy as np
import numpy.linalg as linalg
import matplotlib.pyplot as plt
def square_points(size):
    """Return the (row, col) integer coordinates of a size x size grid
    as a (size**2, 2) numpy array.

    divmod (floor division) replaces the original ``i/size``, which
    under Python 3 true division silently produced float coordinates.
    """
    nsensors = size**2
    return np.array([divmod(i, size) for i in range(nsensors)])
def norm(vec):
    """Return the Euclidean (L2) norm of *vec* (a numpy array)."""
    return np.sqrt((vec ** 2).sum())
def mds(D, dim=2):
    """
    Classical multidimensional scaling algorithm.
    Given a matrix of interpoint distances D, find a set of low dimensional points
    that have a similar interpoint distances.
    """
    # NOTE(review): assumes D is a square n x n distance matrix -- both
    # unpacked names below are deliberately 'n'.
    (n,n) = D.shape
    A = (-0.5 * D**2)
    # Double centering: B = J A J with J = I - (1/n) * ones, which turns
    # squared distances into an inner-product (Gram) matrix.
    M = np.ones((n,n))/n
    I = np.eye(n)
    B = np.dot(np.dot(I-M, A),I-M)
    '''Another way to compute inner-products matrix B
    Ac = np.mat(np.mean(A, 1))
    Ar = np.mat(np.mean(A, 0))
    B = np.array(A - np.transpose(Ac) - Ar + np.mean(A))
    '''
    # B is symmetric, so its SVD doubles as an eigendecomposition; the
    # row-vector sqrt(S) broadcasts across U's columns, scaling each
    # singular vector to give the embedding coordinates.
    [U,S,V] = linalg.svd(B)
    Y = U * np.sqrt(S)
    # First `dim` columns are the embedding; S is returned for diagnostics.
    return (Y[:,0:dim], S)
def test():
    """Embed a 10x10 grid from its pairwise distances and plot both the
    MDS reconstruction and the original layout."""
    points = square_points(10)

    # Pairwise Euclidean distance matrix of the 100 grid points.
    distance = np.zeros((100,100))
    for (i, pointi) in enumerate(points):
        for (j, pointj) in enumerate(points):
            distance[i,j] = norm(pointi-pointj)

    Y, eigs = mds(distance)

    # Figure 1: recovered coordinates; figure 2: original grid.
    plt.figure()
    plt.plot(Y[:,0], Y[:,1], '.')
    plt.figure(2)
    plt.plot(points[:,0], points[:,1], '.')
    plt.show()
def main():
    """Tiny CLI wrapper: -h/--help prints usage, -d/--debug drops into
    pdb, then test() runs."""
    import sys, os, getopt, pdb
    def usage():
        # NOTE(review): Python 2 print statement (this file predates
        # py3); the usage string is also missing a space after argv[0].
        print sys.argv[0] + "[-h] [-d]"
    try:
        (options, args) = getopt.getopt(sys.argv[1:], 'dh', ['help', 'debug'])
    except getopt.GetoptError:
        # Unknown option: show usage and exit with a failure status.
        usage()
        sys.exit(2)
    for o, a in options:
        if o in ('-h', '--help'):
            usage()
            sys.exit()
        elif o in ('-d', '--debug'):
            pdb.set_trace()
    test()
main() | mit | Python |
|
a78d879c9c097c32c58f5246d46a4a188b17d99c | Add workup vebose name change migration. | SaturdayNeighborhoodHealthClinic/clintools,SaturdayNeighborhoodHealthClinic/clintools,SaturdayNeighborhoodHealthClinic/clintools | workup/migrations/0002_add_verbose_names.py | workup/migrations/0002_add_verbose_names.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workup', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='historicalworkup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='historicalworkup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
migrations.AlterField(
model_name='workup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='workup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='workup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='workup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
]
| mit | Python |
|
d4a87c2131c02b3638743167ce32c779ece14fd5 | Create crawlerino.py | dmahugh/crawlerino,dmahugh/crawlerino | crawlerino.py | crawlerino.py | """Simple web crawler, to be extended for various uses.
Written in Python 3, uses requests and BeautifulSoup modules.
"""
def crawler(startpage, maxpages=100, singledomain=True):
    """Crawl the web starting from specified page.

    1st parameter = starting page url
    maxpages = maximum number of pages to crawl
    singledomain = whether to only crawl links within startpage's domain
    """
    import requests, bs4  # 're' was imported here but never used
    from urllib.parse import urldefrag, urljoin, urlparse
    from collections import deque

    pagequeue = deque()  # queue of pages to be crawled
    pagequeue.append(startpage)
    crawled = set()  # pages already crawled; a set gives O(1) membership tests
    domain = urlparse(startpage).netloc  # for singledomain option
    pages = 0   # number of pages succesfully crawled so far
    failed = 0  # number of pages that couldn't be crawled

    while pages < maxpages and pagequeue:
        url = pagequeue.popleft()  # get next page to crawl (FIFO queue)
        try:
            response = requests.get(url)
            # .get() avoids a KeyError when the server sends no
            # Content-Type header (previously that error was swallowed by
            # the bare except and miscounted as a fetch failure).
            if not response.headers.get('content-type', '').startswith('text/html'):
                continue  # don't crawl non-HTML links
            soup = bs4.BeautifulSoup(response.text, "html.parser")
            print('Crawling:', url)
            pages += 1
            crawled.add(url)

            # PROCESSING CODE GOES HERE:
            # do something interesting with this page

            # get target URLs for all links on the page
            links = [a.attrs.get('href') for a in soup.select('a[href]')]
            # remove fragment identifiers
            links = [urldefrag(link)[0] for link in links]
            # remove any empty strings
            links = list(filter(None, links))
            # if it's a relative link, change to absolute
            links = [link if bool(urlparse(link).netloc) else urljoin(url, link)
                     for link in links]
            # if singledomain=True, remove links to other domains
            if singledomain:
                links = [link for link in links if (urlparse(link).netloc == domain)]

            # add these links to the queue (except if already crawled)
            for link in links:
                if link not in crawled and link not in pagequeue:
                    pagequeue.append(link)
        except Exception:
            # Catch Exception rather than a bare except: a bare except also
            # traps KeyboardInterrupt/SystemExit, making the crawler
            # impossible to stop cleanly with Ctrl-C.
            print("*FAILED*:", url)
            failed += 1

    print('{0} pages crawled, {1} pages failed to load.'.format(pages, failed))
# if running standalone, crawl some Microsoft pages as a test
if __name__ == "__main__":
crawler('http://www.microsoft.com', maxpages=30, singledomain=True)
| mit | Python |
|
91facfcc42e001e2a598d6d06e55270ef9239b1d | add migration | jrsupplee/django-activity-stream,jrsupplee/django-activity-stream | actstream/migrations/0006_auto_20170329_2048.py | actstream/migrations/0006_auto_20170329_2048.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-29 20:48
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add ``Action.deleted`` and index
    ``Action.timestamp``."""

    dependencies = [
        ('actstream', '0005_auto_20161119_2211'),
    ]

    operations = [
        # New soft-delete flag; existing rows are backfilled as not deleted.
        migrations.AddField(
            model_name='action',
            name='deleted',
            field=models.BooleanField(default=False),
        ),
        # Add a database index on the (otherwise unchanged) auto_now_add
        # timestamp -- presumably to speed recency-ordered queries, though
        # the motivation is not visible here.
        migrations.AlterField(
            model_name='action',
            name='timestamp',
            field=models.DateTimeField(auto_now_add=True, db_index=True),
        ),
    ]
| bsd-3-clause | Python |
|
1e050f30e8307a75976a52b8f1258a5b14e43733 | Add middleware for static serving | cggh/DQXServer | wsgi_static.py | wsgi_static.py | import wsgi_server
import os
from werkzeug.wsgi import SharedDataMiddleware
application = SharedDataMiddleware(wsgi_server.application, {
'/static': os.path.join(os.path.dirname(__file__), 'static')
})
| agpl-3.0 | Python |
|
a086307e6aac341ed8a6596d0a05b7a8d198c7ec | Add command to dump and restore user pointers. | brainwane/zulip,dwrpayne/zulip,dotcool/zulip,nicholasbs/zulip,ikasumiwt/zulip,DazWorrall/zulip,xuanhan863/zulip,tdr130/zulip,alliejones/zulip,brainwane/zulip,vabs22/zulip,Qgap/zulip,DazWorrall/zulip,souravbadami/zulip,developerfm/zulip,mdavid/zulip,mohsenSy/zulip,rishig/zulip,shubhamdhama/zulip,kaiyuanheshang/zulip,hackerkid/zulip,hustlzp/zulip,hackerkid/zulip,peiwei/zulip,bluesea/zulip,jrowan/zulip,dxq-git/zulip,Cheppers/zulip,luyifan/zulip,vaidap/zulip,codeKonami/zulip,Juanvulcano/zulip,vakila/zulip,technicalpickles/zulip,Jianchun1/zulip,adnanh/zulip,alliejones/zulip,dhcrzf/zulip,m1ssou/zulip,stamhe/zulip,vaidap/zulip,zachallaun/zulip,dawran6/zulip,zhaoweigg/zulip,dawran6/zulip,mansilladev/zulip,dawran6/zulip,so0k/zulip,jonesgithub/zulip,voidException/zulip,wangdeshui/zulip,aakash-cr7/zulip,PaulPetring/zulip,jainayush975/zulip,zofuthan/zulip,isht3/zulip,Drooids/zulip,tommyip/zulip,amyliu345/zulip,moria/zulip,Batterfii/zulip,ipernet/zulip,punchagan/zulip,hj3938/zulip,peguin40/zulip,jainayush975/zulip,krtkmj/zulip,cosmicAsymmetry/zulip,johnnygaddarr/zulip,saitodisse/zulip,Jianchun1/zulip,eeshangarg/zulip,nicholasbs/zulip,Drooids/zulip,aps-sids/zulip,noroot/zulip,natanovia/zulip,dattatreya303/zulip,wavelets/zulip,udxxabp/zulip,zacps/zulip,bluesea/zulip,JPJPJPOPOP/zulip,hafeez3000/zulip,brockwhittaker/zulip,ryanbackman/zulip,hafeez3000/zulip,ufosky-server/zulip,suxinde2009/zulip,arpitpanwar/zulip,JPJPJPOPOP/zulip,shubhamdhama/zulip,developerfm/zulip,easyfmxu/zulip,alliejones/zulip,xuanhan863/zulip,proliming/zulip,zulip/zulip,AZtheAsian/zulip,huangkebo/zulip,PaulPetring/zulip,ashwinirudrappa/zulip,ryansnowboarder/zulip,deer-hope/zulip,hj3938/zulip,fw1121/zulip,zofuthan/zulip,blaze225/zulip,eastlhu/zulip,jeffcao/zulip,praveenaki/zulip,KingxBanana/zulip,LeeRisk/zulip,tdr130/zulip,mdavid/zulip,bssrdf/zulip,udxxabp/zulip,karamcnair/zulip,ufosky-server/zulip,jerryge/zulip,mahim97/zul
ip,kou/zulip,Jianchun1/zulip,brockwhittaker/zulip,kou/zulip,hj3938/zulip,dhcrzf/zulip,tbutter/zulip,luyifan/zulip,souravbadami/zulip,dnmfarrell/zulip,Cheppers/zulip,avastu/zulip,tbutter/zulip,johnnygaddarr/zulip,ikasumiwt/zulip,udxxabp/zulip,cosmicAsymmetry/zulip,ApsOps/zulip,calvinleenyc/zulip,Diptanshu8/zulip,peguin40/zulip,pradiptad/zulip,ufosky-server/zulip,MariaFaBella85/zulip,tommyip/zulip,arpitpanwar/zulip,akuseru/zulip,dnmfarrell/zulip,jessedhillon/zulip,jphilipsen05/zulip,dxq-git/zulip,eastlhu/zulip,j831/zulip,firstblade/zulip,JanzTam/zulip,Batterfii/zulip,amallia/zulip,jimmy54/zulip,itnihao/zulip,udxxabp/zulip,KJin99/zulip,kaiyuanheshang/zulip,Batterfii/zulip,dxq-git/zulip,Jianchun1/zulip,LAndreas/zulip,natanovia/zulip,praveenaki/zulip,jrowan/zulip,adnanh/zulip,vaidap/zulip,wdaher/zulip,punchagan/zulip,pradiptad/zulip,timabbott/zulip,Drooids/zulip,zofuthan/zulip,amyliu345/zulip,zulip/zulip,avastu/zulip,shaunstanislaus/zulip,qq1012803704/zulip,blaze225/zulip,zwily/zulip,deer-hope/zulip,thomasboyt/zulip,dattatreya303/zulip,swinghu/zulip,sonali0901/zulip,guiquanz/zulip,dattatreya303/zulip,brainwane/zulip,atomic-labs/zulip,cosmicAsymmetry/zulip,christi3k/zulip,themass/zulip,ericzhou2008/zulip,seapasulli/zulip,fw1121/zulip,vabs22/zulip,shrikrishnaholla/zulip,calvinleenyc/zulip,cosmicAsymmetry/zulip,AZtheAsian/zulip,hustlzp/zulip,shubhamdhama/zulip,jonesgithub/zulip,dhcrzf/zulip,samatdav/zulip,hustlzp/zulip,jainayush975/zulip,amanharitsh123/zulip,developerfm/zulip,KJin99/zulip,jessedhillon/zulip,showell/zulip,DazWorrall/zulip,jerryge/zulip,zacps/zulip,jrowan/zulip,dnmfarrell/zulip,qq1012803704/zulip,j831/zulip,xuanhan863/zulip,bowlofstew/zulip,wdaher/zulip,zachallaun/zulip,MariaFaBella85/zulip,bluesea/zulip,niftynei/zulip,Gabriel0402/zulip,esander91/zulip,fw1121/zulip,vakila/zulip,jerryge/zulip,dhcrzf/zulip,KingxBanana/zulip,TigorC/zulip,codeKonami/zulip,Galexrt/zulip,natanovia/zulip,Juanvulcano/zulip,yocome/zulip,huangkebo/zulip,tommyip/zulip,themass/zulip,datt
atreya303/zulip,cosmicAsymmetry/zulip,mohsenSy/zulip,shaunstanislaus/zulip,timabbott/zulip,johnny9/zulip,PaulPetring/zulip,timabbott/zulip,vabs22/zulip,tbutter/zulip,peguin40/zulip,firstblade/zulip,schatt/zulip,Jianchun1/zulip,KJin99/zulip,gkotian/zulip,Frouk/zulip,KJin99/zulip,qq1012803704/zulip,grave-w-grave/zulip,pradiptad/zulip,yocome/zulip,zorojean/zulip,joshisa/zulip,j831/zulip,karamcnair/zulip,kaiyuanheshang/zulip,littledogboy/zulip,lfranchi/zulip,ryansnowboarder/zulip,fw1121/zulip,MayB/zulip,Vallher/zulip,bitemyapp/zulip,dwrpayne/zulip,xuanhan863/zulip,hackerkid/zulip,vabs22/zulip,so0k/zulip,hackerkid/zulip,paxapy/zulip,tdr130/zulip,esander91/zulip,mdavid/zulip,arpith/zulip,mansilladev/zulip,dwrpayne/zulip,joyhchen/zulip,LeeRisk/zulip,voidException/zulip,levixie/zulip,RobotCaleb/zulip,showell/zulip,suxinde2009/zulip,tbutter/zulip,tiansiyuan/zulip,karamcnair/zulip,technicalpickles/zulip,bowlofstew/zulip,MayB/zulip,levixie/zulip,gigawhitlocks/zulip,dhcrzf/zulip,TigorC/zulip,jainayush975/zulip,PhilSk/zulip,Gabriel0402/zulip,dwrpayne/zulip,showell/zulip,shrikrishnaholla/zulip,wdaher/zulip,JanzTam/zulip,esander91/zulip,jeffcao/zulip,amanharitsh123/zulip,wangdeshui/zulip,proliming/zulip,johnnygaddarr/zulip,arpitpanwar/zulip,gkotian/zulip,adnanh/zulip,isht3/zulip,vakila/zulip,kou/zulip,peiwei/zulip,dnmfarrell/zulip,zhaoweigg/zulip,KingxBanana/zulip,isht3/zulip,ahmadassaf/zulip,niftynei/zulip,itnihao/zulip,moria/zulip,Frouk/zulip,RobotCaleb/zulip,thomasboyt/zulip,lfranchi/zulip,firstblade/zulip,karamcnair/zulip,RobotCaleb/zulip,hengqujushi/zulip,aakash-cr7/zulip,Qgap/zulip,schatt/zulip,johnnygaddarr/zulip,glovebx/zulip,easyfmxu/zulip,DazWorrall/zulip,littledogboy/zulip,zorojean/zulip,SmartPeople/zulip,jackrzhang/zulip,susansls/zulip,blaze225/zulip,bowlofstew/zulip,adnanh/zulip,zwily/zulip,brockwhittaker/zulip,mansilladev/zulip,isht3/zulip,lfranchi/zulip,jackrzhang/zulip,jrowan/zulip,peiwei/zulip,moria/zulip,jphilipsen05/zulip,dattatreya303/zulip,willingc/zulip,LAndr
eas/zulip,babbage/zulip,andersk/zulip,deer-hope/zulip,kaiyuanheshang/zulip,paxapy/zulip,huangkebo/zulip,jackrzhang/zulip,ericzhou2008/zulip,samatdav/zulip,DazWorrall/zulip,EasonYi/zulip,timabbott/zulip,dotcool/zulip,LeeRisk/zulip,aps-sids/zulip,yuvipanda/zulip,JPJPJPOPOP/zulip,brockwhittaker/zulip,xuanhan863/zulip,amallia/zulip,itnihao/zulip,RobotCaleb/zulip,ApsOps/zulip,ryanbackman/zulip,mansilladev/zulip,peguin40/zulip,punchagan/zulip,itnihao/zulip,zhaoweigg/zulip,hustlzp/zulip,sonali0901/zulip,levixie/zulip,nicholasbs/zulip,noroot/zulip,suxinde2009/zulip,glovebx/zulip,guiquanz/zulip,schatt/zulip,synicalsyntax/zulip,wweiradio/zulip,glovebx/zulip,jessedhillon/zulip,shaunstanislaus/zulip,krtkmj/zulip,synicalsyntax/zulip,wavelets/zulip,aliceriot/zulip,JPJPJPOPOP/zulip,jonesgithub/zulip,ipernet/zulip,Frouk/zulip,noroot/zulip,zacps/zulip,reyha/zulip,aps-sids/zulip,EasonYi/zulip,noroot/zulip,bowlofstew/zulip,m1ssou/zulip,mahim97/zulip,guiquanz/zulip,MariaFaBella85/zulip,Juanvulcano/zulip,tiansiyuan/zulip,dnmfarrell/zulip,tiansiyuan/zulip,brainwane/zulip,armooo/zulip,suxinde2009/zulip,seapasulli/zulip,bastianh/zulip,armooo/zulip,RobotCaleb/zulip,yuvipanda/zulip,LAndreas/zulip,levixie/zulip,Cheppers/zulip,Diptanshu8/zulip,babbage/zulip,akuseru/zulip,bastianh/zulip,brockwhittaker/zulip,EasonYi/zulip,PhilSk/zulip,alliejones/zulip,brainwane/zulip,bastianh/zulip,saitodisse/zulip,ApsOps/zulip,zhaoweigg/zulip,xuanhan863/zulip,Qgap/zulip,jessedhillon/zulip,johnny9/zulip,stamhe/zulip,zachallaun/zulip,verma-varsha/zulip,cosmicAsymmetry/zulip,bluesea/zulip,schatt/zulip,xuxiao/zulip,ryansnowboarder/zulip,kaiyuanheshang/zulip,hackerkid/zulip,suxinde2009/zulip,mdavid/zulip,johnny9/zulip,LeeRisk/zulip,sup95/zulip,joyhchen/zulip,dhcrzf/zulip,jackrzhang/zulip,he15his/zulip,rishig/zulip,saitodisse/zulip,jainayush975/zulip,fw1121/zulip,jessedhillon/zulip,joshisa/zulip,aps-sids/zulip,kokoar/zulip,zhaoweigg/zulip,Vallher/zulip,rishig/zulip,rishig/zulip,qq1012803704/zulip,andersk/zulip,shauns
tanislaus/zulip,Suninus/zulip,umkay/zulip,Qgap/zulip,aliceriot/zulip,jrowan/zulip,thomasboyt/zulip,rishig/zulip,showell/zulip,ApsOps/zulip,bssrdf/zulip,praveenaki/zulip,Suninus/zulip,bastianh/zulip,zhaoweigg/zulip,schatt/zulip,tiansiyuan/zulip,sup95/zulip,LAndreas/zulip,SmartPeople/zulip,zulip/zulip,sonali0901/zulip,Drooids/zulip,hayderimran7/zulip,zacps/zulip,showell/zulip,bastianh/zulip,eeshangarg/zulip,willingc/zulip,zachallaun/zulip,atomic-labs/zulip,zorojean/zulip,technicalpickles/zulip,hafeez3000/zulip,shrikrishnaholla/zulip,m1ssou/zulip,babbage/zulip,Suninus/zulip,Cheppers/zulip,TigorC/zulip,ashwinirudrappa/zulip,SmartPeople/zulip,bluesea/zulip,hackerkid/zulip,aliceriot/zulip,wweiradio/zulip,jimmy54/zulip,lfranchi/zulip,synicalsyntax/zulip,sharmaeklavya2/zulip,reyha/zulip,ikasumiwt/zulip,KJin99/zulip,kou/zulip,sup95/zulip,sonali0901/zulip,ikasumiwt/zulip,andersk/zulip,firstblade/zulip,jerryge/zulip,dwrpayne/zulip,hayderimran7/zulip,nicholasbs/zulip,yuvipanda/zulip,praveenaki/zulip,zachallaun/zulip,m1ssou/zulip,zwily/zulip,Drooids/zulip,akuseru/zulip,jonesgithub/zulip,bssrdf/zulip,KJin99/zulip,wavelets/zulip,adnanh/zulip,guiquanz/zulip,pradiptad/zulip,wavelets/zulip,wdaher/zulip,Vallher/zulip,amanharitsh123/zulip,mansilladev/zulip,wangdeshui/zulip,PhilSk/zulip,aps-sids/zulip,DazWorrall/zulip,TigorC/zulip,paxapy/zulip,tiansiyuan/zulip,saitodisse/zulip,ryansnowboarder/zulip,niftynei/zulip,sup95/zulip,amallia/zulip,joyhchen/zulip,tiansiyuan/zulip,Suninus/zulip,kokoar/zulip,jonesgithub/zulip,m1ssou/zulip,verma-varsha/zulip,eeshangarg/zulip,Juanvulcano/zulip,tommyip/zulip,EasonYi/zulip,zacps/zulip,tdr130/zulip,littledogboy/zulip,xuxiao/zulip,gigawhitlocks/zulip,verma-varsha/zulip,technicalpickles/zulip,hustlzp/zulip,tbutter/zulip,AZtheAsian/zulip,bssrdf/zulip,johnnygaddarr/zulip,armooo/zulip,rht/zulip,sonali0901/zulip,zachallaun/zulip,RobotCaleb/zulip,Frouk/zulip,jimmy54/zulip,saitodisse/zulip,Gabriel0402/zulip,vikas-parashar/zulip,MayB/zulip,deer-hope/zulip,wavele
ts/zulip,rht/zulip,xuxiao/zulip,Batterfii/zulip,avastu/zulip,glovebx/zulip,jainayush975/zulip,niftynei/zulip,firstblade/zulip,udxxabp/zulip,avastu/zulip,qq1012803704/zulip,ryanbackman/zulip,levixie/zulip,rishig/zulip,vabs22/zulip,joshisa/zulip,proliming/zulip,Qgap/zulip,shrikrishnaholla/zulip,dattatreya303/zulip,kokoar/zulip,jerryge/zulip,codeKonami/zulip,easyfmxu/zulip,wavelets/zulip,umkay/zulip,jeffcao/zulip,aliceriot/zulip,jimmy54/zulip,amanharitsh123/zulip,rht/zulip,themass/zulip,noroot/zulip,tiansiyuan/zulip,PaulPetring/zulip,deer-hope/zulip,aliceriot/zulip,synicalsyntax/zulip,MariaFaBella85/zulip,KingxBanana/zulip,andersk/zulip,bastianh/zulip,easyfmxu/zulip,adnanh/zulip,firstblade/zulip,tdr130/zulip,mohsenSy/zulip,Vallher/zulip,luyifan/zulip,SmartPeople/zulip,kaiyuanheshang/zulip,schatt/zulip,mahim97/zulip,zofuthan/zulip,ericzhou2008/zulip,KJin99/zulip,vikas-parashar/zulip,suxinde2009/zulip,Gabriel0402/zulip,mohsenSy/zulip,vakila/zulip,zacps/zulip,brainwane/zulip,saitodisse/zulip,ashwinirudrappa/zulip,stamhe/zulip,qq1012803704/zulip,vikas-parashar/zulip,ufosky-server/zulip,ahmadassaf/zulip,Qgap/zulip,krtkmj/zulip,atomic-labs/zulip,shubhamdhama/zulip,showell/zulip,wdaher/zulip,ipernet/zulip,zorojean/zulip,niftynei/zulip,Juanvulcano/zulip,armooo/zulip,kokoar/zulip,Qgap/zulip,PaulPetring/zulip,PhilSk/zulip,JPJPJPOPOP/zulip,codeKonami/zulip,wangdeshui/zulip,kou/zulip,sup95/zulip,hustlzp/zulip,susansls/zulip,joyhchen/zulip,aps-sids/zulip,glovebx/zulip,arpith/zulip,umkay/zulip,atomic-labs/zulip,qq1012803704/zulip,isht3/zulip,johnnygaddarr/zulip,guiquanz/zulip,sup95/zulip,zwily/zulip,jeffcao/zulip,aakash-cr7/zulip,udxxabp/zulip,bssrdf/zulip,xuxiao/zulip,yocome/zulip,willingc/zulip,praveenaki/zulip,eastlhu/zulip,deer-hope/zulip,wweiradio/zulip,hafeez3000/zulip,armooo/zulip,ipernet/zulip,johnnygaddarr/zulip,susansls/zulip,joshisa/zulip,Cheppers/zulip,swinghu/zulip,AZtheAsian/zulip,jphilipsen05/zulip,mansilladev/zulip,LAndreas/zulip,ryanbackman/zulip,seapasulli/zulip,gi
gawhitlocks/zulip,shrikrishnaholla/zulip,reyha/zulip,hayderimran7/zulip,he15his/zulip,hayderimran7/zulip,RobotCaleb/zulip,EasonYi/zulip,ApsOps/zulip,swinghu/zulip,JanzTam/zulip,jphilipsen05/zulip,synicalsyntax/zulip,grave-w-grave/zulip,jphilipsen05/zulip,huangkebo/zulip,Suninus/zulip,christi3k/zulip,samatdav/zulip,thomasboyt/zulip,pradiptad/zulip,itnihao/zulip,hengqujushi/zulip,christi3k/zulip,SmartPeople/zulip,calvinleenyc/zulip,ahmadassaf/zulip,samatdav/zulip,hengqujushi/zulip,MariaFaBella85/zulip,AZtheAsian/zulip,hayderimran7/zulip,LAndreas/zulip,shrikrishnaholla/zulip,atomic-labs/zulip,JanzTam/zulip,kaiyuanheshang/zulip,he15his/zulip,umkay/zulip,jessedhillon/zulip,zwily/zulip,he15his/zulip,babbage/zulip,JanzTam/zulip,TigorC/zulip,Galexrt/zulip,ryansnowboarder/zulip,codeKonami/zulip,rishig/zulip,so0k/zulip,Frouk/zulip,hustlzp/zulip,xuanhan863/zulip,brockwhittaker/zulip,amyliu345/zulip,yocome/zulip,luyifan/zulip,codeKonami/zulip,yocome/zulip,ufosky-server/zulip,wangdeshui/zulip,dawran6/zulip,christi3k/zulip,willingc/zulip,babbage/zulip,jackrzhang/zulip,swinghu/zulip,PhilSk/zulip,seapasulli/zulip,dwrpayne/zulip,gigawhitlocks/zulip,ahmadassaf/zulip,krtkmj/zulip,nicholasbs/zulip,vaidap/zulip,Diptanshu8/zulip,johnny9/zulip,paxapy/zulip,nicholasbs/zulip,bitemyapp/zulip,voidException/zulip,nicholasbs/zulip,susansls/zulip,paxapy/zulip,amanharitsh123/zulip,rht/zulip,karamcnair/zulip,proliming/zulip,zwily/zulip,brainwane/zulip,alliejones/zulip,niftynei/zulip,eastlhu/zulip,Frouk/zulip,peiwei/zulip,Batterfii/zulip,Vallher/zulip,dxq-git/zulip,codeKonami/zulip,Galexrt/zulip,dotcool/zulip,ApsOps/zulip,shaunstanislaus/zulip,willingc/zulip,LeeRisk/zulip,deer-hope/zulip,arpitpanwar/zulip,voidException/zulip,vakila/zulip,arpith/zulip,hackerkid/zulip,ryanbackman/zulip,adnanh/zulip,ericzhou2008/zulip,Diptanshu8/zulip,amallia/zulip,akuseru/zulip,vaidap/zulip,akuseru/zulip,verma-varsha/zulip,shaunstanislaus/zulip,EasonYi/zulip,levixie/zulip,jphilipsen05/zulip,hengqujushi/zulip,littledo
gboy/zulip,verma-varsha/zulip,MariaFaBella85/zulip,mahim97/zulip,gigawhitlocks/zulip,jimmy54/zulip,arpitpanwar/zulip,souravbadami/zulip,Suninus/zulip,dxq-git/zulip,stamhe/zulip,Galexrt/zulip,littledogboy/zulip,isht3/zulip,lfranchi/zulip,grave-w-grave/zulip,punchagan/zulip,verma-varsha/zulip,ashwinirudrappa/zulip,joyhchen/zulip,umkay/zulip,gigawhitlocks/zulip,sharmaeklavya2/zulip,so0k/zulip,MariaFaBella85/zulip,aps-sids/zulip,gkotian/zulip,eastlhu/zulip,zachallaun/zulip,praveenaki/zulip,ericzhou2008/zulip,akuseru/zulip,LAndreas/zulip,mansilladev/zulip,Jianchun1/zulip,hayderimran7/zulip,zulip/zulip,zwily/zulip,voidException/zulip,ryanbackman/zulip,ryansnowboarder/zulip,lfranchi/zulip,he15his/zulip,grave-w-grave/zulip,jrowan/zulip,ikasumiwt/zulip,souravbadami/zulip,hafeez3000/zulip,amyliu345/zulip,swinghu/zulip,esander91/zulip,natanovia/zulip,themass/zulip,shubhamdhama/zulip,moria/zulip,vikas-parashar/zulip,thomasboyt/zulip,Drooids/zulip,souravbadami/zulip,ufosky-server/zulip,fw1121/zulip,Batterfii/zulip,suxinde2009/zulip,ipernet/zulip,zorojean/zulip,huangkebo/zulip,vaidap/zulip,jeffcao/zulip,zofuthan/zulip,LeeRisk/zulip,reyha/zulip,souravbadami/zulip,bssrdf/zulip,sharmaeklavya2/zulip,babbage/zulip,easyfmxu/zulip,aliceriot/zulip,bowlofstew/zulip,j831/zulip,zulip/zulip,wavelets/zulip,littledogboy/zulip,yuvipanda/zulip,grave-w-grave/zulip,paxapy/zulip,pradiptad/zulip,technicalpickles/zulip,guiquanz/zulip,Vallher/zulip,developerfm/zulip,jonesgithub/zulip,amallia/zulip,xuxiao/zulip,rht/zulip,samatdav/zulip,yocome/zulip,aliceriot/zulip,peguin40/zulip,m1ssou/zulip,Diptanshu8/zulip,gkotian/zulip,Drooids/zulip,hj3938/zulip,ikasumiwt/zulip,hafeez3000/zulip,avastu/zulip,bastianh/zulip,christi3k/zulip,yocome/zulip,umkay/zulip,aakash-cr7/zulip,eeshangarg/zulip,peiwei/zulip,akuseru/zulip,MayB/zulip,Frouk/zulip,alliejones/zulip,KingxBanana/zulip,gigawhitlocks/zulip,dhcrzf/zulip,eeshangarg/zulip,blaze225/zulip,mahim97/zulip,joshisa/zulip,stamhe/zulip,noroot/zulip,tommyip/zulip,develo
perfm/zulip,kou/zulip,saitodisse/zulip,sharmaeklavya2/zulip,wdaher/zulip,moria/zulip,tommyip/zulip,Vallher/zulip,joyhchen/zulip,amyliu345/zulip,ApsOps/zulip,aakash-cr7/zulip,kokoar/zulip,esander91/zulip,udxxabp/zulip,sharmaeklavya2/zulip,bluesea/zulip,atomic-labs/zulip,xuxiao/zulip,PaulPetring/zulip,punchagan/zulip,zulip/zulip,Batterfii/zulip,SmartPeople/zulip,gkotian/zulip,Gabriel0402/zulip,zorojean/zulip,dnmfarrell/zulip,guiquanz/zulip,yuvipanda/zulip,huangkebo/zulip,samatdav/zulip,bitemyapp/zulip,pradiptad/zulip,vakila/zulip,esander91/zulip,dotcool/zulip,hengqujushi/zulip,johnny9/zulip,dotcool/zulip,armooo/zulip,karamcnair/zulip,jerryge/zulip,calvinleenyc/zulip,reyha/zulip,karamcnair/zulip,gkotian/zulip,mdavid/zulip,arpith/zulip,noroot/zulip,bitemyapp/zulip,ashwinirudrappa/zulip,krtkmj/zulip,Gabriel0402/zulip,johnny9/zulip,seapasulli/zulip,mahim97/zulip,calvinleenyc/zulip,ashwinirudrappa/zulip,proliming/zulip,itnihao/zulip,reyha/zulip,glovebx/zulip,christi3k/zulip,sonali0901/zulip,krtkmj/zulip,rht/zulip,arpith/zulip,showell/zulip,willingc/zulip,calvinleenyc/zulip,themass/zulip,luyifan/zulip,joshisa/zulip,sharmaeklavya2/zulip,eastlhu/zulip,hj3938/zulip,huangkebo/zulip,kokoar/zulip,yuvipanda/zulip,gkotian/zulip,TigorC/zulip,thomasboyt/zulip,EasonYi/zulip,willingc/zulip,developerfm/zulip,shaunstanislaus/zulip,vikas-parashar/zulip,esander91/zulip,he15his/zulip,shubhamdhama/zulip,ufosky-server/zulip,grave-w-grave/zulip,j831/zulip,Diptanshu8/zulip,dawran6/zulip,vabs22/zulip,armooo/zulip,babbage/zulip,zorojean/zulip,aakash-cr7/zulip,xuxiao/zulip,wdaher/zulip,jerryge/zulip,littledogboy/zulip,DazWorrall/zulip,eeshangarg/zulip,JanzTam/zulip,praveenaki/zulip,he15his/zulip,avastu/zulip,firstblade/zulip,so0k/zulip,blaze225/zulip,PaulPetring/zulip,timabbott/zulip,shrikrishnaholla/zulip,easyfmxu/zulip,Gabriel0402/zulip,swinghu/zulip,jeffcao/zulip,timabbott/zulip,mohsenSy/zulip,Cheppers/zulip,LeeRisk/zulip,ipernet/zulip,luyifan/zulip,m1ssou/zulip,ahmadassaf/zulip,bowlofstew/zuli
p,amallia/zulip,seapasulli/zulip,wangdeshui/zulip,eastlhu/zulip,themass/zulip,dwrpayne/zulip,glovebx/zulip,wweiradio/zulip,kou/zulip,ahmadassaf/zulip,hengqujushi/zulip,jackrzhang/zulip,dxq-git/zulip,ipernet/zulip,ashwinirudrappa/zulip,kokoar/zulip,jonesgithub/zulip,tbutter/zulip,zulip/zulip,Cheppers/zulip,ahmadassaf/zulip,MayB/zulip,proliming/zulip,Juanvulcano/zulip,bluesea/zulip,zofuthan/zulip,jackrzhang/zulip,avastu/zulip,hj3938/zulip,wweiradio/zulip,shubhamdhama/zulip,lfranchi/zulip,hafeez3000/zulip,easyfmxu/zulip,yuvipanda/zulip,andersk/zulip,andersk/zulip,ikasumiwt/zulip,johnny9/zulip,atomic-labs/zulip,AZtheAsian/zulip,ericzhou2008/zulip,umkay/zulip,MayB/zulip,punchagan/zulip,wangdeshui/zulip,levixie/zulip,natanovia/zulip,swinghu/zulip,punchagan/zulip,hj3938/zulip,arpith/zulip,mdavid/zulip,technicalpickles/zulip,vikas-parashar/zulip,tbutter/zulip,synicalsyntax/zulip,alliejones/zulip,seapasulli/zulip,wweiradio/zulip,moria/zulip,so0k/zulip,amanharitsh123/zulip,itnihao/zulip,themass/zulip,schatt/zulip,dnmfarrell/zulip,peguin40/zulip,stamhe/zulip,moria/zulip,jeffcao/zulip,hengqujushi/zulip,synicalsyntax/zulip,mdavid/zulip,so0k/zulip,JPJPJPOPOP/zulip,natanovia/zulip,Suninus/zulip,stamhe/zulip,susansls/zulip,ericzhou2008/zulip,Galexrt/zulip,jimmy54/zulip,bowlofstew/zulip,fw1121/zulip,proliming/zulip,natanovia/zulip,JanzTam/zulip,voidException/zulip,dotcool/zulip,KingxBanana/zulip,bitemyapp/zulip,Galexrt/zulip,j831/zulip,jimmy54/zulip,arpitpanwar/zulip,susansls/zulip,jessedhillon/zulip,wweiradio/zulip,peiwei/zulip,developerfm/zulip,MayB/zulip,hayderimran7/zulip,mohsenSy/zulip,arpitpanwar/zulip,eeshangarg/zulip,bitemyapp/zulip,peiwei/zulip,vakila/zulip,voidException/zulip,amyliu345/zulip,bitemyapp/zulip,krtkmj/zulip,technicalpickles/zulip,dawran6/zulip,Galexrt/zulip,tommyip/zulip,PhilSk/zulip,luyifan/zulip,zhaoweigg/zulip,ryansnowboarder/zulip,timabbott/zulip,tdr130/zulip,amallia/zulip,thomasboyt/zulip,rht/zulip,tdr130/zulip,blaze225/zulip,bssrdf/zulip,dotcool/zulip,jo
shisa/zulip,dxq-git/zulip,andersk/zulip,zofuthan/zulip | zephyr/management/commands/dump_pointers.py | zephyr/management/commands/dump_pointers.py | from optparse import make_option
from django.core.management.base import BaseCommand
from zephyr.models import Realm, UserProfile
import simplejson
def dump():
pointers = []
for u in UserProfile.objects.select_related("user__email").all():
pointers.append((u.user.email, u.pointer))
file("dumped-pointers", "w").write(simplejson.dumps(pointers) + "\n")
def restore(change):
for (email, pointer) in simplejson.loads(file("dumped-pointers").read()):
u = UserProfile.objects.get(user__email=email)
print "%s: pointer %s => %s" % (email, u.pointer, pointer)
if change:
u.pointer = pointer
u.save()
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--restore', default=False, action='store_true'),
make_option('--dry-run', '-n', default=False, action='store_true'),)
def handle(self, *args, **options):
if options["restore"]:
restore(change=not options['dry_run'])
else:
dump()
| apache-2.0 | Python |
|
769abf579f7bd082f7c6f4295edb49b41b252bce | Add empty alembic revision | agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,lukasjuhrich/pycroft | alembic/versions/4784a128a6dd_empty_revision.py | alembic/versions/4784a128a6dd_empty_revision.py | """Empty revision
This is the empty revision that can be used as the base for future
migrations.
Initial database creation shall be done via `metadata.create_all()` and
`alembic stamp head`.
Revision ID: 4784a128a6dd
Revises:
Create Date: 2017-12-13 00:48:12.079431
"""
from alembic import op
import sqlalchemy as sa
import pycroft  # NOTE(review): unused here; possibly kept for side effects -- confirm before removing

# revision identifiers, used by Alembic.
revision = '4784a128a6dd'
down_revision = None  # root of the migration history (no parent revision)
branch_labels = None
depends_on = None


def upgrade():
    """Intentionally empty: initial schema creation is done via
    metadata.create_all() plus `alembic stamp head` (see module docstring)."""
    pass


def downgrade():
    """Intentionally empty counterpart of upgrade()."""
    pass
| apache-2.0 | Python |
|
4bf84b05b183916fd211f77ab8099ef14c9cec06 | Update migrations | teamtaverna/core | app/timetables/migrations/0003_auto_20171107_1103.py | app/timetables/migrations/0003_auto_20171107_1103.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-11-07 11:03
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: attach verbose names (and one help text)
    to the ``name`` fields of the timetable models.  Only field metadata
    changes; column types/lengths are as stated below."""

    dependencies = [
        ('timetables', '0002_auto_20171005_2209'),
    ]

    operations = [
        migrations.AlterField(
            model_name='course',
            name='name',
            field=models.CharField(help_text='Example: appetizer, main course, dessert', max_length=150, verbose_name='Course Name'),
        ),
        migrations.AlterField(
            model_name='dish',
            name='name',
            field=models.CharField(max_length=255, verbose_name='Dish Name'),
        ),
        migrations.AlterField(
            model_name='meal',
            name='name',
            field=models.CharField(max_length=60, verbose_name='Meal Name'),
        ),
        migrations.AlterField(
            model_name='timetable',
            name='name',
            field=models.CharField(max_length=255, verbose_name='Timetable Name'),
        ),
        migrations.AlterField(
            model_name='vendor',
            name='name',
            field=models.CharField(max_length=255, verbose_name='Vendor Name'),
        ),
    ]
| mit | Python |
|
74135b8289fa4b6684c54d8c9e37671c75b92447 | add admin for area settings | liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4 | adhocracy4/maps/admin.py | adhocracy4/maps/admin.py | from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from . import models
@admin.register(models.AreaSettings)
class AreaSettingsAdmin(admin.ModelAdmin):
list_filter = ('module__project__organisation', 'module__project')
list_display = ('module',)
fieldsets = (
(None, {'fields': ('module',)}),
(_('Polygon'), {
'fields': ('polygon',),
'description': _('Enter a valid GeoJSON object. '
'To initialize a new areasetting enter the '
'string "false" without quotation marks.')
})
)
| agpl-3.0 | Python |
|
2dce9ed68463b536f246f01b2ac5cb275df2453b | add polynomial | Semen52/GIBDD | regression.py | regression.py | # coding: utf8
from datetime import datetime
import itertools
import matplotlib.pyplot as plt
import numpy as np
from sklearn.preprocessing import PolynomialFeatures, Imputer
from sklearn.pipeline import make_pipeline
from sklearn.linear_model import Ridge, BayesianRidge
from utils import *
data_accidents = load_data_from_csv('data/parsered1.csv', False)
data_weather = load_data_from_csv('data/weather_utf8.csv',False)
'''
Because of the fact that we don't have particular time (only dates) for accidents and do have time for weather
measurements, let's choose one time for all accident, e.g. 15:00.
'''
for i in data_accidents.index:
#converting date to standard datetime representation, adding particular time
data_accidents.ix[i,'date'] = str(datetime.strptime(str(data_accidents.ix[i,'date']) + ' 15:00:00', '%Y-%m-%d %H:%M:%S'))
for i in data_weather.index:
data_weather.ix[i,'date'] = str(datetime.strptime(str(data_weather.ix[i,'date']),'%d.%m.%Y %H:%M'))
#merging two datasets on date
data = data_accidents.merge(data_weather, on='date')
#casting to numpy array
array = np.array(data[['num_dtp','T']].values, dtype=np.float64)
#preprocessing, completing missing values
imp = Imputer(missing_values='NaN', strategy='median', axis=1)
new_data = imp.fit_transform(array)
#sorting data by 'T', for better plotting
new_data = new_data[new_data[:, 1].argsort()]
x = new_data[:,1]
y = new_data[:,0]
x_plot = x
X = x[:,np.newaxis]
X_plot = x_plot[:,np.newaxis]
plt.scatter(x, y, s = 30, label="training points")
#algos = itertools.cycle([Ridge(), BayesianRidge()])
for degree in [1, 3, 5, 7]:
model = make_pipeline(PolynomialFeatures(degree), Ridge())
model.fit(X, y)
y_plot = model.predict(X_plot)
plt.plot(x_plot, y_plot, label="degree %d" % degree)
plt.legend(loc='lower left')
plt.show()
| apache-2.0 | Python |
|
eb91b11930319369bc9cfc3b1b15c0b92fb4d85c | Add `OrganizationOption` tests based on `ProjectOption`. | JamesMura/sentry,gencer/sentry,nicholasserra/sentry,gencer/sentry,fotinakis/sentry,looker/sentry,JackDanger/sentry,gencer/sentry,looker/sentry,fotinakis/sentry,looker/sentry,jean/sentry,daevaorn/sentry,ifduyue/sentry,beeftornado/sentry,zenefits/sentry,alexm92/sentry,mvaled/sentry,mitsuhiko/sentry,nicholasserra/sentry,zenefits/sentry,fotinakis/sentry,fotinakis/sentry,ifduyue/sentry,ifduyue/sentry,nicholasserra/sentry,ifduyue/sentry,mitsuhiko/sentry,gencer/sentry,BuildingLink/sentry,daevaorn/sentry,JamesMura/sentry,looker/sentry,daevaorn/sentry,zenefits/sentry,JamesMura/sentry,mvaled/sentry,zenefits/sentry,jean/sentry,ifduyue/sentry,BuildingLink/sentry,beeftornado/sentry,BuildingLink/sentry,JamesMura/sentry,alexm92/sentry,JackDanger/sentry,jean/sentry,beeftornado/sentry,jean/sentry,mvaled/sentry,looker/sentry,mvaled/sentry,jean/sentry,daevaorn/sentry,JamesMura/sentry,mvaled/sentry,mvaled/sentry,JackDanger/sentry,zenefits/sentry,alexm92/sentry,BuildingLink/sentry,BuildingLink/sentry,gencer/sentry | tests/sentry/models/test_organizationoption.py | tests/sentry/models/test_organizationoption.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from sentry.models import OrganizationOption
from sentry.testutils import TestCase
class OrganizationOptionManagerTest(TestCase):
def test_set_value(self):
OrganizationOption.objects.set_value(self.organization, 'foo', 'bar')
assert OrganizationOption.objects.filter(
organization=self.organization, key='foo', value='bar').exists()
def test_get_value(self):
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result is None
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result == 'bar'
def test_unset_value(self):
OrganizationOption.objects.unset_value(self.organization, 'foo')
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
OrganizationOption.objects.unset_value(self.organization, 'foo')
assert not OrganizationOption.objects.filter(
organization=self.organization, key='foo').exists()
def test_get_value_bulk(self):
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: None}
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: 'bar'}
| bsd-3-clause | Python |
|
f264f8804c208f2b55471f27f92a9e8c1ab5d778 | Test our new happenings-by-year view. | hello-base/web,hello-base/web,hello-base/web,hello-base/web | tests/correlations/test_views.py | tests/correlations/test_views.py | # -*- coding: utf-8 -*-
import datetime
import pytest
from django.core.urlresolvers import reverse
from components.people.factories import GroupFactory, IdolFactory
@pytest.mark.django_db
def test_happenings_by_year_view(client):
[GroupFactory(started=datetime.date(2013, 1, 1)) for i in xrange(5)]
response = client.get(reverse('happenings-by-year', kwargs={'year': 2013}))
assert response.status_code == 200
assert 'object_list' in response.context
assert '2010s' in response.context['years']
assert 'correlations/happenings_year.html' in [template.name for template in response.templates]
| apache-2.0 | Python |
|
43c4595ae26a7663538e712af37553c7a64fade7 | Add a couple unit tests for teuthology.parallel | michaelsevilla/teuthology,caibo2014/teuthology,ceph/teuthology,SUSE/teuthology,SUSE/teuthology,t-miyamae/teuthology,zhouyuan/teuthology,ktdreyer/teuthology,robbat2/teuthology,yghannam/teuthology,yghannam/teuthology,dmick/teuthology,dreamhost/teuthology,zhouyuan/teuthology,dmick/teuthology,ivotron/teuthology,caibo2014/teuthology,michaelsevilla/teuthology,tchaikov/teuthology,ivotron/teuthology,robbat2/teuthology,ceph/teuthology,dmick/teuthology,t-miyamae/teuthology,tchaikov/teuthology,ktdreyer/teuthology,SUSE/teuthology,dreamhost/teuthology | teuthology/test/test_parallel.py | teuthology/test/test_parallel.py | from ..parallel import parallel
def identity(item, input_set=None, remove=False):
if input_set is not None:
assert item in input_set
if remove:
input_set.remove(item)
return item
class TestParallel(object):
def test_basic(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set, remove=True)
assert para.any_spawned is True
assert para.count == len(in_set)
def test_result(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set)
for result in para:
in_set.remove(result)
| mit | Python |
|
f370ee48c8aec312f9ea8a9ce1737214e51e2eaf | Disable repaint.key_mobile_sites_repaint. | hgl888/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk | tools/perf/benchmarks/repaint.py | tools/perf/benchmarks/repaint.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from benchmarks import silk_flags
from measurements import smoothness
from telemetry import benchmark
import page_sets
class _Repaint(perf_benchmark.PerfBenchmark):
  """Shared base: measures smoothness while repeatedly invalidating pages."""

  @classmethod
  def AddBenchmarkCommandLineArgs(cls, parser):
    # The invalidation pattern is configurable from the command line.
    parser.add_option('--mode', type='string',
                      default='viewport',
                      help='Invalidation mode. '
                      'Supported values: fixed_size, layer, random, viewport.')
    parser.add_option('--width', type='int',
                      default=None,
                      help='Width of invalidations for fixed_size mode.')
    parser.add_option('--height', type='int',
                      default=None,
                      help='Height of invalidations for fixed_size mode.')

  @classmethod
  def Name(cls):
    return 'repaint'

  def CreateStorySet(self, options):
    # Page set driven by the invalidation options parsed above.
    return page_sets.KeyMobileSitesRepaintPageSet(
        options.mode, options.width, options.height)

  def CreatePageTest(self, options):
    return smoothness.Repaint()


#crbug.com/499320
#@benchmark.Enabled('android')
@benchmark.Disabled()
class RepaintKeyMobileSites(_Repaint):
  """Measures repaint performance on the key mobile sites.
  http://www.chromium.org/developers/design-documents/rendering-benchmarks"""

  @classmethod
  def Name(cls):
    return 'repaint.key_mobile_sites_repaint'


@benchmark.Enabled('android')
class RepaintGpuRasterizationKeyMobileSites(_Repaint):
  """Measures repaint performance on the key mobile sites with forced GPU
  rasterization.
  http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
  tag = 'gpu_rasterization'

  def SetExtraBrowserOptions(self, options):
    silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)

  @classmethod
  def Name(cls):
    return 'repaint.gpu_rasterization.key_mobile_sites_repaint'
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from benchmarks import silk_flags
from measurements import smoothness
from telemetry import benchmark
import page_sets
class _Repaint(perf_benchmark.PerfBenchmark):
@classmethod
def AddBenchmarkCommandLineArgs(cls, parser):
parser.add_option('--mode', type='string',
default='viewport',
help='Invalidation mode. '
'Supported values: fixed_size, layer, random, viewport.')
parser.add_option('--width', type='int',
default=None,
help='Width of invalidations for fixed_size mode.')
parser.add_option('--height', type='int',
default=None,
help='Height of invalidations for fixed_size mode.')
@classmethod
def Name(cls):
return 'repaint'
def CreateStorySet(self, options):
return page_sets.KeyMobileSitesRepaintPageSet(
options.mode, options.width, options.height)
def CreatePageTest(self, options):
return smoothness.Repaint()
@benchmark.Enabled('android')
class RepaintKeyMobileSites(_Repaint):
"""Measures repaint performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
@classmethod
def Name(cls):
return 'repaint.key_mobile_sites_repaint'
@benchmark.Enabled('android')
class RepaintGpuRasterizationKeyMobileSites(_Repaint):
"""Measures repaint performance on the key mobile sites with forced GPU
rasterization.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
tag = 'gpu_rasterization'
def SetExtraBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
@classmethod
def Name(cls):
return 'repaint.gpu_rasterization.key_mobile_sites_repaint'
| bsd-3-clause | Python |
3d523bca7377c0f4c80a4f697b0c41d340eb8200 | add a command to clear the celery queue | crateio/crate.web,crateio/crate.web | crate_project/apps/crate/management/clear_celery.py | crate_project/apps/crate/management/clear_celery.py | import redis
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    """Management command that drops every pending Celery task."""

    def handle(self, *args, **options):
        # Celery's default Redis broker keeps the queue in the "celery" list;
        # deleting the key empties the queue in one shot.
        connection = redis.StrictRedis(host=settings.GONDOR_REDIS_HOST,
                                       port=settings.GONDOR_REDIS_PORT,
                                       password=settings.GONDOR_REDIS_PASSWORD)
        connection.delete("celery")
| bsd-2-clause | Python |
|
33e7216ae9b367c509b5075496fce08d346743e2 | Implement channel limit | ElementalAlchemist/txircd,Heufneutje/txircd | txircd/modules/rfc/cmode_l.py | txircd/modules/rfc/cmode_l.py | from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class LimitMode(ModuleData, Mode):
    """Channel mode +l: caps how many users may occupy a channel."""
    implements(IPlugin, IModuleData, IMode)

    name = "LimitMode"
    core = True
    affectedActions = [ "joinpermission" ]

    def hookIRCd(self, ircd):
        self.ircd = ircd

    def channelModes(self):
        # +l takes a required parameter: the user limit.
        return [ ("l", ModeType.Param, self) ]

    def actions(self):
        return [ ("modeactioncheck-channel-l-joinpermission", 10, self.isModeSet) ]

    def isModeSet(self, channel, alsoChannel, user):
        # Report the configured limit when +l is set on the channel, else None.
        return channel.modes["l"] if "l" in channel.modes else None

    def checkSet(self, param):
        # The limit must parse as an integer; reject anything else.
        try:
            return [ int(param) ]
        except ValueError:
            return None

    def apply(self, actionType, channel, param, alsoChannel, user):
        # Refuse the join once the channel has reached its limit.
        if len(channel.users) < param:
            return None
        user.sendMessage(irc.ERR_CHANNELISFULL, channel.name, ":Cannot join channel (Channel is full)")
        return False
limitMode = LimitMode() | bsd-3-clause | Python |
|
fc03641455ce005c340bdd0baf2463a7db41ba8f | test with captured packet from chrome | colinmarc/python-spdy | test/b.py | test/b.py | from spdy.connection import Connection, SERVER
b = b'\x80\x02\x00\x01\x01\x00\x01\x0e\x00\x00\x00\x01\x00\x00\x00\x00\x00\x008\xea\xdf\xa2Q\xb2b\xe0b`\x83\xa4\x17\x06{\xb8\x0bu0,\xd6\xae@\x17\xcd\xcd\xb1.\xb45\xd0\xb3\xd4\xd1\xd2\xd7\x02\xb3,\x18\xf8Ps,\x83\x9cg\xb0?\xd4=:`\x07\x81\xd5\x99\xeb@\xd4\x1b3\xf0\xa3\xe5i\x06A\x90\x8bu\xa0N\xd6)NI\xce\x80\xab\x81%\x03\x06\xbe\xd4<\xdd\xd0`\x9d\xd4<\xa8\xa5,\xa0<\xce\xc0\x07J\x089 \xa6\x95\xa5\xa9\xa5%\x03[.\xb0l\xc9Oa`vw\ra`+\x06&\xc7\xdcT\x06\xd6\x8c\x92\x92\x82b\x06f\x90\xbf\x19\xf5\x19\xb8\x10\x99\x95\x01\x18\xf5U\x9999\x89\xfa\xa6z\x06\n\x1a\x11\x00\x19\x1aZ+\xf8d\xe6\x95V(d\x9aY\x98i*8\x02}\x9e\x1a\x9e\x9a\xe4\x9dY\xa2ojl\xaagh\xa8\xa0\xe1\xed\x11\xe2\xeb\xa3\xa3\x90\x93\x99\x9d\xaa\xe0\x9e\x9a\x9c\x9d\xaf\xa9\xe0\x9c\x01,sR\xf5\r\xcd\xf5\x80\x01cf\xacgn\xa9\x10\x9c\x98\x96X\x94\t\xd5\xc4\xc0\x0e\ry\x06\x0eX\x84\x00\x00\x00\x00\xff\xff\x80\x02\x00\x06\x00\x00\x00\x04\x00\x00\x00\x01'
# Feed the captured Chrome packet into a server-side SPDY connection and
# dump every frame it decodes (the capture contains four frames).
c = Connection(SERVER)
c.incoming(b)
for _ in range(4):
    print(c.get_frame())
| bsd-2-clause | Python |
|
7c24ffe52fe96339d14f522dc7c67122d01cead6 | add istabular predicate | blaze/datashape,aterrel/datashape,quantopian/datashape,cowlicks/datashape,aterrel/datashape,cowlicks/datashape,cpcloud/datashape,quantopian/datashape,cpcloud/datashape,llllllllll/datashape,ContinuumIO/datashape,blaze/datashape,ContinuumIO/datashape,llllllllll/datashape | datashape/predicates.py | datashape/predicates.py | from .util import collect, remove, dshape
from .coretypes import *
# https://github.com/ContinuumIO/datashape/blob/master/docs/source/types.rst
# Types that may appear as (array) dimensions in a datashape.
dimension_types = (Fixed, Var, Ellipsis)
# True when a component is an atomic (non-composite) type.
isunit = lambda x: isinstance(x, Unit)
def isdimension(ds):
    """ Is a component a dimension?

    >>> isdimension(Fixed(10))
    True
    >>> isdimension(Var())
    True
    >>> isdimension(int32)
    False
    """
    # Dimensions are exactly the members of `dimension_types`.
    return isinstance(ds, dimension_types)
def ishomogenous(ds):
    """ Does datashape contain only one dtype?

    >>> ishomogenous(int32)
    True
    >>> ishomogenous(var * (3 * string))
    True
    >>> ishomogenous(var * Record([('name', string), ('amount', int32)]))
    False
    """
    # Gather all atomic components, drop the dimensions, and check that a
    # single distinct measure remains.
    leaves = collect(isunit, ds)
    measures = remove(isdimension, leaves)
    return len(set(measures)) == 1
def dimensions(ds):
    """ Number of dimensions of datashape

    Interprets records as dimensional

    >>> dimensions(int32)
    0
    >>> dimensions(10 * int32)
    1
    >>> dimensions(var * (10 * int32))
    2
    >>> dimensions(var * Record([('name', string), ('amount', int32)]))
    2
    """
    if not isinstance(ds, DataShape):
        ds = dshape(ds)
    head = ds[0]
    if isdimension(head):
        # One leading dimension plus whatever the remainder contributes.
        return 1 + dimensions(ds.subarray(1))
    if isinstance(head, Record):
        # A record counts as one dimension; recurse into its fields and take
        # the deepest one.
        return 1 + max(map(dimensions, head.fields.values()))
    if len(ds) == 1 and isunit(head):
        return 0
    raise NotImplementedError('Can not compute dimensions for %s' % ds)
def isfixed(ds):
    """ Contains no variable dimensions

    >>> isfixed('10 * int')
    True
    >>> isfixed('var * int')
    False
    >>> isfixed('10 * {name: string, amount: int}')
    True
    >>> isfixed('10 * {name: string, amounts: var * int}')
    False
    """
    if not isinstance(ds, DataShape):
        ds = dshape(ds)
    first = ds[0]
    if isinstance(first, Var):
        return False
    if isinstance(first, Record):
        # Every field of the record must itself be fixed.
        return all(map(isfixed, first.fields.values()))
    if len(ds) > 1:
        return isfixed(ds.subarray(1))
    return True
def istabular(ds):
    """ Can be represented by a two dimensional with fixed columns

    >>> istabular('var * 3 * int')
    True
    >>> istabular('var * {name: string, amount: int}')
    True
    >>> istabular('var * 10 * 3 * int')
    False
    >>> istabular('10 * var * int')
    False
    """
    if not isinstance(ds, DataShape):
        ds = dshape(ds)
    # Tabular means: exactly two dimensions, and everything after the outer
    # (row) dimension is fixed-size.
    two_dimensional = dimensions(ds) == 2
    return two_dimensional and isfixed(ds.subarray(1))
| from .util import collect, remove, dshape
from .coretypes import *
# https://github.com/ContinuumIO/datashape/blob/master/docs/source/types.rst
# Types that may appear as (array) dimensions in a datashape.
dimension_types = (Fixed, Var, Ellipsis)
# True when a component is an atomic (non-composite) type.
isunit = lambda x: isinstance(x, Unit)


def isdimension(ds):
    """ Is a component a dimension?

    >>> isdimension(Fixed(10))
    True
    >>> isdimension(Var())
    True
    >>> isdimension(int32)
    False
    """
    return isinstance(ds, dimension_types)


def ishomogenous(ds):
    """ Does datashape contain only one dtype?

    >>> ishomogenous(int32)
    True
    >>> ishomogenous(var * (3 * string))
    True
    >>> ishomogenous(var * Record([('name', string), ('amount', int32)]))
    False
    """
    # Collect atomic leaves, discard dimensions, expect one distinct measure.
    leaves = collect(isunit, ds)
    return len(set(remove(isdimension, leaves))) == 1
def dimensions(ds):
    """ Number of dimensions of datashape

    Interprets records as dimensional

    >>> dimensions(int32)
    0
    >>> dimensions(10 * int32)
    1
    >>> dimensions(var * (10 * int32))
    2
    >>> dimensions(var * Record([('name', string), ('amount', int32)]))
    2
    """
    if not isinstance(ds, DataShape):
        ds = dshape(ds)
    head = ds[0]
    if isdimension(head):
        return 1 + dimensions(ds.subarray(1))
    if isinstance(head, Record):
        # Records add one level; recurse into fields and take the deepest.
        return 1 + max(map(dimensions, head.fields.values()))
    if len(ds) == 1 and isunit(head):
        return 0
    raise NotImplementedError('Can not compute dimensions for %s' % ds)
def isfixed(ds):
    """ Contains no variable dimensions

    >>> isfixed('10 * int')
    True
    >>> isfixed('var * int')
    False
    >>> isfixed('10 * {name: string, amount: int}')
    True
    >>> isfixed('10 * {name: string, amounts: var * int}')
    False
    """
    if not isinstance(ds, DataShape):
        ds = dshape(ds)
    first = ds[0]
    if isinstance(first, Var):
        return False
    if isinstance(first, Record):
        # All record fields must themselves be fixed.
        return all(map(isfixed, first.fields.values()))
    if len(ds) > 1:
        return isfixed(ds.subarray(1))
    return True
| bsd-2-clause | Python |
3406467f3d17621d436fc05d8820e21b7399a241 | add simple depth frame network benchmark | rjw57/streamkinect2 | scripts/depth_client.py | scripts/depth_client.py | #!/usr/bin/env python
"""
Simple benchmark of how fast depth frames are delivered.
"""
import logging
import threading
import time
from tornado.ioloop import IOLoop, PeriodicCallback
from streamkinect2.server import ServerBrowser
from streamkinect2.client import Client
# Install the zmq ioloop
from zmq.eventloop import ioloop
ioloop.install()
# Get our logger
log = logging.getLogger(__name__)
class Benchmark(object):
    """Counts depth frames per kinect and logs a frames-per-second estimate.

    A record per kinect is kept in ``self.records`` mapping
    kinect_id -> {'start': wall-clock start time, 'count': frames seen}.
    """

    def __init__(self, io_loop=None):
        self.io_loop = io_loop or IOLoop.instance()
        self.records = {}
        # Emit a throughput report once per second (1000 ms).
        self.report_callback = PeriodicCallback(self._report, 1000, self.io_loop)
        self.report_callback.start()

    def on_depth_frame(self, client, depth_frame, kinect_id):
        # One more frame observed for this kinect.
        self.records[kinect_id]['count'] += 1

    def on_add_kinect(self, client, kinect_id):
        client.on_depth_frame.connect(self.on_depth_frame, sender=client)
        log.info('Enabling depth streaming on kinect "{0}"'.format(kinect_id))
        client.enable_depth_frames(kinect_id)
        self.records[kinect_id] = { 'start': time.time(), 'count': 0, }

    def new_client(self, client, io_loop):
        """Called when a new client has been created. Enable depth streaming on all
        devices and benchmark result."""
        # Register interest in devices
        client.on_add_kinect.connect(self.on_add_kinect, sender=client)

    def _report(self):
        now = time.time()
        for k, v in self.records.items():
            delta = now - v['start']
            # BUG FIX: the spec was '{3:1f}' (minimum field width 1), which
            # printed six decimal places; '{3:.1f}' gives the intended
            # one-decimal fps figure.
            log.info('Kinect "{0}", {1} frames in {2:.0f} seconds => {3:.1f} fps'.format(
                k, v['count'], delta, v['count']/delta))
# Our listening class: reacts to server discovery by opening a client per
# server and handing it to the benchmark.
class Listener(object):
    """Tracks discovered streamkinect2 servers and their clients."""

    def __init__(self, browser, benchmark, io_loop = None):
        self.benchmark = benchmark
        self.io_loop = io_loop or IOLoop.instance()
        browser.on_add_server.connect(self.add_server, sender=browser)
        browser.on_remove_server.connect(self.remove_server, sender=browser)
        # Keep a reference to browser since we remain interested and do not
        # wish it garbage collected.
        self.browser = browser
        # One client per discovered server, keyed by endpoint.
        self.clients = {}

    def add_server(self, browser, server_info):
        log.info('Discovered server "{0.name}" at "{0.endpoint}"'.format(server_info))
        new_client = Client(server_info.endpoint, connect_immediately=True)
        self.clients[server_info.endpoint] = new_client
        self.benchmark.new_client(new_client, self.io_loop)

    def remove_server(self, browser, server_info):
        log.info('Server "{0.name}" at "{0.endpoint}" went away'.format(server_info))
        try:
            departing = self.clients[server_info.endpoint]
        except KeyError:
            # We didn't have a client for this server
            return
        departing.disconnect()
        del self.clients[server_info.endpoint]
class IOLoopThread(threading.Thread):
    """Runs the tornado/zmq IO loop on a background thread."""

    def run(self):
        # Create the server browser
        log.info('Creating server browser...')
        listener = Listener(ServerBrowser(), Benchmark())
        # Run the ioloop
        log.info('Running...')
        ioloop.IOLoop.instance().start()
        log.info('Stopping')

    def stop(self):
        # Schedule the stop from inside the loop's own thread, then wait up to
        # three seconds for the thread to wind down.
        io_loop = ioloop.IOLoop.instance()
        io_loop.add_callback(io_loop.stop)
        self.join(3)
def main():
    """Entry point: run the IO loop thread until the user presses Enter."""
    # Set log level
    logging.basicConfig(level=logging.INFO)
    banner = '============================================='
    print(banner)
    print('Press Enter to exit')
    print(banner)
    # Start the event loop
    ioloop_thread = IOLoopThread()
    ioloop_thread.start()
    # Wait for input
    input()
    # Stop thread
    ioloop_thread.stop()

if __name__ == '__main__':
    main()
| bsd-2-clause | Python |
|
9dd20f8361cff99329a5ab4b526e29edddac9a61 | add session.py | charlesbos/my-scripts,charlesbos/my-scripts | session.py | session.py | #!/usr/bin/python
#A quick and dirty interface to end a session
# This assumes systemd and xinitrc (for logout)
#By Charles Bos
from tkinter import *
import os
import sys
def getWm():
    """Return the window-manager process name given on the command line.

    With no argument, fall back to "-u $USER" so pkill targets every process
    owned by the current user instead.
    """
    argv = sys.argv
    return "-u $USER" if len(argv) == 1 else argv[1]
def runAction():
    """Execute the session action matching the selected radio button."""
    choice = option.get()
    if choice == 1:
        # Log out by killing the window manager (or all user processes).
        os.system("pkill " + getWm())
    elif choice == 2:
        os.system("systemctl suspend")
    elif choice == 3:
        os.system("systemctl hibernate")
    elif choice == 4:
        os.system("systemctl reboot")
    elif choice == 5:
        os.system("systemctl poweroff")
class UI():
    """Builds the row of session-action radio buttons plus an Ok button."""

    def __init__(self, parent):
        # `option` is read by runAction, hence module-global.
        global option
        option = IntVar()
        labels = ("Logout", "Suspend", "Hibernate", "Reboot", "Poweroff")
        for value, label in enumerate(labels, start=1):
            Radiobutton(parent, text=label, variable=option,
                        value=value).grid(row=2, column=value)
        Button(parent, text="Ok", command=runAction).grid(row=3, column=1, columnspan=5)
# Build the toplevel window, populate it with the UI, and enter the Tk event
# loop (blocks until the window is closed).
top = Tk()
top.title("End session")
ui = UI(top)
top.mainloop()
| agpl-3.0 | Python |
|
c43d929f9ee2f21a7e93986171307cd0f17fa96c | add unittests of helpers | mail6543210/clime,moskytw/clime | tests/test_helper.py | tests/test_helper.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from types import BuiltinFunctionType
from clime.helper import *
class TestClimeHelper(unittest.TestCase):
    """Unit tests for the helpers exported by clime.helper."""

    def test_autotype(self):
        # Each input string should be coerced to the most specific type.
        cases = ('string', '100', '100.0', None)
        answers = ('string', 100 , 100.0 , None)
        for case, answer in zip(cases, answers):
            self.assertEqual(autotype(case), answer)

    def test_getargspec(self):
        # Usage-style docstrings paired with the (argnames, n_optional)
        # expected to be extracted from them.
        docs = [
            None,
            '',
            'abcd',
            'f1()',
            'f2(x)',
            'f3(x, y)',
            'f4(x[, a])',
            'f5(x, y[, a])',
            'f6(x, y[, a[, b]])',
            'f7([a])',
            'f8([a[, b]])',
        ]
        answers = [
            (None, 0),
            (None, 0),
            (None, 0),
            (None, 0),
            (['x'], 0),
            (['x', 'y'], 0),
            (['x', 'a'], 1),
            (['x', 'y', 'a'], 1),
            (['x', 'y', 'a', 'b'], 2),
            (['a'], 1),
            (['a', 'b'], 2),
        ]
        # A throwaway object whose __doc__ we can rewrite per case.
        f = type('Dummy', tuple(), {'__doc__': None})()
        # Normalise getargspec output to (argnames, number of defaults).
        trans = lambda x: (x[0], len(x[-1] or []))
        for doc, answer in zip(docs, answers):
            f.__doc__ = doc
            self.assertEqual(trans(getargspec( f )), answer)

    def test_getoptmetas(self):
        # NOTE(review): the original literal's internal whitespace was lost in
        # this dump; the content below is reproduced as shown — confirm the
        # spacing against the upstream file before relying on it.
        doc = """
-d, --debug enable debug mode
-q, -s, --quiet, --slient enable slient mode
-n N, --times N how many times do you want
"""
        answer = [ [('d', None), ('debug', None)],
                   [('q', None), ('s', None), ('quiet', None), ('slient', None)],
                   [('n', 'N'), ('times', 'N')] ]
        self.assertEqual(list(getoptmetas(doc)), answer)

if __name__ == '__main__':
    unittest.main()
| mit | Python |
|
dd8496c61543b3e39c5ee3ccb8bc7b9f69e9487f | add tests for packet | kratsg/ironman | tests/test_packet.py | tests/test_packet.py | from zope.interface.verify import verifyClass, verifyObject
from ironman.packet import IPBusPacket
from ironman.interfaces import IIPbusPacket
def test_ipbus_packet_create():
    """An IPBusPacket can be constructed."""
    obj = IPBusPacket()
    assert obj is not None

def test_ipbus_packet_class_iface():
    # Assure the class implements the declared interface.
    # BUG FIX: this originally referenced `IIPBusPacket`, but the module only
    # imports `IIPbusPacket` (lower-case "b"), so the test died with a
    # NameError. Using the imported spelling; confirm which capitalisation
    # ironman.interfaces actually exports.
    assert verifyClass(IIPbusPacket, IPBusPacket)

def test_ipbus_packet_instance_iface():
    # Assure instances of the class provide the declared interface.
    assert verifyObject(IIPbusPacket, IPBusPacket())
| mit | Python |
|
b2aace3212f51ac7db83281903e6282849a58adb | add portmanteau finder | darius/languagetoys,darius/languagetoys | portmanteau.py | portmanteau.py | """
Let's find pairs of words that blend nicely, like
book + hookup --> bookup
Strategy: given a wordlist, first remove generative affixes like un-
and -ly. Find all reasonably-long substrings of every word. Match
suffixes of candidate first words with midparts of candidate second
words.
TODO: get better at stripping affixes
(though that's not always a win: e.g.:
contendresse contendress + tendresse)
(also: bunchawed from bunch and unchawed)
TODO: currently we're matching suffixes against prefixes instead
of midparts, so the motivating example above doesn't even appear...
TODO: the pronunciations should blend, not just the spelling.
"""
import re
raw_words = set(unicode(line.rstrip('\n'), 'utf8').lower()
for line in open('words')) #open('/usr/share/dict/words'))
left_noise = """
be bi em en di duo im iso non oct octo out pre quad quadra quadri re
sub tri un uni
""".split()
right_noise = """
ability able adian age an ation d ed en ent er es escent ful ian ic
ies ily iness ing ish ite ize less let log like liness ly ness og
ogy proof r ress ry s ship tion y
""".split()
def noisy(w):
    """True if w is an affixed or compound form of other dictionary words.

    Such words make boring blend candidates, so they are filtered out.
    """
    for affix in left_noise:
        if w.startswith(affix) and w[len(affix):] in raw_words:
            return True
    for affix in right_noise:
        if w.endswith(affix) and w[:-len(affix)] in raw_words:
            return True
    # Also reject straight compounds: any split into two dictionary words.
    for i in range(1, len(w)):
        p, s = w[:i], w[i:]
        if p in raw_words and s in raw_words:
            return True
    return False
words = set(w for w in raw_words if not noisy(w))
if False:
for word in sorted(words):
print word
import sys
sys.exit(0)
prefixes = {}
for w in words:
if 3 < len(w):
for i in range(3, len(w)+1):
p = w[:i]
prefixes.setdefault(p, []).append(w)
suffixes = {}
for w in words:
if 3 < len(w):
for i in range(len(w)-3):
p = w[i:]
suffixes.setdefault(p, []).append(w)
common = set()
for prefix, prefix_words in prefixes.iteritems():
if prefix in suffixes:
suffix_words = suffixes[prefix]
if suffix_words != prefix_words:
if any(not p.startswith(s)
and not s.endswith(p)
and (s + p[len(prefix):]) not in raw_words
for p in prefix_words
for s in suffix_words):
common.add(prefix)
print len(common)
print max(common, key=len)
def portmanteaus(affix):
    """Yield (suffix_word, prefix_word, affix) blends that share `affix`."""
    for pw in prefixes[affix]:
        for sw in suffixes[affix]:
            # Skip trivial overlaps and blends that are already real words.
            if (not pw.startswith(sw) and not sw.endswith(pw)
                    and (sw + pw[len(affix):]) not in raw_words):
                yield sw, pw, affix
import math
import pdist
def score((s, p, affix)):
return -math.log10(pdist.Pw(s) * pdist.Pw(p) * 1.1**len(affix))
L = len(s) + len(p) - len(affix)
return -math.log10(pdist.Pw(s) * pdist.Pw(p) * 2**(-float(L)/len(affix)))
results = [(score(triple), triple)
for affix in common
for triple in portmanteaus(affix)]
for score, (s, p, affix) in sorted(results):
combo = s + p[len(affix):]
print ' %6.2f %-30s %s + %s' % (score, combo, s, p)
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.