commit: stringlengths (min 40, max 40)
subject: stringlengths (min 4, max 1.73k)
repos: stringlengths (min 5, max 127k)
old_file: stringlengths (min 2, max 751)
new_file: stringlengths (min 2, max 751)
new_contents: stringlengths (min 1, max 8.98k)
old_contents: stringlengths (min 0, max 6.59k)
license: stringclasses (13 values)
lang: stringclasses (23 values)
64ea416a335d9c1a8946411c2b3b1a67cd450131
Add first pass at reconstructed targets module.
lmjohns3/cube-experiment,lmjohns3/cube-experiment,lmjohns3/cube-experiment
vizard/targets.py
vizard/targets.py
import viz
import vizact
import vizproximity
import vizshape

import vrlab


class Target:
    '''A target is a single cube in the motion-capture space.

    Subjects are tasked with touching the cubes during the experiment.
    '''

    def __init__(self, index, x, y, z):
        self.center = x, y, z
        self.sphere = vizshape.addSphere(
            0.7, center=self.center, color=viz.WHITE)
        self.sound = viz.addAudio('{:02d}.wav'.format(index))
        self.signal = vizact.Signal()
        self.sensor = vizproximity.addBoundingSphereSensor(self.sphere, scale=1)

    def activate(self, prox):
        prox.clearSensors()
        prox.addSensor(self.sensor)
        prox.onEnter(self.sensor, lambda e: vrlab.sounds.drip.play())
        prox.onEnter(self.sensor, lambda e: self.sphere.color(viz.BLUE))
        prox.onEnter(self.sensor, self.signal.send)
        prox.onExit(self.sensor, lambda e: self.sphere.color(viz.WHITE))


NUMBERED = (
    Target( 0, -1.98, 0.05, -1.86),
    Target( 1, -1.72, 1.83, 2.26),
    Target( 2, 0.00, 0.05, 1.86),
    Target( 3, 1.73, 0.05, -1.79),
    Target( 4, 1.89, 0.99, 2.26),
    Target( 5, -2.14, 0.93, 0.10),
    Target( 6, -0.24, 0.90, -1.76),
    Target( 7, 1.51, 1.81, -1.76),
    Target( 9, 1.79, 0.05, 0.00),
    Target(10, 0.10, 1.89, 0.10),
    Target(11, -0.24, 1.86, 2.26),
)

CIRCUITS = (
    (10, 0, 1, 3, 8, 4, 11, 7, 9, 6, 5, 2),
    (7, 1, 0, 11, 9, 2, 8, 3, 6, 4, 10, 5),
    (3, 0, 8, 11, 5, 10, 6, 1, 4, 2, 9, 7),
    (11, 8, 7, 3, 4, 6, 9, 5, 0, 2, 1, 10),
    (4, 7, 8, 5, 6, 0, 3, 1, 9, 10, 2, 11),
    (10, 3, 9, 1, 2, 4, 5, 7, 11, 0, 6, 8),
)
mit
Python
58626e757b463f2aec6751e04fbaf0e83cf0adf9
Create Bigram.py
HaythemSahbani/Web-mining-university-project
src/3-trained-classifier/Bigram.py
src/3-trained-classifier/Bigram.py
__author__ = 'Atef Bellaaj'

import collections
import nltk.metrics
import nltk.classify.util
from nltk.classify import NaiveBayesClassifier
from nltk.corpus import movie_reviews

neg_ids = movie_reviews.fileids('neg')
pos_ids = movie_reviews.fileids('pos')

import itertools
from nltk.collocations import BigramCollocationFinder
from nltk.metrics import BigramAssocMeasures


def bigram_word_feats(words, score_fn=BigramAssocMeasures.chi_sq, n=200):
    bigram_finder = BigramCollocationFinder.from_words(words)
    bigrams = bigram_finder.nbest(score_fn, n)
    return dict([(ngram, True) for ngram in itertools.chain(words, bigrams)])


neg_feats = [(bigram_word_feats(movie_reviews.words(fileids=[f])), 'neg') for f in neg_ids]
pos_feats = [(bigram_word_feats(movie_reviews.words(fileids=[f])), 'pos') for f in pos_ids]

neg_limit = len(neg_feats)*3/4
pos_limit = len(pos_feats)*3/4

trainfeats = neg_feats[:neg_limit] + pos_feats[:pos_limit]
testfeats = neg_feats[neg_limit:] + pos_feats[pos_limit:]
print 'train on %d instances, test on %d instances' % (len(trainfeats), len(testfeats))
print neg_feats[1]

classifier = NaiveBayesClassifier.train(trainfeats)

import pickle
f = open('bigram_classifier.pickle', 'wb')
pickle.dump(classifier, f)
f.close()

print 'accuracy:', nltk.classify.util.accuracy(classifier, testfeats)
classifier.show_most_informative_features()

refsets = collections.defaultdict(set)
testsets = collections.defaultdict(set)

for i, (feats, label) in enumerate(testfeats):
    refsets[label].add(i)
    observed = classifier.classify(feats)
    testsets[observed].add(i)

print 'pos precision:', nltk.metrics.precision(refsets['pos'], testsets['pos'])
print 'pos recall:', nltk.metrics.recall(refsets['pos'], testsets['pos'])
print 'pos F-measure:', nltk.metrics.f_measure(refsets['pos'], testsets['pos'])
print 'neg precision:', nltk.metrics.precision(refsets['neg'], testsets['neg'])
print 'neg recall:', nltk.metrics.recall(refsets['neg'], testsets['neg'])
print 'neg F-measure:', nltk.metrics.f_measure(refsets['neg'], testsets['neg'])
mit
Python
ad0a1c1404c53f1565ef728a747d5d5f319f1992
Add tests for Enterprise
auth0/auth0-python,auth0/auth0-python
auth0/v2/test/authentication/test_enterprise.py
auth0/v2/test/authentication/test_enterprise.py
import unittest
import mock

from ...authentication.enterprise import Enterprise


class TestEnterprise(unittest.TestCase):

    @mock.patch('auth0.v2.authentication.enterprise.Enterprise.get')
    def test_saml_metadata(self, mock_get):
        e = Enterprise('my.domain.com')

        e.saml_metadata('cid')

        mock_get.assert_called_with(
            url='https://my.domain.com/samlp/metadata/cid'
        )

    @mock.patch('auth0.v2.authentication.enterprise.Enterprise.get')
    def test_wsfed_metadata(self, mock_get):
        e = Enterprise('my.domain.com')

        e.wsfed_metadata()

        mock_get.assert_called_with(
            url='https://my.domain.com/wsfed/FederationMetadata' \
                '/2007-06/FederationMetadata.xml'
        )
mit
Python
8780243a88f505c06962247fdcc6e4bc4abb2912
add prototype at python
kobtea/gof
prototype.py
prototype.py
#!/usr/bin/env python

import copy


class Manager:
    def __init__(self):
        self.showcase = {}

    def register(self, name, obj):
        self.showcase[name] = obj

    def clone(self, name):
        return copy.deepcopy(self.showcase[name])


class MessageBox:
    def __init__(self, deco_char):
        self.deco_char = deco_char

    def display(self, message):
        print(self.deco_char * (len(message) + len(self.deco_char) * 2 + 2))
        print('{0} {1} {0}'.format(self.deco_char, message))
        print(self.deco_char * (len(message) + len(self.deco_char) * 2 + 2))


if __name__ == '__main__':
    manager = Manager()
    box1 = MessageBox('*')
    manager.register('ast', box1)

    box2 = manager.clone('ast')

    print(id(box1))
    print(id(box2))
    box1.display('hogehoge')
    box2.display('hogehoge')
mit
Python
c61452cb7358c3000992e593349158a0e24a5f51
Add migration
pfctdayelise/allseasons,pfctdayelise/allseasons,pfctdayelise/allseasons,pfctdayelise/allseasons
allseasons/convert/migrations/0004_message.py
allseasons/convert/migrations/0004_message.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-28 14:05
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('convert', '0003_auto_20170714_1421'),
    ]

    operations = [
        migrations.CreateModel(
            name='Message',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sender', models.CharField(max_length=100)),
                ('receiver', models.CharField(max_length=100)),
                ('date', models.DateTimeField(auto_now=True)),
                ('mtype', models.CharField(choices=[('email', 'email')], max_length=100)),
                ('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='convert.EventOfInterest')),
            ],
            options={
                'ordering': ('date',),
            },
        ),
    ]
bsd-3-clause
Python
c68d2492b8dcc6fbd7fc91e784994ef9cf43db0f
Create LORA_Repeater_logger.py
AlcorDust/LORA_stuff,AlcorDust/LORA_stuff
LORA_Repeater/LORA_Repeater_logger.py
LORA_Repeater/LORA_Repeater_logger.py
from datetime import datetime

import serial

NOME_FILE = "LORA_LOG.txt"

ser = serial.Serial('/dev/ttyACM0', 9600)

while ser.inWaiting() != 0:
    trash = ser.readline()

while(True):
    while ser.inWaiting() != 0:
        incoming = ser.readline().decode("utf-8")
        #print(incoming)
        parsed = str(incoming).split(",")
        time = datetime.now().strftime("%H:%M:%S")
        data = parsed[1] + "," + parsed[2] + "," + parsed[3] + "," + time + "\n"
        print(data)
        with open(NOME_FILE, "a+") as f:
            f.write(data)
mit
Python
1553863d25eb3053fdf558a290e2eb0a1fae28c0
Add debug tests.
KenKundert/inform,KenKundert/inform
tests/test_debug.py
tests/test_debug.py
#!/usr/bin/env python # Test Inform debug functions try: # python3 import builtins except ImportError: # python2 import __builtin__ as builtins # Imports {{{1 from inform import Inform, aaa, ddd, ppp, sss, vvv from textwrap import dedent # Test cases {{{1 def test_anglicize(capsys): Inform(colorscheme=None, prog_name=False) ppp() out, err = capsys.readouterr() assert out == dedent(''' DEBUG: test_debug.py, 18, test_debug.test_anglicize() ''').lstrip() def test_grouch(capsys): Inform(colorscheme=None, prog_name=False) a = 0 b = 'b' ppp('hey now!', a, b) out, err = capsys.readouterr() assert out == dedent(''' DEBUG: test_debug.py, 28, test_debug.test_grouch(): hey now! 0 b ''').lstrip() def test_salver(capsys): Inform(colorscheme=None, prog_name=False) a = 0 b = 'b' c = [a, b] d = {a, b} e = {a:b} ddd('hey now!', a, b, c, d, e) out, err = capsys.readouterr() assert out == dedent(''' DEBUG: test_debug.py, 41, test_debug.test_salver(): 'hey now!' 0 'b' [0, 'b'] {0, 'b'} {0: 'b'} ''').lstrip() def test_daiquiri(capsys): Inform(colorscheme=None, prog_name=False) a = 0 b = 'b' c = [a, b] d = {a, b} e = {a:b} ddd(s='hey now!', a=a, b=b, c=c, d=d, e=e) out, err = capsys.readouterr() assert out == dedent(''' DEBUG: test_debug.py, 60, test_debug.test_daiquiri(): a = 0 b = 'b' c = [0, 'b'] d = {0, 'b'} e = {0: 'b'} s = 'hey now!' ''').lstrip() class Info: def __init__(self, **kwargs): self.__dict__.update(kwargs) ddd(self=self, **kwargs) def test_prude(capsys): Inform(colorscheme=None, prog_name=False) Info(email='ted@ledbelly.com') out, err = capsys.readouterr() assert out == dedent(''' DEBUG: test_debug.py, 75, test_debug.Info.__init__(): email = 'ted@ledbelly.com' self = Info object containing {'email': 'ted@ledbelly.com'} ''').lstrip() def test_update(capsys): Inform(colorscheme=None, prog_name=False) a = 0 b = 'b' c = [a, b] d = {a, b} e = {a:b} vvv() out, err = capsys.readouterr() out = '\n'.join(l for l in out.split('\n') if 'capsys' not in l) assert out == dedent(''' DEBUG: test_debug.py, 94, test_debug.test_update(): a = 0 b = 'b' c = [0, 'b'] d = {0, 'b'} e = {0: 'b'} ''').lstrip() def test_shear(capsys): Inform(colorscheme=None, prog_name=False) a = 0 b = 'b' c = [a, b] d = {a, b} e = {a:b} vvv(a, b, c, d, e) out, err = capsys.readouterr() assert out == dedent(''' DEBUG: test_debug.py, 113, test_debug.test_shear(): a = 0 b = 'b' c = [0, 'b'] d = {0, 'b'} e = {0: 'b'} ''').lstrip() def test_prostrate(capsys): Inform(colorscheme=None, prog_name=False) sss() out, err = capsys.readouterr() out = out.strip().split('\n') assert out[0] == 'DEBUG: test_debug.py, 126, test_debug.test_prostrate():' assert out[-2] == " File '/home/ken/src/inform/tests/test_debug.py', line 126, in test_prostrate," assert out[-1] == ' sss()' def test_rubber(capsys): Inform(colorscheme=None, prog_name=False) a = aaa('a') out, err = capsys.readouterr() assert out == dedent(''' DEBUG: test_debug.py, 135, test_debug.test_rubber(): 'a' ''').lstrip() assert a == 'a' b = aaa(b = 'b') out, err = capsys.readouterr() assert out == dedent(''' DEBUG: test_debug.py, 142, test_debug.test_rubber(): b: 'b' ''').lstrip() assert b == 'b'
mit
Python
60c10a781501b0a467b55a599d835bdc760c8891
Add test_utils
ulope/django-watchman,mwarkentin/django-watchman,gerlachry/django-watchman,mwarkentin/django-watchman,gerlachry/django-watchman,JBKahn/django-watchman,ulope/django-watchman,blag/django-watchman,blag/django-watchman,JBKahn/django-watchman
tests/test_utils.py
tests/test_utils.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
test_django-watchman
------------

Tests for `django-watchman` decorators module.
"""

from __future__ import unicode_literals

import unittest

from watchman.utils import get_checks


class TestWatchman(unittest.TestCase):

    def setUp(self):
        pass

    def test_get_checks_returns_all_available_checks_by_default(self):
        self.assertEqual([check.__name__ for check in get_checks()], ['caches_status', 'email_status', 'databases_status'])

    def test_get_checks_with_check_list_returns_union(self):
        check_list = ['watchman.checks.caches_status']
        self.assertEqual([check.__name__ for check in get_checks(check_list=check_list)], ['caches_status'])

    def test_get_checks_with_skip_list_returns_difference(self):
        skip_list = ['watchman.checks.caches_status']
        self.assertEqual([check.__name__ for check in get_checks(skip_list=skip_list)], ['databases_status', 'email_status'])

    def test_get_checks_with_matching_check_and_skip_list_returns_empty_list(self):
        check_list, skip_list = ['watchman.checks.caches_status'], ['watchman.checks.caches_status']
        self.assertEqual([check.__name__ for check in get_checks(check_list=check_list, skip_list=skip_list)], [])

    def test_get_checks_with_check_and_skip_list(self):
        check_list = ['watchman.checks.caches_status', 'watchman.checks.databases_status']
        skip_list = ['watchman.checks.caches_status']
        self.assertEqual([check.__name__ for check in get_checks(check_list=check_list, skip_list=skip_list)], ['databases_status'])
bsd-3-clause
Python
52219c4d55c7b80b4a2185887675615c4d427298
Add is_sequence util function
thaim/ansible,thaim/ansible
lib/ansible/module_utils/common/collections.py
lib/ansible/module_utils/common/collections.py
# Copyright (c), Sviatoslav Sydorenko <ssydoren@redhat.com> 2018
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
"""Collection of low-level utility functions."""

from __future__ import absolute_import, division, print_function
__metaclass__ = type


from ..six import binary_type, text_type
from ._collections_compat import Sequence


def is_string(seq):
    """Identify whether the input has a string-like type (including bytes)."""
    return isinstance(seq, (text_type, binary_type))


def is_sequence(seq, include_strings=False):
    """Identify whether the input is a sequence.

    Strings and bytes are not sequences here,
    unless ``include_strings`` is ``True``.

    Non-indexable things are never of a sequence type.
    """
    if not include_strings and is_string(seq):
        return False

    return isinstance(seq, Sequence)
mit
Python
973c2098eec88c9656fe858d4815bd7925d532f6
add Memento pattern
JakubVojvoda/design-patterns-python
memento/Memento.py
memento/Memento.py
#
# Python Design Patterns: Memento
# Author: Jakub Vojvoda [github.com/JakubVojvoda]
# 2016
#
# Source code is licensed under MIT License
# (for more details see LICENSE)
#

import sys

#
# Memento
# stores internal state of the Originator object and protects
# against access by objects other than the originator
#
class Memento:
    def __init__(self, state):
        self._state = state

    def setState(self, state):
        self._state = state

    def getState(self):
        return self._state

#
# Originator
# creates a memento containing a snapshot of its current internal
# state and uses the memento to restore its internal state
#
class Originator:
    def __init__(self):
        self._state = 0

    def setState(self, state):
        print("Set state to " + str(state) + ".")
        self._state = state

    def getState(self):
        return self._state

    def setMemento(self, memento):
        self._state = memento.getState()

    def createMemento(self):
        return Memento(self._state)

#
# CareTaker
# is responsible for the memento's safe keeping
#
class CareTaker:
    def __init__(self, originator):
        self._originator = originator
        self._history = []

    def save(self):
        print("Save state.")
        self._history.append(self._originator.createMemento())

    def undo(self):
        print("Undo state.")
        self._originator.setMemento(self._history[-1])
        self._history.pop()


if __name__ == "__main__":
    originator = Originator()
    caretaker = CareTaker(originator)

    originator.setState(1)
    caretaker.save()

    originator.setState(2)
    caretaker.save()

    originator.setState(3)
    caretaker.undo()

    print("Actual state is " + str(originator.getState()) + ".")
mit
Python
cbbf4ec62bc8b8ed2c375e9e60939f932d2034e8
Create jogovelha.py
grazielaosouza/devops-aula05
src/jogovelha.py
src/jogovelha.py
apache-2.0
Python
0e12011edc31f964db8ce419d2f64b6d525be641
Create delete_occurrences_of_an_element_if_it_occurs_more_than_n_times.py
Kunalpod/codewars,Kunalpod/codewars
delete_occurrences_of_an_element_if_it_occurs_more_than_n_times.py
delete_occurrences_of_an_element_if_it_occurs_more_than_n_times.py
#Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Delete occurrences of an element if it occurs more than n times
#Problem level: 6 kyu

def delete_nth(order,max_e):
    i=0
    while(i<len(order)):
        if order[:i].count(order[i])>=max_e:
            order.pop(i)
        else:
            i+=1
    return order
mit
Python
06451bdb55faaa7fd22f7bac403d00dda0018c5d
Create setup.py
Cophy08/nhlscrapi,robhowley/nhlscrapi
setup.py
setup.py
from distutils.core import setup
from setuptools import find_packages

import nhlscrapi

setup(
    name="nhlscrapi",
    version=nhlscrapi.__version__,
    description='NHL Scrapr API for Python',
    author='Rob Howley',
    author_email='howley.robert@gmail.com',
    url='https://github.com/robhowley/nhlscrapi',
    packages=find_packages(),
    include_package_data=True,
    license="Apache Software License version 2.0",
    platforms='any',
    zip_safe=False,
    keywords='nhlscrapi',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Operating System :: OS Independent',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    test_suite='tests',
    # Dependent packages (distributions)
    install_requires=[],
)
apache-2.0
Python
f1d277c58f80a352b3715c145ce55a4030a4ab6a
add setup.py
zetaops/fake_zato
setup.py
setup.py
#!/usr/bin/env python

from distutils.core import setup
from setuptools import find_packages

setup(
    name='Fake Zato',
    version='0.1.0',
    description='Fake Zato',
    author='Zetaops',
    author_email='aliriza@zetaops.io',
    url='https://github.com/zetaops/fake_zato',
    packages=find_packages(),
)
agpl-3.0
Python
a262aeda8b706848b33d30353a9f269daf3acb0d
Bump version
BetterWorks/djangosaml2,MiguelSR/djangosaml2,WebSpider/djangosaml2,BetterWorks/djangosaml2,WebSpider/djangosaml2,MiguelSR/djangosaml2,writepython/djangosaml2,knaperek/djangosaml2,knaperek/djangosaml2,writepython/djangosaml2
setup.py
setup.py
# Copyright (C) 2011-2012 Yaco Sistemas <lgs@yaco.es> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from setuptools import setup, find_packages def read(*rnames): return open(os.path.join(os.path.dirname(__file__), *rnames)).read() setup( name='djangosaml2', version='0.13.1', description='pysaml2 integration in Django', long_description='\n\n'.join([read('README'), read('CHANGES')]), classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI", "Topic :: Security", "Topic :: Software Development :: Libraries :: Application Frameworks", ], keywords="django,pysaml2,saml2,federated authentication,authentication", author="Yaco Sistemas", author_email="lgs@yaco.es", url="https://bitbucket.org/lgs/djangosaml2", license='Apache 2.0', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'pysaml2==2.2.0', 'python-memcached==1.48', ], )
# Copyright (C) 2011-2012 Yaco Sistemas <lgs@yaco.es> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from setuptools import setup, find_packages def read(*rnames): return open(os.path.join(os.path.dirname(__file__), *rnames)).read() setup( name='djangosaml2', version='0.13.0', description='pysaml2 integration in Django', long_description='\n\n'.join([read('README'), read('CHANGES')]), classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI", "Topic :: Security", "Topic :: Software Development :: Libraries :: Application Frameworks", ], keywords="django,pysaml2,saml2,federated authentication,authentication", author="Yaco Sistemas", author_email="lgs@yaco.es", url="https://bitbucket.org/lgs/djangosaml2", license='Apache 2.0', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'pysaml2==2.2.0', 'python-memcached==1.48', ], )
apache-2.0
Python
9eacc3c3b81002c721cb24a1641583bf49bc3a53
bump version number
cornell-brg/pymtl,12yujim/pymtl,jjffryan/pymtl,cornell-brg/pymtl,12yujim/pymtl,cfelton/pymtl,cfelton/pymtl,Glyfina-Fernando/pymtl,jjffryan/pymtl,tj93/pymtl,Glyfina-Fernando/pymtl,tj93/pymtl,cornell-brg/pymtl,12yujim/pymtl,cfelton/pymtl,jjffryan/pymtl,jck/pymtl,tj93/pymtl,Glyfina-Fernando/pymtl,jck/pymtl,jck/pymtl
setup.py
setup.py
# setup.py inspired by the PyPA sample project: # https://github.com/pypa/sampleproject/blob/master/setup.py from setuptools import setup, find_packages from codecs import open # To use a consistent encoding from os import path def get_long_description(): here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name = 'pymtl', version = '1.4alpha0', # https://www.python.org/dev/peps/pep-0440/ description = 'Python-based hardware modeling framework', long_description = get_long_description(), url = 'https://github.com/cornell-brg/pymtl', author = 'Derek Lockhart', author_email = 'lockhart@csl.cornell.edu', # BSD 3-Clause License: # - http://choosealicense.com/licenses/bsd-3-clause # - http://opensource.org/licenses/BSD-3-Clause license='BSD', # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2.7', 'Operating System :: MacOS :: MacOS X', 'Operating System :: POSIX :: Linux', ], packages = find_packages( exclude=['scripts', 'tests', 'ubmark', 'perf_tests'] ), package_data={ 'pymtl': [ 'tools/translation/verilator_wrapper.templ.c', 'tools/translation/verilator_wrapper.templ.py', 'tools/translation/cpp_wrapper.templ.py', ], }, install_requires = [ 'cffi', 'greenlet', 'pytest', 'pytest-xdist', # Note: leaving out numpy due to pypy incompatibility #'numpy==1.9.0', ], )
# setup.py inspired by the PyPA sample project: # https://github.com/pypa/sampleproject/blob/master/setup.py from setuptools import setup, find_packages from codecs import open # To use a consistent encoding from os import path def get_long_description(): here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name = 'pymtl', version = '1.3alpha3', # https://www.python.org/dev/peps/pep-0440/ description = 'Python-based hardware modeling framework', long_description = get_long_description(), url = 'https://github.com/cornell-brg/pymtl', author = 'Derek Lockhart', author_email = 'lockhart@csl.cornell.edu', # BSD 3-Clause License: # - http://choosealicense.com/licenses/bsd-3-clause # - http://opensource.org/licenses/BSD-3-Clause license='BSD', # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2.7', 'Operating System :: MacOS :: MacOS X', 'Operating System :: POSIX :: Linux', ], packages = find_packages( exclude=['scripts', 'tests', 'ubmark', 'perf_tests'] ), package_data={ 'pymtl': [ 'tools/translation/verilator_wrapper.templ.c', 'tools/translation/verilator_wrapper.templ.py', 'tools/translation/cpp_wrapper.templ.py', ], }, install_requires = [ 'cffi', 'greenlet', 'pytest', 'pytest-xdist', # Note: leaving out numpy due to pypy incompatibility #'numpy==1.9.0', ], )
bsd-3-clause
Python
c99b5e564252aff55f14dd63c9cdef1728026561
Add setup.py
supl/twid,iconvinced/twid
setup.py
setup.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import twid

from setuptools import setup, find_packages

setup(
    name = "twid",
    version = twid.__version__,
    description = "The relevant functions about Taiwan Identification Card system.",
    author = "Plenty Su",
    author_email = "plenty.su@gmail.com",
    license = "MIT",
    packages = find_packages()
)
mit
Python
f16a21776eafc7fc373b9c43d5db74cea213c897
Create SoftwareCategory.py
OscarEReyes/Modding-Tool
SoftwareCategory.py
SoftwareCategory.py
from lxml import etree


class SoftwareCategory:
    def __init__(self, parent, category, unlock, scan=False):
        self.software = category
        self.feature = unlock
        if not scan:
            self.create_software_category(parent, category, unlock)

    @classmethod
    def delete_category(cls, feature, software_category):
        """
        * Parameter: feature (etree element -Tag- 'Feature')
        * Parameter: software_category (SoftwareCategory Object)
        * Remove the dependency from feature (etree element)
        """
        for child in feature:
            if child.tag == 'SoftwareCategory' and child.text == software_category:
                feature.remove(child)
                break

    def create_software_category(self, parent, category, unlock):
        """
        * Parameter: parent (etree element -Tag- 'Feature')
        * Parameter: category (str)
        * Parameter: unlock (str)
        * Create an etree subElement with a Tag "SoftwareCategory",
        * an attribute of Software equal to the parameter category.
        * Set text to the unlock parameter value
        * Return etree element
        """
        etree.SubElement(parent, "SoftwareCategory", Category=category).text = unlock
mit
Python
c54bd0cf16891bbc8b82dd2cb2af1455795325a2
add setup.py
bcicen/dsplice
setup.py
setup.py
import os
import sys
from setuptools import setup

exec(open('dsplice/version.py').read())

setup(name='dsplice',
      version=version,
      packages=['dsplice'],
      description='Docker image merge tool',
      author='Bradley Cicenas',
      author_email='bradley@vektor.nyc',
      url='https://github.com/bcicen/dsplice',
      install_requires=['docker-py>=1.7.2'],
      license='http://opensource.org/licenses/MIT',
      classifiers=(
          'License :: OSI Approved :: MIT License ',
          'Natural Language :: English',
          'Programming Language :: Python',
          'Programming Language :: Python :: 2',
          'Programming Language :: Python :: 3',
      ),
      keywords='docker image merge devops',
      entry_points = {
          'console_scripts' : ['dsplice = dsplice.cli:main']
      }
      )
mit
Python
3f66dbc15cb0564b22d304e09ed3c0b673d59476
Add setup.py
conbus/fbmq,conbus/fbmq
setup.py
setup.py
from distutils.core import setup

setup(name='fbmq',
      version='1.0.1',
      install_requires=['json', 'requests>=2.0']
      )
mit
Python
a1f17cf4b56edf861c9b650ccd18049ecf168e03
Add setup.py
clugg/humanizepy
setup.py
setup.py
import os
import re

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

PACKAGE_NAME = "humanizepy"
HERE = os.path.abspath(os.path.dirname(__file__))

with open(os.path.join(HERE, "README.md")) as fp:
    README = fp.read()
with open(os.path.join(HERE, PACKAGE_NAME, "__init__.py")) as fp:
    VERSION = re.search("__version__ = \"([^\"]+)\"", fp.read()).group(1)

setup(
    name=PACKAGE_NAME,
    version=VERSION,
    author="James \"clug\"",
    author_email="pip@clug.xyz",
    maintainer="James \"clug\"",
    maintainer_email="pip@clug.xyz",
    url="https://github.com/clugg/humanizepy",
    description=("Humanize values that are readable only for developers."),
    long_description=README,
    classifiers=["Development Status :: 5 - Production/Stable",
                 "Environment :: Console",
                 "Intended Audience :: Developers",
                 "License :: OSI Approved :: MIT License",
                 "Natural Language :: English",
                 "Operating System :: OS Independent",
                 "Programming Language :: Python",
                 "Programming Language :: Python :: 2.7",
                 "Programming Language :: Python :: 3.3",
                 "Programming Language :: Python :: 3.4",
                 "Programming Language :: Python :: 3.5",
                 "Topic :: Utilities"],
    license="MIT",
    keywords="humanize values roman numeral binary",
    packages=[PACKAGE_NAME]
)
mit
Python
92138e7ab37e6a69eb3808f9888b52b9e38deaa0
remove duplicate classifier
muhammad-ammar/django-require,muhammad-ammar/django-require,karolyi/django-require2,karolyi/django-require2,thijstriemstra/django-require,thijstriemstra/django-require,etianen/django-require,etianen/django-require
setup.py
setup.py
from distutils.core import setup from require import __version__ version_str = ".".join(str(n) for n in __version__) setup( name = "django-require", version = version_str, license = "BSD", description = "A Django staticfiles post-processor for optimizing with RequireJS.", author = "Dave Hall", author_email = "dave@etianen.com", url = "https://github.com/etianen/django-require", packages = [ "require", "require.management", "require.management.commands", "require.templatetags", ], package_data = { "require": [ "resources/*.jar", "resources/*.js", "resources/tests/*.js", ], }, classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Topic :: Internet :: WWW/HTTP", ], )
from distutils.core import setup from require import __version__ version_str = ".".join(str(n) for n in __version__) setup( name = "django-require", version = version_str, license = "BSD", description = "A Django staticfiles post-processor for optimizing with RequireJS.", author = "Dave Hall", author_email = "dave@etianen.com", url = "https://github.com/etianen/django-require", packages = [ "require", "require.management", "require.management.commands", "require.templatetags", ], package_data = { "require": [ "resources/*.jar", "resources/*.js", "resources/tests/*.js", ], }, classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Topic :: Internet :: WWW/HTTP", ], )
bsd-3-clause
Python
ac94d2cf9b4ab775fb7a125a83abc4fa59d56136
Add setuptools build
sblosser/pyshadowcopy
setup.py
setup.py
from setuptools import setup, find_packages
import os

here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
    long_description = f.read()

setup(
    name='pyshadowcopy',
    version='0.0.1',
    description='Python class to work with Shadow Copy on Windows',
    long_description=long_description,
    url='https://github.com/sblosser/pyshadowcopy',
    author='sblosser',
    license='MIT',
    keywords=['Windows', 'VSS', 'win32'],
    py_modules=['vss'],
    install_requires=['pypiwin32'],
)
mit
Python
edcf0e371ea3430c7d0c515dbf59e39e3522c076
Add license information to setup.py
ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,uploadcare/django-loginurl,ISIFoundation/influenzanet-website,vanschelven/cmsplugin-journal,ISIFoundation/influenzanet-website,fajran/django-loginurl,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website
setup.py
setup.py
from distutils.core import setup

import loginurl

setup(name='django-loginurl',
      version=loginurl.__version__,
      description='Allowing an anonymous user to log in by only visiting a URL',
      author='Fajran Iman Rusadi',
      author_email='fajran@gmail.com',
      url='http://github.com/fajran/django-loginurl/',
      license='BSD',
      download_url='http://github.com/fajran/django-loginurl/tarball/v0.1.2',
      packages=['loginurl', 'loginurl.management',
                'loginurl.management.commands'],
      package_dir={'loginurl': 'loginurl'},
      classifiers=['Development Status :: 4 - Beta',
                   'Environment :: Web Environment',
                   'Framework :: Django',
                   'Intended Audience :: Developers',
                   'License :: OSI Approved :: BSD License',
                   'Operating System :: OS Independent',
                   'Programming Language :: Python',
                   'Topic :: Software Development :: Libraries :: Python Modules',
                   'Topic :: Utilities'],
      )
from distutils.core import setup

import loginurl

setup(name='django-loginurl',
      version=loginurl.__version__,
      description='Allowing an anonymous user to log in by only visiting a URL',
      author='Fajran Iman Rusadi',
      author_email='fajran@gmail.com',
      url='http://github.com/fajran/django-loginurl/',
      download_url='http://github.com/fajran/django-loginurl/tarball/v0.1.2',
      packages=['loginurl', 'loginurl.management',
                'loginurl.management.commands'],
      package_dir={'loginurl': 'loginurl'},
      classifiers=['Development Status :: 4 - Beta',
                   'Environment :: Web Environment',
                   'Framework :: Django',
                   'Intended Audience :: Developers',
                   'License :: OSI Approved :: BSD License',
                   'Operating System :: OS Independent',
                   'Programming Language :: Python',
                   'Topic :: Software Development :: Libraries :: Python Modules',
                   'Topic :: Utilities'],
      )
agpl-3.0
Python
ef53ea9d1754ce5056b7b872ad0b7cd99e4af2bc
Add setup file
jaredmichaelsmith/bundigo
setup.py
setup.py
# -*- coding: utf-8 -*-

import re
from distutils.core import setup

version = re.search(
    '^__version__\s*=\*"(.*)"',
    open('bundigo/bundigo.py').read(),
    re.M
    ).group(1)

with open('README.md', 'rb') as f:
    long_descr = f.read().decode('utf-8')

setup(
    name = 'bundigo',
    packages = ['bundigo'],
    entry_points = {
        'console_scripts': ['bundigo = bundigo.bundigo.main']
    },
    version = version,
    description = "Your one-stop shop for starting a software project",
    long_description = long_descr,
    license = 'MIT',
    author = 'Jared Smith',
    author_email = 'jared@jaredsmith.io',
    url = 'https://jaredmichaelsmith.com/bundigo',
    install_requires=[
    ],
)
mit
Python
d9b844db2dc0453c073050c6ce7db18c3d48b57c
add setup.py file
squidboylan/apt-package-mirror
setup.py
setup.py
import setuptools

setuptools.setup(
    install_requires=['pyyaml'],
    author = 'Caleb Boylan',
    name = 'apt-package-mirror',
    description = 'Python script for running an apt package mirror',
    author_email = 'calebboylan@gmail.com',
    url = 'https://github.com/squidboylan/apt-package-mirror',
    version = '0.1.1',
    classifiers = [
        'Development Status :: 3 - Alpha',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
    ],
    packages=setuptools.find_packages(),
    entry_points = {
        'console_scripts': ['apt-package-mirror=mirror:main'],
    }
)
apache-2.0
Python
ac2f2b72c1f653f15058b300c82060c90adf146b
Update for 1.3.0 release
RyanGutenkunst/dadi,niuhuifei/dadi,cheese1213/dadi,ChenHsiang/dadi,paulirish/dadi,niuhuifei/dadi,ChenHsiang/dadi,paulirish/dadi,beni55/dadi,RyanGutenkunst/dadi,yangjl/dadi,cheese1213/dadi,yangjl/dadi,beni55/dadi
setup.py
setup.py
# Importing these adds a 'bdist_mpkg' option that allows building binary # packages on OS X. try: import setuptools import bdist_mpkg except ImportError: pass import os import numpy.distutils.core as core # Configure our C modules that are built with f2py. tridiag = core.Extension(name = 'dadi.tridiag', sources = ['dadi/tridiag.pyf', 'dadi/tridiag.c']) int_c = core.Extension(name = 'dadi.integration_c', sources = ['dadi/integration_c.pyf', 'dadi/integration1D.c', 'dadi/integration2D.c', 'dadi/integration3D.c', 'dadi/integration_shared.c', 'dadi/tridiag.c']) # If we're building a distribution, try to update svnversion. Note that this # fails silently. for arg in os.sys.argv: if arg.count('sdist') or arg.count('bdist'): os.system("svn up") os.system("svn info > dadi/svnversion") core.setup(name='dadi', version='1.3.0', author='Ryan Gutenkunst', author_email='rng7@cornell.edu', url='http://dadi.googlecode.com', ext_modules = [tridiag, int_c], scripts=['scripts/ms_jsfs.py'], packages=['dadi'], package_data = {'dadi':['svnversion'], 'tests':['IM.fs']}, license='BSD' )
# Importing these adds a 'bdist_mpkg' option that allows building binary # packages on OS X. try: import setuptools import bdist_mpkg except ImportError: pass import os import numpy.distutils.core as core # Configure our C modules that are built with f2py. tridiag = core.Extension(name = 'dadi.tridiag', sources = ['dadi/tridiag.pyf', 'dadi/tridiag.c']) int_c = core.Extension(name = 'dadi.integration_c', sources = ['dadi/integration_c.pyf', 'dadi/integration1D.c', 'dadi/integration2D.c', 'dadi/integration3D.c', 'dadi/integration_shared.c', 'dadi/tridiag.c']) # If we're building a distribution, try to update svnversion. Note that this # fails silently. for arg in os.sys.argv: if arg.count('sdist') or arg.count('bdist'): os.system("svn up") os.system("svn info > dadi/svnversion") core.setup(name='dadi', version='1.2.3', author='Ryan Gutenkunst', author_email='rng7@cornell.edu', url='http://dadi.googlecode.com', ext_modules = [tridiag, int_c], scripts=['scripts/ms_jsfs.py'], packages=['dadi'], package_data = {'dadi':['svnversion'], 'tests':['IM.fs']}, license='BSD' )
bsd-3-clause
Python
379488ee2980e1b33753d098d88fb1139a69deeb
add setup.py
firemark/hs-onliner,firemark/hs-onliner,firemark/hs-onliner
setup.py
setup.py
from setuptools import setup, find_packages

setup(
    name="hs-onliner",
    version="0.0.1",
    author="Firemark & Kytes",
    author_email="marpiechula@gmail.com",
    description="Site to view who will be in hackerspace every week.",
    license="MIT",
    keywords="example documentation tutorial",
    url="https://github.com/firemark/hs-onliner",
    packages=find_packages(),
    install_requires=(
        'Flask==0.10.1'
    )
)
mit
Python
b63a6ababb1a66ed3766399328c5b9c4ac0a7ce3
Bump version
rail/funsize,petemoore/funsize,rail/funsize,mozilla/funsize,petemoore/funsize,mozilla/funsize
setup.py
setup.py
from setuptools import setup setup( name="funsize", version="0.29", description="Funsize Scheduler", author="Mozilla Release Engineering", packages=["funsize"], include_package_data=True, # Not zip safe because we have data files in the package zip_safe=False, entry_points={ "console_scripts": [ "funsize-scheduler = funsize.scheduler:main", ], }, install_requires=[ "amqp", "anyjson", "argparse", "cffi", # PGPy depends on this _specific_ version of cryptography "cryptography==0.6", "enum34", "kombu", "PGPy", "pycparser", "PyHawk-with-a-single-extra-commit", "Jinja2", "PyYAML", "redo", # Because taskcluster hard pins this version... "requests==2.4.3", "singledispatch", "six", "taskcluster>=0.0.16", "wsgiref", ], tests_require=[ 'hypothesis', 'pytest', 'mock', ], )
from setuptools import setup setup( name="funsize", version="0.28", description="Funsize Scheduler", author="Mozilla Release Engineering", packages=["funsize"], include_package_data=True, # Not zip safe because we have data files in the package zip_safe=False, entry_points={ "console_scripts": [ "funsize-scheduler = funsize.scheduler:main", ], }, install_requires=[ "amqp", "anyjson", "argparse", "cffi", # PGPy depends on this _specific_ version of cryptography "cryptography==0.6", "enum34", "kombu", "PGPy", "pycparser", "PyHawk-with-a-single-extra-commit", "Jinja2", "PyYAML", "redo", # Because taskcluster hard pins this version... "requests==2.4.3", "singledispatch", "six", "taskcluster>=0.0.16", "wsgiref", ], tests_require=[ 'hypothesis', 'pytest', 'mock', ], )
mpl-2.0
Python
a281bad5905da4710314d657943cc145b7d748d4
add minimal setup.py
the-virtual-brain/tvb-hpc,the-virtual-brain/tvb-hpc,the-virtual-brain/tvb-hpc
setup.py
setup.py
import setuptools

setuptools.setup(
    name='tvb-hpc',
    version='0.0',
    description='HPC code generation for TVB',
    author='TVB-HPC Contributors',
    url='https://github.com/the-virtual-brain/tvb-hpc',
    packages=setuptools.find_packages(),
)
apache-2.0
Python
b383fadf43d3fb31d1501c780d4436717cc43776
add setup.py
Bornazadeh/django-payline-dotir,Bornazadeh/django-payline-dotir
setup.py
setup.py
import os
from setuptools import setup, find_packages

os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name='django-payline-dotir',
    version='0.1',
    author='Mahdi Bornazadeh',
    author_email='Bornazadeh@gmail.com',
    description='Persian payline.ir payment gateway in django.',
    long_description=open("README.md", 'rb').read().decode('utf-8'),
    license='BSD License',
    url='http://www.bornazadeh.ir/payline',
    zip_safe=False,
    include_package_data=True,
    packages=find_packages(),
    install_requires=[
        "requests",
    ],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Framework :: Django",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.3",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
        "Topic :: Internet :: WWW/HTTP :: WSGI",
        "Topic :: Software Development :: Libraries :: "
        "Application Frameworks",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
)
bsd-2-clause
Python
6bc555b93e09ab18a5778487cf3eb47329e83098
Set version to our own.
ccstolley/python-instagram,rebeccamoreau/python-instagram,ironman5366/python-instagram,aie108/python-instagram,gcd0318/python-instagram,raphaelfruneaux/python-instagram,Instagram/python-instagram,Mrfuture1/instagram,ac-cs/python-instagram,TrevorOctober/python-instagram,i94u/python-instagram,jamesthechamp/python-instagram,webjunkie/python-instagram,beni55/python-instagram,M4gn4tor/python-instagram,Rafeh01/python-instagram,RebelMouseTeam/python-instagram,MA3STR0/python-instagram,sergeyglazyrindev/python-instagram,raymondethan/python-instagram,sociateru/python-instagram,najleonard/python-instagram,LeadSift/python-instagram,Mrfuture1/instagram,Crowdbooster/python-instagram,facebookarchive/python-instagram,designcrumble/python-instagram,iRGBit/python-instagram,Aviramk/python-instagram,navierula/python-instagram
setup.py
setup.py
#!/usr/bin/env python
from setuptools import setup, find_packages

setup(name="python-instagram",
      version="0.8.0powll1",
      description="Instagram API client",
      license="MIT",
      install_requires=["simplejson","httplib2"],
      author="Instagram, Inc",
      author_email="apidevelopers@instagram.com",
      url="http://github.com/Instagram/python-instagram",
      packages = find_packages(),
      keywords= "instagram",
      zip_safe = True)
#!/usr/bin/env python
from setuptools import setup, find_packages

setup(name="python-instagram",
      version="0.8.0",
      description="Instagram API client",
      license="MIT",
      install_requires=["simplejson","httplib2"],
      author="Instagram, Inc",
      author_email="apidevelopers@instagram.com",
      url="http://github.com/Instagram/python-instagram",
      packages = find_packages(),
      keywords= "instagram",
      zip_safe = True)
bsd-3-clause
Python
68fac699c5506f80ab727a4c569d8797294584bd
Bump the version number.
HubSpot/hapipy,CurataEng/hapipy,CBitLabs/hapipy,jonathan-s/happy
setup.py
setup.py
#!/usr/bin/env python

from setuptools import setup

setup(
    name='hapipy',
    version='2.9.1',
    description="A python wrapper around HubSpot's APIs",
    long_description=open('README.md').read(),
    author='HubSpot Dev Team',
    author_email='devteam+hapi@hubspot.com',
    url='https://github.com/HubSpot/hapipy',
    download_url='https://github.com/HubSpot/hapipy/tarball/v2.9.0',
    license='LICENSE.txt',
    packages=['hapi', 'hapi.mixins'],
    install_requires=[
        'nose==1.1.2',
        'unittest2==0.5.1',
        'simplejson>=2.1.2'
    ],
)
#!/usr/bin/env python

from setuptools import setup

setup(
    name='hapipy',
    version='2.9.0',
    description="A python wrapper around HubSpot's APIs",
    long_description=open('README.md').read(),
    author='HubSpot Dev Team',
    author_email='devteam+hapi@hubspot.com',
    url='https://github.com/HubSpot/hapipy',
    download_url='https://github.com/HubSpot/hapipy/tarball/v2.9.0',
    license='LICENSE.txt',
    packages=['hapi', 'hapi.mixins'],
    install_requires=[
        'nose==1.1.2',
        'unittest2==0.5.1',
        'simplejson>=2.1.2'
    ],
)
apache-2.0
Python
c1c49b0e1718331663ee109f3417aff97fd23b70
Add minimal setup.py for RTD
njsmith/h11,python-hyper/h11
setup.py
setup.py
# Minimal setup.py to get readthedocs working, not recommended for real use

from distutils.core import setup

setup(name="h11",
      version="0.0.0",
      packages=["h11"],
      )
mit
Python
f34dd8ab047275b8d29366599621443a8bc468c9
Add launcher script for nbconvert
scraperwiki/databaker,scraperwiki/databaker
databaker/databaker_nbconvert.py
databaker/databaker_nbconvert.py
#!/usr/bin/env python

import os
import subprocess
import sys


def main(argv):
    if len(argv) == 0 or len(argv) > 2:
        print("Usage: databaker_process.py <notebook_file> <input_file>")
        print()
        print("<input_file> is optional; it replaces DATABAKER_INPUT_FILE")
        print("in the notebook.")
        print("The input file should also be in the same directory as the")
        print("notebook.")
        sys.exit(1)

    process_env = os.environ.copy()

    if len(argv) == 2:
        process_env['DATABAKER_INPUT_FILE'] = argv[1]

    # TODO get custom templates working; according to this:
    # https://github.com/jupyter/nbconvert/issues/391
    # they should work, but I get TemplateNotFound when using absolute path
    # for template.
    cmd_line = ['jupyter', 'nbconvert', '--to', 'html', '--execute', argv[0]]
    print("Running:", ' '.join(cmd_line))
    subprocess.call(args=cmd_line, env=process_env)


if __name__ == '__main__':
    main(sys.argv[1:])
agpl-3.0
Python
a18c6d560a02049bf3dae08bebf6d3598f29c35d
Add pywinauto automated test for top-down view
google/orbit,google/orbit,google/orbit,google/orbit
contrib/automation_tests/orbit_top_down.py
contrib/automation_tests/orbit_top_down.py
""" Copyright (c) 2020 The Orbit Authors. All rights reserved. Use of this source code is governed by a BSD-style license that can be found in the LICENSE file. """ """Inspect the top-down view in Orbit using pywinauto. Before this script is run there needs to be a gamelet reserved and "hello_ggp_standalone" has to be started. The script requires absl and pywinauto. Since pywinauto requires the bitness of the python installation to match the bitness of the program under test it needs to by run from 64 bit python. This automation script covers a basic workflow: - start Orbit - connect to a gamelet - select a process - take a capture - verify that the top-down view contains at least 3 rows - verify that the first item is "hello_* (all threads)" - verify that the second item is "GgpSwapchain [*]" - verify that the children of the first item are "*clone" and "_start" """ import orbit_testing import logging import time from absl import app import pywinauto from pywinauto.application import Application def main(argv): orbit_testing.WaitForOrbit() application = Application(backend='uia').connect(title_re='orbitprofiler') orbit_testing.ConnectToGamelet(application) orbit_testing.SelectProcess(application, 'hello_') orbit_testing.FocusOnCaptureWindow(application) orbit_testing.Capture(application, 5); main_wnd = application.window(title_re='orbitprofiler', found_index=0) main_wnd.child_window(title="Top-Down").click_input() logging.info('Switched to Top-Down tab') # Now that the "Top-Down" tab is selected, # main_wnd.TreeView is the QTreeView of the top-down view. # main_wnd.TreeView.children(control_type='TreeItem') returns # every cell in the top-down view, in order by row and then column. # It can take a few seconds. logging.info('Listing items of the top-down view...') tree_items = main_wnd.TreeView.children(control_type='TreeItem') TOP_DOWN_ROW_CELL_COUNT = 6 row_count_before_expansion = len(tree_items) / TOP_DOWN_ROW_CELL_COUNT if row_count_before_expansion < 3: raise RuntimeError('Less than 3 rows in the top-down view') if (not tree_items[0].window_text().startswith('hello_') or not tree_items[0].window_text().endswith(' (all threads)')): raise RuntimeError('First item of the top-down view is not "hello_* (all threads)"') logging.info('Verified that first item is "hello_* (all threads)"') if (not tree_items[TOP_DOWN_ROW_CELL_COUNT].window_text().startswith('GgpSwapchain [') or not tree_items[TOP_DOWN_ROW_CELL_COUNT].window_text().endswith(']')): raise RuntimeError('Second item of the top-down view is not "GgpSwapchain [*]"') logging.info('Verified that second item is "GgpSwapchain [*]"') tree_items[0].double_click_input() logging.info('Expanded the first item') logging.info('Re-listing items of the top-down view...') tree_items = main_wnd.TreeView.children(control_type='TreeItem') row_count_after_expansion = len(tree_items) / TOP_DOWN_ROW_CELL_COUNT if row_count_after_expansion != row_count_before_expansion + 2: raise RuntimeError('First item of the top-down view doesn\'t have exactly two children') if (not tree_items[TOP_DOWN_ROW_CELL_COUNT].window_text().endswith('clone') or tree_items[2 * TOP_DOWN_ROW_CELL_COUNT].window_text() != '_start'): raise RuntimeError('Children of the first item of the top-down view ' 'are not "*clone" and "_start"') logging.info('Verified that children of the first item are "*clone" and "_start"') main_wnd.CloseButton.click_input() logging.info('Closed Orbit.') if __name__ == '__main__': app.run(main)
bsd-2-clause
Python
afe216da917c171ff857de122be64a9b2a7d3e9c
migrate doaj client test from harvester
DOAJ/doaj,DOAJ/doaj,DOAJ/doaj,DOAJ/doaj
doajtest/unit/test_api_client.py
doajtest/unit/test_api_client.py
""" Unit tests for the DOAJ client """ from unittest import TestCase from doajtest.fixtures.journals import JournalFixtureFactory from portality.api.v1.client import client as doajclient, models from portality.lib import dataobj class TestDOAJ(TestCase): def setUp(self): pass def tearDown(self): pass def test_01_journal_issns(self): source = JournalFixtureFactory.make_journal_source() j = models.Journal(source) issns = j.all_issns() assert "1234-5678" in issns assert "9876-5432" in issns assert "4444-4444" in issns assert "5555-5555" in issns assert "0101-0101" in issns assert len(issns) == 5 def test_02_validate_article(self): invalid = {"bibjson" : {}} # first check the article validator works with self.assertRaises(dataobj.DataStructureException): models.ArticleValidator(invalid) # then check that the api validation method works a = models.Article(invalid) assert not a.is_api_valid()
apache-2.0
Python
fc9dd735c96ae21b4a64286e4c9ebcedc0e1fbca
Add script to subset kerning plist.
adobe-type-tools/kern-dump
subsetKerning.py
subsetKerning.py
import sys
from plistlib import writePlist

from defcon import Font

__doc__ = '''
Subset kerning in UFO given a list of glyphs provided.
Will export new plist files that can be swapped into the UFO.

Usage:
python subsetKerning.py subsetList font.ufo
'''


class SubsetKerning(object):
    """docstring for SubsetKerning"""

    def __init__(self, font, subsetFile):
        self.font = Font(font)
        self.subsetFile = subsetFile

        with open(self.subsetFile, 'r') as ssfile:
            rawData = ssfile.read()
        self.subsetGlyphList = [line.split()[0] for line in rawData.splitlines()]

    def subsetGroups(self):
        newGroups = {}
        for groupName, glyphList in self.font.groups.items():
            combinedGlyphs = set(self.subsetGlyphList) & set(glyphList)
            newGlyphList = sorted(list(combinedGlyphs))

            if len(newGlyphList):
                newGroups[groupName] = newGlyphList
        return newGroups

    def subsetKerning(self):
        newGroups = self.subsetGroups()
        newKerning = {}
        plistStyleKerning = {}

        # All allowed items for kerning, which are our subset glyphs,
        # plus the groups filtered earlier:
        allowedItems = set(newGroups) | set(self.subsetGlyphList)

        for [left, right], value in self.font.kerning.items():
            if set([left, right]) <= allowedItems:
                newKerning[left, right] = value

        # Since the kerning paradigm stored in the plist differs from the one
        # in the kerning object, the data structure needs some modification:
        for [left, right], value in newKerning.items():
            partnerDict = plistStyleKerning.setdefault(left, {})
            partnerDict[right] = value

        return plistStyleKerning


def run():
    sk = SubsetKerning(sys.argv[-1], sys.argv[-2])

    writePlist(sk.subsetGroups(), 'subset_groups.plist')
    writePlist(sk.subsetKerning(), 'subset_kerning.plist')
    print 'done'


if len(sys.argv) == 3:
    run()
else:
    print __doc__
mit
Python
0e6d0def4e00868ed5e788f5319440ab6382f10f
include provision script (#176)
anastasia-tarasova/indy-sdk,srottem/indy-sdk,anastasia-tarasova/indy-sdk,peacekeeper/indy-sdk,peacekeeper/indy-sdk,srottem/indy-sdk,srottem/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,srottem/indy-sdk,srottem/indy-sdk,Artemkaaas/indy-sdk,srottem/indy-sdk,Artemkaaas/indy-sdk,anastasia-tarasova/indy-sdk,Artemkaaas/indy-sdk,Artemkaaas/indy-sdk,anastasia-tarasova/indy-sdk,Artemkaaas/indy-sdk,srottem/indy-sdk,peacekeeper/indy-sdk,peacekeeper/indy-sdk,anastasia-tarasova/indy-sdk,peacekeeper/indy-sdk,peacekeeper/indy-sdk,srottem/indy-sdk,anastasia-tarasova/indy-sdk,Artemkaaas/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,anastasia-tarasova/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,anastasia-tarasova/indy-sdk,peacekeeper/indy-sdk,anastasia-tarasova/indy-sdk,srottem/indy-sdk,Artemkaaas/indy-sdk,anastasia-tarasova/indy-sdk,peacekeeper/indy-sdk,srottem/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,anastasia-tarasova/indy-sdk,anastasia-tarasova/indy-sdk,srottem/indy-sdk,srottem/indy-sdk,srottem/indy-sdk,anastasia-tarasova/indy-sdk
cxs/libcxs/scripts/provision_agent_keys.py
cxs/libcxs/scripts/provision_agent_keys.py
#!/usr/bin/env python3 # Provided by The Python Standard Library import json import argparse import asyncio import time import os import urllib.request import sys from ctypes import * def parse_args(): parser = argparse.ArgumentParser() parser.add_argument("WALLET_NAME") parser.add_argument("AGENCY_URL") parser.add_argument("WALLET_KEY") parser.add_argument("--agent-seed", help="optional seed used to create enterprise->agent DID/VK") parser.add_argument("--enterprise-seed", help="optional seed used to create enterprise DID/VK") parser.add_argument("--verbose", action="store_true") return parser.parse_args() def get_agency_info(agency_url): agency_info = {} agency_resp = '' #Get agency's did and verkey: try: agency_req=urllib.request.urlopen('{}/agency'.format(agency_url)) except: exc_type, exc_value, exc_traceback = sys.exc_info() sys.stderr.write("Failed looking up agency did/verkey: '{}': {}\n".format(exc_type.__name__,exc_value)) print(json.dumps({ 'provisioned': False, 'provisioned_status': "Failed: Could not retrieve agency info from: {}/agency: '{}': {}".format(agency_url,exc_type.__name__,exc_value) },indent=2)) sys.exit(1) agency_resp = agency_req.read() try: agency_info = json.loads(agency_resp.decode()) except: exc_type, exc_value, exc_traceback = sys.exc_info() sys.stderr.write("Failed parsing response from agency endpoint: {}/agency: '{}': {}\n".format(agency_url,exc_type.__name__,exc_value)) sys.stderr.write("RESPONSE: {}".format(agency_resp)) print(json.dumps({ 'provisioned': False, 'provisioned_status': "Failed: Could not parse response from agency endpoint from: {}/agency: '{}': {}".format(agency_url,exc_type.__name__,exc_value) },indent=2)) sys.exit(1) return agency_info def register_agent(args): cxs = CDLL('./libcxs.so') agency_info = get_agency_info(args.AGENCY_URL) json_str = json.dumps({'agency_url':args.AGENCY_URL, 'agency_did':agency_info['DID'], 'agency_verkey':agency_info['verKey'], 'wallet_name':args.WALLET_NAME, 'wallet_key':args.WALLET_KEY, 'agent_seed':args.agent_seed, 'enterprise_seed':args.enterprise_seed}) c_json = c_char_p(json_str.encode('utf-8')) rc = cxs.cxs_provision_agent(c_json) if rc == 0: sys.stderr.write("could not register agent, see log\n") print(json.dumps({ 'provisioned': False, 'provisioned_status': 'Failed: Could not register agenct, see log\n' },indent=2)) else: pointer = c_int(rc) string = cast(pointer.value, c_char_p) new_config = json.loads(string.value.decode('utf-8')) print(json.dumps(new_config, indent=2)) async def main(): args = parse_args() if args.verbose: os.environ["RUST_LOG"] = "info" else: os.environ["RUST_LOG"] = "error" register_agent(args) if __name__ == "__main__": loop = asyncio.get_event_loop() loop.run_until_complete(main()) time.sleep(.1)
apache-2.0
Python
35258c9b37997801af05875f04f450050a3e5273
Create tarea5.py
JOSUEXLION/prog3-uip,JOSUEXLION/prog3-uip
tareas/tarea5.py
tareas/tarea5.py
#josue de leon
#lista de supermercado
#una tupla para las opciones, y una lista para la lista

import os

lista = []
opciones = ("1. Añadir producto a la lista.","2. Borrar el ultimo producto de la lista.","3. Mostrar toda la lista.")
control = 1

print("\n\n\tBienvenido a su lista de compras.\n\nRecuerde que su lista esta en blanco, asi que lo primero que debe hacer es\nagregar nuevos elementos.")

while control == 1:
    print("\nSeleccione que desea hacer:\n\n\t"+str(opciones[0])+"\n\t"+str(opciones[1])+"\n\t"+str(opciones[2])+"\n")
    seleccion = int(input("Escoja una opcion: "))

    if seleccion == 1:
        print('*Ingrese un producto para añadir a su lista.\n*Ingrese "fin" para finalizar su lista.')
        producto_lista = ''
        while producto_lista.lower() != "fin":
            producto_lista = input()
            if producto_lista.lower() != "fin":
                lista.append(producto_lista)
    elif seleccion == 2:
        del lista[(len(lista)-1)]
        print("El ultimo elemento ha sido borrado!")
    elif seleccion == 3:
        centinela = 1
        while centinela <= len(lista):
            print("- "+lista[centinela-1])
            centinela = centinela + 1

    control = int(input('\n¿Desea continuar con su lista?\n- Presione "1" para CONTINUAR.\n- Si desea SALIR presione "0": '))
    os.system("cls")
mit
Python
0c4d6491fe89e339e9d9505e6e46e8317e78034a
Add telnet testing script
indiv0/SEGIMAP,uiri/SEGIMAP,uiri/SEGIMAP,indiv0/SEGIMAP,uiri/SEGIMAP,indiv0/SEGIMAP
telnet/telnet.py
telnet/telnet.py
#!/usr/bin/env python3
import pexpect
import os, sys, time

ip = "127.0.0.1"
port = "10000"
username = "nikitapekin@gmail.com"
password = "12345"

os.remove('../maildir/.lock')

child = pexpect.spawn('telnet '+ ip + ' ' + port)
child.expect('.\n')
child.logfile = sys.stdout.buffer

time.sleep(1)

child.sendline('1 login ' + username + ' ' + password)
child.expect('1 OK logged in successfully as nikitapekin@gmail.com')
child.sendline('2 select INBOX')
child.expect('successful')
child.sendline('3 fetch 1:2 (FLAGS BODY[HEADER.FIELDS (DATE FROM)])')
child.expect('unimplemented')
mit
Python
40832b561437d8a022b2dbe9f19e5fcf622fb6d4
Add genspec.py, used to convert from the current Python-based spec to the new XML-based
freedesktop-unofficial-mirror/telepathy__telepathy-spec,TelepathyIM/telepathy-spec
tools/genspec.py
tools/genspec.py
#!/usr/bin/python2.4

import sys

try:
    from elementtree.ElementTree import fromstring, tostring, Element, SubElement
except ImportError:
    print "You need to install ElementTree (http://effbot.org/zone/element-index.htm)"
    sys.exit(1)

import dbus
from xml.dom.minidom import parseString

from telepathy.server import *

copyright = """\
Copyright (C) 2005, 2006 Collabora Limited
Copyright (C) 2005, 2006 Nokia Corporation
Copyright (C) 2006 INdT
"""

license = """\
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""

def strip (element):
    if element.text:
        element.text = element.text.strip()

    if element.tail:
        element.tail = element.tail.strip()

    for child in element:
        strip (child)

nameconsts = {}
f = file(sys.argv[2])
for line in f:
    name, const = line.split()
    nameconsts[name] = const

classes = file(sys.argv[1])

for line in classes:
    if line[0] == '#':
        continue
    elif line == '\n':
        continue

    line = line.strip()
    print line
    cls = eval(line)
    bases = (cls, dbus.service.Object)

    # classes half-baked to order... :)
    subclass = type(line, bases,
                    {'__init__':lambda self: None,
                     '__del__':lambda self: None,
                     '_object_path':'/'+line,
                     '_name':line})
    instance = subclass()
    xml = instance.Introspect()

    # sort
    root = fromstring(xml)

    for i, e in enumerate(root):
        if e.get('name') == 'org.freedesktop.DBus.Introspectable':
            del root[i]

    # embrace and extend the D-Bus introspection data, because it only supports
    # annotations which are effectively an attribute value, and we want
    # multi-line docstrings
    root.set('xmlns:tp', 'http://telepathy.freedesktop.org/wiki/DbusSpec#extensions-v0')

    for interface in root:
        interface[:] = sorted(interface[:], key=lambda e: e.get('name'))

        for member in interface:
            SubElement(member, 'tp:docstring').text = '\n%s\n' % getattr(cls, member.get('name')).__doc__

        text = cls.__doc__
        interface.set('tp:name-const', nameconsts[interface.get('name')])
        if text is not None:
            SubElement(interface, 'tp:docstring').text = '\n%s\n' % text
        break
    else:
        # ContactList has no methods
        interface = SubElement(root, 'interface', name=cls._dbus_interfaces[0])
        text = cls.__doc__
        if text is not None:
            SubElement(interface, 'tp:docstring').text = '\n%s\n' % text
        interface.set('tp:name-const', nameconsts[cls._dbus_interfaces[0]])

    basename = root[0].get('name')

    elt = Element('tp:license')
    elt.text = license
    root.insert(0, elt)

    elt = Element('tp:copyright')
    elt.text = copyright
    root.insert(0, elt)

    # pretty print
    strip(root)
    xml = tostring(root)
    dom = parseString(xml)

    basename = basename.replace('org.freedesktop.Telepathy.', '')
    basename = basename.replace('.', '-')
    file = open(basename + '.xml', 'w')
    s = dom.toprettyxml(' ', '\n')
    file.write(s)
    # keep the string splitting here - it stops vim thinking this file
    # is XML!
    file.write('<!-- v''im:set sw=2 sts=2 et ft=xml: -->\n')
    file.close()
lgpl-2.1
Python
945fe81c4a0f970e57ff7c5a13d8c3aa03df5fc6
Add function to save/restore environment between configuration checks.
cournape/numscons,cournape/numscons,cournape/numscons
numscons/checkers/new/common.py
numscons/checkers/new/common.py
from copy import deepcopy

def save_and_set(env, opts, keys=None):
    """Put informations from option configuration into a scons environment, and
    returns the savedkeys given as config opts args."""
    saved_keys = {}
    if keys is None:
        keys = opts.keys()
    for k in keys:
        saved_keys[k] = (env.has_key(k) and deepcopy(env[k])) or []

    kw = dict(zip(keys, [opts[k] for k in keys]))
    if kw.has_key('LINKFLAGSEND'):
        env.AppendUnique(**{'LINKFLAGSEND' : kw['LINKFLAGSEND']})
        del kw['LINKFLAGSEND']
    env.Prepend(**kw)

    return saved_keys

def restore(env, saved):
    keys = saved.keys()
    kw = dict(zip(keys, [saved[k] for k in keys]))
    env.Replace(**kw)
bsd-3-clause
Python
bef94fea3318c835c1474ebdfe74f89d8251baf9
add test_cover.py
dialounke/pylayers,dialounke/pylayers,pylayers/pylayers,pylayers/pylayers
pylayers/gis/test/test_cover.py
pylayers/gis/test/test_cover.py
import pylayers.gis.ezone as ez
from pylayers.gis.gisutil import ent,ext2qt
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import os
import smopy
from cartopy import config
import cartopy.crs as ccrs

fig = plt.figure(figsize=(12,12))
white = np.zeros((10,10))
ax = fig.add_subplot(111)
z = ez.Ezone('N48W002')
z.loadh5()
z.rebase()
zoom=11
p = (48.721095,-1.830548)
print "p : ",p
xtile,ytile=smopy.deg2num(p[0],p[1],zoom,do_round=True)
print "xtile,ytile : ",xtile,ytile
(lat0,lon0)=smopy.num2deg(xtile,ytile,zoom,do_round=True)
(lat1,lon1)=smopy.num2deg(xtile+1,ytile+1,zoom,do_round=True)
print "lat,lon WN",lat0,lon0
print "lat,lon ES",lat1,lon1
#mp = smopy.Map((lat1,lon0,lat0,lon1),z=zoom)
mp = smopy.Map((48,-2,49,-1),z=zoom)
##f,a = z.show(alpha=0.3)
box_tile = mp.box_tile
print box_tile
L_ll,l_ll=smopy.num2deg(box_tile[0],box_tile[1]+1,zoom)
L_ur,l_ur=smopy.num2deg(box_tile[2]+1,box_tile[3],zoom)
extent_true = np.array((l_ll,l_ur,L_ll,L_ur))
print extent_true
#print extent_true
##print z.extent
f,a = z.show(fig=fig,ax=ax,alpha=0.4)
#f,a=plt.subplots(1,1)
im1 = a.imshow(mp.img,extent=extent_true,alpha=0.6)
im2 = a.imshow(white,extent=(-2.2,-0.9,47.9,49.1),alpha=0)
a.plot(p[1],p[0],'ob')
###mp.box_tile=(0,0,73000,111000)
###mp.h=73000
###mp.w=111000
###mp.box_tile=(0,111000,73000,0)
###mp.xmin = 0
###mp.ymin=0
###ax = mp.show_mpl(figsize=(20,10),alpha=1)
##fig=plt.gcf()
###z.extent_c=(0,1024,0,1280)
###z.extent_c=(506,509,351,355)
###print z.extent_c
a = z.cover(Ht=2,Hr=2,Rmax=10000)
##
mit
Python
1a29e182a196e3fc4fbe00c0db6e22c2619473f3
Add iOSExtractor test
qurami/strings2pot
strings2pot/extractors/ios_test.py
strings2pot/extractors/ios_test.py
# -*- coding: utf-8 -*-

import os
import unittest

import ios


class iOSExtractorTest(unittest.TestCase):
    def setUp(self):
        self.mock_source_file = 'mock_source_ios.strings'
        self.mock_destination_file = 'mock_destination_ios.pot'

        def mock_context_id_generator(s):
            return 'MOCK_CONTEXT_ID'
        self.mock_context_id_generator = mock_context_id_generator

        with open(self.mock_source_file, 'a') as source_file:
            source_file.write("""
/* Test string with a placeholder */
"Test string with a \"%@\" here" = "Test string with a \"%@\" here";
""")

    def tearDown(self):
        try:
            os.unlink(self.mock_source_file)
            os.unlink(self.mock_destination_file)
        except Exception, e:
            pass

    # test that the iOSExtractor class constructor sets source_file and destination_file attributes
    def test_ctor(self):
        sut = ios.iOSExtractor(
            self.mock_source_file,
            self.mock_destination_file,
            self.mock_context_id_generator
        )

        self.assertEqual(sut.source_file, self.mock_source_file)
        self.assertEqual(sut.destination_file, self.mock_destination_file)

    # test that iOSExtractor parse_string method converts string in POT format
    def test_parse_string(self):
        sut = ios.iOSExtractor('', '', self.mock_context_id_generator)

        single_line_string = "\' \" %@"
        self.assertEqual(
            sut.parse_string(single_line_string),
            '"\' \" %s"'
        )

        multi_line_string = "\' \" \\n %@"
        self.assertEqual(
            sut.parse_string(multi_line_string),
            '''""
"\' \" \\n"
" %s"'''
        )

    # test that iOSExtractor run method converts an input file in POT format
    def test_run(self):
        sut = ios.iOSExtractor(
            self.mock_source_file,
            self.mock_destination_file,
            self.mock_context_id_generator
        )
        sut.run()

        with open(self.mock_destination_file, 'r') as destination_file:
            lines = destination_file.readlines()
        pot_content_as_string = "".join(lines)

        self.assertEqual(
            pot_content_as_string,
            '''
#: mock_source_ios.strings:4
msgctxt "MOCK_CONTEXT_ID"
msgid "Test string with a \"%s\" here"
msgstr ""
'''
        )


if __name__ == '__main__':
    unittest.main()
mit
Python
f1c95af353c741f26a9bd95f8228ef74e90bca75
Add itest
samirelanduk/inferi
itests/tests.py
itests/tests.py
from unittest import TestCase
import inferi

class Tests(TestCase):

    def test_variables(self):
        # Basic variable behaviour
        var = inferi.Variable(4, 8, 15, 16, 23, 42, name="Numbers")
        self.assertEqual(var.values(), (4, 8, 15, 16, 23, 42))
        self.assertEqual(var.name(), "Numbers")
        self.assertEqual(len(var), 6)
        self.assertEqual(var.length(), 6)
        var[4] = 24
        self.assertEqual(var.values(), (4, 8, 15, 16, 24, 42))
        var[4] = 23
        self.assertEqual(var[4], 23)
        var.set(2, 14)
        self.assertEqual(var.values(), (4, 8, 14, 16, 23, 42))
        var.set(2, 15)
        self.assertEqual(var.get(2), 15)
        var.add(108)
        self.assertEqual(var.values(), (4, 8, 15, 16, 23, 42, 108))
        var.remove(108)
        self.assertEqual(var.values(), (4, 8, 15, 16, 23, 42))
        self.assertEqual(var.pop(), 42)
        self.assertEqual(var.values(), (4, 8, 15, 16, 23))
        var.add(42)
        self.assertEqual(var[-1], 42)
        var.name("The Numbers")
        self.assertEqual(var.name(), "The Numbers")

        # Variable metrics
        self.assertEqual(var.min(), 4)
        self.assertEqual(var.max(), 42)
        self.assertEqual(var.sum(), 108)
        self.assertEqual(var.mean(), 18)
        self.assertEqual(var.median(), 15.5)
        self.assertEqual(var.mode(), None)
        var.add(15)
        self.assertEqual(var.mode(), 15)
        var.pop()
        self.assertEqual(var.range(), 38)
        self.assertAlmostEqual(var.variance(), 182, delta=0.005)
        self.assertAlmostEqual(var.variance(population=True), 151.67, delta=0.005)
        self.assertAlmostEqual(var.st_dev(), 13.49, delta=0.005)
        self.assertAlmostEqual(var.st_dev(population=True), 12.32, delta=0.005)
        self.assertAlmostEqual(var.zscore(4.51), -1, delta=0.005)

        # Variable comparison
        var2 = inferi.Variable(34, 21, 56, 43, 78, 79)
        self.assertAlmostEqual(var.covariance_with(var2), 269.2, delta=0.05)
        self.assertAlmostEqual(var.correlation_with(var2), 0.845, delta=0.005)

        # Variable arithmetic
        var3 = inferi.Variable.average(var, var2)
        self.assertEqual(var3.length(), 6)
        self.assertEqual(var3[0], 19)
        self.assertEqual(var3[2], 35.5)
        var3 = var + var2
        self.assertEqual(var3.length(), 6)
        self.assertEqual(var3[0], 38)
        self.assertEqual(var3[2], 71)
        var3 = var2 - var
        self.assertEqual(var3.length(), 6)
        self.assertEqual(var3[0], 30)
        self.assertEqual(var3[2], 41)

        # Variable errors
        var = inferi.Variable(4, 8, 15, name="Numbers", error=[0.8, 0.5, 0.3])
        self.assertEqual(var.values(), (4, 8, 15))
        self.assertEqual(var.error(), (0.8, 0.5, 0.3))
        self.assertEqual(var.get(0), 4)
        self.assertEqual(var.get(0, error=True).error(), 0.8)
        self.assertEqual(var.get(0, error=True).relative_error(), 0.2)
mit
Python
90642d734fbdcc3a97693106259c35c25f19d38e
Add problem 1
dimkarakostas/matasano-cryptochallenges
problem_1.py
problem_1.py
import sys

hex_string = sys.argv[1]

print hex_string.decode('hex').encode('base64')
mit
Python
e19e45f7c6ff68599503c3ee0d6712974a8b4e66
Document current pycurl exception behavior
pycurl/pycurl,pycurl/pycurl,pycurl/pycurl
tests/error_test.py
tests/error_test.py
#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et

import pycurl
import sys
import unittest

class ErrorTest(unittest.TestCase):
    def setUp(self):
        self.curl = pycurl.Curl()

    def tearDown(self):
        self.curl.close()

    # error originating in libcurl
    def test_pycurl_error_libcurl(self):
        try:
            # perform without a url
            self.curl.perform()
        except pycurl.error:
            exc_type, exc = sys.exc_info()[:2]
            assert exc_type == pycurl.error
            # pycurl.error's arguments are libcurl errno and message
            self.assertEqual(2, len(exc.args))
            self.assertEqual(int, type(exc.args[0]))
            self.assertEqual(str, type(exc.args[1]))
            # unpack
            err, msg = exc
            self.assertEqual(pycurl.E_URL_MALFORMAT, err)
            # possibly fragile
            self.assertEqual('No URL set!', msg)

    # pycurl raises standard library exceptions in some cases
    def test_pycurl_error_stdlib(self):
        try:
            # set an option of the wrong type
            self.curl.setopt(pycurl.WRITEFUNCTION, True)
        except TypeError:
            exc_type, exc = sys.exc_info()[:2]

    # error originating in pycurl
    def test_pycurl_error_pycurl(self):
        try:
            # invalid option combination
            self.curl.setopt(pycurl.WRITEFUNCTION, lambda x: x)
            with open(__file__) as f:
                self.curl.setopt(pycurl.WRITEHEADER, f)
        except pycurl.error:
            exc_type, exc = sys.exc_info()[:2]
            assert exc_type == pycurl.error
            # for non-libcurl errors, arguments are just the error string
            self.assertEqual(1, len(exc.args))
            self.assertEqual(str, type(exc.args[0]))
            self.assertEqual('cannot combine WRITEHEADER with WRITEFUNCTION.', exc.args[0])
lgpl-2.1
Python
b6500cc5ae48212b7cabefc313b417a42273274b
Add test for parsing the man page
lord63/tldr.py
tests/test_parse.py
tests/test_parse.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import unittest

import mock

from tldr.parser import parse_page


class TestParse(unittest.TestCase):
    def test_parse_page(self):
        mock_config = {
            'colors': {
                'command': 'cyan',
                'description': 'blue',
                'usage': 'green'
            },
            'platform': 'linux',
            'repo_directory': '/tmp/tldr'
        }
        with mock.patch('tldr.parser.get_config', return_value=mock_config):
            result = parse_page('/tmp/tldr/pages/sunos/prctl.md')
            assert ''.join(result) == (
                '\n\x1b[0m\x1b[34m Get or set the resource controls of '
                'running processes,\n\x1b[0m\x1b[34m tasks, and projects\n'
                '\x1b[0m\n\x1b[0m\x1b[32m- examine process limits and '
                'permissions\n\x1b[0m\n\x1b[0m\x1b[36m prctl {{PID}}\n\x1b'
                '[0m\n\x1b[0m\x1b[32m- examine process limits and permissions '
                'in machine parseable format\n\x1b[0m\n\x1b[0m\x1b[36m prctl '
                '-P {{PID}}\n\x1b[0m\n\x1b[0m\x1b[32m- Get specific limit for '
                'a running process\n\x1b[0m\n\x1b[0m\x1b[36m prctl -n '
                'process.max-file-descriptor {{PID}}\x1b[0m'
            )
mit
Python
38dee68b15e2daf3c9d6ece845dc561500545258
Create test_plots.py
gwtsa/gwtsa,pastas/pastas,pastas/pasta
tests/test_plots.py
tests/test_plots.py
from test_model import test_add_stressmodel

from pastas.plots import TrackSolve


def test_plot():
    ml = test_add_stressmodel()
    ml.plot()


def test_decomposition():
    ml = test_add_stressmodel()
    ml.plots.decomposition(min_ylim_diff=0.1)


def test_results():
    ml = test_add_stressmodel()
    ml.plots.results()


def test_block_response():
    ml = test_add_stressmodel()
    ml.plots.block_response()


def test_step_response():
    ml = test_add_stressmodel()
    ml.plots.step_response()


def test_diagnostics():
    ml = test_add_stressmodel()
    ml.plots.diagnostics()


def test_stresses():
    ml = test_add_stressmodel()
    ml.plots.stresses()


def test_contributions_pie():
    ml = test_add_stressmodel()
    ml.plots.contributions_pie()


def test_tracksolve():
    ml = test_add_stressmodel()
    track = TrackSolve(ml)
    track.initialize_figure()
    ml.solve(callback=track.update_figure)
mit
Python
8a573baabee65bfbd348901e0d1c7828cdadd337
Add tests for stats.normalize
jhamrick/python-snippets
tests/test_stats.py
tests/test_stats.py
import numpy as np
np.seterr(all='raise')

from stats import normalize


def check_normalization_constants(arr, axis):
    sum = np.log(np.sum(arr, axis=axis))
    z = normalize(np.log(arr), axis=axis)[0]
    zdiff = np.abs(sum - z)
    if not (zdiff < 1e-8).all():
        print sum
        print z
        raise AssertionError("wrong normalization constant")


def check_normalization(arr, axis):
    sum = np.sum(arr, axis=axis)
    norm = np.log(arr / np.expand_dims(sum, axis=axis))
    n = normalize(np.log(arr), axis=axis)[1]
    ndiff = np.abs(norm - n)
    if not(ndiff < 1e-8).all():
        print norm
        print n
        raise AssertionError("wrong normalized values")


def test_normalize_10():
    """Test stats.normalize for a vector"""
    for i in xrange(5):
        arr = np.random.gamma(2, scale=2, size=10)
        yield (check_normalization_constants, arr, 0)
        yield (check_normalization, arr, 0)


def test_normalize_5x10x15():
    """Test stats.normalize for a multidimensional array"""
    for i in xrange(5):
        arr = np.random.gamma(2, scale=2, size=(5, 15, 20))
        for axis in xrange(3):
            yield (check_normalization_constants, arr, axis)
            yield (check_normalization, arr, axis)


def test_normalize_2x100000():
    """Test stats.normalize for a large array"""
    for i in xrange(1):
        arr = np.random.gamma(2, scale=2, size=(2, 100000))
        for axis in xrange(2):
            yield (check_normalization_constants, arr, axis)
            yield (check_normalization, arr, axis)
mit
Python
88f6c8c3657cba81c65da34a7161c860c8a23c5f
add RPC test for InvalidateBlock
donaloconnor/bitcoin,scmorse/bitcoin,Flowdalic/bitcoin,lakepay/lake,Jcing95/iop-hd,ppcoin/ppcoin,emc2foundation/einsteinium,alejandromgk/Lunar,mb300sd/bitcoin,misdess/bitcoin,RHavar/bitcoin,shaolinfry/litecoin,nathan-at-least/zcash,Cocosoft/bitcoin,namecoin/namecoin-core,initaldk/bitcoin,irvingruan/bitcoin,nvmd/bitcoin,jambolo/bitcoin,TrainMAnB/vcoincore,gameunits/gameunits,TrainMAnB/vcoincore,domob1812/namecore,meighti/bitcoin,tdudz/elements,nbenoit/bitcoin,psionin/smartcoin,senadmd/coinmarketwatch,HeliumGas/helium,dgarage/bc3,antonio-fr/bitcoin,wcwu/bitcoin,ahmedbodi/terracoin,jonasschnelli/bitcoin,pinheadmz/bitcoin,Alonzo-Coeus/bitcoin,cannabiscoindev/cannabiscoin420,particl/particl-core,elliotolds/bitcoin,Cocosoft/bitcoin,bitcoinclassic/bitcoinclassic,braydonf/bitcoin,balajinandhu/bitcoin,lakepay/lake,arnuschky/bitcoin,sstone/bitcoin,TheBlueMatt/bitcoin,ElementsProject/elements,namecoin/namecore,fanquake/bitcoin,cculianu/bitcoin-abc,koharjidan/bitcoin,cculianu/bitcoin-abc,domob1812/i0coin,lakepay/lake,initaldk/bitcoin,domob1812/bitcoin,superjudge/bitcoin,bittylicious/bitcoin,cculianu/bitcoin-abc,reorder/viacoin,loxal/zcash,Vector2000/bitcoin,AkioNak/bitcoin,Diapolo/bitcoin,paveljanik/bitcoin,lateminer/bitcoin,error10/bitcoin,BitcoinHardfork/bitcoin,Bitcoin-ABC/bitcoin-abc,martindale/elements,joshrabinowitz/bitcoin,DigiByte-Team/digibyte,ericshawlinux/bitcoin,cculianu/bitcoin-abc,tjps/bitcoin,Krellan/bitcoin,haobtc/bitcoin,digibyte/digibyte,pouta/bitcoin,zander/bitcoinclassic,itmanagerro/tresting,OmniLayer/omnicore,monacoinproject/monacoin,hasanatkazmi/bitcoin,ajtowns/bitcoin,syscoin/syscoin,syscoin/syscoin,zotherstupidguy/bitcoin,simonmulser/bitcoin,hsavit1/bitcoin,acid1789/bitcoin,ahmedbodi/temp_vert,domob1812/huntercore,MazaCoin/maza,rjshaver/bitcoin,Alex-van-der-Peet/bitcoin,qtumproject/qtum,vcoin-project/vcoincore,tuaris/bitcoin,globaltoken/globaltoken,daveperkins-github/bitcoin-dev,habibmasuro/bitcoin,earonesty/bitcoin,awemany/BitcoinUnlimited,gravio-net/graviocoin,funbucks/notbitcoinxt,reorder/viacoin,denverl/bitcoin,zsulocal/bitcoin,llluiop/bitcoin,Electronic-Gulden-Foundation/egulden,loxal/zcash,daliwangi/bitcoin,NateBrune/bitcoin-fio,ppcoin/ppcoin,CryptArc/bitcoin,torresalyssa/bitcoin,174high/bitcoin,kazcw/bitcoin,cryptoprojects/ultimateonlinecash,Kogser/bitcoin,UFOCoins/ufo,RyanLucchese/energi,Rav3nPL/PLNcoin,xawksow/GroestlCoin,psionin/smartcoin,gmaxwell/bitcoin,gandrewstone/bitcoinxt,wellenreiter01/Feathercoin,fanquake/bitcoin,czr5014iph/bitcoin4e,jn2840/bitcoin,peercoin/peercoin,GreenParhelia/bitcoin,ludbb/bitcoin,jonghyeopkim/bitcoinxt,MasterX1582/bitcoin-becoin,Bitcoin-ABC/bitcoin-abc,cculianu/bitcoin-abc,peercoin/peercoin,ediston/energi,faircoin/faircoin,jmcorgan/bitcoin,bitcoinsSG/bitcoin,thrasher-/litecoin,xieta/mincoin,Har01d/bitcoin,torresalyssa/bitcoin,Rav3nPL/PLNcoin,GroestlCoin/bitcoin,ericshawlinux/bitcoin,Rav3nPL/polcoin,simdeveloper/bitcoin,sarielsaz/sarielsaz,karek314/bitcoin,bitbrazilcoin-project/bitbrazilcoin,nbenoit/bitcoin,HashUnlimited/Einsteinium-Unlimited,aniemerg/zcash,omefire/bitcoin,aspanta/bitcoin,sickpig/BitcoinUnlimited,andreaskern/bitcoin,gjhiggins/vcoincore,lateminer/bitcoin,dscotese/bitcoin,itmanagerro/tresting,TeamBitBean/bitcoin-core,111t8e/bitcoin,BitzenyCoreDevelopers/bitzeny,MazaCoin/maza,theuni/bitcoin,acid1789/bitcoin,GlobalBoost/GlobalBoost,drwasho/bitcoinxt,worldbit/worldbit,kevcooper/bitcoin,EntropyFactory/creativechain-core,REAP720801/bitcoin,joshrabinowitz/bitcoin,KnCMiner/bitcoin,sickpig/BitcoinUnlimited,bitcoinec/bitcoinec,rnicoll
/dogecoin,mm-s/bitcoin,Alex-van-der-Peet/bitcoin,mruddy/bitcoin,StarbuckBG/BTCGPU,SoreGums/bitcoinxt,btcdrak/bitcoin,xawksow/GroestlCoin,ptschip/bitcoin,CodeShark/bitcoin,monacoinproject/monacoin,psionin/smartcoin,RongxinZhang/bitcoinxt,btc1/bitcoin,Bitcoin-ABC/bitcoin-abc,bittylicious/bitcoin,misdess/bitcoin,viacoin/viacoin,pinheadmz/bitcoin,bespike/litecoin,nailtaras/nailcoin,x-kalux/bitcoin_WiG-B,se3000/bitcoin,okinc/bitcoin,gzuser01/zetacoin-bitcoin,r8921039/bitcoin,gravio-net/graviocoin,scmorse/bitcoin,schinzelh/dash,phelix/namecore,Exgibichi/statusquo,schinzelh/dash,zixan/bitcoin,welshjf/bitcoin,ardsu/bitcoin,jnewbery/bitcoin,wiggi/huntercore,Krellan/bitcoin,ixcoinofficialpage/master,guncoin/guncoin,ShadowMyst/creativechain-core,GroestlCoin/bitcoin,bitcoinknots/bitcoin,phelix/namecore,tjth/lotterycoin,bitcoinplusorg/xbcwalletsource,loxal/zcash,dagurval/bitcoinxt,hyperwang/bitcoin,daliwangi/bitcoin,fsb4000/bitcoin,funkshelper/woodcore,sstone/bitcoin,FeatherCoin/Feathercoin,MeshCollider/bitcoin,ixcoinofficialpage/master,faircoin/faircoin,NateBrune/bitcoin-nate,alecalve/bitcoin,Xekyo/bitcoin,sipsorcery/bitcoin,coinkeeper/2015-06-22_18-31_bitcoin,okinc/bitcoin,OmniLayer/omnicore,jrick/bitcoin,r8921039/bitcoin,dperel/bitcoin,daveperkins-github/bitcoin-dev,prark/bitcoinxt,zotherstupidguy/bitcoin,marcusdiaz/BitcoinUnlimited,isghe/bitcoinxt,wcwu/bitcoin,fanquake/bitcoin,bitcoin/bitcoin,deadalnix/bitcoin,goldcoin/Goldcoin-GLD,goldcoin/Goldcoin-GLD,practicalswift/bitcoin,dgenr8/bitcoin,accraze/bitcoin,ivansib/sib16,ravenbyron/phtevencoin,Theshadow4all/ShadowCoin,bitpay/bitcoin,metacoin/florincoin,Xekyo/bitcoin,goldcoin/goldcoin,jameshilliard/bitcoin,jmcorgan/bitcoin,nvmd/bitcoin,jl2012/litecoin,shelvenzhou/BTCGPU,prusnak/bitcoin,Har01d/bitcoin,superjudge/bitcoin,thesoftwarejedi/bitcoin,BTCGPU/BTCGPU,sipsorcery/bitcoin,ryanxcharles/bitcoin,droark/bitcoin,bankonmecoin/bitcoin,deadalnix/bitcoin,MeshCollider/bitcoin,nlgcoin/guldencoin-official,nlgcoin/guldencoin-official,rsdevgun16e/energi,parvez3019/bitcoin,40thoughts/Coin-QualCoin,arnuschky/bitcoin,ahmedbodi/temp_vert,Gazer022/bitcoin,oleganza/bitcoin-duo,vcoin-project/vcoincore,ShadowMyst/creativechain-core,antonio-fr/bitcoin,acid1789/bitcoin,dgenr8/bitcoin,Friedbaumer/litecoin,stamhe/bitcoin,HeliumGas/helium,sugruedes/bitcoin,aniemerg/zcash,laudaa/bitcoin,wederw/bitcoin,lclc/bitcoin,matlongsi/micropay,habibmasuro/bitcoinxt,Rav3nPL/polcoin,Kogser/bitcoin,UASF/bitcoin,AllanDoensen/BitcoinUnlimited,worldbit/worldbit,drwasho/bitcoinxt,phelix/namecore,Gazer022/bitcoin,rdqw/sscoin,Kangmo/bitcoin,h4x3rotab/BTCGPU,shouhuas/bitcoin,rebroad/bitcoin,ftrader-bitcoinabc/bitcoin-abc,ionomy/ion,AdrianaDinca/bitcoin,gravio-net/graviocoin,MeshCollider/bitcoin,syscoin/syscoin,DigiByte-Team/digibyte,BlockchainTechLLC/3dcoin,Friedbaumer/litecoin,rdqw/sscoin,acid1789/bitcoin,SartoNess/BitcoinUnlimited,dgenr8/bitcoin,CryptArc/bitcoinxt,sbaks0820/bitcoin,aniemerg/zcash,ahmedbodi/vertcoin,karek314/bitcoin,gjhiggins/vcoincore,bitreserve/bitcoin,jrick/bitcoin,ravenbyron/phtevencoin,bankonmecoin/bitcoin,Michagogo/bitcoin,wederw/bitcoin,bitcoinclassic/bitcoinclassic,CryptArc/bitcoinxt,sugruedes/bitcoin,nathaniel-mahieu/bitcoin,botland/bitcoin,ajtowns/bitcoin,rnicoll/dogecoin,pouta/bitcoin,BTCDDev/bitcoin,wangxinxi/litecoin,goku1997/bitcoin,ludbb/bitcoin,vertcoin/vertcoin,AkioNak/bitcoin,gmaxwell/bitcoin,mm-s/bitcoin,nathan-at-least/zcash,kallewoof/elements,janko33bd/bitcoin,bitcoinxt/bitcoinxt,ftrader-bitcoinunlimited/hardfork_prototype_1_mvf-bu,lclc/bitcoin,fanquake/bitc
oin,sugruedes/bitcoinxt,millennial83/bitcoin,GroestlCoin/GroestlCoin,plncoin/PLNcoin_Core,haobtc/bitcoin,kallewoof/elements,coinkeeper/2015-06-22_18-31_bitcoin,phplaboratory/psiacoin,bitcoinec/bitcoinec,kleetus/bitcoin,Sjors/bitcoin,myriadcoin/myriadcoin,2XL/bitcoin,namecoin/namecoin-core,pastday/bitcoinproject,myriadcoin/myriadcoin,Petr-Economissa/gvidon,elliotolds/bitcoin,MeshCollider/bitcoin,senadmd/coinmarketwatch,zixan/bitcoin,oleganza/bitcoin-duo,simdeveloper/bitcoin,sarielsaz/sarielsaz,achow101/bitcoin,koharjidan/dogecoin,czr5014iph/bitcoin4e,GroestlCoin/bitcoin,stevemyers/bitcoinxt,drwasho/bitcoinxt,CryptArc/bitcoinxt,keo/bitcoin,ShadowMyst/creativechain-core,hsavit1/bitcoin,omefire/bitcoin,thrasher-/litecoin,xurantju/bitcoin,vmp32k/litecoin,lbrtcoin/albertcoin,ardsu/bitcoin,Alex-van-der-Peet/bitcoin,tjth/lotterycoin,peercoin/peercoin,ryanofsky/bitcoin,zotherstupidguy/bitcoin,lbryio/lbrycrd,zcoinofficial/zcoin,gandrewstone/bitcoinxt,Christewart/bitcoin,JeremyRand/bitcoin,butterflypay/bitcoin,myriadteam/myriadcoin,REAP720801/bitcoin,llluiop/bitcoin,biblepay/biblepay,theuni/bitcoin,FeatherCoin/Feathercoin,wederw/bitcoin,jtimon/bitcoin,funkshelper/woodcore,bitcoinknots/bitcoin,habibmasuro/bitcoinxt,svost/bitcoin,myriadcoin/myriadcoin,coinkeeper/2015-06-22_18-31_bitcoin,AkioNak/bitcoin,Cocosoft/bitcoin,botland/bitcoin,UFOCoins/ufo,mincoin-project/mincoin,globaltoken/globaltoken,Jcing95/iop-hd,haobtc/bitcoin,1185/starwels,kevcooper/bitcoin,faircoin/faircoin2,peercoin/peercoin,wiggi/huntercore,romanornr/viacoin,braydonf/bitcoin,GroestlCoin/GroestlCoin,Flowdalic/bitcoin,qtumproject/qtum,shaolinfry/litecoin,Sjors/bitcoin,NicolasDorier/bitcoin,Michagogo/bitcoin,isle2983/bitcoin,jameshilliard/bitcoin,terracoin/terracoin,TierNolan/bitcoin,goku1997/bitcoin,segsignal/bitcoin,particl/particl-core,dgarage/bc2,morcos/bitcoin,alecalve/bitcoin,gavinandresen/bitcoin-git,EthanHeilman/bitcoin,ftrader-bitcoinabc/bitcoin-abc,Christewart/bitcoin,TierNolan/bitcoin,jmgilbert2/energi,earonesty/bitcoin,earonesty/bitcoin,Justaphf/BitcoinUnlimited,CryptArc/bitcoinxt,n1bor/bitcoin,thesoftwarejedi/bitcoin,habibmasuro/bitcoinxt,ptschip/bitcoin,xieta/mincoin,ahmedbodi/vertcoin,bdelzell/creditcoin-org-creditcoin,bitcoinec/bitcoinec,rawodb/bitcoin,jonghyeopkim/bitcoinxt,shouhuas/bitcoin,midnightmagic/bitcoin,Lucky7Studio/bitcoin,crowning-/dash,mm-s/bitcoin,millennial83/bitcoin,RHavar/bitcoin,omefire/bitcoin,BTCfork/hardfork_prototype_1_mvf-bu,ludbb/bitcoin,gandrewstone/BitcoinUnlimited,ptschip/bitcoin,gavinandresen/bitcoin-git,Alex-van-der-Peet/bitcoin,nlgcoin/guldencoin-official,afk11/bitcoin,Chancoin-core/CHANCOIN,cddjr/BitcoinUnlimited,bitcoinxt/bitcoinxt,Bushstar/UFO-Project,MarcoFalke/bitcoin,tdudz/elements,ashleyholman/bitcoin,GIJensen/bitcoin,CTRoundTable/Encrypted.Cash,UdjinM6/dash,MikeAmy/bitcoin,nathan-at-least/zcash,HashUnlimited/Einsteinium-Unlimited,GroestlCoin/bitcoin,initaldk/bitcoin,monacoinproject/monacoin,dpayne9000/Rubixz-Coin,senadmd/coinmarketwatch,vmp32k/litecoin,bitcoinclassic/bitcoinclassic,worldbit/worldbit,UFOCoins/ufo,morcos/bitcoin,AllanDoensen/BitcoinUnlimited,x-kalux/bitcoin_WiG-B,wbchen99/bitcoin-hnote0,krzysztofwos/BitcoinUnlimited,BTCTaras/bitcoin,starwels/starwels,error10/bitcoin,Bitcoin-ABC/bitcoin-abc,vtafaucet/virtacoin,crowning-/dash,andres-root/bitcoinxt,PRabahy/bitcoin,Friedbaumer/litecoin,marlengit/BitcoinUnlimited,oklink-dev/bitcoin,midnightmagic/bitcoin,bitcoinknots/bitcoin,PandaPayProject/PandaPay,appop/bitcoin,maaku/bitcoin,RongxinZhang/bitcoinxt,xawksow/GroestlCoin,globalt
oken/globaltoken,pataquets/namecoin-core,s-matthew-english/bitcoin,kallewoof/bitcoin,Kcoin-project/kcoin,rnicoll/bitcoin,laudaa/bitcoin,nomnombtc/bitcoin,cryptoprojects/ultimateonlinecash,cybermatatu/bitcoin,vericoin/vericoin-core,dan-mi-sun/bitcoin,isocolsky/bitcoinxt,gzuser01/zetacoin-bitcoin,lbrtcoin/albertcoin,anditto/bitcoin,emc2foundation/einsteinium,drwasho/bitcoinxt,shelvenzhou/BTCGPU,guncoin/guncoin,DigitalPandacoin/pandacoin,matlongsi/micropay,habibmasuro/bitcoinxt,zetacoin/zetacoin,syscoin/syscoin2,kirkalx/bitcoin,itmanagerro/tresting,rdqw/sscoin,ahmedbodi/terracoin,DigitalPandacoin/pandacoin,zetacoin/zetacoin,lbrtcoin/albertcoin,nvmd/bitcoin,CryptArc/bitcoinxt,NicolasDorier/bitcoin,jambolo/bitcoin,goku1997/bitcoin,xawksow/GroestlCoin,cdecker/bitcoin,laudaa/bitcoin,alejandromgk/Lunar,ludbb/bitcoin,ShadowMyst/creativechain-core,dashpay/dash,Justaphf/BitcoinUnlimited,Alonzo-Coeus/bitcoin,arruah/ensocoin,2XL/bitcoin,Kangmo/bitcoin,ajweiss/bitcoin,dmrtsvetkov/flowercoin,HeliumGas/helium,koharjidan/bitcoin,marlengit/hardfork_prototype_1_mvf-bu,isghe/bitcoinxt,denverl/bitcoin,ivansib/sib16,XertroV/bitcoin-nulldata,vmp32k/litecoin,Friedbaumer/litecoin,haobtc/bitcoin,Bitcoin-com/BUcash,mruddy/bitcoin,reorder/viacoin,yenliangl/bitcoin,AllanDoensen/BitcoinUnlimited,Petr-Economissa/gvidon,xawksow/GroestlCoin,zcoinofficial/zcoin,coinkeeper/2015-06-22_18-31_bitcoin,Mirobit/bitcoin,diggcoin/diggcoin,sipsorcery/bitcoin,BTCGPU/BTCGPU,gzuser01/zetacoin-bitcoin,dogecoin/dogecoin,ftrader-bitcoinabc/bitcoin-abc,particl/particl-core,dmrtsvetkov/flowercoin,superjudge/bitcoin,earonesty/bitcoin,litecoin-project/litecore-litecoin,Kogser/bitcoin,royosherove/bitcoinxt,Michagogo/bitcoin,EthanHeilman/bitcoin,starwels/starwels,bitcoin/bitcoin,nomnombtc/bitcoin,omefire/bitcoin,irvingruan/bitcoin,EthanHeilman/bitcoin,coinkeeper/2015-06-22_18-31_bitcoin,tuaris/bitcoin,terracoin/terracoin,hyperwang/bitcoin,sebrandon1/bitcoin,diggcoin/diggcoin,rawodb/bitcoin,CodeShark/bitcoin,syscoin/syscoin,segwit/atbcoin-insight,zixan/bitcoin,aspanta/bitcoin,ludbb/bitcoin,atgreen/bitcoin,faircoin/faircoin2,Kcoin-project/kcoin,psionin/smartcoin,digibyte/digibyte,pouta/bitcoin,se3000/bitcoin,czr5014iph/bitcoin4e,martindale/elements,qtumproject/qtum,pastday/bitcoinproject,myriadteam/myriadcoin,Electronic-Gulden-Foundation/egulden,GreenParhelia/bitcoin,spiritlinxl/BTCGPU,djpnewton/bitcoin,diggcoin/diggcoin,vericoin/vericoin-core,NateBrune/bitcoin-fio,cheehieu/bitcoin,BitzenyCoreDevelopers/bitzeny,CodeShark/bitcoin,ericshawlinux/bitcoin,Har01d/bitcoin,vlajos/bitcoin,DigitalPandacoin/pandacoin,stevemyers/bitcoinxt,PandaPayProject/PandaPay,sbellem/bitcoin,adpg211/bitcoin-master,n1bor/bitcoin,Exgibichi/statusquo,oklink-dev/bitcoin,jameshilliard/bitcoin,jrmithdobbs/bitcoin,stamhe/bitcoin,starwels/starwels,hyperwang/bitcoin,EntropyFactory/creativechain-core,dexX7/bitcoin,n1bor/bitcoin,rsdevgun16e/energi,CryptArc/bitcoinxt,GlobalBoost/GlobalBoost,dagurval/bitcoinxt,domob1812/huntercore,vtafaucet/virtacoin,JeremyRand/bitcoin,gandrewstone/BitcoinUnlimited,MazaCoin/maza,lateminer/bitcoin,sstone/bitcoin,BitcoinHardfork/bitcoin,shaolinfry/litecoin,BTCfork/hardfork_prototype_1_mvf-bu,dcousens/bitcoin,cdecker/bitcoin,NicolasDorier/bitcoin,sbellem/bitcoin,TierNolan/bitcoin,okinc/bitcoin,theuni/bitcoin,awemany/BitcoinUnlimited,BitzenyCoreDevelopers/bitzeny,fsb4000/bitcoin,metacoin/florincoin,dcousens/bitcoin,litecoin-project/litecoin,UASF/bitcoin,ctwiz/stardust,tjth/lotterycoin,daliwangi/bitcoin,lbrtcoin/albertcoin,instagibbs/bitcoin,syscoin/syscoi
n2,elliotolds/bitcoin,sugruedes/bitcoin,lbrtcoin/albertcoin,awemany/BitcoinUnlimited,ivansib/sibcoin,welshjf/bitcoin,royosherove/bitcoinxt,maaku/bitcoin,braydonf/bitcoin,josephbisch/namecoin-core,llluiop/bitcoin,prark/bitcoinxt,misdess/bitcoin,Jcing95/iop-hd,unsystemizer/bitcoin,rnicoll/bitcoin,bespike/litecoin,bdelzell/creditcoin-org-creditcoin,hsavit1/bitcoin,spiritlinxl/BTCGPU,MarcoFalke/bitcoin,wangxinxi/litecoin,shelvenzhou/BTCGPU,metacoin/florincoin,RongxinZhang/bitcoinxt,error10/bitcoin,gzuser01/zetacoin-bitcoin,digibyte/digibyte,koharjidan/bitcoin,ivansib/sibcoin,FarhanHaque/bitcoin,bitcoinsSG/zcash,jaromil/faircoin2,daliwangi/bitcoin,zander/bitcoinclassic,core-bitcoin/bitcoin,jmgilbert2/energi,guncoin/guncoin,royosherove/bitcoinxt,rsdevgun16e/energi,rat4/bitcoin,martindale/elements,jtimon/bitcoin,zcoinofficial/zcoin,jiangyonghang/bitcoin,sdaftuar/bitcoin,dan-mi-sun/bitcoin,marlengit/hardfork_prototype_1_mvf-bu,odemolliens/bitcoinxt,untrustbank/litecoin,ptschip/bitcoin,Xekyo/bitcoin,constantine001/bitcoin,fsb4000/bitcoin,Petr-Economissa/gvidon,CodeShark/bitcoin,rdqw/sscoin,zcoinofficial/zcoin,scippio/bitcoin,odemolliens/bitcoinxt,prusnak/bitcoin,meighti/bitcoin,UdjinM6/dash,Bitcoin-com/BUcash,ekankyesme/bitcoinxt,Kogser/bitcoin,ryanxcharles/bitcoin,goldcoin/goldcoin,rawodb/bitcoin,instagibbs/bitcoin,fsb4000/bitcoin,gazbert/bitcoin,bespike/litecoin,Anfauglith/iop-hd,ryanofsky/bitcoin,scippio/bitcoin,plncoin/PLNcoin_Core,Justaphf/BitcoinUnlimited,dperel/bitcoin,jimmykiselak/lbrycrd,mruddy/bitcoin,Christewart/bitcoin,BlockchainTechLLC/3dcoin,namecoin/namecoin-core,funkshelper/woodcore,sdaftuar/bitcoin,UdjinM6/dash,BitcoinHardfork/bitcoin,BTCfork/hardfork_prototype_1_mvf-core,elecoin/elecoin,rromanchuk/bitcoinxt,lbryio/lbrycrd,r8921039/bitcoin,Kcoin-project/kcoin,goldcoin/goldcoin,40thoughts/Coin-QualCoin,GreenParhelia/bitcoin,ftrader-bitcoinabc/bitcoin-abc,randy-waterhouse/bitcoin,BTCDDev/bitcoin,jmgilbert2/energi,prusnak/bitcoin,segsignal/bitcoin,marlengit/BitcoinUnlimited,nathan-at-least/zcash,btc1/bitcoin,lakepay/lake,mb300sd/bitcoin,jiangyonghang/bitcoin,okinc/bitcoin,ElementsProject/elements,zcoinofficial/zcoin,kirkalx/bitcoin,isocolsky/bitcoinxt,CodeShark/bitcoin,lbryio/lbrycrd,dashpay/dash,zander/bitcoinclassic,bitpay/bitcoin,djpnewton/bitcoin,jamesob/bitcoin,globaltoken/globaltoken,ediston/energi,Mirobit/bitcoin,tuaris/bitcoin,ctwiz/stardust,dcousens/bitcoin,butterflypay/bitcoin,gandrewstone/BitcoinUnlimited,andreaskern/bitcoin,worldbit/worldbit,constantine001/bitcoin,pastday/bitcoinproject,guncoin/guncoin,gravio-net/graviocoin,sstone/bitcoin,droark/bitcoin,NateBrune/bitcoin-nate,mruddy/bitcoin,ppcoin/ppcoin,chaincoin/chaincoin,ElementsProject/elements,shaulkf/bitcoin,karek314/bitcoin,segsignal/bitcoin,nvmd/bitcoin,dgarage/bc3,ftrader-bitcoinunlimited/hardfork_prototype_1_mvf-bu,Michagogo/bitcoin,wcwu/bitcoin,bitcoinplusorg/xbcwalletsource,afk11/bitcoin,destenson/bitcoin--bitcoin,BlockchainTechLLC/3dcoin,achow101/bitcoin,tecnovert/particl-core,prark/bitcoinxt,magacoin/magacoin,domob1812/bitcoin,HeliumGas/helium,jn2840/bitcoin,oklink-dev/bitcoin,GlobalBoost/GlobalBoost,iosdevzone/bitcoin,deeponion/deeponion,TheBlueMatt/bitcoin,XertroV/bitcoin-nulldata,UFOCoins/ufo,ekankyesme/bitcoinxt,vcoin-project/vcoincore,destenson/bitcoin--bitcoin,Mirobit/bitcoin,millennial83/bitcoin,josephbisch/namecoin-core,crowning-/dash,stamhe/bitcoin,jimmykiselak/lbrycrd,21E14/bitcoin,jn2840/bitcoin,wcwu/bitcoin,gjhiggins/vcoincore,destenson/bitcoin--bitcoin,projectinterzone/ITZ,millennial83/bitcoin,sta
rwels/starwels,cannabiscoindev/cannabiscoin420,dogecoin/dogecoin,BitcoinPOW/BitcoinPOW,apoelstra/bitcoin,sugruedes/bitcoin,gavinandresen/bitcoin-git,kleetus/bitcoin,JeremyRubin/bitcoin,braydonf/bitcoin,Bitcoin-ABC/bitcoin-abc,GroestlCoin/bitcoin,Bitcoin-com/BUcash,earonesty/bitcoin,btc1/bitcoin,JeremyRand/bitcoin,NateBrune/bitcoin-nate,GIJensen/bitcoin,ppcoin/ppcoin,Bitcoin-ABC/bitcoin-abc,Rav3nPL/PLNcoin,jambolo/bitcoin,hyperwang/bitcoin,nathaniel-mahieu/bitcoin,SoreGums/bitcoinxt,accraze/bitcoin,Rav3nPL/polcoin,Rav3nPL/bitcoin,ixcoinofficialpage/master,gandrewstone/bitcoinxt,domob1812/huntercore,llluiop/bitcoin,lbryio/lbrycrd,JeremyRubin/bitcoin,SartoNess/BitcoinUnlimited,achow101/bitcoin,syscoin/syscoin2,MikeAmy/bitcoin,wangxinxi/litecoin,Christewart/bitcoin,PandaPayProject/PandaPay,bitbrazilcoin-project/bitbrazilcoin,jrmithdobbs/bitcoin,superjudge/bitcoin,XertroV/bitcoin-nulldata,psionin/smartcoin,litecoin-project/litecoin,GroestlCoin/GroestlCoin,funbucks/notbitcoinxt,jonghyeopkim/bitcoinxt,scippio/bitcoin,cannabiscoindev/cannabiscoin420,ryanxcharles/bitcoin,xurantju/bitcoin,jaromil/faircoin2,BTCTaras/bitcoin,fanquake/bitcoin,biblepay/biblepay,kevcooper/bitcoin,koharjidan/dogecoin,practicalswift/bitcoin,cmgustavo/bitcoin,vmp32k/litecoin,oklink-dev/bitcoin,meighti/bitcoin,fujicoin/fujicoin,BTCfork/hardfork_prototype_1_mvf-core,atgreen/bitcoin,thelazier/dash,wangxinxi/litecoin,unsystemizer/bitcoin,namecoin/namecore,apoelstra/bitcoin,tjps/bitcoin,habibmasuro/bitcoinxt,GreenParhelia/bitcoin,dgenr8/bitcoin,qtumproject/qtum,trippysalmon/bitcoin,ftrader-bitcoinabc/bitcoin-abc,emc2foundation/einsteinium,keo/bitcoin,josephbisch/namecoin-core,domob1812/i0coin,monacoinproject/monacoin,chaincoin/chaincoin,MarcoFalke/bitcoin,jtimon/bitcoin,cheehieu/bitcoin,elecoin/elecoin,dashpay/dash,lbrtcoin/albertcoin,torresalyssa/bitcoin,cyrixhero/bitcoin,AdrianaDinca/bitcoin,elecoin/elecoin,h4x3rotab/BTCGPU,koharjidan/dogecoin,marcusdiaz/BitcoinUnlimited,Bitcoin-ABC/bitcoin-abc,NateBrune/bitcoin-nate,Kogser/bitcoin,zetacoin/zetacoin,Vector2000/bitcoin,NateBrune/bitcoin-fio,pouta/bitcoin,multicoins/marycoin,aspirecoin/aspire,untrustbank/litecoin,faircoin/faircoin,dexX7/bitcoin,paveljanik/bitcoin,myriadteam/myriadcoin,stamhe/bitcoin,instagibbs/bitcoin,marlengit/BitcoinUnlimited,joshrabinowitz/bitcoin,dgarage/bc3,jimmykiselak/lbrycrd,midnightmagic/bitcoin,thrasher-/litecoin,rat4/bitcoin,sugruedes/bitcoin,BitcoinUnlimited/BitcoinUnlimited,schinzelh/dash,UASF/bitcoin,ftrader-bitcoinabc/bitcoin-abc,zsulocal/bitcoin,millennial83/bitcoin,ivansib/sib16,BTCfork/hardfork_prototype_1_mvf-core,sbaks0820/bitcoin,Chancoin-core/CHANCOIN,daveperkins-github/bitcoin-dev,sugruedes/bitcoinxt,FeatherCoin/Feathercoin,ionomy/ion,initaldk/bitcoin,aspirecoin/aspire,GIJensen/bitcoin,prusnak/bitcoin,coinkeeper/2015-06-22_18-31_bitcoin,funkshelper/woodcore,zetacoin/zetacoin,ryanofsky/bitcoin,jl2012/litecoin,jlopp/statoshi,bitpay/bitcoin,vmp32k/litecoin,isle2983/bitcoin,Kangmo/bitcoin,KnCMiner/bitcoin,SoreGums/bitcoinxt,anditto/bitcoin,tjth/lotterycoin,litecoin-project/litecore-litecoin,bitcoinsSG/bitcoin,RyanLucchese/energi,zotherstupidguy/bitcoin,cddjr/BitcoinUnlimited,Flurbos/Flurbo,jrick/bitcoin,EthanHeilman/bitcoin,rjshaver/bitcoin,Flurbos/Flurbo,BitcoinPOW/BitcoinPOW,willwray/dash,emc2foundation/einsteinium,Kogser/bitcoin,acid1789/bitcoin,ftrader-bitcoinunlimited/hardfork_prototype_1_mvf-bu,zixan/bitcoin,nathaniel-mahieu/bitcoin,bitcoinsSG/zcash,phelix/bitcoin,torresalyssa/bitcoin,tdudz/elements,PandaPayProject/PandaPay,Bushstar/UFO-P
roject,MarcoFalke/bitcoin,joshrabinowitz/bitcoin,lateminer/bitcoin,biblepay/biblepay,welshjf/bitcoin,bittylicious/bitcoin,wbchen99/bitcoin-hnote0,faircoin/faircoin,irvingruan/bitcoin,qtumproject/qtum,worldbit/worldbit,ahmedbodi/vertcoin,vlajos/bitcoin,balajinandhu/bitcoin,uphold/bitcoin,destenson/bitcoin--bitcoin,domob1812/i0coin,jmcorgan/bitcoin,Diapolo/bitcoin,cmgustavo/bitcoin,vertcoin/vertcoin,CTRoundTable/Encrypted.Cash,janko33bd/bitcoin,sebrandon1/bitcoin,ahmedbodi/vertcoin,goldcoin/Goldcoin-GLD,Cocosoft/bitcoin,torresalyssa/bitcoin,dogecoin/dogecoin,core-bitcoin/bitcoin,shouhuas/bitcoin,bitcoinsSG/bitcoin,alecalve/bitcoin,loxal/zcash,diggcoin/diggcoin,TheBlueMatt/bitcoin,chaincoin/chaincoin,adpg211/bitcoin-master,ftrader-bitcoinabc/bitcoin-abc,worldbit/worldbit,Exgibichi/statusquo,rromanchuk/bitcoinxt,zixan/bitcoin,djpnewton/bitcoin,vtafaucet/virtacoin,atgreen/bitcoin,AdrianaDinca/bitcoin,keo/bitcoin,elecoin/elecoin,misdess/bitcoin,senadmd/coinmarketwatch,Diapolo/bitcoin,phelix/bitcoin,BitcoinPOW/BitcoinPOW,domob1812/huntercore,viacoin/viacoin,UFOCoins/ufo,BTCfork/hardfork_prototype_1_mvf-core,karek314/bitcoin,bitcoinsSG/bitcoin,pinheadmz/bitcoin,kleetus/bitcoin,ivansib/sib16,omefire/bitcoin,schinzelh/dash,wcwu/bitcoin,Kogser/bitcoin,kleetus/bitcoin,BitzenyCoreDevelopers/bitzeny,AllanDoensen/BitcoinUnlimited,kallewoof/bitcoin,bitcoinec/bitcoinec,dogecoin/dogecoin,thesoftwarejedi/bitcoin,core-bitcoin/bitcoin,vertcoin/vertcoin,multicoins/marycoin,kazcw/bitcoin,StarbuckBG/BTCGPU,phplaboratory/psiacoin,vlajos/bitcoin,monacoinproject/monacoin,XertroV/bitcoin-nulldata,Bitcoinsulting/bitcoinxt,anditto/bitcoin,NateBrune/bitcoin-nate,lclc/bitcoin,btcdrak/bitcoin,bitcoinclassic/bitcoinclassic,digibyte/digibyte,rnicoll/bitcoin,phelix/bitcoin,czr5014iph/bitcoin4e,rromanchuk/bitcoinxt,ShadowMyst/creativechain-core,funkshelper/woodcore,jnewbery/bitcoin,gandrewstone/bitcoinxt,marlengit/hardfork_prototype_1_mvf-bu,awemany/BitcoinUnlimited,cddjr/BitcoinUnlimited,odemolliens/bitcoinxt,ashleyholman/bitcoin,bittylicious/bitcoin,Flurbos/Flurbo,Rav3nPL/bitcoin,Vector2000/bitcoin,denverl/bitcoin,daliwangi/bitcoin,svost/bitcoin,jmcorgan/bitcoin,tdudz/elements,DigiByte-Team/digibyte,projectinterzone/ITZ,wellenreiter01/Feathercoin,Electronic-Gulden-Foundation/egulden,lbrtcoin/albertcoin,wellenreiter01/Feathercoin,ftrader-bitcoinabc/bitcoin-abc,Gazer022/bitcoin,brandonrobertz/namecoin-core,lclc/bitcoin,Flurbos/Flurbo,Alonzo-Coeus/bitcoin,h4x3rotab/BTCGPU,experiencecoin/experiencecoin,initaldk/bitcoin,antonio-fr/bitcoin,jonasschnelli/bitcoin,oleganza/bitcoin-duo,shelvenzhou/BTCGPU,hsavit1/bitcoin,dexX7/bitcoin,jnewbery/bitcoin,rawodb/bitcoin,gandrewstone/BitcoinUnlimited,domob1812/i0coin,namecoin/namecore,dagurval/bitcoinxt,maaku/bitcoin,jamesob/bitcoin,UASF/bitcoin,MasterX1582/bitcoin-becoin,bitcoinplusorg/xbcwalletsource,2XL/bitcoin,Flowdalic/bitcoin,dscotese/bitcoin,llluiop/bitcoin,Theshadow4all/ShadowCoin,untrustbank/litecoin,apoelstra/bitcoin,SoreGums/bitcoinxt,adpg211/bitcoin-master,karek314/bitcoin,Michagogo/bitcoin,AllanDoensen/BitcoinUnlimited,dashpay/dash,RyanLucchese/energi,CodeShark/bitcoin,Cloudsy/bitcoin,rawodb/bitcoin,ekankyesme/bitcoinxt,ShwoognationHQ/bitcoin,BlockchainTechLLC/3dcoin,BigBlueCeiling/augmentacoin,ixcoinofficialpage/master,initaldk/bitcoin,174high/bitcoin,droark/bitcoin,bitreserve/bitcoin,ShwoognationHQ/bitcoin,stamhe/bitcoin,RyanLucchese/energi,bespike/litecoin,Chancoin-core/CHANCOIN,Bitcoin-com/BUcash,fsb4000/bitcoin,meighti/bitcoin,REAP720801/bitcoin,dogecoin/dogecoin,phelix/bi
tcoin,atgreen/bitcoin,koharjidan/bitcoin,Sjors/bitcoin,dan-mi-sun/bitcoin,DigitalPandacoin/pandacoin,atgreen/bitcoin,randy-waterhouse/bitcoin,marlengit/hardfork_prototype_1_mvf-bu,lbryio/lbrycrd,argentumproject/argentum,rat4/bitcoin,lbrtcoin/albertcoin,Anfauglith/iop-hd,Chancoin-core/CHANCOIN,ctwiz/stardust,jnewbery/bitcoin,ionomy/ion,domob1812/namecore,sebrandon1/bitcoin,PRabahy/bitcoin,vlajos/bitcoin,CryptArc/bitcoin,UdjinM6/dash,gandrewstone/BitcoinUnlimited,romanornr/viacoin,joshrabinowitz/bitcoin,adpg211/bitcoin-master,svost/bitcoin,appop/bitcoin,braydonf/bitcoin,deeponion/deeponion,dmrtsvetkov/flowercoin,bitcoinxt/bitcoinxt,shouhuas/bitcoin,atgreen/bitcoin,patricklodder/dogecoin,ftrader-bitcoinabc/bitcoin-abc,AdrianaDinca/bitcoin,AkioNak/bitcoin,lbrtcoin/albertcoin,karek314/bitcoin,xurantju/bitcoin,svost/bitcoin,xurantju/bitcoin,mincoin-project/mincoin,afk11/bitcoin,RHavar/bitcoin,rebroad/bitcoin,ediston/energi,isghe/bitcoinxt,Lucky7Studio/bitcoin,error10/bitcoin,thelazier/dash,Bushstar/UFO-Project,BitzenyCoreDevelopers/bitzeny,ivansib/sibcoin,sbaks0820/bitcoin,qtumproject/qtum,habibmasuro/bitcoin,daliwangi/bitcoin,PRabahy/bitcoin,ericshawlinux/bitcoin,JeremyRand/bitcoin,bitreserve/bitcoin,gzuser01/zetacoin-bitcoin,digibyte/digibyte,dgarage/bc2,vcoin-project/vcoincore,SartoNess/BitcoinUnlimited,koharjidan/dogecoin,gmaxwell/bitcoin,ajtowns/bitcoin,faircoin/faircoin,rjshaver/bitcoin,Kore-Core/kore,segwit/atbcoin-insight,midnightmagic/bitcoin,cryptoprojects/ultimateonlinecash,cannabiscoindev/cannabiscoin420,StarbuckBG/BTCGPU,mitchellcash/bitcoin,jaromil/faircoin2,MikeAmy/bitcoin,multicoins/marycoin,neuroidss/bitcoin,rnicoll/bitcoin,kallewoof/elements,instagibbs/bitcoin,antonio-fr/bitcoin,robvanbentem/bitcoin,kallewoof/bitcoin,GreenParhelia/bitcoin,kallewoof/bitcoin,misdess/bitcoin,bespike/litecoin,droark/bitcoin,jrmithdobbs/bitcoin,Anfauglith/iop-hd,antonio-fr/bitcoin,ryanofsky/bitcoin,emc2foundation/einsteinium,jonasschnelli/bitcoin,sipsorcery/bitcoin,jimmysong/bitcoin,litecoin-project/litecore-litecoin,domob1812/i0coin,litecoin-project/litecore-litecoin,dscotese/bitcoin,kleetus/bitcoin,droark/bitcoin,myriadteam/myriadcoin,namecoin/namecore,HashUnlimited/Einsteinium-Unlimited,error10/bitcoin,h4x3rotab/BTCGPU,mincoin-project/mincoin,cddjr/BitcoinUnlimited,btc1/bitcoin,afk11/bitcoin,RHavar/bitcoin,fujicoin/fujicoin,iosdevzone/bitcoin,ahmedbodi/terracoin,RyanLucchese/energi,nvmd/bitcoin,gandrewstone/bitcoinxt,robvanbentem/bitcoin,experiencecoin/experiencecoin,MikeAmy/bitcoin,marlengit/BitcoinUnlimited,hasanatkazmi/bitcoin,midnightmagic/bitcoin,koharjidan/dogecoin,nikkitan/bitcoin,plncoin/PLNcoin_Core,ryanofsky/bitcoin,bitcoinec/bitcoinec,BTCfork/hardfork_prototype_1_mvf-bu,EntropyFactory/creativechain-core,patricklodder/dogecoin,alejandromgk/Lunar,BTCfork/hardfork_prototype_1_mvf-core,goldcoin/Goldcoin-GLD,REAP720801/bitcoin,sugruedes/bitcoinxt,dexX7/bitcoin,maaku/bitcoin,MasterX1582/bitcoin-becoin,jmgilbert2/energi,wbchen99/bitcoin-hnote0,n1bor/bitcoin,Rav3nPL/polcoin,ashleyholman/bitcoin,botland/bitcoin,ashleyholman/bitcoin,wiggi/huntercore,chaincoin/chaincoin,jn2840/bitcoin,accraze/bitcoin,oleganza/bitcoin-duo,rebroad/bitcoin,ftrader-bitcoinunlimited/hardfork_prototype_1_mvf-bu,jimmysong/bitcoin,pouta/bitcoin,gameunits/gameunits,rsdevgun16e/energi,litecoin-project/litecore-litecoin,Kogser/bitcoin,DigitalPandacoin/pandacoin,dpayne9000/Rubixz-Coin,Cocosoft/bitcoin,lbryio/lbrycrd,gravio-net/graviocoin,simonmulser/bitcoin,trippysalmon/bitcoin,unsystemizer/bitcoin,rdqw/sscoin,Friedbaumer/lit
ecoin,alecalve/bitcoin,pstratem/bitcoin,shaulkf/bitcoin,nikkitan/bitcoin,nailtaras/nailcoin,andres-root/bitcoinxt,Exgibichi/statusquo,jambolo/bitcoin,OmniLayer/omnicore,pouta/bitcoin,multicoins/marycoin,faircoin/faircoin,ElementsProject/elements,btc1/bitcoin,butterflypay/bitcoin,BTCGPU/BTCGPU,rnicoll/dogecoin,myriadteam/myriadcoin,dgarage/bc3,ediston/energi,vcoin-project/vcoincore,nailtaras/nailcoin,EthanHeilman/bitcoin,gjhiggins/vcoincore,Theshadow4all/ShadowCoin,funbucks/notbitcoinxt,robvanbentem/bitcoin,2XL/bitcoin,vericoin/vericoin-core,Justaphf/BitcoinUnlimited,krzysztofwos/BitcoinUnlimited,daveperkins-github/bitcoin-dev,uphold/bitcoin,KnCMiner/bitcoin,UFOCoins/ufo,cybermatatu/bitcoin,syscoin/syscoin2,ahmedbodi/vertcoin,janko33bd/bitcoin,gavinandresen/bitcoin-git,diggcoin/diggcoin,jimmykiselak/lbrycrd,jlopp/statoshi,robvanbentem/bitcoin,elecoin/elecoin,ravenbyron/phtevencoin,fujicoin/fujicoin,ardsu/bitcoin,martindale/elements,marcusdiaz/BitcoinUnlimited,Mirobit/bitcoin,111t8e/bitcoin,1185/starwels,multicoins/marycoin,wangxinxi/litecoin,pastday/bitcoinproject,wederw/bitcoin,FarhanHaque/bitcoin,core-bitcoin/bitcoin,Anfauglith/iop-hd,dmrtsvetkov/flowercoin,fanquake/bitcoin,lbrtcoin/albertcoin,kazcw/bitcoin,untrustbank/litecoin,error10/bitcoin,antonio-fr/bitcoin,bdelzell/creditcoin-org-creditcoin,argentumproject/argentum,mb300sd/bitcoin,sebrandon1/bitcoin,thesoftwarejedi/bitcoin,jimmysong/bitcoin,cdecker/bitcoin,trippysalmon/bitcoin,nlgcoin/guldencoin-official,itmanagerro/tresting,pastday/bitcoinproject,Christewart/bitcoin,isle2983/bitcoin,trippysalmon/bitcoin,BTCTaras/bitcoin,denverl/bitcoin,djpnewton/bitcoin,cannabiscoindev/cannabiscoin420,rat4/bitcoin,core-bitcoin/bitcoin,gavinandresen/bitcoin-git,jonasschnelli/bitcoin,Jcing95/iop-hd,RongxinZhang/bitcoinxt,marlengit/BitcoinUnlimited,ivansib/sib16,awemany/BitcoinUnlimited,cmgustavo/bitcoin,jtimon/bitcoin,bittylicious/bitcoin,HashUnlimited/Einsteinium-Unlimited,Bushstar/UFO-Project,BTCTaras/bitcoin,sstone/bitcoin,starwels/starwels,TeamBitBean/bitcoin-core,Bitcoin-ABC/bitcoin-abc,stevemyers/bitcoinxt,SartoNess/BitcoinUnlimited,elecoin/elecoin,wederw/bitcoin,ahmedbodi/temp_vert,nmarley/dash,GroestlCoin/GroestlCoin,rawodb/bitcoin,yenliangl/bitcoin,Electronic-Gulden-Foundation/egulden,r8921039/bitcoin,nlgcoin/guldencoin-official,experiencecoin/experiencecoin,aniemerg/zcash,ericshawlinux/bitcoin,TrainMAnB/vcoincore,zander/bitcoinclassic,iosdevzone/bitcoin,BTCfork/hardfork_prototype_1_mvf-bu,bankonmecoin/bitcoin,DigiByte-Team/digibyte,rnicoll/bitcoin,Cloudsy/bitcoin,rat4/bitcoin,cyrixhero/bitcoin,111t8e/bitcoin,neuroidss/bitcoin,tecnovert/particl-core,rnicoll/dogecoin,Rav3nPL/bitcoin,oleganza/bitcoin-duo,btcdrak/bitcoin,Alonzo-Coeus/bitcoin,matlongsi/micropay,hsavit1/bitcoin,sdaftuar/bitcoin,Theshadow4all/ShadowCoin,gazbert/bitcoin,cmgustavo/bitcoin,TeamBitBean/bitcoin-core,dcousens/bitcoin,neuroidss/bitcoin,cyrixhero/bitcoin,dperel/bitcoin,thelazier/dash,marcusdiaz/BitcoinUnlimited,dperel/bitcoin,apoelstra/bitcoin,paveljanik/bitcoin,phelix/namecore,metacoin/florincoin,cybermatatu/bitcoin,jonghyeopkim/bitcoinxt,sbellem/bitcoin,ShwoognationHQ/bitcoin,domob1812/i0coin,rnicoll/dogecoin,ahmedbodi/vertcoin,bdelzell/creditcoin-org-creditcoin,llluiop/bitcoin,kirkalx/bitcoin,BitcoinHardfork/bitcoin,constantine001/bitcoin,anditto/bitcoin,HashUnlimited/Einsteinium-Unlimited,gjhiggins/vcoincore,jameshilliard/bitcoin,phelix/bitcoin,practicalswift/bitcoin,sipsorcery/bitcoin,mitchellcash/bitcoin,dgenr8/bitcoin,itmanagerro/tresting,shouhuas/bitcoin,cddjr/Bitc
oinUnlimited,1185/starwels,gameunits/gameunits,djpnewton/bitcoin,marlengit/hardfork_prototype_1_mvf-bu,theuni/bitcoin,alejandromgk/Lunar,segwit/atbcoin-insight,sugruedes/bitcoinxt,BlockchainTechLLC/3dcoin,svost/bitcoin,s-matthew-english/bitcoin,RHavar/bitcoin,magacoin/magacoin,argentumproject/argentum,domob1812/namecore,magacoin/magacoin,nomnombtc/bitcoin,constantine001/bitcoin,EthanHeilman/bitcoin,wellenreiter01/Feathercoin,laudaa/bitcoin,goldcoin/goldcoin,shaolinfry/litecoin,BitcoinHardfork/bitcoin,Bushstar/UFO-Project,terracoin/terracoin,syscoin/syscoin,AkioNak/bitcoin,mm-s/bitcoin,jiangyonghang/bitcoin,schinzelh/dash,aniemerg/zcash,BitcoinUnlimited/BitcoinUnlimited,domob1812/namecore,vericoin/vericoin-core,isocolsky/bitcoinxt,viacoin/viacoin,StarbuckBG/BTCGPU,Bitcoin-com/BUcash,svost/bitcoin,jlopp/statoshi,syscoin/syscoin,earonesty/bitcoin,hasanatkazmi/bitcoin,FeatherCoin/Feathercoin,namecoin/namecore,spiritlinxl/BTCGPU,bitcoin/bitcoin,mitchellcash/bitcoin,HeliumGas/helium,arruah/ensocoin,pataquets/namecoin-core,jimmykiselak/lbrycrd,rebroad/bitcoin,GIJensen/bitcoin,kevcooper/bitcoin,shaolinfry/litecoin,tjth/lotterycoin,sebrandon1/bitcoin,shaulkf/bitcoin,domob1812/bitcoin,instagibbs/bitcoin,jimmykiselak/lbrycrd,Mirobit/bitcoin,Bitcoinsulting/bitcoinxt,krzysztofwos/BitcoinUnlimited,domob1812/namecore,plncoin/PLNcoin_Core,Gazer022/bitcoin,jrick/bitcoin,MazaCoin/maza,biblepay/biblepay,janko33bd/bitcoin,cheehieu/bitcoin,PRabahy/bitcoin,irvingruan/bitcoin,GlobalBoost/GlobalBoost,ravenbyron/phtevencoin,dgarage/bc2,prark/bitcoinxt,cheehieu/bitcoin,nathaniel-mahieu/bitcoin,bitcoinxt/bitcoinxt,HeliumGas/helium,tdudz/elements,phelix/namecore,tecnovert/particl-core,vmp32k/litecoin,prusnak/bitcoin,zotherstupidguy/bitcoin,elliotolds/bitcoin,xieta/mincoin,ivansib/sibcoin,dan-mi-sun/bitcoin,plncoin/PLNcoin_Core,ajweiss/bitcoin,myriadcoin/myriadcoin,jmgilbert2/energi,terracoin/terracoin,marcusdiaz/BitcoinUnlimited,zetacoin/zetacoin,jn2840/bitcoin,Alex-van-der-Peet/bitcoin,n1bor/bitcoin,litecoin-project/litecoin,faircoin/faircoin2,dpayne9000/Rubixz-Coin,jl2012/litecoin,aniemerg/zcash,projectinterzone/ITZ,alecalve/bitcoin,TeamBitBean/bitcoin-core,pinheadmz/bitcoin,Electronic-Gulden-Foundation/egulden,bitreserve/bitcoin,ShadowMyst/creativechain-core,Bitcoinsulting/bitcoinxt,faircoin/faircoin2,midnightmagic/bitcoin,dgarage/bc3,morcos/bitcoin,JeremyRand/bitcoin,ediston/energi,starwels/starwels,dgarage/bc2,goldcoin/Goldcoin-GLD,nbenoit/bitcoin,biblepay/biblepay,wellenreiter01/Feathercoin,PRabahy/bitcoin,FarhanHaque/bitcoin,GlobalBoost/GlobalBoost,nmarley/dash,xawksow/GroestlCoin,dan-mi-sun/bitcoin,nomnombtc/bitcoin,AllanDoensen/BitcoinUnlimited,Jcing95/iop-hd,parvez3019/bitcoin,zcoinofficial/zcoin,royosherove/bitcoinxt,destenson/bitcoin--bitcoin,omefire/bitcoin,accraze/bitcoin,sbaks0820/bitcoin,ryanofsky/bitcoin,ryanxcharles/bitcoin,mb300sd/bitcoin,shaulkf/bitcoin,jmgilbert2/energi,ardsu/bitcoin,balajinandhu/bitcoin,bitcoinxt/bitcoinxt,yenliangl/bitcoin,uphold/bitcoin,Anfauglith/iop-hd,dscotese/bitcoin,iosdevzone/bitcoin,deeponion/deeponion,simdeveloper/bitcoin,segwit/atbcoin-insight,cryptoprojects/ultimateonlinecash,sebrandon1/bitcoin,neuroidss/bitcoin,hyperwang/bitcoin,OmniLayer/omnicore,BitcoinHardfork/bitcoin,paveljanik/bitcoin,keo/bitcoin,unsystemizer/bitcoin,ElementsProject/elements,Flurbos/Flurbo,Kangmo/bitcoin,r8921039/bitcoin,kleetus/bitcoin,dashpay/dash,keo/bitcoin,xurantju/bitcoin,uphold/bitcoin,Diapolo/bitcoin,bitcoinsSG/bitcoin,ahmedbodi/temp_vert,oklink-dev/bitcoin,litecoin-project/litecoin,zsul
ocal/bitcoin,sdaftuar/bitcoin,terracoin/terracoin,MazaCoin/maza,tecnovert/particl-core,goku1997/bitcoin,MeshCollider/bitcoin,sugruedes/bitcoin,haobtc/bitcoin,acid1789/bitcoin,paveljanik/bitcoin,vtafaucet/virtacoin,ajweiss/bitcoin,romanornr/viacoin,matlongsi/micropay,simdeveloper/bitcoin,s-matthew-english/bitcoin,aspirecoin/aspire,isocolsky/bitcoinxt,jrmithdobbs/bitcoin,GlobalBoost/GlobalBoost,appop/bitcoin,segsignal/bitcoin,rromanchuk/bitcoinxt,ElementsProject/elements,se3000/bitcoin,okinc/bitcoin,dcousens/bitcoin,jamesob/bitcoin,bespike/litecoin,CTRoundTable/Encrypted.Cash,untrustbank/litecoin,Rav3nPL/polcoin,SoreGums/bitcoinxt,Gazer022/bitcoin,nbenoit/bitcoin,vtafaucet/virtacoin,romanornr/viacoin,NateBrune/bitcoin-fio,pataquets/namecoin-core,daveperkins-github/bitcoin-dev,okinc/bitcoin,pinheadmz/bitcoin,wellenreiter01/Feathercoin,core-bitcoin/bitcoin,tdudz/elements,dagurval/bitcoinxt,Krellan/bitcoin,aspirecoin/aspire,Kogser/bitcoin,ediston/energi,drwasho/bitcoinxt,JeremyRubin/bitcoin,jonasschnelli/bitcoin,Flowdalic/bitcoin,NateBrune/bitcoin-nate,elliotolds/bitcoin,40thoughts/Coin-QualCoin,uphold/bitcoin,MazaCoin/maza,habibmasuro/bitcoin,unsystemizer/bitcoin,kallewoof/elements,ctwiz/stardust,nbenoit/bitcoin,tecnovert/particl-core,bankonmecoin/bitcoin,laudaa/bitcoin,aspirecoin/aspire,ahmedbodi/temp_vert,andreaskern/bitcoin,neuroidss/bitcoin,syscoin/syscoin2,phplaboratory/psiacoin,FeatherCoin/Feathercoin,uphold/bitcoin,Bitcoin-ABC/bitcoin-abc,Kcoin-project/kcoin,habibmasuro/bitcoin,BTCfork/hardfork_prototype_1_mvf-bu,zander/bitcoinclassic,PRabahy/bitcoin,martindale/elements,jamesob/bitcoin,Krellan/bitcoin,TheBlueMatt/bitcoin,NicolasDorier/bitcoin,x-kalux/bitcoin_WiG-B,x-kalux/bitcoin_WiG-B,Kore-Core/kore,wiggi/huntercore,nomnombtc/bitcoin,jrick/bitcoin,Kore-Core/kore,gandrewstone/bitcoinxt,reorder/viacoin,metacoin/florincoin,jaromil/faircoin2,ivansib/sibcoin,nailtaras/nailcoin,goldcoin/Goldcoin-GLD,biblepay/biblepay,arruah/ensocoin,djpnewton/bitcoin,bitcoinsSG/zcash,bitreserve/bitcoin,chaincoin/chaincoin,zcoinofficial/zcoin,welshjf/bitcoin,scmorse/bitcoin,BigBlueCeiling/augmentacoin,Justaphf/BitcoinUnlimited,MasterX1582/bitcoin-becoin,donaloconnor/bitcoin,syscoin/syscoin,jrmithdobbs/bitcoin,jaromil/faircoin2,trippysalmon/bitcoin,BTCDDev/bitcoin,joshrabinowitz/bitcoin,hsavit1/bitcoin,dan-mi-sun/bitcoin,habibmasuro/bitcoinxt,alejandromgk/Lunar,isghe/bitcoinxt,scmorse/bitcoin,Bushstar/UFO-Project,Bitcoin-ABC/bitcoin-abc,romanornr/viacoin,loxal/zcash,experiencecoin/experiencecoin,nmarley/dash,parvez3019/bitcoin,se3000/bitcoin,phelix/namecore,BTCGPU/BTCGPU,namecoin/namecore,Christewart/bitcoin,morcos/bitcoin,kevcooper/bitcoin,phplaboratory/psiacoin,cmgustavo/bitcoin,denverl/bitcoin,robvanbentem/bitcoin,174high/bitcoin,nvmd/bitcoin,alejandromgk/Lunar,peercoin/peercoin,arruah/ensocoin,cdecker/bitcoin,jonghyeopkim/bitcoinxt,ashleyholman/bitcoin,wcwu/bitcoin,Justaphf/BitcoinUnlimited,nikkitan/bitcoin,irvingruan/bitcoin,odemolliens/bitcoinxt,BTCfork/hardfork_prototype_1_mvf-bu,BigBlueCeiling/augmentacoin,111t8e/bitcoin,Bitcoinsulting/bitcoinxt,shouhuas/bitcoin,1185/starwels,tjps/bitcoin,rnicoll/bitcoin,EntropyFactory/creativechain-core,BigBlueCeiling/augmentacoin,jamesob/bitcoin,CTRoundTable/Encrypted.Cash,ryanxcharles/bitcoin,sbaks0820/bitcoin,Gazer022/bitcoin,BTCfork/hardfork_prototype_1_mvf-core,fujicoin/fujicoin,mincoin-project/mincoin,habibmasuro/bitcoin,josephbisch/namecoin-core,magacoin/magacoin,andreaskern/bitcoin,ludbb/bitcoin,sarielsaz/sarielsaz,namecoin/namecoin-core,Exgibichi/statusquo,par
vez3019/bitcoin,thrasher-/litecoin,deadalnix/bitcoin,zixan/bitcoin,1185/starwels,wbchen99/bitcoin-hnote0,MasterX1582/bitcoin-becoin,willwray/dash,zcoinofficial/zcoin,shaulkf/bitcoin,bitbrazilcoin-project/bitbrazilcoin,TheBlueMatt/bitcoin,qtumproject/qtum,stevemyers/bitcoinxt,bitcoin/bitcoin,CryptArc/bitcoin,romanornr/viacoin,fujicoin/fujicoin,cannabiscoindev/cannabiscoin420,2XL/bitcoin,scmorse/bitcoin,prusnak/bitcoin,zsulocal/bitcoin,Kogser/bitcoin,matlongsi/micropay,isghe/bitcoinxt,nathaniel-mahieu/bitcoin,vlajos/bitcoin,cmgustavo/bitcoin,thesoftwarejedi/bitcoin,alecalve/bitcoin,40thoughts/Coin-QualCoin,Rav3nPL/polcoin,krzysztofwos/BitcoinUnlimited,funbucks/notbitcoinxt,jmcorgan/bitcoin,funbucks/notbitcoinxt,tuaris/bitcoin,bitcoinclassic/bitcoinclassic,goku1997/bitcoin,faircoin/faircoin2,x-kalux/bitcoin_WiG-B,spiritlinxl/BTCGPU,welshjf/bitcoin,ShwoognationHQ/bitcoin,ajtowns/bitcoin,dscotese/bitcoin,Petr-Economissa/gvidon,tuaris/bitcoin,x-kalux/bitcoin_WiG-B,bitcoinclassic/bitcoinclassic,Cloudsy/bitcoin,gazbert/bitcoin,kallewoof/elements,projectinterzone/ITZ,cyrixhero/bitcoin,mincoin-project/mincoin,BitcoinUnlimited/BitcoinUnlimited,argentumproject/argentum,morcos/bitcoin,cheehieu/bitcoin,ardsu/bitcoin,iosdevzone/bitcoin,bdelzell/creditcoin-org-creditcoin,dgarage/bc2,BitcoinUnlimited/BitcoinUnlimited,Har01d/bitcoin,BTCTaras/bitcoin,s-matthew-english/bitcoin,shelvenzhou/BTCGPU,yenliangl/bitcoin,bitpay/bitcoin,sickpig/BitcoinUnlimited,senadmd/coinmarketwatch,josephbisch/namecoin-core,TrainMAnB/vcoincore,jiangyonghang/bitcoin,gravio-net/graviocoin,ShwoognationHQ/bitcoin,magacoin/magacoin,crowning-/dash,nomnombtc/bitcoin,spiritlinxl/BTCGPU,awemany/BitcoinUnlimited,ivansib/sibcoin,Bitcoin-ABC/bitcoin-abc,cddjr/BitcoinUnlimited,lateminer/bitcoin,daveperkins-github/bitcoin-dev,ivansib/sib16,btcdrak/bitcoin,Cloudsy/bitcoin,bitbrazilcoin-project/bitbrazilcoin,bitcoinsSG/bitcoin,StarbuckBG/BTCGPU,bitcoin/bitcoin,scippio/bitcoin,StarbuckBG/BTCGPU,Kcoin-project/kcoin,UASF/bitcoin,royosherove/bitcoinxt,pstratem/bitcoin,BTCGPU/BTCGPU,nikkitan/bitcoin,Diapolo/bitcoin,brandonrobertz/namecoin-core,butterflypay/bitcoin,shelvenzhou/BTCGPU,BTCGPU/BTCGPU,achow101/bitcoin,patricklodder/dogecoin,constantine001/bitcoin,metacoin/florincoin,bitcoinknots/bitcoin,globaltoken/globaltoken,UdjinM6/dash,cculianu/bitcoin-abc,CryptArc/bitcoin,thrasher-/litecoin,TierNolan/bitcoin,thrasher-/litecoin,jnewbery/bitcoin,OmniLayer/omnicore,Vector2000/bitcoin,jambolo/bitcoin,rjshaver/bitcoin,Krellan/bitcoin,sbellem/bitcoin,andreaskern/bitcoin,BitzenyCoreDevelopers/bitzeny,aspanta/bitcoin,TierNolan/bitcoin,jl2012/litecoin,rebroad/bitcoin,odemolliens/bitcoinxt,apoelstra/bitcoin,brandonrobertz/namecoin-core,nathan-at-least/zcash,myriadcoin/myriadcoin,rsdevgun16e/energi,bitcoinplusorg/xbcwalletsource,rdqw/sscoin,nikkitan/bitcoin,sarielsaz/sarielsaz,janko33bd/bitcoin,andreaskern/bitcoin,s-matthew-english/bitcoin,ptschip/bitcoin,Rav3nPL/PLNcoin,donaloconnor/bitcoin,nathaniel-mahieu/bitcoin,aspanta/bitcoin,shaolinfry/litecoin,botland/bitcoin,bitreserve/bitcoin,arruah/ensocoin,reorder/viacoin,laudaa/bitcoin,isocolsky/bitcoinxt,ixcoinofficialpage/master,xurantju/bitcoin,BitcoinPOW/BitcoinPOW,balajinandhu/bitcoin,SartoNess/BitcoinUnlimited,hasanatkazmi/bitcoin,rromanchuk/bitcoinxt,Krellan/bitcoin,hyperwang/bitcoin,sarielsaz/sarielsaz,koharjidan/bitcoin,mitchellcash/bitcoin,EntropyFactory/creativechain-core,Kangmo/bitcoin,NateBrune/bitcoin-fio,donaloconnor/bitcoin,achow101/bitcoin,Sjors/bitcoin,cculianu/bitcoin-abc,simonmulser/bitcoin,yenlian
gl/bitcoin,ahmedbodi/terracoin,rat4/bitcoin,Mirobit/bitcoin,FeatherCoin/Feathercoin,Cloudsy/bitcoin,balajinandhu/bitcoin,mb300sd/bitcoin,crowning-/dash,40thoughts/Coin-QualCoin,jameshilliard/bitcoin,biblepay/biblepay,constantine001/bitcoin,sickpig/BitcoinUnlimited,segwit/atbcoin-insight,domob1812/bitcoin,Kogser/bitcoin,terracoin/terracoin,vertcoin/vertcoin,apoelstra/bitcoin,Bitcoin-com/BUcash,gmaxwell/bitcoin,biblepay/biblepay,martindale/elements,Diapolo/bitcoin,TeamBitBean/bitcoin-core,BTCDDev/bitcoin,dgarage/bc2,zander/bitcoinclassic,globaltoken/globaltoken,morcos/bitcoin,jtimon/bitcoin,sickpig/BitcoinUnlimited,dperel/bitcoin,gameunits/gameunits,JeremyRand/bitcoin,matlongsi/micropay,gmaxwell/bitcoin,h4x3rotab/BTCGPU,zetacoin/zetacoin,ekankyesme/bitcoinxt,czr5014iph/bitcoin4e,ionomy/ion,fsb4000/bitcoin,FarhanHaque/bitcoin,Xekyo/bitcoin,pstratem/bitcoin,MikeAmy/bitcoin,FarhanHaque/bitcoin,simonmulser/bitcoin,wiggi/huntercore,czr5014iph/bitcoin4e,dcousens/bitcoin,monacoinproject/monacoin,ixcoinofficialpage/master,destenson/bitcoin--bitcoin,scippio/bitcoin,kevcooper/bitcoin,Kogser/bitcoin,bitcoinplusorg/xbcwalletsource,balajinandhu/bitcoin,BitcoinPOW/BitcoinPOW,Lucky7Studio/bitcoin,bitbrazilcoin-project/bitbrazilcoin,Xekyo/bitcoin,projectinterzone/ITZ,segwit/atbcoin-insight,bittylicious/bitcoin,domob1812/namecore,REAP720801/bitcoin,bankonmecoin/bitcoin,ajtowns/bitcoin,kazcw/bitcoin,jrick/bitcoin,nmarley/dash,kazcw/bitcoin,ionomy/ion,koharjidan/bitcoin,mruddy/bitcoin,superjudge/bitcoin,gazbert/bitcoin,DigiByte-Team/digibyte,aspirecoin/aspire,sugruedes/bitcoinxt,deadalnix/bitcoin,MikeAmy/bitcoin,aspanta/bitcoin,CryptArc/bitcoin,kirkalx/bitcoin,TheBlueMatt/bitcoin,Electronic-Gulden-Foundation/egulden,dagurval/bitcoinxt,rebroad/bitcoin,domob1812/huntercore,sstone/bitcoin,40thoughts/Coin-QualCoin,dmrtsvetkov/flowercoin,ajweiss/bitcoin,h4x3rotab/BTCGPU,butterflypay/bitcoin,wiggi/huntercore,jlopp/statoshi,174high/bitcoin,kirkalx/bitcoin,ftrader-bitcoinabc/bitcoin-abc,zsulocal/bitcoin,arruah/ensocoin,namecoin/namecoin-core,CryptArc/bitcoin,tjth/lotterycoin,jn2840/bitcoin,josephbisch/namecoin-core,Alonzo-Coeus/bitcoin,21E14/bitcoin,hasanatkazmi/bitcoin,bitpay/bitcoin,iosdevzone/bitcoin,TeamBitBean/bitcoin-core,Bitcoinsulting/bitcoinxt,zotherstupidguy/bitcoin,pstratem/bitcoin,jtimon/bitcoin,mm-s/bitcoin,haobtc/bitcoin,Michagogo/bitcoin,arnuschky/bitcoin,anditto/bitcoin,Flowdalic/bitcoin,wangxinxi/litecoin,BTCTaras/bitcoin,Exgibichi/statusquo,wbchen99/bitcoin-hnote0,reorder/viacoin,bitbrazilcoin-project/bitbrazilcoin,MarcoFalke/bitcoin,stamhe/bitcoin,Har01d/bitcoin,bitpay/bitcoin,thelazier/dash,Rav3nPL/bitcoin,vtafaucet/virtacoin,SartoNess/BitcoinUnlimited,tjps/bitcoin,nmarley/dash,zcoinofficial/zcoin,brandonrobertz/namecoin-core,prark/bitcoinxt,tecnovert/particl-core,EntropyFactory/creativechain-core,jameshilliard/bitcoin,PandaPayProject/PandaPay,trippysalmon/bitcoin,thesoftwarejedi/bitcoin,KnCMiner/bitcoin,AkioNak/bitcoin,GroestlCoin/GroestlCoin,practicalswift/bitcoin,theuni/bitcoin,rjshaver/bitcoin,cryptoprojects/ultimateonlinecash,NicolasDorier/bitcoin,krzysztofwos/BitcoinUnlimited,Flowdalic/bitcoin,BitcoinUnlimited/BitcoinUnlimited,accraze/bitcoin,btcdrak/bitcoin,pataquets/namecoin-core,gazbert/bitcoin,nbenoit/bitcoin,willwray/dash,patricklodder/dogecoin,RyanLucchese/energi,BTCDDev/bitcoin,cryptoprojects/ultimateonlinecash,ardsu/bitcoin,xieta/mincoin,nailtaras/nailcoin,TrainMAnB/vcoincore,sugruedes/bitcoinxt,lbryio/lbrycrd,cdecker/bitcoin,dexX7/bitcoin,dagurval/bitcoinxt,gavinandresen/bitcoin-git,l
itecoin-project/litecoin,crowning-/dash,CTRoundTable/Encrypted.Cash,goku1997/bitcoin,randy-waterhouse/bitcoin,cybermatatu/bitcoin,royosherove/bitcoinxt,gjhiggins/vcoincore,pstratem/bitcoin,droark/bitcoin,bitcoinxt/bitcoinxt,ionomy/ion,rsdevgun16e/energi,prark/bitcoinxt,tuaris/bitcoin,phplaboratory/psiacoin,1185/starwels,Petr-Economissa/gvidon,shaulkf/bitcoin,wederw/bitcoin,arnuschky/bitcoin,mitchellcash/bitcoin,gzuser01/zetacoin-bitcoin,Flurbos/Flurbo,RongxinZhang/bitcoinxt,arnuschky/bitcoin,MeshCollider/bitcoin,Theshadow4all/ShadowCoin,afk11/bitcoin,viacoin/viacoin,jmcorgan/bitcoin,cyrixhero/bitcoin,marcusdiaz/BitcoinUnlimited,multicoins/marycoin,goldcoin/goldcoin,Rav3nPL/PLNcoin,adpg211/bitcoin-master,nmarley/dash,krzysztofwos/BitcoinUnlimited,hasanatkazmi/bitcoin,deadalnix/bitcoin,KnCMiner/bitcoin,stevemyers/bitcoinxt,sbellem/bitcoin,Cloudsy/bitcoin,dogecoin/dogecoin,AdrianaDinca/bitcoin,marlengit/hardfork_prototype_1_mvf-bu,vericoin/vericoin-core,appop/bitcoin,particl/particl-core,GreenParhelia/bitcoin,rromanchuk/bitcoinxt,patricklodder/dogecoin,pataquets/namecoin-core,zcoinofficial/zcoin,ptschip/bitcoin,kirkalx/bitcoin,cybermatatu/bitcoin,domob1812/bitcoin,dpayne9000/Rubixz-Coin,afk11/bitcoin,simonmulser/bitcoin,mb300sd/bitcoin,sdaftuar/bitcoin,KnCMiner/bitcoin,OmniLayer/omnicore,bitcoin/bitcoin,simonmulser/bitcoin,dexX7/bitcoin,BitcoinUnlimited/BitcoinUnlimited,jlopp/statoshi,ctwiz/stardust,ericshawlinux/bitcoin,jlopp/statoshi,lakepay/lake,DigiByte-Team/digibyte,ftrader-bitcoinunlimited/hardfork_prototype_1_mvf-bu,marlengit/BitcoinUnlimited,unsystemizer/bitcoin,isle2983/bitcoin,GIJensen/bitcoin,jrmithdobbs/bitcoin,goldcoin/goldcoin,Friedbaumer/litecoin,oleganza/bitcoin-duo,elliotolds/bitcoin,AdrianaDinca/bitcoin,sipsorcery/bitcoin,emc2foundation/einsteinium,isghe/bitcoinxt,jl2012/litecoin,Rav3nPL/PLNcoin,GlobalBoost/GlobalBoost,nailtaras/nailcoin,Petr-Economissa/gvidon,ajweiss/bitcoin,cybermatatu/bitcoin,dexX7/bitcoin,yenliangl/bitcoin,pastday/bitcoinproject,accraze/bitcoin,2XL/bitcoin,Vector2000/bitcoin,aspanta/bitcoin,n1bor/bitcoin,s-matthew-english/bitcoin,syscoin/syscoin2,kazcw/bitcoin,ashleyholman/bitcoin,bitcoinplusorg/xbcwalletsource,peercoin/peercoin,JeremyRubin/bitcoin,XertroV/bitcoin-nulldata,Theshadow4all/ShadowCoin,gameunits/gameunits,pataquets/namecoin-core,meighti/bitcoin,UASF/bitcoin,deeponion/deeponion,particl/particl-core,torresalyssa/bitcoin,mitchellcash/bitcoin,tjps/bitcoin,andres-root/bitcoinxt,Har01d/bitcoin,brandonrobertz/namecoin-core,parvez3019/bitcoin,segsignal/bitcoin,lakepay/lake,achow101/bitcoin,experiencecoin/experiencecoin,Bitcoin-ABC/bitcoin-abc,millennial83/bitcoin,nlgcoin/guldencoin-official,keo/bitcoin,TrainMAnB/vcoincore,viacoin/viacoin,kallewoof/bitcoin,argentumproject/argentum,bitcoinknots/bitcoin,jimmysong/bitcoin,neuroidss/bitcoin,robvanbentem/bitcoin,zsulocal/bitcoin,JeremyRubin/bitcoin,jonghyeopkim/bitcoinxt,misdess/bitcoin,argentumproject/argentum,jl2012/litecoin,se3000/bitcoin,wbchen99/bitcoin-hnote0,RongxinZhang/bitcoinxt,mm-s/bitcoin,MasterX1582/bitcoin-becoin,simdeveloper/bitcoin,viacoin/viacoin,scmorse/bitcoin,Kore-Core/kore,cyrixhero/bitcoin,domob1812/bitcoin,deeponion/deeponion,RHavar/bitcoin,willwray/dash,lateminer/bitcoin,litecoin-project/litecore-litecoin,theuni/bitcoin,ajtowns/bitcoin,Kore-Core/kore,arnuschky/bitcoin,Cocosoft/bitcoin,psionin/smartcoin,appop/bitcoin,Kore-Core/kore,maaku/bitcoin,MarcoFalke/bitcoin,paveljanik/bitcoin,cheehieu/bitcoin,lclc/bitcoin,myriadcoin/myriadcoin,Lucky7Studio/bitcoin,ShwoognationHQ/bitcoin,meighti
/bitcoin,donaloconnor/bitcoin,Bitcoinsulting/bitcoinxt,gmaxwell/bitcoin,guncoin/guncoin,ryanxcharles/bitcoin,argentumproject/argentum,isle2983/bitcoin,vcoin-project/vcoincore,itmanagerro/tresting,superjudge/bitcoin,maaku/bitcoin,dpayne9000/Rubixz-Coin,plncoin/PLNcoin_Core,willwray/dash,dgarage/bc3,simdeveloper/bitcoin,lclc/bitcoin,PandaPayProject/PandaPay,andres-root/bitcoinxt,phelix/bitcoin,dscotese/bitcoin,appop/bitcoin,pinheadmz/bitcoin,mincoin-project/mincoin,vertcoin/vertcoin,Vector2000/bitcoin,Bitcoin-ABC/bitcoin-abc,bitcoinsSG/zcash,lbrtcoin/albertcoin,dperel/bitcoin,nikkitan/bitcoin,particl/particl-core,phplaboratory/psiacoin,SoreGums/bitcoinxt,mruddy/bitcoin,GroestlCoin/GroestlCoin,sbaks0820/bitcoin,braydonf/bitcoin,bankonmecoin/bitcoin,ekankyesme/bitcoinxt,NateBrune/bitcoin-fio,oklink-dev/bitcoin,lbrtcoin/albertcoin,loxal/zcash,JeremyRubin/bitcoin,Lucky7Studio/bitcoin,pstratem/bitcoin,randy-waterhouse/bitcoin,practicalswift/bitcoin,BigBlueCeiling/augmentacoin,tjps/bitcoin,bitcoinsSG/zcash,odemolliens/bitcoinxt,drwasho/bitcoinxt,Lucky7Studio/bitcoin,BitcoinPOW/BitcoinPOW,BlockchainTechLLC/3dcoin,faircoin/faircoin2,REAP720801/bitcoin,botland/bitcoin,untrustbank/litecoin,denverl/bitcoin,Chancoin-core/CHANCOIN,sdaftuar/bitcoin,BTCDDev/bitcoin,welshjf/bitcoin,nathan-at-least/zcash,funbucks/notbitcoinxt,HashUnlimited/Einsteinium-Unlimited,habibmasuro/bitcoin,practicalswift/bitcoin,GIJensen/bitcoin,diggcoin/diggcoin,gazbert/bitcoin,21E14/bitcoin,segsignal/bitcoin,Kangmo/bitcoin,vertcoin/vertcoin,digibyte/digibyte,Xekyo/bitcoin,isocolsky/bitcoinxt,fujicoin/fujicoin,litecoin-project/litecoin,willwray/dash,irvingruan/bitcoin,andres-root/bitcoinxt,koharjidan/dogecoin,anditto/bitcoin,namecoin/namecoin-core,scippio/bitcoin,jamesob/bitcoin,GroestlCoin/bitcoin,Alex-van-der-Peet/bitcoin,chaincoin/chaincoin,sickpig/BitcoinUnlimited,Rav3nPL/bitcoin,Alonzo-Coeus/bitcoin,senadmd/coinmarketwatch,andres-root/bitcoinxt,xieta/mincoin,thelazier/dash,TierNolan/bitcoin,jaromil/faircoin2,cculianu/bitcoin-abc,111t8e/bitcoin,ajweiss/bitcoin,sbellem/bitcoin,dmrtsvetkov/flowercoin,btc1/bitcoin,jimmysong/bitcoin,174high/bitcoin,gandrewstone/BitcoinUnlimited,botland/bitcoin,ftrader-bitcoinunlimited/hardfork_prototype_1_mvf-bu,bdelzell/creditcoin-org-creditcoin,ravenbyron/phtevencoin,jambolo/bitcoin,xieta/mincoin,randy-waterhouse/bitcoin,dpayne9000/Rubixz-Coin,gameunits/gameunits,ahmedbodi/temp_vert,randy-waterhouse/bitcoin,NicolasDorier/bitcoin,21E14/bitcoin,Jcing95/iop-hd,FarhanHaque/bitcoin,butterflypay/bitcoin,magacoin/magacoin,111t8e/bitcoin,r8921039/bitcoin,isle2983/bitcoin,parvez3019/bitcoin,XertroV/bitcoin-nulldata,bitcoinec/bitcoinec,SartoNess/BitcoinUnlimited,21E14/bitcoin,BigBlueCeiling/augmentacoin,vericoin/vericoin-core,brandonrobertz/namecoin-core,donaloconnor/bitcoin,sarielsaz/sarielsaz,bitcoinsSG/zcash,ahmedbodi/terracoin,ahmedbodi/terracoin,stevemyers/bitcoinxt,nmarley/dash,myriadteam/myriadcoin,Chancoin-core/CHANCOIN,cdecker/bitcoin,Sjors/bitcoin,experiencecoin/experiencecoin,ctwiz/stardust,174high/bitcoin,ravenbyron/phtevencoin,jimmysong/bitcoin,instagibbs/bitcoin,spiritlinxl/BTCGPU,kallewoof/bitcoin,domob1812/huntercore,adpg211/bitcoin-master,kallewoof/elements,ekankyesme/bitcoinxt,CTRoundTable/Encrypted.Cash,janko33bd/bitcoin,syscoin/syscoin,21E14/bitcoin,deeponion/deeponion,rjshaver/bitcoin,vlajos/bitcoin,Kcoin-project/kcoin,jiangyonghang/bitcoin,se3000/bitcoin,Anfauglith/iop-hd,Rav3nPL/bitcoin,deadalnix/bitcoin,btcdrak/bitcoin,jiangyonghang/bitcoin,schinzelh/dash,guncoin/guncoin,DigitalPandac
oin/pandacoin
qa/rpc-tests/invalidateblock.py
qa/rpc-tests/invalidateblock.py
#!/usr/bin/env python2 # Copyright (c) 2014 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # # Test InvalidateBlock code # from test_framework import BitcoinTestFramework from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException from util import * class InvalidateTest(BitcoinTestFramework): def setup_chain(self): print("Initializing test directory "+self.options.tmpdir) initialize_chain_clean(self.options.tmpdir, 2) def setup_network(self): self.nodes = [] self.is_network_split = False self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"])) self.nodes.append(start_node(1, self.options.tmpdir, ["-debug"])) def run_test(self): print "Mine 4 blocks on Node 0" self.nodes[0].setgenerate(True, 4) assert(self.nodes[0].getblockcount() == 4) besthash = self.nodes[0].getbestblockhash() print "Mine competing 6 blocks on Node 1" self.nodes[1].setgenerate(True, 6) assert(self.nodes[1].getblockcount() == 6) print "Connect nodes to force a reorg" connect_nodes_bi(self.nodes,0,1) sync_blocks(self.nodes) assert(self.nodes[0].getblockcount() == 6) badhash = self.nodes[1].getblockhash(2) print "Invalidate block 2 on node 0 and verify we reorg to node 0's original chain" self.nodes[0].invalidateblock(badhash) newheight = self.nodes[0].getblockcount() newhash = self.nodes[0].getbestblockhash() if (newheight != 4 or newhash != besthash): raise AssertionError("Wrong tip for node0, hash %s, height %d"%(newhash,newheight)) if __name__ == '__main__': InvalidateTest().main()
mit
Python
fadac460052cb1a778bf8398879e1cb616c26228
Add new migration for Django 1.8
nabucosound/django-propaganda
propaganda/migrations/0002_auto_20150802_1841.py
propaganda/migrations/0002_auto_20150802_1841.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('propaganda', '0001_initial'), ] operations = [ migrations.AlterField( model_name='subscriber', name='email', field=models.EmailField(unique=True, max_length=254, verbose_name='email'), ), ]
bsd-3-clause
Python
b29417d3b387c8ab62c1e09589c2d93dae905993
Add skeleton tle.api
thelinuxkid/gumjabi
tle/api.py
tle/api.py
import json import logging import bottle import functools from paste import httpserver from paste.translogger import TransLogger from collections import OrderedDict log = logging.getLogger(__name__) class APILogger(TransLogger): def write_log( self, environ, method, req_uri, start, status, bytes_, ): remote_addr = environ['REMOTE_ADDR'] protocol = environ['SERVER_PROTOCOL'] referer = environ.get('HTTP_REFERER', '-') user_agent = environ.get('HTTP_USER_AGENT', '-') msg = ('{remote_addr} {method} {req_uri} {protocol} {status} ' '{bytes_} {referer} {user_agent}' ).format( remote_addr=remote_addr, method=method, req_uri=req_uri, protocol=protocol, status=status, bytes_=bytes_, referer=referer, user_agent=user_agent, ) log.info(msg) class APIServer(bottle.ServerAdapter): def run(self, handler): handler = APILogger(handler) httpserver.serve( handler, host=self.host, port=str(self.port), **self.options ) def set_content(type_, charset='charset=UTF-8'): bottle.response.content_type = '{type_}; {charset}'.format( type_=type_, charset=charset, ) def json_content(fn): @functools.wraps(fn) def wrapper(*args, **kwargs): set_content('application/json') return fn(*args, **kwargs) return wrapper @bottle.error(404) @bottle.error(403) @bottle.error(500) @json_content def api_error(error): status = OrderedDict([ ('code', error.status), ('message', error.body) ]) status = OrderedDict([ ('status', status), ]) return json.dumps(status) class EventAPI01(object): def __init__(self): pass def apply(self, callback, context): """ Similar to a bottle.JSONPlugin's apply method. This one also ensures that self is available to methods with bottle decorators. """ @functools.wraps(callback) @json_content def wrapper(*args, **kwargs): kwargs['self'] = self return callback(*args, **kwargs) return wrapper
mit
Python
b042675463c34340d4d3ae5d6868b243abf9741b
Create Average_sorting.py
AeyS/Python
Average_sorting.py
Average_sorting.py
# coding: utf-8 import rw success_list=[] #Meet the requirements of the combined group max_min=[] #Max ad min volue save var;[function_name : max_min_mark] def count(x,y): result=x+y return result def count_list(x,y): total=count(len(x),len(y)) return total def max_min_mark(var): for i in var: length=len(i) max_min.append(length) def merger_group(textdir): textlines = open(textdir,'r').readlines() b_split=[] for i in xrange(0,len(textlines)): if i%2!=0: if count_list(x, textlines[i])>35: b_split.append(x) b_split.append(textlines[i]) else: success_list.append(x.replace('\n','')+' '+textlines[i]) else: x=textlines[i] return b_split def best_value(b_split): max_min_mark(b_split) min_value_location=max_min.index(min(max_min)) while min_value_location: max_value_location=max_min.index(max(max_min)) if max_min[max_value_location]+max_min[min_value_location]>35: success_list.append(b_split[max_value_location]) success_list.append(b_split[max_value_location]) max_min[max_value_location]=None else: success_list.append(b_split[max_value_location].replace('\n','')+' '+b_split[min_value_location]) max_min[max_value_location]=None max_min[min_value_location]=None min_value_location=max_min.index(min(max_min)) def main(textdir): path=raw_input('save_filename:') best_value(merger_group(textdir)) rw.handle(success_list,path) if __name__ == '__main__': textdir = 'd:/name.txt' main(textdir)
mit
Python
1136824ab60dbb8774ba5cb8d011e898f9286e06
Add a missing file
brennie/reviewboard,1tush/reviewboard,bkochendorfer/reviewboard,custode/reviewboard,sgallagher/reviewboard,KnowNo/reviewboard,1tush/reviewboard,brennie/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,1tush/reviewboard,davidt/reviewboard,davidt/reviewboard,bkochendorfer/reviewboard,beol/reviewboard,beol/reviewboard,reviewboard/reviewboard,custode/reviewboard,custode/reviewboard,custode/reviewboard,sgallagher/reviewboard,bkochendorfer/reviewboard,reviewboard/reviewboard,1tush/reviewboard,chipx86/reviewboard,KnowNo/reviewboard,beol/reviewboard,davidt/reviewboard,reviewboard/reviewboard,beol/reviewboard,1tush/reviewboard,davidt/reviewboard,1tush/reviewboard,brennie/reviewboard,brennie/reviewboard,1tush/reviewboard,1tush/reviewboard,sgallagher/reviewboard,chipx86/reviewboard,KnowNo/reviewboard,1tush/reviewboard,bkochendorfer/reviewboard,chipx86/reviewboard,sgallagher/reviewboard,KnowNo/reviewboard
reviewboard/admin/validation.py
reviewboard/admin/validation.py
from django import forms def validate_bug_tracker(input_url): """ Validates that an issue tracker URI string contains one `%s` Python format specification type (no other types are supported). """ try: # Ignore escaped `%`'s test_url = input_url.replace('%%', '') if test_url.find('%s') == -1: raise TypeError # Ensure an arbitrary value can be inserted into the URL string test_url = test_url % 1 except (TypeError, ValueError): raise forms.ValidationError(["%s has invalid format specification " "type(s). Use only one '%%s' to mark the " "location of the bug id. If the URI " "contains encoded values (e.g. '%%20'), " "prepend the encoded values with an " "additional '%%'." % input_url])
mit
Python
298d3e352193e574e0c8980e37a50d226552109e
Create conf.py
OpenTransfr/Core,OpenTransfr/Core,OpenTransfr/Core,OpenTransfr/Core
docs/conf.py
docs/conf.py
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.viewcode',
    'repoze.sphinx.autointerface',
    'sphinxcontrib.programoutput',
    'sphinxcontrib.images',
]
mit
Python
acdb13c3680b7958f9a1def3e538ef9ebd166922
add migration for org name + apptext
uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal
portal/migrations/versions/9b1bedfa916b_.py
portal/migrations/versions/9b1bedfa916b_.py
from alembic import op import sqlalchemy as sa from sqlalchemy.orm import sessionmaker from portal.models.app_text import AppText from portal.models.organization import Organization """empty message Revision ID: 9b1bedfa916b Revises: 441185240f62 Create Date: 2017-10-26 15:24:32.623899 """ # revision identifiers, used by Alembic. revision = '9b1bedfa916b' down_revision = '441185240f62' Session = sessionmaker() def update_org_name(old, new): bind = op.get_bind() session = Session(bind=bind) session.execute("UPDATE organizations SET name='{}' " "WHERE name='{}'".format(new, old)) for at in session.query(AppText).filter(AppText.name.contains(old)): at.name = at.name.replace(old, new) session.commit() def upgrade(): # ### commands auto generated by Alembic - please adjust! ### update_org_name('CRV', 'TrueNTH Global Registry') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### update_org_name('TrueNTH Global Registry', 'CRV') # ### end Alembic commands ###
bsd-3-clause
Python
ac45b330072ec7d171602c698711844f29af77e8
Create bgg_games.py
ed-kung/bgg
bgg_games.py
bgg_games.py
import csv import time from boardgamegeek import BoardGameGeek bgg = BoardGameGeek() # first import the game id list f = open("bgg_game_ids.txt") idlist = [] for line in f: idlist.append(int(line.split()[0])) f.close() # data file datafile = "bgg_games.csv" # max and min game id # (if you don't want to scrape the whole dataset in one go) min_game_id = 1 max_game_id = 100 # header line (variable names) header = ( 'snapshot_date', \ 'id', \ 'name', \ 'year', \ 'artists', \ 'categories', \ 'designers', \ 'expansion', \ 'expands', \ 'expansions', \ 'families', \ 'implementations', \ 'max_players', \ 'mechanics', \ 'min_age', \ 'min_players', \ 'playing_time', \ 'publishers', \ 'ranks', \ 'rating_average', \ 'rating_average_weight', \ 'rating_bayes_average', \ 'rating_median', \ 'rating_num_weights', \ 'rating_stddev', \ 'users_commented', \ 'users_owned', \ 'users_rated', \ 'users_trading', \ 'users_wanting', \ 'users_wishing') # comment this part out if data file exists and you are appending f = open(datafile, 'w', newline='') csv.writer(f).writerow(header) f.close() # begin data collection f = open(datafile, 'a', newline='') writer = csv.writer(f) for id in idlist: if (id>=min_game_id and id<=max_game_id): print(id, end="") print('... ', end="") try: g = bgg.game(game_id = id) line = ( time.strftime("%Y-%m-%d"), \ g.id, \ g.name, \ g.year, \ (' | '.join(g.artists)), \ (' | '.join(g.categories)), \ (' | '.join(g.designers)), \ g.expansion, \ str(g.expands), \ str(g.expansions), \ (' | '.join(g.families)), \ (' | '.join(g.implementations)), \ g.max_players, \ (' | '.join(g.mechanics)), \ g.min_age, \ g.min_players, \ g.playing_time, \ (' | '.join(g.publishers)), \ str(g.ranks), \ g.rating_average, \ g.rating_average_weight, \ g.rating_bayes_average, \ g.rating_median, \ g.rating_num_weights, \ g.rating_stddev, \ g.users_commented, \ g.users_owned, \ g.users_rated, \ g.users_trading, \ g.users_wanting, \ g.users_wishing) writer.writerow(line) except: print('failed... ', end="") f.close()
mit
Python
4ce5e57b882ae057fa21d0397925512073447b77
Add admin interface
josl/django-chunked-upload,voidrank/django-chunked-upload,CodeYellowBV/django-chunked-upload
chunked_upload/admin.py
chunked_upload/admin.py
from django.contrib import admin

from .models import ChunkedUpload


class ChunkedUploadAdmin(admin.ModelAdmin):
    list_display = ('upload_id', 'file', 'filename', 'user', 'offset',
                    'created_on', 'status', 'completed_on')


admin.site.register(ChunkedUpload, ChunkedUploadAdmin)
mit
Python
1db74fafd5f281053dc82d2d4ff2d24447db8338
add initial Nose tests
ralfonso/harvestmedia
tests/test_connection.py
tests/test_connection.py
from nose.tools import raises from unittest.case import SkipTest from urllib2 import urlopen import StringIO import mock import datetime, md5 import harvestmedia.api.exceptions import harvestmedia.api.config import harvestmedia.api.client api_key = '12345' webservice_url = 'https://service.harvestmedia.net/HMP-WS.svc' @mock.patch('harvestmedia.api.client.urlopen') @raises(harvestmedia.api.exceptions.InvalidAPIResponse) def test_xml_failure(urlopen_mock): urlopen_mock.return_value = StringIO.StringIO('<xml><this xml is malformed</xml>') hmconfig = harvestmedia.api.config.Config() hmconfig.api_key = api_key hmconfig.webservice_url = webservice_url client = harvestmedia.api.client.Client() @mock.patch('harvestmedia.api.client.urlopen') def test_get_service_token(UrlOpenMock): u = UrlOpenMock() expiry = datetime.datetime.today().isoformat() test_token = md5.md5(expiry).hexdigest() # generate an md5 from the date for testing u.read.return_value = '<?xml version="1.0" encoding="utf-8"?><responseservicetoken><token value="%s" expiry="%s"/></responseservicetoken>' % (test_token, expiry) hmconfig = harvestmedia.api.config.Config() hmconfig.api_key = api_key hmconfig.webservice_url = webservice_url client = harvestmedia.api.client.Client() assert client.service_token == test_token assert client.service_token_expires == expiry
mit
Python
a3deadbc54fad13e4e40da143f25ae4b26cf690b
Add missing travis-ci manage.py.
uw-it-aca/mdot,charlon/mdot,uw-it-aca/mdot,uw-it-aca/mdot,uw-it-aca/mdot,charlon/mdot,charlon/mdot
travis-ci/manage.py
travis-ci/manage.py
#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "travis-ci.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
apache-2.0
Python
58a5257505a4ae9d32cf233d059b4350f9494d86
Create timer.py
jasoncg/pytimer
timer.py
timer.py
# # jasoncg # 2015-02-23 # # timer.py # # A simple timer supporting the Python "with" statement # import time # # Use in a "with" statement: # with timer.Timer(): # perform_expensive_calculation() # # May also print the current progress: # with timer.Timer() as t: # perform_expensive_calculation_1() # t.print_progress() # perform_expensive_calculation_2() # class Timer(): def __enter__(self): self.reset() return self def __exit__(self, type, value, traceback): end = time.time() print("Took %s seconds\n" %(end-self.start)) def reset(self): # Reset the start to now self.start = time.time() self.elapsed = time.time() def get_progress(self): # Get the current time elapsed since start return time.time() - self.start def print_progress(self, message=None): if message is None: message="" else: message=message+" " print("%s%s seconds\n" %(message, self.get_progress())) def get_elapsed(self): # Get the current time elapsed since start newelapsed = time.time() e = newelapsed - self.elapsed self.elapsed = newelapsed return e def print_elapsed(self, message=None): if message is None: message="" else: message=message+" " print("%s%s seconds\n" %(message, self.get_elapsed()))
mit
Python
d1e568ab1e238586ed914de35ed44dc2231af3d2
Create version.py
stanfordmlgroup/ngboost,stanfordmlgroup/ngboost
ngboost/version.py
ngboost/version.py
__version__ = "0.2.0"
apache-2.0
Python
29bdfc794f759a5f8189d4c89dcaa3fa9699bc2b
Add sfp_numinfo
smicallef/spiderfoot,smicallef/spiderfoot,smicallef/spiderfoot
modules/sfp_numinfo.py
modules/sfp_numinfo.py
#------------------------------------------------------------------------------- # Name: sfp_numinfo # Purpose: SpiderFoot plug-in to search numinfo.net for a phone number # and retrieve email address. # # Author: <bcoles@gmail.com> # # Created: 2019-05-28 # Copyright: (c) bcoles 2019 # Licence: GPL #------------------------------------------------------------------------------- import json import re import time from sflib import SpiderFoot, SpiderFootPlugin, SpiderFootEvent class sfp_numinfo(SpiderFootPlugin): """numinfo:Footprint,Investigate,Passive:Real World::Lookup phone number information.""" # Default options opts = { } # Option descriptions optdescs = { } results = dict() errorState = False def setup(self, sfc, userOpts=dict()): self.sf = sfc self.__dataSource__ = 'numinfo' self.results = dict() self.errorState = False for opt in userOpts.keys(): self.opts[opt] = userOpts[opt] # What events is this module interested in for input def watchedEvents(self): return ['PHONE_NUMBER'] # What events this module produces def producedEvents(self): return ['RAW_RIR_DATA', 'EMAILADDR'] # Query numinfo for the specified phone number def query(self, qry): number = qry.strip('+').strip('(').strip(')').strip('-').strip(' ') if not number.isdigit(): self.sf.debug('Invalid phone number: ' + number) return None res = self.sf.fetchUrl("http://" + number + '.numinfo.net/', timeout=self.opts['_fetchtimeout'], useragent=self.opts['_useragent']) time.sleep(1) if res['content'] is None: self.sf.debug('No response from numinfo.net') return None if res['code'] != '200': return None json_data = re.findall(r'<script type="application/ld\+json">(.+?)</script>', res['content'], re.MULTILINE | re.DOTALL) if not json_data: return None try: data = json.loads(json_data[0]) except BaseException as e: self.sf.debug('Error processing JSON response: ' + str(e)) return None return data # Handle events sent to this module def handleEvent(self, event): eventName = event.eventType srcModuleName = event.module eventData = event.data if self.errorState: return None if eventData in self.results: return None self.results[eventData] = True self.sf.debug("Received event, " + eventName + ", from " + srcModuleName) data = self.query(eventData) if data is None: self.sf.debug('No phone information found for ' + eventData) return None evt = SpiderFootEvent("RAW_RIR_DATA", str(data), self.__name__, event) self.notifyListeners(evt) if data.get('email'): email_match = re.findall(r'^mailto:([a-zA-Z\.0-9_\-]+@[a-zA-Z\.0-9\-]+\.[a-zA-Z\.0-9\-]+)$', data.get('email')) if email_match is not None: evt = SpiderFootEvent('EMAILADDR', email_match[0], self.__name__, event) self.notifyListeners(evt) # End of sfp_numinfo class
mit
Python
b6a6e6a9bf0254f9c79215c98b392b02db53827b
Add wireless module #305
jorik041/CrackMapExec,byt3bl33d3r/CrackMapExec
cme/modules/wireless.py
cme/modules/wireless.py
class CMEModule: name = 'wireless' description = "Get key of all wireless interfaces" supported_protocols = ['smb'] opsec_safe = True multiple_hosts = True def options(self, context, module_options): ''' ''' def on_admin_login(self, context, connection): command = 'powershell.exe -c "(netsh wlan show profiles) | Select-String """"\:(.+)$"""" | %{$name=$_.Matches.Groups[1].Value.Trim(); $_} | %{(netsh wlan show profile name="$name" key=clear)}"' context.log.info('Executing command') p = connection.execute(command, True) context.log.success(p)
bsd-2-clause
Python
74aaf7f459875c4dec9ed1076bf748786db4af0d
Add example for downloading TOP N files from VT matching an Intelligence search
VirusTotal/vt-py
examples/search_and_download_topn_files.py
examples/search_and_download_topn_files.py
#!/usr/bin/python
"""
This example program shows how to download files from VirusTotal matching a
VirusTotal Intelligence search.

NOTE: In order to use this script you will need to have access to
VT Intelligence or to the Premium API. Learn more about these services at:
https://www.virustotal.com/gui/intelligence-overview
https://developers.virustotal.com/v3.0/reference#search
https://www.virustotal.com/learn/
"""

import argparse
import asyncio
import logging
import os
import sys
import time
import vt

DEFAULT_PATH = 'intelligencefiles'
LOGGING_LEVEL = logging.INFO  # Modify if you just want to focus on errors

logging.basicConfig(level=LOGGING_LEVEL,
                    format='%(asctime)s %(levelname)-8s %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S',
                    stream=sys.stdout)


class DownloadTopNFilesHandler:
  """Handler for Downloading files from VT."""

  def __init__(self, apikey, num_files):
    self.apikey = apikey
    self.num_files = num_files
    self.queue = asyncio.Queue()

  async def download_files(self, download_path):
    """Download files in queue to the path referenced by `download_path`.

    Args:
      download_path: string representing the path where the files will be
        stored.
    """
    async with vt.Client(self.apikey) as client:
      while True:
        file_hash = await self.queue.get()
        file_path = os.path.join(download_path, file_hash)
        with open(file_path, 'wb') as f:
          await client.download_file_async(file_hash, f)
        self.queue.task_done()

  async def queue_file_hashes(self, search):
    """Retrieve files from VT and enqueue them for being downloaded.

    Args:
      search: VT intelligence search query.
    """
    async with vt.Client(self.apikey) as client:
      it = client.iterator(
          '/intelligence/search', params={'query': search},
          limit=self.num_files)
      async for file_obj in it:
        await self.queue.put(file_obj.sha256)

  @staticmethod
  def create_download_folder(path=None):
    """Create the folder where the downloaded files will be put."""
    local_path = path or DEFAULT_PATH
    folder_name = time.strftime('%Y%m%dT%H%M%S')
    folder_path = os.path.join(local_path, folder_name)

    if not os.path.exists(local_path):
      os.mkdir(local_path)

    if not os.path.exists(folder_path):
      os.mkdir(folder_path)

    return folder_path


async def main():
  """Download the top-n results of a given Intelligence search."""
  usage = 'usage: prog [options] <intelligence_query/local_file_with_hashes>'
  parser = argparse.ArgumentParser(
      usage=usage,
      description='Allows you to download the top-n files returned by a given'
      'VirusTotal Intelligence search. Example: '
      'python %prog type:"peexe" positives:5+ -n 10 --apikey=<your api key>')
  parser.add_argument(
      'query', type=str, nargs='+',
      help='a VirusTotal Intelligence search query.')
  parser.add_argument(
      '-n', '--numfiles', dest='numfiles', default=100,
      help='Number of files to download')
  parser.add_argument('--apikey', required=True, help='Your VirusTotal API key')
  parser.add_argument(
      '-o', '--output-path', required=False,
      help='The path where you want to put the files in')
  parser.add_argument(
      '-w', '--workers', dest='workers', default=4,
      help='Concurrent workers for downloading files')
  args = parser.parse_args()

  if not args.query:
    parser.error('No search query provided')

  if not args.apikey:
    parser.error('No API key provided')

  search = ' '.join(args.query)
  search = search.strip().strip('\'')
  storage_path = args.output_path
  numfiles = int(args.numfiles)
  workers = int(args.workers)
  api_key = args.apikey

  loop = asyncio.get_event_loop()
  handler = DownloadTopNFilesHandler(api_key, numfiles)

  logging.info('Starting VirusTotal Intelligence downloader')
  logging.info('* VirusTotal Intelligence search: %s', search)
  logging.info('* Number of files to download: %s', numfiles)

  files_path = handler.create_download_folder(storage_path)

  enqueue_files_task = loop.create_task(handler.queue_file_hashes(search))

  download_tasks = []
  for i in range(workers):
    download_tasks.append(loop.create_task(handler.download_files(files_path)))

  await asyncio.gather(enqueue_files_task)

  # Wait until all the files have been queued and downloaded, then cancel
  # download tasks that are idle
  await handler.queue.join()

  for w in download_tasks:
    w.cancel()


if __name__ == '__main__':
  loop = asyncio.get_event_loop()
  loop.run_until_complete(main())
  loop.close()
apache-2.0
Python
5ad1170c2515fd799acc43e99e35299bbab9cec1
Add tests for harmonic in 791628c4df60369583474c07d64f1439bd5c19e0
c11/yatsm,ceholden/yatsm,jmorton/yatsm,jmorton/yatsm,valpasq/yatsm,ceholden/yatsm,c11/yatsm,valpasq/yatsm,jmorton/yatsm
tests/test_transforms.py
tests/test_transforms.py
""" Test for `yatsm.regression.transforms` """ import numpy as np import patsy import py.test from yatsm.regression.transforms import harm def test_harmonic_transform(): x = np.arange(735688, 735688 + 100, 1) design = patsy.dmatrix('0 + harm(x, 1)') truth = np.vstack((np.cos(2 * np.pi / 365.25 * x), np.sin(2 * np.pi / 365.25 * x))).T np.testing.assert_equal(np.asarray(design), truth)
mit
Python
5681684a4df6cd70ba4c2e4c667b81a7e8367e25
add missing wrapmodule.py
pymor/dune-pymor,pymor/dune-pymor
dune/pymor/core/wrapmodule.py
dune/pymor/core/wrapmodule.py
# This file is part of the dune-pymor project: # https://github.com/pyMor/dune-pymor # Copyright Holders: Felix Albrecht, Stephan Rave # License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause) from inspect import isclass from types import ModuleType from dune.pymor.core.wrapper import Wrapper from dune.pymor.la.container import wrap_vector from dune.pymor.discretizations import wrap_stationary_discretization from dune.pymor.functionals import wrap_affinely_decomposed_functional, wrap_functional from dune.pymor.operators import wrap_affinely_decomposed_operator, wrap_operator def wrap_module(mod): AffinelyDecomposedFunctionalInterface = mod.Dune.Pymor.AffinelyDecomposedFunctionalInterfaceDynamic AffinelyDecomposedOperatorInterface = mod.Dune.Pymor.AffinelyDecomposedOperatorInterfaceDynamic FunctionalInterface = mod.Dune.Pymor.FunctionalInterfaceDynamic VectorInterface = mod.Dune.Pymor.LA.VectorInterfaceDynamic OperatorInterface = mod.Dune.Pymor.OperatorInterfaceDynamic Parameter = mod.Dune.Pymor.Parameter ParameterFunctional = mod.Dune.Pymor.ParameterFunctional ParameterType = mod.Dune.Pymor.ParameterType StationaryDiscretizationInterface = mod.Dune.Pymor.StationaryDiscretizationInterfaceDynamic wrapped_modules = {} wrapper = Wrapper(DuneParameterType = ParameterType, DuneParameter = Parameter) def create_modules(mod, parent_name=''): wrapped_mod = ModuleType(mod.__name__.lower()) full_name = '.'.join([parent_name, wrapped_mod.__name__]) if parent_name else wrapped_mod.__name__ wrapped_modules[mod] = {'wrapped': wrapped_mod, 'empty': True, 'full_name': full_name} for k, v in mod.__dict__.iteritems(): if isinstance(v, ModuleType): create_modules(v, full_name) def add_to_module(k, v, mod): wrapped_mod = wrapped_modules[mod]['wrapped'] full_name = wrapped_modules[mod]['full_name'] try: v.__module__ = full_name except AttributeError: pass wrapped_mod.__dict__[k] = v wrapped_modules[mod]['empty'] = False def add_modules(mod): wrapped_mod = wrapped_modules[mod]['wrapped'] for k, v in mod.__dict__.iteritems(): if isinstance(v, ModuleType): wv = add_modules(v) if not wrapped_modules[v]['empty']: wrapped_mod.__dict__[k.lower()] = wv wrapped_modules[mod]['empty'] = False return wrapped_mod def wrap_vectors(mod): for k, v in mod.__dict__.iteritems(): if isinstance(v, ModuleType): wrap_vectors(v) elif v == VectorInterface: continue elif isclass(v) and issubclass(v, VectorInterface): wrapped_vector, vector_array = wrap_vector(v) add_to_module(k, wrapped_vector, mod) add_to_module(vector_array.__name__, vector_array, mod) wrapper.add_vector_class(v, wrapped_vector, vector_array) def wrap_classes(mod): for k, v in mod.__dict__.iteritems(): if isinstance(v, ModuleType): wrap_classes(v) elif hasattr(v, '__name__') and 'Interface' in v.__name__: continue elif isclass(v): if issubclass(v, AffinelyDecomposedFunctionalInterface): wrapped_class = wrap_affinely_decomposed_functional(v, wrapper) elif issubclass(v, AffinelyDecomposedOperatorInterface): wrapped_class = wrap_affinely_decomposed_operator(v, wrapper) elif issubclass(v, FunctionalInterface): wrapped_class = wrap_functional(v, wrapper) elif issubclass(v, OperatorInterface): wrapped_class = wrap_operator(v, wrapper) elif issubclass(v, StationaryDiscretizationInterface): wrapped_class = wrap_stationary_discretization(v, wrapper) else: continue add_to_module(k, wrapped_class, mod) wrapper.add_class(v, wrapped_class) create_modules(mod) wrap_vectors(mod) wrap_classes(mod) wrapped_module = add_modules(mod) return wrapped_module, wrapper
bsd-2-clause
Python
ddb9c9b3108ac587e3c29e7a45bacea6afd488cc
add python solution to "project euler - problem 11"
mo/project-euler,mo/project-euler,mo/project-euler,mo/project-euler,mo/project-euler,mo/project-euler,mo/project-euler,mo/project-euler,mo/project-euler
problem11.py
problem11.py
number_string = """08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48""" # convert the big block number string into a two dimensional array of integers # This list comprehension parses the rows first and then each column, which means that we will # end up with matrix[y][x] instead of matrix[x][y] which would have been more intuitive int_matrix = [[int(number_string) for number_string in row_string.split(" ")] for row_string in number_string.split("\n")] def get_cell(x, y): if (0 <= x <= 19 and 0 <= y <= 19): # reversed coordinate axis (use y,x instead of x,y) due to parsing return int_matrix[y][x] else: # hack to make sure products involving this cell value will be zero # wow this is sooo ugly :-( return 0 def check_vertical(x, y): return get_cell(x,y) * get_cell(x,y+1) * get_cell(x,y+2) * get_cell(x,y+3) def check_horizontal(x, y): return get_cell(x,y) * get_cell(x+1,y) * get_cell(x+2,y) * get_cell(x+3,y) # south west (sw) to north east (ne) def check_nw_se_diagonal(x, y): return get_cell(x,y) * get_cell(x+1,y+1) * get_cell(x+2,y+2) * get_cell(x+3,y+3) # north east (ne) to south west (sw) def check_ne_sw_diagonal(x, y): return get_cell(x,y) * get_cell(x-1,y+1) * get_cell(x-2,y+2) * get_cell(x-3,y+3) def get_highest_cell_product(x, y): return max(check_vertical(x, y), check_horizontal(x, y), check_nw_se_diagonal(x, y), check_ne_sw_diagonal(x, y)) for y in xrange(0,20): for x in xrange(0,20): print str(get_cell(x,y)).zfill(2), print "" greatest_cell_product = 0 for y in xrange(0,20): for x in xrange(0,20): cell_product = get_highest_cell_product(x, y) if (cell_product > greatest_cell_product): greatest_cell_product = cell_product print "greatest_product==", greatest_cell_product
mit
Python
0d390edeeb8829c0b8afef090f133d0fee8bce4f
Bump PROVISION_VERSION for latest changes.
mahim97/zulip,shubhamdhama/zulip,andersk/zulip,timabbott/zulip,zulip/zulip,jackrzhang/zulip,brainwane/zulip,andersk/zulip,andersk/zulip,punchagan/zulip,jackrzhang/zulip,zulip/zulip,dhcrzf/zulip,Galexrt/zulip,hackerkid/zulip,punchagan/zulip,eeshangarg/zulip,zulip/zulip,Galexrt/zulip,mahim97/zulip,andersk/zulip,vaidap/zulip,timabbott/zulip,hackerkid/zulip,zulip/zulip,rishig/zulip,jackrzhang/zulip,shubhamdhama/zulip,zulip/zulip,amanharitsh123/zulip,kou/zulip,dhcrzf/zulip,eeshangarg/zulip,vaidap/zulip,hackerkid/zulip,brockwhittaker/zulip,punchagan/zulip,Galexrt/zulip,tommyip/zulip,vaidap/zulip,brockwhittaker/zulip,kou/zulip,rishig/zulip,jackrzhang/zulip,verma-varsha/zulip,timabbott/zulip,kou/zulip,andersk/zulip,shubhamdhama/zulip,brainwane/zulip,andersk/zulip,brainwane/zulip,rht/zulip,synicalsyntax/zulip,eeshangarg/zulip,hackerkid/zulip,eeshangarg/zulip,brockwhittaker/zulip,tommyip/zulip,tommyip/zulip,brainwane/zulip,jackrzhang/zulip,synicalsyntax/zulip,dhcrzf/zulip,dhcrzf/zulip,vaidap/zulip,showell/zulip,verma-varsha/zulip,zulip/zulip,shubhamdhama/zulip,brainwane/zulip,mahim97/zulip,Galexrt/zulip,Galexrt/zulip,amanharitsh123/zulip,rht/zulip,rishig/zulip,mahim97/zulip,amanharitsh123/zulip,verma-varsha/zulip,rishig/zulip,dhcrzf/zulip,verma-varsha/zulip,hackerkid/zulip,punchagan/zulip,tommyip/zulip,Galexrt/zulip,amanharitsh123/zulip,showell/zulip,hackerkid/zulip,rishig/zulip,tommyip/zulip,mahim97/zulip,rishig/zulip,synicalsyntax/zulip,brainwane/zulip,rht/zulip,andersk/zulip,synicalsyntax/zulip,rht/zulip,shubhamdhama/zulip,jackrzhang/zulip,eeshangarg/zulip,kou/zulip,zulip/zulip,synicalsyntax/zulip,jackrzhang/zulip,timabbott/zulip,timabbott/zulip,verma-varsha/zulip,showell/zulip,amanharitsh123/zulip,rht/zulip,brockwhittaker/zulip,tommyip/zulip,synicalsyntax/zulip,eeshangarg/zulip,showell/zulip,punchagan/zulip,vaidap/zulip,rht/zulip,dhcrzf/zulip,brockwhittaker/zulip,kou/zulip,punchagan/zulip,eeshangarg/zulip,hackerkid/zulip,showell/zulip,showell/zulip,kou/zulip,rishig/zulip,synicalsyntax/zulip,timabbott/zulip,brainwane/zulip,verma-varsha/zulip,timabbott/zulip,Galexrt/zulip,brockwhittaker/zulip,shubhamdhama/zulip,dhcrzf/zulip,showell/zulip,shubhamdhama/zulip,rht/zulip,kou/zulip,amanharitsh123/zulip,mahim97/zulip,vaidap/zulip,tommyip/zulip,punchagan/zulip
version.py
version.py
ZULIP_VERSION = "1.6.0+git"
PROVISION_VERSION = '9.1'
ZULIP_VERSION = "1.6.0+git"
PROVISION_VERSION = '9.0'
apache-2.0
Python
59a228312bb3091db8bfb6bf9a75ce4ae47431f4
Add zero system test to neural net
WesleyAC/toybox,WesleyAC/toybox,WesleyAC/toybox,WesleyAC/toybox,WesleyAC/toybox
neuralnets/net_test.py
neuralnets/net_test.py
from net import NeuralNet import numpy as np #TODO(Wesley) More tests class TestNeuralNet(object): def test_zero_system(self): net = NeuralNet(3, 2, 4, 1, seed=0) net.weights = [ np.zeros((3,4)), np.zeros((4,4)), np.zeros((4,4)), np.zeros((4,1)) ] inpt = np.asarray([1, 1, 1]) print(net.forward(inpt)) for layer in net.forward(inpt)[1:]: for neuron in layer: assert neuron == 0.5
mit
Python
d97b9f6c508dd24da0f86bc1587ea64708c84a89
Add parser for the advisory mail recipients.
YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion
tools/dist/security/mailinglist.py
tools/dist/security/mailinglist.py
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # """ Parser for advisory e-mail distribution addresses """ from __future__ import absolute_import import os import re class MailingList(object): """ A list of e-mail addresses for security advisory pre-notifications. Parses ^/pmc/subversion/security/pre-notifications.txt """ __PRE_NOTIFICATIONS = 'pre-notifications.txt' __ADDRESS_LINE = re.compile(r'^\s{6}(?:[^<]+)?<[^<>]+>\s*$') def __init__(self, rootdir): self.__addresses = [] self.__parse_addresses(rootdir) def __iter__(self): return self.__addresses.__iter__() def __len__(self): return len(self.__addresses) def __parse_addresses(self, rootdir): pre_notifications = os.path.join(rootdir, self.__PRE_NOTIFICATIONS) with open(pre_notifications, 'rt') as pn: for line in pn: m = self.__ADDRESS_LINE.match(line) if not m: continue self.__addresses.append(line.strip())
apache-2.0
Python
80a7493e56b1ba6b01bf44f6dd9140de916511a7
add twisted interface to psycopg2
akrherz/pyIEM
pyiem/twistedpg.py
pyiem/twistedpg.py
""" module twistedpg.py Author: Federico Di Gregorio http://twistedmatrix.com/pipermail/twisted-python/2006-April/012955.html """ from psycopg2 import * from psycopg2 import connect as _2connect from psycopg2.extensions import connection as _2connection from psycopg2.extras import RealDictCursor del connect def connect(*args, **kwargs): kwargs['connection_factory'] = connection return _2connect(*args, **kwargs) class connection(_2connection): def cursor(self): return _2connection.cursor(self, cursor_factory=RealDictCursor)
mit
Python
009182d0c603f9c1f8fa650f6a9771b38a74c6cc
Add a proper validator for disable_builtins
thalamus/Flexget,offbyone/Flexget,drwyrm/Flexget,Danfocus/Flexget,poulpito/Flexget,drwyrm/Flexget,malkavi/Flexget,sean797/Flexget,oxc/Flexget,jawilson/Flexget,jacobmetrick/Flexget,ZefQ/Flexget,JorisDeRieck/Flexget,gazpachoking/Flexget,asm0dey/Flexget,drwyrm/Flexget,vfrc2/Flexget,JorisDeRieck/Flexget,jawilson/Flexget,xfouloux/Flexget,OmgOhnoes/Flexget,camon/Flexget,thalamus/Flexget,antivirtel/Flexget,vfrc2/Flexget,tobinjt/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,Danfocus/Flexget,X-dark/Flexget,lildadou/Flexget,ibrahimkarahan/Flexget,oxc/Flexget,ianstalk/Flexget,Pretagonist/Flexget,Pretagonist/Flexget,thalamus/Flexget,qk4l/Flexget,OmgOhnoes/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,jacobmetrick/Flexget,Flexget/Flexget,lildadou/Flexget,tobinjt/Flexget,tarzasai/Flexget,antivirtel/Flexget,tarzasai/Flexget,ZefQ/Flexget,ibrahimkarahan/Flexget,crawln45/Flexget,qvazzler/Flexget,xfouloux/Flexget,dsemi/Flexget,camon/Flexget,gazpachoking/Flexget,cvium/Flexget,asm0dey/Flexget,offbyone/Flexget,Danfocus/Flexget,OmgOhnoes/Flexget,grrr2/Flexget,Danfocus/Flexget,qk4l/Flexget,antivirtel/Flexget,dsemi/Flexget,qvazzler/Flexget,ratoaq2/Flexget,Pretagonist/Flexget,ZefQ/Flexget,Flexget/Flexget,patsissons/Flexget,crawln45/Flexget,X-dark/Flexget,sean797/Flexget,qk4l/Flexget,malkavi/Flexget,dsemi/Flexget,grrr2/Flexget,JorisDeRieck/Flexget,spencerjanssen/Flexget,ratoaq2/Flexget,ratoaq2/Flexget,ianstalk/Flexget,malkavi/Flexget,Flexget/Flexget,poulpito/Flexget,tsnoam/Flexget,crawln45/Flexget,tarzasai/Flexget,lildadou/Flexget,LynxyssCZ/Flexget,LynxyssCZ/Flexget,vfrc2/Flexget,jacobmetrick/Flexget,sean797/Flexget,cvium/Flexget,grrr2/Flexget,patsissons/Flexget,voriux/Flexget,v17al/Flexget,spencerjanssen/Flexget,patsissons/Flexget,ianstalk/Flexget,asm0dey/Flexget,cvium/Flexget,xfouloux/Flexget,X-dark/Flexget,tvcsantos/Flexget,tsnoam/Flexget,oxc/Flexget,spencerjanssen/Flexget,poulpito/Flexget,tobinjt/Flexget,jawilson/Flexget,v17al/Flexget,tvcsantos/Flexget,ibrahimkarahan/Flexget,v17al/Flexget,jawilson/Flexget,tsnoam/Flexget,voriux/Flexget,offbyone/Flexget,Flexget/Flexget,qvazzler/Flexget,tobinjt/Flexget,crawln45/Flexget
flexget/plugins/plugin_disable_builtins.py
flexget/plugins/plugin_disable_builtins.py
import logging from flexget import plugin from flexget.plugin import priority, register_plugin, plugins log = logging.getLogger('builtins') def all_builtins(): """Helper function to return an iterator over all builtin plugins.""" return (plugin for plugin in plugins.itervalues() if plugin.builtin) class PluginDisableBuiltins(object): """Disables all (or specific) builtin plugins from a feed.""" def validator(self): from flexget import validator root = validator.factory() root.accept('boolean') root.accept('list').accept('choice').accept_choices(plugin.name for plugin in all_builtins()) return root def debug(self): log.debug('Builtin plugins: %s' % ', '.join(plugin.name for plugin in all_builtins())) @priority(255) def on_feed_start(self, feed, config): self.disabled = [] if not config: return for plugin in all_builtins(): if config is True or plugin.name in config: plugin.builtin = False self.disabled.append(plugin.name) log.debug('Disabled builtin plugin(s): %s' % ', '.join(self.disabled)) @priority(-255) def on_feed_exit(self, feed, config): if not self.disabled: return for name in self.disabled: plugin.plugins[name].builtin = True log.debug('Enabled builtin plugin(s): %s' % ', '.join(self.disabled)) self.disabled = [] on_feed_abort = on_feed_exit register_plugin(PluginDisableBuiltins, 'disable_builtins', api_ver=2)
import logging from flexget import plugin from flexget.plugin import priority, register_plugin log = logging.getLogger('builtins') class PluginDisableBuiltins(object): """ Disables all builtin plugins from a feed. """ def __init__(self): self.disabled = [] def validator(self): from flexget import validator # TODO: accept only list (of texts) or boolean return validator.factory('any') def debug(self): for name, info in plugin.plugins.iteritems(): if not info.builtin: continue log.debug('Builtin plugin: %s' % name) def on_feed_start(self, feed): for name, info in plugin.plugins.iteritems(): if info.builtin: if isinstance(feed.config['disable_builtins'], list): if info.name in feed.config['disable_builtins']: info.builtin = False self.disabled.append(name) else: # disabling all builtins info.builtin = False self.disabled.append(name) log.debug('Disabled builtin plugin %s' % ', '.join(self.disabled)) @priority(-255) def on_feed_exit(self, feed): names = [] for name in self.disabled: names.append(name) plugin.plugins[name].builtin = True self.disabled = [] log.debug('Enabled builtin plugins %s' % ', '.join(names)) on_feed_abort = on_feed_exit register_plugin(PluginDisableBuiltins, 'disable_builtins')
mit
Python
f47482df83a8ab643a55062b12fce11fbd703886
add 90. The first 100 problems have been solved! Oh~~~~~~~~Yeah
zeyuanxy/project-euler,zeyuanxy/project-euler,EdisonAlgorithms/ProjectEuler,zeyuanxy/project-euler,EdisonAlgorithms/ProjectEuler,zeyuanxy/project-euler,EdisonAlgorithms/ProjectEuler,EdisonAlgorithms/ProjectEuler
vol2/90.py
vol2/90.py
from itertools import combinations


def valid(c1, c2):
    return all(x in c1 and y in c2 or x in c2 and y in c1 for x, y in squares)

if __name__ == "__main__":
    squares = [(0,1), (0,4), (0,6), (1,6), (2,5), (3,6), (4,6), (8,1)]
    cube = list(combinations([0,1,2,3,4,5,6,7,8,6], 6))
    print sum(1 for i, c1 in enumerate(cube) for c2 in cube[i+1:] if valid(c1, c2))
mit
Python
c6cd7d2a310bc0b107e0d2a481260b2e95bac577
add prime_factors function to utils
dawran6/project-euler
utils.py
utils.py
"Utilities to help solving problems." def prime_factors(num): i = 2 while i * i <= num: if num % i: i += 1 else: num //= i yield i if num > 1: yield num
mit
Python
6be93bfbaf254234f008e2c714b0aae10434fe68
add orm
YuiJL/myweblog,YuiJL/myweblog,YuiJL/myweblog
www/orm.py
www/orm.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- __author__ = 'Jiayi Li' import asyncio, aiomysql, logging def log(sql, args=()): logging.info('SQL: %s' % sql) # create a connection pool, stored by global variable '__pool' async def create_pool(loop, **kw): logging.info('create database connection pool...') global __pool __pool = await aiomysql.create_pool( host = kw.get('host', 'localhost'), port = kw.get('port', 3306), user = kw.get('user'), password = kw.get('password'), db = kw.get('db'), charset = kw.get('charset', 'utf-8'), autocommit = kw.get('autocommit', True), maxsize = kw.get('maxsize', 10), minsize = kw.get('minsize', 1), loop = loop ) # SELECT async def select(sql, args, size=None): log(sql, args) global __pool async with __pool.get() as conn: async with conn.cursor(aiomysql.DictCursor) as cur: await cur.execute(sql.replace('?', '%s'), args or ()) if size: rs = await cur.fetchmany(size) else: rs = yield from cur.fetchall() await cur.close() logging.info('rows returned: %s' % len(rs)) return rs # INSERT, UPDATE and DELETE async def execute(sql, args, autocommit=True): log(sql) async with __pool.get() as conn: if not autocommit: await conn.begin() try: async with conn.cursor(aiomysql.DictCursor) as cur: await cur.execute(sql.replace('?', '%s'), args) affected = cur.rowcount if not autocommit: await conn.commit() except BaseException as e: if not autocommit: await conn.rollback() raise return affected
mit
Python
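A minimal usage sketch for the pool helpers above; the connection settings, table, and values are illustrative assumptions, not taken from the record.

# Illustrative only: driving create_pool/select/execute from an event loop
import asyncio
import orm

async def demo(loop):
    # assumed credentials and table name for demonstration
    await orm.create_pool(loop, user='www', password='www', db='awesome')
    rows = await orm.select('select * from users where name=?', ['Alice'])
    affected = await orm.execute('insert into users (name) values (?)', ['Bob'])
    return rows, affected

loop = asyncio.get_event_loop()
loop.run_until_complete(demo(loop))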
5b05640a60c66d9d12b9794f2ae55785efe1e099
Define solidfill.
soasme/riotpy
riot/tags/solidfill.py
riot/tags/solidfill.py
# -*- coding: utf-8 -*-

from urwid import SolidFill

def parse_tag_from_node(node):
    return SolidFill()
mit
Python
785f2d3a6d10d8d6ba72712eec29c5be5849f671
Add build_raw_data.py
PaddlePaddle/models,PaddlePaddle/models,kuke/models,kuke/models,PaddlePaddle/models,kuke/models,kuke/models
fluid/PaddleNLP/text_classification/async_executor/data_generator/build_raw_data.py
fluid/PaddleNLP/text_classification/async_executor/data_generator/build_raw_data.py
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Build lego raw data
"""
from __future__ import print_function
import sys
import os
import random
import re

data_type = sys.argv[1]
if not (data_type == "train" or data_type == "test"):
    print("python %s [test/train]" % sys.argv[0], file=sys.stderr)
    sys.exit(-1)

pos_folder = "aclImdb/" + data_type + "/pos/"
neg_folder = "aclImdb/" + data_type + "/neg/"

pos_train_list = [(pos_folder + x, "1") for x in os.listdir(pos_folder)]
neg_train_list = [(neg_folder + x, "0") for x in os.listdir(neg_folder)]

all_train_list = pos_train_list + neg_train_list
random.shuffle(all_train_list)


def load_dict(dictfile):
    """
    Load word id dict
    """
    vocab = {}
    wid = 0
    with open(dictfile) as f:
        for line in f:
            vocab[line.strip()] = str(wid)
            wid += 1
    return vocab


vocab = load_dict("aclImdb/imdb.vocab")
unk_id = str(len(vocab))
print("vocab size: ", len(vocab), file=sys.stderr)
pattern = re.compile(r'(;|,|\.|\?|!|\s|\(|\))')

for fitem in all_train_list:
    label = str(fitem[1])
    fname = fitem[0]
    with open(fname) as f:
        sent = f.readline().lower().replace("<br />", " ").strip()
        out_s = "%s | %s" % (sent, label)
        print(out_s, file=sys.stdout)
apache-2.0
Python
f99c8e6e26b85ae7805ff38e4d89978d06e93c97
Add SQSRequest base class
MA3STR0/AsyncAWS
sqs.py
sqs.py
from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac


class SQSRequest(HTTPRequest):
    """SQS AWS Adapter for Tornado HTTP request"""
    def __init__(self, *args, **kwargs):
        super(SQSRequest, self).__init__(*args, **kwargs)
mit
Python
972f58d3280c95c2823b04a76469c401e7d9fc02
add border and basic functional
pkulev/xoinvader,pankshok/xoinvader
xoi.py
xoi.py
#! /usr/bin/env python

import sys
import curses
from curses import KEY_ENTER
import time
from collections import namedtuple

KEY = "KEY"
K_A = ord("a")
K_D = ord("d")


class Point:
    def __init__(self, x, y):
        self._x = x
        self._y = y

    @property
    def x(self):
        return self._x

    @x.setter
    def x(self, val):
        self._x = val

    @property
    def y(self):
        return self._y

    @y.setter
    def y(self, val):
        self._y = val


Event = namedtuple("Event", ["type", "val"])


class Spaceship(object):
    def __init__(self, border):
        self._image = "<i>"
        self._dx = 1
        self.border = border
        self._pos = Point(self.border.x // 2, self.border.y - 1)

    def events(self, event):
        if event.type == KEY:
            if event.val == K_A:
                self._dx = -1
            if event.val == K_D:
                self._dx = 1

    def update(self):
        if self._pos.x == self.border.x - len(self._image) - 1 and self._dx > 0:
            self._pos.x = 0
        elif self._pos.x == 1 and self._dx < 0:
            self._pos.x = self.border.x - len(self._image)
        self._pos.x += self._dx
        self._dx = 0

    def draw(self, screen):
        screen.addstr(self._pos.y, self._pos.x, self._image, curses.A_BOLD)


class App(object):
    def __init__(self):
        #self.screen = curses.initscr()
        curses.initscr()
        self.border = namedtuple("border", ["y", "x"])(24, 80)
        self.field = namedtuple("field", ["y", "x"])(self.border.y-1, self.border.x-1)
        self.screen = curses.newwin(self.border.y, self.border.x, 0, 0)
        self.screen.keypad(True)
        self.screen.nodelay(True)
        curses.noecho()
        curses.cbreak()
        curses.curs_set(False)

        self.spaceship = Spaceship(self.field)
        self._objects = []
        self._objects.append(self.spaceship)

    def deinit(self):
        #self.screen.nodelay(False)
        self.screen.keypad(False)
        curses.nocbreak()
        curses.echo()
        curses.curs_set(True)
        curses.endwin()

    def events(self):
        c = self.screen.getch()
        if c == 27: #Escape
            #self.deinit()
            sys.exit(1)
        else:
            for o in self._objects:
                o.events(Event(type="KEY", val=c))

    def update(self):
        for o in self._objects:
            o.update()

    def render(self):
        self.screen.clear()
        self.screen.border(0)
        for o in self._objects:
            o.draw(self.screen)
        self.screen.refresh()

    def loop(self):
        while True:
            self.events()
            self.update()
            self.render()


def main():
    app = App()
    app.loop()

if __name__ == "__main__":
    main()
mit
Python
9e2669539c5d7662bb6d6a89877b30235eef1bc2
Write solution to DEC14 XOR question.
paramsingh/cp,paramsingh/codechef-solutions,paramsingh/cp,paramsingh/codechef-solutions,paramsingh/cp,paramsingh/cp,paramsingh/codechef-solutions,paramsingh/codechef-solutions,paramsingh/cp
xor.py
xor.py
# http://www.codechef.com/DEC14/problems/XORSUB

import operator

def f(p):
    if p == []:
        return 0
    elif len(p) == 1:
        return p[0]
    else:
        return reduce(operator.xor, p)

def list_powerset(lst):
    result = [[]]
    for x in lst:
        result.extend([subset + [x] for subset in result])
    return result

t = int(raw_input())
while t:
    k = int(raw_input().split()[1])
    array = map(int, raw_input().split())
    max = -1
    for i in list_powerset(array):
        if max < (k ^ f(i)):
            max = k ^ f(i)
    print max
    t -= 1
mit
Python
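A small worked check of the subset-XOR idea used above; the values of k and the array are illustrative, not from the record.

# Illustrative check, not part of the record: k = 4, array = [1, 2, 3]
import operator
subsets = [[], [1], [2], [3], [1, 2], [1, 3], [2, 3], [1, 2, 3]]
best = max(4 ^ reduce(operator.xor, s, 0) for s in subsets)
print best  # 7, reached by the subset [3] (or [1, 2])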
135cdb7f16372978774acf06d4da556d0a7a7db7
add solution template
pheanex/xpython,pheanex/xpython,exercism/python,behrtam/xpython,exercism/python,jmluy/xpython,exercism/xpython,smalley/python,jmluy/xpython,behrtam/xpython,smalley/python,exercism/xpython,N-Parsons/exercism-python,N-Parsons/exercism-python
exercises/error-handling/error_handling.py
exercises/error-handling/error_handling.py
def handle_error_by_throwing_exception():
    pass


def handle_error_by_returning_none(input_data):
    pass


def handle_error_by_returning_tuple(input_data):
    pass


def filelike_objects_are_closed_on_exception(filelike_object):
    pass
mit
Python
286e996c8dd7a299a5db148e78bbdaa0e1cb1b5c
Add sample base.
mtoshi/airwaveapiclient,mtoshi/airwaveapiclient
samples/sample.py
samples/sample.py
# -*- coding: utf-8 -*-

"""AirWaveAPIClient sample."""


def main():
    """Sample main."""


if __name__ == "__main__":
    main()
mit
Python
c1fae9e5ace57320b4f4e69efc941c7fe6266381
add stft graph writer
silppuri/birdwatcher-brain,silppuri/birdwatcher-brain
write_stft_graph.py
write_stft_graph.py
import pdb
import tensorflow as tf
from birdwatcher.generators import compose, stft, amplitude_to_db, read_audio, reshape

AUDIO_SHAPE = (44100*3, 1)

clean_samples = compose(reshape, amplitude_to_db, stft, read_audio)

x = tf.placeholder(tf.float32, shape=AUDIO_SHAPE)
out = clean_samples(x)

sess = tf.Session()
tf.train.write_graph(sess.graph_def, 'models', 'stft.pbtxt')
mit
Python
aa517e0300e3e5079523d30c0bb7bfe1fe9640a4
Add script to wait for ELB instances to come into service.
shinesolutions/aem-aws-stack-builder,shinesolutions/aem-aws-stack-builder
scripts/wait-for-elb-instances-in-service.py
scripts/wait-for-elb-instances-in-service.py
#!/usr/bin/env python

import argparse, re, time

from assume_role_lib import log, sts
from assume_role_lib.util import unwrap

from datetime import datetime, timedelta
from collections import Counter as counter

__version__ = '0.1'

logger = None


def add_arguments(argument_parser):
    argument_parser.add_argument(
        '--version', '-V',
        action = 'version',
        version = '%(prog)s {0}'.format(__version__),
        help = 'Show version information and exit.',
    )

    argument_parser.add_argument(
        '--timeout',
        metavar = 'TIME',
        default = '15m',
        help = unwrap("""
            Stack timeout: XXn; for n - d = days, h = hour, m = mins, s = seconds.
        """)
    )

    argument_parser.add_argument(
        'elb',
        metavar = 'NAME',
        nargs = '+',
        help = unwrap("""
            The ELB to watch. Multiple are allowed.
        """)
    )


def parse_timeout(timeout):
    days, hours, minutes, seconds = 0, 0, 0, 0

    findall = re.findall(r'([0-9]+)([dhms])', timeout.lower())
    for number, unit in findall:
        number = int(number)
        if unit == 'd':
            days = number
        elif unit == 'h':
            hours = number
        elif unit == 'm':
            minutes = number
        elif unit == 's':
            seconds = number

    return timedelta(days, seconds, 0, 0, minutes, hours)


def get_elb_instance_info(elb, elb_client):
    instance_health = elb_client.describe_instance_health(
        LoadBalancerName = elb,
    ).get('InstanceStates', [])

    instance_states = counter((
        i.get('State')
        for i in instance_health
    ))

    return len(instance_health), instance_states


def main():
    global logger

    p = argparse.ArgumentParser(
        description=unwrap("""
            Wait for all instances added to an ELB to come into service.
            Exits non-zero if there are no instances added to an ELB.
        """),
    )
    sts.add_arguments(p)
    log.add_arguments(p)
    add_arguments(p)
    args = p.parse_args()

    logger = log.get_logger(args)
    logger.debug('Args: %r', args)

    session = sts.get_session(args)
    elb_client = session.client('elb')

    elbs = set( args.elb )
    complete_elbs = set()
    remaining_elbs = elbs - complete_elbs

    timeout = datetime.now() + parse_timeout(args.timeout)

    while remaining_elbs:
        for elb in remaining_elbs:
            count, states = get_elb_instance_info(elb, elb_client)
            logger.debug('%s: %r', elb, states)

            if states.get('InService') == count:
                complete_elbs.add(elb)
                logger.info(
                    '%s has %d of %d instances in service',
                    elb,
                    states.get('InService', 0),
                    count,
                )

        remaining_elbs = elbs - complete_elbs

        if remaining_elbs:
            if datetime.now() > timeout:
                logger.error(
                    'Timeout waiting for ELBs. %d have not completed.',
                    len(remaining_elbs),
                )
                for elb in remaining_elbs:
                    count, states = get_elb_instance_info(elb, elb_client)
                    logger.error(
                        '%s has only %d of %d instances in service',
                        elb,
                        states.get('InService', 0),
                        count,
                    )
                raise SystemExit(2)

            logger.info('Waiting for %d ELB\'s instances to come into service.', len(remaining_elbs))
            logger.debug('%r', remaining_elbs)
            time.sleep(15)


if __name__ == '__main__':
    main()
apache-2.0
Python
eff5016653980f24c5c55dfb866dbe108f50dedf
Add the cbtf spack build package. cbtf is the base package for the component based tool framework and is used for building and connecting cbtf components, including distributed components via the MRNet transfer mechanism.
lgarren/spack,lgarren/spack,skosukhin/spack,TheTimmy/spack,tmerrick1/spack,TheTimmy/spack,matthiasdiener/spack,tmerrick1/spack,krafczyk/spack,skosukhin/spack,mfherbst/spack,mfherbst/spack,tmerrick1/spack,matthiasdiener/spack,krafczyk/spack,iulian787/spack,lgarren/spack,LLNL/spack,TheTimmy/spack,krafczyk/spack,mfherbst/spack,matthiasdiener/spack,LLNL/spack,TheTimmy/spack,mfherbst/spack,TheTimmy/spack,krafczyk/spack,iulian787/spack,lgarren/spack,LLNL/spack,EmreAtes/spack,mfherbst/spack,iulian787/spack,iulian787/spack,LLNL/spack,matthiasdiener/spack,skosukhin/spack,tmerrick1/spack,skosukhin/spack,skosukhin/spack,EmreAtes/spack,LLNL/spack,lgarren/spack,tmerrick1/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,EmreAtes/spack,krafczyk/spack,EmreAtes/spack
var/spack/packages/cbtf/package.py
var/spack/packages/cbtf/package.py
################################################################################
# Copyright (c) 2015 Krell Institute. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################

from spack import *

class Cbtf(Package):
    """CBTF project contains the base code for CBTF that supports creating
       components, component networks and the support to connect these
       components and component networks into sequential and distributed
       network tools."""

    homepage = "http://sourceforge.net/p/cbtf/wiki/Home"
    url = "http://sourceforge.net/projects/cbtf/files/cbtf-1.5/cbtf-1.5.tar.gz/download"
    version('1.5', '75a97e0161d19b3a12305ed1ffb3d3e2')

    # Mirror access template example
    #url = "file:/opt/spack-mirror-2015-02-27/cbtf/cbtf-1.5.tar.gz"
    #version('1.5', '1ca88a8834759c4c74452cb97fe7b70a')

    # Use when the git repository is available
    #version('1.5', branch='master', git='http://git.code.sf.net/p/cbtf/cbtf')

    depends_on("cmake")
    depends_on("boost@1.41:")
    depends_on("mrnet@4.1.0+krelloptions")
    depends_on("xerces-c@3.1.1:")
    depends_on("libxml2")

    parallel = False

    def install(self, spec, prefix):
        with working_dir('build', create=True):
            # Boost_NO_SYSTEM_PATHS  Set to TRUE to suppress searching
            # in system paths (or other locations outside of BOOST_ROOT
            # or BOOST_INCLUDEDIR).  Useful when specifying BOOST_ROOT.
            # Defaults to OFF.
            cmake('..',
                  '--debug-output',
                  '-DBoost_NO_SYSTEM_PATHS=TRUE',
                  '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
                  '-DBOOST_ROOT=%s' % spec['boost'].prefix,
                  '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
                  '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake'),
                  *std_cmake_args)

            make("clean")
            make()
            make("install")
lgpl-2.1
Python
fc21bb14600f79a3d9970272fb7edd4eba548262
Add test for python runner action wrapper process script performance.
tonybaloney/st2,nzlosh/st2,Plexxi/st2,StackStorm/st2,lakshmi-kannan/st2,peak6/st2,StackStorm/st2,Plexxi/st2,Plexxi/st2,lakshmi-kannan/st2,StackStorm/st2,peak6/st2,StackStorm/st2,Plexxi/st2,peak6/st2,nzlosh/st2,nzlosh/st2,nzlosh/st2,tonybaloney/st2,tonybaloney/st2,lakshmi-kannan/st2
st2actions/tests/integration/test_python_action_process_wrapper.py
st2actions/tests/integration/test_python_action_process_wrapper.py
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Test case which tests that Python runner action wrapper finishes in <= 200ms. If the process
takes more time to finish, this means it probably directly or in-directly imports some modules
which have side affects and are very slow to import.

Examples of such modules include:

* jsonschema
* pecan
* jinja2
* kombu
* mongoengine

If the tests fail, look at the recent changes and analyze the import graph using the following
command:

"profimp "from st2common.runners import python_action_wrapper" --html > report.html"
"""

import os

import unittest2

from st2common.util.shell import run_command

# Maximum limit for the process wrapper script execution time (in seconds)
WRAPPER_PROCESS_RUN_TIME_UPPER_LIMIT = 0.35

ASSERTION_ERROR_MESSAGE = ("""
Python wrapper process script took more than %s seconds to execute (%s). This most likely means
that a direct or in-direct import of a module which takes a long time to load has been added
(e.g. jsonschema, pecan, kombu, etc).

Please review recently changed and added code for potential slow import issues and refactor /
re-organize code if possible.
""".strip())

BASE_DIR = os.path.dirname(os.path.abspath(__file__))
WRAPPER_SCRIPT_PATH = os.path.join(BASE_DIR, '../../st2common/runners/python_action_wrapper.py')


class PythonRunnerActionWrapperProcess(unittest2.TestCase):
    def test_process_wrapper_exits_in_reasonable_timeframe(self):
        _, _, stderr = run_command('/usr/bin/time -f "%%e" python %s' % (WRAPPER_SCRIPT_PATH),
                                   shell=True)

        stderr = stderr.strip().split('\n')[-1]
        run_time_seconds = float(stderr)

        assertion_msg = ASSERTION_ERROR_MESSAGE % (WRAPPER_PROCESS_RUN_TIME_UPPER_LIMIT,
                                                   run_time_seconds)
        self.assertTrue(run_time_seconds <= WRAPPER_PROCESS_RUN_TIME_UPPER_LIMIT, assertion_msg)
apache-2.0
Python
c6df42ca99c8f633c2f1efeb9af26ad4b88c4d75
Create 04.py
ezralalonde/cloaked-octo-sansa
02/hw/04.py
02/hw/04.py
# Define a procedure, find_last, that takes as input
# two strings, a search string and a target string,
# and returns the last position in the search string
# where the target string appears, or -1 if there
# are no occurences.
#
# Example: find_last('aaaa', 'a') returns 3
# Make sure your procedure has a return statement.

def find_last(search, target):
    ii = 0
    ans = -1
    while ii >= 0:
        ii = search.find(target, ii)
        if ii != -1:
            ans = ii
            ii = ii + 1
    return ans

#print find_last('aaaa', 'a')
#>>> 3

#print find_last('aaaaa', 'aa')
#>>> 3

#print find_last('aaaa', 'b')
#>>> -1

#print find_last("111111111", "1")
#>>> 8

#print find_last("222222222", "")
#>>> 9

#print find_last("", "3")
#>>> -1

#print find_last("", "")
#>>> 0
bsd-2-clause
Python
ba0093c8b6801bdbded870ea5cc27eeec05abb58
create db script
WSCoders/whatsgoodhere
web/create_db.py
web/create_db.py
__author__ = 'David Mitchell'

#This script creates an example/test db.

from app import db
from app import MenuCategory, MenuItem

db.drop_all()
db.create_all()

appetizer_category = MenuCategory(name='Appetizers')
entree_category = MenuCategory(name='Entrees')
desert_category = MenuCategory(name='Deserts')

bacon_item = MenuItem(name='Bacon', description='Delicious bacon', category=appetizer_category)
baconz_item = MenuItem(name='Baconz', description='Bacon with Bacon on top, fried in a bacon crust', category=entree_category)
baconIceCream_item = MenuItem(name='Bacon Ice Cream', description='Bacon Ice Cream topped with bacon bits', category=desert_category)

db.session.add_all([appetizer_category, entree_category, desert_category, bacon_item, baconz_item, baconIceCream_item])
db.session.commit()
apache-2.0
Python
955a2a7e467cdcf83a19525e421feb9a5eaca7e3
Add huxley/js.py for javascript
ijl/gossamer,ijl/gossamer
huxley/js.py
huxley/js.py
# Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
JavaScript to be executed in the testing user agent.
"""

getHuxleyEvents = """
(function() {
    var events = [];

    window.addEventListener(
        'click',
        function (e) {
            events.push([Date.now(), 'click', [e.clientX, e.clientY]]);
        },
        true
    );
    window.addEventListener(
        'keyup',
        function (e) {
            events.push([Date.now(), 'keyup', String.fromCharCode(e.keyCode)]);
        },
        true
    );
    window.addEventListener(
        'scroll',
        function(e) {
            events.push([Date.now(), 'scroll', [this.pageXOffset, this.pageYOffset]]);
        },
        true
    );

    window._getHuxleyEvents = function() { return events };
})();
"""
apache-2.0
Python
79710bb5b77b8cfa95d92f7f39ac44fc0c605022
Create featureCC.py
beekay-/arcpy-scripts
featureCC.py
featureCC.py
### *- Program: FeatureCC
### *- Objective: To determine the total number of point, line, and polygon shapefiles
###               in a directory
### *- Input: Provided by the user (workspace)
### *- Output: Display total files for point, line, and polygon shapefiles to the user

# - START PROGRAM -

# Import OS module to load appropriate paths depending on which system is being used
import os

# Import ArcPy module to use built-in functions to achieve the program objective
import arcpy

# From ArcPy, import the environment/workspace
from arcpy import env

# Ask the user to input a file path to set as a workspace
env.workspace = raw_input("\nPlease enter your file path: ")

# Assign the workspace to a new variable
filePath = env.workspace

x = 0

while x < 1:                                          # Set up a file validation system
    if os.path.exists(filePath):                      # If file path, exists: continue. Otherwise..
        x = 1                                         # ..go to Line 45

        point = arcpy.ListFeatureClasses("*", "Point")     # List point feature classes
        line = arcpy.ListFeatureClasses("*", "Line")       # List line feature classes
        poly = arcpy.ListFeatureClasses("*", "Polygon")    # List polygon feature classes

        pointCount = len(point)                       # Count the number of point feature classes
        lineCount = len(line)                         # Count the number of line feature classes
        polyCount = len(poly)                         # Count the number of polygon feature classes

        print("\nPOINTS:"), pointCount, ("files")     # Print total for point feature classes
        print("LINES:"), lineCount, ("files")         # Print total for line feature classes
        print("POLYGONS:"), polyCount, ("files\n")    # Print total for polygon feature classes

    else:
        raw_input("\n!ERROR! - File path does not exist."             # If file path does not exist..
                  "\nPress Enter to continue. ")                      # ..display an error message..
        env.workspace = raw_input("\nPlease enter your file path: ")  # ..and ask user to..
        filePath = env.workspace                                      # ..enter it again

# Import time module and exit the program in 10 seconds
import time
time.sleep(10)

# - END PROGRAM -

# I'm gonna make him an offer he can't refuse
# - Don Vito Corleone (The Godfather)
mit
Python
61fa5c26b9b2eff24e88313671c7aa673e24bb0f
Create pythagoras.py
thedarkcoder/SPSE,ahhh/SPSE
pythagoras.py
pythagoras.py
#!/bin/python

from math import sqrt

print "a^2 + b^2 = c^2"

leg1 = raw_input("Leg1 (a): ")
leg2 = raw_input("Leg2 (b): ")

hypotenuse = sqrt((int(leg1) ** 2) + (int(leg2) ** 2))

print hypotenuse
mit
Python
c1bed8533d479112df6ae4aea0bb31e4419ae4f8
change location of jianfan lib in data repo
siongui/pali,siongui/pali,siongui/pali,wisperwinter/pali,wisperwinter/pali,siongui/pali,wisperwinter/pali
setup/setupdev.py
setup/setupdev.py
#!/usr/bin/env python
# -*- coding:utf-8 -*-

import os
import shutil

"""
$PALI_DIR is the dir of git clone https://github.com/siongui/pali.git

Manual setup (for reference):

1. setup TongWen (deprecated):

```bash
cd $PALI_DIR
mkdir -p common/app/scripts/ext
cd common/app/scripts/ext/
wget http://tongwen.openfoundry.org/src/web/tongwen_core.js
wget http://tongwen.openfoundry.org/src/web/tongwen_table_s2t.js
wget http://tongwen.openfoundry.org/src/web/tongwen_table_t2s.js
wget http://tongwen.openfoundry.org/src/web/tongwen_table_ps2t.js
wget http://tongwen.openfoundry.org/src/web/tongwen_table_pt2s.js
```

2. setup jianfan (deprecated):

```bash
wget https://python-jianfan.googlecode.com/files/jianfan-0.0.2.zip
unzip jianfan-0.0.2.zip
mv jianfan-0.0.2/jianfan $PALI_DIR/common/pylib/
rm -rf jianfan-0.0.2
```

3. create symbolic links:

```bash
cd $PALI_DIR/tipitaka
ln -s ../common/ common
cd $PALI_DIR/tipitaka/pylib
ln -s ../../../data/pali/common/translation/ translation
ln -s ../../../data/pali/common/romn/ romn
cd $PALI_DIR/dictionary
ln -s ../common/ common
cd $PALI_DIR/common/pylib
ln -s ../../../data/pylib/jianfan/ jianfan
```
"""

def ln(source, link_name):
    if os.path.islink(link_name):
        os.unlink(link_name)
    os.symlink(source, link_name)

def setupSymlinks():
    # enter tipitaka dir
    os.chdir(os.path.join(os.path.dirname(__file__), '../tipitaka'))
    ln('../common/', 'common')
    os.chdir('pylib')
    ln('../../../data/pali/common/translation/', 'translation')
    ln('../../../data/pali/common/romn/', 'romn')

    # enter dictionary dir
    os.chdir('../../dictionary')
    ln('../common/', 'common')

    # enter common dir
    os.chdir('../../common/pylib')
    ln('../../../data/pylib/jianfan/', 'jianfan')

if __name__ == '__main__':
    tipitakaLatnCssPath = os.path.join(os.path.dirname(__file__),
        '../../data/pali/common/romn/cscd/tipitaka-latn.css')
    dstPath = os.path.join(os.path.dirname(__file__),
        '../tipitaka/app/css/tipitaka-latn.css')
    shutil.copyfile(tipitakaLatnCssPath, dstPath)

    setupSymlinks()
#!/usr/bin/env python
# -*- coding:utf-8 -*-

import os
import shutil

"""
$PALI_DIR is the dir of git clone https://github.com/siongui/pali.git

Manual setup (for reference):

1. setup TongWen (deprecated):

```bash
cd $PALI_DIR
mkdir -p common/app/scripts/ext
cd common/app/scripts/ext/
wget http://tongwen.openfoundry.org/src/web/tongwen_core.js
wget http://tongwen.openfoundry.org/src/web/tongwen_table_s2t.js
wget http://tongwen.openfoundry.org/src/web/tongwen_table_t2s.js
wget http://tongwen.openfoundry.org/src/web/tongwen_table_ps2t.js
wget http://tongwen.openfoundry.org/src/web/tongwen_table_pt2s.js
```

2. setup jianfan (deprecated):

```bash
wget https://python-jianfan.googlecode.com/files/jianfan-0.0.2.zip
unzip jianfan-0.0.2.zip
mv jianfan-0.0.2/jianfan $PALI_DIR/common/pylib/
rm -rf jianfan-0.0.2
```

3. create symbolic links:

```bash
cd $PALI_DIR/tipitaka
ln -s ../common/ common
cd $PALI_DIR/tipitaka/pylib
ln -s ../../../data/pali/common/translation/ translation
ln -s ../../../data/pali/common/romn/ romn
cd $PALI_DIR/dictionary
ln -s ../common/ common
cd $PALI_DIR/common/pylib
ln -s ../../../data/pali/common/gae/libs/jianfan/ jianfan
```
"""

def ln(source, link_name):
    if os.path.islink(link_name):
        os.unlink(link_name)
    os.symlink(source, link_name)

def setupSymlinks():
    # enter tipitaka dir
    os.chdir(os.path.join(os.path.dirname(__file__), '../tipitaka'))
    ln('../common/', 'common')
    os.chdir('pylib')
    ln('../../../data/pali/common/translation/', 'translation')
    ln('../../../data/pali/common/romn/', 'romn')

    # enter dictionary dir
    os.chdir('../../dictionary')
    ln('../common/', 'common')

    # enter common dir
    os.chdir('../../common/pylib')
    ln('../../../data/pali/common/gae/libs/jianfan/', 'jianfan')

if __name__ == '__main__':
    tipitakaLatnCssPath = os.path.join(os.path.dirname(__file__),
        '../../data/pali/common/romn/cscd/tipitaka-latn.css')
    dstPath = os.path.join(os.path.dirname(__file__),
        '../tipitaka/app/css/tipitaka-latn.css')
    shutil.copyfile(tipitakaLatnCssPath, dstPath)

    setupSymlinks()
unlicense
Python
7b09ba64c0327ecea04cc95057ffa7d5c8d939c8
Add test for setopt to demonstrate that edit_config retains non-ASCII characters.
pypa/setuptools,pypa/setuptools,pypa/setuptools
setuptools/tests/test_setopt.py
setuptools/tests/test_setopt.py
# coding: utf-8

from __future__ import unicode_literals

import io

import six

from setuptools.command import setopt
from setuptools.extern.six.moves import configparser


class TestEdit:
    @staticmethod
    def parse_config(filename):
        parser = configparser.ConfigParser()
        with io.open(filename, encoding='utf-8') as reader:
            (parser.read_file if six.PY3 else parser.readfp)(reader)
        return parser

    @staticmethod
    def write_text(file, content):
        with io.open(file, 'wb') as strm:
            strm.write(content.encode('utf-8'))

    def test_utf8_encoding_retained(self, tmpdir):
        """
        When editing a file, non-ASCII characters encoded in UTF-8
        should be retained.
        """
        config = tmpdir.join('setup.cfg')
        self.write_text(config, '[names]\njaraco=йарацо')
        setopt.edit_config(str(config), dict(names=dict(other='yes')))
        parser = self.parse_config(str(config))
        assert parser['names']['jaraco'] == 'йарацо'
        assert parser['names']['other'] == 'yes'
mit
Python
cbf0d257bcbaeddeb9390047f575038b5d842dc8
update version
duoduo369/django-paginator-plus
paginator_plus/__init__.py
paginator_plus/__init__.py
# -*- coding: utf-8 -*-
__version__ = '0.0.1'
mit
Python
dc5e87f4a7bb1399951423c3a4236c58ab723665
change AjaxImageField to behave like standard django FileField
subhaoi/kioskuser,bradleyg/django-ajaximage,bradleyg/django-ajaximage,subhaoi/kioskuser,subhaoi/kioskuser,bradleyg/django-ajaximage
ajaximage/fields.py
ajaximage/fields.py
#-*- coding: utf-8 -*-
from django.core.files.storage import default_storage
from django.db.models.fields.files import FileDescriptor, FieldFile
from django.db.models import Field
from django.conf import settings

from .widgets import AjaxImageEditor


class AjaxImageField(Field):

    storage = default_storage
    attr_class = FieldFile
    descriptor_class = FileDescriptor

    def __init__(self, *args, **kwargs):
        upload_to = kwargs.pop('upload_to', '')
        max_height = kwargs.pop('max_height', 0)
        max_width = kwargs.pop('max_width', 0)
        crop = kwargs.pop('crop', False)
        crop = 1 if crop is True else 0

        if crop is 1 and (max_height is 0 or max_width is 0):
            raise Exception('Both max_width and max_height are needed if cropping')

        self.widget = AjaxImageEditor(
            upload_to=upload_to,
            max_width=max_width,
            max_height=max_height,
            crop=crop
        )

        super(AjaxImageField, self).__init__(*args, **kwargs)

    def contribute_to_class(self, cls, name, virtual_only=False):
        super(AjaxImageField, self).contribute_to_class(cls, name, virtual_only)
        setattr(cls, self.name, self.descriptor_class(self))

    def get_prep_value(self, value):
        """Returns field's value prepared for saving into a database."""
        # Need to convert File objects provided via a form to unicode for database insertion
        if value is None:
            return None
        return str(value)

    def get_internal_type(self):
        return "TextField"

    def formfield(self, **kwargs):
        defaults = {'widget': self.widget}
        defaults.update(kwargs)
        return super(AjaxImageField, self).formfield(**defaults)


if 'south' in settings.INSTALLED_APPS:
    from south.modelsinspector import add_introspection_rules
    add_introspection_rules([], ["^ajaximage\.fields\.AjaxImageField"])
#-*- coding: utf-8 -*-
from django.db.models import Field
from django.forms import widgets
from ajaximage.widgets import AjaxImageEditor
from django.conf import settings


class AjaxImageField(Field):

    def __init__(self, *args, **kwargs):
        upload_to = kwargs.pop('upload_to', '')
        max_height = kwargs.pop('max_height', 0)
        max_width = kwargs.pop('max_width', 0)
        crop = kwargs.pop('crop', False)
        crop = 1 if crop is True else 0

        if(crop is 1 and (max_height is 0 or max_width is 0)):
            raise Exception('Both max_width and max_height are needed if cropping')

        self.widget = AjaxImageEditor(upload_to=upload_to,
                                      max_width=max_width,
                                      max_height=max_height,
                                      crop=crop)

        super(AjaxImageField, self).__init__(*args, **kwargs)

    def get_internal_type(self):
        return "TextField"

    def formfield(self, **kwargs):
        defaults = {'widget': self.widget}
        defaults.update(kwargs)
        return super(AjaxImageField, self).formfield(**defaults)


if 'south' in settings.INSTALLED_APPS:
    from south.modelsinspector import add_introspection_rules
    add_introspection_rules([], ["^ajaximage\.fields\.AjaxImageField"])
mit
Python
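A sketch of how the field above would typically be attached to a model; the Photo model and the option values are illustrative, not taken from the repo, while the keyword arguments mirror the ones the field pops in __init__.

# Illustrative model definition using AjaxImageField
from django.db import models
from ajaximage.fields import AjaxImageField

class Photo(models.Model):
    image = AjaxImageField(upload_to='photos',
                           max_width=800,
                           max_height=600,
                           crop=True)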
a7ab3b5e4d75a4f5a887bafc980d24dce7983b4a
add Alfred.py
kasoki/Maestro
Alfred.py
Alfred.py
# Copyright (c) 2013 Christopher Kaster (@Kasoki)
#
# This file is part of alfred.py <https://github.com/Kasoki/alfred.py>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import os

"""
IMPORTANT: Not sure how to use this lib? Check out the "example.py" file :)
"""

Version="0.1.1"


class Handler:
    """
    Alfred.Handler, this class is responsible for handling Alfred!
    """

    def __init__(self, args=[], query="", use_no_query_string=True):
        """
        Create a new handler

        Keyword arguments:

        args -- This list should be *sys.argv* (default: [])

        query -- This string should only be used if args is not set!

        use_no_query_string -- If there is no query, should the handler
            use "NO QUERY" instead of one?
        """
        if type(args) != list:
            raise TypeError("Alfred.Handler(args): args is no list!")

        if len(args) > 1:
            self.query = args[1]
        elif query != "":
            self.query = query
        else:
            if use_no_query_string:
                self.query = "EMPTY_QUERY"
            else:
                self.query = ""

        self.items = []

    def get_current_directory(self):
        return os.getcwd()

    def query_is_empty(self):
        if self.query == "EMPTY_QUERY" or self.query == "":
            return True
        else:
            return False

    def add_item(self, item):
        """
        Adds a new Alfred.Item to this handler

        Keyword arguments:

        item -- The Alfred.Item you want to add ;)
        """
        if not isinstance(item, Item):
            raise TypeError("Alfred.Handler.add_item(item): item is no instance of Alfred.Item")

        self.items.append(item)

    def add_new_item(self, title="", subtitle="", uid=None, arg="", icon=None):
        """
        Adds a new Item to this handler without using the Alfred.Item class!

        Keyword arguments:

        title -- The title of this item

        subtitle -- The subtitle of this item

        uid -- The uid of this item (default: None)

        arg -- The argument of this item

        icon -- The icon of this item (Default: None)
        """
        self.add_item(Item(title, subtitle, uid, arg, icon))

    def __str__(self):
        return self.to_xml()

    def to_xml(self, max_results=None):
        """
        Generates a XML string

        Keyword arguments:

        max_results -- How many results should be in this string?
            (Default: None - No limitation)
        """
        xml_string = '<?xml version="1.0" encoding="UTF-8" ?>'
        xml_string += '<items>'

        counter = 0

        for item in self.items:
            xml_string += item.__str__()
            counter += 1
            if max_results is not None and counter >= max_results:
                break

        xml_string += '</items>'

        return xml_string

    def push(self, max_results=None):
        """
        Push the content to Alfred

        Keyword arguments:

        max_results -- How many results should be in this string?
            (Default: None - No limitation)
        """
        print self.to_xml(max_results)


class Item:
    def __init__(self, title="", subtitle="", uid=None, arg="", icon=None):
        """
        Creates a new Item for Alfred

        Keyword arguments:

        title -- The title of this item

        subtitle -- The subtitle of this item

        uid -- The uid of this item (default: None)

        arg -- The argument of this item

        icon -- The icon of this item (Default: None)
        """
        self.title = title
        self.subtitle = subtitle
        self.uid = uid
        self.arg = arg
        self.icon = icon

    def __str__(self):
        title = '<title>%s</title>' % self.title
        subtitle = '<subtitle>%s</subtitle>' % self.subtitle
        icon = ''

        if self.icon is not None:
            icon = '<icon>%s</icon>' % self.icon

        item_content = "%s%s%s" % (title, subtitle, icon)
        item_info = '<item uid="%s" arg="%s">%s</item>' % (self.uid, self.arg, item_content)

        return item_info
bsd-3-clause
Python
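A minimal usage sketch for the Handler/Item API above; the item values are made up, and the record itself points to an example.py for the real usage.

# Illustrative only: build two feedback items and print the XML for Alfred
import sys
import Alfred

handler = Alfred.Handler(sys.argv)
handler.add_new_item(title="Hello", subtitle="first item", uid="1", arg="hello")
handler.add_item(Alfred.Item(title="World", subtitle="second item", uid="2", arg="world"))
handler.push()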
2a963c4d13035b6f8e301a7f0240b28e0e0764d3
Create WordLadder_001.py
Chasego/cod,Chasego/codirit,Chasego/cod,Chasego/codirit,cc13ny/Allin,cc13ny/algo,Chasego/codi,Chasego/codirit,Chasego/cod,cc13ny/algo,Chasego/cod,cc13ny/algo,Chasego/cod,Chasego/codi,cc13ny/Allin,cc13ny/Allin,cc13ny/algo,cc13ny/Allin,cc13ny/Allin,Chasego/codi,Chasego/codirit,Chasego/codi,Chasego/codirit,cc13ny/algo,Chasego/codi
leetcode/127-Word-Ladder/WordLadder_001.py
leetcode/127-Word-Ladder/WordLadder_001.py
class Solution(object):
    def ladderLength(self, beginWord, endWord, wordList):
        """
        :type beginWord: str
        :type endWord: str
        :type wordList: Set[str]
        :rtype: int
        """
        if beginWord == endWord:
            return 1

        cnt = 1
        q = [beginWord]
        while q:
            nq = []
            for word in q:
                for i in range(len(beginWord)):
                    for j in 'abcdefghijklmnopqrstuvwxyz':
                        if j != word[i]:
                            nword = word[:i] + j + word[i + 1:]
                            if nword == endWord:
                                return cnt + 1
                            if nword in wordList:
                                nq.append(nword)
                                wordList.remove(nword)
            cnt += 1
            q = nq
        return 0
mit
Python
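A small illustrative check for the BFS above; the word list is the usual textbook example, not part of the record.

# Illustrative only: "hit" -> "hot" -> "dot" -> "dog" -> "cog"
words = {"hot", "dot", "dog", "lot", "log", "cog"}
print(Solution().ladderLength("hit", "cog", words))  # expected: 5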