commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
bd0bdc543ba1e44ddc9d149fbaadd12ab051614d | Add migrations | unt-libraries/django-accession,unt-libraries/django-accession | accession/migrations/0003_auto_20191101_1625.py | accession/migrations/0003_auto_20191101_1625.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.25 on 2019-11-01 16:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accession', '0002_auto_20191031_2139'),
]
operations = [
migrations.AlterField(
model_name='object',
name='object_era',
field=models.CharField(blank=True, choices=[('Pre-1770', 'Pre-1770'), ('1770-1779', '1770-1779'), ('1780-1789', '1780-1789'), ('1790-1799', '1790-1799'), ('1800-1809', '1800-1809'), ('1810-1819', '1810-1819'), ('1820-1829', '1820-1829'), ('1830-1839', '1830-1839'), ('1840-1849', '1840-1849'), ('1850-1859', '1850-1859'), ('1860-1869', '1860-1869'), ('1870-1879', '1870-1879'), ('1880-1889', '1880-1889'), ('1890-1899', '1890-1899'), ('1900-1909', '1900-1909'), ('1910-1919', '1910-1919'), ('1920-1929', '1920-1929'), ('1930-1939', '1930-1939'), ('1940-1949', '1940-1949'), ('1950-1959', '1950-1959'), ('1960-1969', '1960-1969'), ('1970-1979', '1970-1979'), ('1980-1989', '1980-1989'), ('1990-1999', '1990-1999'), ('2000-2009', '2000-2009'), ('2010-2019', '2010-2019'), ('2020-2029', '2020-2029')], max_length=10),
),
]
| bsd-3-clause | Python |
|
7b0ebe74cbaad610bb65f24cc2555d82e7d7a750 | read attachments path from settings, catch jpeg/png | SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,gmimano/commcaretest,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq | apps/photos/views.py | apps/photos/views.py | from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from rapidsms.webui.utils import render_to_response
from photos.models import Photo
import os
import settings
# default page - show all thumbnails by date
@login_required()
def recent(request, template_name="photos/list.html"):
photos = Photo.objects.all()
return render_to_response(request, template_name, {'photos' : photos})
# show a single photo + comments
@login_required()
def show(request, photo_id, template_name="photos/single.html"):
p = Photo.objects.get(id=photo_id)
return render_to_response(request, template_name, {'photo' : p})
@login_required()
def import_photos(request):
path = settings.RAPIDSMS_APPS['receiver']['attachments_path'] # -> data/attachments
def is_img(filename):
return (filename.endswith('.jpg') or filename.endswith('.jpeg') or filename.endswith('.png'))
def not_in_db_already(filename):
# Note that there's a query for each file here - another way would be to load all existing files to a list in one operation and work with that
# but, that might generate huge list when there are a lot of photos in the DB, and might cause data freshness issues in some edge cases
# so, we just do n queries each time (where n is probably not too big) instead
return (Photo.objects.filter(original_image="%s/%s" % (path, filename)).count() == 0)
files = os.listdir(path)
img_files = filter(is_img, files)
new_img_files = filter(not_in_db_already, img_files)
for f in new_img_files:
p = Photo(name=f, original_image="%s/%s" % (path, f))
p.save()
return HttpResponseRedirect("/photos")
@login_required()
def populate(request):
for i in (1,2,3):
p = Photo(name="test image #%s" % i, original_image="apps/photos/tests/test%s.jpg" % i)
p.save()
return HttpResponseRedirect("/photos")
| from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from rapidsms.webui.utils import render_to_response
from photos.models import Photo
import os
import settings
# default page - show all thumbnails by date
@login_required()
def recent(request, template_name="photos/list.html"):
photos = Photo.objects.all()
return render_to_response(request, template_name, {'photos' : photos})
# show a single photo + comments
@login_required()
def show(request, photo_id, template_name="photos/single.html"):
p = Photo.objects.get(id=photo_id)
return render_to_response(request, template_name, {'photo' : p})
@login_required()
def import_photos(request):
path = 'data/attachments' #settings.RAPIDSMS_APPS['receiver']['attachments_path']
def is_img(filename):
return filename.endswith('.jpg')
def not_in_db_already(filename):
# Note that there's a query for each file here - another way would be to load all existing files to a list in one operation and work with that
# but, that might generate huge list when there are a lot of photos in the DB, and might cause data freshness issues in some edge cases
# so, we just do n queries each time (where n is probably not too big) instead
return (Photo.objects.filter(original_image="%s/%s" % (path, filename)).count() == 0)
files = os.listdir(path)
img_files = filter(is_img, files)
new_img_files = filter(not_in_db_already, img_files)
out = ''
for f in new_img_files:
out += "%s/%s <br/> " % (path, f)
p = Photo(name=f, original_image="%s/%s" % (path, f))
p.save()
return HttpResponseRedirect("/photos")
# return HttpResponse(out)
@login_required()
def populate(request):
for i in (1,2,3):
p = Photo(name="test image #%s" % i, original_image="apps/photos/tests/test%s.jpg" % i)
p.save()
return HttpResponseRedirect("/photos")
| bsd-3-clause | Python |
fda4f436bbaea9215efa03648d2df8e413fb47dd | add class loader tests | wolcomm/rptk,wolcomm/rptk | test/test_loader.py | test/test_loader.py | # Copyright (c) 2018 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the Apache License version 2.0
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""rptk class loader test cases."""
from __future__ import print_function
from __future__ import unicode_literals
from helpers import default_format_classes, default_query_classes
import pytest
class_sets = (
default_query_classes().items(),
default_format_classes().items(),
pytest.param([("foo", "rptk.foo.FooClass")], marks=pytest.mark.xfail),
pytest.param(0, marks=pytest.mark.xfail)
)
class TestClassLoader(object):
"""Test cases for rptk class loader classes."""
@pytest.mark.parametrize("class_set", class_sets)
def test_class_loader(self, class_set):
"""Test rptk class loader."""
from rptk.load import ClassLoader
loader = ClassLoader(items=class_set)
assert isinstance(loader.class_names, list)
for name, path in class_set:
assert name in loader.class_names
assert name in loader.class_info
assert loader.class_info[name]
assert loader.get_class(name=name).__name__ in path
assert isinstance(loader.classes, list)
for cls in loader.classes:
assert isinstance(cls, type)
| apache-2.0 | Python |
|
dd75e1c5afb05c5d46adae465947fb3f893cdf6b | Create 7kyu_complete_the_pattern4.py | Orange9000/Codewars,Orange9000/Codewars | Solutions/7kyu/7kyu_complete_the_pattern4.py | Solutions/7kyu/7kyu_complete_the_pattern4.py | def pattern(n):
l=list(range(1,n+1))
return '\n'.join(''.join(map(str,l[i:])) for i in range(n))
| mit | Python |
|
422b5573b72cc2014893aa15758b9d0bc61baf05 | refactor from core.py | stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis | Synopsis/Formatters/HTML/DeclarationStyle.py | Synopsis/Formatters/HTML/DeclarationStyle.py | # $Id: DeclarationStyle.py,v 1.1 2003/11/15 19:55:06 stefan Exp $
#
# Copyright (C) 2000 Stephen Davies
# Copyright (C) 2000 Stefan Seefeld
# All rights reserved.
# Licensed to the public under the terms of the GNU LGPL (>= 2),
# see the file COPYING for details.
#
class Style:
"""This class just maintains a mapping from declaration to display style.
The style is an enumeration, possible values being: SUMMARY (only display
a summary for this declaration), DETAIL (summary and detailed info),
INLINE (summary and detailed info, where detailed info is an inline
version of the declaration even if it's a class, etc.)"""
SUMMARY = 0
DETAIL = 1
INLINE = 2
def __init__(self):
self.__dict = {}
def style_of(self, decl):
"""Returns the style of the given decl"""
SUMMARY = self.SUMMARY
DETAIL = self.DETAIL
key = id(decl)
if self.__dict.has_key(key): return self.__dict[key]
if len(decl.comments()) == 0:
# Set to summary, as this will mean no detailed section
style = SUMMARY
else:
comment = decl.comments()[0]
# Calculate the style. The default is detail
if not comment.text():
# No comment, don't show detail
style = SUMMARY
elif comment.summary() != comment.text():
# There is more to the comment than the summary, show detail
style = DETAIL
else:
# Summary == Comment, don't show detail
style = SUMMARY
# Always show tags
if comment.tags():
style = DETAIL
# Always show enums
if isinstance(decl, AST.Enum):
style = DETAIL
# Show functions if they have exceptions
if isinstance(decl, AST.Function) and len(decl.exceptions()):
style = DETAIL
# Don't show detail for scopes (they have their own pages)
if isinstance(decl, AST.Scope):
style = SUMMARY
self.__dict[key] = style
return style
__getitem__ = style_of
| lgpl-2.1 | Python |
|
b8cc84245ae7f3ceda0e0cd92b6b2eecb0426ee3 | add start of peg generator | scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown | src/mugen/parser/peg.py | src/mugen/parser/peg.py | #!/usr/bin/env python
next_var = 0
def nextVar():
global next_var;
next_var += 1;
return next_var
class Pattern:
def __init__(self):
pass
def generate(self, result):
pass
class PatternNot(Pattern):
def __init__(self, next):
Pattern.__init__(self)
self.next = next
def generate(self, result):
my_result = "result_%d" % nextVar()
data = """
Result %s = 0;
%s
%s = ! %s;
""" % (my_result, self.next.generate(my_result), result, my_result)
return data
class PatternVerbatim(Pattern):
def __init__(self, letters):
Pattern.__init__(self)
self.letters = letters
def generate(self, result):
data = """
%s = "%s";
""" % (result, self.letters)
return data
class Rule:
def __init__(self, name, patterns):
self.name = name
self.patterns = patterns
def generate(self):
result = "result_%d" % nextVar()
data = """
static Result rule_%s(){
Result %s = 0;
%s
return Result;
}
""" % (self.name, result, '\n'.join([pattern.generate(result) for pattern in self.patterns]))
return data
class Peg:
def __init__(self, start, rules):
self.start = start
self.rules = rules
def generate(self):
namespace = "Peg"
data = """
namespace %s{
%s
Result main(){
return rule_%s();
}
}
""" % (namespace, '\n'.join([rule.generate() for rule in self.rules]), self.start)
return data
def generate(peg):
print peg.generate()
def test():
rules = [
Rule("s", [PatternNot(PatternVerbatim("hello"))]),
]
peg = Peg("s", rules)
generate(peg)
test()
| bsd-3-clause | Python |
|
82f15b2dae1b23b75a019362e5925c4a3591fa92 | Create InputNeuronGroup_multiple_inputs_1.py | ricardodeazambuja/BrianConnectUDP | examples/InputNeuronGroup_multiple_inputs_1.py | examples/InputNeuronGroup_multiple_inputs_1.py | '''
Example of a spike generator (only outputs spikes)
In this example spikes are generated and sent through UDP packages. At the end of the simulation a raster plot of the
spikes is created.
'''
from brian import *
import numpy
from brian_multiprocess_udp import BrianConnectUDP
number_of_neurons_total = 40
number_of_neurons_spiking = 30
def main_NeuronGroup(input_Neuron_Group, simulation_clock):
print "main_NeuronGroup!" #DEBUG!
simclock = simulation_clock
delta_t=5
random_list=numpy.random.randint(number_of_neurons_total,size=number_of_neurons_spiking)
random_list.sort()
spiketimes = [(i, delta_t*ms) for i in random_list]
SpikesOut = SpikeGeneratorGroup(number_of_neurons_total, spiketimes, period=300*ms, clock=simclock) # the maximum clock of the input spikes is limited here (period)
MSpkOut=SpikeMonitor(SpikesOut) # Spikes sent by UDP
return ([SpikesOut],[],[MSpkOut])
def post_simulation_function(input_NG, simulation_NG, simulation_SYN, simulation_MN):
"""
input_NG: the neuron group that receives the input spikes
simulation_NG: the neuron groups list passed to the system by the user function (main_NeuronGroup)
simulation_SYN: the synapses list passed to the system by the user function (main_NeuronGroup)
simulation_MN: the monitors list passed to the system by the user function (main_NeuronGroup)
This way it is possible to plot, save or do whatever you want with these objects after the end of the simulation!
"""
figure()
raster_plot(simulation_MN[0])
title("Spikes Sent by UDP")
show(block=True)
if __name__=="__main__":
my_simulation = BrianConnectUDP(main_NeuronGroup, NumOfNeuronsOutput=number_of_neurons_total, post_simulation_function=post_simulation_function,
output_addresses=[("127.0.0.1", 14141)], simclock_dt=5, TotalSimulationTime=10000, brian_address=0)
| cc0-1.0 | Python |
|
765897a05a7aae6a89bfd62d8493fb14aa16048a | Create db_migrate.py | ASpelling/mw-trading,ASpelling/mw-trading,ASpelling/mw-trading | db_migrate.py | db_migrate.py | #!venv/bin/python
import imp
from migrate.versioning import api
from app import db
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
migration = SQLALCHEMY_MIGRATE_REPO + ('/versions/%03d_migration.py' % (v+1))
tmp_module = imp.new_module('old_model')
old_model = api.create_model(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
exec(old_model, tmp_module.__dict__)
script = api.make_update_script_for_model(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, tmp_module.meta, db.metadata)
open(migration, "wt").write(script)
api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
print('New migration saved as ' + migration)
print('Current database version: ' + str(v))
| apache-2.0 | Python |
|
c085f9b5af73a50a86d592b3d8b02b1e8e444cde | Create optsimulate.py | OpenPTrack/open_ptrack,OpenPTrack/open_ptrack,chanbrown007/open_ptrack,chanbrown007/open_ptrack,chanbrown007/open_ptrack,OpenPTrack/open_ptrack | docs/assets/optsimulate.py | docs/assets/optsimulate.py |
# OpenPTrack Sender Simulator
# Sept 13, 2015
# jburke@ucla.edu
import socket, time, json, time, random
UDP_IP = "127.0.0.1"
UDP_PORT = 21234
PERIOD = .100 # how often to publish in time
# For the random walk
MAXSTEP_X = 10
MAXSTEP_Y = 10
WOBBLE_Z = 1
Z_NOMINAL = 40
# Increasing packet seq number
_SEQ = 0
# Current message format
# https://github.com/OpenPTrack/open_ptrack/wiki/Using%20The%20Data
#
#MESSAGE = '{"header":{"seq":336988,"stamp":{"sec":1441244414,"nsec":266356327},"frame_id":"world"},"tracks":[{"id":170,"x":0.740519,"y":-3.21577,"height":1.01898,"age":79.4518,"confidence":0.491777},{"id":172,"x":0.843167,"y":-3.29433,"height":1.10497,"age":29.471,"confidence":0.500193}]}'
def track( id, x, y, height, age, confidence ) :
return {"id":id, "x":x, "y":y, "height":height, "age": age, "confidence":confidence}
def packet( tracks ) :
global _SEQ
_SEQ+=1
now = float(time.time())
sec = int(now)
nsec = int((now-sec) * 1e9)
header = { "seq":_SEQ, "stamp": {"sec":sec, "nsec":nsec}, "frame_id":"world" }
return { "header":header, "tracks":tracks }
# Provide two random walkers
# More is exercise for reader ...
def walk(W):
for w in W:
w[0] += MAXSTEP_X * 2*(random.random() - 0.5)
w[1] += MAXSTEP_Y * 2*(random.random() - 0.5)
w[2] = Z_NOMINAL + WOBBLE_Z*2*(random.random()-0.5)
walkers = [ [random.randrange(200)-100, random.randrange(200)-100, Z_NOMINAL],
[random.randrange(200)-100, random.randrange(200)-100, Z_NOMINAL] ]
print("^C to stop")
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP
try:
while True:
walk(walkers)
MESSAGE = json.dumps( packet( [ track(42, walkers[0][0], walkers[0][1], walkers[0][2], _SEQ+100+random.random(), random.random()),
track(43, walkers[1][0], walkers[1][1], walkers[1][2], _SEQ+100+random.random(), random.random())] ) )
# We throw some zeroes at the end to simulate OpenPTrack's current zero padding,
# so parsers make sure to handle it. This padding should be removed soon.
# https://github.com/OpenPTrack/open_ptrack/issues/52
payload = bytes(MESSAGE.encode('utf-8')) + bytes(bytearray(100))
sock.sendto(payload, (UDP_IP, UDP_PORT))
print(payload)
time.sleep(PERIOD)
except KeyboardInterrupt:
pass # do cleanup here
| bsd-3-clause | Python |
|
7e30de04cad1070eb84c1de0c370e950b5e2c783 | Annotate zerver.views.webhooks.pingdom. | mohsenSy/zulip,timabbott/zulip,ahmadassaf/zulip,JPJPJPOPOP/zulip,dhcrzf/zulip,dawran6/zulip,peguin40/zulip,ryanbackman/zulip,grave-w-grave/zulip,AZtheAsian/zulip,christi3k/zulip,ahmadassaf/zulip,verma-varsha/zulip,TigorC/zulip,Galexrt/zulip,arpith/zulip,Diptanshu8/zulip,rht/zulip,mohsenSy/zulip,andersk/zulip,dattatreya303/zulip,j831/zulip,zulip/zulip,christi3k/zulip,cosmicAsymmetry/zulip,showell/zulip,isht3/zulip,andersk/zulip,ryanbackman/zulip,Galexrt/zulip,sonali0901/zulip,reyha/zulip,umkay/zulip,paxapy/zulip,eeshangarg/zulip,peguin40/zulip,niftynei/zulip,showell/zulip,brainwane/zulip,samatdav/zulip,jainayush975/zulip,shubhamdhama/zulip,hackerkid/zulip,christi3k/zulip,souravbadami/zulip,rht/zulip,grave-w-grave/zulip,punchagan/zulip,souravbadami/zulip,shubhamdhama/zulip,vikas-parashar/zulip,JPJPJPOPOP/zulip,dawran6/zulip,Galexrt/zulip,jainayush975/zulip,vikas-parashar/zulip,ahmadassaf/zulip,punchagan/zulip,kou/zulip,krtkmj/zulip,dhcrzf/zulip,eeshangarg/zulip,KingxBanana/zulip,timabbott/zulip,Galexrt/zulip,samatdav/zulip,timabbott/zulip,dawran6/zulip,KingxBanana/zulip,amanharitsh123/zulip,zacps/zulip,tommyip/zulip,Juanvulcano/zulip,JPJPJPOPOP/zulip,dattatreya303/zulip,AZtheAsian/zulip,brockwhittaker/zulip,rht/zulip,punchagan/zulip,aakash-cr7/zulip,jainayush975/zulip,zacps/zulip,isht3/zulip,aakash-cr7/zulip,Jianchun1/zulip,joyhchen/zulip,j831/zulip,jrowan/zulip,SmartPeople/zulip,susansls/zulip,sup95/zulip,dhcrzf/zulip,dhcrzf/zulip,niftynei/zulip,reyha/zulip,hackerkid/zulip,kou/zulip,KingxBanana/zulip,aakash-cr7/zulip,umkay/zulip,dattatreya303/zulip,krtkmj/zulip,zulip/zulip,amyliu345/zulip,ahmadassaf/zulip,dhcrzf/zulip,jrowan/zulip,synicalsyntax/zulip,souravbadami/zulip,hackerkid/zulip,calvinleenyc/zulip,sonali0901/zulip,brainwane/zulip,JPJPJPOPOP/zulip,blaze225/zulip,isht3/zulip,aakash-cr7/zulip,eeshangarg/zulip,amanharitsh123/zulip,isht3/zulip,brockwhittaker/zulip,Diptanshu8/zulip,punchagan/zulip,sonali0901/zulip,grave-w-grave/zulip,AZtheAsian/zulip,AZtheAsian/zulip,sonali0901/zulip,AZtheAsian/zulip,cosmicAsymmetry/zulip,Diptanshu8/zulip,andersk/zulip,Diptanshu8/zulip,shubhamdhama/zulip,tommyip/zulip,vaidap/zulip,ahmadassaf/zulip,rht/zulip,PhilSk/zulip,susansls/zulip,dattatreya303/zulip,jackrzhang/zulip,eeshangarg/zulip,Jianchun1/zulip,ryanbackman/zulip,amyliu345/zulip,jackrzhang/zulip,jphilipsen05/zulip,brainwane/zulip,jackrzhang/zulip,shubhamdhama/zulip,Vallher/zulip,andersk/zulip,Vallher/zulip,shubhamdhama/zulip,synicalsyntax/zulip,arpith/zulip,aakash-cr7/zulip,samatdav/zulip,ryanbackman/zulip,joyhchen/zulip,Vallher/zulip,PhilSk/zulip,mohsenSy/zulip,krtkmj/zulip,kou/zulip,SmartPeople/zulip,hackerkid/zulip,joyhchen/zulip,jackrzhang/zulip,Juanvulcano/zulip,brainwane/zulip,rishig/zulip,showell/zulip,calvinleenyc/zulip,peguin40/zulip,sup95/zulip,souravbadami/zulip,krtkmj/zulip,paxapy/zulip,mahim97/zulip,Vallher/zulip,andersk/zulip,TigorC/zulip,tommyip/zulip,aakash-cr7/zulip,rishig/zulip,vaidap/zulip,niftynei/zulip,shubhamdhama/zulip,amyliu345/zulip,zacps/zulip,peguin40/zulip,dawran6/zulip,brainwane/zulip,Galexrt/zulip,Galexrt/zulip,joyhchen/zulip,reyha/zulip,mahim97/zulip,joyhchen/zulip,tommyip/zulip,umkay/zulip,arpith/zulip,niftynei/zulip,vikas-parashar/zulip,samatdav/zulip,sup95/zulip,mohsenSy/zulip,vaidap/zulip,ahmadassaf/zulip,jrowan/zulip,Juanvulcano/zulip,arpith/zulip,PhilSk/zulip,brainwane/zulip,zulip/zulip,krtkmj/zulip,synicalsyntax/zulip,arpith/zulip,zulip/zulip,dhcrzf/zulip,sonali0901/zulip,arpith/zulip,umkay/zulip,rht/zulip,Diptanshu8/zulip,susansls/zulip,brockwhittaker/zulip,Jianchun1/zulip,eeshangarg/zulip,andersk/zulip,jphilipsen05/zulip,verma-varsha/zulip,verma-varsha/zulip,vaidap/zulip,synicalsyntax/zulip,zulip/zulip,SmartPeople/zulip,samatdav/zulip,j831/zulip,verma-varsha/zulip,paxapy/zulip,Vallher/zulip,SmartPeople/zulip,jrowan/zulip,vabs22/zulip,Diptanshu8/zulip,umkay/zulip,verma-varsha/zulip,krtkmj/zulip,rht/zulip,synicalsyntax/zulip,vabs22/zulip,rishig/zulip,sharmaeklavya2/zulip,synicalsyntax/zulip,jainayush975/zulip,Galexrt/zulip,paxapy/zulip,reyha/zulip,kou/zulip,vaidap/zulip,showell/zulip,jrowan/zulip,kou/zulip,shubhamdhama/zulip,cosmicAsymmetry/zulip,jphilipsen05/zulip,mahim97/zulip,mohsenSy/zulip,mahim97/zulip,sharmaeklavya2/zulip,grave-w-grave/zulip,timabbott/zulip,samatdav/zulip,calvinleenyc/zulip,umkay/zulip,blaze225/zulip,showell/zulip,Jianchun1/zulip,showell/zulip,jphilipsen05/zulip,sharmaeklavya2/zulip,amanharitsh123/zulip,andersk/zulip,PhilSk/zulip,j831/zulip,timabbott/zulip,TigorC/zulip,hackerkid/zulip,verma-varsha/zulip,TigorC/zulip,amanharitsh123/zulip,zulip/zulip,j831/zulip,reyha/zulip,rishig/zulip,amyliu345/zulip,mahim97/zulip,jackrzhang/zulip,mohsenSy/zulip,blaze225/zulip,umkay/zulip,brockwhittaker/zulip,amanharitsh123/zulip,SmartPeople/zulip,sharmaeklavya2/zulip,rishig/zulip,vabs22/zulip,hackerkid/zulip,jackrzhang/zulip,punchagan/zulip,calvinleenyc/zulip,eeshangarg/zulip,Jianchun1/zulip,cosmicAsymmetry/zulip,zacps/zulip,Jianchun1/zulip,synicalsyntax/zulip,tommyip/zulip,isht3/zulip,eeshangarg/zulip,kou/zulip,isht3/zulip,jphilipsen05/zulip,tommyip/zulip,calvinleenyc/zulip,rishig/zulip,kou/zulip,ryanbackman/zulip,timabbott/zulip,blaze225/zulip,souravbadami/zulip,JPJPJPOPOP/zulip,vabs22/zulip,amanharitsh123/zulip,punchagan/zulip,susansls/zulip,hackerkid/zulip,PhilSk/zulip,KingxBanana/zulip,tommyip/zulip,TigorC/zulip,zulip/zulip,christi3k/zulip,cosmicAsymmetry/zulip,brainwane/zulip,rht/zulip,jainayush975/zulip,dawran6/zulip,JPJPJPOPOP/zulip,zacps/zulip,niftynei/zulip,christi3k/zulip,dhcrzf/zulip,amyliu345/zulip,peguin40/zulip,showell/zulip,brockwhittaker/zulip,souravbadami/zulip,Juanvulcano/zulip,PhilSk/zulip,vaidap/zulip,vabs22/zulip,sup95/zulip,jrowan/zulip,susansls/zulip,zacps/zulip,dattatreya303/zulip,paxapy/zulip,joyhchen/zulip,timabbott/zulip,Juanvulcano/zulip,sup95/zulip,reyha/zulip,ryanbackman/zulip,Juanvulcano/zulip,jackrzhang/zulip,vabs22/zulip,KingxBanana/zulip,paxapy/zulip,dawran6/zulip,krtkmj/zulip,mahim97/zulip,j831/zulip,vikas-parashar/zulip,sharmaeklavya2/zulip,punchagan/zulip,ahmadassaf/zulip,grave-w-grave/zulip,grave-w-grave/zulip,Vallher/zulip,sonali0901/zulip,jainayush975/zulip,susansls/zulip,sharmaeklavya2/zulip,amyliu345/zulip,sup95/zulip,blaze225/zulip,rishig/zulip,Vallher/zulip,blaze225/zulip,SmartPeople/zulip,christi3k/zulip,KingxBanana/zulip,TigorC/zulip,dattatreya303/zulip,calvinleenyc/zulip,vikas-parashar/zulip,cosmicAsymmetry/zulip,jphilipsen05/zulip,peguin40/zulip,brockwhittaker/zulip,AZtheAsian/zulip,vikas-parashar/zulip,niftynei/zulip | zerver/views/webhooks/pingdom.py | zerver/views/webhooks/pingdom.py | # Webhooks for external integrations.
from __future__ import absolute_import
from typing import Any
from django.utils.translation import ugettext as _
from django.http import HttpRequest, HttpResponse
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import Client, UserProfile
import ujson
import six
PINGDOM_SUBJECT_TEMPLATE = '{name} status.'
PINGDOM_MESSAGE_TEMPLATE = 'Service {service_url} changed its {type} status from {previous_state} to {current_state}.'
PINGDOM_MESSAGE_DESCRIPTION_TEMPLATE = 'Description: {description}.'
SUPPORTED_CHECK_TYPES = (
'HTTP',
'HTTP_CUSTOM'
'HTTPS',
'SMTP',
'POP3',
'IMAP',
'PING',
'DNS',
'UDP',
'PORT_TCP',
)
@api_key_only_webhook_view('Pingdom')
@has_request_variables
def api_pingdom_webhook(request, user_profile, client, payload=REQ(argument_type='body'), stream=REQ(default='pingdom')):
# type: (HttpRequest, UserProfile, Client, Dict[str, Any], six.text_type) -> HttpResponse
check_type = get_check_type(payload)
if check_type in SUPPORTED_CHECK_TYPES:
subject = get_subject_for_http_request(payload)
body = get_body_for_http_request(payload)
else:
return json_error(_('Unsupported check_type: {check_type}').format(check_type=check_type))
check_send_message(user_profile, client, 'stream', [stream], subject, body)
return json_success()
def get_subject_for_http_request(payload):
# type: (Dict[str, Any]) -> six.text_type
return PINGDOM_SUBJECT_TEMPLATE.format(name=payload['check_name'])
def get_body_for_http_request(payload):
# type: (Dict[str, Any]) -> six.text_type
current_state = payload['current_state']
previous_state = payload['previous_state']
data = {
'service_url': payload['check_params']['hostname'],
'previous_state': previous_state,
'current_state': current_state,
'type': get_check_type(payload)
}
body = PINGDOM_MESSAGE_TEMPLATE.format(**data)
if current_state == 'DOWN' and previous_state == 'UP':
description = PINGDOM_MESSAGE_DESCRIPTION_TEMPLATE.format(description=payload['long_description'])
body += '\n{description}'.format(description=description)
return body
def get_check_type(payload):
# type: (Dict[str, Any]) -> six.text_type
return payload['check_type']
| # Webhooks for external integrations.
from __future__ import absolute_import
from django.utils.translation import ugettext as _
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
import ujson
PINGDOM_SUBJECT_TEMPLATE = '{name} status.'
PINGDOM_MESSAGE_TEMPLATE = 'Service {service_url} changed its {type} status from {previous_state} to {current_state}.'
PINGDOM_MESSAGE_DESCRIPTION_TEMPLATE = 'Description: {description}.'
SUPPORTED_CHECK_TYPES = (
'HTTP',
'HTTP_CUSTOM'
'HTTPS',
'SMTP',
'POP3',
'IMAP',
'PING',
'DNS',
'UDP',
'PORT_TCP',
)
@api_key_only_webhook_view('Pingdom')
@has_request_variables
def api_pingdom_webhook(request, user_profile, client, payload=REQ(argument_type='body'), stream=REQ(default='pingdom')):
check_type = get_check_type(payload)
if check_type in SUPPORTED_CHECK_TYPES:
subject = get_subject_for_http_request(payload)
body = get_body_for_http_request(payload)
else:
return json_error(_('Unsupported check_type: {check_type}').format(check_type=check_type))
check_send_message(user_profile, client, 'stream', [stream], subject, body)
return json_success()
def get_subject_for_http_request(payload):
return PINGDOM_SUBJECT_TEMPLATE.format(name=payload['check_name'])
def get_body_for_http_request(payload):
current_state = payload['current_state']
previous_state = payload['previous_state']
data = {
'service_url': payload['check_params']['hostname'],
'previous_state': previous_state,
'current_state': current_state,
'type': get_check_type(payload)
}
body = PINGDOM_MESSAGE_TEMPLATE.format(**data)
if current_state == 'DOWN' and previous_state == 'UP':
description = PINGDOM_MESSAGE_DESCRIPTION_TEMPLATE.format(description=payload['long_description'])
body += '\n{description}'.format(description=description)
return body
def get_check_type(payload):
return payload['check_type']
| apache-2.0 | Python |
d8c359b27d371f5bd66825202860a0a376a2466c | add script to convert old plans to new ones | jamesmarva/myria,jamesmarva/myria,uwescience/myria,uwescience/myria,bsalimi/myria,bsalimi/myria,bsalimi/myria,uwescience/myria,jamesmarva/myria | jsonQueries/old_to_new_plan.py | jsonQueries/old_to_new_plan.py | #!/usr/bin/env python
import json
import sys
def read_json(filename):
with open(filename, 'r') as f:
return json.load(f)
def uniquify_fragments(query_plan):
fragment_inv = []
for worker in sorted(query_plan.keys()):
worker_plan = query_plan[worker]
for fragment in worker_plan:
flag = False
for (i,(x,y)) in enumerate(fragment_inv):
if y == fragment:
fragment_inv[i] = (x + [worker], y)
flag = True
break
if flag:
continue
fragment_inv.append(([worker], fragment))
return fragment_inv
def json_pretty(obj):
return json.dumps(obj, sort_keys=True, indent=4, separators=(',', ': '))
if __name__ == "__main__":
if len(sys.argv) != 2:
print >> sys.stderr, "Usage: %s <old json file>" % sys.argv[0]
sys.exit(1)
myria_json_plan = read_json(sys.argv[1])
fragments = []
frags = uniquify_fragments(myria_json_plan['query_plan'])
for (ws,ops) in frags:
fragments.append({
'workers' : ws,
'operators' : ops
})
output = {
'raw_datalog' : myria_json_plan['raw_datalog'],
'logical_ra' : myria_json_plan['logical_ra'],
'fragments' : fragments
}
print json_pretty(output)
| bsd-3-clause | Python |
|
f71ce70330f7dea86820f1d9cdc390ea972aaeca | add 2s-complement | EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank | algorithms/bit-manipulation/2s-complement.py | algorithms/bit-manipulation/2s-complement.py | import sys
def ones(x):
uCount = x - ((x >> 1) & 033333333333) - ((x >> 2) & 011111111111);
return ((uCount + (uCount >> 3)) & 030707070707) % 63;
def count(x):
if x >= 0:
if x == 0:
return 0
if x % 2 == 0:
return count(x - 1) + ones(x)
return (x + 1) / 2 + 2 * count(x / 2)
else:
x += 1
return 32 * (1 - x) - count(-x)
def solve(A, B):
if A >= 0:
if A == 0:
return count(B)
return count(B) - count(A - 1)
else:
if B >= 0:
return count(A) + count(B)
return count(A) - count(B + 1)
if __name__ == '__main__':
T = int(sys.stdin.readline())
for i in range(T):
[A, B] = map(int, sys.stdin.readline().split());
#print count(A), count(B)
print solve(A, B)
| mit | Python |
|
173565f7f2b9ffa548b355a0cbc8f972f1445a50 | Add test coverage for rdopkg.guess version2tag and tag2version | redhat-openstack/rdopkg,redhat-openstack/rdopkg,openstack-packages/rdopkg,openstack-packages/rdopkg | tests/test_guess.py | tests/test_guess.py | from rdopkg import guess
from collections import namedtuple
import pytest
VersionTestCase = namedtuple('VersionTestCase', ('expected', 'input_data'))
data_table_good = [
VersionTestCase(('1.2.3', None), '1.2.3'),
VersionTestCase(('1.2.3', 'vX.Y.Z'), 'v1.2.3'),
VersionTestCase(('1.2.3', 'VX.Y.Z'), 'V1.2.3'),
VersionTestCase(('banana', None), 'banana'),
]
data_table_bad = [
VersionTestCase((None, None), None),
VersionTestCase((None, None), []),
VersionTestCase((None, None), ()),
VersionTestCase((None, None), ''),
VersionTestCase((None, None), {}),
]
data_table_ugly = [
VersionTestCase((None, None), ('foo', 'bar', 'bah')),
VersionTestCase((None, None), ['foo', 'bar', 'bah']),
VersionTestCase((None, None), {'foo': 'bar'}),
]
def test_table_data_good_tag2version():
for entry in data_table_good:
assert entry.expected == guess.tag2version(entry.input_data)
def test_table_data_bad_tag2version():
for entry in data_table_bad:
# Input Validation should probably return to us (None, None)
# assert entry.expected == guess.tag2version(entry.input_data)
assert (entry.input_data, None) == guess.tag2version(entry.input_data)
def test_table_data_ugly_tag2version():
for entry in data_table_ugly:
# TODO: probably should be a more specific exception
with pytest.raises(Exception):
guess.tag2version(entry.input_data)
def test_version2tag_simple():
assert '1.2.3' == guess.version2tag('1.2.3')
def test_version2tag_type1():
assert 'v1.2.3' == guess.version2tag('1.2.3', 'vX.Y.Z')
def test_version2tag_type2():
assert 'V1.2.3' == guess.version2tag('1.2.3', 'VX.Y.Z')
| apache-2.0 | Python |
|
e50060ca76c667b77db433ca03ef640140831dc9 | Add migration for dagman_metrics | pegasus-isi/pegasus-metrics,pegasus-isi/pegasus-metrics,pegasus-isi/pegasus-metrics | migrations/004_add_dagman_metrics.py | migrations/004_add_dagman_metrics.py | import migrations
conn = migrations.connect()
cur = conn.cursor()
cur.execute("""
create table dagman_metrics (
id INTEGER UNSIGNED NOT NULL,
ts DOUBLE,
remote_addr VARCHAR(15),
hostname VARCHAR(256),
domain VARCHAR(256),
version VARCHAR(10),
wf_uuid VARCHAR(36),
root_wf_uuid VARCHAR(36),
start_time DOUBLE,
end_time DOUBLE,
duration FLOAT,
exitcode SMALLINT,
dagman_id VARCHAR(32),
parent_dagman_id VARCHAR(32),
jobs INTEGER,
jobs_failed INTEGER,
jobs_succeeded INTEGER,
dag_jobs INTEGER,
dag_jobs_failed INTEGER,
dag_jobs_succeeded INTEGER,
dag_status INTEGER,
planner VARCHAR(1024),
planner_version VARCHAR(10),
rescue_dag_number INTEGER,
total_job_time DOUBLE,
total_jobs INTEGER,
total_jobs_run INTEGER,
PRIMARY KEY (id),
FOREIGN KEY (id) REFERENCES raw_data(id) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
""")
conn.commit()
cur.close()
| apache-2.0 | Python |
|
dc314e50a573f3ecb2cf41d1e08df29ea991d3b6 | Add migrations versions | SerryJohns/bucket-list | migrations/versions/d71a3e9499ef_.py | migrations/versions/d71a3e9499ef_.py | """empty message
Revision ID: d71a3e9499ef
Revises:
Create Date: 2017-11-21 23:19:12.740735
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd71a3e9499ef'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=50), nullable=True),
sa.Column('password_hash', sa.String(length=128), nullable=True),
sa.Column('email', sa.String(length=120), nullable=True),
sa.Column('surname', sa.String(length=100), nullable=False),
sa.Column('first_name', sa.String(length=100), nullable=False),
sa.Column('active', sa.Boolean(), nullable=True),
sa.Column('date_created', sa.DateTime(), nullable=True),
sa.Column('date_modified', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('username')
)
op.create_table('bucket_list',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('interests', sa.String(length=120), nullable=True),
sa.Column('date_created', sa.DateTime(), nullable=True),
sa.Column('date_modified', sa.DateTime(), nullable=True),
sa.Column('created_by', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['created_by'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('item',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('status', sa.Text(), nullable=True),
sa.Column('date_accomplished', sa.DateTime(), nullable=True),
sa.Column('date_created', sa.DateTime(), nullable=True),
sa.Column('date_modified', sa.DateTime(), nullable=True),
sa.Column('bucketlists', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['bucketlists'], ['bucket_list.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('item')
op.drop_table('bucket_list')
op.drop_table('user')
# ### end Alembic commands ###
| mit | Python |
|
5bac311ac9da94edbd08b0b43c5214ba6b9fc1c8 | add scollable pages | brendonparker/nuimo-py-web | app2.py | app2.py | from webkit import WebView
import pygtk
pygtk.require('2.0')
import gtk, threading, time
from nuimo import NuimoScanner, Nuimo, NuimoDelegate
class App:
def __init__(self):
window = gtk.Window(gtk.WINDOW_TOPLEVEL)
fixed = gtk.Fixed()
views = [WebView(), WebView(), WebView()]
width = gtk.gdk.screen_width()
height = gtk.gdk.screen_height()
for idx, view in enumerate(views):
view.set_usize(width, height)
fixed.put(views[idx], -width+(idx*width), 0)
window.add(fixed)
#self.loadUrls()
window.fullscreen()
window.show_all()
views[0].open('http://google.com?q=page1')
views[1].open('http://google.com?q=page2')
views[2].open('http://google.com?q=page3')
self.views = views
self.fixed = fixed
self.x = 0
self.width = width
def rotate(self, val):
w = self.width
x = self.x = (self.x - val) % (3 * w)
for idx, view in enumerate(self.views):
if idx == 0 and x > w:
self.fixed.move(view, ((idx+3)*w)-x, 0)
else:
self.fixed.move(view, (idx*w)-x, 0)
def loadUrls(self):
self.current = 0
try:
with open('urls.csv') as f:
self.urls = f.readlines()
#remove empties
self.urls = filter(None, self.urls)
except:
print 'failed to read urls.csv'
self.urls = ['http://google.com']
def next(self):
self.current = (self.current + 1) % len(self.urls)
self.view.open(self.urls[self.current])
def previous(self):
self.current = self.current - 1
if self.current < 0:
self.current = len(self.urls) - 1
self.view.open(self.urls[self.current])
class CustomNuimoDelegate(NuimoDelegate):
def __init__(self, nuimo, app):
NuimoDelegate.__init__(self, nuimo)
self.app = app
def handleRotation(self, value):
NuimoDelegate.handleRotation(self, value)
gtk.idle_add(app.rotate, value)
def showImagesOnNuimo(nuimo):
nuimo.displayLedMatrix(
" " +
" *** " +
" * * * " +
" * * " +
" *** * " +
" * * " +
" * * " +
" * * " +
" ", 2.0)
time.sleep(2)
nuimo.displayLedMatrix(
" ** ** " +
" * * * * " +
" ***** " +
" * * " +
" * * * * " +
" * * * " +
" * * * * " +
" * * " +
" *** ", 20.0)
def main():
try:
gtk.main()
except Exception, e:
print '%s', e
return 0
if __name__ == "__main__":
app = App()
def nuimo_process():
def foundDevice(addr):
print 'found device: ' + addr
nuimo = Nuimo(addr)
nuimo.set_delegate(CustomNuimoDelegate(nuimo, app))
nuimo.connect()
showImagesOnNuimo(nuimo)
while True:
nuimo.waitForNotifications()
while True:
try:
NuimoScanner().start(foundDevice)
except Exception, e:
print 'failed to connect to nuimo: %s' % e
time.sleep(5)
thread = threading.Thread(target=nuimo_process)
thread.daemon = True
thread.start()
main()
| mit | Python |
|
4933e4ca107516a667ae3449337746bf7e002cc2 | Create bkvm.py | rmx-tools/rmx-internals | bkvm.py | bkvm.py | #!/usr/bin/python
import commands, time
def prepareTarget():
print "prepare backup Target"
print "---------------------"
cmd = "mount -t cifs //10.0.0.9/public/BK\ VM\ XEN -o username=xxx,password=yyy /bak/"
output = commands.getoutput(cmd)
cmd = "ls -lht --time-style=\"long-iso\" /bak/"
output = commands.getoutput(cmd)
print output
print "..."
def releaseTarget():
print "release backup Target"
print "---------------------"
cmd = "ls -lht --time-style=\"long-iso\" /bak/"
output = commands.getoutput(cmd)
print output
cmd = "umount /bak/"
output = commands.getoutput(cmd)
print "..."
def get_backup_vms():
result = []
cmd = "xe vm-list is-control-domain=false is-a-snapshot=false power-state=running"
output = commands.getoutput(cmd)
for vm in output.split("\n\n\n"):
lines = vm.splitlines()
uuid = lines[0].split(":")[1][1:]
name = lines[1].split(":")[1][1:]
result += [(uuid, name)]
return result
def backup_vm(uuid, filename, timestamp):
cmd = "xe vm-snapshot uuid=" + uuid + " new-name-label=" + timestamp
snapshot_uuid = commands.getoutput(cmd)
cmd = "xe template-param-set is-a-template=false ha-always-run=false uuid="
cmd = cmd + snapshot_uuid
commands.getoutput(cmd)
cmd = "rm " + filename+".tmp"
commands.getoutput(cmd)
cmd = "xe vm-export vm=" + snapshot_uuid + " filename=" + filename+".tmp"
(status,output)=commands.getstatusoutput(cmd)
if (status==0):
cmd = "rm " + filename + " ; mv " + filename+".tmp"+ " " + filename
commands.getoutput(cmd)
else:
print "Error"
print output
cmd = "xe vm-uninstall uuid=" + snapshot_uuid + " force=true"
commands.getoutput(cmd)
prepareTarget()
print "Backup Running VMs"
print "------------------"
for (uuid, name) in get_backup_vms():
timestamp = time.strftime("%Y%m%d-%H%M", time.gmtime())
# filename = "\"/bak/" + timestamp + " " + name + ".xva\""
filename = "\"/bak/" + name + ".xva\""
print timestamp, uuid, name," to ", filename
backup_vm(uuid, filename, timestamp)
print "..."
releaseTarget()
| apache-2.0 | Python |
|
e050d9ce4fb4d63ec7857f581033258f87c805b0 | Create pyPdfMerger.py | johnhimics/automations,johnhimics/automations | pyPdfMerger.py | pyPdfMerger.py | # -*- coding: utf-8 -*-
"""
TITLE: pyPdfMerger.py
AUTHOR: John Himics
EMAIL: john@johnhimics.com
TIMEZONE: EST
VERSION: 0
DESCRIPTION: Merges pdf files together
DEPENDANCIES: PyPDF2
"""
from PyPDF2 import PdfFileMerger
#Global Variables
merger = PdfFileMerger()
#Methods
#Program starts here
if __name__ == "__main__":
input1 = open("C:\PFile\@ActiveProjects\1050LF Yeild Issues\Emails\All emails 11-18-13 2.pdf", "rb")
input2 = open("C:\PFile\@ActiveProjects\1050LF Yeild Issues\Emails\Wade 343005 [compatibility mode].pdf", "rb")
input3 = open("C:\PFile\@ActiveProjects\1050LF Yeild Issues\Emails\1050LF Mill Mix MDR.pdf", "rb")
# add the first 3 pages of input1 document to output
#merger.append(fileobj = input1, pages = (0,3))
# insert the first page of input2 into the output beginning after the second page
#merger.merge(position = 2, fileobj = input2, pages = (0,1))
# append entire input3 document to the end of the output document
merger.append(input1)
merger.append(input2)
merger.append(input3)
# Write to an output PDF document
output = open("C:\PFile\@ActiveProjects\1050LF Yeild Issues\Emails\document-output.pdf", "wb")
merger.write(output)
| mit | Python |
|
62e65ae978b703b6af0b594e958e79d467e83421 | add 63 | ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler | python/p063.py | python/p063.py | def g(power):
count = 0
i = 1
min = 10**(power - 1)
max = 10**power - 1
while True:
result = i**power
if result >= min:
if result <= max:
count += 1
else:
break
i += 1
return count
count = 0
for i in xrange(1, 1000):
current = g(i)
if current > 0:
count += current
else:
break
print count
| bsd-3-clause | Python |
|
600bf1bbce7db5f62d55537a33d4586fa2892d8a | Create conf.py | jcketz/PIGAL,jcketz/PIGAL | conf.py | conf.py | #OK
| mit | Python |
|
66c8c6f587c49f587901cf6a9cf7e122d110d668 | Add migration to encrypt secrets | jobscore/sync-engine,jobscore/sync-engine,jobscore/sync-engine,jobscore/sync-engine | migrations/versions/3bac7f8ccfdb_encrypt_secrets.py | migrations/versions/3bac7f8ccfdb_encrypt_secrets.py | """encrypt_secrets
Revision ID: 3bac7f8ccfdb
Revises: 291237183b82
Create Date: 2019-01-14 17:35:58.872052
"""
# revision identifiers, used by Alembic.
revision = '3bac7f8ccfdb'
down_revision = '291237183b82'
from alembic import op, context
import sqlalchemy as sa
# def upgrade():
# op.add_column('secret',
# sa.Column('secret', sa.String(length=512), nullable=True))
# import nacl.secret
# import nacl.utils
# from inbox.ignition import engine, engine_manager
# from inbox.models.session import session_scope
# from inbox.config import config
# print engine_manager.engines
# _engine = engine_manager.engines[0]
# Base = sa.ext.declarative.declarative_base()
# Base.metadata.reflect(_engine)
# key = config.get_required('SECRET_ENCRYPTION_KEY')
# class Secret(Base):
# __table__ = Base.metadata.tables['secret']
# with session_scope(0, versioned=False) as db_session:
# secrets = db_session.query(Secret).filter(
# Secret.encryption_scheme == 0,
# Secret._secret.isnot(None)).order_by(Secret.id).all()
# for s in secrets:
# unencrypted = s._secret
# nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)
# s.secret = nacl.secret.SecretBox(
# key=key,
# encoder=nacl.encoding.HexEncoder
# ).encrypt(
# plaintext=unencrypted,
# nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)
# )
# # Picked arbitrarily
# # s.acl_id = 0
# # s.type = 0
# db_session.add(s)
# db_session.commit()
# op.drop_column('secret', '_secret')
def upgrade():
from inbox.config import config
import nacl.secret
import nacl.utils
from inbox.ignition import engine_manager
from inbox.models.session import session_scope
shard_id = int(context.get_x_argument(as_dictionary=True).get('shard_id'))
engine = engine_manager.engines[shard_id]
Base = sa.ext.declarative.declarative_base()
Base.metadata.reflect(engine)
class Secret(Base):
__table__ = Base.metadata.tables['secret']
class GenericAccount(Base):
__table__ = Base.metadata.tables['genericaccount']
with session_scope(shard_id, versioned=False) as db_session:
secrets = db_session.query(Secret).filter(
Secret._secret.isnot(None),
Secret.encryption_scheme == 0).all()
# Join on the genericaccount and optionally easaccount tables to
# determine which secrets should have type 'password'.
generic_query = db_session.query(Secret.id).join(
GenericAccount, Secret.id == GenericAccount.password_id)
password_secrets = [id_ for id_, in generic_query]
if engine.has_table('easaccount'):
class EASAccount(Base):
__table__ = Base.metadata.tables['easaccount']
eas_query = db_session.query(Secret.id).join(
EASAccount).filter(Secret.id == EASAccount.password_id)
password_secrets.extend([id_ for id_, in eas_query])
for s in secrets:
plain = s._secret.encode('utf-8') if isinstance(s._secret, unicode) \
else s._secret
if config.get_required('ENCRYPT_SECRETS'):
s._secret = nacl.secret.SecretBox(
key=config.get_required('SECRET_ENCRYPTION_KEY'),
encoder=nacl.encoding.HexEncoder
).encrypt(
plaintext=plain,
nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE))
# 1 is EncryptionScheme.SECRETBOX_WITH_STATIC_KEY
s.encryption_scheme = 1
else:
s._secret = plain
if s.id in password_secrets:
s.type = 'password'
else:
s.type = 'token'
db_session.add(s)
db_session.commit()
def downgrade():
pass
| agpl-3.0 | Python |
|
45cb6df45df84cb9ae85fc8aa15710bde6a15bad | Add create image functional negative tests | rahulunair/nova,rahulunair/nova,mahak/nova,vmturbo/nova,klmitch/nova,hanlind/nova,openstack/nova,mikalstill/nova,jianghuaw/nova,klmitch/nova,hanlind/nova,vmturbo/nova,Juniper/nova,rajalokan/nova,Juniper/nova,klmitch/nova,gooddata/openstack-nova,rajalokan/nova,Juniper/nova,klmitch/nova,rajalokan/nova,phenoxim/nova,mahak/nova,gooddata/openstack-nova,openstack/nova,gooddata/openstack-nova,mahak/nova,mikalstill/nova,Juniper/nova,phenoxim/nova,jianghuaw/nova,vmturbo/nova,mikalstill/nova,hanlind/nova,rajalokan/nova,openstack/nova,vmturbo/nova,rahulunair/nova,jianghuaw/nova,gooddata/openstack-nova,jianghuaw/nova | nova/tests/functional/test_images.py | nova/tests/functional/test_images.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api import client
from nova.tests.functional import test_servers
class ImagesTest(test_servers.ServersTestBase):
def test_create_images_negative_invalid_state(self):
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
server_id = created_server['id']
found_server = self._wait_for_state_change(created_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Create image
name = 'Snapshot 1'
self.api.post_server_action(
server_id, {'createImage': {'name': name}})
self.assertEqual('ACTIVE', found_server['status'])
# Confirm that the image was created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertTrue(found_image)
# Change server status from ACTIVE to SHELVED for negative test
self.flags(shelved_offload_time = -1)
self.api.post_server_action(server_id, {'shelve': {}})
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SHELVED', found_server['status'])
# Create image in SHELVED (not ACTIVE, etc.)
name = 'Snapshot 2'
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_action,
server_id,
{'createImage': {'name': name}})
self.assertEqual(409, ex.response.status_code)
self.assertEqual('SHELVED', found_server['status'])
# Confirm that the image was not created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertFalse(found_image)
# Cleanup
self._delete_server(server_id)
| apache-2.0 | Python |
|
18e2263a636e97519272a21562cbba4b978fcf49 | Create EmailForm | alchermd/headlines,alchermd/headlines | headlines/forms.py | headlines/forms.py | from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, SubmitField
from wtforms.validators import DataRequired, Email
class EmailForm(FlaskForm):
""" Form used to submit messages to the admin. """
name = StringField('Name')
reply_to = StringField('Email', validators=[Email(), DataRequired()])
message = TextAreaField('Message', validators=[DataRequired()])
submit = SubmitField('Submit') | mit | Python |
|
61b21d1ec14e0be683f8da2b92b3ca2aa9fdcf59 | add sample for api caller | inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree | InvenTree/plugin/samples/integration/api_caller.py | InvenTree/plugin/samples/integration/api_caller.py | """
Sample plugin for calling an external API
"""
from django.utils.translation import ugettext_lazy as _
from plugin import IntegrationPluginBase
from plugin.mixins import APICallMixin, SettingsMixin
class SampleApiCallerPlugin(APICallMixin, SettingsMixin, IntegrationPluginBase):
"""
A small api call sample
"""
PLUGIN_NAME = "Sample API Caller"
SETTINGS = {
'API_TOKEN': {
'name': 'API Token',
'protected': True,
},
'API_URL': {
'name': 'External URL',
'description': 'Where is your API located?',
'default': 'https://reqres.in',
},
}
API_URL_SETTING = 'API_URL'
API_TOKEN_SETTING = 'API_TOKEN'
def get_external_url(self):
"""
returns data from the sample endpoint
"""
return self.api_call('api/users/2')
| mit | Python |
|
1a68a1a461a66c4a4aaf3a19a607ab64475cb05c | Create simpleExamples.py | uchouinard/MechDSO | simpleExamples.py | simpleExamples.py | import DSM as dsmx
import random as rnd
import copy
def example1():
myDSM=dsmx.DSM('example')
## adding components
myDSM.addComponent(['c1'])
myDSM.addComponent(['c2'])
myDSM.addComponent(['c3'])
#
myDSM.display()
print "--------"
## adding relations between existing components
myDSM.addRelation(['c1'], ['c2'], [1])
myDSM.addRelation(['c3'], ['c1'], [1])
myDSM.addRelation(['c2'], ['c3'], [1])
myDSM.display()
print "--------"
## adding relations with non existing elements
myDSM.addRelation(['c4'], ['c5'], [1.0])
myDSM.display()
#using pandas for better visualisation
myDSM.dispPDFrame()
def example2():
### simple examples un-directional dsm
myDSMU=dsmx.DSM('example undirectional','simple','no')
## adding components
myDSMU.addComponent(['c1'])
myDSMU.addComponent(['c2'])
myDSMU.addComponent(['c3'])
#
myDSMU.display()
print "--------"
## adding relations between existing components
myDSMU.addRelation(['c1'], ['c2'], [1])
myDSMU.addRelation(['c3'], ['c1'], [1])
myDSMU.addRelation(['c2'], ['c3'], [1])
myDSMU.display()
print "--------"
## adding relations with non existing elements
myDSMU.addRelation(['c4'], ['c5'], [1.0])
myDSMU.display()
def example3():
### simple examples for array inputs
myDSM=dsmx.DSM('example array')
#print 'creating a list of elements'
myList=list(range(0,10))
#print myList
## adding components
myDSM.addComponent(myList)
#print 'creating two shuffled list'
rnd.shuffle(myList)
myList1=copy.copy(myList)
rnd.shuffle(myList)
myList2=copy.copy(myList)
#print myList1
#print myList2
#print "--------"
#
myDSM.display()
print "--------"
## adding relations between existing components
myDSM.addRelation(myList1, myList2, [1.0]*len(myList))
myDSM.display()
print "--------"
## adding relations with non existing elements
#using pandas for better visualisation
myDSM.dispPDFrame()
def example4():
## Example using Interactions
#Based on interactions of Pimmler and Eppinger (1994)
## http://web.mit.edu/eppinger/www/pdf/Pimmler_DTM1994.pdf
## required = 2
## desired = 1
## indifferent = 0 (default value)
## undesired = -1
## detrimental = -2
##
## create a dict of format [ S E
## I M ]
###########################################################
myDSM2=dsmx.DSM(name='example 2', dsmType='interactions')
#adding components
myDSM2.addComponent(['c1'])
myDSM2.addComponent(['c2'])
myDSM2.addComponent(['c3'])
#
myDSM2.display()
print "--------"
## adding relations between existing components
# using complete interaction list
myDSM2.addRelation(['c1'], ['c2'], [{'s':1, 'e':0, 'i':0 ,'m':-2}])
myDSM2.addRelation(['c3'], ['c1'], [{'s':0, 'e':1, 'i':1 ,'m':0}])
#one interaction at a time
myDSM2.addRelation(['c2'], ['c3'], [{'s':2}])
myDSM2.addRelation(['c2'], ['c3'], [{'e':-1}])
myDSM2.addRelation(['c2'], ['c3'], [{'i':0}])
myDSM2.addRelation(['c2'], ['c3'], [{'m':-1}])
#using lists of components and interactions, and new components
myDSM2.addRelation(['c4', 'c6'], ['c5', 'c4'], [{'s':1, 'e':1, 'i':1 ,'m':1},{'s':-1, 'e':1, 'i':-1 ,'m':-2}])
myDSM2.display()
print "--------"
myDSM2.dispPDFrame()
| mit | Python |
|
d5fcaf05d100d3fe709b34b8f6b839736773a130 | Create dict.py | Myselfminer/nCrypt | dict.py | dict.py | import random
a=["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s"\
,"t","u","v","w","x","y","z"]
def create():
dictionary=open("dictionary.py","w")
tablewenn=["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s"\
,"t","u","v","w","x","y","z"," "]
tablewennupper=["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S"\
,"T","U","V","W","X","Y","Z"]
tabledann=["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s"\
,"t","u","v","w","x","y","z","A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S"\
,"T","U","V","W","X","Y","Z"," "]
dictionary.write("def ver(letter):\n")
entkeys=[]
for i in tablewenn:
returning=random.choice(tabledann)
tabledann.remove(returning)
dictionary.write(" if letter == '"+i+"' :\n return '"+returning+"'\n")
entkeys.append([returning,i])
for i in tablewennupper:
returning=random.choice(tabledann)
tabledann.remove(returning)
dictionary.write(" if letter == '"+i+"' :\n return '"+returning+"'\n")
entkeys.append([returning,i])
dictionary.write(" else:\n return letter\n")
dictionary.write("def ent(letter):\n")
for i in entkeys:
dictionary.write(" if letter == '"+i[0]+"':\n return '"+i[1]+"'\n")
dictionary.write(" else:\n return letter")
def debug():
pass
| apache-2.0 | Python |
|
2fba29b90156e844d7d61a15c9ad9c37e2b5dfe2 | load template | cathywu/flow,cathywu/flow | examples/aimsun/load_template.py | examples/aimsun/load_template.py | """
Load an already existing Aimsun template and run the simulation
"""
from flow.core.experiment import Experiment
from flow.core.params import AimsunParams, EnvParams, NetParams
from flow.core.params import VehicleParams
from flow.envs import TestEnv
from flow.scenarios.loop import Scenario
from flow.controllers.rlcontroller import RLController
sim_params = AimsunParams(
sim_step=0.1,
render=True,
emission_path='data',
subnetwork_name="Subnetwork 8028981")
env_params = EnvParams()
vehicles = VehicleParams()
vehicles.add(
veh_id="rl",
acceleration_controller=(RLController, {}),
num_vehicles=22)
scenario = Scenario(
name="test",
vehicles=vehicles,
net_params=NetParams(template="/Users/nathan/internship/I-210Pasadena/I-210subnetwork.ang")
)
env = TestEnv(env_params, sim_params, scenario, simulator='aimsun')
exp = Experiment(env)
exp.run(1, 3000)
| mit | Python |
|
c6f6278c1915ef90e8825f94cc33a4dea4124722 | Add http directory listing with content display | dgengtek/scripts,dgengtek/scripts | network/http_server_cat.py | network/http_server_cat.py | #!/bin/env python3
import http.server
import string
import click
import pathlib
import urllib.parse
import os
@click.command()
@click.argument("port", required=False)
@click.option("-s", "--server", default="0.0.0.0")
def main(port, server):
if not port:
port = 8888
http_server = http.server.HTTPServer((server, port), PostHandler)
print('Starting server on {0}:{1}, use <Ctrl-C> to stop'.format(
server, port))
http_server.serve_forever()
class PostHandler(http.server.BaseHTTPRequestHandler):
cwd = pathlib.Path(".")
def do_GET(self):
body_file_cat = string.Template("$content")
body_dir_list = string.Template("""
<h1>Directory listing for $cwd</h1>
<ul>
$items
</ul>
""")
page = string.Template("""<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Directory listing for $cwd</title>
</head>
<body>
$body
</body>
</html>
""")
path = urllib.parse.urlparse(self.path)
fs_path = pathlib.Path("{}{}".format(self.cwd, path.path))
prefix_ref = "{}/".format(path.path)
if fs_path.is_file():
body = body_file_cat
content = ""
with fs_path.open() as f:
content = "".join(f.readlines())
content = "<pre>{}</pre>".format(content)
body = body.substitute(content=content)
else:
body = body_dir_list
items = list()
item_template = string.Template('<li><a href="$item_path">$item_name</a></li>')
for p in fs_path.iterdir():
item_path = urllib.parse.urljoin(prefix_ref, p.name)
item_name = p.name
if os.path.isdir(p):
item_name = "{}/".format(item_name)
items.append(item_template.substitute(item_path=item_path, item_name=item_name))
body = body.substitute(cwd=fs_path, items="\n".join(items))
page = page.substitute(cwd=fs_path, body=body)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(page.encode("UTF-8"))
if __name__ == '__main__':
main()
| mit | Python |
|
8fa81263cfcc63f6bf22ed2ad50103f91bc43b21 | Create hira.py | tomohiko/8946 | hira.py | hira.py | #coding:utf-8
import hashlib
start = ord(u'あ')
end = ord(u'ん')
hira = []
print "Create hiragana"
for i in range(start, end+1, 1):
hira.append(unichr(i).encode('utf-8'))
num = len(hira)
for i4 in range(num):
for i3 in range(num):
for i2 in range(num):
for i1 in range(num):
msg = hira[i1] + hira[i2] + hira[i3] + hira[i4]
print msg,
print hashlib.md5(msg).hexdigest()
| apache-2.0 | Python |
|
92bc1ad22b6147f61ef4b51b16e115109bc04596 | add build.gyp | 256481788jianghao/opengl_test,256481788jianghao/opengl_test,256481788jianghao/opengl_test | build.gyp | build.gyp | {
'targets':[
{
'target_name':'start_first',
'type':'executable',
'dependencies':[],
'defines':[],
'include_dirs':[],
'sources':[
'start_first/opengl_first.c',
],
'libraries':[
'-lGLU -lGL -lglut'
],
'conditions':[]
}
],
}
| apache-2.0 | Python |
|
45a0b65106f665872f14780e93ab9f09e65bbce3 | add genRandomGraph.py | zhfkt/ComplexCi,zhfkt/ComplexCi,zhfkt/ComplexCi,zhfkt/ComplexCi,zhfkt/ComplexCi | ComplexCiPython/genRandomGraph.py | ComplexCiPython/genRandomGraph.py | import networkx
import sys
if len(sys.argv) < 2:
print ("python genRandomGraph.py [output folder]");
input()
sys.exit(0);
outputPath = sys.argv[1]
G=networkx.erdos_renyi_graph(100000,3/100000)
networkx.write_edgelist(G, outputPath + "/genRandomGraph.csv", data=False , delimiter=',')
| mit | Python |
|
3b15fb1d43bad6d6cf2112538d1de8c1710d0272 | add test for within_page_range | ScorpionResponse/freelancefinder,ScorpionResponse/freelancefinder,ScorpionResponse/freelancefinder | freelancefinder/freelancefinder/tests/test_within_page_range_templatetag.py | freelancefinder/freelancefinder/tests/test_within_page_range_templatetag.py | """Test the within_page_range function."""
from ..templatetags.within_page_range import within_filter
def test_in_range_above():
"""One page above current should be displayed."""
test_page = 5
current_page = 4
result = within_filter(test_page, current_page)
assert result
def test_in_range_below():
"""One page below current should be displayed."""
test_page = 3
current_page = 4
result = within_filter(test_page, current_page)
assert result
def test_out_of_range_above():
"""20 pages above current should not be displayed."""
test_page = 74
current_page = 54
result = within_filter(test_page, current_page)
assert not result
def test_out_of_range_below():
"""20 pages below current should not be displayed."""
test_page = 34
current_page = 54
result = within_filter(test_page, current_page)
assert not result
| bsd-3-clause | Python |
|
0c315f766b31c105c60b39746db977d6702955ca | Remove unneeded model attributes | manhhomienbienthuy/pythondotorg,manhhomienbienthuy/pythondotorg,proevo/pythondotorg,Mariatta/pythondotorg,proevo/pythondotorg,Mariatta/pythondotorg,python/pythondotorg,Mariatta/pythondotorg,manhhomienbienthuy/pythondotorg,manhhomienbienthuy/pythondotorg,python/pythondotorg,python/pythondotorg,python/pythondotorg,proevo/pythondotorg,Mariatta/pythondotorg,proevo/pythondotorg | successstories/views.py | successstories/views.py | from django.contrib import messages
from django.core.urlresolvers import reverse
from django.utils.decorators import method_decorator
from django.views.generic import CreateView, DetailView, ListView
from honeypot.decorators import check_honeypot
from .forms import StoryForm
from .models import Story, StoryCategory
class ContextMixin:
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx['category_list'] = StoryCategory.objects.all()
return ctx
class StoryCreate(ContextMixin, CreateView):
model = Story
form_class = StoryForm
template_name = 'successstories/story_form.html'
success_message = (
'Your success story submission has been recorded. '
'It will be reviewed by the PSF staff and published.'
)
@method_decorator(check_honeypot)
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
def get_success_url(self):
return reverse('success_story_create')
def form_valid(self, form):
messages.add_message(self.request, messages.SUCCESS, self.success_message)
return super().form_valid(form)
class StoryDetail(ContextMixin, DetailView):
template_name = 'successstories/story_detail.html'
context_object_name = 'story'
def get_queryset(self):
if self.request.user.is_staff:
return Story.objects.select_related()
return Story.objects.select_related().published()
class StoryList(ListView):
template_name = 'successstories/story_list.html'
context_object_name = 'stories'
def get_queryset(self):
return Story.objects.select_related().published()
class StoryListCategory(ContextMixin, DetailView):
model = StoryCategory
| from django.contrib import messages
from django.core.urlresolvers import reverse
from django.utils.decorators import method_decorator
from django.views.generic import CreateView, DetailView, ListView
from honeypot.decorators import check_honeypot
from .forms import StoryForm
from .models import Story, StoryCategory
class ContextMixin:
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx['category_list'] = StoryCategory.objects.all()
return ctx
class StoryCreate(ContextMixin, CreateView):
model = Story
form_class = StoryForm
template_name = 'successstories/story_form.html'
success_message = (
'Your success story submission has been recorded. '
'It will be reviewed by the PSF staff and published.'
)
@method_decorator(check_honeypot)
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
def get_success_url(self):
return reverse('success_story_create')
def form_valid(self, form):
messages.add_message(self.request, messages.SUCCESS, self.success_message)
return super().form_valid(form)
model = Story
class StoryDetail(ContextMixin, DetailView):
template_name = 'successstories/story_detail.html'
context_object_name = 'story'
def get_queryset(self):
if self.request.user.is_staff:
return Story.objects.select_related()
return Story.objects.select_related().published()
class StoryList(ListView):
model = Story
template_name = 'successstories/story_list.html'
context_object_name = 'stories'
def get_queryset(self):
return Story.objects.select_related().published()
class StoryListCategory(ContextMixin, DetailView):
model = StoryCategory
| apache-2.0 | Python |
9abb8108f62451fb993a398c8165a4605e40ec4a | Add tests for JSONPResponseMiddleware | Code4SA/mapit,chris48s/mapit,Sinar/mapit,Code4SA/mapit,chris48s/mapit,opencorato/mapit,chris48s/mapit,Sinar/mapit,opencorato/mapit,opencorato/mapit,Code4SA/mapit | mapit/tests/test_middleware.py | mapit/tests/test_middleware.py | from django.test import TestCase
from django.test.client import RequestFactory
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ..middleware import JSONPMiddleware
class JSONPMiddlewareTest(TestCase):
def setUp(self):
self.middleware = JSONPMiddleware()
self.factory = RequestFactory()
def test_process_response_ignores_302_redirects(self):
request = self.factory.get("/dummy_url", {"callback": "xyz"})
response = HttpResponsePermanentRedirect("/new_url")
middleware_response = self.middleware.process_response(request, response)
self.assertEqual(middleware_response, response)
def test_process_response_uses_callback(self):
request = self.factory.get("/dummy_url", {"callback": "xyz"})
response = HttpResponse(content="blah")
middleware_response = self.middleware.process_response(request, response)
self.assertEqual(middleware_response.content, u'xyz(blah)')
def test_process_response_uses_ignores_requests_without_callback(self):
request = self.factory.get("/dummy_url")
response = HttpResponse(content="blah")
middleware_response = self.middleware.process_response(request, response)
self.assertEqual(middleware_response, response)
def test_process_response_callback_allowed_characters(self):
request = self.factory.get("/dummy_url", {"callback": "xyz123_$."})
response = HttpResponse(content="blah")
middleware_response = self.middleware.process_response(request, response)
self.assertEqual(middleware_response.content, u'xyz123_$.(blah)')
# Try with a character not allowed in the callback
request = self.factory.get("/dummy_url", {"callback": "xyz123_$.["})
response = HttpResponse(content="blah")
middleware_response = self.middleware.process_response(request, response)
self.assertEqual(middleware_response, response)
| agpl-3.0 | Python |
|
e20d3ff6147b857cb9a8efa32bfb4ee80610dd34 | Revert "dump" | assassinen/python_training | dump/fastMessageReaderOriginal.py | dump/fastMessageReaderOriginal.py | #!/usr/bin/python
import sys
import re
# ============================================================================
class MessageReader:
messageRegexp = r"s*(\w+)\[\d+\]=(.*?)(?=\s\w+\[\d+\]|$)";
def __init__(self, fileName):
self.fileName = fileName
#self.file = open(fileName, encoding="utf8")
self.file = open(fileName)
self.carryover = "";
def __del__(self):
self.file.close()
def getMessage(self):
if (self.carryover != ""):
line = self.carryover
self.carryover = ""
else:
line = self.file.readline()
while (line.startswith('ApplVerID') is not True):
if not line: return {}
line = self.file.readline()
message = dict(re.findall(self.messageRegexp, line))
message['entries'] = []
line = self.file.readline();
noEntries = re.sub(".*?NoMDEntries\[268\]\s*=\s*(\d+)[^\d]*", r'\1', line)
if (noEntries == line):
self.carryover = line;
return message
for i in range(int(noEntries)):
line = self.file.readline().split(':')[1].strip()
entry = dict(re.findall(self.messageRegexp, line))
message["entries"].append(entry)
return message
# ============================================================================
| apache-2.0 | Python |
|
f917c7ccfbe22a50049e76957a05f35eaaa46b2a | migrate child table | DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | polling_stations/apps/addressbase/migrations/0010_remove_onsud_ctry_flag.py | polling_stations/apps/addressbase/migrations/0010_remove_onsud_ctry_flag.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-02-15 14:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("addressbase", "0009_onsud_ced")]
operations = [migrations.RemoveField(model_name="onsud", name="ctry_flag")]
| bsd-3-clause | Python |
|
1553cdda2edc16368ba2281616923e849f09bdee | Create matching_{x,y}.py | JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking | hacker_rank/regex/repetitions/matching_{x,y}.py | hacker_rank/regex/repetitions/matching_{x,y}.py | Regex_Pattern = r'^\d{1,2}[a-zA-Z]{3,}\W{0,3}$' # Do not delete 'r'.
| mit | Python |
|
527a53ee1e43f59462b94b50ea997058836a7031 | Create voicersss-inmoovservice-test.py | MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab | home/moz4r/Test/voicersss-inmoovservice-test.py | home/moz4r/Test/voicersss-inmoovservice-test.py | i01 = Runtime.createAndStart("i01", "InMoov")
i01.mouth = Runtime.createAndStart("i01.mouth", "voiceRSS")
python.subscribe(i01.mouth.getName(),"publishStartSpeaking")
python.subscribe(i01.mouth.getName(),"publishEndSpeaking")
def onEndSpeaking(text):
print "end speak"
def onStartSpeaking(text):
print "start speak"
i01.mouth.setKey("6b714718f09e48c9a7f260e385ca99a4")
i01.mouth.setVoice("fr-fr");
i01.mouth.speakBlocking(u"test accent utf8 : éléphant")
| apache-2.0 | Python |
|
75980fc2e2f63e210f1e58e9a1d56c09072aa04e | add play_camera.py | physacco/cv-test,physacco/cv-test,physacco/cv-test,physacco/cv-test | python/video/play_camera.py | python/video/play_camera.py | #!/usr/bin/env python3
# encoding: utf-8
# pylint: disable=no-member
"""Play a video with OpenCV."""
import sys
import cv2
def main():
"""The main function of this module."""
cv2.namedWindow('video', cv2.WINDOW_AUTOSIZE)
cap = cv2.VideoCapture(0)
i = 0
while cap.isOpened():
ret, frame = cap.read()
if not ret: # done
break
i += 1
if i == 1:
print frame.shape, frame.dtype, frame.size
cv2.imshow('video', frame)
key = cv2.waitKey(30)
if key & 0xFF == ord('q'): # quit
break
cap.release()
cv2.destroyAllWindows()
if __name__ == '__main__':
main()
| unlicense | Python |
|
6dfc5a3d7845633570b83aac06c47756292cf8ac | Add tests for get_uid() method for common DB models. | dennybaa/st2,StackStorm/st2,pixelrebel/st2,Itxaka/st2,Plexxi/st2,pixelrebel/st2,nzlosh/st2,punalpatel/st2,nzlosh/st2,Itxaka/st2,emedvedev/st2,dennybaa/st2,tonybaloney/st2,Plexxi/st2,punalpatel/st2,peak6/st2,dennybaa/st2,StackStorm/st2,tonybaloney/st2,peak6/st2,StackStorm/st2,StackStorm/st2,armab/st2,alfasin/st2,nzlosh/st2,emedvedev/st2,Itxaka/st2,punalpatel/st2,armab/st2,pixelrebel/st2,alfasin/st2,lakshmi-kannan/st2,tonybaloney/st2,lakshmi-kannan/st2,emedvedev/st2,armab/st2,peak6/st2,nzlosh/st2,Plexxi/st2,lakshmi-kannan/st2,alfasin/st2,Plexxi/st2 | st2common/tests/unit/test_db_model_uids.py | st2common/tests/unit/test_db_model_uids.py | # contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.models.db.pack import PackDB
from st2common.models.db.sensor import SensorTypeDB
from st2common.models.db.action import ActionDB
from st2common.models.db.rule import RuleDB
from st2common.models.db.trigger import TriggerTypeDB
from st2common.models.db.trigger import TriggerDB
__all__ = [
'DBModelUIDFieldTestCase'
]
class DBModelUIDFieldTestCase(unittest2.TestCase):
def test_get_uid(self):
pack_db = PackDB(ref='ma_pack')
self.assertEqual(pack_db.get_uid(), 'pack:ma_pack')
sensor_type_db = SensorTypeDB(name='sname', pack='spack')
self.assertEqual(sensor_type_db.get_uid(), 'sensor_type:spack:sname')
action_db = ActionDB(name='aname', pack='apack', runner_info={})
self.assertEqual(action_db.get_uid(), 'action:apack:aname')
rule_db = RuleDB(name='rname', pack='rpack')
self.assertEqual(rule_db.get_uid(), 'rule:rpack:rname')
trigger_type_db = TriggerTypeDB(name='ttname', pack='ttpack')
self.assertEqual(trigger_type_db.get_uid(), 'trigger_type:ttpack:ttname')
trigger_db = TriggerDB(name='tname', pack='tpack')
self.assertTrue(trigger_db.get_uid().startswith('trigger:tpack:tname:'))
| apache-2.0 | Python |
|
5d64acfd475ca0bb0db2ef7c032fc4ee16df4f75 | remove highlight table | pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot,pajlada/pajbot,pajlada/pajbot,pajlada/tyggbot,pajlada/tyggbot,pajlada/tyggbot | alembic/versions/186928676dbc_remove_highlights.py | alembic/versions/186928676dbc_remove_highlights.py | """remove_highlights
Revision ID: 186928676dbc
Revises: f163a00a02aa
Create Date: 2019-06-01 15:14:13.999836
"""
# revision identifiers, used by Alembic.
revision = '186928676dbc'
down_revision = 'f163a00a02aa'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tb_stream_chunk_highlight')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tb_stream_chunk_highlight',
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
sa.Column('stream_chunk_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('created_at', mysql.DATETIME(), nullable=False),
sa.Column('highlight_offset', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('description', mysql.VARCHAR(length=128), nullable=True),
sa.Column('override_link', mysql.VARCHAR(length=256), nullable=True),
sa.Column('thumbnail', mysql.TINYINT(display_width=1), autoincrement=False, nullable=True),
sa.Column('created_by', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.Column('last_edited_by', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['stream_chunk_id'], ['tb_stream_chunk.id'], name='tb_stream_chunk_highlight_ibfk_1'),
sa.PrimaryKeyConstraint('id'),
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
# ### end Alembic commands ###
| mit | Python |
|
8658ad72c74306617e58ca82ff0f3fdba35bd353 | implement auto build database interface | free-free/pyblog,free-free/pyblog,free-free/pyblog,free-free/pyblog | app/tools/dbautocreat.py | app/tools/dbautocreat.py | #-*- coding:utf-8 -*-
import asyncio
import aiomysql
from tools.config import Config
class AutoCreate(obj):
def __init__(self):
pass
def _create_db(self):
pass
def _create_field_type(self):
pass
def _create_field_primary_key(self):
pass
def _create_field_unique_key(self):
pass
def _create_auto_increment(self):
pass
def _create_default(self):
pass
def _create_table(self):
pass
def run(self):
pass
@asyncio.coroutine
def auto_create():
conn=yield from aiomysql.connect(db=Config.database.database,
host=Config.database.host,
password=Config.database.password,
user=Config.database.user)
cursor =yield from conn.cursor()
yield from cursor.execute('show databases;')
ret=yield from cursor.fetchall()
print(ret)
if __name__=='__main__':
loop=asyncio.get_event_loop()
loop.run_until_complete(asyncio.wait([auto_create()]))
loop.close()
| mit | Python |
|
36b8c44f8c2554109ab4ab09add9ac10fae20781 | add entities orm | clicheio/cliche,item4/cliche,item4/cliche,clicheio/cliche,clicheio/cliche | cliche/services/tvtropes/entities.py | cliche/services/tvtropes/entities.py | from sqlalchemy import Column, DateTime, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
Base = declarative_base()
__all__ = 'Entity'
class Entity(Base):
namespace = Column(String, primary_key=True)
name = Column(String, primary_key=True)
url = Column(String)
last_crawled = Column(DateTime)
type = Column(String)
relations = relationship('Relation', foreign_keys=[namespace, name],
primaryjoin='and_(Entity.namespace == \
Relation.origin_namespace, \
Entity.name == Relation.origin)',
collection_class=set)
def __init__(self, namespace, name, url, last_crawled, type):
self.namespace = namespace
self.name = name
self.url = url
self.last_crawled = last_crawled
self.type = type
def __repr__(self):
return "<Entity('%s', '%s', '%s', '%s', '%s')" % (
self.namespace, self.name, self.url, str(self.last_crawled),
self.type
)
__tablename__ = 'entities'
__repr_columns__ = namespace, name
class Relation(Base):
origin_namespace = Column(String, ForeignKey(Entity.namespace),
primary_key=True)
origin = Column(String, ForeignKey(Entity.name), primary_key=True)
destination_namespace = Column(String, primary_key=True)
destination = Column(String, primary_key=True)
origin_entity = relationship('Entity',
foreign_keys=[origin_namespace, origin])
def __init__(self, origin_namespace, origin, destination_namespace,
destination):
self.origin_namespace = origin_namespace
self.origin = origin
self.destination_namespace = destination_namespace
self.destination = destination
__tablename__ = 'relations'
__repr_columns__ = origin_namespace, origin, destination_namespace, \
destination
| mit | Python |
|
ad664a7722da63d783a2b9d73077d91a8a012057 | Create hello.py | WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17 | Python/hello.py | Python/hello.py | print("hello world!!!")
| mit | Python |
|
dfed8f837b5fe07445b3914b33c1dab1b0b5741b | add basic UAV object incl. very basic search algo | DakotaNelson/freesearch | uav.py | uav.py | import random
class Uav:
def __init__(x,y, worldMap):
self.x = x
self.y = y
self.worldMap = worldMap
self.sensorStrength = None
def setMap(self, newMap):
self.worldMap = newMap
def nextStep(self):
""" where should we go next tick? """
options = self.surroundingValues()
m = max(a)
maxIndexes = [i for i, j in enumerate(a) if j == m]
return random.choice(maxIndexes)
def surroundingValues(self):
return [self.worldMap[self.x][self.y+1],
self.worldMap[self.x+1][self.y],
self.worldMap[self.x][self.y-1],
self.worldMap[self.x-1][self.y]]
| mit | Python |
|
8d1946c9656ea6c29d4730a68cbf4610152cd98b | make migrations | sebastianlan/wedfairy-api | poll/migrations/0002_vote_user_id.py | poll/migrations/0002_vote_user_id.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('poll', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='vote',
name='user_id',
field=models.IntegerField(default=None),
preserve_default=False,
),
]
| mit | Python |
|
64109dddedb7441456ae8e255c6a4b20ccaa6a73 | Create ReinhardNorm.py | DigitalSlideArchive/HistomicsTK,DigitalSlideArchive/HistomicsTK | ReinhardNorm.py | ReinhardNorm.py | import numpy
def ReinhardNorm(I, TargetMu, TargetSigma):
'''
Performs Reinhard color normalization to transform the color characteristics of an image to
a desired standard. The standard is defined by the mean and standard deviations of
the target image in LAB color space defined by Ruderman. The input image is converted to
Ruderman's LAB space, the LAB channels are each centered and scaled to zero-mean unit
variance, and then rescaled and shifted to match the target image statistics.
*Inputs:
I (rgbimage) - an RGB image of type unsigned char.
TargetMu - a 3-element list containing the means of the target image channels in LAB
color space.
TargetSigma - a 3-element list containing the standard deviations of the target image
channels in LAB color space.
*Outputs:
Normalized (rgbimage) - a normalized RGB image with corrected color characteristics.
*Related functions:
RudermanLABFwd, RudermanLABInv
*References:
Erik Reinhard, Michael Ashikhmin, Bruce Gooch, and Peter Shirley. 2001. Color Transfer between Images. IEEE Comput. Graph. Appl. 21, 5 (September 2001), 34-41.
Daniel Ruderman, Thomas Cronin, Chuan-Chin Chiao, Statistics of Cone Responses to Natural Images: Implications for Visual Coding, J. Optical Soc. of America, vol. 15, no. 8, 1998, pp. 2036-2045.
'''
#get input image dimensions
m = I.shape[0]
n = I.shape[1]
#convert input image to LAB color space
LAB = RudermanLAB(I)
#center and scale to zero-mean and unit variance
Mu = LAB.sum(axis=0).sum(axis=0)
LAB[:,:,0] = LAB[:,:,0] - Mu[0]
LAB[:,:,1] = LAB[:,:,1] - Mu[1]
LAB[:,:,2] = LAB[:,:,2] - Mu[2]
Sigma = (LAB*LAB).sum(axis=0).sum(axis=0) / (m*n-1)
LAB[:,:,0] = LAB[:,:,0] / Sigma[0]
LAB[:,:,1] = LAB[:,:,1] / Sigma[1]
LAB[:,:,2] = LAB[:,:,2] / Sigma[2]
#rescale and recenter to match target statistics
LAB[:,:,0] = LAB[:,:,0] * TargetSigma[0] + TargetMu[0]
LAB[:,:,1] = LAB[:,:,1] * TargetSigma[1] + TargetMu[1]
LAB[:,:,2] = LAB[:,:,2] * TargetSigma[2] + TargetMu[2]
#convert back to RGB colorspace
Normalized = RudermanLABInv(LAB)
return(Normalized)
| apache-2.0 | Python |
|
ebabfa0e14bdfd061e248285b8f7b5473f5a676e | Create convert_to_morse.py | clhq/work-tools | morse_code/convert_to_morse.py | morse_code/convert_to_morse.py | from ConfigParser import SafeConfigParser
import string
target = 'target.txt'
def parse_ini():
parser = SafeConfigParser()
parser.read('conversion.ini')
morselist = list(string.ascii_uppercase)
number = 0
for i in morselist:
i = parser.get('CONVERSIONS', i)
morselist[number] = i
number += 1
return morselist
def convert_target():
with open(target, "r") as targetfile:
targetstring = targetfile.read()
for i in xrange(0, len(targetstring)):
print targetstring[i]
if any(character in targetstring)
pass
morselist = parse_ini()
#print morselist
capital_alphabet = list(string.ascii_uppercase)
lower_alphabet = list(string.ascii_lowercase)
#print capital_alphabet
#print lower_alphabet
convert_target()
| mit | Python |
|
fcb311ffd264821767f58c92e96101aa8086acf5 | rewrite DHKE.py as crypto.py | aburgd/DHKE-py | crypto.py | crypto.py | import random
import time
timestamp = int(time.time())
random.seed(timestamp)
def gen_check(n):
if not isprime(n):
while not isprime(n):
n = random.randint(0, timestamp)
def input_check(n):
if not isprime(n):
n = input("Sorry, that number isn't prime. Please try another: ")
def isprime(n):
'''check if integer n is a prime'''
# make sure n is a positive integer
n = abs(int(n))
# 0 and 1 are not primes
if n < 2:
return False
# 2 is the only even prime number
if n == 2:
return True
# all other even numbers are not primes
if not n & 1:
return False
# range starts with 3 and only needs to go up the squareroot of n
# for all odd numbers
for x in range(3, int(n**0.5) + 1, 2):
if n % x == 0:
return False
return True
def publicKey():
resp = input("Do you have a shared base integer? (y/n): ")
if resp.lower() == "y":
b = input("Please enter your shared base integer: ")
input_check(b)
elif resp.lower() == "n":
b = random.randint(0, timestamp)
gen_check(b)
print("Your shared base integer is: ", b)
resp = input("Do you have a secret integer? (y/n): ")
if resp.lower() == "y":
alex = input("Please enter your secret integer: ")
input_check(alex)
elif resp.lower() == "n":
alex = random.randint(0, timestamp)
gen_check(alex)
print("Your secret integer is: ", alex)
resp = input("Do you have a shared modulus? (y/n): ")
if resp.lower() == "y":
mp = input("Please enter your shared modulus: ")
input_check(mp)
elif resp.lower() == "n":
mp = random.randint(0, timestamp)
gen_check(mp)
print("Your shared modulus is: ", mp)
b = int(b)
alex = int(alex)
mp = int(mp)
pubKey = b ** alex
pubKey = pubKey % mp
return pubKey
def sharedSecret():
pK = input("Please enter your public key: ")
mp = input("Please enter your shared modulus: ")
alex = input("Please enter your secret integer: ")
sharedSec = (int(pK) ** int(alex)) % int(mp)
return sharedSec
answer = input("Would you like to calculate a public key, or a shared secret? ")
if answer.lower() == "public key":
public = publicKey()
print("Your public key is: ", public)
elif answer.lower() == "shared secret":
shared = sharedSecret()
print("Your shared secret is: ", shared)
| mit | Python |
|
81c793870387910cd0c4eda02b2b95588a02cc7f | Add files via upload | I-Spangler/Enigma-Machine | cypher.py | cypher.py | #!/usr/bin/python
import argparse, sys
ALPHABET_SIZE = 26
parser = argparse.ArgumentParser(description='Encrypts a message from a text file.')
parser.add_argument('walzenlage1', metavar='w1', type=int, action='store',
help='')
parser.add_argument('walzenlage2', metavar='w2', type=int, action='store',
help='')
parser.add_argument('walzenlage3', metavar='w3', type=int, action='store',
help='')
parser.add_argument('ringstellung', metavar='rs', type=str, action='store',
help='')
#parser.add_argument('--decrypt', nargs='?', const=decrypt, default=encrypt,
#help='decrypts the message')
parser.add_argument('file', metavar='filename', type=str,
help='name or path to the file wich contains your message')
args = parser.parse_args()
text = open(args.file, 'r')
msg = text.read()
lenmsg = len(msg)
w1 = args.walzenlage1
w2 = args.walzenlage2
w3 = args.walzenlage3
rs1 = args.ringstellung
#inicia os rotores
class Rotor:
config = {'1':[13, 17, 21, 16, 15, 24, 9, 25, 4, 18, 14, 8, 0, 20, 10, 19, 11, 1, 12, 22, 3, 6, 23, 5, 7, 2],
'2':[17, 8, 18, 2, 11, 1, 6, 19, 24, 10, 16, 14, 7, 4, 23, 13, 0, 25, 20, 12, 22, 5, 9, 15, 21, 3],
'3':[24, 16, 13, 0, 18, 12, 3, 25, 21, 8, 10, 15, 22, 2, 6, 7, 5, 17, 14, 1, 9, 11, 20, 23, 4, 19],
'Reflector':[14, 18, 1, 19, 25, 21, 5, 3, 24, 7, 8, 23, 4, 0, 9, 15, 6, 16, 12, 13, 10, 22, 20, 2, 17, 11]
}
def __init__(self, Id):
self.len = ALPHABET_SIZE
self.numbers = self.config[Id]
def rotate(self):
init = self.numbers[0]
for index in range (0, self.len-1):
self.numbers[index] = self.numbers[index+1]
self.numbers[self.len-1] = init
def set(self, rs):
while self.numbers[0] != rs:
self.rotate
def do(self, previousOut):
if previousOut < 0:
pass
return self.numbers[previousOut]
#inicia a maquina baseada na configuração da chave
class Enigma:
counter = [0, 0, 0]
def __init__(self, r1, r2, r3, ref):
self.r1 = r1
self.r2 = r2
self.r3 = r3
self.ref = ref
def ringset(self, rs):
self.r1.set(int(rs[0])-96)
self.r2.set(int(rs[1])-96)
self.r3.set(int(rs[2])-96)
def encrypt(self, message):
EncryptedMessage = []
for i in message:
EncryptedMessage.append(self.newLetter(ord(i.lower())-96, message))
self.rotateAll()
return EncryptedMessage
# def decrypt(self, message)
def newLetter(self, num):
return self.r1.do(self.r2.do(self.r3.do(self.ref.do(self.r3.do(self.r2.do(self.r1.do(num)))))))
def rotateAll(self):
self.r1.rotate()
self.counter[0] = self.counter[0] + 1
if self.counter[0] == ALPHABET_SIZE:
self.r2.rotate()
self.counter[1] = self.counter[1] + 1
self.counter[0] = 0
if self.counter[1] == ALPHABET_SIZE:
self.r3.rotate()
self.counter[2] = self.counter[2] + 1
self.counter[1] = 0
E = Enigma(Rotor(w1), Rotor(w2), Rotor(w3), Rotor('Reflector'))
E.ringset(rs)
print(E.r1)
| mit | Python |
|
47c8aa34eb9f4d2c4f702bc3957c87ef92cf7d28 | add simple learning switch app for OF1.3 | openvapour/ryu,lsqtongxin/ryu,ysywh/ryu,elahejalalpour/ELRyu,takahashiminoru/ryu,alanquillin/ryu,habibiefaried/ryu,jkoelker/ryu,gareging/SDN_Framework,ynkjm/ryu,Tesi-Luca-Davide/ryu,OpenState-SDN/ryu,ntts-clo/mld-ryu,zangree/ryu,lzppp/mylearning,jazzmes/ryu,sivaramakrishnansr/ryu,TakeshiTseng/ryu,ttsubo/ryu,hisaharu/ryu,elahejalalpour/ELRyu,zyq001/ryu,yamt/ryu,muzixing/ryu,hisaharu/ryu,torufuru/OFPatchPanel,fkakuma/ryu,habibiefaried/ryu,ttsubo/ryu,zyq001/ryu,shinpeimuraoka/ryu,takahashiminoru/ryu,lagopus/ryu-lagopus-ext,hisaharu/ryu,umkcdcrg01/ryu_openflow,lagopus/ryu-lagopus-ext,John-Lin/ryu,TakeshiTseng/ryu,unifycore/ryu,jkoelker/ryu,castroflavio/ryu,muzixing/ryu,Tesi-Luca-Davide/ryu,Zouyiran/ryu,TakeshiTseng/ryu,ynkjm/ryu,fujita/ryu,gopchandani/ryu,ttsubo/ryu,torufuru/oolhackathon,lzppp/mylearning,Zouyiran/ryu,lagopus/ryu-lagopus-ext,evanscottgray/ryu,lsqtongxin/ryu,shinpeimuraoka/ryu,citrix-openstack-build/ryu,citrix-openstack-build/ryu,fujita/ryu,yamt/ryu,StephenKing/summerschool-2015-ryu,StephenKing/ryu,ttsubo/ryu,umkcdcrg01/ryu_openflow,sivaramakrishnansr/ryu,lzppp/mylearning,alanquillin/ryu,haniehrajabi/ryu,evanscottgray/ryu,OpenState-SDN/ryu,sivaramakrishnansr/ryu,fujita/ryu,jazzmes/ryu,lzppp/mylearning,castroflavio/ryu,yamt/ryu,Zouyiran/ryu,zangree/ryu,StephenKing/summerschool-2015-ryu,takahashiminoru/ryu,darjus-amzn/ryu,habibiefaried/ryu,muzixing/ryu,StephenKing/summerschool-2015-ryu,Tejas-Subramanya/RYU_MEC,ynkjm/ryu,jazzmes/ryu,StephenKing/ryu,yamt/ryu,torufuru/oolhackathon,alyosha1879/ryu,ntts-clo/ryu,Zouyiran/ryu,pichuang/ryu,mikhaelharswanto/ryu,Tejas-Subramanya/RYU_MEC,iwaseyusuke/ryu,osrg/ryu,openvapour/ryu,osrg/ryu,TakeshiTseng/ryu,torufuru/oolhackathon,umkcdcrg01/ryu_openflow,gareging/SDN_Framework,elahejalalpour/ELRyu,diogommartins/ryu,Zouyiran/ryu,fkakuma/ryu,lsqtongxin/ryu,lsqtongxin/ryu,John-Lin/ryu,Tesi-Luca-Davide/ryu,StephenKing/ryu,lagopus/ryu-lagopus-ext,zyq001/ryu,fkakuma/ryu,StephenKing/summerschool-2015-ryu,zyq001/ryu,gareging/SDN_Framework,fkakuma/ryu,fujita/ryu,torufuru/OFPatchPanel,jalilm/ryu,jalilm/ryu,iwaseyusuke/ryu,o3project/ryu-oe,ysywh/ryu,StephenKing/summerschool-2015-ryu,diogommartins/ryu,iwaseyusuke/ryu,TakeshiTseng/ryu,alanquillin/ryu,pichuang/ryu,unifycore/ryu,ntts-clo/mld-ryu,StephenKing/ryu,alyosha1879/ryu,StephenKing/ryu,OpenState-SDN/ryu,haniehrajabi/ryu,diogommartins/ryu,ysywh/ryu,openvapour/ryu,alanquillin/ryu,gopchandani/ryu,jalilm/ryu,OpenState-SDN/ryu,darjus-amzn/ryu,osrg/ryu,ynkjm/ryu,haniehrajabi/ryu,pichuang/ryu,John-Lin/ryu,castroflavio/ryu,takahashiminoru/ryu,zangree/ryu,umkcdcrg01/ryu_openflow,fkakuma/ryu,alyosha1879/ryu,umkcdcrg01/ryu_openflow,ysywh/ryu,lzppp/mylearning,iwaseyusuke/ryu,ntts-clo/ryu,hisaharu/ryu,shinpeimuraoka/ryu,habibiefaried/ryu,citrix-openstack/build-ryu,openvapour/ryu,darjus-amzn/ryu,John-Lin/ryu,elahejalalpour/ELRyu,darjus-amzn/ryu,diogommartins/ryu,habibiefaried/ryu,diogommartins/ryu,Tesi-Luca-Davide/ryu,fujita/ryu,zyq001/ryu,alyosha1879/ryu,yamada-h/ryu,shinpeimuraoka/ryu,citrix-openstack/build-ryu,zangree/ryu,darjus-amzn/ryu,openvapour/ryu,lsqtongxin/ryu,lagopus/ryu-lagopus-ext,jalilm/ryu,jalilm/ryu,John-Lin/ryu,zangree/ryu,ynkjm/ryu,Tejas-Subramanya/RYU_MEC,o3project/ryu-oe,pichuang/ryu,yamt/ryu,muzixing/ryu,yamada-h/ryu,evanscottgray/ryu,Tejas-Subramanya/RYU_MEC,gopchandani/ryu,haniehrajabi/ryu,takahashiminoru/ryu,osrg/ryu,muzixing/ryu,ysywh/ryu,alanquillin/ryu,hisaharu/ryu,gareging/SDN_Framework,shinpeimuraoka/ryu,pichuang/ryu,osrg/ryu,sivaramakrishnansr/ryu,gareging/SDN_Framework,elahejalalpour/ELRyu,Tejas-Subramanya/RYU_MEC,gopchandani/ryu,sivaramakrishnansr/ryu,iwaseyusuke/ryu,mikhaelharswanto/ryu,jkoelker/ryu,Tesi-Luca-Davide/ryu,ttsubo/ryu,OpenState-SDN/ryu,gopchandani/ryu,haniehrajabi/ryu | ryu/app/simple_switch_13.py | ryu/app/simple_switch_13.py | # Copyright (C) 2011 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import struct
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet
class SimpleSwitch13(app_manager.RyuApp):
OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]
def __init__(self, *args, **kwargs):
super(SimpleSwitch13, self).__init__(*args, **kwargs)
self.mac_to_port = {}
def add_flow(self, datapath, port, dst, actions):
ofproto = datapath.ofproto
match = datapath.ofproto_parser.OFPMatch(in_port=port,
eth_dst=dst)
inst = [datapath.ofproto_parser.OFPInstructionActions(
ofproto.OFPIT_APPLY_ACTIONS, actions)]
mod = datapath.ofproto_parser.OFPFlowMod(
datapath=datapath, cookie=0, cookie_mask=0, table_id=0,
command=ofproto.OFPFC_ADD, idle_timeout=0, hard_timeout=0,
priority=0, buffer_id=ofproto.OFP_NO_BUFFER,
out_port=ofproto.OFPP_ANY,
out_group=ofproto.OFPG_ANY,
flags=0, match=match, instructions=inst)
datapath.send_msg(mod)
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
msg = ev.msg
datapath = msg.datapath
ofproto = datapath.ofproto
in_port = msg.match['in_port']
pkt = packet.Packet(msg.data)
eth = pkt.get_protocols(ethernet.ethernet)[0]
dst = eth.dst
src = eth.src
dpid = datapath.id
self.mac_to_port.setdefault(dpid, {})
self.logger.info("packet in %s %s %s %s", dpid, src, dst, in_port)
# learn a mac address to avoid FLOOD next time.
self.mac_to_port[dpid][src] = in_port
if dst in self.mac_to_port[dpid]:
out_port = self.mac_to_port[dpid][dst]
else:
out_port = ofproto.OFPP_FLOOD
actions = [datapath.ofproto_parser.OFPActionOutput(out_port)]
# install a flow to avoid packet_in next time
if out_port != ofproto.OFPP_FLOOD:
self.add_flow(datapath, in_port, dst, actions)
out = datapath.ofproto_parser.OFPPacketOut(
datapath=datapath, buffer_id=msg.buffer_id, in_port=in_port,
actions=actions)
datapath.send_msg(out)
| apache-2.0 | Python |
|
615e51ce1bf15c012a6c7cc2d026cb69bf0ce2b8 | Create MAIN.py | bubblegumwar/Pythagoras-Calculator | MAIN.py | MAIN.py | def pythagoras(SIDE, LEN1, LEN2):
from math import sqrt # This is function is needed to work, it **SHOULD** be included with the default install.
ANSWER = "Error Code 1" # This should not logicaly happen if the user is not an idiot and follows the usage.
if type(LEN1) is str or type(LEN2) is str: # This checks if the user didn't listen to the usage // Was the LEN a string?
ANWSER = "Error Code 2"
return ANWSER # This will return an error to the user that didn't listen.
if type(SIDE) is int or type(SIDE) is float: # This checks if the user didn't listen to the usage // Was the SIDE an integer or float?
ANWSER = "Error Code 4"
return ANWSER # This will return an error to the user that didn't listen.
#--SIDE C--
if SIDE.lower() == "c":
#SIDE C CALCULATION (Hypotenuse)
A_SIDE = LEN1
B_SIDE = LEN2
C_SIDE = sqrt(A_SIDE * A_SIDE + B_SIDE * B_SIDE)
ANSWER = C_SIDE # This sets the answer to be returned.
#--SIDE A--
elif SIDE.lower() == 'a':
if LEN1 < LEN2: # This will happen if the user did not follow instructions. See error below.
print("The hypotenues should be bigger")
anwser = "Error code 2"
return ANSWER # This will return an error to the user that didn't listen.
#SIDE A CALCULATION
B_SIDE = LEN2
C_SIDE = LEN1
ASIDE = sqrt((C_SIDE * C_SIDE) - (B_SIDE * B_SIDE))
ANSWER = A_SIDE # This sets the answer to be returned.
#--SIDE B--
elif SIDE.lower() == 'b':
if LEN1 < LEN2: # This will happen if the user did not follow instructions. See error below.
print("The hypotenues should be bigger")
ANSWER = "Error code 2"
return ANSWER # This will return an error to the user that didn't listen.
#SIDE B CALCULATION
A_SIDE = LEN2
C_SIDE = LEN1
B_SIDE = sqrt(C_SIDE * C_SIDE - A_SIDE * A_SIDE)
ANSWER = B_SIDE # This sets the answer to be returned.
return ANSWER # Returns the anwser for the user to use.
| mit | Python |
|
cbe0d5b37d4055ea78568838c3fd4cc953342b80 | remove stale data | IQSS/geoconnect,IQSS/geoconnect,IQSS/geoconnect,IQSS/geoconnect | geoconnect/apps/gis_tabular/utils_stale_data.py | geoconnect/apps/gis_tabular/utils_stale_data.py | from datetime import datetime, timedelta
from apps.gis_tabular.models import TabularFileInfo # for testing
from apps.gis_tabular.models import WorldMapTabularLayerInfo,\
WorldMapLatLngInfo, WorldMapJoinLayerInfo
from apps.worldmap_connect.models import WorldMapLayerInfo
DEFAULT_STALE_AGE = 3 * 60 * 60 # 3 hours, in seconds
def remove_stale_map_data(stale_age_in_seconds=DEFAULT_STALE_AGE):
"""
Remove old map data...
"""
current_time = datetime.now()
for wm_info in WorldMapLatLngInfo.objects.all():
remove_if_stale(wm_info, stale_age_in_seconds, current_time)
for wm_info in WorldMapLatLngInfo.objects.all():
remove_if_stale(wm_info, stale_age_in_seconds, current_time)
for wm_info in WorldMapLayerInfo.objects.all():
remove_if_stale(wm_info, stale_age_in_seconds, current_time)
def remove_if_stale(info_object, stale_age_in_seconds, current_time=None):
assert hasattr(info_object, 'modified'),\
'The info_object must have "modified" date'
if not current_time:
current_time = datetime.now()
mod_time = info_object.modified
if hasattr(mod_time, 'tzinfo'):
mod_time = mod_time.replace(tzinfo=None)
# Is this object beyond it's time limit
time_diff = (current_time - mod_time).total_seconds()
if time_diff > stale_age_in_seconds:
# Yes! delete it
print 'Removing: ', info_object
info_object.delete()
"""
from apps.gis_tabular.utils_stale_data import *
remove_stale_map_data()
"""
| apache-2.0 | Python |
|
cb56e0151b37a79e2ba95815555cde0633e167e7 | add client subscribe testing | beerfactory/hbmqtt | samples/client_subscribe.py | samples/client_subscribe.py | import logging
from hbmqtt.client._client import MQTTClient
import asyncio
logger = logging.getLogger(__name__)
C = MQTTClient()
@asyncio.coroutine
def test_coro():
yield from C.connect(uri='mqtt://iot.eclipse.org:1883/', username=None, password=None)
yield from C.subscribe([
{'filter': '$SYS/broker/uptime', 'qos': 0x00},
])
logger.info("Subscribed")
yield from asyncio.sleep(60)
yield from C.disconnect()
if __name__ == '__main__':
formatter = "[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s"
logging.basicConfig(level=logging.DEBUG, format=formatter)
asyncio.get_event_loop().run_until_complete(test_coro()) | mit | Python |
|
c422b5019c6e638bce40a7fecef6977aa5e63ce0 | add __init__.py | weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016 | python/18-package/parent/__init__.py | python/18-package/parent/__init__.py | #!/usr/bin/env python
#-*- coding=utf-8 -*-
if __name__ == "__main__":
print "Package parent running as main program"
else:
print "Package parent initializing"
| bsd-2-clause | Python |
|
8d6a5c4092d4f092416fc39fc7faa8bb20e701c3 | Add a manage command to sync reservations from external hook .. hard coded first product only atm (cherry picked from commit 63a80b711e1be9a6047965b8d0061b676d8c50ed) | jaywink/cartridge-reservable,jaywink/cartridge-reservable,jaywink/cartridge-reservable | cartridge/shop/management/commands/syncreshooks.py | cartridge/shop/management/commands/syncreshooks.py | from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from mezzanine.conf import settings
from cartridge.shop.models import *
class Command(BaseCommand):
help = 'Sync reservations from external hook'
def handle(self, *args, **options):
p = ReservableProduct.objects.all()[0]
p.update_from_hook()
| bsd-2-clause | Python |
|
afab4bcd795da4395920eab6107bc33e401ed86a | Create PiWS.py | llamafarmer/Pi_Weather_Station,llamafarmer/Pi_Weather_Station,llamafarmer/Pi_Weather_Station | PiWS.py | PiWS.py | import time
import datetime
import csv
from math import log
from flask import Flask, render_template
from sense_hat import SenseHat
app = Flask(__name__)
def weather():
sense = SenseHat()
sense.clear()
celcius = round(sense.get_temperature(), 1)
fahrenheit = round(1.8 * celcius + 32, 1)
humidity = round(sense.get_humidity(), 1)
pressure = round(sense.get_pressure(), 1)
dewpoint = round(243.04 * (log(humidity / 100) + ((17.625 * celcius) / (243.04 + celcius))) / (17.625 - log(humidity / 100) - (17.625 * celcius) / (243.04 + celcius)), 1)
acceleration = sense.get_accelerometer_raw()
x = round(acceleration['x'], 0)
y = round(acceleration['y'], 0)
z = round(acceleration['z'], 0)
if x == -1:
sense.set_rotation(90)
elif y == 1:
sense.set_rotation(0)
elif y == -1:
sense.set_rotation(180)
else:
sense.set_rotation(180)
if fahrenheit > 20 and fahrenheit < 80:
bg_color = [0, 0, 155] # blue
if fahrenheit > 81 and fahrenheit < 90:
bg_color = [0, 155, 0] # Green
if fahrenheit > 91 and fahrenheit < 100:
bg_color = [155, 155, 0] # Yellow
if fahrenheit > 101 and fahrenheit < 102:
bg_color = [255, 127, 0] # Orange
if fahrenheit > 103 and fahrenheit < 104:
bg_color = [155, 0, 0] # Red
if fahrenheit > 105 and fahrenheit < 109:
bg_color = [255, 0, 0] # Bright Red
if fahrenheit > 110 and fahrenheit < 120:
bg_color = [155, 155, 155] # White
else:
bg_color = [0, 155, 0] # Green
result = ' Temp. F ' + str(fahrenheit) + ' Temp. C ' + str(celcius) + ' Hum. ' + str(humidity) + ' Press. ' + str(pressure) + ' DewPoint ' + str(dewpoint)
print(result)
result_list = [(datetime.datetime.now(), celcius, fahrenheit, humidity, pressure, dewpoint)]
with open('weather_logs.csv', 'a', newline='') as csv_file:
writer = csv.writer(csv_file)
writer.writerows(result_list)
for x in range(5):
sense.show_message(result, scroll_speed=0.10, back_colour=bg_color, text_colour=[155, 155, 155])
@app.route('/')
def index():
sense = SenseHat()
sense.clear()
celcius = round(sense.get_temperature(), 1)
fahrenheit = round(1.8 * celcius + 32, 1)
humidity = round(sense.get_humidity(), 1)
pressure = round(sense.get_pressure(), 1)
dewpoint = round(243.04 * (log(humidity / 100) + ((17.625 * celcius) / (243.04 + celcius))) / (17.625 - log(humidity / 100) - (17.625 * celcius) / (243.04 + celcius)), 1)
acceleration = sense.get_accelerometer_raw()
x = round(acceleration['x'], 1)
y = round(acceleration['y'], 1)
z = round(acceleration['z'], 1)
return render_template('weather.html', celcius=celcius, fahrenheit=fahrenheit, humidity=humidity, pressure=pressure, dewpoint=dewpoint, x=x, y=y, z=z)
while __name__ == '__main__':
weather()
#app.run(host='0.0.0.0')
| mit | Python |
|
7e71b21f655ec35bd5ebd79aeb5dbec6945a77a7 | Add purdue harvester | erinspace/scrapi,fabianvf/scrapi,erinspace/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,CenterForOpenScience/scrapi | scrapi/harvesters/purdue.py | scrapi/harvesters/purdue.py | '''
Harvester for the Purdue University Research Repository for the SHARE project
Example API call: http://purr.purdue.edu/oaipmh?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class PurdueHarvester(OAIHarvester):
short_name = 'purdue'
long_name = 'PURR - Purdue University Research Repository'
url = 'http://purr.purdue.edu'
base_url = 'http://purr.purdue.edu/oaipmh'
property_list = ['date', 'relation', 'identifier', 'type', 'setSpec']
timezone_granularity = True
| apache-2.0 | Python |
|
7ecfe7d20f8708a1dada5761cdc02905b0e370e5 | use correct separator | owlabs/incubator-airflow,opensignal/airflow,hamedhsn/incubator-airflow,DinoCow/airflow,jiwang576/incubator-airflow,rishibarve/incubator-airflow,MetrodataTeam/incubator-airflow,andrewmchen/incubator-airflow,yoziru-desu/airflow,malmiron/incubator-airflow,nathanielvarona/airflow,r39132/airflow,Fokko/incubator-airflow,r39132/airflow,preete-dixit-ck/incubator-airflow,nathanielvarona/airflow,dud225/incubator-airflow,yati-sagade/incubator-airflow,sergiohgz/incubator-airflow,airbnb/airflow,wndhydrnt/airflow,mrkm4ntr/incubator-airflow,gilt/incubator-airflow,brandsoulmates/incubator-airflow,skudriashev/incubator-airflow,Acehaidrey/incubator-airflow,storpipfugl/airflow,jesusfcr/airflow,AllisonWang/incubator-airflow,zodiac/incubator-airflow,mtdewulf/incubator-airflow,lyft/incubator-airflow,jiwang576/incubator-airflow,modsy/incubator-airflow,kerzhner/airflow,zoyahav/incubator-airflow,dgies/incubator-airflow,cfei18/incubator-airflow,airbnb/airflow,cjqian/incubator-airflow,sekikn/incubator-airflow,yoziru-desu/airflow,jfantom/incubator-airflow,Acehaidrey/incubator-airflow,artwr/airflow,cademarkegard/airflow,aminghadersohi/airflow,moritzpein/airflow,Fokko/incubator-airflow,lxneng/incubator-airflow,malmiron/incubator-airflow,stverhae/incubator-airflow,griffinqiu/airflow,jfantom/incubator-airflow,janczak10/incubator-airflow,mattuuh7/incubator-airflow,alexvanboxel/airflow,btallman/incubator-airflow,DinoCow/airflow,nathanielvarona/airflow,sdiazb/airflow,r39132/airflow,opensignal/airflow,easytaxibr/airflow,sid88in/incubator-airflow,DEVELByte/incubator-airflow,apache/airflow,ty707/airflow,biln/airflow,andrewmchen/incubator-airflow,vineet-rh/incubator-airflow,forevernull/incubator-airflow,spektom/incubator-airflow,dhuang/incubator-airflow,mtagle/airflow,apache/incubator-airflow,yati-sagade/incubator-airflow,kerzhner/airflow,NielsZeilemaker/incubator-airflow,jhsenjaliya/incubator-airflow,spektom/incubator-airflow,dmitry-r/incubator-airflow,easytaxibr/airflow,malmiron/incubator-airflow,holygits/incubator-airflow,mtustin-handy/airflow,wolfier/incubator-airflow,MortalViews/incubator-airflow,cfei18/incubator-airflow,holygits/incubator-airflow,aminghadersohi/airflow,preete-dixit-ck/incubator-airflow,opensignal/airflow,adamhaney/airflow,N3da/incubator-airflow,jhsenjaliya/incubator-airflow,brandsoulmates/incubator-airflow,dhuang/incubator-airflow,Acehaidrey/incubator-airflow,vijaysbhat/incubator-airflow,Tagar/incubator-airflow,sid88in/incubator-airflow,jgao54/airflow,moritzpein/airflow,Chedi/airflow,OpringaoDoTurno/airflow,mrares/incubator-airflow,owlabs/incubator-airflow,mylons/incubator-airflow,caseyching/incubator-airflow,asnir/airflow,wxiang7/airflow,plypaul/airflow,preete-dixit-ck/incubator-airflow,RealImpactAnalytics/airflow,yiqingj/airflow,edgarRd/incubator-airflow,andyxhadji/incubator-airflow,hgrif/incubator-airflow,nathanielvarona/airflow,Tagar/incubator-airflow,wileeam/airflow,akosel/incubator-airflow,yk5/incubator-airflow,ronfung/incubator-airflow,edgarRd/incubator-airflow,zodiac/incubator-airflow,rishibarve/incubator-airflow,vijaysbhat/incubator-airflow,Twistbioscience/incubator-airflow,cjqian/incubator-airflow,subodhchhabra/airflow,d-lee/airflow,danielvdende/incubator-airflow,jwi078/incubator-airflow,mattuuh7/incubator-airflow,malmiron/incubator-airflow,yoziru-desu/airflow,ty707/airflow,apache/airflow,mrkm4ntr/incubator-airflow,Acehaidrey/incubator-airflow,N3da/incubator-airflow,DEVELByte/incubator-airflow,subodhchhabra/airflow,vineet-rh/incubator-airflow,CloverHealth/airflow,easytaxibr/airflow,adamhaney/airflow,janczak10/incubator-airflow,storpipfugl/airflow,gtoonstra/airflow,cfei18/incubator-airflow,biln/airflow,andyxhadji/incubator-airflow,MortalViews/incubator-airflow,griffinqiu/airflow,mrkm4ntr/incubator-airflow,mistercrunch/airflow,aminghadersohi/airflow,wndhydrnt/airflow,fenglu-g/incubator-airflow,wileeam/airflow,AllisonWang/incubator-airflow,yati-sagade/incubator-airflow,mrares/incubator-airflow,Tagar/incubator-airflow,Chedi/airflow,mattuuh7/incubator-airflow,OpringaoDoTurno/airflow,akosel/incubator-airflow,d-lee/airflow,andyxhadji/incubator-airflow,edgarRd/incubator-airflow,mtdewulf/incubator-airflow,mrkm4ntr/incubator-airflow,cfei18/incubator-airflow,spektom/incubator-airflow,ledsusop/airflow,jgao54/airflow,brandsoulmates/incubator-airflow,sid88in/incubator-airflow,plypaul/airflow,DEVELByte/incubator-airflow,biln/airflow,ledsusop/airflow,skudriashev/incubator-airflow,owlabs/incubator-airflow,jhsenjaliya/incubator-airflow,mattuuh7/incubator-airflow,DEVELByte/incubator-airflow,wooga/airflow,KL-WLCR/incubator-airflow,danielvdende/incubator-airflow,jiwang576/incubator-airflow,mistercrunch/airflow,jiwang576/incubator-airflow,OpringaoDoTurno/airflow,adrpar/incubator-airflow,jfantom/incubator-airflow,janczak10/incubator-airflow,mylons/incubator-airflow,jlowin/airflow,bolkedebruin/airflow,zodiac/incubator-airflow,sid88in/incubator-airflow,lxneng/incubator-airflow,caseyching/incubator-airflow,kerzhner/airflow,zack3241/incubator-airflow,lyft/incubator-airflow,opensignal/airflow,Chedi/airflow,apache/incubator-airflow,wileeam/airflow,criccomini/airflow,stverhae/incubator-airflow,vijaysbhat/incubator-airflow,jwi078/incubator-airflow,mtdewulf/incubator-airflow,subodhchhabra/airflow,wileeam/airflow,bolkedebruin/airflow,hamedhsn/incubator-airflow,spektom/incubator-airflow,dhuang/incubator-airflow,sergiohgz/incubator-airflow,ProstoMaxim/incubator-airflow,juvoinc/airflow,KL-WLCR/incubator-airflow,hamedhsn/incubator-airflow,airbnb/airflow,fenglu-g/incubator-airflow,dgies/incubator-airflow,airbnb/airflow,fenglu-g/incubator-airflow,r39132/airflow,wooga/airflow,mistercrunch/airflow,Tagar/incubator-airflow,CloverHealth/airflow,janczak10/incubator-airflow,lxneng/incubator-airflow,alexvanboxel/airflow,MetrodataTeam/incubator-airflow,wolfier/incubator-airflow,caseyching/incubator-airflow,storpipfugl/airflow,btallman/incubator-airflow,NielsZeilemaker/incubator-airflow,wxiang7/airflow,NielsZeilemaker/incubator-airflow,hgrif/incubator-airflow,Fokko/incubator-airflow,nathanielvarona/airflow,jason-z-hang/airflow,neovintage/airflow,zoyahav/incubator-airflow,MetrodataTeam/incubator-airflow,mrares/incubator-airflow,skudriashev/incubator-airflow,dmitry-r/incubator-airflow,yoziru-desu/airflow,wolfier/incubator-airflow,RealImpactAnalytics/airflow,btallman/incubator-airflow,holygits/incubator-airflow,neovintage/airflow,mtagle/airflow,jhsenjaliya/incubator-airflow,zoyahav/incubator-airflow,gritlogic/incubator-airflow,RealImpactAnalytics/airflow,griffinqiu/airflow,cademarkegard/airflow,wndhydrnt/airflow,MortalViews/incubator-airflow,jbhsieh/incubator-airflow,hgrif/incubator-airflow,lxneng/incubator-airflow,mylons/incubator-airflow,biln/airflow,jfantom/incubator-airflow,apache/incubator-airflow,ty707/airflow,adrpar/incubator-airflow,MortalViews/incubator-airflow,juvoinc/airflow,sdiazb/airflow,yiqingj/airflow,holygits/incubator-airflow,cademarkegard/airflow,ProstoMaxim/incubator-airflow,wooga/airflow,edgarRd/incubator-airflow,adrpar/incubator-airflow,apache/airflow,andyxhadji/incubator-airflow,forevernull/incubator-airflow,dud225/incubator-airflow,dhuang/incubator-airflow,mtustin-handy/airflow,wxiang7/airflow,btallman/incubator-airflow,N3da/incubator-airflow,lyft/incubator-airflow,akosel/incubator-airflow,danielvdende/incubator-airflow,vijaysbhat/incubator-airflow,d-lee/airflow,nathanielvarona/airflow,plypaul/airflow,criccomini/airflow,saguziel/incubator-airflow,zack3241/incubator-airflow,andrewmchen/incubator-airflow,AllisonWang/incubator-airflow,dgies/incubator-airflow,KL-WLCR/incubator-airflow,lyft/incubator-airflow,mtagle/airflow,saguziel/incubator-airflow,dmitry-r/incubator-airflow,aminghadersohi/airflow,gritlogic/incubator-airflow,easytaxibr/airflow,adrpar/incubator-airflow,jason-z-hang/airflow,ronfung/incubator-airflow,zack3241/incubator-airflow,jlowin/airflow,CloverHealth/airflow,danielvdende/incubator-airflow,adamhaney/airflow,wolfier/incubator-airflow,neovintage/airflow,modsy/incubator-airflow,jason-z-hang/airflow,KL-WLCR/incubator-airflow,dgies/incubator-airflow,danielvdende/incubator-airflow,ronfung/incubator-airflow,vineet-rh/incubator-airflow,moritzpein/airflow,jgao54/airflow,Chedi/airflow,bolkedebruin/airflow,Twistbioscience/incubator-airflow,asnir/airflow,hamedhsn/incubator-airflow,RealImpactAnalytics/airflow,MetrodataTeam/incubator-airflow,adamhaney/airflow,Acehaidrey/incubator-airflow,cademarkegard/airflow,wooga/airflow,apache/incubator-airflow,cfei18/incubator-airflow,mistercrunch/airflow,wxiang7/airflow,artwr/airflow,akosel/incubator-airflow,dud225/incubator-airflow,N3da/incubator-airflow,jlowin/airflow,sekikn/incubator-airflow,neovintage/airflow,jbhsieh/incubator-airflow,rishibarve/incubator-airflow,Twistbioscience/incubator-airflow,sekikn/incubator-airflow,gtoonstra/airflow,jlowin/airflow,cjqian/incubator-airflow,yk5/incubator-airflow,sdiazb/airflow,yiqingj/airflow,Fokko/incubator-airflow,brandsoulmates/incubator-airflow,storpipfugl/airflow,Acehaidrey/incubator-airflow,NielsZeilemaker/incubator-airflow,apache/airflow,danielvdende/incubator-airflow,modsy/incubator-airflow,caseyching/incubator-airflow,yiqingj/airflow,gilt/incubator-airflow,saguziel/incubator-airflow,cjqian/incubator-airflow,gritlogic/incubator-airflow,gilt/incubator-airflow,owlabs/incubator-airflow,ProstoMaxim/incubator-airflow,criccomini/airflow,jwi078/incubator-airflow,gritlogic/incubator-airflow,OpringaoDoTurno/airflow,jesusfcr/airflow,sergiohgz/incubator-airflow,bolkedebruin/airflow,skudriashev/incubator-airflow,sdiazb/airflow,zoyahav/incubator-airflow,jason-z-hang/airflow,jgao54/airflow,mtustin-handy/airflow,mrares/incubator-airflow,stverhae/incubator-airflow,vineet-rh/incubator-airflow,plypaul/airflow,alexvanboxel/airflow,asnir/airflow,DinoCow/airflow,alexvanboxel/airflow,ronfung/incubator-airflow,andrewmchen/incubator-airflow,griffinqiu/airflow,Twistbioscience/incubator-airflow,AllisonWang/incubator-airflow,dmitry-r/incubator-airflow,sergiohgz/incubator-airflow,sekikn/incubator-airflow,forevernull/incubator-airflow,mtustin-handy/airflow,mtagle/airflow,gtoonstra/airflow,ledsusop/airflow,gtoonstra/airflow,wndhydrnt/airflow,hgrif/incubator-airflow,jwi078/incubator-airflow,jbhsieh/incubator-airflow,CloverHealth/airflow,asnir/airflow,juvoinc/airflow,mylons/incubator-airflow,jbhsieh/incubator-airflow,yk5/incubator-airflow,rishibarve/incubator-airflow,ty707/airflow,apache/airflow,zack3241/incubator-airflow,fenglu-g/incubator-airflow,cfei18/incubator-airflow,artwr/airflow,gilt/incubator-airflow,criccomini/airflow,dud225/incubator-airflow,juvoinc/airflow,zodiac/incubator-airflow,ledsusop/airflow,modsy/incubator-airflow,mtdewulf/incubator-airflow,jesusfcr/airflow,DinoCow/airflow,apache/airflow,yk5/incubator-airflow,bolkedebruin/airflow,moritzpein/airflow,kerzhner/airflow,saguziel/incubator-airflow,artwr/airflow,forevernull/incubator-airflow,d-lee/airflow,jesusfcr/airflow,subodhchhabra/airflow,ProstoMaxim/incubator-airflow,preete-dixit-ck/incubator-airflow,yati-sagade/incubator-airflow,stverhae/incubator-airflow | scripts/ci/wheel_factory.py | scripts/ci/wheel_factory.py | #!/usr/bin/env python
import requirements
import argparse
import glob
import os
parser = argparse.ArgumentParser()
parser.add_argument('file', help="requirements.txt", type=str)
parser.add_argument('wheeldir', help="wheeldir location", type=str)
args = parser.parse_args()
req_file = open(args.file, 'r')
for req in requirements.parse(req_file):
print "Checking " + args.wheeldir + os.path.sep + req.name + "*.whl"
if not glob.glob(args.wheeldir + os.path.sep + req.name + "*.whl"):
os.system("pip wheel --wheel-dir=" + args.wheeldir + " " + req.name + "".join(req.specs) + "".join(req.extras))
| #!/usr/bin/env python
import requirements
import argparse
import glob
import os
parser = argparse.ArgumentParser()
parser.add_argument('file', help="requirements.txt", type=str)
parser.add_argument('wheeldir', help="wheeldir location", type=str)
args = parser.parse_args()
req_file = open(args.file, 'r')
for req in requirements.parse(req_file):
print "Checking " + args.wheeldir + os.path.pathsep + req.name + "*.whl"
if not glob.glob(args.wheeldir + os.path.pathsep + req.name + "*.whl"):
os.system("pip wheel --wheel-dir=" + args.wheeldir + " " + req.name + "".join(req.specs) + "".join(req.extras))
| apache-2.0 | Python |
027a199924ee256170a2e369733a57fcc7483c88 | Add missing numeter namespace in poller | enovance/numeter,redhat-cip/numeter,enovance/numeter,enovance/numeter,redhat-cip/numeter,redhat-cip/numeter,redhat-cip/numeter,enovance/numeter | poller/numeter/__init__.py | poller/numeter/__init__.py | __import__('pkg_resources').declare_namespace(__name__)
| agpl-3.0 | Python |
|
7420f49f8e1508fa2017c629d8d11a16a9e28c4a | add abstract biobox class | pbelmann/command-line-interface,bioboxes/command-line-interface,michaelbarton/command-line-interface,michaelbarton/command-line-interface,pbelmann/command-line-interface,bioboxes/command-line-interface | biobox_cli/biobox.py | biobox_cli/biobox.py | from abc import ABCMeta, abstractmethod
import biobox_cli.container as ctn
import biobox_cli.util.misc as util
import tempfile as tmp
class Biobox:
__metaclass__ = ABCMeta
@abstractmethod
def prepare_volumes(opts):
pass
@abstractmethod
def get_doc(self):
pass
@abstractmethod
def after_run(self, host_dst_dir):
pass
def run(self, argv):
opts = util.parse_docopt(self.get_doc(), argv, False)
task = opts['--task']
image = opts['<image>']
output = opts['--output']
host_dst_dir = tmp.mkdtemp()
volumes = self.prepare_volumes(opts, host_dst_dir)
ctn.exit_if_no_image_available(image)
ctnr = ctn.create(image, task, volumes)
ctn.run(ctnr)
self.after_run(output, host_dst_dir)
return ctnr
def remove(self, container):
"""
Removes a container
Note this method is not tested due to limitations of circle ci
"""
ctn.remove(container) | mit | Python |
|
4d1b006e5ba559715d55a88528cdfc0bed755182 | add import script for Weymouth | chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_weymouth.py | polling_stations/apps/data_collection/management/commands/import_weymouth.py | from data_collection.management.commands import BaseXpressDCCsvInconsistentPostcodesImporter
class Command(BaseXpressDCCsvInconsistentPostcodesImporter):
council_id = 'E07000053'
addresses_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017WPBC.TSV'
stations_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017WPBC.TSV'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
| bsd-3-clause | Python |
|
2ce80e667de438fca20de7b4ab6847751b683e33 | Add digikey command. | kivhift/qmk,kivhift/qmk | src/commands/digikey.py | src/commands/digikey.py | #
# Copyright (c) 2013 Joshua Hughes <kivhift@gmail.com>
#
import urllib
import webbrowser
import qmk
class DigikeyCommand(qmk.Command):
"""Look up a part on Digi-Key.
A new tab will be opened in the default web browser that contains the
search results.
"""
def __init__(self):
self._name = 'digikey'
self._help = self.__doc__
self.__baseURL = 'http://www.digikey.com/product-search/en?KeyWords={}'
@qmk.Command.actionRequiresArgument
def action(self, arg):
webbrowser.open_new_tab(self.__baseURL.format(urllib.quote_plus(
' '.join(arg.split()).encode('utf-8'))))
def commands(): return [ DigikeyCommand() ]
| mit | Python |
|
8b4bbd23bf37fb946b664f5932e4903f802c6e0d | Add first pass at integration style tests | wdv4758h/flake8,lericson/flake8 | flake8/tests/test_integration.py | flake8/tests/test_integration.py | from __future__ import with_statement
import os
import unittest
try:
from unittest import mock
except ImportError:
import mock # < PY33
from flake8 import engine
class IntegrationTestCase(unittest.TestCase):
"""Integration style tests to exercise different command line options."""
def this_file(self):
"""Return the real path of this file."""
this_file = os.path.realpath(__file__)
if this_file.endswith("pyc"):
this_file = this_file[:-1]
return this_file
def check_files(self, arglist=[], explicit_stdin=False, count=0):
"""Call check_files."""
if explicit_stdin:
target_file = "-"
else:
target_file = self.this_file()
argv = ['flake8'] + arglist + [target_file]
with mock.patch("sys.argv", argv):
style_guide = engine.get_style_guide(parse_argv=True)
report = style_guide.check_files()
self.assertEqual(report.total_errors, count)
return style_guide, report
def test_no_args(self):
# assert there are no reported errors
self.check_files()
def _job_tester(self, jobs):
# mock stdout.flush so we can count the number of jobs created
with mock.patch('sys.stdout.flush') as mocked:
guide, report = self.check_files(arglist=['--jobs=%s' % jobs])
self.assertEqual(guide.options.jobs, jobs)
self.assertEqual(mocked.call_count, jobs)
def test_jobs(self):
self._job_tester(2)
self._job_tester(10)
def test_stdin(self):
self.count = 0
def fake_stdin():
self.count += 1
with open(self.this_file(), "r") as f:
return f.read()
with mock.patch("pep8.stdin_get_value", fake_stdin):
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True)
self.assertEqual(self.count, 1)
def test_stdin_fail(self):
def fake_stdin():
return "notathing\n"
with mock.patch("pep8.stdin_get_value", fake_stdin):
# only assert needed is in check_files
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True,
count=1)
| mit | Python |
|
0d2adfcce21dd2efb5d781babec3e6b03464b6d5 | Add basic tests | gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin | tests/app/main/test_request_header.py | tests/app/main/test_request_header.py | from tests.conftest import set_config_values
def test_route_correct_secret_key(app_, client):
with set_config_values(app_, {
'ROUTE_SECRET_KEY_1': 'key_1',
'ROUTE_SECRET_KEY_2': '',
'DEBUG': False,
}):
response = client.get(
path='/_status',
headers=[
('X-Custom-forwarder', 'key_1'),
]
)
assert response.status_code == 200
def test_route_incorrect_secret_key(app_, client):
with set_config_values(app_, {
'ROUTE_SECRET_KEY_1': 'key_1',
'ROUTE_SECRET_KEY_2': '',
'DEBUG': False,
}):
response = client.get(
path='/_status',
headers=[
('X-Custom-forwarder', 'wrong_key'),
]
)
assert response.status_code == 403
| mit | Python |
|
77af87198d1116b77df431d9139b30f76103dd64 | Add migration for latitute and longitude of event | softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat | fellowms/migrations/0023_auto_20160617_1350.py | fellowms/migrations/0023_auto_20160617_1350.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-17 13:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0022_event_report_url'),
]
operations = [
migrations.AddField(
model_name='event',
name='lat',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='event',
name='lon',
field=models.FloatField(blank=True, null=True),
),
]
| bsd-3-clause | Python |
|
fb07837db870a5fdea3a98aa1381793b1b20d2c0 | Create main.py | jbaum517/jcb2tp-grocery | main.py | main.py | import webapp2
import jinja2
import os
import urllib
from google.appengine.api import users
from google.appengine.ext import ndb
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
def user_key(id):
return ndb.Key('GroceryList',id)
class GroceryItem(ndb.Model):
name = ndb.StringProperty()
cost = ndb.FloatProperty()
quantity = ndb.IntegerProperty()
total = ndb.FloatProperty()
picture = ndb.BlobProperty()
time = ndb.DateTimeProperty(auto_now_add=True)
class MainHandler(webapp2.RequestHandler):
def get(self):
user = users.get_current_user()
items_query = GroceryItem.query(
ancestor=user_key(users.get_current_user().user_id())).order(-GroceryItem.time)
items = items_query.fetch(10)
if user:
url = users.create_logout_url(self.request.uri)
url_linktext = 'Logout'
else:
url = users.create_login_url(self.request.uri)
url_linktext = 'Login'
template_values = {
'user':users.get_current_user(),
'items':items,
'url':url,
'url_linktext':url_linktext,
}
template = JINJA_ENVIRONMENT.get_template('index.html')
self.response.write(template.render(template_values))
class GroceryList(webapp2.RequestHandler):
def post(self):
user = users.get_current_user();
item = GroceryItem(parent=user_key(user.user_id()))
item.name = self.request.get('name')
item.cost = self.request.get('cost')
item.quantity = self.request.get('quantity')
item.picture = self.request.get('img')
item.total = item.cost * item.quantity
item.put()
query_params = {'user': user_key(user.user_id())}
self.redirect('/?' + urllib.urlencode(query_params))
app = webapp2.WSGIApplication([
('/', MainHandler)
('/add', GroceryList)
], debug=True)
| unlicense | Python |
|
b920f5aeecf7843fcc699db4a70a9a0f124fa198 | Add unit test for protonate.py | jensengroup/propka | tests/test_protonate.py | tests/test_protonate.py | import propka.atom
import propka.protonate
def test_protonate_atom():
atom = propka.atom.Atom(
"HETATM 4479 V VO4 A1578 -19.097 16.967 0.500 1.00 17.21 V "
)
assert not atom.is_protonated
p = propka.protonate.Protonate()
p.protonate_atom(atom)
assert atom.is_protonated
assert atom.number_of_protons_to_add == 6
| lgpl-2.1 | Python |
|
2bf763e39e91ef989c121bba420e4ae09ea0a569 | Add Diagonal Difference HackerRank Problem | PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank | algorithms/diagonal_difference/kevin.py | algorithms/diagonal_difference/kevin.py | #!/usr/bin/env python
def get_matrix_row_from_input():
return [int(index) for index in input().strip().split(' ')]
n = int(input().strip())
primary_diag_sum = 0
secondary_diag_sum = 0
for row_count in range(n):
row = get_matrix_row_from_input()
primary_diag_sum += row[row_count]
secondary_diag_sum += row[-1 - row_count]
print(abs(primary_diag_sum - secondary_diag_sum))
| mit | Python |
|
9098904ffcd47c4327594f8fc6ce8ce8694e5422 | Create getsubinterfaces.py | infobloxopen/netmri-toolkit,infobloxopen/netmri-toolkit,infobloxopen/netmri-toolkit | python/getsubinterfaces.py | python/getsubinterfaces.py | #Device subinterface data retrieval script. Copyright Ingmar Van Glabbeek ingmar@infoblox.com
#Licensed under Apache-2.0
#This script will pull all devices of a given device group and then list the devices management ip as well as the available management ips.
#By default it saves the output to "deviceinterfacedump.json"
#Tested on NetMRI 7.3.1 and 7.3.2
#Modules required:
import getpass
import requests
import json
import urllib3
from requests.auth import HTTPBasicAuth
from http.client import responses
import time
#You can hardcode credentials here, it's not safe. Don't do it.
#hostname = "netmri.infoblox.com"
#username = "admin"
#password = "infoblox"
#urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def main():
cookie_host = wapi_connect()
#print(cookie_host)
devicelist = getdevices(cookie_host)
filtered_data = devicedata(devicelist)
#uncomment next line if you want to write to console
#print(json.dumps(filtered_data,indent=4, sort_keys=True))
filename = open("deviceinterfacedump.json","w")
filename.write(json.dumps(filtered_data,indent=4))
filename.close()
print("Data retrieved successfully")
def devicedata(devicelist):
listload = json.loads(devicelist)
data = []
for e in listload['rows']:
if not e["if_addrs"]:
device = {"DeviceID":e["DeviceID"],"DeviceName":e["DeviceName"],"DeviceType":e["DeviceType"],"DeviceIPDotted":e["DeviceIPDotted"],"Other InterfaceIP":["none"]}
data.append(device)
else:
device = {"DeviceID": e['DeviceID'], "DeviceName": e["DeviceName"], "DeviceType": e["DeviceType"],
"DeviceIPDotted": e["DeviceIPDotted"], "Other InterfaceIP":[]}
for f in e["if_addrs"]:
i=1
interface = {"InterfaceIP":f["ifIPDotted"], "Interfacename":f["ifName"]}
device["Other InterfaceIP"].insert(i,interface)
data.append(device)
i=i+1
dataftw=json.dumps(data)
returndata=json.loads(dataftw)
return returndata
def getdevices(cookie_host):
if not cookie_host:
print("No connection established.")
return 0
#get current time
ts = time.time()
hostname=cookie_host[1]
#limits number of results
limit = input("Limit to this number of devices: ")
get_url = "https://" + hostname + "/api/3.3/device_groups/index"
response = requests.get(get_url, cookies=cookie_host[0], verify=False)
d=response.text
dl=json.loads(d)
print("List of DeviceGroups")
for e in dl["device_groups"]:
dglist={"GroupName":e["GroupName"],"GroupID":e["GroupID"]}
print(dglist)
devicegroup = input("Based on the output specify the devicegroup ID by its ID: ")
get_url = "https://" + hostname + "/api/3.3/discovery_statuses/static/current.extjs"
querystring = {"_dc": ts, "filename": "recent_activity.csv", "filter": "null", "limit": limit,
"GroupID": devicegroup}
response = requests.get(get_url, cookies=cookie_host[0], verify=False, params=querystring)
t=response.text
print("We are fetching a list of " + str(limit) +
" devices for devicegroup " + str(devicegroup) + ".")
return(t)
def wapi_connect():
hostname = input("Enter the NetMRI hostname or IP: ")
username = input("Enter your NetMRI username: ")
password = getpass.getpass("Enter your Password: ")
https_val = input("Disable SSL validations?(y/n) ")
if https_val in ("y", "Y"):
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
print("SSL validation disabled")
if https_val in ("n", "N"):
print("SSL validation enabled")
login_url = "https://" + hostname + "/api/3.3/device_groups/index"
print("logging in to " + hostname)
try:
login_result = requests.get(
login_url,
auth=HTTPBasicAuth(username, password),
timeout=5,
verify=False)
except requests.exceptions.ConnectTimeout as e:
print("Connection time out after 5 seconds.")
exit(1)
except requests.exceptions.ConnectionError as e:
print("No route to host " + hostname)
exit(1)
if has_error(login_result):
exit(1)
else:
print("Login OK")
return(login_result.cookies,hostname)
def has_error(_result):
if _result.status_code == 200:
return 0
elif _result.status_code == 201:
return 0
try:
err_text = _result.json()['text']
except KeyError as e:
err_text = "Response contains no error text"
except json.decoder.JSONDecodeError as e:
err_text = "No JSON Response"
# print out the HTTP response code, description, and error text
http_code = _result.status_code
http_desc = responses[http_code]
print("HTTP Code [%3d] %s. %s" % (http_code, http_desc, err_text))
return 1
if __name__ == "__main__":
main()
| mit | Python |
|
54a8a77c75660eeae314c410685243e2b5bc59ca | add sw infer wrapper | DLTK/DLTK | dltk/core/utils.py | dltk/core/utils.py | import numpy as np
from dltk.core.io.sliding_window import SlidingWindow
def sliding_window_segmentation_inference(session, ops_list, sample_dict, batch_size=1):
"""
Parameters
----------
session
ops_list
sample_dict
Returns
-------
"""
# TODO: asserts
pl_shape = list(sample_dict.keys()[0].get_shape().as_list())
pl_bshape = pl_shape[1:-1]
inp_shape = list(sample_dict.values()[0].shape)
inp_bshape = inp_shape[1:-1]
out_dummies = [np.zeros([inp_shape[0], ] + inp_bshape + [op.get_shape().as_list()[-1]]
if len(op.get_shape().as_list()) == len(inp_shape) else []) for op in ops_list]
out_dummy_counter = [np.zeros_like(o) for o in out_dummies]
op_shape = list(ops_list[0].get_shape().as_list())
op_bshape = op_shape[1:-1]
out_diff = np.array(pl_bshape) - np.array(op_bshape)
padding = [[0, 0]] + [[diff // 2, diff - diff // 2] for diff in out_diff] + [[0, 0]]
padded_dict = {k: np.pad(v, padding, mode='constant') for k,v in sample_dict.items()}
f_bshape = padded_dict.values()[0].shape[1:-1]
striding = list(np.array(op_bshape) // 2) if all(out_diff == 0) else op_bshape
sw = SlidingWindow(f_bshape, pl_bshape, striding=striding)
out_sw = SlidingWindow(inp_bshape, op_bshape, striding=striding)
if batch_size > 1:
slicers = []
out_slicers = []
done = False
while True:
try:
slicer = next(sw)
out_slicer = next(out_sw)
except StopIteration:
done = True
if batch_size == 1:
sw_dict = {k: v[slicer] for k,v in padded_dict.items()}
op_parts = session.run(ops_list, feed_dict=sw_dict)
for idx in range(len(op_parts)):
out_dummies[idx][out_slicer] += op_parts[idx]
out_dummy_counter[idx][out_slicer] += 1
else:
slicers.append(slicer)
out_slicers.append(out_slicer)
if len(slicers) == batch_size or done:
slices_dict = {k: np.concatenate([v[slicer] for slicer in slicers], 0) for k,v in padded_dict.items()}
all_op_parts = session.run(ops_list, feed_dict=slices_dict)
zipped_parts = zip(*[np.array_split(part, len(slicers)) for part in all_op_parts])
for out_slicer, op_parts in zip(out_slicers, zipped_parts):
for idx in range(len(op_parts)):
out_dummies[idx][out_slicer] += op_parts[idx]
out_dummy_counter[idx][out_slicer] += 1
slicers = []
out_slicers = []
if done:
break
return [o / c for o, c in zip(out_dummies, out_dummy_counter)]
| apache-2.0 | Python |
|
c262cc4cc18336257972105c1cd6c409da8ed5cd | Create mcmc.py | RonsenbergVI/trendpy,RonsenbergVI/trendpy | mcmc.py | mcmc.py | # MIT License
# Copyright (c) 2017 Rene Jean Corneille
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from numpy import zeros, reshape
from scipy.stats import rv_continuous
__all__ = ['Parameter','Parameters','MCMC']
class Parameter(object):
def __init__(self, name, distribution, size, current_value=None):
self.name = str(name)
self.distribution = distribution
self.size = size
self.current_value = current_value
@property
def current_value(self):
return self.__current_value
@current_value.setter
def current_value(self, current_value):
self.__current_value = current_value
def __str__(self):
return """
parameter name : %s
parameter distribution : %s
""" % (self.name, self.distribution.__str__())
def __len__(self):
return 1
def is_multivariate(self):
return self.size == (1,1)
class Parameters(object):
def __init__(self, list={}, hierarchy=[]):
self.list = list
self.hierarchy = hierarchy
@property
def parameters(self):
return self.__list
@parameters.setter
def parameters(self, list):
if not (list=={}):
self.__list = list
else:
self.__list = {}
@property
def hierarchy(self):
return self.__hierarchy
@hierarchy.setter
def hierarchy(self, hierarchy):
self.__hierarchy = hierarchy
def __len__(self):
return len(self.list)
def __str__(self):
descr = '(parameters: ----------------------- \n'
descr += ', \n'.join(['name: %s, distribution: %s, size: %s' % (str(l.name), l.distribution.__str__(), l.size) for l in self.list.values()])
descr += '\n ----------------------- )'
return descr
def append(self, parameter):
self.list[parameter.name] = parameter
self.hierarchy.append(parameter.name)
class Distribution(rv_continuous):
pass
class MCMC(object):
def __init__(self, data, strategy):
self.data = data
self.strategy = strategy
self.simulations = None
def summary(self):
smry = ""
return smry
def distribution_parameters(self, parameter_name, *args, **kwargs):
return self.strategy.distribution_parameters(parameter_name, *args, **kwargs) # returns a dictionary
def generate(self, parameter_name):
return self.strategy.generate(parameter_name)
def output(self, burn, parameter_name):
return self.strategy.output(self.simulations, burn, parameter_name)
def define_parameters(self):
return self.strategy.define_parameters()
def initial_value(self,parameter_name):
return self.strategy.initial_value(parameter_name)
def run(self, number_simulations=100):
self.simulations = {key : zeros((param.size[0],param.size[1],number_simulations)) for (key, param) in self.strategy.parameters.list.items()}
for name in self.strategy.parameters.hierarchy:
self.strategy.parameters.list[name].current_value = self.initial_value(name)
for i in range(number_simulations):
print("== step %i ==" % (int(i+1),))
restart_step = True
while restart_step:
for name in self.strategy.parameters.hierarchy:
print("== parameter %s ==" % name)
try:
self.strategy.parameters.list[name].current_value = self.generate(name)
self.simulations[name][:,:,i] = self.strategy.parameters.list[name].current_value.reshape(self.strategy.parameters.list[name].size)
restart_step = False
except:
print("== restart step %i ==" % i)
restart_step = True
break
class ConvergenceAnalysis(object):
| mit | Python |
|
4b561d710e9ad72ad94ffb1ff3ae37db668899e4 | Add generate_examples script | chunfengh/seq2seq,kontact-chan/seq2seq,shashankrajput/seq2seq,chunfengh/seq2seq,chunfengh/seq2seq,chunfengh/seq2seq,kontact-chan/seq2seq,google/seq2seq,shashankrajput/seq2seq,google/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,google/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,shashankrajput/seq2seq,kontact-chan/seq2seq,google/seq2seq,liyi193328/seq2seq,kontact-chan/seq2seq,shashankrajput/seq2seq | seq2seq/scripts/generate_examples.py | seq2seq/scripts/generate_examples.py | #! /usr/bin/env python
"""
Generates a TFRecords file given sequence-aligned source and target files.
Example Usage:
python ./generate_examples.py --source_file <SOURCE_FILE> \
--target_file <TARGET_FILE> \
--output_file <OUTPUT_FILE>
"""
import tensorflow as tf
tf.flags.DEFINE_string('source_file', None,
'File containing content in source language.')
tf.flags.DEFINE_string(
'target_file', None,
'File containing content in target language, parallel line by line to the'
'source file.')
tf.flags.DEFINE_string('output_file', None,
'File to output tf.Example TFRecords.')
FLAGS = tf.flags.FLAGS
def build_example(pair_id, source, target):
"""Transforms pair of 'source' and 'target' strings into a tf.Example.
Assumes that 'source' and 'target' are already tokenized.
Args:
pair_id: id of this pair of source and target strings.
source: a pretokenized source string.
target: a pretokenized target string.
Returns:
a tf.Example corresponding to the 'source' and 'target' inputs.
"""
pair_id = str(pair_id)
source_tokens = source.strip().split(' ')
target_tokens = target.strip().split(' ')
ex = tf.train.Example()
ex.features.feature['pair_id'].bytes_list.value.append(pair_id.encode('utf-8'))
ex.features.feature['source_len'].int64_list.value.append(len(source_tokens))
ex.features.feature['target_len'].int64_list.value.append(len(target_tokens))
source_tokens = [t.encode('utf-8') for t in source_tokens]
target_tokens = [t.encode('utf-8') for t in target_tokens]
ex.features.feature['source_tokens'].bytes_list.value.extend(source_tokens)
ex.features.feature['target_tokens'].bytes_list.value.extend(target_tokens)
return ex
def write_tfrecords(examples, output_file):
"""Writes a list of tf.Examples to 'output_file'.
Args:
examples: An iterator of tf.Example records
outputfile: path to the output file
"""
writer = tf.python_io.TFRecordWriter(output_file)
print('Creating TFRecords file at {}...'.format(output_file))
for row in examples:
writer.write(row.SerializeToString())
writer.close()
print('Wrote to {}'.format(output_file))
def generate_examples(source_file, target_file):
"""Creates an iterator of tf.Example records given aligned source and target files.
Args:
source_file: path to file with newline-separated source strings
target_file: path to file with newline-separated target strings
Returns:
An iterator of tf.Example objects.
"""
with open(source_file) as source_records:
with open(target_file) as target_records:
for i, (source, target) in enumerate(zip(source_records, target_records)):
if i % 10000 == 0:
print('Processed {} records'.format(i))
yield build_example(i, source, target)
def main(unused_argv):
#pylint: disable=unused-argument
examples = generate_examples(
FLAGS.source_file, FLAGS.target_file)
write_tfrecords(examples, FLAGS.output_file)
if __name__ == '__main__':
tf.app.run()
| apache-2.0 | Python |
|
edb28fffe19e2b0de3113b43aeb075119c9e5830 | Work in progress. Creating new data migration. | EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi | emgapi/migrations/0019_auto_20200110_1455.py | emgapi/migrations/0019_auto_20200110_1455.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.24 on 2020-01-10 14:55
from __future__ import unicode_literals
from django.db import migrations
def create_download_description(apps, schema_editor):
DownloadDescriptionLabel = apps.get_model("emgapi", "DownloadDescriptionLabel")
downloads = (
("Phylum level taxonomies UNITE (TSV)", "Phylum level taxonomies UNITE"),
("Phylum level taxonomies ITSoneDB (TSV)", "Phylum level taxonomies ITSoneDB"),
("Taxonomic assignments UNITE (TSV)", "Taxonomic assignments UNITE"),
("Taxonomic assignments ITSoneDB (TSV)", "Taxonomic assignments ITSoneDB"),
)
_downloads = list()
for d in downloads:
_downloads.append(
DownloadDescriptionLabel(
description=d[0],
description_label=d[1]
)
)
DownloadDescriptionLabel.objects.bulk_create(_downloads)
def create_group_types(apps, schema_editor):
DownloadGroupType = apps.get_model("emgapi", "DownloadGroupType")
group_types = (
"Taxonomic analysis ITS",
"Taxonomic analysis ITSoneDB",
"Taxonomic analysis UNITE",
"Pathways and Systems",
# TODO: Do we need sub groups for the function and pathways
)
_groups = list()
for group_type in group_types:
_groups.append(
DownloadGroupType(group_type=group_type)
)
DownloadGroupType.objects.bulk_create(_groups)
class Migration(migrations.Migration):
dependencies = [
('emgapi', '0018_auto_20191105_1052'),
]
operations = [
migrations.RunPython(create_download_description),
migrations.RunPython(create_group_types)
]
| apache-2.0 | Python |
|
d41274ce2a54d37c35f23c8c78de196e57667b0a | add google translate plugin | fridim/cabot,fridim/cabot,fridim/cabot,fridim/cabot,fridim/cabot,fridim/cabot | plugins_examples/translate.py | plugins_examples/translate.py | #!/usr/bin/env python
import sys
import re
from googletrans import Translator
translator = Translator()
line = sys.stdin.readline()
while line:
match = re.search('^:([^\s]+) PRIVMSG (#[^\s]+) :(.+)', line)
if not match:
line = sys.stdin.readline()
continue
who = match.group(1)
chan = match.group(2)
what = match.group(3).strip().strip('\r\n')
def reply(text):
print("PRIVMSG %s :%s" % (chan, text))
sys.stdout.flush()
if what[:10] == ':translate':
m2 = re.search('^:translate (.*)', what)
if not m2:
line = sys.stdin.readline()
continue
try:
reply(translator.translate(m2.group(1), dest='fr').text)
except:
reply('Oups!')
elif what[:4] == ':tr ':
m2 = re.search('^:tr (\w+) (\w+) (.+)', what)
if not m2:
line = sys.stdin.readline()
continue
try:
reply(translator.translate(m2.group(3), src=m2.group(1), dest=m2.group(2)).text)
except:
reply('Oups!')
line = sys.stdin.readline()
| mit | Python |
|
b450734eea74f5f3536a44ed40c006c3da13656c | Add diff.py | jhogan/commonpy,jhogan/epiphany-py | diff.py | diff.py | # vim: set et ts=4 sw=4 fdm=marker
"""
MIT License
Copyright (c) 2016 Jesse Hogan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from diff_match_patch import diff_match_patch
from entities import entity
from pdb import set_trace; B=set_trace
# TODO Write test
class diff(entity):
def __init__(self, data1, data2):
self._data1 = data1
self._data2 = data2
self._ps = None
self._dmp = None
@property
def _diff_match_patch(self):
if not self._dmp:
self._dmp = diff_match_patch()
return self._dmp;
@property
def _patches(self):
if self._ps == None:
dmp = self._diff_match_patch
diffs = dmp.diff_main(self._data1, self._data2)
dmp.diff_cleanupSemantic(diffs)
self._ps = dmp.patch_make(diffs)
return self._ps
def apply(self, data):
return patch_apply(self._patches, data)[0]
def __str__(self):
dmp = self._diff_match_patch
return dmp.patch_toText(self._patches)
| mit | Python |
|
176af82121da5282842fd7e77809da9780ac57a5 | implement server pool. | tonyseek/rsocks,tonyseek/rsocks | rsocks/pool.py | rsocks/pool.py | from __future__ import unicode_literals
import logging
import contextlib
from .eventlib import GreenPool
from .utils import debug
__all__ = ['ServerPool']
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG if debug() else logging.INFO)
logger.addHandler(logging.StreamHandler())
class ServerPool(object):
def __init__(self):
self.pool = GreenPool()
self.servers = {}
@contextlib.contextmanager
def new_server(self, name, server_class, *args, **kwargs):
server = server_class(*args, **kwargs)
yield server
self.servers[name] = server
def loop(self):
for name, server in self.servers.items():
logger.info('Prepared "%s"' % name)
self.pool.spawn(server.loop)
try:
self.pool.waitall()
except (SystemExit, KeyboardInterrupt):
logger.info('Exit')
| mit | Python |
|
416d2b0ffd617c8c6e58360fefe554ad7dc3057b | add example for discovering existing connections | epage/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,PabloCastellano/telepathy-python,epage/telepathy-python,detrout/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,PabloCastellano/telepathy-python,detrout/telepathy-python,max-posedon/telepathy-python,max-posedon/telepathy-python | examples/connections.py | examples/connections.py |
"""
Print out a list of existing Telepathy connections.
"""
import dbus.glib
import telepathy
prefix = 'org.freedesktop.Telepathy.Connection.'
if __name__ == '__main__':
for conn in telepathy.client.Connection.get_connections():
conn_iface = conn[telepathy.CONN_INTERFACE]
handle = conn_iface.GetSelfHandle()
print conn_iface.InspectHandles(
telepathy.CONNECTION_HANDLE_TYPE_CONTACT, [handle])[0]
print ' Protocol:', conn_iface.GetProtocol()
print ' Name:', conn.service_name[len(prefix):]
print
| lgpl-2.1 | Python |
|
ff53f699ac371266791487f0b863531dd8f5236a | Add hug 'hello_world' using to be developed support for optional URLs | jean/hug,shaunstanislaus/hug,origingod/hug,philiptzou/hug,janusnic/hug,STANAPO/hug,gbn972/hug,giserh/hug,philiptzou/hug,STANAPO/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,giserh/hug,alisaifee/hug,alisaifee/hug,timothycrosley/hug,gbn972/hug,shaunstanislaus/hug,origingod/hug,timothycrosley/hug,yasoob/hug,yasoob/hug,jean/hug,janusnic/hug,MuhammadAlkarouri/hug,timothycrosley/hug | examples/hello_world.py | examples/hello_world.py | import hug
@hug.get()
def hello_world():
return "Hello world"
| mit | Python |
|
397ab61df61d5acac46cf60ede38fa928fdacd7c | Create solution.py | lilsweetcaligula/Algorithms,lilsweetcaligula/Algorithms,lilsweetcaligula/Algorithms | data_structures/linked_list/problems/pos_num_to_linked_list/solution.py | data_structures/linked_list/problems/pos_num_to_linked_list/solution.py | import LinkedList
# Linked List Node inside the LinkedList module is declared as:
#
# class Node:
# def __init__(self, val, nxt=None):
# self.val = val
# self.nxt = nxt
#
def ConvertPositiveNumToLinkedList(val: int) -> LinkedList.Node:
node = None
while True:
dig = val % 10
val //= 10
prev = LinkedList.Node(dig, node)
node = prev
if val == 0:
break
return node
| mit | Python |
|
724bc46c85e6ea75ac8d786f4d1706b74df8f330 | Create dictid.py | diamontip/pract,diamontip/pract | dictid.py | dictid.py | a = (1,2)
b = [1,2]
c = {a: 1} # outcome: c= {(1,2): 1}
d = {b: 1} # outcome: error
| mit | Python |
|
0fb7a5559f525ab1149ac41d4b399442f7649664 | add script to show statistics (number of chunks, data volume) | HumanBrainProject/neuroglancer-scripts | scale_stats.py | scale_stats.py | #! /usr/bin/env python3
#
# Copyright (c) 2016, 2017, Forschungszentrum Juelich GmbH
# Author: Yann Leprince <y.leprince@fz-juelich.de>
#
# This software is made available under the MIT licence, see LICENCE.txt.
import collections
import json
import math
import os
import os.path
import sys
import numpy as np
SI_PREFIXES = [
(1, ""),
(1024, "ki"),
(1024 * 1024, "Mi"),
(1024 * 1024 * 1024, "Gi"),
(1024 * 1024 * 1024 * 1024, "Ti"),
(1024 * 1024 * 1024 * 1024 * 1024, "Pi"),
(1024 * 1024 * 1024 * 1024 * 1024 * 1024, "Ei"),
]
def readable(count):
for factor, prefix in SI_PREFIXES:
if count > 10 * factor:
num_str = format(count / factor, ".0f")
else:
num_str = format(count / factor, ".1f")
if len(num_str) <= 3:
return num_str + " " + prefix
# Fallback: use the last prefix
factor, prefix = SI_PREFIXES[-1]
return "{:,.0f} {}".format(count / factor, prefix)
def show_scales_info(info):
total_size = 0
total_chunks = 0
total_directories = 0
dtype = np.dtype(info["data_type"]).newbyteorder("<")
num_channels = info["num_channels"]
for scale in info["scales"]:
scale_name = scale["key"]
size = scale["size"] #np.array(scale["size"], dtype=np.uintp)
for chunk_size in scale["chunk_sizes"]:
#chunk_size = np.array(chunk_size, dtype=np.uintp)
size_in_chunks = [(s - 1) // cs + 1 for s, cs in zip(size, chunk_size)]
num_chunks = np.prod(size_in_chunks)
num_directories = size_in_chunks[0] * (1 + size_in_chunks[1])
size_bytes = np.prod(size) * dtype.itemsize * num_channels
print("Scale {}, chunk size {}:"
" {:,d} chunks, {:,d} directories, raw uncompressed size {}B"
.format(scale_name, chunk_size,
num_chunks, num_directories, readable(size_bytes)))
total_size += size_bytes
total_chunks += num_chunks
total_directories += num_directories
print("---")
print("Total: {:,d} chunks, {:,d} directories, raw uncompressed size {}B"
.format(total_chunks, total_directories, readable(total_size)))
def show_scale_file_info(input_info_filename):
"""Show information about a list of scales from an input JSON file"""
with open(input_info_filename) as f:
info = json.load(f)
show_scales_info(info)
def parse_command_line(argv):
"""Parse the script's command line."""
import argparse
parser = argparse.ArgumentParser(
description="""\
Show information about a list of scales in Neuroglancer "info" JSON file format
""")
parser.add_argument("info_file", nargs="?", default="./info",
help="JSON file containing the information")
args = parser.parse_args(argv[1:])
return args
def main(argv):
"""The script's entry point."""
args = parse_command_line(argv)
return show_scale_file_info(args.info_file) or 0
if __name__ == "__main__":
sys.exit(main(sys.argv))
| mit | Python |
|
7d574c1f6d194df1f2b2009fb2e48fbaacaca873 | Add migration for_insert_base | openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform | oedb_datamodels/versions/6887c442bbee_insert_base.py | oedb_datamodels/versions/6887c442bbee_insert_base.py | """Add _insert_base
Revision ID: 6887c442bbee
Revises: 3886946416ba
Create Date: 2019-04-25 16:09:20.572057
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '6887c442bbee'
down_revision = '3886946416ba'
branch_labels = None
depends_on = None
def upgrade():
op.create_table('_insert_base',
sa.Column('_id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('_message', sa.Text(), nullable=True),
sa.Column('_user', sa.String(length=50), nullable=True),
sa.Column('_submitted', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.Column('_autocheck', sa.Boolean(), server_default=sa.text('false'), nullable=True),
sa.Column('_humancheck', sa.Boolean(), server_default=sa.text('false'), nullable=True),
sa.Column('_type', sa.String(length=8), nullable=True),
sa.Column('_applied', sa.Boolean(), server_default=sa.text('false'), nullable=True),
sa.PrimaryKeyConstraint('_id'),
schema='public'
)
def downgrade():
op.drop_table('_insert_base', schema='public')
| agpl-3.0 | Python |
|
2ef707337adc3d0abc33ca638b2adb70a681bd12 | update for new API | vighneshbirodkar/scikit-image,paalge/scikit-image,ajaybhat/scikit-image,ajaybhat/scikit-image,ofgulban/scikit-image,Hiyorimi/scikit-image,rjeli/scikit-image,rjeli/scikit-image,ofgulban/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,rjeli/scikit-image,Hiyorimi/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image | doc/examples/filters/plot_denoise.py | doc/examples/filters/plot_denoise.py | """
====================
Denoising a picture
====================
In this example, we denoise a noisy version of the picture of the astronaut
Eileen Collins using the total variation and bilateral denoising filter.
These algorithms typically produce "posterized" images with flat domains
separated by sharp edges. It is possible to change the degree of posterization
by controlling the tradeoff between denoising and faithfulness to the original
image.
Total variation filter
----------------------
The result of this filter is an image that has a minimal total variation norm,
while being as close to the initial image as possible. The total variation is
the L1 norm of the gradient of the image.
Bilateral filter
----------------
A bilateral filter is an edge-preserving and noise reducing filter. It averages
pixels based on their spatial closeness and radiometric similarity.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage import data, img_as_float
from skimage.restoration import denoise_tv_chambolle, denoise_bilateral
astro = img_as_float(data.astronaut())
astro = astro[220:300, 220:320]
noisy = astro + 0.6 * astro.std() * np.random.random(astro.shape)
noisy = np.clip(noisy, 0, 1)
fig, ax = plt.subplots(nrows=2, ncols=3, figsize=(8, 5), sharex=True,
sharey=True, subplot_kw={'adjustable': 'box-forced'})
plt.gray()
ax[0, 0].imshow(noisy)
ax[0, 0].axis('off')
ax[0, 0].set_title('noisy')
ax[0, 1].imshow(denoise_tv_chambolle(noisy, weight=0.1, multichannel=True))
ax[0, 1].axis('off')
ax[0, 1].set_title('TV')
ax[0, 2].imshow(denoise_bilateral(noisy, sigma_color=0.05, sigma_spatial=15))
ax[0, 2].axis('off')
ax[0, 2].set_title('Bilateral')
ax[1, 0].imshow(denoise_tv_chambolle(noisy, weight=0.2, multichannel=True))
ax[1, 0].axis('off')
ax[1, 0].set_title('(more) TV')
ax[1, 1].imshow(denoise_bilateral(noisy, sigma_color=0.1, sigma_spatial=15))
ax[1, 1].axis('off')
ax[1, 1].set_title('(more) Bilateral')
ax[1, 2].imshow(astro)
ax[1, 2].axis('off')
ax[1, 2].set_title('original')
fig.tight_layout()
plt.show()
| """
====================
Denoising a picture
====================
In this example, we denoise a noisy version of the picture of the astronaut
Eileen Collins using the total variation and bilateral denoising filter.
These algorithms typically produce "posterized" images with flat domains
separated by sharp edges. It is possible to change the degree of posterization
by controlling the tradeoff between denoising and faithfulness to the original
image.
Total variation filter
----------------------
The result of this filter is an image that has a minimal total variation norm,
while being as close to the initial image as possible. The total variation is
the L1 norm of the gradient of the image.
Bilateral filter
----------------
A bilateral filter is an edge-preserving and noise reducing filter. It averages
pixels based on their spatial closeness and radiometric similarity.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage import data, img_as_float
from skimage.restoration import denoise_tv_chambolle, denoise_bilateral
astro = img_as_float(data.astronaut())
astro = astro[220:300, 220:320]
noisy = astro + 0.6 * astro.std() * np.random.random(astro.shape)
noisy = np.clip(noisy, 0, 1)
fig, ax = plt.subplots(nrows=2, ncols=3, figsize=(8, 5), sharex=True,
sharey=True, subplot_kw={'adjustable': 'box-forced'})
plt.gray()
ax[0, 0].imshow(noisy)
ax[0, 0].axis('off')
ax[0, 0].set_title('noisy')
ax[0, 1].imshow(denoise_tv_chambolle(noisy, weight=0.1, multichannel=True))
ax[0, 1].axis('off')
ax[0, 1].set_title('TV')
ax[0, 2].imshow(denoise_bilateral(noisy, sigma_range=0.05, sigma_spatial=15))
ax[0, 2].axis('off')
ax[0, 2].set_title('Bilateral')
ax[1, 0].imshow(denoise_tv_chambolle(noisy, weight=0.2, multichannel=True))
ax[1, 0].axis('off')
ax[1, 0].set_title('(more) TV')
ax[1, 1].imshow(denoise_bilateral(noisy, sigma_range=0.1, sigma_spatial=15))
ax[1, 1].axis('off')
ax[1, 1].set_title('(more) Bilateral')
ax[1, 2].imshow(astro)
ax[1, 2].axis('off')
ax[1, 2].set_title('original')
fig.tight_layout()
plt.show()
| bsd-3-clause | Python |
9e6a016c5a59b25199426f6825b2c83571997e68 | Refactor buildbot tests so that they can be used downstream. | ondra-novak/chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,jaruba/chromium.src,ltilve/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,hgl888/chromium-crosswalk,dushu1203/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,patrickm/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,patrickm/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dednal/chromium.src,ChromiumWebApps/chromium,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,jaruba/chromium.src,mogoweb/chromium-crosswalk,patrickm/chromium.src,anirudhSK/chromium,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,anirudhSK/chromium,ltilve/chromium,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,Chilledheart/chromium,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,axinging/chromium-crosswalk,ChromiumWebApps/chromium,ltilve/chromium,Chilledheart/chromium,Jonekee/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,patrickm/chromium.src,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,ChromiumWebApps/chromium,dushu1203/chromium.src,M4sse/chromium.src,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,Fireblend/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,patrickm/chromium.src,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,Chilledheart/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,jaruba/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,jaruba/chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,jaruba/chromium.src,dednal/chromium.src,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,M4sse/chromium.src,ondra-novak/chromium.src,Chilledheart/chromium,littlstar/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,dednal/chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,littlstar/chromium.src,markYoungH/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,ltilve/chromium,dushu1203/chromium.src,Just-D/chromium-1,markYoungH/chromium.src,jaruba/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,Jonekee/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,Just-D/chromium-1,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,ChromiumWebApps/chromium,ChromiumWebApps/chromium,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,littlstar/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,M4sse/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,anirudhSK/chromium | build/android/buildbot/tests/bb_run_bot_test.py | build/android/buildbot/tests/bb_run_bot_test.py | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotProcesses(bot_process_map):
code = 0
for bot, proc in bot_process_map:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running the bot script with id="%s"' % bot, err
return code
def main():
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bb_run_bot.GetBotStepMap()]
return RunBotProcesses(procs)
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotsWithTesting(bot_step_map):
code = 0
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bot_step_map]
for bot, proc in procs:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running bb_run_bot with id="%s"' % bot, err
return code
def main():
return RunBotsWithTesting(bb_run_bot.GetBotStepMap())
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | Python |
eb9f9d8bfa5ea278e1fb39c59ed660a223b1f6a9 | Add flask api app creation to init | EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list | api/__init__.py | api/__init__.py | from flask_sqlalchemy import SQLAlchemy
import connexion
from config import config
db = SQLAlchemy()
def create_app(config_name):
app = connexion.FlaskApp(__name__, specification_dir='swagger/')
app.add_api('swagger.yaml')
application = app.app
application.config.from_object(config[config_name])
db.init_app(application)
return application
from api.api import *
| mit | Python |
|
c10eb3861daf48c13ec854bd210db5d5e1163b11 | Add LotGroupAutocomplete | 596acres/django-livinglots-lots,596acres/django-livinglots-lots | livinglots_lots/autocomplete_light_registry.py | livinglots_lots/autocomplete_light_registry.py | from autocomplete_light import AutocompleteModelBase, register
from livinglots import get_lotgroup_model
class LotGroupAutocomplete(AutocompleteModelBase):
autocomplete_js_attributes = {'placeholder': 'lot group name',}
search_fields = ('name',)
def choices_for_request(self):
choices = super(LotGroupAutocomplete, self).choices_for_request()
if not self.request.user.is_staff:
choices = choices.none()
return choices
register(get_lotgroup_model(), LotGroupAutocomplete)
| agpl-3.0 | Python |
|
2527683522394c823bc100c75f1ce4885949136e | add paths module for other modules to find paths from one place | aacanakin/glim | glim/paths.py | glim/paths.py | import os
from termcolor import colored
PROJECT_PATH = os.getcwd()
APP_PATH = os.path.join(PROJECT_PATH, 'app')
EXT_PATH = os.path.join(PROJECT_PATH, 'ext')
GLIM_ROOT_PATH = os.path.dirname(os.path.dirname(__file__))
PROTO_PATH = os.path.join(os.path.dirname(__file__), 'prototype')
import sys
from pprint import pprint as p
def configure_sys_path():
if GLIM_ROOT_PATH == PROJECT_PATH:
print colored('Development mode is on, sys.path is being configured', 'yellow')
sys.path.pop(0)
sys.path.insert(0, GLIM_ROOT_PATH)
else:
sys.path.insert(0, PROJECT_PATH)
def controllers():
return os.path.join(APP_PATH, 'controllers.py')
def config(env):
return os.path.join(APP_PATH, 'config', '%s.py' % env)
def start():
return os.path.join(APP_PATH, 'start.py')
def commands():
return os.path.join(APP_PATH, 'commands.py')
def routes():
return os.path.join(APP_PATH, 'routes.py')
def extensions(ext):
return os.path.join(EXT_PATH, '%s' % ext, '%s.py' % ext)
def extension_commands(ext):
return os.path.join(EXT_PATH, '%s' % ext, 'commands.py') | mit | Python |
|
24f21146b01ff75a244df40d1626c54883abeb1a | Add helper-lib for json object conversion and split dicts | UngaForskareStockholm/medlem2 | lib/helpers.py | lib/helpers.py | #! /usr/bin/env python2.7
import datetime
def typecast_json(o):
if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
return o.isoformat()
else:
return o
def split_dict(src, keys):
result = dict()
for k in set(src.keys()) & set(keys):
result[k] = src[k]
return result
| bsd-3-clause | Python |
|
0f5c0168b257436882f837e5d521cce46a740ad6 | Add symbol translator to make utf-8 variables compilable | FInAT/FInAT | finat/greek_alphabet.py | finat/greek_alphabet.py | """Translation table from utf-8 to greek variable names, taken from:
https://gist.github.com/piquadrat/765262#file-greek_alphabet-py
"""
def translate_symbol(symbol):
"""Translates utf-8 sub-strings into compilable variable names"""
name = symbol.decode("utf-8")
for k, v in greek_alphabet.iteritems():
name = name.replace(k, v)
return name
greek_alphabet = {
u'\u0391': 'Alpha',
u'\u0392': 'Beta',
u'\u0393': 'Gamma',
u'\u0394': 'Delta',
u'\u0395': 'Epsilon',
u'\u0396': 'Zeta',
u'\u0397': 'Eta',
u'\u0398': 'Theta',
u'\u0399': 'Iota',
u'\u039A': 'Kappa',
u'\u039B': 'Lamda',
u'\u039C': 'Mu',
u'\u039D': 'Nu',
u'\u039E': 'Xi',
u'\u039F': 'Omicron',
u'\u03A0': 'Pi',
u'\u03A1': 'Rho',
u'\u03A3': 'Sigma',
u'\u03A4': 'Tau',
u'\u03A5': 'Upsilon',
u'\u03A6': 'Phi',
u'\u03A7': 'Chi',
u'\u03A8': 'Psi',
u'\u03A9': 'Omega',
u'\u03B1': 'alpha',
u'\u03B2': 'beta',
u'\u03B3': 'gamma',
u'\u03B4': 'delta',
u'\u03B5': 'epsilon',
u'\u03B6': 'zeta',
u'\u03B7': 'eta',
u'\u03B8': 'theta',
u'\u03B9': 'iota',
u'\u03BA': 'kappa',
u'\u03BB': 'lamda',
u'\u03BC': 'mu',
u'\u03BD': 'nu',
u'\u03BE': 'xi',
u'\u03BF': 'omicron',
u'\u03C0': 'pi',
u'\u03C1': 'rho',
u'\u03C3': 'sigma',
u'\u03C4': 'tau',
u'\u03C5': 'upsilon',
u'\u03C6': 'phi',
u'\u03C7': 'chi',
u'\u03C8': 'psi',
u'\u03C9': 'omega',
}
| mit | Python |
|
03951a227bfafb0b1017354bdbf3a1247322fc9b | Fix cycler tests | emmagordon/Axelrod,uglyfruitcake/Axelrod,risicle/Axelrod,kathryncrouch/Axelrod,uglyfruitcake/Axelrod,mojones/Axelrod,bootandy/Axelrod,risicle/Axelrod,mojones/Axelrod,kathryncrouch/Axelrod,emmagordon/Axelrod,bootandy/Axelrod | axelrod/tests/unit/test_cycler.py | axelrod/tests/unit/test_cycler.py | """Test for the Cycler strategies."""
import itertools
import axelrod
from .test_player import TestPlayer, test_four_vector
C, D = 'C', 'D'
class TestAntiCycler(TestPlayer):
name = "AntiCycler"
player = axelrod.AntiCycler
expected_classifier = {
'memory_depth': float('inf'),
'stochastic': False,
'inspects_source': False,
'manipulates_source': False,
'manipulates_state': False
}
def test_strategy(self):
"""Starts by cooperating"""
responses = [C, D, C, C, D, C, C, C, D, C, C, C, C, D, C, C, C]
self.responses_test([], [], responses)
def test_cycler_factory(cycle):
class TestCycler(TestPlayer):
name = "Cycler %s" % cycle
player = getattr(axelrod, 'Cycler%s' % cycle)
expected_classifier = {
'memory_depth': len(cycle),
'stochastic': False,
'inspects_source': False,
'manipulates_source': False,
'manipulates_state': False
}
def test_strategy(self):
"""Starts by cooperating"""
for i in range(20):
responses = itertools.islice(itertools.cycle(cycle), i)
self.responses_test([], [], responses)
return TestCycler
TestCyclerCCD = test_cycler_factory("CCD")
TestCyclerCCCD = test_cycler_factory("CCCD")
TestCyclerCCCCCD = test_cycler_factory("CCCCCD")
| """Test for the Cycler strategies."""
import itertools
import axelrod
from .test_player import TestPlayer, test_four_vector
C, D = 'C', 'D'
class TestAntiCycler(TestPlayer):
name = "AntiCycler"
player = axelrod.AntiCycler
expected_classifier = {
'memory_depth': float('inf'),
'stochastic': False,
'inspects_source': False,
'manipulates_source': False,
'manipulates_state': False
}
def test_strategy(self):
"""Starts by cooperating"""
responses = [C, D, C, C, D, C, C, C, D, C, C, C, C, D, C, C, C]
self.responses_test([], [], responses)
def test_cycler_factory(cycle):
class TestCycler(TestPlayer):
name = "Cycler %s" % cycle
player = getattr(axelrod, 'Cycler%s' % cycle)
expected_classifier = {
'memory_depth': 1,
'stochastic': False,
'inspects_source': False,
'manipulates_source': False,
'manipulates_state': False
}
def test_strategy(self):
"""Starts by cooperating"""
for i in range(20):
responses = itertools.islice(itertools.cycle(cycle), i)
self.responses_test([], [], responses)
return TestCycler
TestCyclerCCD = test_cycler_factory("CCD")
TestCyclerCCCD = test_cycler_factory("CCCD")
TestCyclerCCCCCD = test_cycler_factory("CCCCCD")
| mit | Python |
ca2269c5ae568cd63253af7bc614a79d26f7f8ac | Add ns_drop_indexes command. | niwinz/needlestack | needlestack/management/commands/ns_drop_indexes.py | needlestack/management/commands/ns_drop_indexes.py | # -*- coding: utf-8 -*-
from __future__ import print_function, absolute_import, unicode_literals
from django.core.management.base import BaseCommand, CommandError
from needlestack import commands
class Command(BaseCommand):
help = 'Sync all defined indexes with a current backend'
option_list = BaseCommand.option_list + (
make_option('--backend',
action='store',
dest='backend',
default='default'),)
def handle(self, *args, **options):
commands.drop_indexes(options["backend"], options["verbosity"])
| bsd-3-clause | Python |
|
b8a84e612d67f7948d6dec8c202ac8a73390f9dc | make sure all protein ids are unique in a genbank file | linsalrob/EdwardsLab,linsalrob/EdwardsLab,linsalrob/EdwardsLab,linsalrob/EdwardsLab,linsalrob/EdwardsLab | proteins/unique_protein_ids.py | proteins/unique_protein_ids.py | """
Test a genbank file and make sure all the protein_ids are unique
"""
import os
import sys
import argparse
from Bio import SeqIO
__author__ = 'Rob Edwards'
__copyright__ = 'Copyright 2020, Rob Edwards'
__credits__ = ['Rob Edwards']
__license__ = 'MIT'
__maintainer__ = 'Rob Edwards'
__email__ = 'raedwards@gmail.com'
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=" ")
parser.add_argument('-f', help='genbank file', required=True)
args = parser.parse_args()
pids = set()
rc = 0
for seq in SeqIO.parse(args.f, "genbank"):
rc+=1;
print(f"record {rc}: {seq.id}")
for feat in seq.features:
if feat.type != "CDS":
continue
if 'protein_id' not in feat.qualifiers:
thisid = " ".join(feat.qualifiers.get('locus_tag', [str(feat.location)]))
print(f"No protein id in {thisid}")
continue
pid = "|".join(feat.qualifiers["protein_id"])
if pid in pids:
print(f"{pid} is not unique")
pids.add(pid) | mit | Python |
|
61fa404da3eeb3b695b12f398c27f641e1e681e2 | add codegen script for fname.pyf.src -> _fnamemodule.c | matthew-brett/scipy,matthew-brett/scipy,matthew-brett/scipy,matthew-brett/scipy,matthew-brett/scipy | tools/generate_f2pymod.py | tools/generate_f2pymod.py | """
Process f2py template files (`filename.pyf.src` -> `filename.pyf`)
Usage: python generate_pyf.py filename.pyf.src -o filename.pyf
"""
import os
import sys
import subprocess
import argparse
from numpy.distutils.from_template import process_file
def main():
parser = argparse.ArgumentParser()
parser.add_argument("infile", type=str,
help="Path to the input file")
parser.add_argument("-o", "--outfile", type=str,
help="Path to the output file")
args = parser.parse_args()
# Read .pyf.src file
code = process_file(args.infile)
# Write out the .pyf file
outdir = os.path.split(args.outfile)[0]
outdir_abs = os.path.join(os.getcwd(), outdir)
fname_pyf = os.path.join(outdir,
os.path.splitext(os.path.split(args.infile)[1])[0])
with open(fname_pyf, 'w') as f:
f.write(code)
# Now invoke f2py to generate the C API module file
p = subprocess.Popen([sys.executable, '-m', 'numpy.f2py', fname_pyf,
'--build-dir', outdir_abs], #'--quiet'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
cwd=os.getcwd())
out, err = p.communicate()
if not (p.returncode == 0):
raise RuntimeError(f"Writing {args.outfile} with f2py failed!\n"
f"{out}\n"
r"{err}")
if __name__ == "__main__":
main()
| bsd-3-clause | Python |
|
0f94251c7cc844042c9e3ce160d78e4d81d895ea | add log module | johnnymo87/simple-db-migrate,guilhermechapiewski/simple-db-migrate | src/log.py | src/log.py | import logging
import os
from datetime import datetime
class LOG(object):
logger = None
def __init__(self, log_dir):
if log_dir:
if not os.path.exists(log_dir):
os.makedirs(log_dir)
self.logger = logging.getLogger('simple-db-migrate')
now = datetime.now()
filename = "%s/%s.log" %(os.path.abspath(log_dir), now.strftime("%Y%m%d%H%M%S"))
hdlr = logging.FileHandler(filename)
formatter = logging.Formatter('%(message)s')
hdlr.setFormatter(formatter)
self.logger.addHandler(hdlr)
self.logger.setLevel(logging.DEBUG)
def debug(self, msg):
if self.logger:
self.logger.debug(msg)
def info(self, msg):
if self.logger:
self.logger.info(msg)
def error(self, msg):
if self.logger:
self.logger.error(msg)
def warn(self, msg):
if self.logger:
self.logger.warn(msg)
| apache-2.0 | Python |
|
bc567eda01abcaf23717f5da5f494c1be46f47da | Create ValAnagram_001.py | Chasego/cod,Chasego/codirit,Chasego/codi,Chasego/cod,Chasego/codi,cc13ny/algo,Chasego/codirit,Chasego/codirit,Chasego/codirit,cc13ny/Allin,cc13ny/algo,Chasego/codirit,cc13ny/Allin,Chasego/codi,cc13ny/Allin,Chasego/cod,cc13ny/algo,cc13ny/algo,cc13ny/Allin,Chasego/codi,Chasego/cod,cc13ny/Allin,cc13ny/algo,Chasego/codi,Chasego/cod | leetcode/242-Valid-Anagram/ValAnagram_001.py | leetcode/242-Valid-Anagram/ValAnagram_001.py | class Solution:
# @param {string} s
# @param {string} t
# @return {boolean}
def anaRepresentation(self, s):
p = {}
for c in s:
if c in p:
p[c] += 1
else:
p[c] = 1
return p
def isAnagram(self, s, t):
if len(s) != len(t):
return False
p = self.anaRepresentation(s)
q = self.anaRepresentation(t)
for c in p:
if c not in q or (c in q and p[c] != q[c]):
return False
return True
| mit | Python |
|
682d6b3ca9c4a0dd49f9762ddd20ac746971e3eb | Create solution.py | lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges | leetcode/easy/find_the_difference/py/solution.py | leetcode/easy/find_the_difference/py/solution.py | class Solution(object):
def findTheDifference(self, s, t):
"""
:type s: str
:type t: str
:rtype: str
"""
import collections
import itertools
c1 = collections.Counter(s)
c2 = collections.Counter(t)
for char in set(itertools.chain(s, t)):
if c1[char] != c2[char]:
return char
return None
| mit | Python |
|
9e128fdd5af0598a233416de5a1e8f2d3a74fdc0 | Enforce unique paths and names | jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces | spaces/migrations/0006_unique_space_document.py | spaces/migrations/0006_unique_space_document.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-15 02:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('spaces', '0005_document_space_doc'),
]
operations = [
migrations.AlterField(
model_name='space',
name='name',
field=models.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name='space',
name='path',
field=models.CharField(max_length=40, unique=True),
),
migrations.AlterUniqueTogether(
name='document',
unique_together=set([('path', 'parent')]),
),
]
| mit | Python |
|
0256868a3b261e598689eebdf5ac5f939ea20a0d | add test cases for mni module | arokem/nipy,nipy/nipy-labs,arokem/nipy,alexis-roche/niseg,alexis-roche/nireg,alexis-roche/niseg,arokem/nipy,alexis-roche/nipy,bthirion/nipy,alexis-roche/nipy,nipy/nipy-labs,nipy/nireg,alexis-roche/nipy,bthirion/nipy,alexis-roche/nipy,alexis-roche/register,bthirion/nipy,nipy/nireg,arokem/nipy,alexis-roche/register,alexis-roche/nireg,alexis-roche/register,bthirion/nipy | lib/neuroimaging/reference/tests/test_mni.py | lib/neuroimaging/reference/tests/test_mni.py | import unittest
import numpy as N
import neuroimaging.reference.mni as mni
class MNITest(unittest.TestCase):
def testMNI(self):
""" ensure all elementes of the interface exist """
m = mni.MNI
g = mni.generic
m_v = mni.MNI_voxel
m_w = mni.MNI_world
m_m = mni.MNI_mapping
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |
|
d91adef072e2150edde62a49bea4eecb6a26a6ac | add sns_notify script | pyconjp/pyconjp-cron | sns_notify.py | sns_notify.py | #!/usr/bin/env python
from datetime import datetime, date
from dateutil import parser
from google_sheets import get_service
SHEET_ID = "1lpa9p_dCyTckREf09-oA2C6ZAMACCrgD9W3HQSKeoSI"
def is_valid_period(start, end):
"""
今日が start, end の範囲内かどうかを返す
:params start: 通知開始日の文字列または空文字
:params end: 通知終了日の文字列または空文字
:return: True: 通知範囲内、False: 通知範囲外
"""
# 文字列を date 型にする
try:
start = parser.parse(start).date()
except ValueError:
start = date(2000, 1, 1) # 過去の日付にする
try:
end = parser.parse(end).date()
except ValueError:
end = date(3000, 1, 1) # 未来の日付にする
today = date.today()
# 今日が範囲内かどうかを返す
return start <= today <= end
def sns_notify(row, now):
"""
スプレッドシートのデータ1行分をSNSに通知する。
データは以下の形式。
1. 通知日(YYYY/MM/DD または曜日指定)
2. 通知時刻
3. 送信メッセージ
4. 送信するURL
5. 通知開始日
6. 通知終了日
7. twitter通知フラグ(1なら通知)
8. facebook通知フラグ(1なら通知)
:param row: スプレッドシートの1行分のデータ
:param now: 現在時刻(datetime)
"""
# データの件数が少なかったらなにもしない
if len(row) < 7:
return
# 通知期間の範囲外ならなにもしない
if not is_valid_period(row[4], row[5]):
return
# 通知対象日時じゃなかったらなにもしない
# メッセージ送信する
if row[6] == '1':
pass
if row[7] == '1':
pass
def main():
"""
PyCon JP Twitter/Facebook通知シートからデータを読み込んで通知する
"""
now = datetime.now()
service = get_service()
# シートから全データを読み込む
result = service.spreadsheets().values().get(
spreadsheetId=SHEET_ID, range='messages!A4:H').execute()
for row in result.get('values', []):
# 1行のデータを元にSNSへの通知を実行
sns_notify(row, now)
if __name__ == '__main__':
main()
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.