commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
3c2c6002cf25dab301044f2dc4c2c3bbd99e121e | add script file | m4b/get-polymer-imports | get-polymer-imports.py | get-polymer-imports.py | #!/usr/bin/env python
import os
import sys

#rootDir = "bower_components"

# Require the bower components directory as argv[1] (Python 2 script).
numArgs = len(sys.argv)
if numArgs <= 1:
    print 'usage: get_all_imports.py <bower_components directory> [prefix (default "..")]'
    exit(1)
rootDir = sys.argv[1]
# Safety check: only walk well-known bower install directories.
if not (rootDir == "bower_components" or rootDir == "components"):
    print 'Cowardly refusing to search non bower directory "' + rootDir + '"'
    exit(1)
# Optional argv[2] overrides the ".." prefix prepended to generated hrefs.
bowerPrefix = ".."
if numArgs >= 3:
    bowerPrefix = sys.argv[2]
def shouldInclude(f, path):
    """Decide whether file *f* found under *path* should become an import.

    Rejects anything whose path mentions a blacklisted directory, anything
    that is not an .html file, and index.html entry points.
    """
    skip_tokens = ('src', 'demo', 'test', 'polymer', 'web-animations')
    if any(token in path for token in skip_tokens):
        return False
    stem, ext = os.path.splitext(f)
    return ext == ".html" and stem != "index"
def getImports(dir):
    """Walk *dir* and collect prefixed paths of every importable HTML file."""
    found = []
    for root, dirs, files in os.walk(dir):
        # Each href is the bower prefix joined with the file's location.
        prefix = os.path.join(bowerPrefix, root)
        for name in files:
            if shouldInclude(name, prefix):
                found.append(os.path.join(prefix, name))
    return found
def tagify(i):
    """Wrap a single import path in an HTML import <link> tag."""
    return '<link rel="import" href="' + i + '">'

def htmlify(imports):
    """Convert a list of import paths into a list of <link> tags."""
    return [tagify(i) for i in imports]
# polymer is special: its import must always be emitted first, so it is
# handled separately instead of being collected by the directory walk.
polymer = os.path.join(bowerPrefix, rootDir, "polymer/polymer.html")

def printHtml(html):
    # Print the polymer import first, then every collected import tag.
    print tagify(polymer)
    for tag in html:
        print tag

# Entry point: walk the bower directory and print one <link> per import.
imports = getImports(rootDir)
html = htmlify(imports)
printHtml(html)
| mit | Python |
|
30dcfef191666951a4084a4b9d9c135c9edb5de8 | Create check.py | password123456/check_ssl_chain_certificate | check.py | check.py | # -*- coding: utf-8 -*-
__author__ = 'https://github.com/password123456/'

import sys
# Python 2 idiom: force utf-8 as the interpreter's default string encoding.
reload(sys)
sys.setdefaultencoding('utf-8')
import requests

class bcolors:
    # ANSI escape sequences used to colorize terminal output.
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'  # resets all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def DO_CHECK_CERTIFICATE(url):
    # Fetch *url* with TLS verification enabled and print whether the
    # certificate chain validates.
    try:
        user_agent = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.112 Safari/537.36', 'Connection':'keep-alive'}
        # NOTE: 05 is a Python 2 octal literal (== 5 seconds); invalid syntax in Python 3.
        r = requests.get(url, headers=user_agent, verify=True, timeout=05)
        result = '%s %s %s' % ( url, r.status_code, r.headers['server'])
        print '%s[-] OK::%s %s %s' % (bcolors.OKGREEN, bcolors.OKBLUE, result, bcolors.ENDC)
    except Exception as e:
        error = '%s' % e
        if 'CERTIFICATE_VERIFY_FAILED' in error:
            print '%s[-] ERROR::%s %s CERTIFICATE_VERIFY_FAILED %s' % (bcolors.WARNING, bcolors.FAIL, url, bcolors.ENDC)
        else:
            # NOTE(review): if requests.get itself raised, `r` was never bound,
            # so this close() raises NameError -- verify the intent here.
            r.close()

def READ_URL():
    # Read hostnames from url.txt (one per line) and check each over https.
    f = open('url.txt', 'r')
    for line in f.readlines():
        line = line.strip()
        line = 'https://%s' % line
        DO_CHECK_CERTIFICATE(line)

def main():
    READ_URL()

if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        sys.exit(0)
    except Exception, e:
        print '%s[-] Exception::%s%s' % (bcolors.WARNING, e, bcolors.ENDC)
| apache-2.0 | Python |
|
0b9810227b91b7ee7bb58cee2dccec992c752768 | add xmpp plugin | melmothx/jsonbot,melmothx/jsonbot,melmothx/jsonbot | gozerlib/plugs/xmpp.py | gozerlib/plugs/xmpp.py | # gozerlib/plugs/xmpp.py
#
#
""" xmpp related commands. """
## gozerlib imports
from gozerlib.commands import cmnds
from gozerlib.examples import examples
from gozerlib.fleet import fleet
## commands
def handle_xmppinvite(bot, event):
    """ invite (subscribe to) a different user. """
    if not event.rest:
        event.missing("<list of jids>")
        return
    # The invitation must be sent from a jabber-capable bot, which may not
    # be the bot that received this event.
    jabber = fleet.getfirstjabber()
    if not jabber:
        event.reply("can't find jabber bot in fleet")
        return
    for jid in event.args:
        jabber.invite(jid)
    event.done()

cmnds.add("xmpp-invite", handle_xmppinvite, 'OPER')
examples.add("xmpp-invite", "invite a user.", "xmpp-invite jsoncloud@appspot.com")
| mit | Python |
|
e1fad0e5759908b3c1f6d3bafa2110cb4c26b7e1 | Add get_jpp_env command... | tamasgal/km3pipe,tamasgal/km3pipe | km3pipe/shell.py | km3pipe/shell.py | # coding=utf-8
# cython: profile=True
# Filename: shell.py
# cython: embedsignature=True
# pylint: disable=C0103
"""
Some shell helpers
"""
from __future__ import division, absolute_import, print_function
import os
from .logger import logging
__author__ = "Tamas Gal"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Tamas Gal"
__email__ = "tgal@km3net.de"
__status__ = "Development"
log = logging.getLogger(__name__) # pylint: disable=C0103
def get_jpp_env(jpp_dir):
    """Return the environment dict of a loaded Jpp env.

    Sources ``<jpp_dir>/setenv.sh`` in a subshell, dumps the resulting
    environment with ``env`` and parses it into a dict. The returned env
    can be passed to `subprocess.Popen("J...", env=env)` to execute Jpp
    commands.

    Parameters
    ----------
    jpp_dir: str
        Path of the Jpp installation (directory containing ``setenv.sh``).

    Returns
    -------
    dict mapping environment variable names to their string values.
    """
    raw = os.popen("source {0}/setenv.sh {0} && env"
                   .format(jpp_dir)).read()
    env = {}
    for line in raw.split('\n'):
        if '=' not in line:
            continue
        # Split on the first '=' only, so values that themselves contain
        # '=' (e.g. encoded blobs or option strings) are kept verbatim.
        # The previous implementation joined the remaining fields without
        # the '=' separators, silently corrupting such values.
        key, _, value = line.partition('=')
        env[key] = value
    return env
| mit | Python |
|
1360a7031d4389f2ecdef24ce3190a88e5f8f794 | add trivial pjit tests | google/jax,tensorflow/probability,google/jax,google/jax,tensorflow/probability,google/jax | tests/pjit_test.py | tests/pjit_test.py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as onp
from absl.testing import absltest
from absl.testing import parameterized
import jax.numpy as np
from jax import test_util as jtu
from jax.api import pjit
from jax.interpreters.parallel import psum
from jax.config import config
config.parse_flags_with_absl()
class PmapTest(jtu.JaxTestCase):
  """Smoke tests for pjit of a psum collective over axis 'i'."""

  @jtu.skip_on_devices("gpu")
  def testBasic(self):
    def center(x):
      return x - psum(x, 'i')
    data = onp.arange(8., dtype=onp.float32).reshape(4, 2)
    mapped = pjit(center, axis_name='i', in_axes=0, out_axes=0, mesh_axis=0)
    actual = mapped(data)
    desired = data - data.sum(0)
    self.assertAllClose(actual, desired, check_dtypes=False)

  @jtu.skip_on_devices("gpu")
  def testTupleOutput(self):
    def center_tuple(x):
      return (x - psum(x, 'i'),)
    data = onp.arange(8., dtype=onp.float32).reshape(4, 2)
    mapped = pjit(center_tuple, axis_name='i', in_axes=0, out_axes=0, mesh_axis=0)
    actual = mapped(data)
    desired = (data - data.sum(0),)
    self.assertAllClose(actual, desired, check_dtypes=False)

  @jtu.skip_on_devices("gpu")
  def testTupleInput(self):
    def center_first(x):
      return x[0] - psum(x[0], 'i')
    data = onp.arange(8., dtype=onp.float32).reshape(4, 2)
    mapped = pjit(center_first, axis_name='i', in_axes=0, out_axes=0, mesh_axis=0)
    actual = mapped((data,))
    desired = data - data.sum(0)
    self.assertAllClose(actual, desired, check_dtypes=False)

if __name__ == '__main__':
  absltest.main()
| apache-2.0 | Python |
|
a8d359fd91cb6e92a034703d6203a2997b28c965 | Add utility to augment jsdoc with @method tag. | ohsu-qin/qiprofile,ohsu-qin/qiprofile,ohsu-qin/qiprofile,ohsu-qin/qiprofile | tools/yuimethod.py | tools/yuimethod.py | #!/usr/bin/env python
"""
Adds the @method tag to method comment blocks.
"""
import sys
import os
import re
import argparse
# Opening line of a block comment ("/**" or "###*"), capturing indentation.
COMMENT_START_REGEX = re.compile('^(\s*)(/\*|###)\*\s*$')
# Closing line of a block comment.
COMMENT_END_REGEX = re.compile('^\s*(\*/|###)\s*$')
# A "* @tag ..." line inside a comment, capturing the tag name.
COMMENT_TAG_REGEX = re.compile('^\s*\* @(\w+).*$')
# A method definition line, capturing the method name.
METHOD_REGEX = re.compile('^\s*(\w+)(\(.*\)|: ).*$')

class FormatError(Exception):
    """Error reformatting the method comments."""
def main(argv=sys.argv):
    """Filter every input file into the destination directory.

    Returns 0 on success; raises FormatError if the destination exists but
    is not a directory.
    """
    in_files, opts = _parse_arguments()
    dest = opts.get('dest', 'out')
    if os.path.exists(dest):
        # An existing target must be a directory we can write into.
        if not os.path.isdir(dest):
            raise FormatError("The download target is not a directory:"
                              " %s" % dest)
    else:
        os.makedirs(dest)
    # Rewrite each input file under the destination directory.
    for in_file in in_files:
        _, base_name = os.path.split(in_file)
        with open(in_file) as ip:
            with open(os.path.join(dest, base_name), 'w') as op:
                _filter(ip, op)
    return 0
# Adds the @method tag if necessary.
#
# @param ip the input stream
# @param op the output stream
def _filter(ip, op):
    comment = []            # buffered lines of the current block comment
    in_block_comment = False
    has_method_tag = False  # did the buffered comment already have @method?
    indent = ''             # indentation captured from the comment opener
    for line_nl in ip:
        line = line_nl.rstrip()
        if in_block_comment:
            comment.append(line)
            if COMMENT_END_REGEX.match(line):
                in_block_comment = False
            elif not has_method_tag:
                tag_match = COMMENT_TAG_REGEX.match(line)
                if tag_match and tag_match.group(1) == 'method':
                    has_method_tag = True
        else:
            if comment:
                # First code line after a comment block: if it looks like a
                # method definition, inject "* @method <name>" before the
                # first tag line of the buffered comment.
                method_match = METHOD_REGEX.match(line)
                method = method_match.group(1) if method_match else None
                for comment_line in comment:
                    if method and not has_method_tag and COMMENT_TAG_REGEX.match(comment_line):
                        method_tag = "%s * @method %s" % (indent, method)
                        print >>op, method_tag
                        has_method_tag = True
                    print >>op, comment_line
                has_method_tag = False
                del comment[:]
            comment_match = COMMENT_START_REGEX.match(line)
            if comment_match:
                in_block_comment = True
                indent = comment_match.group(1)
                comment.append(line)
            else:
                print >>op, line
    # Flush a trailing comment block that was not followed by any code.
    for comment_line in comment:
        print >>op, comment_line

def _parse_arguments():
    """Parses the command line arguments."""
    parser = argparse.ArgumentParser()
    # The input file path.
    parser.add_argument('input', nargs='+', metavar="PATH", help='the input files')
    # The output file path.
    parser.add_argument('-d', '--dest', help='the destination directory (default ./out)')
    args = vars(parser.parse_args())
    # Drop unset options; NOTE: iteritems() makes this Python 2 only.
    nonempty_args = dict((k, v) for k, v in args.iteritems() if v != None)
    return nonempty_args.pop('input'), nonempty_args

if __name__ == '__main__':
    sys.exit(main())
| bsd-2-clause | Python |
|
7c1b0d4efd000fee8f065f2f5815075833811331 | Change file location and rename | sibis-platform/ncanda-data-integration,sibis-platform/ncanda-data-integration | scripts/reporting/svn_report.py | scripts/reporting/svn_report.py | '''
This file creates a .csv file containing the name of each laptop and its last changed date
'''
import argparse
import csv
from datetime import datetime, timezone
import os
import svn.local
import pandas as pd
'''
Constants -- paths for reports, default save names, SLA, columns, and sites
TO-DO: Change SLA_DAYS to a parser arg?
'''
REPORTS_DIR = '/fs/storage/laptops/ncanda'
DEFAULT_CSV = '/tmp/chris/import_reports/'
SLA_DAYS = 30
DATA_COLUMNS = ['laptop', 'date_updated', 'time_diff', 'sla', 'sla_percentage']
SITES = ['duke', 'sri', 'ohsu', 'upmc', 'ucsd']

def parse_args(arg_input=None):
    """Build and run the command-line parser.

    arg_input -- optional list of argument strings; None means sys.argv.
    """
    parser = argparse.ArgumentParser(
        description="Create a CSV file with all laptops and dates they were last modified")
    parser.add_argument("--file", action="store", default=DEFAULT_CSV,
                        help="Path of file name to save as")
    return parser.parse_args(arg_input)
def create_dataframe():
    '''
    Build a DataFrame with one row per laptop directory and the date its
    contents were last committed to SVN.

    Returns a DataFrame with DATA_COLUMNS, sorted by descending
    sla_percentage (the fraction of the SLA window elapsed since the last
    commit; values > 1 mean the laptop has breached the SLA).
    '''
    # Grab all directories and set up the SVN client
    directories = os.listdir(REPORTS_DIR)
    r = svn.local.LocalClient(REPORTS_DIR)

    rows = []
    for directory in directories:
        if directory == ".svn":
            continue  # skip SVN bookkeeping directory
        # Last commit date, age, and fraction of the SLA window consumed
        info = r.info(directory)
        mod_time = info['commit/date']
        time_diff = datetime.now(timezone.utc) - mod_time
        sla_percentage = time_diff.total_seconds() / (SLA_DAYS * 24 * 60 * 60)
        rows.append({
            'laptop': directory,
            'date_updated': mod_time,
            'time_diff': time_diff,
            'sla': SLA_DAYS,
            'sla_percentage': sla_percentage,
        })

    # Build the frame in one shot: DataFrame.append was deprecated and then
    # removed in pandas 2.0, and calling it in a loop was quadratic anyway.
    df = pd.DataFrame(rows, columns=DATA_COLUMNS)

    # Sort by descending SLA percentage
    return df.sort_values(by=['sla_percentage'], ascending=False)
def write_to_csv(df, path=None):
    """Write the full report plus one filtered CSV per site under *path*."""
    df.to_csv(path + 'reports.csv', index=False)
    for site in SITES:
        # Case-insensitive substring match of the site name in the laptop id.
        mask = df['laptop'].str.contains(site, case=False)
        df.loc[mask].to_csv(path + site + '.csv', index=False)
def main():
    """Entry point: parse arguments, build the report, write the CSVs."""
    options = parse_args()
    report = create_dataframe()
    write_to_csv(report, options.file)

if __name__ == "__main__":
    main()
| bsd-3-clause | Python |
|
f3f363e8911d3a635d68c7dbe767ee2585ed4f36 | Check for duplicates based on coordinates and select only one database (EU/NASA) | DanielAndreasen/SWEET-Cat | checkDuplicates.py | checkDuplicates.py | import pandas as pd
from astropy import coordinates as coord
from astropy import units as u
class Sweetcat:
    """Load SWEET-Cat database"""

    def __init__(self):
        # Combined EU/NASA export of the SWEET-Cat catalogue.
        self.fname_sc = 'WEBSITE_online_EU-NASA_full_database.rdb'
        self.readSC()

    def readSC(self):
        # TODO: Use the ra and dec, and match with coordinates instead of
        # name stored in self.coordinates.
        column_names = ['name', 'hd', 'ra', 'dec', 'V', 'Verr', 'p', 'perr',
                        'pflag', 'Teff', 'Tefferr', 'logg', 'logger',
                        'n1', 'n2', 'vt', 'vterr', 'feh', 'feherr', 'M', 'Merr',
                        'author', 'link', 'source', 'update', 'comment',
                        'database', 'n3']
        SC = pd.read_csv(self.fname_sc, delimiter='\t', names=column_names)
        # Normalized star names: lower-case, spaces/dashes removed, trimmed.
        self.sc_names = [
            name.lower().replace(' ', '').replace('-', '').strip()
            for name in SC.name
        ]
        # Original star names, whitespace-trimmed.
        self.sc_names_orig = [name.strip() for name in SC.name]
        # Coordinates of the stars in SWEET-Cat.
        self.coordinates = SC.loc[:, ['ra', 'dec']]
        # Full table (used to automatically update the database label).
        self.SC = SC
if __name__ == '__main__':
    # Loading SWEET Cat
    sc = Sweetcat()

    # Check for duplicates, subset of columns can be changed
    print(sc.SC[sc.SC.duplicated(['ra', 'dec'], keep=False)])

    # Indexes of the duplicates
    indexes = sc.SC[sc.SC.duplicated(['ra', 'dec'], keep=False)].index

    # Example: remove a known-duplicate row before exporting
    # new_sc = sc.SC.drop([2728])
    # new_sc.to_csv('WEBSITE_online_EU-NASA_full_database_minusHD21749.rdb',
    #               sep='\t', index=False, header=False)

    # Select only the EU data.
    # BUG FIX: the previous code referenced `new_sc`, which is defined only
    # in the commented-out block above and therefore raised NameError;
    # operate on the loaded table instead.
    sc_EU = sc.SC[sc.SC['database'].str.contains('EU')]

    # Drop the database column
    sc_like_old = sc_EU.drop(columns=['database'])
    #sc_like_old.to_csv('WEBSITE_online_EU-updated_04-03-2020.rdb',
    #                   sep='\t', index=False, header=False)
| mit | Python |
|
485bbe732dfb8539ffaf017f3a005896a7f3e503 | create subhash module | coblo/isccbench | iscc_bench/imageid/subhash.py | iscc_bench/imageid/subhash.py | # -*- coding: utf-8 -*-
"""Test strategy with hashing mutiple shift invariant aligned patches
See: https://stackoverflow.com/a/20316789/51627
"""
def main():
    """Placeholder entry point for the patch-hashing experiment."""
    return None

if __name__ == '__main__':
    main()
| bsd-2-clause | Python |
|
1742beec320d40e7859ea6f3b72e5fb3a7d1a51e | add flask hello world | jbothma/municipal-data,Code4SA/municipal-data,jbothma/municipal-data,jbothma/municipal-data,jbothma/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data | hello.py | hello.py | from flask import Flask
app = Flask(__name__)

@app.route("/")
def hello():
    # Root endpoint: respond with a static greeting.
    return "Hello World!"

if __name__ == "__main__":
    # Run the Flask development server (not intended for production use).
    app.run()
| mit | Python |
|
f2b329d5ab98cfd1c1e9a9c28e373e1411a78967 | Convert text/plain to multipart/alternative | dahlbaek/Ubuntu-dotfiles | home/bin/parse_mail.py | home/bin/parse_mail.py | #!/usr/bin/python3
import email
from email import policy
import pypandoc
import fileinput
import subprocess
from email import charset

# use 8bit encoded utf-8 when applicable
charset.add_charset('utf-8', charset.SHORTEST, '8bit')

# read the email message from stdin
stdin_lines = []
with fileinput.input(["-"]) as stdin:
    msg = email.message_from_string("".join(list(stdin)), policy=policy.SMTP)

# determine conversion: either a bare inline text/plain message ...
convert_simple = all([
    not msg.is_multipart(),
    msg.get_content_type() == "text/plain",
    msg.get_content_disposition() == "inline",
])
# ... or a flat multipart/mixed with exactly one inline text/plain part.
convert_multi = all([
    msg.get_content_type() == "multipart/mixed",
    not any([part.is_multipart() for part in list(msg.walk())[1:]]),
    len([part for part in msg.walk() if part.get_content_disposition() == "inline" and part.get_content_type() == "text/plain"]) == 1,
])
convert = any([convert_simple, convert_multi])

if convert:
    # extract attachments, keeping the single inline body separate
    attachments = []
    for part in msg.walk():
        if part.is_multipart():
            continue
        elif part.get_content_disposition() == "inline" and part.get_content_type() == "text/plain":
            inline = part.get_payload()
        else:
            attachments.append(part)
    # copy selected headers onto the rebuilt message
    headers = [
        "Date",
        "From",
        "To",
        "CC",
        "Subject",
        "Message-ID",
    ]
    new_msg = email.message.EmailMessage(policy=policy.SMTP)
    for header in headers:
        if msg[header]:
            new_msg[header] = msg[header]
    new_msg.add_header("MIME-Version", "1.0")
    # make plain and html parts (the inline body is treated as markdown
    # and rendered to HTML via pandoc)
    text_plain = email.message.MIMEPart(policy=policy.SMTP)
    text_plain.set_content(inline)
    text_html = email.message.MIMEPart(policy=policy.SMTP)
    text_html.set_content(pypandoc.convert_text(inline, "html", format="md"), subtype="html")
    # assemble the multipart/alternative structure and re-attach attachments
    if convert_simple:
        new_msg.make_alternative()
        new_msg.attach(text_plain)
        new_msg.attach(text_html)
    elif convert_multi:
        new_msg.make_mixed()
        alternative = email.message.EmailMessage(policy=policy.SMTP)
        alternative.add_header("MIME-Version", "1.0")
        alternative.make_alternative()
        alternative.add_header("Content-Disposition", "inline")
        alternative.attach(text_plain)
        alternative.attach(text_html)
        new_msg.attach(alternative)
    # in the simple case there are no attachments, so this loop is a no-op
    for part in attachments:
        new_msg.attach(part)
    out_msg = new_msg
else:
    out_msg = msg

# send via msmtp; recipients are taken from the message headers
subprocess.run(["/usr/bin/msmtp", "--read-recipients", "-a", "AAU"], input=out_msg.as_bytes())
#print(out_msg.as_string())
| mit | Python |
|
3fabca45d6071c7fe333050264e8b92f23336c12 | fix type | scienceopen/airtools,scienceopen/pyAIRtools,scienceopen/airtools | kaczmarz.py | kaczmarz.py | import numpy as np
#import matplotlib.pyplot as plt
def kaczmarz_ART(A,b,maxIter=8,x0=None,lambdaRelax=1,stopmode=None,taudelta=0,nonneg=True,dbglvl=0):
    """Estimate x solving A x = b via Kaczmarz algebraic reconstruction (ART).

    Parameters
    ----------
    A : (M, N) ndarray
        projection matrix
    b : (M,) ndarray
        vector of observations
    maxIter : int
        maximum number of ART sweeps over the rows of A
    x0 : (N,) ndarray or None
        initial guess (zeros if None)
    lambdaRelax : float
        relaxation parameter (see Herman Ch. 11.2)
    stopmode : None or 'MDP'/'DP'
        optionally stop early via the Morozov discrepancy principle
    taudelta : float
        discrepancy threshold used when stopmode is 'MDP'
    nonneg : bool
        clip negative entries of x to zero after every row update
    dbglvl : int
        > 0 prints diagnostic information

    Returns
    -------
    x : (N,) ndarray
        estimated solution of A x = b
    residual : (M,) ndarray
        b - A x at the final iterate
    iterations : int
        number of completed sweeps

    References
    ----------
    Herman, G. "Fundamentals of Computerized Tomography", 2nd Ed., Springer, 2009
    Natterer, F. "The mathematics of computed tomography", SIAM, 2001
    """
    if dbglvl > 0:
        print(('Lambda Relaxation: ' + str(lambdaRelax)))
    n = A.shape[1]
    if x0 is None:
        print('kaczmarz: using zeros to initialize x0')
        x0 = np.zeros(n)
    # Select stopping rule: 0 = fixed iteration count, 1 = discrepancy principle.
    if stopmode is None:
        sr = 0
    elif stopmode == 'MDP' or stopmode == 'DP':
        sr = 1
        if taudelta == 0:
            print('you used tauDelta=0, which effectively disables Morozov discrepancy principle')
    else:
        sr = 0
        print("didn't understand stopmode command, defaulted to maximum iterations")
    # Skip rows of A with no positive entries (they contribute no update).
    goodRows = np.where(np.any(A > 0, axis=1))[0]
    # Speedup: precompute squared row norms once instead of per sweep.
    RowNormSq = np.linalg.norm(A, ord=2, axis=1)**2
    x = np.copy(x0)  # leave the caller's x0 untouched
    iIter = 0
    stop = False  # note: the loop always runs at least one sweep
    while not stop:
        for iRow in goodRows:
            # Scalar correction: project x onto the hyperplane of row iRow.
            num = (b[iRow] - A[iRow, :].dot(x))
            x = x + np.dot(lambdaRelax * num / RowNormSq[iRow], A[iRow, :])
            if nonneg:
                x[x < 0] = 0
        residual = b - A.dot(x)
        # BUG FIX: residualNorm was previously computed only when the
        # discrepancy principle was active (sr == 1), so the progress print
        # below raised NameError after 200 iterations in the default mode.
        residualNorm = np.linalg.norm(residual, 2)
        iIter += 1
        stop = iIter > maxIter
        if sr == 1:
            stop |= (residualNorm <= taudelta)
        if iIter % 200 == 0:  # periodic progress report for user comfort
            print(('kaczmarz: Iteration ' + str(iIter) + ', ||residual|| = ' + str(residualNorm)))
    return x, residual, iIter - 1
| bsd-3-clause | Python |
|
ef192ebd7679b96317cc6d878fb82c925787710d | Add Pattern based filterer. | 4degrees/sawmill,4degrees/mill | source/bark/filterer/pattern.py | source/bark/filterer/pattern.py | # :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import re
from .base import Filterer
class Pattern(Filterer):
    '''Filter logs using pattern matching.'''

    # Filtering modes: INCLUDE keeps only matching logs, EXCLUDE drops them.
    INCLUDE, EXCLUDE = ('include', 'exclude')

    def __init__(self, pattern, key='name', mode=INCLUDE):
        '''Initialise filterer with *pattern* and *key* to test.

        If *pattern* is a string it will be converted to a compiled regular
        expression instance.

        *mode* can be either 'exclude' or 'include'. If set to 'exclude'
        then any log matching the pattern will be filtered. Conversely, if
        set to 'include' then any log not matching the pattern will be
        filtered.
        '''
        super(Pattern, self).__init__()
        self.pattern = pattern
        # Accept plain strings for convenience; compile them up front.
        # NOTE: basestring makes this class Python 2 only.
        if isinstance(self.pattern, basestring):
            self.pattern = re.compile(self.pattern)
        self.key = key
        self.mode = mode

    def filter(self, log):
        '''Filter *log* based on pattern matching; True means "drop it".

        If the log does not have the key to test against it will pass the
        filter successfully. If the key is present, but not a string, then
        the log will be filtered.
        '''
        # If key was not present then pass filter
        if self.key not in log:
            return False
        value = log[self.key]
        # If not a string then can't test pattern against it so fail filter.
        if not isinstance(value, basestring):
            return True
        matched = self.pattern.search(value)
        if matched and self.mode == self.EXCLUDE:
            return True
        if not matched and self.mode == self.INCLUDE:
            return True
        return False
| apache-2.0 | Python |
|
bcd485f240a7eb6373f847d6cc9dd07ebd2c3ef2 | add test case for redeem of default coupon (user limit=1, not bound to user) | rsalmaso/django-fluo-coupons,rsalmaso/django-fluo-coupons | coupons/tests/test_use_cases.py | coupons/tests/test_use_cases.py | from datetime import datetime
from django.contrib.auth.models import User
from django.utils import timezone
from django.test import TestCase
from coupons.forms import CouponForm
from coupons.models import Coupon
class DefaultCouponTestCase(TestCase):
    """Redeem a coupon created with default settings (not bound to a user)."""

    def setUp(self):
        self.user = User.objects.create(username="user1")
        # Monetary coupon worth 100, created with default options.
        self.coupon = Coupon.objects.create_coupon('monetary', 100)

    def test_redeem(self):
        # Redeeming marks the coupon used and records who redeemed it, when.
        self.coupon.redeem(self.user)
        self.assertTrue(self.coupon.is_redeemed)
        self.assertEquals(self.coupon.users.count(), 1)
        self.assertIsInstance(self.coupon.users.first().redeemed_at, datetime)
        self.assertEquals(self.coupon.users.first().user, self.user)
| bsd-3-clause | Python |
|
424d7107944f3ecb8ebf78a62dc35428952b380b | add reindex script | fritz0705/lglass | contrib/reindex.py | contrib/reindex.py | #!/bin/python
# coding: utf-8
import signal
import argparse
from datetime import datetime

argparser = argparse.ArgumentParser()
argparser.add_argument("--database-type", "-T", choices=["nic", "ipam"],
                       default="nic")
argparser.add_argument("database")
args = argparser.parse_args()

# Import lazily so only the backend that was actually requested is loaded.
if args.database_type == "nic":
    import lglass_sql.nic
    db = lglass_sql.nic.NicDatabase(args.database)
elif args.database_type == "ipam":
    import lipam.sql
    db = lipam.sql.IPAMDatabase(args.database)

n = 0  # number of objects reindexed so far
start = datetime.now()

def sigusr1(*args):
    # SIGUSR1 handler: report progress without interrupting the run.
    # NOTE: *args here shadows the module-level `args`; the handler's
    # signal arguments are ignored.
    global n
    print("Processed {} objects in {}".format(n, datetime.now() - start))

signal.signal(signal.SIGUSR1, sigusr1)

# Reindex every object within a single session, committing once at the end.
with db.session() as sess:
    for obj in sess.find():
        n += 1
        sess.reindex(obj)
    sess.commit()
| mit | Python |
|
65a1c06b6e5d7ec37ac232ab048b3cc541b75a45 | refactor Coupon | Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup | customermanage/models/Coupon.py | customermanage/models/Coupon.py | from django.db import models
from storemanage.models.Currency import Currency
from storemanage.models.Ticket import Ticket
from django.contrib.auth.models import User
from django.contrib.postgres.fields import JSONField
# Create your models here.
class Coupon(models.Model):
    """A coupon held by a user, created from a store's ticket."""
    ticket = models.ForeignKey(Ticket, on_delete=models.CASCADE)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # NOTE(review): presumably an expiry timestamp; null allowed -- confirm
    # whether null means "never expires".
    remaining_date = models.DateTimeField(null=True)
    active = models.BooleanField(default=True)
    # BUG FIX: pass the dict *callable*, not a dict instance. Using
    # ``default=dict()`` evaluates once at class definition and shares the
    # same mutable dict across all Coupon rows; Django's JSONField docs
    # require a callable default for exactly this reason.
    attribute = JSONField(default=dict)
| mit | Python |
|
837dc69a430161f6b942b629793ec1d37db780d4 | Create virtool.db.settings | igboyes/virtool,igboyes/virtool,virtool/virtool,virtool/virtool | virtool/db/settings.py | virtool/db/settings.py | import logging
import pymongo.errors
logger = logging.getLogger(__name__)
async def initialize(db):
    """Insert the default settings document unless it already exists.

    :param db: database object exposing a ``settings`` collection
    """
    try:
        await db.settings.insert_one({
            "_id": "settings",
            # NOTE(review): unlike the other entries this stores a
            # schema-style dict rather than a plain value -- confirm
            # this is intentional.
            "enable_sentry": {"type": "boolean", "default": True},
            "sample_group": "none",
            "sample_group_read": True,
            "sample_group_write": False,
            "sample_all_read": True,
            "sample_all_write": False,
            "sample_unique_names": True,
            "hmm_slug": "virtool/virtool-hmm",
            "software_channel": "stable",
            "minimum_password_length": 8,
            "default_source_types": ["isolate", "strain"]
        })
    except pymongo.errors.DuplicateKeyError:
        # Another process (or a previous run) already created the document.
        logger.debug("Settings collection already initialized.")
async def update(db, updates):
    """Apply the *updates* dict to the settings document via ``$set``.

    NOTE(review): find_one_and_update returns the document as it was
    *before* modification by default -- confirm callers expect the
    pre-update state.
    """
    return await db.settings.find_one_and_update({"_id": "settings"}, {
        "$set": updates
    })
| mit | Python |
|
094020855126721827342da98992a8c057d1a135 | fix memory benchmark for reference builds. | pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk,ltilve/chromium,krieger-od/nwjs_chromium.src,hujiajie/pa-chromium,Just-D/chromium-1,Chilledheart/chromium,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,dushu1203/chromium.src,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,timopulkkinen/BubbleFish,Just-D/chromium-1,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,littlstar/chromium.src,Chilledheart/chromium,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,M4sse/chromium.src,zcbenz/cefode-chromium,nacl-webkit/chrome_deps,jaruba/chromium.src,dednal/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,anirudhSK/chromium,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,nacl-webkit/chrome_deps,markYoungH/chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,Chilledheart/chromium,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,hujiajie/pa-chromium,hgl888/chromium-crosswalk,ltilve/chromium,krieger-od/nwjs_chromium.src,Just-D/chromium-1,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.s
rc,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,pozdnyakov/chromium-crosswalk,pozdnyakov/chromium-crosswalk,zcbenz/cefode-chromium,ltilve/chromium,pozdnyakov/chromium-crosswalk,bright-sparks/chromium-spacewalk,nacl-webkit/chrome_deps,M4sse/chromium.src,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,markYoungH/chromium.src,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,axinging/chromium-crosswalk,zcbenz/cefode-chromium,mogoweb/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,dednal/chromium.src,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,anirudhSK/chromium,M4sse/chromium.src,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,Chilledheart/chromium,dednal/chromium.src,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,krieger-od/nwjs_chromium.src,Chilledheart/chromium,ChromiumWebApps/chromium,timopulkkinen/BubbleFish,patrickm/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,ltilve/chromium,ltilve/chromium,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,markYoungH/chromium.src,Fireblend/chromium-crosswalk,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,mogoweb/c
hromium-crosswalk,littlstar/chromium.src,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,timopulkkinen/BubbleFish,ondra-novak/chromium.src,zcbenz/cefode-chromium,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,Just-D/chromium-1,ondra-novak/chromium.src,nacl-webkit/chrome_deps,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,patrickm/chromium.src,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,mogoweb/chromium-crosswalk,dushu1203/chromium.src,jaruba/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,dednal/chromium.src,chuan9/chromium-crosswalk,anirudhSK/chromium,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,M4sse/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk,hujiajie/pa-chromium,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,M4sse/chromium.src,Just-D/chromium-1,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,pozdnyakov/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,hujiajie/pa-chromium,patrickm/chromium.src,hgl888/chromium-crosswalk,patrickm/chromium.src,dednal/chromium.src,anirudhSK/chromium,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,Chilledheart/chromium,d
ednal/chromium.src,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,timopulkkinen/BubbleFish,anirudhSK/chromium,ChromiumWebApps/chromium,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,dushu1203/chromium.src,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,bright-sparks/chromium-spacewalk,littlstar/chromium.src,Just-D/chromium-1,Jonekee/chromium.src,zcbenz/cefode-chromium,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,ltilve/chromium,jaruba/chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,ChromiumWebApps/chromium,anirudhSK/chromium,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,pozdnyakov/chromium-crosswalk,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,dednal/chromium.src,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,dednal/chromium.src,patrickm/chromium.src,Fireblend/chromium-crosswalk,hujiajie/pa-chromium,jaruba/chromium.src,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,ltilve/chromium,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,nacl-webkit/chrome_d
eps,ondra-novak/chromium.src,zcbenz/cefode-chromium,markYoungH/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,mogoweb/chromium-crosswalk,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,hgl888/chromium-crosswalk,ltilve/chromium,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl | tools/perf/perf_tools/memory_benchmark.py | tools/perf/perf_tools/memory_benchmark.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import multi_page_benchmark
# V8 histograms sampled on every page; 'units' feeds the results reporter.
MEMORY_HISTOGRAMS = [
    {'name': 'V8.MemoryExternalFragmentationTotal', 'units': 'percent'},
    {'name': 'V8.MemoryHeapSampleTotalCommitted', 'units': 'kb'},
    {'name': 'V8.MemoryHeapSampleTotalUsed', 'units': 'kb'}]
class MemoryBenchmark(multi_page_benchmark.MultiPageBenchmark):
  """Reports V8 memory histograms for pages that carry a 'stress_memory'
  attribute (see CanRunForPage)."""
  def __init__(self):
    super(MemoryBenchmark, self).__init__('stress_memory')
  def CustomizeBrowserOptions(self, options):
    # --dom-automation exposes window.domAutomationController, which is how
    # MeasurePage reads histograms out of the renderer.
    options.AppendExtraBrowserArg('--dom-automation')
    # For a hard-coded set of Google pages (such as GMail), we produce custom
    # memory histograms (V8.Something_gmail) instead of the generic histograms
    # (V8.Something), if we detect that a renderer is only rendering this page
    # and no other pages. For this test, we need to disable histogram
    # customizing, so that we get the same generic histograms produced for all
    # pages.
    options.AppendExtraBrowserArg('--disable-histogram-customizer')
  def CanRunForPage(self, page):
    # Only pages explicitly tagged for this benchmark participate.
    return hasattr(page, 'stress_memory')
  def MeasurePage(self, page, tab, results):
    for histogram in MEMORY_HISTOGRAMS:
      name = histogram['name']
      # getHistogram may be absent in the page's automation controller; the
      # JS ternary falls back to '' so such histograms are skipped below
      # instead of raising in the renderer.
      data = tab.runtime.Evaluate(
          'window.domAutomationController.getHistogram ? '
          'window.domAutomationController.getHistogram("%s") : ""' % name)
      if data:
        results.Add(name.replace('.', '_'), histogram['units'], data,
                    data_type='histogram')
| # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import multi_page_benchmark
# V8 histograms sampled on every page; 'units' feeds the results reporter.
MEMORY_HISTOGRAMS = [
    {'name': 'V8.MemoryExternalFragmentationTotal', 'units': 'percent'},
    {'name': 'V8.MemoryHeapSampleTotalCommitted', 'units': 'kb'},
    {'name': 'V8.MemoryHeapSampleTotalUsed', 'units': 'kb'}]
class MemoryBenchmark(multi_page_benchmark.MultiPageBenchmark):
  """Reports V8 memory histograms for pages that carry a 'stress_memory'
  attribute (see CanRunForPage)."""
  def __init__(self):
    super(MemoryBenchmark, self).__init__('stress_memory')
  def CustomizeBrowserOptions(self, options):
    # --dom-automation exposes window.domAutomationController, which is how
    # MeasurePage reads histograms out of the renderer.
    options.AppendExtraBrowserArg('--dom-automation')
    # For a hard-coded set of Google pages (such as GMail), we produce custom
    # memory histograms (V8.Something_gmail) instead of the generic histograms
    # (V8.Something), if we detect that a renderer is only rendering this page
    # and no other pages. For this test, we need to disable histogram
    # customizing, so that we get the same generic histograms produced for all
    # pages.
    options.AppendExtraBrowserArg('--disable-histogram-customizer')
  def CanRunForPage(self, page):
    return hasattr(page, 'stress_memory')
  def MeasurePage(self, page, tab, results):
    for histogram in MEMORY_HISTOGRAMS:
      name = histogram['name']
      # Bug fix (mirrors the newer revision of this file): getHistogram is
      # not always defined; guard with a JS ternary and skip empty results
      # rather than evaluating an undefined function in the renderer.
      data = tab.runtime.Evaluate(
          'window.domAutomationController.getHistogram ? '
          'window.domAutomationController.getHistogram("%s") : ""' % name)
      if data:
        results.Add(name.replace('.', '_'), histogram['units'], data,
                    data_type='histogram')
| bsd-3-clause | Python |
4e797dd9c8b43ab62f70b0515dee9e6b5c17d043 | Create secret.py | toms3t/Propalyzer,toms3t/Propalyzer,toms3t/Propalyzer | propalyzer_site/propalyzer_site/secret.py | propalyzer_site/propalyzer_site/secret.py | class Secret():
SECRET_KEY = ''
| mit | Python |
|
3d8fe5cfc64c3667f938fa221353489846a9aeb0 | Add test of F.diagonal | tkerola/chainer,pfnet/chainer,keisuke-umezawa/chainer,chainer/chainer,ktnyt/chainer,okuta/chainer,hvy/chainer,ktnyt/chainer,niboshi/chainer,wkentaro/chainer,jnishi/chainer,hvy/chainer,niboshi/chainer,rezoo/chainer,ronekko/chainer,okuta/chainer,jnishi/chainer,okuta/chainer,jnishi/chainer,niboshi/chainer,hvy/chainer,chainer/chainer,wkentaro/chainer,keisuke-umezawa/chainer,ktnyt/chainer,niboshi/chainer,jnishi/chainer,hvy/chainer,anaruse/chainer,chainer/chainer,keisuke-umezawa/chainer,okuta/chainer,wkentaro/chainer,chainer/chainer,keisuke-umezawa/chainer,ktnyt/chainer,wkentaro/chainer | tests/chainer_tests/functions_tests/array_tests/test_diagonal.py | tests/chainer_tests/functions_tests/array_tests/test_diagonal.py | import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
@testing.parameterize(*testing.product_dict(
    [
        {'shape': (2, 4, 6), 'args': (1, 2, 0)},
        {'shape': (2, 4, 6), 'args': (-1, 2, 0)},
        {'shape': (2, 4, 6), 'args': (0, -1, -2)},
        {'shape': (2, 4, 6), 'args': (0, -1, 1)},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
class TestDiagonal(unittest.TestCase):
    """Checks F.diagonal against numpy.ndarray.diagonal, including first- and
    second-order gradient checks, on CPU and (when available) GPU.

    `args` is the (offset, axis1, axis2) triple forwarded to diagonal.
    """
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        # Reference result comes from numpy's own diagonal.
        self.y_expected = self.x.diagonal(*self.args)
        self.y_shape = self.y_expected.shape
        self.gy = numpy.random.uniform(-1, 1, self.y_shape).astype(self.dtype)
        self.ggx = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        self.check_double_backward_options = {'atol': 1e-3, 'rtol': 1e-2}
        if self.dtype == numpy.float16:
            # float16 is too coarse for numeric differentiation; evaluate the
            # double-backward check in float64.
            self.check_double_backward_options.update(dtype=numpy.float64)
    def check_forward(self, x_data):
        x = chainer.Variable(x_data)
        y = functions.diagonal(x, *self.args)
        testing.assert_allclose(y.data, self.y_expected)
    def test_forward_cpu(self):
        self.check_forward(self.x)
    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x))
    def check_backward(self, x_data, y_grad):
        gradient_check.check_backward(
            lambda x: functions.diagonal(x, *self.args),
            x_data, y_grad, dtype=numpy.float64)
    def test_backward_cpu(self):
        self.check_backward(self.x, self.gy)
    @attr.gpu
    def test_backward_gpu(self):
        self.check_backward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
    def check_double_backward(self, x_data, y_grad, x_grad_grad):
        def f(x):
            # Square the output so the second-order gradient is non-trivial.
            x = functions.diagonal(x, *self.args)
            return x * x
        gradient_check.check_double_backward(
            f, x_data, y_grad, x_grad_grad,
            **self.check_double_backward_options)
    def test_double_backward_cpu(self):
        self.check_double_backward(self.x, self.gy, self.ggx)
    @attr.gpu
    def test_double_backward_gpu(self):
        self.check_double_backward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.gy), cuda.to_gpu(self.ggx))
testing.run_module(__name__, __file__)
| mit | Python |
|
254564ceb905dc512693febed44e908c27f249ce | Add tests for cupyx.scipy.ndimage.label | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | tests/cupyx_tests/scipy_tests/ndimage_tests/test_measurements.py | tests/cupyx_tests/scipy_tests/ndimage_tests/test_measurements.py | import unittest
import numpy
from cupy import testing
import cupyx.scipy.ndimage # NOQA
try:
import scipy.ndimage # NOQA
except ImportError:
pass
def _generate_binary_structure(rank, connectivity):
if connectivity < 1:
connectivity = 1
if rank < 1:
return numpy.array(True, dtype=bool)
output = numpy.fabs(numpy.indices([3] * rank) - 1)
output = numpy.add.reduce(output, 0)
return output <= connectivity
@testing.parameterize(*testing.product({
    'ndim': [1, 2, 3, 4],
    'size': [50, 100],
    'density': [0.2, 0.3, 0.4],
    'connectivity': [None, 2, 3],
    'x_dtype': [bool, numpy.int8, numpy.int32, numpy.int64,
                numpy.float32, numpy.float64],
    'output': [None, numpy.int32, numpy.int64],
    'o_type': [None, 'ndarray']
}))
@testing.gpu
@testing.with_requires('scipy')
class TestLabel(unittest.TestCase):
    """Compares cupyx.scipy.ndimage.label against scipy.ndimage.label over a
    grid of ranks, dtypes, structures and output modes."""
    @testing.numpy_cupy_array_equal(scipy_name='scp')
    def test_label(self, xp, scp):
        # Build a shape with roughly self.size elements and a distinct
        # extent per axis.
        size = int(pow(self.size, 1 / self.ndim))
        x_shape = range(size, size + self.ndim)
        x = xp.zeros(x_shape, dtype=self.x_dtype)
        # Set roughly a `density` fraction of elements to 1.
        # (removed: dead commented-out numpy.where variant of this line)
        x[testing.shaped_random(x_shape, xp) < self.density] = 1
        if self.connectivity is None:
            structure = None
        else:
            structure = _generate_binary_structure(self.ndim,
                                                   self.connectivity)
        if self.o_type == 'ndarray' and self.output is not None:
            # Exercise the pre-allocated-output code path.
            output = xp.empty(x_shape, dtype=self.output)
            num_features = scp.ndimage.label(x, structure=structure,
                                             output=output)
            return output
        labels, num_features = scp.ndimage.label(x, structure=structure,
                                                 output=self.output)
        return labels
| mit | Python |
|
2a6907ddf9c7b5df2e1b59c8feeb0fa4bd4b5752 | add rudimentary validation tests for azure | sonchang/validation-tests,rancherio/validation-tests,wlan0/validation-tests,aruneli/validation-tests,cjellick/validation-tests,rancher/validation-tests,hibooboo2/validation-tests,wlan0/validation-tests,sangeethah/validation-tests,aruneli/validation-tests,sonchang/validation-tests,rancher/validation-tests,hibooboo2/validation-tests,cjellick/validation-tests,sangeethah/validation-tests,rancherio/validation-tests | tests/validation/cattlevalidationtest/core/test_machine_azure.py | tests/validation/cattlevalidationtest/core/test_machine_azure.py | import logging
from common_fixtures import * # NOQA
# Seconds to wait for machine/host state transitions.
DEFAULT_TIMEOUT = 900
subscription_id = os.environ.get('AZURE_SUBSCRIPTION_ID')
subscription_cert = os.environ.get('AZURE_SUBSCRIPTION_CERT')
# Use azure settings from environment variables , if set
# Default image name: Ubuntu 14.04.1 LTS with an Azure publisher prefix.
i = 'b39f27a8b8c64d52b05eac6a62ebad85__'
i = i + 'Ubuntu-14_04_1-LTS-amd64-server-20140927-en-us-30GB'
image = os.environ.get('AZURE_IMAGE', i)
location = os.environ.get('AZURE_LOCATION', "West US")
username = os.environ.get('AZURE_USERNAME', "")
password = os.environ.get('AZURE_PASSWORD', "")
size = os.environ.get('AZURE_SIZE', "Small")
# Skip marker: azure tests need both the subscription id and the cert.
if_machine_azure = pytest.mark.skipif(
    not os.environ.get('AZURE_SUBSCRIPTION_ID') or
    not os.environ.get('AZURE_SUBSCRIPTION_CERT'),
    reason='Azure SubscriptionId/SubscriptionCert/AuthToken is not set')
# Get logger
logger = logging.getLogger(__name__)
@pytest.fixture(scope='session', autouse=True)
def register_host(admin_client):
    """Session-wide fixture: set the cattle 'api.host' setting to the
    host:port portion of the configured test URL (everything after '//')."""
    test_url = cattle_url()
    start = test_url.index("//") + 2
    api_host = test_url[start:]
    admin_client.create_setting(name="api.host", value=api_host)
@if_machine_azure
def test_azure_machine_all_params(client):
    """Run the full machine life cycle with every supported azureConfig
    field populated from the environment-derived module settings."""
    name = random_str()
    # Build the config once; the expected values are by definition exactly
    # the azureConfig we send, so don't duplicate the literal.
    azure_config = {"subscriptionId": subscription_id,
                    "subscriptionCert": subscription_cert,
                    "image": image,
                    "location": location,
                    "username": username,
                    "password": password,
                    "size": size}
    create_args = {"name": name,
                   "azureConfig": azure_config}
    azure_machine_life_cycle(client, create_args, dict(azure_config))
def azure_machine_life_cycle(client, configs, expected_values):
    """Create a machine from *configs*, wait for it and its host to become
    active, then remove it and verify both machine and host are removed.

    NOTE(review): *expected_values* is currently unused -- presumably the
    created machine's azureConfig was meant to be asserted against it;
    confirm and either use or drop the parameter.
    """
    machine = client.create_machine(**configs)
    machine = client.wait_success(machine, timeout=DEFAULT_TIMEOUT)
    assert machine.state == 'active'
    # Wait until host shows up with some physicalHostId
    machine = wait_for_host(client, machine)
    host = machine.hosts()[0]
    assert host.state == 'active'
    assert machine.accountId == host.accountId
    # Remove the machine and make sure that the host
    # and the machine get removed
    machine = client.wait_success(machine.remove())
    assert machine.state == 'removed'
    host = client.reload(machine.hosts()[0])
    assert host.state == 'removed'
def wait_for_host(client, machine):
    """Block until *machine* has exactly one associated host and that host
    reaches the 'active' state; returns the machine object."""
    wait_for_condition(client,
                       machine,
                       lambda x: len(x.hosts()) == 1,
                       lambda x: 'Number of hosts associated with machine ' +
                                 str(len(x.hosts())),
                       DEFAULT_TIMEOUT)
    host = machine.hosts()[0]
    # Only the host's arrival at 'active' matters here; the rebound host
    # object itself is not returned.
    host = wait_for_condition(client,
                              host,
                              lambda x: x.state == 'active',
                              lambda x: 'Host state is ' + x.state
                              )
    return machine
| apache-2.0 | Python |
|
e9091be4ae9ddf0cb83bd7535c4ced5bb2d691d2 | add config_edit.py | wijjo/scripts,wijjo/scripts | castiron/lib/castiron/actions/config_edit.py | castiron/lib/castiron/actions/config_edit.py | from castiron.tools import Action, register_actions
import os
import re
class G:
    # Module-global registry of configuration-file edits (see edits()).
    all_edits = []
def _file_contains_re(runner, path, contains_re):
real_path = os.path.realpath(os.path.expanduser(path))
if os.path.exists(real_path):
with open(real_path) as f:
for line in f:
if contains_re.search(line.rstrip()):
return True
return False
def _append_text(runner, path, text):
real_path = os.path.realpath(os.path.expanduser(path))
with open(real_path, 'a' if os.path.exists(real_path) else 'w') as f:
f.write('\n')
f.write(text)
if not text.endswith('\n'):
f.write('\n')
class EditBase(object):
def __init__(self, path):
self.path = path
class Inject(EditBase):
'''
Append to existing file or create new file.
'''
def __init__(self, path, skip_if, text):
'''
path is the file to edit or create.
text is the text to inject.
skip_if skips the edit when a line matches a regex pattern.
'''
super(Inject, self).__init__(path)
self.skip_if_re = re.compile(skip_if)
self.text = text
self.needed = False
def check(self, runner):
return _file_contains_re(runner, self.path, self.skip_if_re)
def perform(self, runner):
if _file_contains_re(runner, self.path, self.skip_if_re):
_append_text(runner, self.path, self.text)
def edits(*edits):
    # Register configuration edits for ConfigEditAction to check/perform.
    G.all_edits.extend(edits)
class ConfigEditAction(Action):
    # Castiron action that applies every edit registered via edits().
    # NOTE: this module is Python 2 (print statement below).
    description = 'edit configuration files'
    enabled = True
    def __init__(self):
        super(ConfigEditAction, self).__init__()
        # Wrap each registered edit with a per-run 'needed' flag.
        class CheckedEdit(object):
            def __init__(self, edit):
                self.edit = edit
                self.needed = False
        self.checked_edits = [CheckedEdit(edit) for edit in G.all_edits]
    def check(self, runner):
        # True when at least one edit's check() is truthy; also records the
        # per-edit flag that perform() consults.
        okay = False
        for checked_edit in self.checked_edits:
            if runner.call(checked_edit.edit.check):
                okay = checked_edit.needed = True
        return okay
    def perform(self, runner, needed):
        for checked_edit in self.checked_edits:
            if checked_edit.needed:
                runner.call(checked_edit.edit.perform)
            else:
                print 'Configuration file was already changed: %s' % checked_edit.edit.path
# Make the action discoverable by the castiron runner.
register_actions(ConfigEditAction)
| apache-2.0 | Python |
|
99430e9f51eccb79f32af49bedfb28ba5f39cd09 | update : minor changes | black-perl/ptop | ptop/plugins/system_sensor.py | ptop/plugins/system_sensor.py | '''
System sensor plugin
Generates the basic system info
'''
from ptop.core import Plugin
import psutil, socket, getpass
import datetime, time
class SystemSensor(Plugin):
    # Text-only sensor reporting the current user, hostname and uptime.
    def __init__(self,**kwargs):
        super(SystemSensor,self).__init__(**kwargs)
    # overriding the update method
    def update(self):
        # only text part for the system info
        self.currentValue['text'] = {}
        # updating values
        self.currentValue['text']['user'] = getpass.getuser()
        self.currentValue['text']['host_name'] = socket.gethostname()
        # Uptime = wall clock minus boot time, truncated to whole seconds.
        self.currentValue['text']['running_time'] = datetime.timedelta(seconds=int(time.time() - psutil.boot_time()))
# Module-level singleton instance registered with the ptop plugin system.
system_sensor = SystemSensor(name='System',sensorType='text',interval=1)
| mit | Python |
|
ded21520c1fde89336480b48387d383a2e449c2a | Write test for array | cupy/cupy,wkentaro/chainer,laysakura/chainer,cemoody/chainer,kikusu/chainer,t-abe/chainer,ktnyt/chainer,cupy/cupy,niboshi/chainer,hvy/chainer,benob/chainer,okuta/chainer,kashif/chainer,rezoo/chainer,jnishi/chainer,chainer/chainer,ytoyama/yans_chainer_hackathon,niboshi/chainer,tscohen/chainer,chainer/chainer,jnishi/chainer,muupan/chainer,ronekko/chainer,keisuke-umezawa/chainer,niboshi/chainer,ktnyt/chainer,cupy/cupy,keisuke-umezawa/chainer,niboshi/chainer,keisuke-umezawa/chainer,okuta/chainer,delta2323/chainer,wkentaro/chainer,cupy/cupy,hvy/chainer,hvy/chainer,kikusu/chainer,ysekky/chainer,hvy/chainer,wkentaro/chainer,t-abe/chainer,1986ks/chainer,chainer/chainer,ktnyt/chainer,tigerneil/chainer,Kaisuke5/chainer,wkentaro/chainer,okuta/chainer,ktnyt/chainer,aonotas/chainer,truongdq/chainer,benob/chainer,pfnet/chainer,jnishi/chainer,sou81821/chainer,sinhrks/chainer,keisuke-umezawa/chainer,truongdq/chainer,kiyukuta/chainer,muupan/chainer,anaruse/chainer,AlpacaDB/chainer,jnishi/chainer,minhpqn/chainer,AlpacaDB/chainer,okuta/chainer,sinhrks/chainer,chainer/chainer,tkerola/chainer | tests/chainer_tests/utils_tests/test_array.py | tests/chainer_tests/utils_tests/test_array.py | import unittest
import numpy
from chainer import cuda
from chainer.utils import array
from chainer.testing import attr
class TestFullLike(unittest.TestCase):
    """chainer.utils.array.full_like must return an array of the same shape
    and device family as its input, with every element set to the fill
    value."""
    def test_full_like_cpu(self):
        x = numpy.array([1, 2], numpy.float32)
        y = array.full_like(x, 3)
        self.assertIsInstance(y, numpy.ndarray)
        self.assertEqual(y.shape, (2,))
        self.assertEqual(y[0], 3)
        self.assertEqual(y[1], 3)
    @attr.gpu
    def test_full_like_gpu(self):
        x = cuda.cupy.array([1, 2], numpy.float32)
        y = array.full_like(x, 3)
        self.assertIsInstance(y, cuda.cupy.ndarray)
        # Compare element values on the host.
        y = cuda.to_cpu(y)
        self.assertEqual(y.shape, (2,))
        self.assertEqual(y[0], 3)
        self.assertEqual(y[1], 3)
| mit | Python |
|
f4260ad3e652a09922395e64d29bcf8f96ee12bc | Add test_colormap.py | talespaiva/folium,shankari/folium,QuLogic/folium,python-visualization/folium,BibMartin/folium,ocefpaf/folium,QuLogic/folium,talespaiva/folium,BibMartin/folium,shankari/folium,talespaiva/folium,talespaiva/folium,BibMartin/folium,QuLogic/folium,shankari/folium,python-visualization/folium,ocefpaf/folium | tests/test_colormap.py | tests/test_colormap.py | # -*- coding: utf-8 -*-
""""
Folium Colormap Module
----------------------
"""
import folium.colormap as cm
def test_simple_step():
    """Smoke test: StepColormap construction and HTML rendering don't raise."""
    step = cm.StepColormap(['green','yellow','red'], vmin=3., vmax=10., index=[3,4,8,10], caption='step')
    step = cm.StepColormap(['r','y','g','c','b','m'])
    step._repr_html_()
def test_simple_linear():
    """Smoke test: LinearColormap construction and HTML rendering don't raise."""
    linear = cm.LinearColormap(['green','yellow','red'], vmin=3., vmax=10.)
    linear = cm.LinearColormap(['red','orange', 'yellow','green'], index=[0,0.1,0.9,1.])
    linear._repr_html_()
def test_linear_to_step():
    """Exercise every to_step signature: n, index, data-driven methods,
    quantiles and rounding modes."""
    some_list = [30.6, 50, 51, 52, 53, 54, 55, 60, 70, 100]
    lc = cm.linear.YlOrRd
    lc.to_step(n=12)
    lc.to_step(index=[0,2,4,6,8,10])
    lc.to_step(data=some_list, n=12)
    lc.to_step(data=some_list, n=12, method='linear')
    lc.to_step(data=some_list, n=12, method='log')
    lc.to_step(data=some_list, n=30, method='quantiles')
    lc.to_step(data=some_list, quantiles=[0,0.3,0.7,1])
    lc.to_step(data=some_list, quantiles=[0,0.3,0.7,1], round_method='int')
    lc.to_step(data=some_list, quantiles=[0,0.3,0.7,1], round_method='log10')
def test_step_to_linear():
    """Smoke test: a StepColormap converts back to a linear colormap."""
    step = cm.StepColormap(['green','yellow','red'], vmin=3., vmax=10., index=[3,4,8,10], caption='step')
    step.to_linear()
def test_linear_object():
    """Smoke test the built-in cm.linear palettes and their helpers."""
    cm.linear.OrRd._repr_html_()
    cm.linear.PuBu.to_step(12)
    cm.linear.YlGn.scale(3,12)
    cm.linear._repr_html_()
| mit | Python |
|
dd65fb84e41b11f8d97e3862d00137969589ab4b | integrate greenify | alex8224/gTornado,zhu327/greentor | tests/test_greenify.py | tests/test_greenify.py | from __future__ import absolute_import
import sys
import time
import greenify
greenify.greenify()
import pylibmc
import random
from tornado.ioloop import IOLoop
from tornado.gen import coroutine
from gtornado import green
greenify.patch_lib("/usr/lib/x86_64-linux-gnu/libmemcached.so")
def call_mc(i):
    """One unit of blocking libmemcached work: connect, fetch stats,
    disconnect.  *i* is only a task index."""
    mc = pylibmc.Client(["localhost"])
    mc.get_stats()
    mc.disconnect_all()
@coroutine
def use_greenlet():
    """Spawn 1000 memcached calls on greenlets and print the elapsed time."""
    s = time.time()
    yield [green.spawn(call_mc, i) for i in range(1000)]
    print(time.time() - s)
if __name__ == "__main__":
    IOLoop.instance().run_sync(use_greenlet)
| mit | Python |
|
7401d1ecd6b3323b266cf02eabd42a2c4e40d988 | Add initial tests for test module | timothycrosley/hug,timothycrosley/hug,timothycrosley/hug | tests/test_test.py | tests/test_test.py | """tests/test_test.py.
Test to ensure basic test functionality works as expected.
Copyright (C) 2019 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
api = hug.API(__name__)
def test_cli():
    """Test to ensure the CLI tester works as intended to allow testing CLI endpoints"""
    @hug.cli()
    def my_cli_function():
        return 'Hello'
    # The tester accepts either the function object or its registered name
    # (the latter resolved through the api).
    assert hug.test.cli(my_cli_function) == 'Hello'
    assert hug.test.cli('my_cli_function', api=api) == 'Hello'
    # Shouldn't be able to specify both api and module.
    with pytest.raises(ValueError):
        assert hug.test.cli('my_method', api=api, module=hug)
| mit | Python |
|
925da64adf0b74ba18eb78acd9127e3a6dc6f903 | Add test cases for reported issues | pjwerneck/pyrql | tests/test_reported.py | tests/test_reported.py | # -*- coding: utf-8 -*-
from pyrql import parse
# NOTE(review): CMP_OPS is unused in this module -- presumably intended for
# parametrizing comparison-operator regressions; confirm or remove.
CMP_OPS = ['eq', 'lt', 'le', 'gt', 'ge', 'ne']
class TestReportedErrors:
    """Regression tests for user-reported parsing issues."""
    def test_like_with_string_parameter(self):
        # Literal spaces inside a glob argument must survive parsing.
        expr = 'like(name,*new jack city*)'
        rep = {'name': 'like', 'args': ['name', '*new jack city*']}
        pd = parse(expr)
        assert pd == rep
    def test_like_with_string_encoded_parameter(self):
        # Percent-encoded spaces (%20) must be decoded during parsing.
        expr = 'like(name,*new%20jack%20city*)'
        rep = {'name': 'like', 'args': ['name', '*new jack city*']}
        pd = parse(expr)
        assert pd == rep
| mit | Python |
|
e61c6eb5b5a9f6f70df036dcfedf552325a6e9bd | move unit test syn import to pytest fixture | thomasyu888/synapsePythonClient | tests/unit/conftest.py | tests/unit/conftest.py | import logging
import pytest
from synapseclient import Synapse
from synapseclient.core.logging_setup import SILENT_LOGGER_NAME
"""
pytest unit test session level fixtures
"""
@pytest.fixture(scope="session")
def syn():
    """
    Create a Synapse instance that can be shared by all tests in the session.
    """
    # skip_checks=True presumably suppresses startup checks -- confirm
    # against the Synapse constructor; debug=False keeps the client quiet.
    syn = Synapse(debug=False, skip_checks=True)
    # Route client logging to the silent logger so tests emit no noise.
    syn.logger = logging.getLogger(SILENT_LOGGER_NAME)
    return syn
| apache-2.0 | Python |
|
182762812cb1945dd2b50c21b34609be00b7bf45 | Create wordlist_add_digits.py | Matir/analysis-tools | wordlist_add_digits.py | wordlist_add_digits.py | #!/usr/bin/env python
#Adds 4digits to the end of the common word lists
import os, sys
class Wordlist_Add_Digits():
    """Expand a wordlist by appending every four-digit suffix (0000-9999)."""
    def add_digits(self, wordlist, outfile):
        """Write each word of the file *wordlist* with suffixes 0000..9999
        appended, one candidate per line, to *outfile*.

        Fixes over the original: the output handle is closed via a context
        manager, the builtin name 'file' is no longer shadowed, and the four
        nested digit loops collapse into one zero-padded counter.
        """
        with open(wordlist) as src:
            words = src.read().splitlines()
        with open(outfile, 'w') as out:
            for word in words:
                for suffix in range(10000):
                    # '%04d' reproduces the original 0000..9999 ordering.
                    out.write('%s%04d\n' % (word, suffix))
if __name__ == '__main__':
    # CLI entry point: wordlist_add_digits.py <input wordlist> <output file>
    try:
        wordlist = sys.argv[1]
        outfile = sys.argv[2]
        wordz = Wordlist_Add_Digits()
        wordz.add_digits(wordlist, outfile)
    except IndexError:
        # Fewer than two command-line arguments were supplied.
        print('Usage: wordlist_add_digits.py wordlist.txt output.txt')
        sys.exit(1)
| mit | Python |
|
da3e9d5f7ffeae68ef7ae3b07247a9f6cb16d40d | Create get_user_statuses.py | RRSCDS/douban-mining | src/Python/get_user_statuses.py | src/Python/get_user_statuses.py | import sys
import urllib2
import time
import re
from lxml import html
def get_user_statuses(userid):
    """Scrape douban.com for the status texts ("sayings") posted by *userid*,
    paging until a page yields fewer than 20 status items.

    Returns a list of status strings (paragraphs joined with newlines).
    Fixes over the original: a leftover debug print of the per-page item
    count is removed, and the author-id regex is compiled once and searched
    once per item instead of twice.
    """
    people_re = re.compile(r".*people/(.+?)/")
    reached_end = False
    page = 1
    saying_list = []
    while not reached_end:
        page_url = "http://www.douban.com/people/%s/statuses?p=%d" % (userid, page)
        # TODO: User login. Results limited to the first 10 pages without login
        response = urllib2.urlopen(page_url)
        tree = html.fromstring(response.read())
        statuses = tree.xpath('//*[@class="status-item"]')
        # A short page means the last page of results has been reached.
        if len(statuses) < 20:
            reached_end = True
        for status in statuses:
            author_element = status.findall('.//*[@class="hd"]/*[@class="text"]/a')[0]
            author_link = author_element.get('href')
            match = people_re.search(author_link)
            author_id = match.group(1) if match else None
            # Keep only items whose author link matches the requested user.
            if author_id == userid:
                blockquotes = status.findall('.//*[@class="status-saying"]/blockquote')
                if blockquotes:
                    content = '\n'.join([p.text for p in blockquotes[0].findall('p')])
                    saying_list.append(content)
        page += 1
        time.sleep(1)  # be polite to the server
    return saying_list
if __name__ == "__main__":
    # CLI entry point (Python 2): print all scraped statuses for argv[1].
    userid = sys.argv[1]
    result_list = get_user_statuses( userid )
    for i in result_list:
        print i
| mit | Python |
|
c0637f482a95dd7ec02bb7b85bc8d164c0a80585 | add missing check_headers tool | pybee/Python-Android-support,dongguangming/python-for-android,ravsa/python-for-android,eHealthAfrica/python-for-android,kivy/python-for-android,Cheaterman/python-for-android,Cheaterman/python-for-android,kivy/python-for-android,codingang/python-for-android,manashmndl/python-for-android,dongguangming/python-for-android,dvenkatsagar/python-for-android,eHealthAfrica/python-for-android,tsdl2013/python-for-android,bob-the-hamster/python-for-android,olymk2/python-for-android,PKRoma/python-for-android,wexi/python-for-android,ehealthafrica-ci/python-for-android,ckudzu/python-for-android,EMATech/python-for-android,kerr-huang/python-for-android,kived/python-for-android,kronenpj/python-for-android,inclement/python-for-android,ravsa/python-for-android,eHealthAfrica/python-for-android,joliet0l/python-for-android,codingang/python-for-android,dvenkatsagar/python-for-android,manashmndl/python-for-android,tsdl2013/python-for-android,ehealthafrica-ci/python-for-android,codingang/python-for-android,alanjds/python-for-android,ASMfreaK/python-for-android,Stocarson/python-for-android,ASMfreaK/python-for-android,alanjds/python-for-android,ASMfreaK/python-for-android,bob-the-hamster/python-for-android,dl1ksv/python-for-android,rnixx/python-for-android,germn/python-for-android,cbenhagen/python-for-android,dongguangming/python-for-android,germn/python-for-android,dongguangming/python-for-android,Stocarson/python-for-android,joliet0l/python-for-android,tsdl2013/python-for-android,dongguangming/python-for-android,ASMfreaK/python-for-android,niavlys/python-for-android,niavlys/python-for-android,ibobalo/python-for-android,kived/python-for-android,dl1ksv/python-for-android,joliet0l/python-for-android,kerr-huang/python-for-android,kerr-huang/python-for-android,inclement/python-for-android,ehealthafrica-ci/python-for-android,Cheaterman/python-for-android,kivy/python-for-android,rnixx/python-for-android,kronenpj/python-for-a
ndroid,cbenhagen/python-for-android,germn/python-for-android,tsdl2013/python-for-android,olymk2/python-for-android,PKRoma/python-for-android,PKRoma/python-for-android,EMATech/python-for-android,ASMfreaK/python-for-android,olymk2/python-for-android,PKRoma/python-for-android,joliet0l/python-for-android,inclement/python-for-android,chozabu/p4a-ctypes,lc-soft/python-for-android,dongguangming/python-for-android,olymk2/python-for-android,ehealthafrica-ci/python-for-android,ravsa/python-for-android,codingang/python-for-android,kronenpj/python-for-android,niavlys/python-for-android,Stocarson/python-for-android,joliet0l/python-for-android,bob-the-hamster/python-for-android,Cheaterman/python-for-android,eHealthAfrica/python-for-android,dongguangming/python-for-android,niavlys/python-for-android,manashmndl/python-for-android,alanjds/python-for-android,kivatu/python-for-android,alanjds/python-for-android,bob-the-hamster/python-for-android,alanjds/python-for-android,olymk2/python-for-android,kronenpj/python-for-android,chozabu/p4a-ctypes,wexi/python-for-android,EMATech/python-for-android,cbenhagen/python-for-android,kronenpj/python-for-android,ibobalo/python-for-android,alanjds/python-for-android,renpytom/python-for-android,rnixx/python-for-android,joliet0l/python-for-android,Stocarson/python-for-android,codingang/python-for-android,kived/python-for-android,Stocarson/python-for-android,joliet0l/python-for-android,rnixx/python-for-android,tsdl2013/python-for-android,inclement/python-for-android,niavlys/python-for-android,kerr-huang/python-for-android,ehealthafrica-ci/python-for-android,ckudzu/python-for-android,dvenkatsagar/python-for-android,PKRoma/python-for-android,kerr-huang/python-for-android,ckudzu/python-for-android,ravsa/python-for-android,inclement/python-for-android,ckudzu/python-for-android,gonboy/python-for-android,EMATech/python-for-android,chozabu/p4a-ctypes,ckudzu/python-for-android,bob-the-hamster/python-for-android,manashmndl/python-for-android,niavlys/python-for
-android,kivy/python-for-android,EMATech/python-for-android,Cheaterman/python-for-android,manashmndl/python-for-android,dvenkatsagar/python-for-android,chozabu/p4a-ctypes,dl1ksv/python-for-android,codingang/python-for-android,renpytom/python-for-android,joliet0l/python-for-android,wexi/python-for-android,codingang/python-for-android,renpytom/python-for-android,germn/python-for-android,dvenkatsagar/python-for-android,renpytom/python-for-android,chozabu/p4a-ctypes,ehealthafrica-ci/python-for-android,tsdl2013/python-for-android,eHealthAfrica/python-for-android,cbenhagen/python-for-android,inclement/python-for-android,ASMfreaK/python-for-android,germn/python-for-android,kived/python-for-android,ckudzu/python-for-android,Stocarson/python-for-android,lc-soft/python-for-android,dvenkatsagar/python-for-android,Cheaterman/python-for-android,renpytom/python-for-android,germn/python-for-android,ravsa/python-for-android,EMATech/python-for-android,ravsa/python-for-android,wexi/python-for-android,kivatu/python-for-android,dvenkatsagar/python-for-android,Cheaterman/python-for-android,manashmndl/python-for-android,gonboy/python-for-android,lc-soft/python-for-android,manashmndl/python-for-android,niavlys/python-for-android,kived/python-for-android,wexi/python-for-android,ckudzu/python-for-android,dl1ksv/python-for-android,EMATech/python-for-android,rnixx/python-for-android,dl1ksv/python-for-android,manashmndl/python-for-android,ravsa/python-for-android,gonboy/python-for-android,kivy/python-for-android,ibobalo/python-for-android,lc-soft/python-for-android,tsdl2013/python-for-android,kivatu/python-for-android,lc-soft/python-for-android,kived/python-for-android,wexi/python-for-android,olymk2/python-for-android,gonboy/python-for-android,kivatu/python-for-android,ASMfreaK/python-for-android,ehealthafrica-ci/python-for-android,Cheaterman/python-for-android,dl1ksv/python-for-android,kerr-huang/python-for-android,dl1ksv/python-for-android,ckudzu/python-for-android,bob-the-hamster/python-for
-android,alanjds/python-for-android,Stocarson/python-for-android,cbenhagen/python-for-android,dvenkatsagar/python-for-android,Stocarson/python-for-android,kivatu/python-for-android,dl1ksv/python-for-android,eHealthAfrica/python-for-android,EMATech/python-for-android,lc-soft/python-for-android,gonboy/python-for-android,pybee/Python-Android-support,renpytom/python-for-android,chozabu/p4a-ctypes,kivatu/python-for-android,tsdl2013/python-for-android,ASMfreaK/python-for-android,kerr-huang/python-for-android,gonboy/python-for-android,kivatu/python-for-android,ravsa/python-for-android,ibobalo/python-for-android,gonboy/python-for-android,olymk2/python-for-android,ibobalo/python-for-android,codingang/python-for-android,eHealthAfrica/python-for-android,cbenhagen/python-for-android,dongguangming/python-for-android,niavlys/python-for-android,gonboy/python-for-android,olymk2/python-for-android,ibobalo/python-for-android,chozabu/p4a-ctypes,ehealthafrica-ci/python-for-android,rnixx/python-for-android | tools/check_headers.py | tools/check_headers.py | #!/usr/bin/env python2
import sys
from os import unlink
from os.path import exists
# Response headers that make up the cache signature.
HEADERS = ('Content-Disposition', 'Content-Length', 'Content-Type',
           'ETag', 'Last-Modified')
# Lowercased once at import time instead of on every call.
_HEADER_PREFIXES = tuple(h.lower() for h in HEADERS)
def is_sig_header(header):
    """Return True if *header* starts with one of the signature header names
    (case-insensitive), else False (the original fell through returning
    None; callers only test truthiness, so this is compatible)."""
    return header.lower().startswith(_HEADER_PREFIXES)
def do():
    """Compare selected response headers against a stored signature file.

    argv[1] is a file of raw HTTP response headers, argv[2] the signature
    file.  Returns 0 when the stored signature matches; otherwise rewrites
    (or removes) the signature file and returns None.
    """
    headers_fn = sys.argv[1]
    signature_fn = sys.argv[2]
    # first, get all the headers from the latest request
    with open(headers_fn) as fd:
        headers = [line.strip() for line in fd.readlines()]
    last_index = 0
    for index, header in enumerate(headers):
        if header.startswith('HTTP/1.'):
            last_index = index
    # Keep only the final status-line block (e.g. after redirects).
    headers = headers[last_index:]
    # select few headers for the signature
    headers = [header for header in headers if is_sig_header(header)]
    signature = '\n'.join(headers)
    # read the original signature
    if exists(signature_fn):
        with open(signature_fn) as fd:
            original_signature = fd.read()
        if original_signature == signature:
            return 0
        unlink(signature_fn)
    if signature:
        with open(signature_fn, 'w') as fd:
            fd.write(signature)
try:
    ret = do()
except Exception:
    # Any failure (missing argv, unreadable file) counts as "changed" ->
    # exit code 1.  Narrowed from a bare 'except:', which also swallowed
    # KeyboardInterrupt and SystemExit.
    ret = 1
sys.exit(ret)
| mit | Python |
|
3e5105218976549a0a782f179bb358edfd4e89c9 | Add load_tests / __init__.py to the azure/cli/tests module to allow for simpler unit test discovery | samedder/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,samedder/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,QingChenmsft/azure-cli,QingChenmsft/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,BurtBiel/azure-cli,BurtBiel/azure-cli | src/azure/cli/tests/__init__.py | src/azure/cli/tests/__init__.py | from .test_argparse import Test_argparse
from unittest import TestSuite
test_cases = [Test_argparse]
def load_tests(loader, tests, pattern):
    """Standard unittest ``load_tests`` hook: aggregate all classes listed in
    ``test_cases`` into a single suite for test discovery."""
    suite = TestSuite()
    for case_class in test_cases:
        suite.addTests(loader.loadTestsFromTestCase(case_class))
    return suite
| mit | Python |
|
f91d666cc06f5db48bea43de29ca4153e58c473d | add test for os platform check | marshki/pyWipe,marshki/pyWipe | check.py | check.py | #!/bin/py
import os
import sys
def osCheck():
    """ Check if OS is 'UNIX-like' """
    # Operator-precedence bug in the original:
    #     not A or B  ==  (not A) or B
    # which exited on macOS (darwin) and on every non-Linux platform.
    # The intent (see the commented-out second check in the original) was to
    # allow Linux *and* macOS.
    if not (sys.platform.startswith('linux') or sys.platform.startswith('darwin')):
        print("This program was designed for UNIX-like systems. Exiting.")
        sys.exit()
osCheck()
| mit | Python |
|
ba82331fa694ec26c7f0108451abf3912b5a37ff | Reimplement deprecated (1.6) _is_ignorable_404 | beniwohli/apm-agent-python,tarkatronic/opbeat_python,dirtycoder/opbeat_python,beniwohli/apm-agent-python,tarkatronic/opbeat_python,daikeren/opbeat_python,patrys/opbeat_python,daikeren/opbeat_python,ticosax/opbeat_python,beniwohli/apm-agent-python,dirtycoder/opbeat_python,patrys/opbeat_python,ticosax/opbeat_python,ticosax/opbeat_python,daikeren/opbeat_python,patrys/opbeat_python,dirtycoder/opbeat_python,beniwohli/apm-agent-python,tarkatronic/opbeat_python,patrys/opbeat_python | opbeat/contrib/django/middleware/__init__.py | opbeat/contrib/django/middleware/__init__.py | """
opbeat.contrib.django.middleware
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011-2012 Opbeat
Large portions are
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.conf import settings
from opbeat.contrib.django.models import client
import threading
import logging
def _is_ignorable_404(self, uri):
"""
Returns True if the given request *shouldn't* notify the site managers.
"""
return any(pattern.search(uri) for pattern in settings.IGNORABLE_404_URLS)
class Opbeat404CatchMiddleware(object):
    """Django middleware that reports 404 responses to Opbeat.

    URLs matched by ``settings.IGNORABLE_404_URLS`` are skipped.  The ids of
    the captured event are stored on ``request.opbeat`` so downstream
    middleware (e.g. OpbeatResponseErrorIdMiddleware) can expose them.
    """
    def process_response(self, request, response):
        # Only genuine, non-ignorable 404s are reported.
        if response.status_code != 404 or _is_ignorable_404(request.get_full_path()):
            return response
        data = client.get_data_from_request(request)
        data.update({
            'level': logging.INFO,
            'logger': 'http404',
        })
        result = client.capture('Message', param_message={'message':'Page Not Found: %s','params':[request.build_absolute_uri()]}, data=data)
        # Remember the capture id for response-header middleware downstream.
        request.opbeat = {
            'app_id': data.get('app_id', client.app_id),
            'id': client.get_ident(result),
        }
        return response
class OpbeatResponseErrorIdMiddleware(object):
    """
    Appends the X-Opbeat-ID response header for referencing a message within
    the Opbeat datastore.
    """
    def process_response(self, request, response):
        opbeat_info = getattr(request, 'opbeat', None)
        if not opbeat_info:
            return response
        # Expose the capture id so clients/support can reference the event.
        response['X-Opbeat-ID'] = opbeat_info['id']
        return response
class OpbeatLogMiddleware(object):
    """Makes the active request available to log handlers through
    thread-local storage."""

    # One slot per worker thread; log handlers read ``thread.request``.
    thread = threading.local()

    def process_request(self, request):
        self.thread.request = request
| """
opbeat.contrib.django.middleware
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011-2012 Opbeat
Large portions are
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.middleware.common import _is_ignorable_404
from opbeat.contrib.django.models import client
import threading
import logging
class Opbeat404CatchMiddleware(object):
    """Django middleware that reports non-ignorable 404 responses to Opbeat
    and stores the capture ids on ``request.opbeat``."""
    def process_response(self, request, response):
        # Only genuine, non-ignorable 404s are reported.
        if response.status_code != 404 or _is_ignorable_404(request.get_full_path()):
            return response
        data = client.get_data_from_request(request)
        data.update({
            'level': logging.INFO,
            'logger': 'http404',
        })
        result = client.capture('Message', param_message={'message':'Page Not Found: %s','params':[request.build_absolute_uri()]}, data=data)
        # Remember the capture id for response-header middleware downstream.
        request.opbeat = {
            'app_id': data.get('app_id', client.app_id),
            'id': client.get_ident(result),
        }
        return response
class OpbeatResponseErrorIdMiddleware(object):
    """
    Appends the X-Opbeat-ID response header for referencing a message within
    the Opbeat datastore.
    """
    def process_response(self, request, response):
        # Nothing to add unless an earlier middleware captured an event.
        if not getattr(request, 'opbeat', None):
            return response
        response['X-Opbeat-ID'] = request.opbeat['id']
        return response
class OpbeatLogMiddleware(object):
    # Create a threadlocal variable to store the session in for logging
    # (one slot per worker thread; log handlers read ``thread.request``).
    thread = threading.local()

    def process_request(self, request):
        # Remember the active request for this thread.
        self.thread.request = request
| bsd-3-clause | Python |
92b572004264c69baed5cce721e20e1a830514f8 | add 'is_changed' filter | serge-name/myansible,serge-name/myansible,serge-name/myansible | filter_plugins/is_changed.py | filter_plugins/is_changed.py | class FilterModule(object):
''' A comment '''
def filters(self):
return {
'is_changed': self.is_changed,
}
def is_changed(self, input_value, key, value):
if type(input_value) is not dict:
raise TypeError, u"{} must be dict (got {})".format(input_value, str(type(input_value)))
if input_value.has_key('results'):
res = input_value['results']
else:
res = [input_value]
for item in res:
if item.has_key(key) and item.has_key('changed'):
if item[key] == value and item['changed'] == True:
return True
return False
| mit | Python |
|
82cab3f91df9b4bb9f60e553d6b9e4ef431cb6ae | Add __init__.py | ra1fh/eppconvert | eppconvert/__init__.py | eppconvert/__init__.py | #
# Copyright (c) 2017 Ralf Horstmann <ralf@ackstorm.de>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
__all__ = ['eppformat', 'eppread', 'gpx2epp']
| isc | Python |
|
2f9699d5088266aaa76dad1742f2432d78da9d3b | add validator class | biothings/biothings_explorer,biothings/biothings_explorer | biothings_explorer/resolve_ids/validator.py | biothings_explorer/resolve_ids/validator.py | from collections import defaultdict
from ..config_new import ID_RESOLVING_APIS
from ..exceptions.id_resolver import InvalidIDResolverInputError
from ..utils.common import getPrefixFromCurie
class Validator:
def __init__(self, user_input):
self.__user_input = user_input
self.__valid = defaultdict(list)
self.__invalid = defaultdict(list)
def get_valid_inputs(self):
return self.__valid
def get_invalid_inputs(self):
return self.__invalid
def _validate_if_input_is_dict(self, user_input):
if not isinstance(user_input, dict):
raise InvalidIDResolverInputError(
user_input,
message="Your Input to ID Resolver is Invalid. It should be a dictionary!",
)
def _validate_if_values_of_input_is_list(self, user_input):
for k, v in user_input.items():
if not isinstance(v, list):
raise InvalidIDResolverInputError(
user_input,
message="Your Input to ID Resolver is Invalid. All values of your input dictionary should be a list!",
)
def _validate_if_each_item_in_input_values_is_curie(self, user_input):
for k, v in user_input.items():
for _v in v:
if not isinstance(_v, str) or ":" not in _v:
raise InvalidIDResolverInputError(
user_input,
message="Your Input to ID Resolver is Invalid. Each item in the values of your input dictionary should be a curie. Spotted {} is not a curie".format(
_v
),
)
def _check_if_semantic_type_can_be_resolved(self, user_input):
res = {}
for k, v in user_input.items():
if k not in ID_RESOLVING_APIS:
self.__invalid[k] = v
else:
res[k] = v
return res
def _check_if_prefix_can_be_resolved(self, user_input):
for k, v in user_input.items():
for _v in v:
if getPrefixFromCurie(_v) not in ID_RESOLVING_APIS[k]["mapping"]:
self.__invalid[k].append(_v)
else:
self.__valid[k].append(_v)
def validate(self):
self._validate_if_input_is_dict(self.__user_input)
self._validate_if_values_of_input_is_list(self.__user_input)
self._validate_if_each_item_in_input_values_is_curie(self.__user_input)
tmp_valid_res = self._check_if_semantic_type_can_be_resolved(self.__user_input)
self._check_if_prefix_can_be_resolved(tmp_valid_res)
| apache-2.0 | Python |
|
c69fdba07aa4228f3e708b49e7fef4d0143e7a13 | Add missing stats.py | voer-platform/vp.repo,voer-platform/vp.repo,voer-platform/vp.repo,voer-platform/vp.repo | vpr/tests/api_stats.py | vpr/tests/api_stats.py | from django.db import connection
SQL_COUNT = 'select count(id) from vpr_api_apirecord where %s=%s;'
def countValue(field, value, time_start=None, time_end=None):
cur = connection.cursor()
cur.execute(SQL_COUNT % (field, value))
return cur.fetchone()
| agpl-3.0 | Python |
|
13addaf6e5a0423b632efcc4d16e3e5d864fdac3 | Create validate_csv_wd.py | rupendrab/py_unstr_parse | validate_csv_wd.py | validate_csv_wd.py | #!/usr/bin/env python3.5
import sys
import re
import os
import csv
def read_file(fname):
f = open(fname, 'r')
csv_reader = csv.reader(f, delimiter='~')
no_rows = 0
for row in csv_reader:
no_rows += 1
no_cols = len(row)
print("Row %d: columns = %d" % (no_rows, no_cols))
f.close()
print(".........")
print("Number of records in csv file: %d" % no_rows)
if __name__ == '__main__':
args = sys.argv[1:]
for fl in args:
print("File : %s" % fl)
print("..................................")
read_file(fl)
| mit | Python |
|
6c4ef8298bbdf48f82d13fb25a0f3958237392f2 | Add nova client for retrieving instance information | rcritten/novajoin | novajoin/nova.py | novajoin/nova.py | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Handle communication with Nova."""
from novaclient import client
from oslo_config import cfg
from oslo_log import log as logging
from novajoin import keystone_client
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
NOVA_APIVERSION = 2.1
class NovaClient(object):
"""Wrapper around nova client."""
def __init__(self):
self.version = NOVA_APIVERSION
self.client = self._nova_client()
def _nova_client(self):
"""Instantiate a new novaclient.Client object."""
session = keystone_client.get_session()
return client.Client(str(self.version), session=session)
def get_instance(instance_id):
novaclient = NovaClient()
try:
return novaclient.client.servers.get(instance_id)
except novaclient.exceptions.NotFound:
return None
| apache-2.0 | Python |
|
d53358a6a0a564a5b4982f7f3dfdfd1163d6a295 | Add test covering no RunStop for v2. | ericdill/databroker,ericdill/databroker | databroker/tests/test_v2/test_no_run_stop.py | databroker/tests/test_v2/test_no_run_stop.py | # This is a special test because we corrupt the generated data.
# That is why it does not reuse the standard fixures.
import tempfile
from suitcase.jsonl import Serializer
from bluesky import RunEngine
from bluesky.plans import count
from ophyd.sim import det
from databroker._drivers.jsonl import BlueskyJSONLCatalog
def test_no_stop_document(RE, tmpdir):
"""
When a Run has no RunStop document, whether because it does not exist yet
or because the Run was interrupted in a critical way and never completed,
we expect the field for 'stop' to contain None.
"""
directory = str(tmpdir)
serializer = Serializer(directory)
def insert_all_except_stop(name, doc):
if name != 'stop':
serializer(name, doc)
RE(count([det]), insert_all_except_stop)
serializer.close()
catalog = BlueskyJSONLCatalog(f'{directory}/*.jsonl')
assert catalog[-1].metadata['start'] is not None
assert catalog[-1].metadata['stop'] is None
| bsd-3-clause | Python |
|
ace782a3f4c616f9e22e1a1ce29f053b71391845 | Add missing migration for column description. | pbs/django-cms,pbs/django-cms,pbs/django-cms,pbs/django-cms | cms/migrations/0002_update_template_field.py | cms/migrations/0002_update_template_field.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='page',
name='template',
field=models.CharField(help_text='Templates are used to render the layout of a page.', max_length=100, verbose_name='template', choices=[(b'test-template.html', 'Test Template')]),
preserve_default=True,
),
]
| bsd-3-clause | Python |
|
c1e76dbdf07e67d98814d6f357a70c692af3a31d | Add first pass at db router | sloria/osf.io,cslzchen/osf.io,laurenrevere/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,cwisecarver/osf.io,adlius/osf.io,Johnetordoff/osf.io,acshi/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,monikagrabowska/osf.io,binoculars/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,binoculars/osf.io,leb2dg/osf.io,caneruguz/osf.io,crcresearch/osf.io,caseyrollins/osf.io,adlius/osf.io,monikagrabowska/osf.io,acshi/osf.io,chrisseto/osf.io,felliott/osf.io,adlius/osf.io,cwisecarver/osf.io,binoculars/osf.io,cslzchen/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,baylee-d/osf.io,chennan47/osf.io,mattclark/osf.io,baylee-d/osf.io,acshi/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,baylee-d/osf.io,mfraezz/osf.io,aaxelb/osf.io,TomBaxter/osf.io,mattclark/osf.io,cwisecarver/osf.io,chrisseto/osf.io,felliott/osf.io,laurenrevere/osf.io,leb2dg/osf.io,laurenrevere/osf.io,felliott/osf.io,acshi/osf.io,felliott/osf.io,Nesiehr/osf.io,leb2dg/osf.io,saradbowman/osf.io,sloria/osf.io,caneruguz/osf.io,crcresearch/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,erinspace/osf.io,TomBaxter/osf.io,cslzchen/osf.io,pattisdr/osf.io,hmoco/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,chennan47/osf.io,acshi/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,mfraezz/osf.io,icereval/osf.io,hmoco/osf.io,saradbowman/osf.io,erinspace/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,hmoco/osf.io,pattisdr/osf.io,hmoco/osf.io,pattisdr/osf.io,cslzchen/osf.io,cwisecarver/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,icereval/osf.io,sloria/osf.io,adlius/osf.io,HalcyonChimera/osf.io,icereval/osf.io,Nesiehr/osf.io,chennan47/osf.io | osf/db/router.py | osf/db/router.py | from 
django.conf import settings
import psycopg2
CACHED_MASTER = None
class PostgreSQLFailoverRouter(object):
"""
1. CHECK MASTER_SERVER_DSN @ THREAD LOCAL
2. THERE?, GOTO 9
3. GET RANDOM_SERVER FROM `settings.DATABASES`
4. CONNECT TO RANDOM_SERVER
5. IS MASTER SERVER?
6. YES? GOTO 8
7. NO?, `exit()`
8. STOR MASTER_SERVER_DSN @ THREAD_LOCAL
9. PROFIT
Number of servers can be assumed to be > 1 but shouldn't assume 2 max.
Might be nice to keep track of the servers that have been tried from settings.DATABASES so we don't get into a loop.
"""
DSNS = dict()
def __init__(self):
self._get_dsns()
global CACHED_MASTER
if not CACHED_MASTER:
CACHED_MASTER = self._get_master()
def _get_master(self):
for name, dsn in self.DSNS.iteritems():
conn = self._get_conn(dsn)
cur = conn.cursor()
cur.execute('SELECT pg_is_in_recovery();')
row = cur.fetchone()
if not row[0]:
cur.close()
conn.close()
return name
cur.close()
conn.close()
return None
def _get_dsns(self):
template = '{protocol}://{USER}:{PASSWORD}@{HOST}:{PORT}/{NAME}'
for name, db in settings.DATABASES.iteritems():
if 'postgresql' in db['ENGINE']:
db['protocol'] = 'postgres'
# db.setdefault('protocol', 'postgres')
else:
raise Exception('PostgreSQLFailoverRouter only works with PostgreSQL... ... ...')
self.DSNS[name] = template.format(**db)
def _get_conn(self, dsn):
return psycopg2.connect(dsn)
def db_for_read(self, model, **hints):
if not CACHED_MASTER:
exit()
return CACHED_MASTER
def db_for_write(self, model, **hints):
if not CACHED_MASTER:
exit()
return CACHED_MASTER
def allow_relation(self, obj1, obj2, **hints):
return None
def allow_migrate(self, db, app_label, model_name=None, **hints):
return None
| apache-2.0 | Python |
|
aca6b8b4cd221efca6d3a5f59f96b73d70e65714 | test integration against scipy | adrn/gala,adrn/gary,adrn/gary,adrn/gala,adrn/gary,adrn/gala | gary/integrate/tests/test_1d.py | gary/integrate/tests/test_1d.py | # coding: utf-8
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os
import time
import logging
# Third-party
import numpy as np
from astropy import log as logger
from scipy.integrate import simps
# Project
from ..simpsgauss import simpson
logger.setLevel(logging.DEBUG)
plot_path = "plots/tests/TODO"
if not os.path.exists(plot_path):
os.makedirs(plot_path)
def test_simpson():
ncalls = 10
func = lambda x: np.sin(x - 0.2414)*x + 2.
x = np.linspace(0, 10, 250001)
y = func(x)
t0 = time.time()
for i in range(ncalls):
s1 = simpson(y, dx=x[1]-x[0])
print("cython (odd): {0} sec for {1} calls".format(time.time() - t0,ncalls))
t0 = time.time()
for i in range(ncalls):
s2 = simps(y, x=x)
print("python (odd): {0} sec for {1} calls".format(time.time() - t0,ncalls))
np.testing.assert_allclose(s1, s2)
# -----------------------------------------------------
print()
x = np.linspace(0, 10, 250000)
y = func(x)
t0 = time.time()
for i in range(ncalls):
s1 = simpson(y, dx=x[1]-x[0])
print("cython (even): {0} sec for {1} calls".format(time.time() - t0,ncalls))
t0 = time.time()
for i in range(ncalls):
s2 = simps(y, x=x)
print("python (even): {0} sec for {1} calls".format(time.time() - t0,ncalls))
np.testing.assert_allclose(s1, s2)
| mit | Python |
|
ca002a18b7e392bbdca9d7e0ed8c39739dc5b4a3 | Add code to get 99th percentile absolute pointing for POG | sot/aimpoint_mon,sot/aimpoint_mon | pog_absolute_pointing.py | pog_absolute_pointing.py | import numpy as np
from Chandra.Time import DateTime
import plot_aimpoint
# Get 99th percential absolute pointing radius
plot_aimpoint.opt = plot_aimpoint.get_opt()
asols = plot_aimpoint.get_asol()
# Last six months of data
asols = asols[asols['time'] > DateTime(-183).secs]
# center of box of range of data
mid_dy = (np.max(asols['dy']) + np.min(asols['dy'])) / 2.
mid_dz = (np.max(asols['dz']) + np.min(asols['dz'])) / 2.
# radius of each delta in mm (asol dy dz in mm)
dr = np.sqrt((asols['dy'] - mid_dy) ** 2 + (asols['dz'] - mid_dz) ** 2)
dr_99 = np.percentile(dr, 99)
dr_99_arcsec = dr_99 * 20
print "99th percentile radius of 6m data is {} arcsec".format(dr_99_arcsec)
| bsd-2-clause | Python |
|
874da8664a6ee62937fb859665e17c035a66324b | add utils | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | custom/enikshay/management/commands/utils.py | custom/enikshay/management/commands/utils.py | from __future__ import print_function
import csv
import datetime
from django.core.management import BaseCommand
from casexml.apps.case.mock import CaseFactory
from casexml.apps.case.xform import get_case_updates
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from custom.enikshay.case_utils import (
CASE_TYPE_OCCURRENCE,
CASE_TYPE_PERSON,
get_first_parent_of_case,
)
from custom.enikshay.exceptions import ENikshayCaseNotFound
def get_result_recorded_form(test):
"""get last form that set result_recorded to yes"""
for action in reversed(test.actions):
if action.form is not None:
for update in get_case_updates(action.form):
if (
update.id == test.case_id
and update.get_update_action()
and update.get_update_action().dynamic_properties.get('result_recorded') == 'yes'
):
return action.form.form_data
def is_person_public(domain, test):
try:
occurrence_case = get_first_parent_of_case(domain, test.case_id, CASE_TYPE_OCCURRENCE)
person_case = get_first_parent_of_case(domain, occurrence_case.case_id, CASE_TYPE_PERSON)
except ENikshayCaseNotFound:
return False
return person_case.get_case_property('enrolled_in_private') != 'true'
class BaseEnikshayCaseMigration(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('domain')
parser.add_argument('log_file_name')
parser.add_argument('case_ids', nargs='*')
parser.add_argument('--commit', action='store_true')
def handle(self, domain, log_file_name, case_ids, **options):
commit = options['commit']
print("Starting {} migration on {} at {}".format(
"real" if commit else "fake", domain, datetime.datetime.utcnow()
))
with open(log_file_name, "w") as log_file:
writer = csv.writer(log_file)
writer.writerow(
['case_id']
+ ['current_' + case_prop for case_prop in self.case_properties_to_update]
+ self.case_properties_to_update
+ [self.datamigration_case_property]
)
for case in self.get_cases(domain, self.case_type, case_ids):
updated_case_properties = self.get_case_property_updates(case, domain)
needs_update = bool(updated_case_properties)
updated_case_properties[self.datamigration_case_property] = 'yes' if needs_update else 'no'
writer.writerow(
[case.case_id]
+ [case.get_case_property(case_prop) or '' for case_prop in self.case_properties_to_update]
+ [updated_case_properties.get(case_prop, '') for case_prop in (
self.case_properties_to_update + [self.datamigration_case_property])]
)
if needs_update and commit:
self.commit_updates(domain, case.case_id, updated_case_properties)
@staticmethod
def get_cases(domain, case_type, case_ids):
accessor = CaseAccessors(domain)
case_ids = case_ids or accessor.get_case_ids_in_domain(type=case_type)
return accessor.iter_cases(case_ids)
@staticmethod
def commit_updates(domain, case_id, updated_case_properties):
CaseFactory(domain).update_case(case_id, update=updated_case_properties)
@property
def case_type(self):
raise NotImplementedError
@property
def case_properties_to_update(self):
raise NotImplementedError
@property
def datamigration_case_property(self):
raise NotImplementedError
@staticmethod
def get_case_property_updates(case, domain):
raise NotImplementedError
| bsd-3-clause | Python |
|
fa28e80dc7aeed1eb4fb0a18126a2f8105d5a5d2 | Create Cleverbot.py | alexsiri7/jasper-module-cleverbot | Cleverbot.py | Cleverbot.py | mport re
import cleverbot
import traceback
WORDS = ["CLEVERBOT", "BOT"]
PATTERN = r"\b(cleverbot|bot)\b"
def handle(text, mic, profile):
"""
Responds to user-input, typically speech text, starting a conversation with cleverbot
Arguments:
text -- user-input, typically transcribed speech
mic -- used to interact with the user (for both input and output)
profile -- contains information related to the user (e.g., phone number)
"""
mic.say('Starting clever bot')
exit = False
bot = cleverbot.Cleverbot()
errors = 0
while not exit:
try:
question = mic.activeListen()
if is_exit(question):
break
answer = bot.ask(question)
mic.say(answer)
except Exception as e:
mic.say('Oops')
print traceback.format_exc()
errors += 1
if errors > 5:
break
mic.say('Stopping clever bot')
def is_exit(text):
return bool(re.search(r"(exit|quit|stop)", text, re.IGNORECASE))
def isValid(text):
return bool(re.search(PATTERN, text, re.IGNORECASE))
| mit | Python |
|
405dfc9a0a814001961e4090be83a3da4a4d4369 | Copy in constants file from master | architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst | cea/technologies/constants.py | cea/technologies/constants.py | """
Constants used throughout the cea.technologies package.
History lesson: This is a first step at removing the `cea.globalvars.GlobalVariables` object.
"""
# Heat Exchangers
U_cool = 2500.0 # W/m2K
U_heat = 2500.0 # W/m2K
dT_heat = 5.0 # K - pinch delta at design conditions
dT_cool = 2.0 # K - pinch delta at design conditions
# Specific heat
rho_W = 998.0 # [kg/m^3] density of Water
cp = 4185.0 # [J/kg K]
# Substation data
roughness = 0.02 / 1000 # roughness coefficient for heating network pipe in m (for a steel pipe, from Li &
NetworkDepth = 1 # m
# Initial Diameter guess
REDUCED_TIME_STEPS = 50 # number of time steps of maximum demand which are evaluated as an initial guess of the edge diameters | mit | Python |
|
656cf2955510151675dfb4acae4e92e21021a6b5 | Add the Course of LiaoXueFeng | MiracleWong/PythonBasic,MiracleWong/PythonBasic | LiaoXueFeng/function.py | LiaoXueFeng/function.py | def fact(n):
if n==1:
return 1
return n * fact(n - 1)
print fact(10)
| mit | Python |
|
7a8250e6640c8ebf36cd159607da24b095cf708e | Create Fibonacci.py | MichaelAubry/Fusion360 | Fibonacci.py | Fibonacci.py | #Author-Michael Aubry
#Description-This script outputs a spiraling fibinacci sequence onto a Fusion 360 sketch
import adsk.core, adsk.fusion
app= adsk.core.Application.get()
design = app.activeProduct
ui = app.userInterface;
#**User Inputs**
Steps = 15 #How many steps of Fibonacci would you like to plot?
Length = 2 #How long is the first segment? (cm)
#Get root component
rootComp = design.rootComponent
#Create a new sketch on XY plane
sketch = rootComp.sketches.add(rootComp.xYConstructionPlane)
# Create an object collection for the points.
points = adsk.core.ObjectCollection.create()
# R = total steps to be run thru the For loop
R = Steps - 2
#starting x and y coordiantes
x = 0
y = 0
#Create 1st coordinate
points.add(adsk.core.Point3D.create(x,y,0))
#starting values for sequence
fib = 1
fib1 = 1
#1st fib number
#print str(fib)
#Create 2nd coordinate
x = 1 * Length
points.add(adsk.core.Point3D.create(x,y,0))
#bins for shifting x and y coordinates
Bin1 = range(0,R,4)
Bin2 = range(1,R,4)
Bin3 = range(2,R,4)
Bin4 = range(3,R,4)
for i in range(R):
fib2 = fib + fib1
fib = fib1
fib1 = fib2
fibLength = fib*Length #adds the scalar component to coordinates
if i in Bin1:
x = x
y = y + fibLength
points.add(adsk.core.Point3D.create(x,y,0))
if i in Bin2:
x = x - fibLength
y = y
points.add(adsk.core.Point3D.create(x,y,0))
if i in Bin3:
x = x
y = y - fibLength
points.add(adsk.core.Point3D.create(x,y,0))
if i in Bin4:
x = x + fibLength
y = y
points.add(adsk.core.Point3D.create(x,y,0))
# Create the spline.
sketch.sketchCurves.sketchFittedSplines.add(points)
| mit | Python |
|
85c02da33f5e9ed4ef1e72bef3cec094ca8cf4d5 | add DBMetric class that holds data that has to be recorded | djangothon/django-db-meter,djangothon/django-db-meter,djangothon/django-db-meter | django_db_meter/message.py | django_db_meter/message.py | import datetime
import cPickle as pickle
import pylzma
import json
from collections import namedtuple
from django.core.serializers import serialize, deserialize
from django.conf import settings
from core.log import sclient
from core.utils import run_async
from newsfeed.activity import Actor, Target
from newsfeed.constants import NEWSFEED_QUEUE_NAME
from newsfeed.config import FeedConfig
from realtime.kafka.producer import KafkaProducer
class DBMetric(object):
def __init__(**kwargs):
self.timestamp = kwargs.get('timestamp', datetime.datetime.now())
self.query_start_time = kwargs.get('query_start_time')
self.query_execution_time = kwargs.get('query_execution_time')
self.query_sql = kwargs.get('query_sql')
self.query_tables = kwargs.get('query_tables', [])
self.db_name = kwargs.get('db_name')
self.app_name = kwargs.get('app_name')
self.rows_affected = kwargs.get('rows_affected')
def as_dict(self):
data = {
'timestamp': self.timestamp,
'query_start_time': self.query_start_time,
'query_execution_time': self.query_execution_time,
'query_sql': self.query_sql,
'query_tables': self.query_tables,
'db_name': self.db_name,
'app_name': self.app_name,
'rows_affected': self.rows_affected
}
return data
def as_json(self):
data = self.as_dict()
data_json = json.dumps(data)
return data_json
@classmethod
def from_queryset(cls, queryset):
kwargs = {
'timestamp': datetime.datetime.now(),
'query_start_time': queryset.query_start_time,
'query_execution_time': queryset.query_execution_time,
'query_sql': queryset.query.__str__(),
'query_tables': self._get_query_tables(queryset),
'db_name': self._get_db_name(queryset),
'app_name': queryset.model._meta.app_label,
'rows_affected': queryset.count(),
}
obj = cls(**kwargs)
return obj
def send(self):
msg_json = self.as_json()
@classmethod
def _get_db(cls, queryset):
return settings.DATABASES.get(queryset.db).get('NAME')
@classmethod
def _get_query_tables(self, queryset):
query_tables = queryset.tables
query_tables.extend(queryset.select_related.keys())
return query_tables
def serialize(self):
#self.obj = serialize('json', [self.obj])
#print self.obj
serialized = pickle.dumps(self)
compressed = pylzma.compress(serialized)
return compressed
@staticmethod
def deserialize(compressed_feed_message):
decompressed_msg = pylzma.decompress(compressed_feed_message)
deserialized = pickle.loads(decompressed_msg)
return deserialized
@classmethod
def send_metric(cls, actor_ctype, actor_object_id, action, target_ctype,
target_object_id, properties={},
activity_datetime=None,
activity_source=None):
msg = cls(actor_ctype=actor_ctype,
actor_object_id=actor_object_id,
action=action,
target_ctype=target_ctype,
target_object_id=target_object_id,
properties=properties,
activity_datetime=activity_datetime,
activity_source=activity_source)
msg.send()
def send(self):
| apache-2.0 | Python |
|
b777872d1b06714f538dc8fb21b790de822b5a66 | Update Example folder | DiCarloLab-Delft/PycQED_py3,QudevETH/PycQED_py3,DiCarloLab-Delft/PycQED_py3,DiCarloLab-Delft/PycQED_py3 | examples/listing_instruments.py | examples/listing_instruments.py | import visa
# Open the VISA resource manager and enumerate the connected instruments.
rm = visa.ResourceManager()
rm.list_resources()
|
735c55d68d4831137255808042684733f93d5c18 | add iconv clone | The-Orizon/nlputils,The-Orizon/nlputils,The-Orizon/nlputils | iconv.py | iconv.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import locale
import argparse
import fileinput
preferredenc = locale.getpreferredencoding()
parser = argparse.ArgumentParser(
description="Convert encoding of given files from one encoding to another.")
parser.add_argument(
"-f", "--from-code", metavar='NAME', default=preferredenc,
help="encoding of original text (locale default: %s)" % preferredenc)
parser.add_argument(
"-t", "--to-code", metavar='NAME', default=preferredenc,
help="encoding for output (locale default: %s)" % preferredenc)
parser.add_argument(
"-c", metavar='errors', nargs='?', default='strict', const='ignore',
help="set error handling scheme (default: 'strict', omitted: 'ignore')")
parser.add_argument("-o", metavar='FILE', help="output file")
parser.add_argument("FILE", nargs='*', help="input file")
args = parser.parse_args()
if args.o:
wstream = open(args.o, 'wb')
else:
wstream = sys.stdout.buffer
with fileinput.input(args.FILE, mode='rb') as f, wstream:
for line in f:
wstream.write(
line.decode(args.from_code, args.c).encode(args.to_code, args.c))
| mit | Python |
|
aa7f888605dee0a845a20e1c0869cc5061719151 | Add rtree spatial index class | evil-mad/plotink | plotink/rtree.py | plotink/rtree.py | # -*- coding: utf-8 -*-
# rtree.py
# part of plotink: https://github.com/evil-mad/plotink
#
# See below for version information
#
# Written by Michal Migurski https://github.com/migurski @michalmigurski
# as a contribution to the AxiDraw project https://github.com/evil-mad/axidraw/
#
# Copyright (c) 2022 Windell H. Oskay, Evil Mad Scientist Laboratories
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
rtree.py
Minimal R-tree spatial index class for calculating intersecting regions
"""
import math
class Index:
    """One-shot R-Tree spatial index (build once; no rebalancing or inserts).

    Construction recursively partitions the input bounding boxes into four
    quadrants around the mean box center, stopping when a split makes no
    progress.  ``intersection`` then answers "which box IDs overlap this
    query box?" queries.
    """
    # Class-level defaults shared by all nodes; overwritten per instance in
    # __init__ (leaf nodes keep ``bboxes``, internal nodes keep ``subtrees``).
    bboxes = []
    subtrees = []
    xmin = None
    ymin = None
    xmax = None
    ymax = None

    def __init__(self, bboxes):
        mid_x, mid_y = 0, 0
        self.xmin, self.ymin = math.inf, math.inf
        self.xmax, self.ymax = -math.inf, -math.inf

        for _, (bx1, by1, bx2, by2) in bboxes:
            # Running mean of the box centers; also track the overall extent.
            mid_x += (bx1/2 + bx2/2) / len(bboxes)
            mid_y += (by1/2 + by2/2) / len(bboxes)
            self.xmin = min(self.xmin, bx1)
            self.ymin = min(self.ymin, by1)
            self.xmax = max(self.xmax, bx2)
            self.ymax = max(self.ymax, by2)

        # Bucket each box into every quadrant it touches (a box straddling
        # the center appears in more than one bucket).
        quads = [[], [], [], []]
        for entry in bboxes:
            _, (bx1, by1, bx2, by2) = entry
            west, east = bx1 < mid_x, bx2 > mid_x
            north, south = by1 < mid_y, by2 > mid_y
            if west and north:
                quads[0].append(entry)
            if east and north:
                quads[1].append(entry)
            if west and south:
                quads[2].append(entry)
            if east and south:
                quads[3].append(entry)

        if any(len(quad) == len(bboxes) for quad in quads):
            # Splitting made no progress: store the boxes in this leaf node.
            self.bboxes = bboxes
        else:
            # Recurse: one child index per quadrant.
            self.subtrees = [Index(quad) for quad in quads]

    def intersection(self, bbox):
        """Return the set of IDs whose boxes overlap ``bbox``."""
        qx1, qy1, qx2, qy2 = bbox
        found = set()
        for box_id, (bx1, by1, bx2, by2) in self.bboxes:
            if qx1 <= bx2 and qy1 <= by2 and qx2 >= bx1 and qy2 >= by1:
                found.add(box_id)
        for sub in self.subtrees:
            if qx1 <= sub.xmax and qy1 <= sub.ymax and qx2 >= sub.xmin and qy2 >= sub.ymin:
                found |= sub.intersection(bbox)
        return found
| mit | Python |
|
56ca21d312f34b6a229fe6cdb720ccc96ef712a5 | add polysites2vcf | mathii/gdc | polysites2vcf.py | polysites2vcf.py | #This is for converting Shop Mallick's polysite format to vcf
#Probably only useful to you if you are working on the SGDP
#Very specific to this particular format.
from __future__ import division, print_function
import argparse, sys, pdb
#Remember, in eigenstrat, 2 means "2 ref copies"
# IUPAC nucleotide code -> unphased diploid genotype (two bases).
# The ambiguity codes (R, Y, S, W, K, M) represent heterozygous calls;
# '-' and 'N' map to missing ("..").
CODES={
    "A":"AA",
    "C":"CC",
    "G":"GG",
    "T":"TT",
    "R":"AG",
    "Y":"CT",
    "S":"GC",
    "W":"AT",
    "K":"GT",
    "M":"AC",
    "-":"..",
    "N":"..",
    }
################################################################################
def parse_options():
    """
    Parse command-line options.

    -i/--input: the polysite file to convert (default "-", i.e. stdin).
    """
    parser=argparse.ArgumentParser()
    parser.add_argument('-i', '--input', type=argparse.FileType('r'), default="-")
    return parser.parse_args()
################################################################################
def main(options):
    """
    Convert the polysite stream in ``options.input`` to VCF on stdout.

    Lines starting "##" form the header and list the samples; the first
    line starting with a single "#" ends the header, after which each
    line is a site record emitted as one VCF data row.
    """
    samples=[]
    # Hard-coded switches: ancient samples and reference panels excluded.
    include_ancients=False
    include_refs=False
    reading_header=True
    for line in options.input:
        if len(line)==1:
            # Newline-only line: skip.
            continue
        elif line[:2]=="##" and reading_header:
            bits=line.split()
            if len(bits)<4:
                continue
            elif bits[1]!="..":
                continue
            # NOTE(review): bits[2][0] appears to be a sample-class digit
            # (4 = modern, 7 = ancient, 8 = also kept) -- confirm against
            # the polysite header format.
            elif bits[2][0]=="4" and include_refs:
                samples.append(bits[7])
            elif bits[2][0]=="4":
                samples.append(bits[7])
            elif bits[2][0]=="7" and include_ancients:
                samples.append(bits[4].split(":")[0])
            elif bits[2][0]=="8":
                samples.append(bits[4].split(":")[0])
        elif line[0]=="#" and reading_header:
            # First single-"#" line closes the header: emit the VCF header.
            reading_header=False
            print("##fileformat=VCFv4.2")
            print("##source=polysites2vcf.py")
            print("##FORMAT=<ID=GT,Number=1,Type=String,Description=\"Genotype\">")
            print("#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\t"+"\t".join(samples))
        elif not reading_header:
            bits=line.split()
            chrom=bits[0]
            poss=bits[1]
            idd=chrom+"_"+poss
            ref=bits[2][0]
            # Per-sample IUPAC calls; which columns contribute depends on
            # the include_* switches above.
            alleles=(bits[3]+bits[4]+bits[7]).upper()
            if include_refs and include_ancients:
                alleles=(bits[2]+bits[3]+bits[4]+bits[6]+bits[7]).upper()
            elif include_refs:
                alleles=(bits[2]+bits[3]+bits[4]+bits[7]).upper()
            elif include_ancients:
                alleles=(bits[3]+bits[4]+bits[6]+bits[7]).upper()
            gts = [CODES[x] for x in alleles]
            # Alternate alleles: every observed base that is not REF/missing.
            # NOTE(review): set() makes the ALT ordering hash-dependent
            # between runs.
            alt_alleles=list(set([x for x in "".join(gts) if (x!=ref and x!=".")]))
            if not len(alt_alleles):
                # Monomorphic site: nothing to report.
                continue
            alt=",".join(alt_alleles)
            allele_map={ref:"0", ".":"."}
            for i,a in enumerate(alt_alleles):
                allele_map[a]=str(i+1)
            gt_strings=[allele_map[x[0]]+"/"+allele_map[x[1]] for x in gts]
            print("\t".join([chrom, poss, idd, ref, alt, "100", ".", ".", "GT"]+gt_strings))
        else:
            print(line, file=sys.stderr)
            raise Exception("Header line in unexpected place")
################################################################################
# Script entry point: convert the polysite input to VCF on stdout.
if __name__=="__main__":
    options=parse_options()
    main(options)
| apache-2.0 | Python |
|
72dcd6857f5f895f0fb9325681302f5875bc50ec | Add a new user-defined file | NSLS-II-CHX/ipython_ophyd,NSLS-II-CHX/ipython_ophyd | profile_collection/startup/31-capillaries.py | profile_collection/startup/31-capillaries.py | #6.342 mm apart
#6.074
# Horizontal positions (diff.xh) of each capillary; all share one height.
# NOTE(review): the file header says the capillaries sit ~6.342 mm apart,
# but the spacings below are ~6.33-6.34 mm -- confirm against the rig.
_CAPILLARY_XH = {
    6: 12.41,
    7: 6.075,
    8: -.26695,
    9: -6.609,
    10: -12.951,
}
_CAPILLARY_YH = -12.58

def _capillary_in(number):
    """Drive the diffractometer stage so capillary ``number`` is in the beam."""
    mov(diff.xh, _CAPILLARY_XH[number])
    mov(diff.yh, _CAPILLARY_YH)

def capillary6_in():
    """Move capillary 6 into the beam."""
    _capillary_in(6)

def capillary7_in():
    """Move capillary 7 into the beam."""
    _capillary_in(7)

def capillary8_in():
    """Move capillary 8 into the beam."""
    _capillary_in(8)

def capillary9_in():
    """Move capillary 9 into the beam."""
    _capillary_in(9)

def capillary10_in():
    """Move capillary 10 into the beam."""
    _capillary_in(10)
'''
commands to move capillaries
capillary6_in()
sam = Sample("YT-11")
capillary7_in()
sam = Sample("YT-28")
capillary8_in()
sam = Sample("YT-47")
capillary9_in()
sam = Sample("YT-306")
capillary10_in()
sam = Sample("YT-51")
'''
''' Steps for capillary measurements:
1. Rename "Name Pattern" to sample name (YT-3 for example)
2. type command capillary6_in() (or the number)
3. move and measure
'''
| bsd-2-clause | Python |
|
f1ee6ce108626342b42a2d2a7b5aa4779af87e6c | Add python code to plot the histogram | chengluyu/SDU-Computer-Networks | plot-histogram.py | plot-histogram.py | import matplotlib.pyplot as plt
import sys
if __name__ == "__main__":
    # Read one float per line from the file named on the command line,
    # then show a 100-bin histogram of the values.
    with open(sys.argv[1]) as sample_file:
        samples = [float(line) for line in sample_file]
    plt.hist(samples, 100)
    plt.show()
| mit | Python |
|
0852aa9328cf3fe2b975581f4e67357fc2c68f06 | add reprozip installation and trace cmd | kaczmarj/neurodocker,kaczmarj/neurodocker | neurodocker/interfaces/reprozip.py | neurodocker/interfaces/reprozip.py | """Add Dockerfile instructions to minimize container with ReproZip.
Project repository: https://github.com/ViDA-NYU/reprozip/
See https://github.com/freesurfer/freesurfer/issues/70 for an example of using
ReproZip to minimize Freesurfer's recon-all command.
"""
# Author: Jakub Kaczmarzyk <jakubk@mit.edu>
from __future__ import absolute_import, division, print_function
import posixpath
from neurodocker.utils import indent, manage_pkgs
class Reprozip(object):
    """Add Dockerfile instructions to minimize a container based on a command
    or a list of commands.
    First, reprozip trace is run on a command or a list of commands, and then
    all files are deleted except those in the reprozip trace output.
    Parameters
    ----------
    cmds : str or list
        Command(s) to run to minimize the image. Double-quotes within commands
        will be escaped automatically.
    pkg_manager : {'apt', 'yum'}
        Linux package manager.
    """
    def __init__(self, cmds, pkg_manager, trace_dir="/reprozip-trace"):
        self.cmds = cmds
        self.pkg_manager = pkg_manager
        self.trace_dir = trace_dir
        # Normalize a single command string into a one-element list.
        if isinstance(self.cmds, str):
            self.cmds = [self.cmds]
        # Private Miniconda used only for reprozip; deliberately off PATH.
        self._conda_root = "/opt/miniconda-reprozip"
    def _create_cmd(self):
        """Return full command to install and run ReproZip."""
        comment = ("#-----------------\n"
                   "# Install ReproZip\n"
                   "#-----------------\n")
        # Concatenate the shell snippets and wrap them in a Dockerfile RUN
        # instruction via neurodocker.utils.indent.
        cmds = (self._install_miniconda(), self._install_reprozip(),
                self.trace())
        cmds = indent("RUN", ''.join(cmds))
        return comment + cmds
    def _install_miniconda(self):
        """Install Miniconda solely for reprozip. Do not add this installation
        to PATH.
        """
        url = ("https://repo.continuum.io/miniconda/"
               "Miniconda3-latest-Linux-x86_64.sh")
        return ("curl -ssL -o miniconda.sh {}"
                "\n&& bash miniconda.sh -b -p {}"
                "\n&& rm -f miniconda.sh".format(url, self._conda_root))
    def _install_reprozip(self):
        """Conda install reprozip from the vida-nyu channel."""
        conda = posixpath.join(self._conda_root, 'bin', 'conda')
        return ("\n&& {conda} install -y -q python=3.5 pyyaml"
                "\n&& {conda} install -y -q -c vida-nyu reprozip"
                "".format(conda=conda))
    def trace(self):
        """Run reprozip trace on the specified commands."""
        reprozip = posixpath.join(self._conda_root, 'bin', 'reprozip')
        trace_cmds = []
        base = ('\n&& {reprozip} trace -d {trace_dir} --dont-identify-packages'
                ' {continue_}\n\t{cmd}')
        for i, cmd in enumerate(self.cmds):
            if not cmd:
                raise ValueError("Command to trace is empty.")
            # Every trace after the first continues the same trace directory.
            continue_ = "--continue " if i else ""
            trace_cmds.append(base.format(cmd=cmd, reprozip=reprozip,
                                          trace_dir=self.trace_dir,
                                          continue_=continue_))
        return "".join(trace_cmds)
    def remove_untraced_files(self):
        # Not implemented yet: would delete all files absent from the trace.
        # QUESTION: how do we deal with directories in config.yml?
        pass
| apache-2.0 | Python |
|
c9f64c0e61fb08c43b1c8cb93ec6f9c389b9c31c | delete finished pods from cluster | Iwan-Zotow/runEGS | XcScripts/deletePods.py | XcScripts/deletePods.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import json
import shutil
import subprocess
import time
def ReadPodsToBeDeleted(fname):
    """Read pod names from ``fname``: one per line, trailing newline stripped."""
    with open(fname, 'r') as pod_file:
        return [entry.rstrip('\n') for entry in pod_file]
def main(pods_fname):
    """
    This method takes a list of pods and deletes them all,
    one by one.  Exits with status 1 on the first pod that
    cannot be deleted after 12 attempts.
    """
    pods = ReadPodsToBeDeleted(pods_fname)
    print("To remove PODs: {0}".format(len(pods)))
    for pod in pods:
        # NOTE(review): shell=True interpolates the pod name into a shell
        # command; pod names come from the input file, so keep that file
        # trusted.
        cmd = "kubectl delete pod " + pod
        rc = 0
        for k in range(0, 12): # several attempts to delete the pod
            rc = subprocess.call(cmd, shell=True)
            if rc == 0:
                break
        if rc != 0:
            print("Cannot delete pod {0}".format(pod))
            sys.exit(1)
# CLI entry point: requires the pod-list filename as the sole argument.
if __name__ =='__main__':
    nof_args = len(sys.argv)
    if nof_args == 1:
        print("Use:deletePods list_of_PODs")
        sys.exit(1)
    pods_fname = ""
    if nof_args >= 2:
        pods_fname = sys.argv[1]
    main(pods_fname)
    sys.exit(0)
| apache-2.0 | Python |
|
375134eba8a7fa1cbf2ab5c94ae0976eebc65de9 | Solve Code Fights crazyball problem | HKuz/Test_Code | CodeFights/crazyball.py | CodeFights/crazyball.py | #!/usr/local/bin/python
# Code Fights Crazyball Problem
from itertools import combinations
def crazyball(players, k):
    """Return every k-player team: each team sorted, teams in sorted order."""
    teams = [sorted(combo) for combo in combinations(players, k)]
    teams.sort()
    return teams
def main():
    """Self-test harness: each entry is [players, k, expected_teams]."""
    tests = [
        [["Ninja", "Warrior", "Trainee", "Newbie"], 3,
         [["Newbie", "Ninja", "Trainee"], ["Newbie", "Ninja", "Warrior"],
          ["Newbie", "Trainee", "Warrior"],
          ["Ninja", "Trainee", "Warrior"]]],
        [["Ninja", "Warrior", "Trainee", "Newbie"], 4,
         [["Newbie", "Ninja", "Trainee", "Warrior"]]],
        [["Pooh"], 1, [["Pooh"]]],
        [["Browny", "Whitey", "Blacky"], 1,
         [["Blacky"], ["Browny"], ["Whitey"]]],
        [["One", "Two", "Three", "Four", "Five", "Six", "Seven"], 5,
         [["Five", "Four", "One", "Seven", "Six"],
          ["Five", "Four", "One", "Seven", "Three"],
          ["Five", "Four", "One", "Seven", "Two"],
          ["Five", "Four", "One", "Six", "Three"],
          ["Five", "Four", "One", "Six", "Two"],
          ["Five", "Four", "One", "Three", "Two"],
          ["Five", "Four", "Seven", "Six", "Three"],
          ["Five", "Four", "Seven", "Six", "Two"],
          ["Five", "Four", "Seven", "Three", "Two"],
          ["Five", "Four", "Six", "Three", "Two"],
          ["Five", "One", "Seven", "Six", "Three"],
          ["Five", "One", "Seven", "Six", "Two"],
          ["Five", "One", "Seven", "Three", "Two"],
          ["Five", "One", "Six", "Three", "Two"],
          ["Five", "Seven", "Six", "Three", "Two"],
          ["Four", "One", "Seven", "Six", "Three"],
          ["Four", "One", "Seven", "Six", "Two"],
          ["Four", "One", "Seven", "Three", "Two"],
          ["Four", "One", "Six", "Three", "Two"],
          ["Four", "Seven", "Six", "Three", "Two"],
          ["One", "Seven", "Six", "Three", "Two"]]]
    ]
    # Compare each computed result against the expected list and report.
    for t in tests:
        res = crazyball(t[0], t[1])
        ans = t[2]
        if ans == res:
            print("PASSED: crazyball({}, {}) returned {}"
                  .format(t[0], t[1], res))
        else:
            print(("FAILED: crazyball({}, {}) returned {},"
                   "answer: {}").format(t[0], t[1], res, ans))
if __name__ == '__main__':
    main()
| mit | Python |
|
e7bac459119e32cb79708ae7764a149dc22a1ed8 | add visitor.py from python svn (python 2.5 doesnt have it) | pyjs/pyjs,lancezlin/pyjs,pyjs/pyjs,minghuascode/pyj,pombredanne/pyjs,gpitel/pyjs,pyjs/pyjs,minghuascode/pyj,lancezlin/pyjs,pyjs/pyjs,anandology/pyjamas,Hasimir/pyjs,spaceone/pyjs,minghuascode/pyj,lancezlin/pyjs,Hasimir/pyjs,gpitel/pyjs,anandology/pyjamas,lancezlin/pyjs,anandology/pyjamas,anandology/pyjamas,gpitel/pyjs,spaceone/pyjs,spaceone/pyjs,pombredanne/pyjs,gpitel/pyjs,minghuascode/pyj,Hasimir/pyjs,pombredanne/pyjs,Hasimir/pyjs,spaceone/pyjs,pombredanne/pyjs | pyjs/src/pyjs/visitor.py | pyjs/src/pyjs/visitor.py | # XXX should probably rename ASTVisitor to ASTWalker
# XXX can it be made even more generic?
class ASTVisitor:
    """Depth-first AST walker.

    ``preorder(tree, visitor)`` walks ``tree`` (an ``ast.Node``), looking up
    a ``visitXXX`` method on ``visitor`` for each node class encountered
    (e.g. ``visitClass`` for a ``Class`` node).  If the visitor defines one,
    it is called with the node as its sole (leading) argument; otherwise the
    walker descends into every child of the node.

    During the walk the visitor's handlers control descent themselves: the
    walker injects a ``visit`` attribute on the visitor, so a handler can
    recurse into chosen child nodes of arbitrary type.
    """
    VERBOSE = 0

    def __init__(self):
        self.node = None
        self._cache = {}  # node class -> resolved handler

    def default(self, node, *args):
        # No specific handler: walk every child instead.
        for child in node.getChildNodes():
            self.dispatch(child, *args)

    def dispatch(self, node, *args):
        self.node = node
        klass = node.__class__
        handler = self._cache.get(klass)
        if handler is None:
            handler = getattr(self.visitor, 'visit' + klass.__name__,
                              self.default)
            self._cache[klass] = handler
        return handler(node, *args)

    def preorder(self, tree, visitor, *args):
        """Do a preorder walk of ``tree``, reporting nodes to ``visitor``."""
        self.visitor = visitor
        visitor.visit = self.dispatch
        self.dispatch(tree, *args)
class ExampleASTVisitor(ASTVisitor):
    """Prints examples of the nodes that aren't visited
    This visitor-driver is only useful for development, when it's
    helpful to develop a visitor incrementally, and get feedback on what
    you still have to do.
    """
    # Shared across instances: node classes that have already been reported.
    examples = {}
    def dispatch(self, node, *args):
        # NOTE: Python 2 module -- print statements below.
        self.node = node
        meth = self._cache.get(node.__class__, None)
        className = node.__class__.__name__
        if meth is None:
            # Unlike the base class, a missing handler is cached as 0
            # (falsy), so unhandled node types reach the reporting branch.
            meth = getattr(self.visitor, 'visit' + className, 0)
            self._cache[node.__class__] = meth
        if self.VERBOSE > 1:
            print "dispatch", className, (meth and meth.__name__ or '')
        if meth:
            meth(node, *args)
        elif self.VERBOSE > 0:
            # NOTE(review): with VERBOSE == 0 an unhandled node's children
            # are not walked at all -- confirm this is intentional.
            klass = node.__class__
            if klass not in self.examples:
                self.examples[klass] = klass
                print
                print self.visitor
                print klass
                for attr in dir(node):
                    if attr[0] != '_':
                        print "\t", "%-12.12s" % attr, getattr(node, attr)
                print
            return self.default(node, *args)
# XXX this is an API change
_walker = ASTVisitor  # default walker class used by walk()
def walk(tree, visitor, walker=None, verbose=None):
    """Walk ``tree`` with ``visitor``; returns the visitor for chaining."""
    if walker is None:
        walker = _walker()
    if verbose is not None:
        walker.VERBOSE = verbose
    walker.preorder(tree, visitor)
    return walker.visitor
def dumpNode(node):
    # Debug helper (Python 2 print statements): show a node's public attrs.
    print node.__class__
    for attr in dir(node):
        if attr[0] != '_':
            print "\t", "%-10.10s" % attr, getattr(node, attr)
| apache-2.0 | Python |
|
7da94fd5576f4c052e79a8068164c101054d5ae7 | Add Python / `requests` example | masterleague-net/api-examples | python/simple.py | python/simple.py | import requests # http://python-requests.org/
# Premium user authentication process and API access example
r = requests.post('https://api.masterleague.net/auth/token/', data={'username': 'user', 'password': '12345'})
# NOTE(review): r.json() is parsed twice below; consider caching the result.
if 'token' not in r.json():
    print(r.text)
    raise ValueError("Unable to extract authentication token!")
token = r.json()['token']
# Authenticated session: every subsequent request carries the token header.
s = requests.Session()
s.headers.update({'Authorization': 'Token ' + token})
r = s.get('https://api.masterleague.net/heroes.json')
print(r.text)
# Anonymous user access example
r = requests.get('https://api.masterleague.net/heroes.json')
print(r.text)
| mit | Python |
|
b4f2c7b8bde0d28f7d1b61718eb7cd0b9159f507 | add __version__ | harmslab/epistasis,Zsailer/epistasis | epistasis/__version__.py | epistasis/__version__.py | __version__ = "0.6.4"
| unlicense | Python |
|
9498ac9ec27bbef1725b92e84a3b0d4c9e967aa6 | add ex14 | Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python | lpthw/ex14.py | lpthw/ex14.py | #!/usr/bin/env python
# Exercise 14: Prompting and Passing
from sys import argv
# Grab the script name and the user's name from the command line
# (raises ValueError if no argument is supplied).
script, user_name = argv
prompt = '> '
# NOTE: Python 2 script -- print statements and raw_input().
print "Hi %s, I'm the %s script." % (user_name, script)
print "I'd like to ask you a few questions."
print "Do you like me %s?" % user_name
likes = raw_input(prompt)
print "Where do you live %s?" % user_name
lives = raw_input(prompt)
print "What kind of computer do you have?"
computer = raw_input(prompt)
# %r shows the repr of each answer (quotes included).
print """
Alright, so you said %r about liking me.
You live in %r. Not sure where that is.
And you have a %r computer. Nice.
""" % (likes, lives, computer)
| mit | Python |
|
1c8fd79c783ba6f21140b4c08bbf648bf5989dd4 | Add main module | HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core | core/hybra.py | core/hybra.py | import data_loader
import descriptives
import network
import timeline
import wordclouds
def load_data( terms = [], data_folder = '' ):
    """Load data from every known loader, or from one data folder.

    NOTE(review): the mutable default ``terms=[]`` is safe here only
    because it is never mutated.
    """
    if data_folder == '':
        return load_all_data( terms )
    else:
        # Ensure a trailing slash so the loader prefix can be split off.
        if '/' not in data_folder:
            data_folder += '/'
        # Loader name is the first path component of the folder.
        loader = data_folder.split( '/' )[0]
        return load_data_from_folder( terms, loader, data_folder )
def load_all_data( terms ):
    """Run every ``load_*`` loader in data_loader; collect results by name.

    Each loader is called exactly once: with ``terms`` unpacked as
    positional arguments when any were given, otherwise with no arguments.
    (Previously the non-empty ``terms`` branch called the loader and then
    called its return value again, mirroring the empty-terms code path.)
    """
    data = {}
    for function_name in dir( data_loader ):
        if 'load_' in function_name:
            loader = getattr( data_loader, function_name )
            if len( terms ) == 0:
                data[function_name] = loader()
            else:
                data[function_name] = loader( *terms )
    return data
def load_data_from_folder( terms, loader, data_folder ):
    """Collect data from every data_loader function matching ``loader``."""
    data = []
    for function_name in dir( data_loader ):
        if loader in function_name:
            if len( terms ) == 0:
                data += getattr( data_loader, function_name )( data_folder = data_folder )
            else:
                # With search terms, loaders take (terms, data_folder).
                data += getattr( data_loader, function_name)( terms, data_folder )
    return data
def describe( data ):
    """Print descriptive statistics; dicts are described per loader."""
    # NOTE: Python 2 print statements below.
    if isinstance( data, dict ):
        for loader in data:
            print loader
            descriptives.describe( data[loader] )
            print '\n'
    else:
        descriptives.describe( data )
# Thin wrappers delegating to the corresponding visualization modules.
def create_timeline( data ):
    timeline.create_timeline( data )
def create_network( data ):
    network.create_network( data )
def create_wordcloud( data ):
    wordclouds.create_wordcloud( data )
| mit | Python |
|
0ec0398f8e50ed0adca426f9c468fd5154603941 | add mmd matrix example | deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel | open_spiel/python/examples/mmd_matrix_example.py | open_spiel/python/examples/mmd_matrix_example.py | """ Example of using MMD with dilated entropy
to solve for QRE in a Matrix Game """
from absl import app
from absl import flags
from open_spiel.python.algorithms import mmd_dilated
import pyspiel
FLAGS = flags.FLAGS
# Command-line knobs for the solver.
flags.DEFINE_integer("iterations", 1000, "Number of iterations")
flags.DEFINE_float("alpha", 0.1, "QRE parameter, larger value amounts to more regularization")
flags.DEFINE_integer("print_freq", 100, "How often to print the gap")
# create pyspiel perturbed RPS matrix game
# (row-player and column-player payoff matrices; zero-sum)
game = pyspiel.create_matrix_game([[0, -1, 3],
                                   [1, 0, -3],
                                   [-3, 3, 0]],
                                  [[0, 1, -3],
                                   [-1, 0, 3],
                                   [3, -3, 0]])
game = pyspiel.convert_to_turn_based(game)
def main(_):
    """Run MMD with dilated entropy; report the QRE gap periodically."""
    mmd = mmd_dilated.MMDDilatedEnt(game, FLAGS.alpha)
    for i in range(FLAGS.iterations):
        mmd.update_sequences()
        if i % FLAGS.print_freq == 0:
            conv = mmd.get_gap()
            print("Iteration {} gap {}".format(i, conv))
    # Extract policies for both players
    print(mmd.get_policies().action_probability_array)
    # Note the sequence form and behavioural-form coincide
    # for a normal-form game (sequence form has extra root value of 1)
    print(mmd.current_sequences())
if __name__ == "__main__":
    app.run(main)
|
eb2cbb45fd78c2e8accdaa6f8ba37ef1403159dd | Add brainfuck live shell | agwilt/python | bf/bf_cmd.py | bf/bf_cmd.py | #!/usr/bin/env python3
class BracketError(Exception):
    # Raised when a Brainfuck program's '[' and ']' counts do not match.
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
class Machine():
    """A Brainfuck machine: a rightward-growing tape of 7-bit cells."""
    def __init__(self):
        self.tape = [0]
        self.p = 0  # data pointer into self.tape
    def run(self, code):
        """Execute the Brainfuck program string ``code`` on this machine.

        Raises BracketError when '[' and ']' counts are unbalanced.
        Output from '.' is echoed without newlines; one final newline is
        printed if anything was output.
        """
        pc = 0
        loop_stack = []  # indices of the '[' of each currently-open loop
        brackets = 0
        printed = False
        # Sanity-check overall bracket balance before executing.
        for instr in code:
            if instr == '[':
                brackets += 1
            elif instr == ']':
                brackets -= 1
        if brackets != 0:
            raise BracketError('Error: failed bracket count')
        while pc < len(code):
            instr = code[pc]
            # increment/decrement
            if instr == '+':
                self.increment(1)
            elif instr == '-':
                self.increment(-1)
            # I/O
            elif instr == '.':
                print(chr(self.cell()), end='')
                printed = True
            elif instr == ',':
                self.input()
            # move tape
            elif instr == '<':
                if self.p > 0:
                    self.p -= 1
                else:
                    print("Error: Can't decrement pointer")
            elif instr == '>':
                # Grow the tape lazily when moving past its right edge.
                if self.p > (len(self.tape)-2):
                    self.tape.append(0)
                self.p += 1
            # looping
            elif instr == ']':
                # Jump back to the matching '['; it re-tests the cell.
                pc = loop_stack.pop() - 1
            elif instr == '[':
                if self.cell() == 0:
                    # BUG FIX: skip forward to the *matching* ']' by
                    # tracking nesting depth.  Scanning to the first ']'
                    # broke nested loops (e.g. '[[-]+]') and later popped
                    # an empty loop stack.
                    depth = 1
                    while depth:
                        pc += 1
                        if code[pc] == '[':
                            depth += 1
                        elif code[pc] == ']':
                            depth -= 1
                else:
                    loop_stack.append(pc)
            pc += 1
        if printed:
            print('')
    def set(self, val):
        # Cells wrap modulo 128 (7-bit ASCII range).
        self.tape[self.p] = val % 128
    def increment(self, amount):
        self.set(self.cell() + amount)
    def input(self):
        """Read one character from stdin into the current cell (0 if empty)."""
        character = input()
        if character == '':
            print("No value given, setting cell to 0 ...")
            self.set(0)
        else:
            self.set(ord(character[0]))
    def cell(self):
        return self.tape[self.p]
    def dump(self):
        # Debug view: pointer position followed by the whole tape.
        print("%d," % self.p, self.tape)
# Interactive REPL: each line typed is run as a Brainfuck program against a
# persistent tape; single-letter commands control the shell itself.
if __name__ == "__main__":
    helptext = "h: Display this help text\nq: Quit\nd: Print tape, pointer\nr: Reset tape"
    tape = Machine()
    while True:
        try:
            # Prompt shows the pointer position and current cell value.
            command = input("[%d]:%d$ " %(tape.p,tape.cell()))
        except EOFError:
            # Ctrl-D quits like 'q'.
            break
        if command == "":
            continue
        elif command == "q" or command == "quit":
            break
        elif command == "d" or command == "dump":
            tape.dump()
        elif command == "h" or command == "help":
            print(helptext)
        elif command == "r" or command == "reset":
            tape = Machine()
            print("Tape Reset.")
        else:
            tape.run(command)
    print("Goodbye!")
| mit | Python |
|
3425d265c32d33c189710bcffd1d0df62ce27b3a | update model | freedomboxtwh/Plinth,jvalleroy/plinth-debian,harry-7/Plinth,vignanl/Plinth,vignanl/Plinth,kkampardi/Plinth,kkampardi/Plinth,kkampardi/Plinth,jvalleroy/plinth-debian,freedomboxtwh/Plinth,freedomboxtwh/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,vignanl/Plinth,jvalleroy/plinth-debian,kkampardi/Plinth,jvalleroy/plinth-debian,jvalleroy/plinth-debian,vignanl/Plinth,vignanl/Plinth,harry-7/Plinth,kkampardi/Plinth,harry-7/Plinth,harry-7/Plinth,freedomboxtwh/Plinth | model.py | model.py | class User(dict):
    """ Every user must have keys for a username, name, passphrase (this
    is a bcrypt hash of the password), salt, groups, and an email address.
    They can be blank or None, but the keys must exist. """
    def __init__(self, dict=None):
        # Seed every required key: string fields blank, groups empty.
        # (Note: the parameter name shadows the builtin ``dict``.)
        for key in ['username', 'name', 'passphrase', 'salt', 'email']:
            self[key] = ''
        for key in ['groups']:
            self[key] = []
        if dict:
            # Overlay any caller-provided key/value pairs.
            for key in dict:
                self[key] = dict[key]
    def __getattr__(self, attr):
        # Missing attribute access yields None instead of AttributeError;
        # note this applies to *attributes*, not to dict-style key access.
        return None
| class User(dict):
""" Every user must have keys for a username, name, passphrase (this
is a md5 hash of the password), groups, and an email address. They can be
blank or None, but the keys must exist. """
def __init__(self, dict=None):
for key in ['username', 'name', 'passphrase', 'email']:
self[key] = ''
for key in ['groups']:
self[key] = []
if dict:
for key in dict:
self[key] = dict[key]
def __getattr__(self, attr):
return None
| agpl-3.0 | Python |
f95d7011ff89badfadbd07da0226f67f6dbd27a5 | Remove unused `organizations:new-tracebacks` flag. (#4083) | zenefits/sentry,gencer/sentry,JamesMura/sentry,JamesMura/sentry,fotinakis/sentry,looker/sentry,BuildingLink/sentry,JamesMura/sentry,JamesMura/sentry,BuildingLink/sentry,alexm92/sentry,mvaled/sentry,mvaled/sentry,ifduyue/sentry,beeftornado/sentry,beeftornado/sentry,jean/sentry,beeftornado/sentry,looker/sentry,looker/sentry,mvaled/sentry,jean/sentry,BuildingLink/sentry,zenefits/sentry,looker/sentry,JackDanger/sentry,zenefits/sentry,zenefits/sentry,BuildingLink/sentry,ifduyue/sentry,JackDanger/sentry,gencer/sentry,ifduyue/sentry,gencer/sentry,BuildingLink/sentry,JamesMura/sentry,ifduyue/sentry,fotinakis/sentry,jean/sentry,alexm92/sentry,JackDanger/sentry,fotinakis/sentry,mvaled/sentry,mvaled/sentry,alexm92/sentry,zenefits/sentry,gencer/sentry,ifduyue/sentry,fotinakis/sentry,mvaled/sentry,jean/sentry,jean/sentry,looker/sentry,gencer/sentry | src/sentry/features/__init__.py | src/sentry/features/__init__.py | from __future__ import absolute_import
from .base import * # NOQA
from .handler import * # NOQA
from .manager import * # NOQA
# Global feature-flag registry; each add() declares a known feature name and
# the feature class that scopes it (organization / project level).
default_manager = FeatureManager() # NOQA
default_manager.add('auth:register')
default_manager.add('organizations:api-keys', OrganizationFeature) # NOQA
default_manager.add('organizations:create')
default_manager.add('organizations:sso', OrganizationFeature) # NOQA
default_manager.add('organizations:onboarding', OrganizationFeature) # NOQA
default_manager.add('organizations:callsigns', OrganizationFeature) # NOQA
default_manager.add('organizations:reports:prepare', OrganizationFeature) # NOQA
default_manager.add('organizations:reports:deliver', OrganizationFeature) # NOQA
default_manager.add('projects:global-events', ProjectFeature) # NOQA
default_manager.add('projects:quotas', ProjectFeature) # NOQA
default_manager.add('projects:plugins', ProjectPluginFeature) # NOQA
# expose public api
add = default_manager.add
get = default_manager.get
has = default_manager.has
| from __future__ import absolute_import
from .base import * # NOQA
from .handler import * # NOQA
from .manager import * # NOQA
default_manager = FeatureManager() # NOQA
default_manager.add('auth:register')
default_manager.add('organizations:api-keys', OrganizationFeature) # NOQA
default_manager.add('organizations:create')
default_manager.add('organizations:sso', OrganizationFeature) # NOQA
default_manager.add('organizations:onboarding', OrganizationFeature) # NOQA
default_manager.add('organizations:callsigns', OrganizationFeature) # NOQA
default_manager.add('organizations:new-tracebacks', OrganizationFeature) # NOQA
default_manager.add('organizations:reports:prepare', OrganizationFeature) # NOQA
default_manager.add('organizations:reports:deliver', OrganizationFeature) # NOQA
default_manager.add('projects:global-events', ProjectFeature) # NOQA
default_manager.add('projects:quotas', ProjectFeature) # NOQA
default_manager.add('projects:plugins', ProjectPluginFeature) # NOQA
# expose public api
add = default_manager.add
get = default_manager.get
has = default_manager.has
| bsd-3-clause | Python |
bbfcddbb21a6b6f40fafe8c88ca76ab4a0b4667b | add script to analysis the flow map | chihyaoma/Activity-Recognition-with-CNN-and-RNN,chihyaoma/Activity-Recognition-with-CNN-and-RNN | FlowNet/flowAnalysis.py | FlowNet/flowAnalysis.py | # When the movement of the objects in the video is not distinct to be
# captured by optical flow algorithm, training this "noisy" flow map
# against the ground truth labeling is risky. In this code, we would
# like to iterate through all the generated flow videos, and filter
# out the noisy flow map.
#
#
# Contact: Chih-Yao Ma at cyma@gatech.edu
# Last update: 05/17/2016
import time
import numpy as np
import cv2
import matplotlib.pyplot as plt
# cap = cv2.VideoCapture('v_HandStandPushups_g01_c04_flow.avi')
cap = cv2.VideoCapture('v_HandStandPushups_g12_c06_flow.avi')
# information of the video
# property identifier:
# 1: ?; 2: s/frame; 3: width; 4: height; 6: ?; 7: ?
Fr = round(1 / cap.get(2))
Wd = int(cap.get(3))
Ht = int(cap.get(4))
# Define the codec and create VideoWriter object
# fourcc = cv2.cv.CV_FOURCC('X', 'V', 'I', 'D') # opencv 2.4
fourcc = cv2.VideoWriter_fourcc(*'XVID') # opencv 3.0
out = cv2.VideoWriter('out_flow.avi', fourcc, Fr, (Wd, Ht))
indFrame = 1  # 1-based frame counter used in the log output
def close_event():
    """Timer callback: close the active pyplot figure."""
    plt.close() # fires after the timer interval (4000 ms below, not 3 s)
# Main loop: show each flow frame and histogram its three HSV channels.
# BUG FIX: `while(cap.isOpened)` tested the bound method object (always
# truthy) instead of calling it; the loop only exited via the read failure
# branch below.
while cap.isOpened():
    # Capture frame-by-frame
    ret, frame = cap.read()
    if ret:
        print('--------------------------------------')
        print('Frame # ', indFrame)
        # convert back to HSV
        hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
        # extract the channels and flatten them for histogramming
        channel_0 = hsv[..., 0].flatten()
        channel_1 = hsv[..., 1].flatten()
        channel_2 = hsv[..., 2].flatten()
        # out.write(frame)
        # Display the resulting frame
        cv2.imshow('Processed frame', frame)
        # plot histogram for each channel
        fig, (ax0, ax1, ax2) = plt.subplots(ncols=3, figsize=(12, 4))
        ax0.hist(channel_0, 20, normed=1, histtype='bar', facecolor='r', alpha=0.75)
        ax0.set_title('Channel #0')
        ax1.hist(channel_1, 20, normed=1, histtype='bar', facecolor='g', alpha=0.75)
        ax1.set_title('Channel #1')
        ax2.hist(channel_2, 20, normed=1, histtype='bar', facecolor='b', alpha=0.75)
        ax2.set_title('Channel #2')
        # show the figure briefly, then auto-close it via the timer
        plt.tight_layout()
        timer = fig.canvas.new_timer(interval = 4000)  # ms before close_event fires
        timer.add_callback(close_event)
        timer.start()
        plt.show()
        # fname = 'histogramFrame_' + str(indFrame)
        # plt.savefig(fname)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    else:
        # No more frames (or a read failure): leave the loop.
        break
    indFrame = indFrame + 1
# When everything done, release the capture/writer and close the windows.
cap.release()
out.release()
cv2.destroyAllWindows()
| mit | Python |
|
629ccdc27d2eb3522def903cc42606e43c3f816b | Add script to write network related files | AdaptivePELE/AdaptivePELE,AdaptivePELE/AdaptivePELE,AdaptivePELE/AdaptivePELE,AdaptivePELE/AdaptivePELE | AdaptivePELE/analysis/writeNetworkFiles.py | AdaptivePELE/analysis/writeNetworkFiles.py | import os
import sys
import argparse
from AdaptivePELE.utilities import utilities
import matplotlib.pyplot as plt
def parseArguments():
    """Parse command line: (clusteringObject, suffix, metricCol, output_path)."""
    parser = argparse.ArgumentParser(
        description="Write the information related to the conformation network to file\n")
    parser.add_argument("clusteringObject", type=str,
                        help="Path to the clustering object")
    parser.add_argument("suffix", type=str, help="Suffix to append to file names")
    parser.add_argument("metricCol", type=int, help="Column of the metric of interest")
    parser.add_argument("-o", type=str, default=None,
                        help="Output path where to write the files")
    parsed = parser.parse_args()
    return parsed.clusteringObject, parsed.suffix, parsed.metricCol, parsed.o
if __name__ == "__main__":
    clusteringObject, suffix, metricCol, outputPath = parseArguments()
    if outputPath is not None:
        # Normalize to a trailing-slash prefix and create the directory.
        outputPath = os.path.join(outputPath, "")
        if not os.path.exists(outputPath):
            os.makedirs(outputPath)
    else:
        # No -o given: write into the working directory.  BUG FIX: the
        # directory creation now only runs for a real path; previously
        # os.makedirs("") raised FileNotFoundError when -o was omitted.
        outputPath = ""
sys.stderr.write("Reading clustering object...\n")
cl = utilities.readClusteringObject(clusteringObject)
optimalCluster = cl.getOptimalMetric()
pathway = cl.createPathwayToCluster(optimalCluster)
sys.stderr.write("Writing conformation network...\n")
cl.writeConformationNetwork(outputPath+"conformationNetwork%s.edgelist" % suffix)
sys.stderr.write("Writing FDT...\n")
cl.writeFDT(outputPath+"FDT%s.edgelist" % suffix)
sys.stderr.write("Writing pathway to optimal cluster...\n")
# cl.writePathwayOptimalCluster(outputPath+"pathwayFDT%s.pdb" % suffix)
cl.writePathwayTrajectory(pathway, outputPath+"pathwayFDT%s.pdb" % suffix)
sys.stderr.write("Writing nodes population...\n")
cl.writeConformationNodePopulation(outputPath+"nodesPopulation%s.txt" % suffix)
sys.stderr.write("Writing nodes metrics...\n")
cl.writeConformationNodeMetric(outputPath+"nodesMetric%s.txt" % suffix, metricCol)
sys.stderr.write("Writing metastability indeces...\n")
metInd = cl.calculateMetastabilityIndex()
cl.writeMetastabilityIndex(outputPath+"nodesMetIndex%s.txt" % suffix)
plt.figure()
plt.plot(pathway, [cl.clusters.clusters[i].getMetricFromColumn(5) for i in pathway])
plt.xlabel("Cluster number")
plt.ylabel("Binding energy(kcal/mol)")
plt.savefig(outputPath+"bindingEnergy_%s.png" % suffix)
plt.figure()
plt.plot(pathway, [cl.clusters.clusters[i].getMetricFromColumn(3) for i in pathway])
plt.xlabel("Cluster number")
plt.ylabel("Energy(kcal/mol)")
plt.savefig(outputPath+"totalEnergy_%s.png" % suffix)
plt.figure()
plt.plot(pathway, [metInd[i] for i in pathway])
plt.xlabel("Cluster number")
plt.ylabel("Metastability index")
plt.savefig(outputPath+"metIndex_%s.png" % suffix)
plt.show()
| mit | Python |
|
daa4565abe4059e8588ddf374fde0f51d9ec784e | Create a skeleton for node propagation integration tests | thiderman/network-kitten | test/integration/test_node_propagation.py | test/integration/test_node_propagation.py | class TestPropagation(object):
def test_node_propagation(self):
"""
Tests that check node propagation
1) Spin up four servers.
2) Make the first one send a sync request to all three others.
3) Count the numbers of requests made.
4) Check databases to see that they all know each other.
"""
pass
| mit | Python |
|
5b9b27d98cad06f0bbd67026b6533dee7c218df7 | update series server code shifted from custom script to py file | saurabh6790/omnit-app,hatwar/Das_erpnext,suyashphadtare/vestasi-update-erp,indictranstech/tele-erpnext,saurabh6790/test-erp,gangadhar-kadam/smrterp,saurabh6790/test-med-app,saurabh6790/medsynaptic1-app,meisterkleister/erpnext,geekroot/erpnext,sagar30051991/ozsmart-erp,hatwar/Das_erpnext,gangadhar-kadam/sapphire_app,Tejal011089/huntercamp_erpnext,anandpdoshi/erpnext,Tejal011089/digitales_erpnext,indictranstech/tele-erpnext,suyashphadtare/test,indictranstech/vestasi-erpnext,gangadharkadam/v5_erp,treejames/erpnext,gangadhar-kadam/sms-erpnext,mahabuber/erpnext,rohitwaghchaure/digitales_erpnext,fuhongliang/erpnext,mbauskar/phrerp,suyashphadtare/test,suyashphadtare/vestasi-erp-final,hatwar/buyback-erpnext,gangadharkadam/vlinkerp,indictranstech/erpnext,gangadhar-kadam/laganerp,pawaranand/phrerp,saurabh6790/test_final_med_app,SPKian/Testing2,Tejal011089/osmosis_erpnext,suyashphadtare/test,gangadhar-kadam/nassimapp,suyashphadtare/vestasi-update-erp,gangadharkadam/verveerp,Suninus/erpnext,treejames/erpnext,suyashphadtare/vestasi-erp-1,indictranstech/osmosis-erpnext,sheafferusa/erpnext,hanselke/erpnext-1,ShashaQin/erpnext,mbauskar/internal-hr,indictranstech/trufil-erpnext,indictranstech/phrerp,mbauskar/alec_frappe5_erpnext,Drooids/erpnext,Tejal011089/med2-app,saurabh6790/tru_app_back,gangadharkadam/letzerp,indictranstech/vestasi-erpnext,gangadhar-kadam/sapphire_app,indictranstech/focal-erpnext,indictranstech/phrerp,mahabuber/erpnext,ShashaQin/erpnext,gangadhar-kadam/latestchurcherp,MartinEnder/erpnext-de,mbauskar/helpdesk-erpnext,Tejal011089/digitales_erpnext,hatwar/buyback-erpnext,hatwar/focal-erpnext,mbauskar/omnitech-erpnext,saurabh6790/OFF-RISAPP,indictranstech/fbd_erpnext,hernad/erpnext,saurabh6790/omni-apps,gangadhar-kadam/hrerp,saurabh6790/medapp,4commerce-technologies-AG/erpnext,aruizramon/alec_erpnext,indictranstech/phrerp,gangadhar-kadam/nassimapp,tmimori/erpn
ext,rohitwaghchaure/GenieManager-erpnext,shitolepriya/test-erp,saurabh6790/med_new_app,gangadharkadam/contributionerp,gangadharkadam/office_erp,saurabh6790/medsyn-app,4commerce-technologies-AG/erpnext,gmarke/erpnext,rohitwaghchaure/erpnext_smart,susuchina/ERPNEXT,indictranstech/fbd_erpnext,gangadhar-kadam/laganerp,rohitwaghchaure/GenieManager-erpnext,susuchina/ERPNEXT,saurabh6790/aimobilize-app-backup,gangadhar-kadam/laganerp,fuhongliang/erpnext,gangadhar-kadam/mic-erpnext,gangadharkadam/saloon_erp,saurabh6790/omnisys-app,saurabh6790/alert-med-app,gangadharkadam/v6_erp,Drooids/erpnext,BhupeshGupta/erpnext,hanselke/erpnext-1,SPKian/Testing,suyashphadtare/vestasi-erp-jan-end,gangadhar-kadam/verve_test_erp,rohitwaghchaure/erpnext-receipher,sheafferusa/erpnext,gangadharkadam/vlinkerp,saurabh6790/trufil_app,mbauskar/helpdesk-erpnext,meisterkleister/erpnext,gangadharkadam/tailorerp,mbauskar/omnitech-erpnext,Tejal011089/paypal_erpnext,geekroot/erpnext,mbauskar/phrerp,indictranstech/internal-erpnext,ShashaQin/erpnext,suyashphadtare/sajil-final-erp,netfirms/erpnext,sheafferusa/erpnext,dieface/erpnext,4commerce-technologies-AG/erpnext,mbauskar/omnitech-demo-erpnext,gangadhar-kadam/smrterp,anandpdoshi/erpnext,indictranstech/vestasi-erpnext,Tejal011089/huntercamp_erpnext,gangadhar-kadam/mic-erpnext,hanselke/erpnext-1,saurabh6790/test-erp,indictranstech/biggift-erpnext,njmube/erpnext,mbauskar/omnitech-demo-erpnext,saurabh6790/medsyn-app,shft117/SteckerApp,gangadharkadam/v5_erp,mbauskar/omnitech-erpnext,njmube/erpnext,Tejal011089/osmosis_erpnext,suyashphadtare/gd-erp,indictranstech/reciphergroup-erpnext,mbauskar/sapphire-erpnext,gangadharkadam/saloon_erp,SPKian/Testing,saurabh6790/aimobilize,gangadhar-kadam/sapphire_app,gangadhar-kadam/verve_test_erp,indictranstech/vestasi-erpnext,rohitwaghchaure/New_Theme_Erp,rohitwaghchaure/erpnext-receipher,rohitwaghchaure/New_Theme_Erp,gangadharkadam/v5_erp,ThiagoGarciaAlves/erpnext,saurabh6790/test-med-app,Tejal011089/osmosis_erpnext,njmube/
erpnext,Tejal011089/trufil-erpnext,indictranstech/fbd_erpnext,suyashphadtare/sajil-final-erp,saurabh6790/omnitech-apps,saurabh6790/OFF-RISAPP,saurabh6790/omn-app,ShashaQin/erpnext,hatwar/focal-erpnext,hernad/erpnext,gangadhar-kadam/verve-erp,Tejal011089/Medsyn2_app,gangadhar-kadam/adb-erp,gangadharkadam/v6_erp,rohitwaghchaure/erpnext_smart,rohitwaghchaure/erpnext-receipher,gangadhar-kadam/adb-erp,indictranstech/Das_Erpnext,SPKian/Testing2,saurabh6790/test-erp,Tejal011089/med2-app,saurabh6790/medsyn-app1,gangadhar-kadam/helpdesk-erpnext,meisterkleister/erpnext,gangadhar-kadam/verve_live_erp,mbauskar/sapphire-erpnext,suyashphadtare/sajil-erp,mbauskar/Das_Erpnext,SPKian/Testing,gsnbng/erpnext,indictranstech/Das_Erpnext,gangadhar-kadam/powapp,gangadharkadam/office_erp,mbauskar/helpdesk-erpnext,Tejal011089/trufil-erpnext,gangadhar-kadam/verve_live_erp,sagar30051991/ozsmart-erp,Suninus/erpnext,saurabh6790/medsynaptic1-app,indictranstech/buyback-erp,Yellowen/Owrang,pawaranand/phrerp,gangadhar-kadam/mtn-erpnext,rohitwaghchaure/digitales_erpnext,gangadharkadam/saloon_erp_install,gangadharkadam/verveerp,saurabh6790/med_app_rels,gangadhar-kadam/sms-erpnext,indictranstech/reciphergroup-erpnext,suyashphadtare/vestasi-erp-jan-end,shft117/SteckerApp,indictranstech/phrerp,Tejal011089/fbd_erpnext,hatwar/Das_erpnext,mahabuber/erpnext,gangadhar-kadam/verve_erp,suyashphadtare/vestasi-update-erp,indictranstech/focal-erpnext,saurabh6790/ON-RISAPP,gangadhar-kadam/verve_live_erp,Tejal011089/osmosis_erpnext,mbauskar/phrerp,rohitwaghchaure/digitales_erpnext,gangadharkadam/johnerp,mbauskar/phrerp,pawaranand/phrerp,gangadharkadam/v4_erp,Yellowen/Owrang,suyashphadtare/vestasi-erp-jan-end,ThiagoGarciaAlves/erpnext,indictranstech/Das_Erpnext,treejames/erpnext,indictranstech/erpnext,suyashphadtare/sajil-erp,suyashphadtare/vestasi-erp-1,gangadhar-kadam/verve_live_erp,mbauskar/internal-hr,gangadharkadam/saloon_erp_install,saurabh6790/medsynaptic-app,indictranstech/trufil-erpnext,indictranstech/buyba
ck-erp,fuhongliang/erpnext,saurabh6790/test-erp,indictranstech/reciphergroup-erpnext,saurabh6790/test_final_med_app,Tejal011089/paypal_erpnext,gangadhar-kadam/verve_erp,gangadhar-kadam/verve-erp,gangadharkadam/sher,gangadharkadam/sterp,saurabh6790/omn-app,gangadharkadam/v4_erp,gangadharkadam/verveerp,gangadhar-kadam/verve_test_erp,suyashphadtare/sajil-final-erp,Tejal011089/paypal_erpnext,hernad/erpnext,BhupeshGupta/erpnext,gangadhar-kadam/verve_erp,gangadharkadam/vlinkerp,hatwar/buyback-erpnext,saurabh6790/omni-apps,rohitwaghchaure/erpnext_smart,anandpdoshi/erpnext,indictranstech/biggift-erpnext,shft117/SteckerApp,gangadharkadam/v5_erp,gangadhar-kadam/latestchurcherp,netfirms/erpnext,netfirms/erpnext,rohitwaghchaure/erpnext-receipher,MartinEnder/erpnext-de,saurabh6790/aimobilize,ThiagoGarciaAlves/erpnext,indictranstech/reciphergroup-erpnext,rohitwaghchaure/GenieManager-erpnext,gangadharkadam/sher,geekroot/erpnext,indictranstech/trufil-erpnext,SPKian/Testing2,hatwar/focal-erpnext,susuchina/ERPNEXT,mbauskar/alec_frappe5_erpnext,indictranstech/buyback-erp,gangadharkadam/tailorerp,hernad/erpnext,gangadharkadam/vlinkerp,sagar30051991/ozsmart-erp,gangadhar-kadam/verve_test_erp,mbauskar/sapphire-erpnext,pawaranand/phrerp,aruizramon/alec_erpnext,mbauskar/internal-hr,indictranstech/focal-erpnext,suyashphadtare/vestasi-erp-jan-end,rohitwaghchaure/New_Theme_Erp,gangadharkadam/v4_erp,indictranstech/internal-erpnext,dieface/erpnext,gangadhar-kadam/latestchurcherp,indictranstech/erpnext,saurabh6790/pow-app,Tejal011089/digitales_erpnext,Aptitudetech/ERPNext,gangadharkadam/letzerp,gangadhar-kadam/prjapp,gangadharkadam/johnerp,mbauskar/omnitech-erpnext,indictranstech/tele-erpnext,gsnbng/erpnext,gmarke/erpnext,indictranstech/osmosis-erpnext,suyashphadtare/gd-erp,fuhongliang/erpnext,pombredanne/erpnext,suyashphadtare/gd-erp,Tejal011089/fbd_erpnext,saurabh6790/med_app_rels,gangadharkadam/contributionerp,indictranstech/fbd_erpnext,hatwar/Das_erpnext,indictranstech/focal-erpnext,susuchin
a/ERPNEXT,shitolepriya/test-erp,tmimori/erpnext,ThiagoGarciaAlves/erpnext,treejames/erpnext,suyashphadtare/vestasi-erp-final,shitolepriya/test-erp,gangadharkadam/contributionerp,gangadharkadam/v4_erp,saurabh6790/aimobilize-app-backup,saurabh6790/omnit-app,sagar30051991/ozsmart-erp,saurabh6790/omnisys-app,mbauskar/helpdesk-erpnext,gangadharkadam/v6_erp,suyashphadtare/vestasi-erp-final,saurabh6790/tru_app_back,gangadhar-kadam/helpdesk-erpnext,mbauskar/alec_frappe5_erpnext,MartinEnder/erpnext-de,netfirms/erpnext,Tejal011089/Medsyn2_app,gangadhar-kadam/mtn-erpnext,mbauskar/Das_Erpnext,Tejal011089/digitales_erpnext,BhupeshGupta/erpnext,aruizramon/alec_erpnext,sheafferusa/erpnext,indictranstech/biggift-erpnext,BhupeshGupta/erpnext,mbauskar/Das_Erpnext,Drooids/erpnext,gangadhar-kadam/church-erpnext,rohitwaghchaure/GenieManager-erpnext,anandpdoshi/erpnext,tmimori/erpnext,SPKian/Testing,pombredanne/erpnext,Tejal011089/huntercamp_erpnext,gangadhar-kadam/helpdesk-erpnext,saurabh6790/trufil_app,indictranstech/buyback-erp,Tejal011089/trufil-erpnext,shitolepriya/test-erp,gangadharkadam/smrterp,gsnbng/erpnext,saurabh6790/ON-RISAPP,gangadharkadam/saloon_erp_install,gmarke/erpnext,gangadhar-kadam/helpdesk-erpnext,Drooids/erpnext,saurabh6790/pow-app,gangadharkadam/saloon_erp_install,gangadhar-kadam/hrerp,gmarke/erpnext,suyashphadtare/sajil-erp,indictranstech/trufil-erpnext,gangadhar-kadam/latestchurcherp,mbauskar/omnitech-demo-erpnext,gsnbng/erpnext,mahabuber/erpnext,njmube/erpnext,Suninus/erpnext,gangadharkadam/smrterp,indictranstech/osmosis-erpnext,gangadhar-kadam/prjapp,hatwar/buyback-erpnext,indictranstech/biggift-erpnext,gangadharkadam/saloon_erp,saurabh6790/alert-med-app,gangadhar-kadam/powapp,gangadhar-kadam/powapp,indictranstech/osmosis-erpnext,indictranstech/Das_Erpnext,gangadharkadam/v6_erp,suyashphadtare/vestasi-erp-1,gangadharkadam/contributionerp,pombredanne/erpnext,MartinEnder/erpnext-de,rohitwaghchaure/New_Theme_Erp,mbauskar/Das_Erpnext,Tejal011089/huntercamp_erpnext,g
angadharkadam/saloon_erp,rohitwaghchaure/digitales_erpnext,indictranstech/internal-erpnext,indictranstech/erpnext,gangadharkadam/letzerp,indictranstech/internal-erpnext,hanselke/erpnext-1,gangadhar-kadam/verve-erp,Suninus/erpnext,saurabh6790/omnitech-apps,pombredanne/erpnext,SPKian/Testing2,dieface/erpnext,gangadharkadam/office_erp,shft117/SteckerApp,mbauskar/omnitech-demo-erpnext,gangadharkadam/verveerp,gangadhar-kadam/verve_erp,suyashphadtare/gd-erp,tmimori/erpnext,mbauskar/sapphire-erpnext,saurabh6790/medsynaptic-app,Tejal011089/fbd_erpnext,saurabh6790/medsyn-app1,mbauskar/alec_frappe5_erpnext,indictranstech/tele-erpnext,dieface/erpnext,Tejal011089/paypal_erpnext,meisterkleister/erpnext,hatwar/focal-erpnext,gangadhar-kadam/church-erpnext,gangadharkadam/sterp,saurabh6790/medapp,Tejal011089/fbd_erpnext,geekroot/erpnext,saurabh6790/med_new_app,gangadharkadam/letzerp,Tejal011089/trufil-erpnext,aruizramon/alec_erpnext | setup/doctype/update_series/update_series.py | setup/doctype/update_series/update_series.py | # Please edit this list and import only required elements
import webnotes
from webnotes.utils import add_days, add_months, add_years, cint, cstr, date_diff, default_fields, flt, fmt_money, formatdate, generate_hash, getTraceback, get_defaults, get_first_day, get_last_day, getdate, has_common, month_name, now, nowdate, replace_newlines, sendmail, set_default, str_esc_quote, user_format, validate_email_add
from webnotes.model import db_exists
from webnotes.model.doc import Document, addchild, removechild, getchildren, make_autoname, SuperDocType
from webnotes.model.doclist import getlist, copy_doclist
from webnotes.model.code import get_obj, get_server_obj, run_server_obj, updatedb, check_syntax
from webnotes import session, form, is_testing, msgprint, errprint
set = webnotes.conn.set
sql = webnotes.conn.sql
get_value = webnotes.conn.get_value
in_transaction = webnotes.conn.in_transaction
convert_to_lists = webnotes.conn.convert_to_lists
# -----------------------------------------------------------------------------------------
class DocType:
    """Server-side controller for the "Update Series" utility.

    Lets an administrator set the `current` counter of an existing naming
    series (`tabSeries`) or insert a brand-new series prefix.
    """

    def __init__(self, doc, doclist=None):
        self.doc = doc
        # Avoid the shared-mutable-default bug: each instance gets its own
        # list unless a child doclist is explicitly supplied.
        self.doclist = doclist if doclist is not None else []

    def update_series(self):
        """Set the counter of an existing series prefix, or warn the user
        if the prefix does not exist."""
        series = sql("select name,current from `tabSeries` where name = %s", self.doc.prefix, as_dict=1)
        if series:
            msgprint("This is going to update Series with Prefix : " + series[0]['name'] + " from Current : " + cstr(series[0]['current']) + " to Current : " + cstr(self.doc.current))
            # Parameterized query; the previous % string interpolation was an
            # SQL-injection risk and broke on prefixes containing quotes.
            sql("update `tabSeries` set current = %s where name = %s", (self.doc.current, series[0]['name']))
            msgprint("Series Updated Successfully")
        else:
            msgprint("Please Check Prefix as there is no such Prefix : " + self.doc.prefix + " Or Try Insert Button")

    def insert_series(self):
        """Create a new series prefix, or warn the user if it already
        exists."""
        series = sql("select name,current from `tabSeries` where name = %s", self.doc.prefix, as_dict=1)
        if series:
            msgprint("Series with Prefix : " + series[0]['name'] + "already in the system . Try Update Button")
        else:
            msgprint("This is going to Insert Series with Prefix : " + cstr(self.doc.prefix) + " Current: " + cstr(self.doc.current))
            # Parameterized insert instead of % string interpolation.
            sql("insert into `tabSeries` (name,current) values (%s,%s)", (self.doc.prefix, self.doc.current))
            msgprint("Series Inserted Successfully")
|
4eab434002c99daf9c302cb1007e7ec384453aae | Fix cherrypy example | ajdiaz/pyload,ajdiaz/pyload | examples/cherrypysample.py | examples/cherrypysample.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
import bottle
@bottle.get('/')
def index():
return {'key': 'value'}
bottle.run(port=8080, host="0.0.0.0", server="cherrypy")
| mit | Python |
|
93e07841d961fb7956612339f13dfd4e8ddd8bac | Create RPi_Final.py | alexreed6899/RPI-Final-Proj. | RPi_Final.py | RPi_Final.py | from random import *
| mit | Python |
|
2eba3f5072b547829964eac9d2d5b03076a49faf | add firmwareupdate sample | sakuraio/python-sakuraio | examples/firmwareupdate.py | examples/firmwareupdate.py | from sakuraio.hardware.rpi import SakuraIOGPIO
#from sakuraio.hardware.rpi import SakuraIOSMBus
import time
sakuraio = SakuraIOGPIO()
# Select the transport to the sakura.io module: GPIO here; swap the
# import above and uncomment the line below to use SMBus/I2C instead.
sakuraio = SakuraIOGPIO()
#sakuraio = SakuraIOSMBus()
# Unlock the module before issuing the update command, then give it a
# moment to settle.  NOTE(review): presumably update_firmware() is only
# accepted while unlocked -- confirm against the sakuraio library docs.
sakuraio.unlock()
time.sleep(1)
# Trigger the firmware update on the module.
sakuraio.update_firmware()
#print(sakuraio.get_firmware_version())
|
16c57e5f3bd63667c7ca0b828e1f0fcd85d64b76 | Create SecureMSG.py | dwiktor/SecureMSG | SecureMSG.py | SecureMSG.py | #!/usr/python
#
# I dedicate this application for my best friend, Robert Niemiec :)
#
# Copyright (c) 2015 Dawid Wiktor
# This app is writed for all whistleblowers, journalists and
# cryptoanarchists. Use it when you need. Be carefull! NSA watchin'
#
# This is the Open Source Software. You can freely use it, edit code, and
# ditribute. But you should respect Attribution.
def encryption():
    """Prompt for an integer key and a message, then print each
    character's code point shifted by the key, one number per line.

    Fixes the original loop, which re-indexed ``rawlist[dummy]`` after
    the final increment and therefore always raised ``IndexError`` after
    the last character (and crashed immediately on an empty message).
    """
    key = int(input("Please, input a number here to be used as the key.\n"))
    rawData = input("Enter string here.\n")
    # Iterate the characters directly instead of manual index bookkeeping.
    for ch in rawData:
        print(ord(ch) + key)
# Main loop: run encryption() repeatedly until the user answers anything
# other than "y" to the continue prompt.
run = "y"
while run == "y":
    encryption()
    run = input("Do you want to encrypt this? (y/n)\n")
    # Redundant with the loop condition, but terminates explicitly.
    if run !="y":
        exit()
| mpl-2.0 | Python |
|
522fb2e4b9fdf46abed3b5ca8ba43758b22253a1 | add missing file | mlaitinen/odoo,abdellatifkarroum/odoo,jesramirez/odoo,0k/odoo,eino-makitalo/odoo,vnsofthe/odoo,naousse/odoo,Maspear/odoo,VielSoft/odoo,Drooids/odoo,goliveirab/odoo,bkirui/odoo,grap/OpenUpgrade,minhtuancn/odoo,lsinfo/odoo,apanju/GMIO_Odoo,Antiun/odoo,OpenUpgrade/OpenUpgrade,sinbazhou/odoo,abstract-open-solutions/OCB,Ichag/odoo,VielSoft/odoo,Nick-OpusVL/odoo,ShineFan/odoo,leorochael/odoo,dfang/odoo,odoousers2014/odoo,fgesora/odoo,frouty/odoo_oph,ojengwa/odoo,chiragjogi/odoo,factorlibre/OCB,sadleader/odoo,Daniel-CA/odoo,ecosoft-odoo/odoo,slevenhagen/odoo,JonathanStein/odoo,tvtsoft/odoo8,kirca/OpenUpgrade,javierTerry/odoo,n0m4dz/odoo,nhomar/odoo,takis/odoo,shivam1111/odoo,nitinitprof/odoo,funkring/fdoo,ccomb/OpenUpgrade,colinnewell/odoo,nagyistoce/odoo-dev-odoo,ingadhoc/odoo,guerrerocarlos/odoo,pplatek/odoo,Endika/OpenUpgrade,fjbatresv/odoo,bobisme/odoo,RafaelTorrealba/odoo,abstract-open-solutions/OCB,SerpentCS/odoo,hip-odoo/odoo,ygol/odoo,savoirfairelinux/OpenUpgrade,bkirui/odoo,tinkhaven-organization/odoo,avoinsystems/odoo,sinbazhou/odoo,ehirt/odoo,janocat/odoo,hopeall/odoo,doomsterinc/odoo,ApuliaSoftware/odoo,wangjun/odoo,VitalPet/odoo,fevxie/odoo,xujb/odoo,windedge/odoo,idncom/odoo,javierTerry/odoo,grap/OCB,mszewczy/odoo,Noviat/odoo,shaufi/odoo,windedge/odoo,cpyou/odoo,nitinitprof/odoo,storm-computers/odoo,syci/OCB,MarcosCommunity/odoo,nexiles/odoo,BT-rmartin/odoo,funkring/fdoo,QianBIG/odoo,Danisan/odoo-1,pedrobaeza/odoo,tvtsoft/odoo8,osvalr/odoo,alhashash/odoo,glovebx/odoo,zchking/odoo,luistorresm/odoo,ehirt/odoo,spadae22/odoo,BT-rmartin/odoo,JGarcia-Panach/odoo,cpyou/odoo,jiangzhixiao/odoo,fuselock/odoo,papouso/odoo,ShineFan/odoo,jfpla/odoo,ramadhane/odoo,rubencabrera/odoo,pedrobaeza/odoo,fuhongliang/odoo,Endika/odoo,havt/odoo,camptocamp/ngo-addons-backport,virgree/odoo,fgesora/odoo,csrocha/OpenUpgrade,wangjun/odoo,Grirrane/odoo,MarcosCommunity/odoo,poljeff/odoo,osvalr/odoo,Nick-OpusVL/odoo,savoirfairel
inux/OpenUpgrade,OSSESAC/odoopubarquiluz,kittiu/odoo,tinkhaven-organization/odoo,mlaitinen/odoo,leoliujie/odoo,optima-ict/odoo,incaser/odoo-odoo,minhtuancn/odoo,mvaled/OpenUpgrade,bkirui/odoo,erkrishna9/odoo,datenbetrieb/odoo,rgeleta/odoo,hopeall/odoo,minhtuancn/odoo,chiragjogi/odoo,jiangzhixiao/odoo,codekaki/odoo,alexteodor/odoo,patmcb/odoo,cdrooom/odoo,CopeX/odoo,ecosoft-odoo/odoo,odooindia/odoo,fossoult/odoo,JGarcia-Panach/odoo,mszewczy/odoo,inspyration/odoo,n0m4dz/odoo,hip-odoo/odoo,mlaitinen/odoo,JonathanStein/odoo,collex100/odoo,idncom/odoo,datenbetrieb/odoo,jiachenning/odoo,damdam-s/OpenUpgrade,juanalfonsopr/odoo,acshan/odoo,tangyiyong/odoo,Daniel-CA/odoo,aviciimaxwell/odoo,odootr/odoo,Bachaco-ve/odoo,rubencabrera/odoo,waytai/odoo,fevxie/odoo,tvibliani/odoo,arthru/OpenUpgrade,ojengwa/odoo,wangjun/odoo,fjbatresv/odoo,0k/OpenUpgrade,dsfsdgsbngfggb/odoo,tangyiyong/odoo,tangyiyong/odoo,OSSESAC/odoopubarquiluz,tinkerthaler/odoo,hubsaysnuaa/odoo,apanju/odoo,cedk/odoo,csrocha/OpenUpgrade,srsman/odoo,jiangzhixiao/odoo,sysadminmatmoz/OCB,gorjuce/odoo,xzYue/odoo,shivam1111/odoo,apocalypsebg/odoo,brijeshkesariya/odoo,bplancher/odoo,leoliujie/odoo,pedrobaeza/OpenUpgrade,BT-rmartin/odoo,codekaki/odoo,rgeleta/odoo,oihane/odoo,dkubiak789/odoo,CubicERP/odoo,NL66278/OCB,bkirui/odoo,bguillot/OpenUpgrade,makinacorpus/odoo,avoinsystems/odoo,Codefans-fan/odoo,demon-ru/iml-crm,blaggacao/OpenUpgrade,dariemp/odoo,hassoon3/odoo,juanalfonsopr/odoo,Gitlab11/odoo,makinacorpus/odoo,Adel-Magebinary/odoo,fjbatresv/odoo,mvaled/OpenUpgrade,abstract-open-solutions/OCB,CatsAndDogsbvba/odoo,guewen/OpenUpgrade,Nowheresly/odoo,provaleks/o8,cdrooom/odoo,brijeshkesariya/odoo,shaufi10/odoo,ApuliaSoftware/odoo,fgesora/odoo,dariemp/odoo,cedk/odoo,ujjwalwahi/odoo,provaleks/o8,ingadhoc/odoo,thanhacun/odoo,hanicker/odoo,tinkerthaler/odoo,hip-odoo/odoo,0k/odoo,eino-makitalo/odoo,syci/OCB,ccomb/OpenUpgrade,ehirt/odoo,goliveirab/odoo,mkieszek/odoo,dfang/odoo,PongPi/isl-odoo,camptocamp/ngo-addons-backport,Op
enUpgrade/OpenUpgrade,rdeheele/odoo,stonegithubs/odoo,JCA-Developpement/Odoo,abstract-open-solutions/OCB,stonegithubs/odoo,leorochael/odoo,AuyaJackie/odoo,lombritz/odoo,alhashash/odoo,stonegithubs/odoo,apanju/GMIO_Odoo,ygol/odoo,OpenPymeMx/OCB,ujjwalwahi/odoo,tinkhaven-organization/odoo,odoousers2014/odoo,Maspear/odoo,dfang/odoo,provaleks/o8,numerigraphe/odoo,hifly/OpenUpgrade,steedos/odoo,ShineFan/odoo,shaufi/odoo,tarzan0820/odoo,spadae22/odoo,nuncjo/odoo,fossoult/odoo,provaleks/o8,mkieszek/odoo,rowemoore/odoo,cysnake4713/odoo,VitalPet/odoo,OpenPymeMx/OCB,steedos/odoo,Adel-Magebinary/odoo,rubencabrera/odoo,abdellatifkarroum/odoo,Bachaco-ve/odoo,jesramirez/odoo,Codefans-fan/odoo,mszewczy/odoo,makinacorpus/odoo,rowemoore/odoo,luiseduardohdbackup/odoo,CatsAndDogsbvba/odoo,bplancher/odoo,mszewczy/odoo,mustafat/odoo-1,florentx/OpenUpgrade,joariasl/odoo,ramadhane/odoo,sebalix/OpenUpgrade,Elico-Corp/odoo_OCB,ShineFan/odoo,minhtuancn/odoo,ramadhane/odoo,klunwebale/odoo,kifcaliph/odoo,Ernesto99/odoo,FlorianLudwig/odoo,glovebx/odoo,damdam-s/OpenUpgrade,bkirui/odoo,joariasl/odoo,srsman/odoo,pplatek/odoo,SerpentCS/odoo,ecosoft-odoo/odoo,florian-dacosta/OpenUpgrade,gvb/odoo,oihane/odoo,gavin-feng/odoo,factorlibre/OCB,Ichag/odoo,virgree/odoo,ramitalat/odoo,bobisme/odoo,Noviat/odoo,vnsofthe/odoo,ujjwalwahi/odoo,synconics/odoo,VitalPet/odoo,shingonoide/odoo,shivam1111/odoo,poljeff/odoo,leorochael/odoo,bobisme/odoo,salaria/odoo,grap/OpenUpgrade,storm-computers/odoo,nhomar/odoo-mirror,lgscofield/odoo,ShineFan/odoo,grap/OCB,Maspear/odoo,CubicERP/odoo,JGarcia-Panach/odoo,salaria/odoo,OpenUpgrade-dev/OpenUpgrade,shaufi10/odoo,Adel-Magebinary/odoo,grap/OpenUpgrade,goliveirab/odoo,provaleks/o8,gsmartway/odoo,numerigraphe/odoo,havt/odoo,fossoult/odoo,omprakasha/odoo,nuncjo/odoo,blaggacao/OpenUpgrade,nagyistoce/odoo-dev-odoo,andreparames/odoo,shaufi10/odoo,demon-ru/iml-crm,PongPi/isl-odoo,Ichag/odoo,havt/odoo,odoousers2014/odoo,cloud9UG/odoo,jiangzhixiao/odoo,dkubiak789/odoo,simongoffin/we
bsite_version,sysadminmatmoz/OCB,ojengwa/odoo,poljeff/odoo,dkubiak789/odoo,feroda/odoo,damdam-s/OpenUpgrade,srsman/odoo,bplancher/odoo,NeovaHealth/odoo,abstract-open-solutions/OCB,takis/odoo,leoliujie/odoo,diagramsoftware/odoo,GauravSahu/odoo,Antiun/odoo,sv-dev1/odoo,JGarcia-Panach/odoo,sinbazhou/odoo,odooindia/odoo,addition-it-solutions/project-all,CubicERP/odoo,fevxie/odoo,Ernesto99/odoo,ojengwa/odoo,avoinsystems/odoo,florian-dacosta/OpenUpgrade,slevenhagen/odoo,Danisan/odoo-1,slevenhagen/odoo-npg,eino-makitalo/odoo,ingadhoc/odoo,stephen144/odoo,savoirfairelinux/odoo,NL66278/OCB,lightcn/odoo,mmbtba/odoo,guerrerocarlos/odoo,bguillot/OpenUpgrade,kybriainfotech/iSocioCRM,Noviat/odoo,dsfsdgsbngfggb/odoo,nagyistoce/odoo-dev-odoo,microcom/odoo,sysadminmatmoz/OCB,rgeleta/odoo,cpyou/odoo,hassoon3/odoo,Gitlab11/odoo,JonathanStein/odoo,MarcosCommunity/odoo,Codefans-fan/odoo,kittiu/odoo,gsmartway/odoo,frouty/odoogoeen,tarzan0820/odoo,sinbazhou/odoo,fjbatresv/odoo,alhashash/odoo,apocalypsebg/odoo,stonegithubs/odoo,realsaiko/odoo,kybriainfotech/iSocioCRM,joariasl/odoo,ovnicraft/odoo,Codefans-fan/odoo,dllsf/odootest,hassoon3/odoo,vrenaville/ngo-addons-backport,kirca/OpenUpgrade,n0m4dz/odoo,Nick-OpusVL/odoo,draugiskisprendimai/odoo,bwrsandman/OpenUpgrade,pedrobaeza/OpenUpgrade,javierTerry/odoo,dalegregory/odoo,deKupini/erp,codekaki/odoo,minhtuancn/odoo,ygol/odoo,TRESCLOUD/odoopub,vnsofthe/odoo,ChanduERP/odoo,Noviat/odoo,Maspear/odoo,sergio-incaser/odoo,Kilhog/odoo,bakhtout/odoo-educ,odoo-turkiye/odoo,avoinsystems/odoo,factorlibre/OCB,leoliujie/odoo,lgscofield/odoo,ApuliaSoftware/odoo,Nowheresly/odoo,dsfsdgsbngfggb/odoo,BT-ojossen/odoo,hanicker/odoo,mustafat/odoo-1,dkubiak789/odoo,alexcuellar/odoo,fuselock/odoo,ygol/odoo,eino-makitalo/odoo,luiseduardohdbackup/odoo,markeTIC/OCB,glovebx/odoo,odoo-turkiye/odoo,omprakasha/odoo,mustafat/odoo-1,agrista/odoo-saas,prospwro/odoo,nhomar/odoo,florentx/OpenUpgrade,CatsAndDogsbvba/odoo,mszewczy/odoo,patmcb/odoo,Drooids/odoo,waytai/odoo,prospw
ro/odoo,ramadhane/odoo,GauravSahu/odoo,fdvarela/odoo8,feroda/odoo,highco-groupe/odoo,BT-astauder/odoo,realsaiko/odoo,stephen144/odoo,sve-odoo/odoo,alexcuellar/odoo,javierTerry/odoo,sadleader/odoo,jfpla/odoo,Endika/OpenUpgrade,takis/odoo,incaser/odoo-odoo,virgree/odoo,ccomb/OpenUpgrade,markeTIC/OCB,hassoon3/odoo,MarcosCommunity/odoo,frouty/odoo_oph,deKupini/erp,camptocamp/ngo-addons-backport,hmen89/odoo,omprakasha/odoo,gdgellatly/OCB1,draugiskisprendimai/odoo,poljeff/odoo,ChanduERP/odoo,jiangzhixiao/odoo,arthru/OpenUpgrade,osvalr/odoo,rahuldhote/odoo,klunwebale/odoo,optima-ict/odoo,ramitalat/odoo,rdeheele/odoo,takis/odoo,rahuldhote/odoo,Codefans-fan/odoo,janocat/odoo,bguillot/OpenUpgrade,jpshort/odoo,florentx/OpenUpgrade,laslabs/odoo,gsmartway/odoo,mszewczy/odoo,Adel-Magebinary/odoo,zchking/odoo,Endika/odoo,thanhacun/odoo,vrenaville/ngo-addons-backport,rdeheele/odoo,savoirfairelinux/odoo,ovnicraft/odoo,funkring/fdoo,Noviat/odoo,doomsterinc/odoo,OpusVL/odoo,Eric-Zhong/odoo,jusdng/odoo,rubencabrera/odoo,JCA-Developpement/Odoo,nexiles/odoo,jiangzhixiao/odoo,jeasoft/odoo,GauravSahu/odoo,dsfsdgsbngfggb/odoo,hopeall/odoo,bakhtout/odoo-educ,hip-odoo/odoo,bakhtout/odoo-educ,tvibliani/odoo,vnsofthe/odoo,jpshort/odoo,microcom/odoo,grap/OCB,nhomar/odoo-mirror,incaser/odoo-odoo,mmbtba/odoo,laslabs/odoo,nuuuboo/odoo,dsfsdgsbngfggb/odoo,joshuajan/odoo,JonathanStein/odoo,bobisme/odoo,oliverhr/odoo,lsinfo/odoo,frouty/odoogoeen,cloud9UG/odoo,dariemp/odoo,storm-computers/odoo,christophlsa/odoo,Noviat/odoo,nuncjo/odoo,FlorianLudwig/odoo,inspyration/odoo,mlaitinen/odoo,omprakasha/odoo,grap/OCB,mlaitinen/odoo,0k/OpenUpgrade,dalegregory/odoo,srimai/odoo,Drooids/odoo,cysnake4713/odoo,jaxkodex/odoo,bobisme/odoo,havt/odoo,csrocha/OpenUpgrade,naousse/odoo,hifly/OpenUpgrade,dalegregory/odoo,leorochael/odoo,hubsaysnuaa/odoo,KontorConsulting/odoo,luistorresm/odoo,ClearCorp-dev/odoo,slevenhagen/odoo,poljeff/odoo,funkring/fdoo,agrista/odoo-saas,slevenhagen/odoo,VielSoft/odoo,andreparames/odoo,apan
ju/GMIO_Odoo,OSSESAC/odoopubarquiluz,synconics/odoo,alexteodor/odoo,cysnake4713/odoo,srimai/odoo,collex100/odoo,agrista/odoo-saas,SerpentCS/odoo,sinbazhou/odoo,ujjwalwahi/odoo,luistorresm/odoo,avoinsystems/odoo,idncom/odoo,thanhacun/odoo,luistorresm/odoo,bguillot/OpenUpgrade,virgree/odoo,florentx/OpenUpgrade,oliverhr/odoo,ApuliaSoftware/odoo,KontorConsulting/odoo,windedge/odoo,jpshort/odoo,BT-fgarbely/odoo,aviciimaxwell/odoo,leoliujie/odoo,hifly/OpenUpgrade,srimai/odoo,blaggacao/OpenUpgrade,bguillot/OpenUpgrade,AuyaJackie/odoo,syci/OCB,dgzurita/odoo,AuyaJackie/odoo,alqfahad/odoo,joariasl/odoo,dllsf/odootest,srsman/odoo,frouty/odoogoeen,x111ong/odoo,lightcn/odoo,bealdav/OpenUpgrade,TRESCLOUD/odoopub,factorlibre/OCB,syci/OCB,hopeall/odoo,jaxkodex/odoo,mustafat/odoo-1,rahuldhote/odoo,optima-ict/odoo,brijeshkesariya/odoo,arthru/OpenUpgrade,ClearCorp-dev/odoo,feroda/odoo,ihsanudin/odoo,cloud9UG/odoo,christophlsa/odoo,OpenUpgrade-dev/OpenUpgrade,spadae22/odoo,osvalr/odoo,PongPi/isl-odoo,odootr/odoo,charbeljc/OCB,pedrobaeza/odoo,hoatle/odoo,gvb/odoo,janocat/odoo,PongPi/isl-odoo,syci/OCB,dgzurita/odoo,kybriainfotech/iSocioCRM,0k/OpenUpgrade,gorjuce/odoo,codekaki/odoo,oliverhr/odoo,sv-dev1/odoo,Bachaco-ve/odoo,rgeleta/odoo,ihsanudin/odoo,rschnapka/odoo,massot/odoo,virgree/odoo,demon-ru/iml-crm,tinkhaven-organization/odoo,jpshort/odoo,sinbazhou/odoo,apanju/odoo,CubicERP/odoo,factorlibre/OCB,nitinitprof/odoo,rgeleta/odoo,credativUK/OCB,gavin-feng/odoo,grap/OpenUpgrade,jolevq/odoopub,ygol/odoo,hopeall/odoo,bealdav/OpenUpgrade,vrenaville/ngo-addons-backport,dkubiak789/odoo,cpyou/odoo,guewen/OpenUpgrade,kybriainfotech/iSocioCRM,tinkhaven-organization/odoo,Gitlab11/odoo,stephen144/odoo,massot/odoo,bkirui/odoo,zchking/odoo,goliveirab/odoo,oasiswork/odoo,incaser/odoo-odoo,apanju/GMIO_Odoo,codekaki/odoo,sebalix/OpenUpgrade,Adel-Magebinary/odoo,pplatek/odoo,dsfsdgsbngfggb/odoo,abenzbiria/clients_odoo,ygol/odoo,CopeX/odoo,zchking/odoo,Kilhog/odoo,funkring/fdoo,omprakasha/odoo,rowemoore
/odoo,ccomb/OpenUpgrade,x111ong/odoo,ClearCorp-dev/odoo,Endika/OpenUpgrade,nuuuboo/odoo,tvibliani/odoo,nhomar/odoo,deKupini/erp,guewen/OpenUpgrade,rubencabrera/odoo,papouso/odoo,xzYue/odoo,hifly/OpenUpgrade,eino-makitalo/odoo,salaria/odoo,ChanduERP/odoo,abdellatifkarroum/odoo,guerrerocarlos/odoo,jfpla/odoo,ShineFan/odoo,jusdng/odoo,ojengwa/odoo,markeTIC/OCB,ihsanudin/odoo,lightcn/odoo,ehirt/odoo,rahuldhote/odoo,sebalix/OpenUpgrade,BT-astauder/odoo,odoousers2014/odoo,fjbatresv/odoo,slevenhagen/odoo-npg,glovebx/odoo,glovebx/odoo,lightcn/odoo,ThinkOpen-Solutions/odoo,tvtsoft/odoo8,rschnapka/odoo,waytai/odoo,ubic135/odoo-design,chiragjogi/odoo,sebalix/OpenUpgrade,tinkerthaler/odoo,shaufi/odoo,Elico-Corp/odoo_OCB,n0m4dz/odoo,mvaled/OpenUpgrade,charbeljc/OCB,odooindia/odoo,elmerdpadilla/iv,OpenPymeMx/OCB,gdgellatly/OCB1,colinnewell/odoo,mustafat/odoo-1,andreparames/odoo,BT-rmartin/odoo,fuhongliang/odoo,javierTerry/odoo,christophlsa/odoo,patmcb/odoo,naousse/odoo,papouso/odoo,jiachenning/odoo,tinkerthaler/odoo,papouso/odoo,synconics/odoo,savoirfairelinux/odoo,BT-fgarbely/odoo,lombritz/odoo,Antiun/odoo,Ernesto99/odoo,ThinkOpen-Solutions/odoo,andreparames/odoo,slevenhagen/odoo-npg,addition-it-solutions/project-all,shingonoide/odoo,shaufi10/odoo,ramitalat/odoo,naousse/odoo,jeasoft/odoo,apanju/odoo,funkring/fdoo,ingadhoc/odoo,SerpentCS/odoo,feroda/odoo,juanalfonsopr/odoo,jaxkodex/odoo,RafaelTorrealba/odoo,apocalypsebg/odoo,fdvarela/odoo8,kittiu/odoo,aviciimaxwell/odoo,sysadminmatmoz/OCB,csrocha/OpenUpgrade,sysadminmatmoz/OCB,Ichag/odoo,florian-dacosta/OpenUpgrade,alhashash/odoo,elmerdpadilla/iv,thanhacun/odoo,bealdav/OpenUpgrade,Gitlab11/odoo,microcom/odoo,BT-rmartin/odoo,CatsAndDogsbvba/odoo,odooindia/odoo,abstract-open-solutions/OCB,microcom/odoo,Daniel-CA/odoo,credativUK/OCB,steedos/odoo,avoinsystems/odoo,ChanduERP/odoo,SAM-IT-SA/odoo,cedk/odoo,vrenaville/ngo-addons-backport,cedk/odoo,Bachaco-ve/odoo,SAM-IT-SA/odoo,dgzurita/odoo,nexiles/odoo,nuncjo/odoo,optima-ict/odoo,demon
-ru/iml-crm,mvaled/OpenUpgrade,fuselock/odoo,dgzurita/odoo,jeasoft/odoo,ygol/odoo,gavin-feng/odoo,Ichag/odoo,hanicker/odoo,BT-ojossen/odoo,hassoon3/odoo,numerigraphe/odoo,ihsanudin/odoo,shaufi10/odoo,slevenhagen/odoo,diagramsoftware/odoo,pplatek/odoo,jeasoft/odoo,grap/OCB,Nowheresly/odoo,doomsterinc/odoo,markeTIC/OCB,markeTIC/OCB,shingonoide/odoo,savoirfairelinux/OpenUpgrade,klunwebale/odoo,aviciimaxwell/odoo,mlaitinen/odoo,lombritz/odoo,ShineFan/odoo,jolevq/odoopub,alqfahad/odoo,SAM-IT-SA/odoo,datenbetrieb/odoo,RafaelTorrealba/odoo,blaggacao/OpenUpgrade,minhtuancn/odoo,doomsterinc/odoo,xzYue/odoo,incaser/odoo-odoo,dezynetechnologies/odoo,frouty/odoo_oph,QianBIG/odoo,hifly/OpenUpgrade,dezynetechnologies/odoo,kittiu/odoo,sadleader/odoo,sve-odoo/odoo,nitinitprof/odoo,oasiswork/odoo,alexteodor/odoo,alhashash/odoo,rubencabrera/odoo,cloud9UG/odoo,BT-fgarbely/odoo,goliveirab/odoo,x111ong/odoo,hoatle/odoo,havt/odoo,OpenUpgrade/OpenUpgrade,x111ong/odoo,vnsofthe/odoo,christophlsa/odoo,nexiles/odoo,CopeX/odoo,pedrobaeza/OpenUpgrade,RafaelTorrealba/odoo,0k/odoo,fuselock/odoo,draugiskisprendimai/odoo,Drooids/odoo,fossoult/odoo,Elico-Corp/odoo_OCB,factorlibre/OCB,FlorianLudwig/odoo,joshuajan/odoo,mkieszek/odoo,savoirfairelinux/odoo,synconics/odoo,rschnapka/odoo,tvtsoft/odoo8,pedrobaeza/odoo,nhomar/odoo,brijeshkesariya/odoo,rowemoore/odoo,kirca/OpenUpgrade,mszewczy/odoo,kirca/OpenUpgrade,oliverhr/odoo,erkrishna9/odoo,draugiskisprendimai/odoo,FlorianLudwig/odoo,luiseduardohdbackup/odoo,Gitlab11/odoo,datenbetrieb/odoo,rowemoore/odoo,frouty/odoogoeen,charbeljc/OCB,aviciimaxwell/odoo,SAM-IT-SA/odoo,ehirt/odoo,rschnapka/odoo,frouty/odoogoeen,oihane/odoo,Ernesto99/odoo,shaufi/odoo,bealdav/OpenUpgrade,tinkerthaler/odoo,jaxkodex/odoo,hubsaysnuaa/odoo,odoo-turkiye/odoo,gvb/odoo,cloud9UG/odoo,wangjun/odoo,javierTerry/odoo,cedk/odoo,CopeX/odoo,Eric-Zhong/odoo,jesramirez/odoo,blaggacao/OpenUpgrade,nexiles/odoo,slevenhagen/odoo,mvaled/OpenUpgrade,Elico-Corp/odoo_OCB,Grirrane/odoo,VielSoft/odo
o,nuuuboo/odoo,frouty/odoo_oph,diagramsoftware/odoo,aviciimaxwell/odoo,BT-rmartin/odoo,windedge/odoo,BT-fgarbely/odoo,acshan/odoo,lightcn/odoo,abenzbiria/clients_odoo,markeTIC/OCB,janocat/odoo,hoatle/odoo,charbeljc/OCB,erkrishna9/odoo,laslabs/odoo,bakhtout/odoo-educ,dkubiak789/odoo,bwrsandman/OpenUpgrade,kybriainfotech/iSocioCRM,waytai/odoo,damdam-s/OpenUpgrade,cloud9UG/odoo,doomsterinc/odoo,leoliujie/odoo,shivam1111/odoo,sadleader/odoo,apanju/odoo,damdam-s/OpenUpgrade,ThinkOpen-Solutions/odoo,TRESCLOUD/odoopub,nagyistoce/odoo-dev-odoo,minhtuancn/odoo,nagyistoce/odoo-dev-odoo,shaufi/odoo,wangjun/odoo,KontorConsulting/odoo,patmcb/odoo,blaggacao/OpenUpgrade,AuyaJackie/odoo,alexcuellar/odoo,n0m4dz/odoo,nhomar/odoo-mirror,Kilhog/odoo,grap/OpenUpgrade,srsman/odoo,bobisme/odoo,odootr/odoo,sv-dev1/odoo,shingonoide/odoo,colinnewell/odoo,apocalypsebg/odoo,odoousers2014/odoo,hbrunn/OpenUpgrade,fuhongliang/odoo,joariasl/odoo,pedrobaeza/OpenUpgrade,oihane/odoo,incaser/odoo-odoo,xujb/odoo,ClearCorp-dev/odoo,sysadminmatmoz/OCB,hip-odoo/odoo,lombritz/odoo,acshan/odoo,Ernesto99/odoo,Endika/OpenUpgrade,gorjuce/odoo,acshan/odoo,Drooids/odoo,frouty/odoogoeen,Grirrane/odoo,guewen/OpenUpgrade,matrixise/odoo,rschnapka/odoo,sadleader/odoo,BT-ojossen/odoo,ccomb/OpenUpgrade,Danisan/odoo-1,Drooids/odoo,brijeshkesariya/odoo,BT-fgarbely/odoo,realsaiko/odoo,odoo-turkiye/odoo,havt/odoo,slevenhagen/odoo-npg,TRESCLOUD/odoopub,lsinfo/odoo,thanhacun/odoo,camptocamp/ngo-addons-backport,ramadhane/odoo,MarcosCommunity/odoo,Adel-Magebinary/odoo,juanalfonsopr/odoo,OpenPymeMx/OCB,elmerdpadilla/iv,BT-rmartin/odoo,NL66278/OCB,dllsf/odootest,abdellatifkarroum/odoo,vnsofthe/odoo,Daniel-CA/odoo,nuuuboo/odoo,waytai/odoo,Antiun/odoo,mmbtba/odoo,vrenaville/ngo-addons-backport,ThinkOpen-Solutions/odoo,oliverhr/odoo,jiachenning/odoo,BT-fgarbely/odoo,realsaiko/odoo,papouso/odoo,fuhongliang/odoo,feroda/odoo,bplancher/odoo,poljeff/odoo,bwrsandman/OpenUpgrade,camptocamp/ngo-addons-backport,camptocamp/ngo-addons-backpor
t,prospwro/odoo,mmbtba/odoo,ovnicraft/odoo,savoirfairelinux/odoo,fdvarela/odoo8,frouty/odoo_oph,cysnake4713/odoo,addition-it-solutions/project-all,Danisan/odoo-1,kirca/OpenUpgrade,hoatle/odoo,CubicERP/odoo,GauravSahu/odoo,ubic135/odoo-design,hubsaysnuaa/odoo,janocat/odoo,feroda/odoo,bealdav/OpenUpgrade,ingadhoc/odoo,lightcn/odoo,credativUK/OCB,tvibliani/odoo,VielSoft/odoo,charbeljc/OCB,joariasl/odoo,numerigraphe/odoo,hbrunn/OpenUpgrade,demon-ru/iml-crm,odootr/odoo,Antiun/odoo,syci/OCB,pplatek/odoo,shingonoide/odoo,srimai/odoo,xujb/odoo,dezynetechnologies/odoo,credativUK/OCB,damdam-s/OpenUpgrade,nitinitprof/odoo,Grirrane/odoo,prospwro/odoo,matrixise/odoo,MarcosCommunity/odoo,OpenUpgrade/OpenUpgrade,arthru/OpenUpgrade,Danisan/odoo-1,cysnake4713/odoo,hifly/OpenUpgrade,CatsAndDogsbvba/odoo,BT-ojossen/odoo,luiseduardohdbackup/odoo,fossoult/odoo,BT-ojossen/odoo,tinkhaven-organization/odoo,andreparames/odoo,lgscofield/odoo,abenzbiria/clients_odoo,x111ong/odoo,csrocha/OpenUpgrade,laslabs/odoo,javierTerry/odoo,Antiun/odoo,fgesora/odoo,apanju/GMIO_Odoo,Daniel-CA/odoo,dgzurita/odoo,fevxie/odoo,x111ong/odoo,tinkhaven-organization/odoo,abenzbiria/clients_odoo,ubic135/odoo-design,nitinitprof/odoo,acshan/odoo,BT-fgarbely/odoo,spadae22/odoo,charbeljc/OCB,sergio-incaser/odoo,goliveirab/odoo,sebalix/OpenUpgrade,kittiu/odoo,ujjwalwahi/odoo,gdgellatly/OCB1,datenbetrieb/odoo,JCA-Developpement/Odoo,Ichag/odoo,alqfahad/odoo,sebalix/OpenUpgrade,klunwebale/odoo,lgscofield/odoo,oihane/odoo,xzYue/odoo,OpusVL/odoo,steedos/odoo,hoatle/odoo,vnsofthe/odoo,NeovaHealth/odoo,JonathanStein/odoo,naousse/odoo,OpenUpgrade-dev/OpenUpgrade,shaufi/odoo,naousse/odoo,salaria/odoo,ChanduERP/odoo,ujjwalwahi/odoo,collex100/odoo,oasiswork/odoo,chiragjogi/odoo,charbeljc/OCB,hubsaysnuaa/odoo,fgesora/odoo,collex100/odoo,ApuliaSoftware/odoo,Noviat/odoo,ecosoft-odoo/odoo,sv-dev1/odoo,nexiles/odoo,pplatek/odoo,FlorianLudwig/odoo,jeasoft/odoo,apanju/GMIO_Odoo,Maspear/odoo,addition-it-solutions/project-all,acshan/odoo,J
Garcia-Panach/odoo,BT-astauder/odoo,nagyistoce/odoo-dev-odoo,guewen/OpenUpgrade,TRESCLOUD/odoopub,BT-ojossen/odoo,x111ong/odoo,mkieszek/odoo,NL66278/OCB,chiragjogi/odoo,jfpla/odoo,savoirfairelinux/OpenUpgrade,Grirrane/odoo,QianBIG/odoo,juanalfonsopr/odoo,credativUK/OCB,juanalfonsopr/odoo,pedrobaeza/OpenUpgrade,pedrobaeza/odoo,virgree/odoo,vrenaville/ngo-addons-backport,kifcaliph/odoo,jiachenning/odoo,pedrobaeza/OpenUpgrade,oihane/odoo,erkrishna9/odoo,oasiswork/odoo,alexcuellar/odoo,jeasoft/odoo,steedos/odoo,cedk/odoo,florentx/OpenUpgrade,nuuuboo/odoo,fuhongliang/odoo,0k/OpenUpgrade,bakhtout/odoo-educ,credativUK/OCB,gsmartway/odoo,tinkerthaler/odoo,guewen/OpenUpgrade,rschnapka/odoo,SerpentCS/odoo,draugiskisprendimai/odoo,diagramsoftware/odoo,rschnapka/odoo,mvaled/OpenUpgrade,optima-ict/odoo,odootr/odoo,bwrsandman/OpenUpgrade,kittiu/odoo,glovebx/odoo,lightcn/odoo,dalegregory/odoo,deKupini/erp,ojengwa/odoo,SerpentCS/odoo,nhomar/odoo-mirror,gvb/odoo,apanju/odoo,dfang/odoo,hmen89/odoo,shivam1111/odoo,andreparames/odoo,xujb/odoo,leorochael/odoo,ihsanudin/odoo,fuselock/odoo,fdvarela/odoo8,microcom/odoo,havt/odoo,Ichag/odoo,VitalPet/odoo,dgzurita/odoo,Nowheresly/odoo,cpyou/odoo,lombritz/odoo,spadae22/odoo,thanhacun/odoo,GauravSahu/odoo,colinnewell/odoo,inspyration/odoo,dalegregory/odoo,jeasoft/odoo,fevxie/odoo,ecosoft-odoo/odoo,tvtsoft/odoo8,NL66278/OCB,alqfahad/odoo,fdvarela/odoo8,gavin-feng/odoo,QianBIG/odoo,luiseduardohdbackup/odoo,windedge/odoo,draugiskisprendimai/odoo,Nick-OpusVL/odoo,dariemp/odoo,windedge/odoo,hbrunn/OpenUpgrade,gorjuce/odoo,guerrerocarlos/odoo,Codefans-fan/odoo,datenbetrieb/odoo,jpshort/odoo,ovnicraft/odoo,christophlsa/odoo,avoinsystems/odoo,dllsf/odootest,sv-dev1/odoo,leorochael/odoo,oasiswork/odoo,stonegithubs/odoo,datenbetrieb/odoo,addition-it-solutions/project-all,camptocamp/ngo-addons-backport,jpshort/odoo,hanicker/odoo,jfpla/odoo,tvibliani/odoo,hifly/OpenUpgrade,dkubiak789/odoo,lombritz/odoo,stonegithubs/odoo,doomsterinc/odoo,simongoffin/websit
e_version,synconics/odoo,n0m4dz/odoo,OSSESAC/odoopubarquiluz,eino-makitalo/odoo,rowemoore/odoo,VielSoft/odoo,xzYue/odoo,synconics/odoo,fevxie/odoo,srimai/odoo,tangyiyong/odoo,fevxie/odoo,Elico-Corp/odoo_OCB,eino-makitalo/odoo,sinbazhou/odoo,collex100/odoo,rdeheele/odoo,odoo-turkiye/odoo,gavin-feng/odoo,jolevq/odoopub,jesramirez/odoo,brijeshkesariya/odoo,hanicker/odoo,Danisan/odoo-1,OSSESAC/odoopubarquiluz,takis/odoo,rahuldhote/odoo,laslabs/odoo,guerrerocarlos/odoo,Endika/odoo,collex100/odoo,Endika/odoo,ovnicraft/odoo,rowemoore/odoo,dalegregory/odoo,klunwebale/odoo,CubicERP/odoo,kittiu/odoo,lgscofield/odoo,xujb/odoo,odootr/odoo,JonathanStein/odoo,Gitlab11/odoo,Grirrane/odoo,colinnewell/odoo,bealdav/OpenUpgrade,rubencabrera/odoo,tarzan0820/odoo,frouty/odoo_oph,brijeshkesariya/odoo,sve-odoo/odoo,kirca/OpenUpgrade,jusdng/odoo,osvalr/odoo,kifcaliph/odoo,sergio-incaser/odoo,xujb/odoo,gvb/odoo,nexiles/odoo,NeovaHealth/odoo,jpshort/odoo,arthru/OpenUpgrade,cdrooom/odoo,ClearCorp-dev/odoo,steedos/odoo,gorjuce/odoo,abstract-open-solutions/OCB,lsinfo/odoo,sv-dev1/odoo,stephen144/odoo,oliverhr/odoo,luistorresm/odoo,luiseduardohdbackup/odoo,Eric-Zhong/odoo,Daniel-CA/odoo,lsinfo/odoo,dalegregory/odoo,AuyaJackie/odoo,lsinfo/odoo,PongPi/isl-odoo,JGarcia-Panach/odoo,FlorianLudwig/odoo,jusdng/odoo,addition-it-solutions/project-all,vrenaville/ngo-addons-backport,inspyration/odoo,frouty/odoogoeen,Nick-OpusVL/odoo,hip-odoo/odoo,ramadhane/odoo,Daniel-CA/odoo,abdellatifkarroum/odoo,fjbatresv/odoo,ecosoft-odoo/odoo,Bachaco-ve/odoo,OpenUpgrade-dev/OpenUpgrade,KontorConsulting/odoo,diagramsoftware/odoo,idncom/odoo,BT-astauder/odoo,acshan/odoo,gorjuce/odoo,nitinitprof/odoo,shivam1111/odoo,colinnewell/odoo,elmerdpadilla/iv,nhomar/odoo-mirror,christophlsa/odoo,gdgellatly/OCB1,prospwro/odoo,windedge/odoo,SAM-IT-SA/odoo,shingonoide/odoo,luiseduardohdbackup/odoo,tvtsoft/odoo8,dariemp/odoo,grap/OpenUpgrade,sergio-incaser/odoo,storm-computers/odoo,nagyistoce/odoo-dev-odoo,omprakasha/odoo,Maspear/odoo
,tangyiyong/odoo,CopeX/odoo,ihsanudin/odoo,jiachenning/odoo,jusdng/odoo,ramadhane/odoo,highco-groupe/odoo,CopeX/odoo,shingonoide/odoo,JonathanStein/odoo,alhashash/odoo,csrocha/OpenUpgrade,dllsf/odootest,cdrooom/odoo,ApuliaSoftware/odoo,KontorConsulting/odoo,simongoffin/website_version,srsman/odoo,ovnicraft/odoo,dezynetechnologies/odoo,florian-dacosta/OpenUpgrade,sve-odoo/odoo,apanju/GMIO_Odoo,OpenUpgrade/OpenUpgrade,dgzurita/odoo,juanalfonsopr/odoo,sve-odoo/odoo,ubic135/odoo-design,VitalPet/odoo,patmcb/odoo,ChanduERP/odoo,dezynetechnologies/odoo,RafaelTorrealba/odoo,kifcaliph/odoo,slevenhagen/odoo-npg,gvb/odoo,ehirt/odoo,jeasoft/odoo,gdgellatly/OCB1,zchking/odoo,slevenhagen/odoo,GauravSahu/odoo,laslabs/odoo,rgeleta/odoo,bguillot/OpenUpgrade,ThinkOpen-Solutions/odoo,makinacorpus/odoo,nuncjo/odoo,pedrobaeza/odoo,storm-computers/odoo,fuhongliang/odoo,storm-computers/odoo,jiangzhixiao/odoo,Kilhog/odoo,RafaelTorrealba/odoo,Gitlab11/odoo,steedos/odoo,Eric-Zhong/odoo,ubic135/odoo-design,alqfahad/odoo,ovnicraft/odoo,QianBIG/odoo,camptocamp/ngo-addons-backport,erkrishna9/odoo,hassoon3/odoo,gsmartway/odoo,agrista/odoo-saas,ingadhoc/odoo,aviciimaxwell/odoo,patmcb/odoo,jaxkodex/odoo,dezynetechnologies/odoo,codekaki/odoo,VitalPet/odoo,srimai/odoo,KontorConsulting/odoo,chiragjogi/odoo,Ernesto99/odoo,simongoffin/website_version,kybriainfotech/iSocioCRM,poljeff/odoo,jolevq/odoopub,Ernesto99/odoo,srsman/odoo,hopeall/odoo,hbrunn/OpenUpgrade,sergio-incaser/odoo,kirca/OpenUpgrade,xzYue/odoo,OpenPymeMx/OCB,lgscofield/odoo,abenzbiria/clients_odoo,mmbtba/odoo,guerrerocarlos/odoo,fjbatresv/odoo,hmen89/odoo,damdam-s/OpenUpgrade,bplancher/odoo,naousse/odoo,dariemp/odoo,collex100/odoo,elmerdpadilla/iv,florian-dacosta/OpenUpgrade,CopeX/odoo,grap/OpenUpgrade,xzYue/odoo,abdellatifkarroum/odoo,wangjun/odoo,tinkerthaler/odoo,luistorresm/odoo,doomsterinc/odoo,arthru/OpenUpgrade,Endika/odoo,optima-ict/odoo,papouso/odoo,wangjun/odoo,jusdng/odoo,jfpla/odoo,GauravSahu/odoo,hbrunn/OpenUpgrade,fuselock/o
doo,Bachaco-ve/odoo,fuhongliang/odoo,papouso/odoo,sysadminmatmoz/OCB,andreparames/odoo,hubsaysnuaa/odoo,ojengwa/odoo,savoirfairelinux/OpenUpgrade,Eric-Zhong/odoo,NeovaHealth/odoo,0k/OpenUpgrade,diagramsoftware/odoo,ramitalat/odoo,draugiskisprendimai/odoo,gavin-feng/odoo,SAM-IT-SA/odoo,Eric-Zhong/odoo,PongPi/isl-odoo,provaleks/o8,stonegithubs/odoo,Kilhog/odoo,idncom/odoo,FlorianLudwig/odoo,savoirfairelinux/odoo,Nowheresly/odoo,ThinkOpen-Solutions/odoo,MarcosCommunity/odoo,AuyaJackie/odoo,bwrsandman/OpenUpgrade,mkieszek/odoo,jesramirez/odoo,credativUK/OCB,OpenUpgrade/OpenUpgrade,CatsAndDogsbvba/odoo,OpusVL/odoo,odoo-turkiye/odoo,spadae22/odoo,OpenPymeMx/OCB,matrixise/odoo,tangyiyong/odoo,0k/OpenUpgrade,numerigraphe/odoo,VielSoft/odoo,grap/OCB,OSSESAC/odoopubarquiluz,pplatek/odoo,bakhtout/odoo-educ,zchking/odoo,Kilhog/odoo,glovebx/odoo,fgesora/odoo,hoatle/odoo,janocat/odoo,agrista/odoo-saas,markeTIC/OCB,ecosoft-odoo/odoo,prospwro/odoo,OpenPymeMx/OCB,NeovaHealth/odoo,mvaled/OpenUpgrade,joshuajan/odoo,JCA-Developpement/Odoo,Endika/OpenUpgrade,mmbtba/odoo,hubsaysnuaa/odoo,pedrobaeza/OpenUpgrade,bakhtout/odoo-educ,oliverhr/odoo,jolevq/odoopub,codekaki/odoo,mlaitinen/odoo,slevenhagen/odoo-npg,hopeall/odoo,xujb/odoo,osvalr/odoo,matrixise/odoo,realsaiko/odoo,gsmartway/odoo,grap/OCB,bkirui/odoo,ramitalat/odoo,alexcuellar/odoo,VitalPet/odoo,zchking/odoo,sergio-incaser/odoo,CubicERP/odoo,microcom/odoo,bwrsandman/OpenUpgrade,apanju/odoo,apocalypsebg/odoo,OpusVL/odoo,florian-dacosta/OpenUpgrade,factorlibre/OCB,NeovaHealth/odoo,stephen144/odoo,jaxkodex/odoo,joshuajan/odoo,rgeleta/odoo,dfang/odoo,diagramsoftware/odoo,rahuldhote/odoo,christophlsa/odoo,highco-groupe/odoo,SAM-IT-SA/odoo,feroda/odoo,bplancher/odoo,funkring/fdoo,makinacorpus/odoo,highco-groupe/odoo,Adel-Magebinary/odoo,tvibliani/odoo,fossoult/odoo,ccomb/OpenUpgrade,nuncjo/odoo,shivam1111/odoo,guewen/OpenUpgrade,rahuldhote/odoo,spadae22/odoo,gavin-feng/odoo,nuuuboo/odoo,credativUK/OCB,Drooids/odoo,chiragjogi/odoo,Eric-Zho
ng/odoo,numerigraphe/odoo,rschnapka/odoo,BT-astauder/odoo,hanicker/odoo,gdgellatly/OCB1,Danisan/odoo-1,jfpla/odoo,Endika/odoo,BT-ojossen/odoo,nhomar/odoo,tangyiyong/odoo,tvibliani/odoo,massot/odoo,luistorresm/odoo,gdgellatly/OCB1,takis/odoo,alqfahad/odoo,leorochael/odoo,rdeheele/odoo,grap/OCB,klunwebale/odoo,0k/odoo,slevenhagen/odoo-npg,Antiun/odoo,bobisme/odoo,JGarcia-Panach/odoo,numerigraphe/odoo,odooindia/odoo,klunwebale/odoo,ingadhoc/odoo,ehirt/odoo,colinnewell/odoo,dfang/odoo,lombritz/odoo,Maspear/odoo,shaufi10/odoo,nuuuboo/odoo,hbrunn/OpenUpgrade,cloud9UG/odoo,Nowheresly/odoo,massot/odoo,oihane/odoo,sv-dev1/odoo,provaleks/o8,nhomar/odoo,odootr/odoo,prospwro/odoo,patmcb/odoo,hanicker/odoo,tarzan0820/odoo,KontorConsulting/odoo,codekaki/odoo,vrenaville/ngo-addons-backport,omprakasha/odoo,NeovaHealth/odoo,gorjuce/odoo,takis/odoo,shaufi10/odoo,jaxkodex/odoo,n0m4dz/odoo,makinacorpus/odoo,massot/odoo,abdellatifkarroum/odoo,alexteodor/odoo,hmen89/odoo,odoousers2014/odoo,alexcuellar/odoo,Nick-OpusVL/odoo,JCA-Developpement/Odoo,Nowheresly/odoo,VitalPet/odoo,fgesora/odoo,apocalypsebg/odoo,dariemp/odoo,dezynetechnologies/odoo,hoatle/odoo,ThinkOpen-Solutions/odoo,janocat/odoo,joshuajan/odoo,SerpentCS/odoo,lsinfo/odoo,OpenUpgrade/OpenUpgrade,Elico-Corp/odoo_OCB,mmbtba/odoo,waytai/odoo,csrocha/OpenUpgrade,oasiswork/odoo,CatsAndDogsbvba/odoo,makinacorpus/odoo,hmen89/odoo,osvalr/odoo,mkieszek/odoo,stephen144/odoo,tarzan0820/odoo,bguillot/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,jusdng/odoo,apanju/odoo,gdgellatly/OCB1,jiachenning/odoo,matrixise/odoo,waytai/odoo,gvb/odoo,tarzan0820/odoo,joshuajan/odoo,salaria/odoo,AuyaJackie/odoo,MarcosCommunity/odoo,Kilhog/odoo,RafaelTorrealba/odoo,ramitalat/odoo,synconics/odoo,ChanduERP/odoo,kybriainfotech/iSocioCRM,sebalix/OpenUpgrade,fuselock/odoo,lgscofield/odoo,virgree/odoo,salaria/odoo,blaggacao/OpenUpgrade,ujjwalwahi/odoo,cedk/odoo,alexcuellar/odoo,nuncjo/odoo,shaufi/odoo,salaria/odoo,alqfahad/odoo,0k/odoo,ApuliaSoftware/odoo,QianBIG/odoo
,kifcaliph/odoo,frouty/odoogoeen,leoliujie/odoo,mustafat/odoo-1,odoo-turkiye/odoo,goliveirab/odoo,bwrsandman/OpenUpgrade,OpenPymeMx/OCB,gsmartway/odoo,highco-groupe/odoo,Endika/OpenUpgrade,joariasl/odoo,Nick-OpusVL/odoo,PongPi/isl-odoo,oasiswork/odoo,idncom/odoo,idncom/odoo,ihsanudin/odoo,dsfsdgsbngfggb/odoo,Endika/OpenUpgrade,florentx/OpenUpgrade,simongoffin/website_version,ccomb/OpenUpgrade,deKupini/erp,Bachaco-ve/odoo,tarzan0820/odoo,Codefans-fan/odoo,fossoult/odoo,thanhacun/odoo,incaser/odoo-odoo,guerrerocarlos/odoo,OpenUpgrade-dev/OpenUpgrade,srimai/odoo,alexteodor/odoo,apocalypsebg/odoo,savoirfairelinux/OpenUpgrade,mustafat/odoo-1,Endika/odoo | addons/web/ir_module.py | addons/web/ir_module.py | from openerp.osv import osv
import openerp.wsgi.core as oewsgi
from common.http import Root
class ir_module(osv.Model):
    """Extend ``ir.module.module`` so that web WSGI handlers re-discover
    their addons whenever a module-list refresh reports changes."""
    _inherit = 'ir.module.module'
    def update_list(self, cr, uid, context=None):
        # Let the base implementation rescan the addon paths; it reports
        # back what it found (presumably a pair of counters -- (0, 0)
        # appears to mean "nothing changed"; confirm against the base class).
        counts = super(ir_module, self).update_list(cr, uid, context=context)
        if tuple(counts) != (0, 0):
            # Something changed: ask every Root WSGI handler to reload.
            for wsgi_handler in oewsgi.module_handlers:
                if isinstance(wsgi_handler, Root):
                    wsgi_handler._load_addons()
        return counts
| agpl-3.0 | Python |
|
9c7d04b4eecb392b368e4b84a48197682ea63b8d | Add blackroom/python/auto-push.py | zhihaofans/Bilibili | blackroom/python/auto-push.py | blackroom/python/auto-push.py | #!/usr/bin/python3
# -*-coding:utf-8 -*-
# ProjectName:Bilibili小黑屋爬虫v2
# Author:zhihaofans
# Github:https://github.com/zhihaofans/Bilibili/tree/master/blackroom
# PythonVersion:3.x
import requests
import os
import json
import time
from bilibili import blackRoom
from zhihaofans import file as f
# All output lives under the data/ directory next to this script's parent.
savePath = f.getUpPath(f.getMyPyPath()) + '/data/'
savePath_forever = savePath + '/forever/'  # permanently banned users
savePath_noForever = savePath + '/user/'   # time-limited bans, one dir per user
savePath_backup = savePath + '/backup/'    # one full snapshot per scrape
savePath_history = savePath + '/history/'  # one file per ban-record id
blList = []  # NOTE(review): appears unused in this module -- confirm before removing
updateTime = 0  # epoch-second string of the last successful saveData() run
gitPath = 'git'  # NOTE(review): defined but never referenced below
gitLocalBranch = 'origin'  # despite the name, this is the remote pushed to
gitRemoteBranch = 'master'  # branch pushed to on that remote
def saveData(data):
    """Persist the scraped blackroom records to every output location.

    Writes the full dump, a timestamped backup, one history file per
    record id, and per-user copies split into permanent vs. time-limited
    bans, then records the scrape timestamp in update.txt.
    """
    # BUG FIX: getPush() reads the module-level `updateTime`; without the
    # `global` declaration the assignment below only created a local and
    # the module value stayed 0 forever.
    global updateTime
    thisTime = str(time.time()).split(".")[0]
    updateTime = thisTime
    f.write(savePath + "blackroom.json", json.dumps(data))
    # 备份数据
    print("备份数据")
    f.write(savePath_backup + thisTime + ".json", json.dumps(data))
    # 历史数据
    print("历史数据")
    for a in data:
        f.write(savePath_history + str(a['id']) + ".json", json.dumps(a), True)
    # 永久封禁
    print("永久封禁与限时封禁数据分开按用户储存")
    for b in data:
        if b["blockedForever"]:
            f.write(savePath_forever +
                    str(b['uid']) + ".json", json.dumps(b), True)
        else:
            filePath = savePath_noForever + str(b['uid']) + "/"
            f.mk(filePath)
            f.write(filePath + str(b['id']) + ".json", json.dumps(b), True)
    f.write(savePath + "update.txt", thisTime)
def mkdirs():
    """Create every output directory so later writes cannot fail on a
    missing parent."""
    for directory in (savePath_forever, savePath_noForever,
                      savePath_backup, savePath_history):
        f.mk(directory)
def getPush():
    """Stage, commit and push the refreshed data set to the git remote."""
    previous_cwd = os.getcwd()
    os.chdir(f.getUpPath(f.getMyPyPath()))
    try:
        print(getCmd("git add *"))
        print(getCmd("git status"))
        # BUG FIX: the closing quote/parenthesis were swapped, producing
        # `git commit -m "Auto update blackroom(<t>")` which the shell
        # rejects as unbalanced quoting.
        print(getCmd("git commit -m \"Auto update blackroom(" + str(updateTime) + ")\""))
        print(getCmd("git push " + gitLocalBranch + " " + gitRemoteBranch))
    finally:
        # BUG FIX: the original captured the cwd but never restored it;
        # put callers back where they started even if a command fails.
        os.chdir(previous_cwd)
def getCmd(cmdText):
    """Run *cmdText* through the shell and return its captured stdout.

    The pipe is closed explicitly; the original leaked it until garbage
    collection.
    """
    stream = os.popen(cmdText)
    try:
        return stream.read()
    finally:
        stream.close()
def main():
    """Script entry point: prepare folders, scrape, persist, then wait."""
    mkdirs()
    print("开始抓取小黑屋数据")
    records = blackRoom.getData()
    print("保存数据")
    saveData(records)
    print("抓取完成")
    input("按回车退出")
    exit()
# Only run the scraper when executed directly, not when imported.
if __name__ == '__main__':
    main()
| apache-2.0 | Python |
|
380a87e71c347eab5d9c5d22a255753e62e1d739 | Add the original game code to the files to show progress made during the week using classes and other skills | Baseyoyoyo/Higher-or-Lower,Baseyoyoyo/Higher-or-Lower | Original_python_game.py | Original_python_game.py | import random
GuessesTaken = 0  # reserved for a future multi-round score counter (unused)

print("Hello and welcome to my higher or lower number guessing game.")
print("Whats your name?")
myName = input()

# Only the first two draws were ever used; the original created twenty
# further unused `numberN` variables, removed here.  Drawing order is
# unchanged, so `number` and `number1` come from the same RNG stream.
number = random.randint(1, 20)
number1 = random.randint(1, 20)

print("So, your names " + myName + " Hmmmmmm")
print("Ok " + myName + " here is your first number")
print("")
print(number)
print("")
print("Also keep in mind that the numbers range from 1 to 20")
print("")
print("So will the next number be higher or lower?")
print("")
print("")
print("Use h to guess Higher and use l to guess Lower.")
guess = input('Enter either h or l: ')

# BUG FIX: the original concatenated the int `number1` directly onto the
# message strings, raising TypeError on every outcome; convert via str().
# The missing space before "GAME OVER" in one branch is also restored.
if number > number1 and guess == "l":
    print("Well done the number was " + str(number1) + " Now onto stage 2")
elif number > number1 and guess == "h":
    print("Incorrect the number was " + str(number1) + " GAME OVER")
elif number < number1 and guess == "h":
    print("Well done the number was " + str(number1) + " Now onto stage 2")
elif number < number1 and guess == "l":
    print("Incorrect the number was " + str(number1) + " GAME OVER")
else:
    # Previously silent cases: a tie, or input other than "h"/"l".
    print("The number was " + str(number1))
|
958e6ca0ba5be68802e61a450aeb2bf39ea5d5ba | Create psf2pdb.py | tmorrell/psf2pdb | psf2pdb.py | psf2pdb.py | import sys
# Merge a PDB file with the bond table from its PSF, emitting a new PDB
# that carries CONECT records.  Python 2 script (note the print statement).
pdbfile = open(sys.argv[1],'r')
psfile = open(sys.argv[2],'r')
inline = pdbfile.readline()
output = ''
# Copy every PDB line up to (but not including) the END record.
while inline != 'END\n':
    output = output + inline
    inline = pdbfile.readline()
    if inline == '': #sanity check: EOF before END means a truncated PDB
        print "Error"
        exit()
# Advance through the PSF until the "<nbonds> !NBOND:" section header,
# skipping token-less (blank) lines.
# NOTE(review): a pre-header line with only one token would raise
# IndexError on inline[1] -- assumes a well-formed PSF.
inline = psfile.readline().split()
while inline[1] != '!NBOND:':
    inline = psfile.readline().split()
    while len(inline)<1:
        inline = psfile.readline().split()
# inline[0] (the token before !NBOND:) is the bond count; bonds follow as
# a flat stream of atom-serial pairs, several pairs per line.
bondlist = psfile.readline().split()
for i in range(int(inline[0])):
    new = bondlist.pop(0)
    output = output + 'CONECT '+new+' '+bondlist.pop(0)+'\n'
    if len(bondlist)==0:
        # Current line exhausted; pull the next line of serial pairs.
        bondlist = psfile.readline().split()
outfile = open(sys.argv[3],'w')
outfile.write(output)
| mit | Python |
|
e73d16d4051c6bc66daf415d2da4e8d204a97004 | Add rainbow function | tryptochan/pymol-scripts | rainbow.py | rainbow.py | import re
import colorsys
from pymol import cmd
def rainbow(range_string):
    """
    DESCRIPTION

    Colors a rainbow spectrum over the residues given in a range string.
    Unlike the built-in 'spectrum' command this honours the segment order
    in the range string (not alphabetical chain order), so multi-chain
    domains such as insulin's can be colored with chain B before chain A.

    USAGE

    rainbow range_string

    ARGUMENTS

    range_string = e.g. 'B:2-29,A:1-21'
    """
    seg_ptn = re.compile(r'([A-Za-z0-9]{1}):(-?[0-9]+[A-Z]?)-(-?[0-9]+[A-Z]?)')
    all_resi = []
    for seg in seg_ptn.finditer(range_string):
        chain = seg.group(1)
        local_space = {'resnums': [], 'chain': chain}
        groups = list(seg.groups())
        for idx in [1, 2]:
            # Escape a leading minus so PyMOL does not parse it as a
            # range operator.
            if groups[idx].startswith('-'):
                groups[idx] = '\\' + groups[idx]
        # BUG FIX: the escaped values were previously discarded because
        # the selection was built from seg.groups(); use the escaped copy.
        cmd.iterate('c. %s and i. %s-%s and n. CA' % tuple(groups),
                    'resnums.append(resi)', space=local_space)
        all_resi.append(local_space)
    # BUG FIX: `reduce` is a Python 2 builtin only; sum() works everywhere.
    total = sum(len(seg['resnums']) for seg in all_resi)
    # BUG FIX: guard against division by zero for a single matched residue,
    # and spell out 2/3 instead of the undocumented colorsys.TWO_THIRD.
    denom = max(total - 1, 1)
    two_thirds = 2.0 / 3.0
    cnt = 0
    for seg in all_resi:
        chain = seg['chain']
        for resnum in seg['resnums']:
            # Sweep hue from blue (2/3) down to red (0) across residues.
            hue = two_thirds - two_thirds * float(cnt) / denom
            red, green, blue = colorsys.hsv_to_rgb(hue, 1, 1)
            hexcolor = hex((int(red * 255) << 16) + (int(green * 255) << 8) +
                           int(blue * 255))
            cmd.color(hexcolor, 'c. %s and i. %s' % (chain, resnum))
            cnt += 1
if __name__ != "rainbow":
    cmd.extend('rainbow', rainbow)
|
cbaed7d194f4a91198fc097d4657ad327819af4b | Add new migration. | unt-libraries/django-invite,unt-libraries/django-invite | invite/migrations/0004_auto_20191126_1740.py | invite/migrations/0004_auto_20191126_1740.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-11-26 17:40
from __future__ import unicode_literals
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Auto-generated (makemigrations) field alignment.

    Applies the same three tweaks to ``Invitation`` and its sibling
    ``PasswordResetInvitation``: a uuid4-defaulted, non-editable unique
    activation code; an auto_now_add invite date; and a 41-char email.
    Keep these kwargs in sync with the model definitions.
    """
    dependencies = [
        ('invite', '0003_abstract_invitation_auto_now_add'),
    ]
    operations = [
        migrations.AlterField(
            model_name='invitation',
            name='activation_code',
            field=models.CharField(default=uuid.uuid4, editable=False, help_text='unique id, generated on email submission', max_length=36, unique=True),
        ),
        migrations.AlterField(
            model_name='invitation',
            name='date_invited',
            field=models.DateField(auto_now_add=True, help_text='the day on which the superuser invited the potential member'),
        ),
        migrations.AlterField(
            model_name='invitation',
            name='email',
            field=models.EmailField(help_text="the potential member's email address", max_length=41),
        ),
        # Identical changes mirrored onto PasswordResetInvitation.
        migrations.AlterField(
            model_name='passwordresetinvitation',
            name='activation_code',
            field=models.CharField(default=uuid.uuid4, editable=False, help_text='unique id, generated on email submission', max_length=36, unique=True),
        ),
        migrations.AlterField(
            model_name='passwordresetinvitation',
            name='date_invited',
            field=models.DateField(auto_now_add=True, help_text='the day on which the superuser invited the potential member'),
        ),
        migrations.AlterField(
            model_name='passwordresetinvitation',
            name='email',
            field=models.EmailField(help_text="the potential member's email address", max_length=41),
        ),
    ]
| bsd-3-clause | Python |
|
47044317e4067fb38bf9e0fdb2e9c5f9ccb78053 | add migration | PokeAPI/pokeapi,PokeAPI/pokeapi,PokeAPI/pokeapi,PokeAPI/pokeapi | pokemon_v2/migrations/0006_auto_20200725_2205.py | pokemon_v2/migrations/0006_auto_20200725_2205.py | from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration relaxing three Pokemon measurements.

    ``height``, ``weight`` and ``base_experience`` become nullable
    (``null=True``) and optional in forms (``blank=True``).
    """
    dependencies = [
        ("pokemon_v2", "0005_auto_20200709_1930"),
    ]
    operations = [
        migrations.AlterField(
            model_name="pokemon",
            name="height",
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name="pokemon",
            name="weight",
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name="pokemon",
            name="base_experience",
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
| bsd-3-clause | Python |
|
bad97abfe7fd93cefac10d46b5434b63cc7e3d2b | add line to end of file | keras-team/keras-contrib,keras-team/keras-contrib,farizrahman4u/keras-contrib,stygstra/keras-contrib,keras-team/keras-contrib | keras_contrib/constraints.py | keras_contrib/constraints.py | from __future__ import absolute_import
from . import backend as K
from keras.utils.generic_utils import get_from_module
from keras.constraints import *
class Clip(Constraint):
"""Clips weights to [-c, c].
# Arguments
c: Clipping parameter.
"""
def __init__(self, c=0.01):
self.c = c
def __call__(self, p):
return K.clip(p, -self.c, self.c)
def get_config(self):
return {'name': self.__class__.__name__,
'c': self.c}
# Aliases.
clip = Clip
| from __future__ import absolute_import
from . import backend as K
from keras.utils.generic_utils import get_from_module
from keras.constraints import *
class Clip(Constraint):
"""Clips weights to [-c, c].
# Arguments
c: Clipping parameter.
"""
def __init__(self, c=0.01):
self.c = c
def __call__(self, p):
return K.clip(p, -self.c, self.c)
def get_config(self):
return {'name': self.__class__.__name__,
'c': self.c}
# Aliases.
clip = Clip | mit | Python |
d558ed9875cf99ebdf6915e7acd877fc7fae69f3 | Add missing migration | mysociety/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit | candidates/migrations/0028_auto_20160411_1055.py | candidates/migrations/0028_auto_20160411_1055.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('candidates', '0027_create_standard_complex_fields'),
]
operations = [
migrations.AlterField(
model_name='complexpopolofield',
name='info_type_key',
field=models.CharField(help_text="Name of the field in the array that stores the type ('note' for links, 'contact_type' for contacts, 'scheme' for identifiers)", max_length=100),
),
migrations.AlterField(
model_name='complexpopolofield',
name='info_value_key',
field=models.CharField(help_text="Name of the field in the array that stores the value, e.g 'url' for links, 'value' for contact_type, 'identifier' for identifiers", max_length=100),
),
migrations.AlterField(
model_name='complexpopolofield',
name='old_info_type',
field=models.CharField(help_text="Used for supporting info_types that have been renamed. As such it's rarely used.", max_length=100, blank=True),
),
]
| agpl-3.0 | Python |
|
90a5242a93beda053ad91adca0728995232e23d2 | Create toggle_editor_text_console.py | satishgoda/learning,satishgoda/learning,satishgoda/learning,satishgoda/learning | cg/blender/scripts/toggle_editor_text_console.py | cg/blender/scripts/toggle_editor_text_console.py | import bpy
keyconfig = bpy.context.window_manager.keyconfigs.user
args = ('wm.context_set_enum', 'ESC', 'PRESS')
kwargs = {'shift':True}
for source, destination in (('Console', 'TEXT_EDITOR'), ('Text', 'CONSOLE')):
kmi = keyconfig.keymaps[source].keymap_items.new(*args, **kwargs)
properties = kmi.properties
properties.data_path = 'area.type'
properties.value = destination
| mit | Python |
|
cb505bd4c86c39bd7ce575a7d72e4a3d33875b93 | Create polyDataMake.py | probml/pyprobml,probml/pyprobml,probml/pyprobml,probml/pyprobml | figureCode/polyDataMake.py | figureCode/polyDataMake.py | import numpy as np
from random import seed, getstate, setstate
def polyDataMake(n=21,deg=3,sampling='sparse'):
old_state = getstate()
seed(0)
if sampling == 'irregular':
xtrain = np.array([np.linspace(-1,-.5,6),np.linspace(3,3.5,6)]).reshape(-1,1)
elif sampling == 'sparse':
xtrain = np.array([-3, -2, 0, 2, 3])
elif sampling == 'dense':
xtrain = np.array(np.arange(-5,5,.6))
elif sampling == 'thibaux':
xtrain = np.linspace(0,20,n)
else:
raise ValueError('Unrecognized sampling provided.')
if sampling == 'thibaux':
seed(654321)
xtest = np.linspace(0,20,201)
sigma2 = 4
w = np.array([-1.5,1.0/9.0]).T
def fun(x):
return w[0]*x + w[1]*(x**2)
else:
xtest = np.linspace(-7,7,141)
if deg == 2:
def fun(x):
return 10 + x + x**2
elif deg == 3 :
def fun(x):
return 10 + x + x**3
else:
raise ValueError('Unrecognized degree.')
sigma2 = 25
ytrain = fun(xtrain) + np.random.normal(size=xtrain.shape[0])*np.sqrt(sigma2)
ytestNoisefree = fun(xtest)
ytestNoisy = ytestNoisefree + np.random.normal(size=xtest.shape[0])*np.sqrt(sigma2)
def shp(x):
return np.asarray(x).reshape(-1,1)
setstate(old_state)
return shp(xtrain), shp(ytrain), shp(xtest), shp(ytestNoisefree), shp(ytestNoisy), sigma2
| mit | Python |
|
53dc0a5a1e8cc94dd23f6b6cfa1997f7b8b6f926 | call FSL NIDM export from command line | cmaumet/nidm-results_fsl,incf-nidash/nidmresults-fsl,jbpoline/nidm-results_fsl,incf-nidash/nidm-results_afni,cmaumet/nidmresults-fsl,incf-nidash/nidm-results_fsl,cmaumet/nidm-results_afni | nidm-results_fsl.py | nidm-results_fsl.py | #!/usr/bin/python
"""
Export neuroimaging results created with FSL feat following NIDM-Results
specification. The path to feat directory must be passed as first argument.
@author: Camille Maumet <c.m.j.maumet@warwick.ac.uk>
@copyright: University of Warwick 2013-2014
"""
import sys
import os
from fsl_exporter.fsl_exporter import FSLtoNIDMExporter
if __name__ == "__main__":
    # Remove first argument (script name)
    num_args = len(sys.argv)-1
    sys.argv.pop(0)
    args = sys.argv
    usage = "Usage: python nidm-results_fsl.py path/to/feat/dir"
    # Exactly one positional argument (the FEAT directory) is required.
    if num_args != 1:
        raise Exception(usage)
    feat_dir = args[0]
    if not os.path.isdir(feat_dir):
        raise Exception("Unknown directory: "+str(feat_dir))
    # Parse the FEAT results and serialise them as NIDM-Results v0.2.0;
    # the export is written into <feat_dir>/nidm.
    fslnidm = FSLtoNIDMExporter(feat_dir=feat_dir, version="0.2.0")
    fslnidm.parse()
    fslnidm.export()
    # Python 2 print statement -- this file targets a Python 2 runtime.
    print 'NIDM export available at: '+str(os.path.join(feat_dir, "nidm"))
| mit | Python |
|
775a86179c321ac3cab73c9556edaa798f4273fd | add PassiveTotal OneShotAnalytics | yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti | plugins/analytics/passive_total.py | plugins/analytics/passive_total.py | import requests
import json
from datetime import datetime
from core.analytics import OneShotAnalytics
from core.observables import Observable, Hostname
class PassiveTotal(OneShotAnalytics):
    """One-shot enrichment that queries PassiveTotal's passive-DNS API
    and links each observable to the addresses/names it resolved to."""

    default_values = {
        "name": "PassiveTotal Passive DNS",
        "description": "Perform passive DNS (reverse) lookups on domain names or IP addresses."
    }

    settings = {
        "passivetotal_api_key": {
            "name": "PassiveTotal API Key",
            "description": "API Key provided by PassiveTotal."
        }
    }

    ACTS_ON = ["Hostname", "Ip"]
    API_URL = 'https://api.passivetotal.org/api/v1/passive'

    @staticmethod
    def analyze(observable, settings):
        links = set()
        response = requests.get(PassiveTotal.API_URL, params={
            'api_key': settings['passivetotal_api_key'],
            'query': observable.value,
        })
        response.raise_for_status()
        payload = json.loads(response.content)

        date_format = "%Y-%m-%d %H:%M:%S"
        for entry in payload['results']['records']:
            first_seen = datetime.strptime(entry['firstSeen'], date_format)
            last_seen = datetime.strptime(entry['lastSeen'], date_format)
            resolved = Observable.add_text(entry['resolve'])
            # Hostnames point at what they resolve to; for IPs the edge
            # runs the other way (name -> address in both cases).
            if isinstance(observable, Hostname):
                source, target = observable, resolved
            else:
                source, target = resolved, observable
            links.update(source.link_to(
                target, "A record", 'PassiveTotal', first_seen, last_seen))
        return links
| apache-2.0 | Python |
|
2bd913c6cad94f3bc244d92a1ae1caffda82dcf8 | Add humble plugin | thomasleese/smartbot-old,Cyanogenoid/smartbot,Muzer/smartbot,tomleese/smartbot | plugins/humble.py | plugins/humble.py | import lxml.html
import requests
from smartbot import utils
class Plugin:
def __call__(self, bot):
bot.on_respond(r"humble( weekly)?( sale)?", self.on_respond)
bot.on_help("humble", self.on_help)
def on_respond(self, bot, msg, reply):
page = requests.get("https://www.humblebundle.com/weekly")
tree = lxml.html.fromstring(page.text)
try:
title = tree.cssselect("title")[0].text_content().strip()
clock = tree.cssselect("#heading-time-remaining .mini-digit-holder")[0]
c0 = clock.cssselect(".c0 .heading-num")[0].text_content()
c1 = clock.cssselect(".c1 .heading-num")[0].text_content()
c2 = clock.cssselect(".c2 .heading-num")[0].text_content()
c3 = clock.cssselect(".c3 .heading-num")[0].text_content()
c4 = clock.cssselect(".c4 .heading-num")[0].text_content()
c5 = clock.cssselect(".c5 .heading-num")[0].text_content()
c6 = clock.cssselect(".c6 .heading-num")[0].text_content()
c7 = clock.cssselect(".c7 .heading-num")[0].text_content()
reply("{0} - {1}{2}:{3}{4}:{5}{6}:{7}{8} left".format(title, c0, c1, c2, c3, c4, c5, c6, c7))
except IndexError:
reply("No weekly sale.")
def on_help(self, bot, msg, reply):
reply("Syntax: humble [weekly] [deal]")
| mit | Python |
|
23ffdaf1ed0e1739975de058b9f8c1adeef15531 | Add "nice guy bot" strategy | HiccupinGminor/bidding-game | nicebotbot.py | nicebotbot.py | #!/bin/python
def calculate_bid(player,pos,first_moves,second_moves):
    """Choose this turn's bid for `player` (1 or 2) in the bidding game.

    pos is the scotch position (player 1 wins at position 1, player 2 at
    position 9); first_moves/second_moves are the full bid histories.
    NOTE(review): the division below relies on Python 2 `/` flooring to
    yield an integer bid; on Python 3 it would be a float -- confirm the
    intended runtime before porting.
    """
    remaining = remaining_amount(player, first_moves, second_moves)
    # Spread the remaining budget evenly over the steps still required.
    amortized_bid = remaining / steps_remaining(player, pos)
    if(amortized_bid < 1):
        amortized_bid = 1
    default_bid = 14
    last_first_bid = 0
    last_second_bid = 0
    if( len(first_moves) > 0 ):
        last_first_bid = first_moves[-1]
        last_second_bid = second_moves[-1]
    if player == 1:
        if pos == 1:
            # One step from winning: bid everything.
            return remaining
        else:
            #If the last move was greater than my last
            if last_second_bid > last_first_bid:
                #Take revenge
                return min(last_second_bid + 1, amortized_bid)
            else:
                return min(amortized_bid, default_bid)
    else:
        if pos == 9:
            # One step from winning: bid everything.
            return remaining
        else:
            #If the last move was greater than my last
            if last_first_bid > last_second_bid:
                #Take revenge
                return min(last_first_bid + 1, amortized_bid)
            else:
                return min(amortized_bid, default_bid)
def steps_remaining(player, pos):
    """Number of positions the given player still needs to move the
    scotch to win (player 1 pulls toward 0, player 2 toward 10)."""
    return pos if player == 1 else 10 - pos
def get_draw_advantage_holder(first_moves, second_moves):
    """Return which player (1 or 2) currently holds the tie-breaking
    advantage.

    Player 1 starts with it and it alternates after every drawn round,
    so an even number of past draws means player 1 holds it.
    (The original also initialised an unused `holder` local, removed.)
    """
    draws = sum(1 for a, b in zip(first_moves, second_moves) if a == b)
    return 1 if draws % 2 == 0 else 2
# def get_opponent(player):
# if(player == 1):
# return 2
# else:
# return 1
# def get_opponents_remaining_amount(player, first_moves, second_moves):
# opponent = get_opponent(player)
# return remaining_amount(opponent, first_moves, second_moves)
#Calculate how much we've spent
def remaining_amount(player, first_moves, second_moves):
starting_amount = 100
first_spent = 0
second_spent = 0
for i in range(0, len(first_moves)):
if first_moves[i] > second_moves[i]:
first_spent += first_moves[i]
elif first_moves[i] < second_moves[i]:
second_spent += second_moves[i]
else:
trimmed_first = first_moves[:i]
trimmed_second = second_moves[:i]
# get current draw advantage
holder = get_draw_advantage_holder(trimmed_first, trimmed_second)
if(holder != 1):
second_spent += second_moves[i]
else:
first_spent += first_moves[i]
if player == 1:
return starting_amount - first_spent
else:
return starting_amount - second_spent
# NOTE: Python 2 script (`print` statement, `raw_input`). Under Python 2,
# `input()` evaluates the line, yielding ints for the player id and the
# scotch position.
#gets the id of the player
player = input()
scotch_pos = input() #current position of the scotch
# Full bid history of each player, one integer per completed round.
first_moves = [int(i) for i in raw_input().split()]
second_moves = [int(i) for i in raw_input().split()]
bid = calculate_bid(player,scotch_pos,first_moves,second_moves)
print bid
| mit | Python |
|
48951aa7c2c82ca03e801e1bfce09be5492ce27b | Add python_analytics package | enthought/python-analytics | python_analytics/__init__.py | python_analytics/__init__.py | import logging
try: # pragma: no cover
from ._version import full_version as __version__
except ImportError: # pragma: no cover
__version__ = "not-built"
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
| bsd-3-clause | Python |
|
9fa9b339cb0da0ae6a4318288afd8c75e6890e4e | prepare for provider | RealGeeks/flask-oauthlib,Ryan-K/flask-oauthlib,PyBossa/flask-oauthlib,landler/flask-oauthlib,kevin1024/flask-oauthlib,lepture/flask-oauthlib,tonyseek/flask-oauthlib,Fleurer/flask-oauthlib,huxuan/flask-oauthlib,huxuan/flask-oauthlib,CoreyHyllested/flask-oauthlib,Ryan-K/flask-oauthlib,Fleurer/flask-oauthlib,cogniteev/flask-oauthlib,brightforme/flask-oauthlib,PyBossa/flask-oauthlib,auerj/flask-oauthlib,icook/flask-oauthlib,adambard/flask-oauthlib,CommonsCloud/CommonsCloud-FlaskOAuthlib,CoreyHyllested/flask-oauthlib,brightforme/flask-oauthlib,RealGeeks/flask-oauthlib,adambard/flask-oauthlib,landler/flask-oauthlib,stianpr/flask-oauthlib,stianpr/flask-oauthlib,lepture/flask-oauthlib,kevin1024/flask-oauthlib,tonyseek/flask-oauthlib,cogniteev/flask-oauthlib,CommonsCloud/CommonsCloud-FlaskOAuthlib,auerj/flask-oauthlib,icook/flask-oauthlib | flask_oauthlib/provider.py | flask_oauthlib/provider.py | # coding: utf-8
"""
Flask-OAuthlib
--------------
    Implements OAuth2 provider support for Flask.
:copyright: (c) 2013 by Hsiaoming Yang.
"""
| bsd-3-clause | Python |
|
f8e64d26c86e84ce9efe36db1155fdf5a4c6d5f8 | Add example to show of icons. | zoofIO/flexx,zoofIO/flexx,jrversteegh/flexx,jrversteegh/flexx | flexx/ui/examples/icons.py | flexx/ui/examples/icons.py | # doc-export: Icons
"""
This example demonstrates the use of icons in Flexx.
"""
import os
import flexx
from flexx import app, ui
# todo: support icons in widgets like Button, TabWidget, etc.
# todo: support fontawesome icons
class Icons(ui.Widget):
    """Placeholder demo widget; widget icon support is still a TODO."""
    def init(self):
        # Instantiating a widget inside init() attaches it to this container.
        ui.Button(text='Not much to see here yet')
if __name__ == '__main__':
    # Path to the icon file shipped with flexx itself.
    fname = os.path.join(os.path.dirname(flexx.__file__), 'resources', 'flexx.ico')
    # A tiny 16x16 PNG, base64-encoded, usable as a data-URI icon.
    black_png = ('iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAIUlEQVR42mNgY'
                 'GD4TyEeTAacOHGCKDxqwKgBtDVgaGYmAD/v6XAYiQl7AAAAAElFTkSuQmCC')
    # Select application icon. Can be a url, a relative url to a shared asset,
    # a base64 encoded image, or a local filename. Note that the local filename
    # works for setting the aplication icon in a desktop-like app, but not for
    # a web app. File types can be ico or png.
    icon = None  # use default
    # icon = 'https://assets-cdn.github.com/favicon.ico'
    # icon = app.assets.add_shared_asset('ico.icon', open(fname, 'rb'))
    # icon = 'data:image/png;base64,' + black_png
    # icon = fname
    m = app.App(Icons, title='Icon demo', icon=icon).launch('firefox-browser')
    app.start()
| bsd-2-clause | Python |
|
5e574a24d95e686bc2592af439e148e68036c61d | Add unit test for nova connector | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/unit/cloud/clouds/nova_test.py | tests/unit/cloud/clouds/nova_test.py | # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import TestCase
from salt.cloud.clouds import nova
from salttesting.mock import MagicMock, patch
from tests.unit.cloud.clouds import _preferred_ip
class NovaTestCase(TestCase):
    '''
    Test case for the salt.cloud.clouds.nova (OpenStack) driver.
    '''
    # Candidate private addresses returned by the mocked show_instance below.
    PRIVATE_IPS = ['0.0.0.0', '1.1.1.1', '2.2.2.2']
    # Patch every nova-module helper consulted by _query_node_data so the
    # test exercises only the IP-filtering logic: the instance is ACTIVE,
    # the Rackspace-specific code paths are disabled, and preferred_ip
    # accepts only '0.0.0.0' out of PRIVATE_IPS.
    @patch('salt.cloud.clouds.nova.show_instance',
           MagicMock(return_value={'state': 'ACTIVE',
                                   'public_ips': [],
                                   'addresses': [],
                                   'private_ips': PRIVATE_IPS}))
    @patch('salt.cloud.clouds.nova.rackconnect', MagicMock(return_value=False))
    @patch('salt.cloud.clouds.nova.rackconnectv3', MagicMock(return_value={'mynet': ['1.1.1.1']}))
    @patch('salt.cloud.clouds.nova.cloudnetwork', MagicMock(return_value=False))
    @patch('salt.cloud.clouds.nova.managedcloud', MagicMock(return_value=False))
    @patch('salt.cloud.clouds.nova.preferred_ip', _preferred_ip(PRIVATE_IPS, ['0.0.0.0']))
    @patch('salt.cloud.clouds.nova.ssh_interface', MagicMock(return_value='public_ips'))
    def test_query_node_data_filter_preferred_ip_addresses(self):
        '''
        Test if query node data is filtering out unpreferred IP addresses.
        '''
        nova.__opts__ = {}
        vm = {'name': None}
        data = MagicMock()
        data.public_ips = []
        # Only the IP accepted by preferred_ip may survive the filtering.
        assert nova._query_node_data(vm, data).public_ips == ['0.0.0.0']
|
2b8ff3b38e4f8bdc9da30c7978062174b0259f76 | Add lc0068_text_justification.py | bowen0701/algorithms_data_structures | lc0068_text_justification.py | lc0068_text_justification.py | """Leetcode 68. Text Justification
Hard
URL: https://leetcode.com/problems/text-justification/
Given an array of words and a width maxWidth, format the text such that each line has
exactly maxWidth characters and is fully (left and right) justified.
You should pack your words in a greedy approach; that is, pack as many words as you
can in each line. Pad extra spaces ' ' when necessary so that each line has exactly
maxWidth characters.
Extra spaces between words should be distributed as evenly as possible. If the number
of spaces on a line do not divide evenly between words, the empty slots on the left
will be assigned more spaces than the slots on the right.
For the last line of text, it should be left justified and no extra space is inserted
between words.
Note:
- A word is defined as a character sequence consisting of non-space characters only.
- Each word's length is guaranteed to be greater than 0 and not exceed maxWidth.
- The input array words contains at least one word.
Example 1:
Input:
words = ["This", "is", "an", "example", "of", "text", "justification."]
maxWidth = 16
Output:
[
"This is an",
"example of text",
"justification. "
]
Example 2:
Input:
words = ["What","must","be","acknowledgment","shall","be"]
maxWidth = 16
Output:
[
"What must be",
"acknowledgment ",
"shall be "
]
Explanation: Note that the last line is "shall be " instead of "shall be",
because the last line must be left-justified instead of fully-justified.
Note that the second line is also left-justified becase it contains only
one word.
Example 3:
Input:
words = ["Science","is","what","we","understand","well","enough","to","explain",
"to","a","computer.","Art","is","everything","else","we","do"]
maxWidth = 20
Output:
[
"Science is what we",
"understand well",
"enough to explain to",
"a computer. Art is",
"everything else we",
"do "
]
"""
class Solution(object):
    def fullJustify(self, words, maxWidth):
        """Greedily pack words into fully-justified lines of maxWidth chars.

        Spare spaces within a line are distributed as evenly as possible,
        with the extra spaces assigned to the leftmost gaps. A line holding
        a single word, and the final line, are left-justified and padded
        with trailing spaces.

        Time: O(total characters), Space: O(output).

        :type words: List[str]
        :type maxWidth: int
        :rtype: List[str]
        """
        result = []
        line = []    # words collected for the current line
        length = 0   # total characters of the words in `line` (no spaces)
        for word in words:
            # len(line) accounts for the mandatory single space per gap.
            if length + len(line) + len(word) > maxWidth:
                spaces = maxWidth - length
                gaps = len(line) - 1
                if gaps == 0:
                    # Single word: left-justify with trailing spaces.
                    result.append(line[0] + ' ' * spaces)
                else:
                    # Even distribution; the first `extra` gaps get one more.
                    base, extra = divmod(spaces, gaps)
                    row = ''
                    for i, w in enumerate(line[:-1]):
                        row += w + ' ' * (base + (1 if i < extra else 0))
                    result.append(row + line[-1])
                line, length = [], 0
            line.append(word)
            length += len(word)
        # Last line: single spaces between words, padded on the right.
        last = ' '.join(line)
        result.append(last + ' ' * (maxWidth - len(last)))
        return result
def main():
    """Run the docstring examples as a quick smoke test."""
    solution = Solution()
    print(solution.fullJustify(
        ['This', 'is', 'an', 'example', 'of', 'text', 'justification.'], 16))
    print(solution.fullJustify(
        ['What', 'must', 'be', 'acknowledgment', 'shall', 'be'], 16))


if __name__ == '__main__':
    main()
| bsd-2-clause | Python |
|
63a8f4af91048d0847cb7628f2ea15bb2b5f0e0a | Add abstract base classes to fs.archive | althonos/fs.archive | fs/archive/base.py | fs/archive/base.py | # coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import io
import abc
import six
import shutil
import tempfile
from .. import errors
from ..base import FS
from ..proxy.writer import ProxyWriter
@six.add_metaclass(abc.ABCMeta)
class ArchiveSaver(object):
    """Base class for objects that serialise a filesystem to an archive.

    Concrete subclasses implement ``_to`` to write a filesystem to a
    given handle. `output` is either a filename (str) or a writable
    binary stream; `overwrite` indicates the output already holds an
    archive that must be replaced atomically-ish via a staging file.
    """

    def __init__(self, output, overwrite=False, stream=True, **options):
        self.output = output
        self.overwrite = overwrite
        self.stream = stream
        if hasattr(output, 'tell'):
            # Remember where the stream started so an overwrite can
            # rewind to the same position before rewriting the archive.
            self._initial_position = output.tell()

    def save(self, fs):
        """Serialise `fs` to the configured output (file or stream)."""
        if self.stream:
            self.to_stream(fs)
        else:
            self.to_file(fs)

    def to_file(self, fs):
        """Write `fs` to the output filename."""
        if self.overwrite:
            # Write to a sibling temporary file first so a failed save
            # does not clobber the existing archive.
            tmp = '.'.join([self.output, 'tmp'])
            self._to(tmp, fs)
            shutil.move(tmp, self.output)
        else:
            self._to(self.output, fs)

    def to_stream(self, fs):
        """Write `fs` to the output stream."""
        if self.overwrite:
            # The stream is also our read source, so stage the archive
            # in a temporary file before copying it over the original.
            fd, temp = tempfile.mkstemp()
            os.close(fd)
            try:
                self._to(temp, fs)
                self.output.seek(self._initial_position)
                with open(temp, 'rb') as f:
                    shutil.copyfileobj(f, self.output)
            finally:
                # mkstemp leaves deletion to the caller; the previous
                # implementation leaked one temp file per save.
                try:
                    os.remove(temp)
                except OSError:
                    pass
        else:
            self._to(self.output, fs)

    @abc.abstractmethod
    def _to(self, handle, fs):
        """Serialise `fs` to `handle` (filename or stream); subclass hook."""
        raise NotImplementedError()
@six.add_metaclass(abc.ABCMeta)
class ArchiveReadFS(FS):
    """Read-only filesystem view over an archive.

    `handle` is the archive source (a filename or a readable stream);
    every mutating FS operation raises ``errors.ResourceReadOnly``.
    """
    def __init__(self, handle, **options):
        super(ArchiveReadFS, self).__init__()
        self._handle = handle
    def __repr__(self):
        # Prefer the stream's name attribute when the handle is a file object.
        return "{}({!r})".format(
            self.__class__.__name__,
            getattr(self._handle, 'name', self._handle),
        )
    def __str__(self):
        return "<{} '{}'>".format(
            self.__class__.__name__.lower(),
            getattr(self._handle, 'name', self._handle),
        )
    def _on_modification_attempt(self, path):
        """Reject any attempt to modify the archive (it is read-only)."""
        raise errors.ResourceReadOnly(path)
    def setinfo(self, path, info):
        # self.check() raises if the filesystem is already closed.
        self.check()
        self._on_modification_attempt(path)
    def makedir(self, path, permissions=None, recreate=False):
        self.check()
        self._on_modification_attempt(path)
    def remove(self, path):
        self.check()
        self._on_modification_attempt(path)
    def removedir(self, path):
        self.check()
        self._on_modification_attempt(path)
@six.add_metaclass(abc.ABCMeta)
class ArchiveFS(ProxyWriter):
    """Writable archive filesystem: reads come from the existing archive
    (if any), writes go to a proxy, and the combined state is saved back
    to the archive when the filesystem is closed.
    """
    # Subclasses override these with their archive-format-specific
    # reader and saver implementations.
    _read_fs_cls = ArchiveReadFS
    _saver_cls = ArchiveSaver
    def __init__(self, handle, proxy=None, **options):
        if isinstance(handle, six.text_type):
            # Filename: readable only if the archive already exists;
            # always savable (we can create/replace the file).
            stream = False
            saver = True
            if os.path.exists(handle):
                read_only = self._read_fs_cls(handle, **options)
            else:
                read_only = None
        elif isinstance(handle, io.IOBase):
            # NOTE(review): duck-typed file-like objects that are not
            # io.IOBase instances are rejected here — confirm intended.
            stream = True
            saver = handle.writable()
            if handle.readable() and handle.seekable():
                read_only = self._read_fs_cls(handle, **options)
            else:
                read_only = None
        else:
            raise errors.CreateFailed("cannot use {}".format(handle))
        if saver:
            # Second argument is `overwrite`: True when an existing
            # archive was opened for reading.
            self._saver = self._saver_cls(handle, read_only is not None, stream)
        else:
            self._saver = None
        super(ArchiveFS, self).__init__(read_only, proxy)
    def close(self):
        # Persist the (possibly modified) contents exactly once, on the
        # first close; ProxyWriter.close() marks the FS as closed.
        if not self.isclosed():
            if self._saver is not None:
                self._saver.save(self)
            super(ArchiveFS, self).close()
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.