commit (stringlengths 40–40) | subject (stringlengths 4–1.73k) | repos (stringlengths 5–127k) | old_file (stringlengths 2–751) | new_file (stringlengths 2–751) | new_contents (stringlengths 1–8.98k) | old_contents (stringlengths 0–6.59k) | license (stringclasses 13 values) | lang (stringclasses 23 values)
---|---|---|---|---|---|---|---|---|
a0789a4bad7747073257d8976534b33ab9862ec4 | Add unit test for IssueRegister view | toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity | feed/tests/test_issueregisterview.py | feed/tests/test_issueregisterview.py | from django.contrib.auth.models import User
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from feed.views import IssueRegisterViewSet
from workflow.models import IssueRegister, Organization, TolaUser
class IssueRegisterViewsTest(TestCase):
def setUp(self):
self.user = User.objects.create_user('john', 'lennon@thebeatles.com', 'johnpassword')
self.user.is_superuser = True
self.user.is_staff = True
self.user.save()
IssueRegister.objects.bulk_create([
IssueRegister(name='IssueRegister1'),
IssueRegister(name='IssueRegister2'),
])
factory = APIRequestFactory()
self.request_get = factory.get('/api/issueregister/')
self.request_post = factory.post('/api/issueregister/')
def test_list_issueregister_superuser(self):
self.request_get.user = self.user
view = IssueRegisterViewSet.as_view({'get': 'list'})
response = view(self.request_get)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 2)
def test_list_issueregister_normaluser(self):
self.user.is_superuser = False
self.user.is_staff = False
self.user.save()
organization = Organization.objects.create(name="TestOrg")
TolaUser.objects.create(user=self.user, organization=organization)
self.request_get.user = self.user
view = IssueRegisterViewSet.as_view({'get': 'list'})
response = view(self.request_get)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 0)
def test_list_issueregister_normaluser_one_result(self):
self.user.is_superuser = False
self.user.is_staff = False
self.user.save()
organization = Organization.objects.create(name="TestOrg")
TolaUser.objects.create(user=self.user, organization=organization)
IssueRegister.objects.create(name='IssueRegister0', organization=organization)
self.request_get.user = self.user
view = IssueRegisterViewSet.as_view({'get': 'list'})
response = view(self.request_get)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 1)
def test_create_issueregister_normaluser_one_result(self):
self.user.is_superuser = False
self.user.is_staff = False
self.user.save()
organization = Organization.objects.create(name="TestOrg")
TolaUser.objects.create(user=self.user, organization=organization)
self.request_post.user = self.user
view = IssueRegisterViewSet.as_view({'post': 'create'})
response = view(self.request_post)
self.assertEqual(response.status_code, 201)
# check if the obj created has the user organization
self.request_get.user = self.user
view = IssueRegisterViewSet.as_view({'get': 'list'})
response = view(self.request_get)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 1)
| apache-2.0 | Python |
|
05e8f84356c63ab953f5c2a3d3d06ee1760008d0 | Add list_queue plugin | ianstalk/Flexget,crawln45/Flexget,ianstalk/Flexget,OmgOhnoes/Flexget,qvazzler/Flexget,sean797/Flexget,Danfocus/Flexget,jawilson/Flexget,qk4l/Flexget,tobinjt/Flexget,gazpachoking/Flexget,JorisDeRieck/Flexget,qk4l/Flexget,OmgOhnoes/Flexget,drwyrm/Flexget,oxc/Flexget,LynxyssCZ/Flexget,oxc/Flexget,malkavi/Flexget,Flexget/Flexget,qvazzler/Flexget,JorisDeRieck/Flexget,tarzasai/Flexget,crawln45/Flexget,LynxyssCZ/Flexget,tobinjt/Flexget,jawilson/Flexget,sean797/Flexget,tobinjt/Flexget,tarzasai/Flexget,crawln45/Flexget,JorisDeRieck/Flexget,oxc/Flexget,dsemi/Flexget,qvazzler/Flexget,Flexget/Flexget,crawln45/Flexget,jacobmetrick/Flexget,qk4l/Flexget,malkavi/Flexget,drwyrm/Flexget,drwyrm/Flexget,jawilson/Flexget,poulpito/Flexget,Danfocus/Flexget,Flexget/Flexget,Danfocus/Flexget,OmgOhnoes/Flexget,LynxyssCZ/Flexget,Danfocus/Flexget,tarzasai/Flexget,LynxyssCZ/Flexget,poulpito/Flexget,malkavi/Flexget,Flexget/Flexget,jacobmetrick/Flexget,gazpachoking/Flexget,poulpito/Flexget,dsemi/Flexget,jacobmetrick/Flexget,JorisDeRieck/Flexget,dsemi/Flexget,ianstalk/Flexget,jawilson/Flexget,tobinjt/Flexget,malkavi/Flexget,sean797/Flexget | flexget/plugins/filter/list_queue.py | flexget/plugins/filter/list_queue.py | import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('list_queue')
class ListQueue(object):
schema = {
'type': 'array',
'items': {
'allOf': [
{'$ref': '/schema/plugins?group=list'},
{
'maxProperties': 1,
'error_maxProperties': 'Plugin options within list_queue plugin must be indented 2 more spaces '
'than the first letter of the plugin name.',
'minProperties': 1
}
]
}
}
def on_task_filter(self, task, config):
for item in config:
for plugin_name, plugin_config in item.iteritems():
thelist = plugin.get_plugin_by_name(plugin_name).instance.get_list(plugin_config)
for entry in task.all_entries:
if entry in thelist:
entry.accept()
def on_task_learn(self, task, config):
for item in config:
for plugin_name, plugin_config in item.iteritems():
thelist = plugin.get_plugin_by_name(plugin_name).instance.get_list(plugin_config)
thelist -= task.accepted
@event('plugin.register')
def register_plugin():
plugin.register(ListQueue, 'list_queue', api_ver=2)
| mit | Python |
|
f7a69e24912c3b9ed52201b52c79be4407884c3a | add module util for trying to resolve an ipv6 netmask to cidr. not perfect, but not meant to be either. | F5Networks/f5-ansible-modules | library/module_utils/network/f5/ipaddress.py | library/module_utils/network/f5/ipaddress.py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2018 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
def ipv6_netmask_to_cidr(mask):
"""converts an IPv6 netmask to CIDR form
According to the link below, CIDR is the only official way to specify
a subset of IPv6. With that said, the same link provides a way to
loosely convert an netmask to a CIDR.
Arguments:
mask (string): The IPv6 netmask to convert to CIDR
Returns:
int: The CIDR representation of the netmask
References:
https://stackoverflow.com/a/33533007
http://v6decode.com/
"""
bit_masks = [
0, 0x8000, 0xc000, 0xe000, 0xf000, 0xf800,
0xfc00, 0xfe00, 0xff00, 0xff80, 0xffc0,
0xffe0, 0xfff0, 0xfff8, 0xfffc, 0xfffe,
0xffff
]
count = 0
try:
for w in mask.split(':'):
if not w or int(w, 16) == 0:
break
count += bit_masks.index(int(w, 16))
return count
except:
return -1
| mit | Python |
|
8922f9430ec2844a3a14621ad0625aa45999c92a | fix args order | liujianpc/xunlei-lixian,sdgdsffdsfff/xunlei-lixian,davies/xunlei-lixian,GeassDB/xunlei-lixian,wogong/xunlei-lixian,iambus/xunlei-lixian,myself659/xunlei-lixian,wangjun/xunlei-lixian,sndnvaps/xunlei-lixian,windygu/xunlei-lixian,xieyanhao/xunlei-lixian,ccagg/xunlei | lixian_hash.py | lixian_hash.py | #!/usr/bin/env python
import hashlib
import lixian_hash_ed2k
import lixian_hash_bt
import os
def lib_hash_file(h, path):
with open(path, 'rb') as stream:
while True:
bytes = stream.read(1024*1024)
if not bytes:
break
h.update(bytes)
return h.hexdigest()
def sha1_hash_file(path):
return lib_hash_file(hashlib.sha1(), path)
def verify_sha1(path, sha1):
return sha1_hash_file(path).lower() == sha1.lower()
def md5_hash_file(path):
return lib_hash_file(hashlib.md5(), path)
def verify_md5(path, md5):
return md5_hash_file(path).lower() == md5.lower()
def md4_hash_file(path):
return lib_hash_file(hashlib.new('md4'), path)
def verify_md4(path, md4):
return md4_hash_file(path).lower() == md4.lower()
def dcid_hash_file(path):
h = hashlib.sha1()
size = os.path.getsize(path)
with open(path, 'rb') as stream:
if size < 0xF000:
h.update(stream.read())
else:
h.update(stream.read(0x5000))
stream.seek(size/3)
h.update(stream.read(0x5000))
stream.seek(size-0x5000)
h.update(stream.read(0x5000))
return h.hexdigest()
def verify_dcid(path, dcid):
return dcid_hash_file(path).lower() == dcid.lower()
def main(args):
option = args.pop(0)
if option.startswith('--verify'):
hash_fun = {'--verify-sha1':verify_sha1,
'--verify-md5':verify_md5,
'--verify-md4':verify_md4,
'--verify-dcid':verify_dcid,
'--verify-ed2k':lixian_hash_ed2k.verify_ed2k_link,
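                    # verify_bt_file takes (torrent, path), so the lambda swaps
                    # the (path, hash) argument order used by the call below.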
'--verify-bt': lambda f, t: lixian_hash_bt.verify_bt_file(t, f),
}[option]
assert len(args) == 2
hash, path = args
if hash_fun(path, hash):
print 'looks good...'
else:
print 'failed...'
else:
hash_fun = {'--sha1':sha1_hash_file,
'--md5':md5_hash_file,
'--md4':md4_hash_file,
'--dcid':dcid_hash_file,
'--ed2k':lixian_hash_ed2k.generate_ed2k_link,
'--info-hash':lixian_hash_bt.info_hash,
}[option]
for f in args:
h = hash_fun(f)
print '%s *%s' % (h, f)
if __name__ == '__main__':
import sys
args = sys.argv[1:]
main(args)
| #!/usr/bin/env python
import hashlib
import lixian_hash_ed2k
import lixian_hash_bt
import os
def lib_hash_file(h, path):
with open(path, 'rb') as stream:
while True:
bytes = stream.read(1024*1024)
if not bytes:
break
h.update(bytes)
return h.hexdigest()
def sha1_hash_file(path):
return lib_hash_file(hashlib.sha1(), path)
def verify_sha1(path, sha1):
return sha1_hash_file(path).lower() == sha1.lower()
def md5_hash_file(path):
return lib_hash_file(hashlib.md5(), path)
def verify_md5(path, md5):
return md5_hash_file(path).lower() == md5.lower()
def md4_hash_file(path):
return lib_hash_file(hashlib.new('md4'), path)
def verify_md4(path, md4):
return md4_hash_file(path).lower() == md4.lower()
def dcid_hash_file(path):
h = hashlib.sha1()
size = os.path.getsize(path)
with open(path, 'rb') as stream:
if size < 0xF000:
h.update(stream.read())
else:
h.update(stream.read(0x5000))
stream.seek(size/3)
h.update(stream.read(0x5000))
stream.seek(size-0x5000)
h.update(stream.read(0x5000))
return h.hexdigest()
def verify_dcid(path, dcid):
return dcid_hash_file(path).lower() == dcid.lower()
def main(args):
option = args.pop(0)
if option.startswith('--verify'):
hash_fun = {'--verify-sha1':verify_sha1,
'--verify-md5':verify_md5,
'--verify-md4':verify_md4,
'--verify-dcid':verify_dcid,
'--verify-ed2k':lixian_hash_ed2k.verify_ed2k_link,
'--verify-bt':lixian_hash_bt.verify_bt_file,
}[option]
assert len(args) == 2
hash, path = args
if hash_fun(path, hash):
print 'looks good...'
else:
print 'failed...'
else:
hash_fun = {'--sha1':sha1_hash_file,
'--md5':md5_hash_file,
'--md4':md4_hash_file,
'--dcid':dcid_hash_file,
'--ed2k':lixian_hash_ed2k.generate_ed2k_link,
'--info-hash':lixian_hash_bt.info_hash,
}[option]
for f in args:
h = hash_fun(f)
print '%s *%s' % (h, f)
if __name__ == '__main__':
import sys
args = sys.argv[1:]
main(args)
| mit | Python |
0814bbf6867a4bdd9d92c63e467f237b6129ee28 | add solution for palindrome number | SwordYoung/cutprob,SwordYoung/cutprob | leetcode/palindrome-number/sol.py | leetcode/palindrome-number/sol.py | #!/usr/bin/env python
class Solution:
# @return a boolean
def isPalindrome(self, x):
if x == -1:
return True
def ll(x):
return 0 if x == 0 or x == -1 else ll(x/10)+1
p = x >= 0
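        # Cancel mirrored digit pairs: subtract the low-side digit from both
        # ends (positions a and l-1-a); a palindrome reduces to exactly 0.
        # Negatives are handled with a complement variant of the same trick.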
l = ll(x)
print "x is %d l is %d" % (x, l)
t = x
for a in range(l/2):
mark = 10**(a)+10**(l-1-a)
b = (t / (10**(a))) % 10
b = b if p else 10-b
t = (t - b * mark) if p else (t+b*mark)
# print "t=%d" % (t)
if l % 2:
b = (t/(10**(l/2))) % 10
b = b if p else 10-b
t = (t - b * (10**(l/2))) if p else (t+b*(10**(l/2)))
return t == 0
if __name__ == "__main__":
sol = Solution()
print sol.isPalindrome(-2147483648)
print sol.isPalindrome(1234321)
print sol.isPalindrome(-1234321)
print sol.isPalindrome(1)
print sol.isPalindrome(-1)
print sol.isPalindrome(-11)
| artistic-2.0 | Python |
|
d2a92c5d628f426c26374dea6cb37bd35ba18812 | print variables | calico/basenji,calico/basenji | bin/basenji_variables.py | bin/basenji_variables.py | #!/usr/bin/env python
# Copyright 2017 Calico LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from __future__ import print_function
from optparse import OptionParser
import os
import sys
import time
import h5py
import tensorflow as tf
from basenji import params
from basenji import seqnn
"""
basenji_variables.py
Print a model's variables, typically for debugging purposes.
"""
################################################################################
# main
################################################################################
def main():
usage = 'usage: %prog [options] <params_file> <model_file>'
parser = OptionParser(usage)
(options, args) = parser.parse_args()
if len(args) != 2:
    parser.error('Must provide parameters and model files')
else:
params_file = args[0]
model_file = args[1]
#######################################################
# model parameters and placeholders
job = params.read_job_params(params_file)
model = seqnn.SeqNN()
model.build(job)
# initialize saver
saver = tf.train.Saver()
with tf.Session() as sess:
# load variables into session
saver.restore(sess, model_file)
for v in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES):
print(v.name, v.shape)
################################################################################
# __main__
################################################################################
if __name__ == '__main__':
main()
| apache-2.0 | Python |
|
517a326b2869190bb1c0a676f467e1529e119259 | Create enigma.py | pkeating/Enigma-Machine | enigma.py | enigma.py | from EnigmaMachine import Plugboard
from EnigmaMachine import Rotor
from EnigmaMachine import Reflector
from EnigmaMachine import Machine
import ConfigParser
def configureRotor(n):
# Opens the Rotor configurations file.
config_file = ConfigParser.RawConfigParser()
config_file.read('Config/rotor_config.cfg')
# Prints instructions to the user along with a list of the valid
# rotor configurations.
print "-" * 65
if n == 1:
print "Choose the first rotor and its starting position."
if n == 2:
print "Choose the second rotor and its starting position."
if n == 3:
print "Choose the third rotor and its starting position."
print "Select the rotor you wish to use. Valid choices are:"
print config_file.sections()
# Gets the rotor configuration from the user and ensures it's valid.
while True:
rotor_id = raw_input("Choose Rotor: ")
if config_file.has_section(rotor_id):
break
else:
print "No such rotor exists."
# Gets the starting position from the user and ensures it's valid.
print "Starting position should be a number between 0 and 25."
while True:
try:
rotor_starting_position = int(raw_input("Choose Starting Position: "))
# If user doesn't enter an integer, the resulting exception
# will be handled here.
except:
print 'You must enter a number.'
# If the integer entered by the user is not between 0 and 25,
# then the user will be informed their input is invalid and
# will be re-prompted.
else:
if rotor_starting_position < 0 or rotor_starting_position > 25:
print 'You must enter a number between 0 and 25.'
else: # If input is valid, the while loop is broken.
break
# Initializes the rotor and returns it to main().
rotor = Rotor.Rotor(rotor_id, rotor_starting_position, config_file)
return rotor
def configureReflector():
# Opens the Reflector configurations file.
config_file = ConfigParser.RawConfigParser()
config_file.read('Config/reflector_config.cfg')
# Prints the reflectors in the reflector configurations file.
print "-" * 65
print "Select the reflector you wish to use. Valid choices are:"
print config_file.sections()
# While loop ensures user's input is valid.
while True:
# Gets the reflector name from the user.
reflector_id = raw_input("Choose reflector: ")
# If reflector_id is not a section in the config file, the while loop
# repeats. If reflector_id is valid, the while loop is broken.
if config_file.has_section(reflector_id):
break
else:
print "No such reflector exists."
# Initializes the reflector and returns it to main().
reflector = Reflector.Reflector(reflector_id, config_file)
return reflector
def configurePlugboard():
# Explains how to configure the plugboard.
print "-" * 65
print "Choose the plugboard settings. The plugboard allows you to swap"
print "one letter for another before and after it runs through the rotors."
print "Input should take the form:"
print "ab, cd, ef, gh"
print "You can choose as many pairs as you like, but you cannot"
print "repeat letters."
# Gets the plugboard settings from the user.
pairs = raw_input('> ')
# Configures the plugboard.
plugboard = Plugboard.Plugboard(pairs)
# Returns the plugboard to main().
return plugboard
def main():
# Configures the machine.
enigma_machine = Machine.EnigmaMachine(
configurePlugboard(),
configureRotor(1),
configureRotor(2),
configureRotor(3),
configureReflector()
)
# Gets the user's message.
message = raw_input('Input Message: ')
# Put's the message in the Enigma Machine.
enigma_machine.inputMessage(message)
# Encrypts the message
converted_message = enigma_machine.convertMessage()
# Opens an output file and writes new_message to it.
output_file = open('output.txt', 'w')
output_file.write(converted_message)
output_file.close()
# Prints a message to the user letting them know their output is ready
print '-' * 65
print "Your encrypted message is available in output.txt"
print "Remember your plugboard settings, the rotors you chose, their"
print "starting positions, and the reflector you used. You will need"
print "these to decrypt the message. To decrypt, rerun the program"
print "with the same settings and enter the encrypted message.\n"
if __name__ == "__main__":
main()
| mit | Python |
|
0598e61d9bcef2217f22cce2deeec08ed6868575 | Add rmd.py | cangermueller/deepcpg,cangermueller/deepcpg | scripts/rmd.py | scripts/rmd.py | #!/usr/bin/env python
import argparse
import sys
import logging
import os
import os.path as pt
import shutil
class App(object):
def run(self, args):
name = pt.basename(args[0])
parser = self.create_parser(name)
opts = parser.parse_args(args[1:])
return self.main(name, opts)
def create_parser(self, name):
p = argparse.ArgumentParser(
prog=name,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description='Run rmd script')
p.add_argument(
'rmd_file',
help='RMD file')
p.add_argument(
'-o', '--out_file',
help='Output file')
p.add_argument(
'-f', '--format',
help='Output format',
default='html',
choices=['html', 'pdf', 'word'])
p.add_argument(
'--cmd',
help='R command')
p.add_argument(
'--copy',
help='Copy to file')
p.add_argument(
'--test',
help='Print command without executing',
action='store_true')
p.add_argument(
'--verbose',
help='More detailed log messages',
action='store_true')
p.add_argument(
'--log_file',
help='Write log messages to file')
return p
def main(self, name, opts):
logging.basicConfig(filename=opts.log_file,
format='%(levelname)s (%(asctime)s): %(message)s')
log = logging.getLogger(name)
if opts.verbose:
log.setLevel(logging.DEBUG)
else:
log.setLevel(logging.INFO)
log.debug(opts)
rmd_file = opts.rmd_file
if opts.copy:
shutil.copyfile(rmd_file, opts.copy)
rmd_file = opts.copy
_format = opts.format
out_file = opts.out_file
if out_file is None:
out_file = '%s.%s' % (pt.splitext(rmd_file)[0], opts.format)
else:
_format = pt.splitext(out_file)[1][1:]
Rcmd = ''
if opts.cmd is not None:
Rcmd = '%s;' % (opts.cmd)
cmd = "library(rmarkdown); {c} render('{r}', output_file='{o}', output_format='{f}_document')"
cmd = cmd.format(c=Rcmd, r=rmd_file, o=out_file, f=_format)
cmd = 'Rscript -e "%s"' % (cmd)
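        # The resulting command looks roughly like (illustrative file names):
        # Rscript -e "library(rmarkdown);  render('report.Rmd', output_file='report.html', output_format='html_document')"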
print(cmd)
if not opts.test:
os.system(cmd)
return 0
if __name__ == '__main__':
app = App()
app.run(sys.argv)
| mit | Python |
|
30a8e40efee241dd6aa3b534814655b9f70cfffe | Add 020-valid-parentheses.py, but missed case "([])", the description is confused | mvj3/leetcode | 020-valid-parentheses.py | 020-valid-parentheses.py | """
Question:
Valid Parentheses My Submissions Question Solution
Given a string containing just the characters '(', ')', '{', '}', '[' and ']', determine if the input string is valid.
The brackets must close in the correct order, "()" and "()[]{}" are all valid but "(]" and "([)]" are not.
Performance:
1. Total Accepted: 71155 Total Submissions: 265078 Difficulty: Easy
"""
class Solution(object):
def isValid(self, s):
"""
:type s: str
:rtype: bool
"""
        # Use a stack: push openers, pop and match on closers. Checking
        # adjacent character pairs fails on nested input such as "([])".
        pairs = {')': '(', ']': '[', '}': '{'}
        stack = []
        for char in s:
            if char in pairs:
                if not stack or stack.pop() != pairs[char]:
                    return False
            else:
                stack.append(char)
        return not stack
assert Solution().isValid("()") is True
assert Solution().isValid("()[]{}") is True
assert Solution().isValid("([])") is True
assert Solution().isValid("(]") is False
assert Solution().isValid("([)]") is False
assert Solution().isValid("[") is False
| mit | Python |
|
8249d33898500d9d39e8bee3d44d39c2a6034659 | Add script to create overlays | JIC-Image-Analysis/senescence-in-field,JIC-Image-Analysis/senescence-in-field,JIC-Image-Analysis/senescence-in-field | scripts/create_overlays.py | scripts/create_overlays.py | """Varcan smart tool."""
import click
from dtoolcore import DataSet
@click.command()
@click.argument('dataset_uri')
@click.option('--config-path', type=click.Path(exists=True))
def main(dataset_uri, config_path=None):
dataset = DataSet.from_uri(dataset_uri, config_path=config_path)
def name_from_identifier(identifier):
item_properties = dataset.item_properties(identifier)
name = item_properties['relpath'].rsplit('.', 1)[0]
return name
useful_name_overlay = {
identifier: name_from_identifier(identifier)
for identifier in dataset.identifiers
}
dataset.put_overlay("useful_name", useful_name_overlay)
if __name__ == '__main__':
main()
| mit | Python |
|
5510f90565809471e545584419b22980b63a1864 | Add metadata | njvack/markdown-to-json | bids_writer/_metadata.py | bids_writer/_metadata.py | # -*- coding: utf-8 -*-
version = "0.1.0"
author = "Nathan Vack"
author_email = "njvack@wisc.edu"
license = "MIT"
copyright = "Copyright 2015 Boards of Regent of the University of Wisconsin System"
url = "https://github.com/njvack/bids-json-writer"
| mit | Python |
|
65f574973bbde545c1c815d0ad21e4a8d3f3b59d | Add initial cbio client | bgyori/bioagents,sorgerlab/bioagents | bioagents/cbio_client.py | bioagents/cbio_client.py | import os
import json
import logging
import requests
from collections import defaultdict
logger = logging.getLogger(__name__)
base_url = 'https://www.cbioportal.org/api'
resources_dir = os.path.join(os.path.dirname(
os.path.abspath(__file__)), os.pardir, 'resources')
patient_list_cache = os.path.join(resources_dir, 'cbio_patients.json')
def get_patient_list():
if os.path.exists(patient_list_cache):
logger.info('Loading patient list from cache at %s' %
patient_list_cache)
with open(patient_list_cache, 'r') as fh:
patient_list = json.load(fh)
else:
logger.info('Querying patient list from cBioPortal')
url = base_url + '/patients'
res = requests.get(url)
patient_list = res.json()
with open(patient_list_cache, 'w') as fh:
json.dump(patient_list, fh, indent=1)
patients_by_id = defaultdict(list)
patients_by_study = defaultdict(list)
for patient in patient_list:
patients_by_id[patient['patientId']].append(patient)
patients_by_study[patient['studyId']].append(patient)
return dict(patients_by_id), dict(patients_by_study)
patients_by_id, patients_by_study = get_patient_list()
| bsd-2-clause | Python |
|
b38b9e62c174ff55d496bec2fb6599bee8262a3c | Add plot_compare_methods from scikit-learn | lucasdavid/Manifold-Learning,lucasdavid/Manifold-Learning | manifold/plot_compare_methods.py | manifold/plot_compare_methods.py | # Author: Jake Vanderplas -- <vanderplas@astro.washington.edu>
# print(__doc__)
from time import time
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import NullFormatter
from sklearn import manifold, datasets
def compare():
# Next line to silence pyflakes. This import is needed.
Axes3D
n_points = 1000
X, color = datasets.samples_generator.make_s_curve(n_points, random_state=0)
n_neighbors = 10
n_components = 2
fig = plt.figure(figsize=(15, 8))
plt.suptitle("Manifold Learning with %i points, %i neighbors"
% (1000, n_neighbors), fontsize=14)
try:
# compatibility matplotlib < 1.0
ax = fig.add_subplot(251, projection='3d')
ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=color, cmap=plt.cm.Spectral)
ax.view_init(4, -72)
except:
ax = fig.add_subplot(251, projection='3d')
plt.scatter(X[:, 0], X[:, 2], c=color, cmap=plt.cm.Spectral)
methods = ['standard', 'ltsa', 'hessian', 'modified']
labels = ['LLE', 'LTSA', 'Hessian LLE', 'Modified LLE']
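    # The four LLE variants share the timing loop below; Isomap, MDS,
    # spectral embedding and t-SNE are each timed individually afterwards.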
for i, method in enumerate(methods):
t0 = time()
Y = manifold.LocallyLinearEmbedding(n_neighbors, n_components,
eigen_solver='auto',
method=method).fit_transform(X)
t1 = time()
print("%s: %.2g sec" % (methods[i], t1 - t0))
ax = fig.add_subplot(252 + i)
plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
plt.title("%s (%.2g sec)" % (labels[i], t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')
t0 = time()
Y = manifold.Isomap(n_neighbors, n_components).fit_transform(X)
t1 = time()
print("Isomap: %.2g sec" % (t1 - t0))
ax = fig.add_subplot(257)
plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
plt.title("Isomap (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')
t0 = time()
mds = manifold.MDS(n_components, max_iter=100, n_init=1)
Y = mds.fit_transform(X)
t1 = time()
print("MDS: %.2g sec" % (t1 - t0))
ax = fig.add_subplot(258)
plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
plt.title("MDS (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')
t0 = time()
se = manifold.SpectralEmbedding(n_components=n_components,
n_neighbors=n_neighbors)
Y = se.fit_transform(X)
t1 = time()
print("SpectralEmbedding: %.2g sec" % (t1 - t0))
ax = fig.add_subplot(259)
plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
plt.title("SpectralEmbedding (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')
t0 = time()
tsne = manifold.TSNE(n_components=n_components, init='pca', random_state=0)
Y = tsne.fit_transform(X)
t1 = time()
print("t-SNE: %.2g sec" % (t1 - t0))
    ax = fig.add_subplot(2, 5, 10)
plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
plt.title("t-SNE (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')
plt.show()
if __name__ == '__main__':
compare()
| mit | Python |
|
bf60d3c48a30863571a8700fa5a843be48e7646b | add vat_reckoner | douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/combo,douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/combo,douglassquirrel/combo | components/vat_reckoner/vat_reckoner.py | components/vat_reckoner/vat_reckoner.py | #! /usr/bin/env python
from json import loads, dumps
from pika import BlockingConnection, ConnectionParameters
RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672
def vat(ch, method, properties, body):
product = loads(body)
sku, price = product['sku'], product['price']
vat = price * 0.20
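    # flat 20% rate (presumably the UK standard VAT rate; not configurable here)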
vat_fact = {'sku': sku, 'vat': vat}
print 'Calculated vat %s' % (vat_fact,)
channel.basic_publish(exchange='alex2',
routing_key='vat',
body=dumps(vat_fact))
connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='new_products')
channel.basic_consume(vat, queue=queue, no_ack=True)
channel.start_consuming()
| mit | Python |
|
77d90ec03eff1946a422e5471cc1a64708eff0f4 | Test dramatis personae | zmbc/shakespearelang,zmbc/shakespearelang,zmbc/shakespearelang | shakespearelang/tests/unit/test_dramatis_personae.py | shakespearelang/tests/unit/test_dramatis_personae.py | from shakespearelang import Shakespeare
from shakespearelang.errors import ShakespeareRuntimeError
import pytest
MANY_CHARACTERS_PLAY = """
A lot of people.
Achilles, a test.
Christopher Sly, a test.
Demetrius, a test.
John of Lancaster, a test.
Juliet, a test.
Mistress Overdone, a test.
Romeo, a test.
Stephano, a test.
The Abbot of Westminster, a test.
The Ghost, a test.
Titania, a test.
Vincentio, a test.
"""
def test_correct_characters():
s = Shakespeare('Foo. Juliet, a test. Romeo, a test. The Ghost, a test.')
assert sorted([c.name for c in s.characters]) == ['Juliet', 'Romeo', 'The Ghost']
def test_no_characters():
s = Shakespeare('Foo. Act I: The beginning.')
assert s.characters == []
def test_many_characters():
s = Shakespeare(MANY_CHARACTERS_PLAY)
assert sorted([c.name for c in s.characters]) == [
'Achilles',
'Christopher Sly',
'Demetrius',
'John of Lancaster',
'Juliet',
'Mistress Overdone',
'Romeo',
'Stephano',
'The Abbot of Westminster',
'The Ghost',
'Titania',
'Vincentio',
]
| mit | Python |
|
0ba11dd47dac04f3f7a314cf320558ccbc9eb148 | Add test for water polygon name dropping. | mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource | integration-test/1477-water-layer-too-big.py | integration-test/1477-water-layer-too-big.py | # -*- encoding: utf-8 -*-
from . import FixtureTest
class WaterLayerTooBigTest(FixtureTest):
def test_drop_label(self):
from tilequeue.tile import calc_meters_per_pixel_area
from shapely.ops import transform
from tilequeue.tile import reproject_mercator_to_lnglat
import math
import dsl
for zoom in range(5, 16):
area = 270.0 * calc_meters_per_pixel_area(zoom)
radius = math.sqrt(area / math.pi)
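            # 270 px^2 is treated as the area threshold for keeping a water
            # label; buffer circles just above and below it to probe both sides.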
coord = 2 ** (zoom - 1)
# larger feature should retain name
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius * 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': 'Foo',
})
# smaller shape should drop it
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius / 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': type(None),
})
| mit | Python |
|
a3e538830305d8a6651c5ed46e2dfdffe41c28e6 | Add a module for ssh 'console' API | xcat2/confluent,xcat2/confluent,jjohnson42/confluent,xcat2/confluent,jjohnson42/confluent,whowutwut/confluent,xcat2/confluent,whowutwut/confluent,whowutwut/confluent,whowutwut/confluent,jjohnson42/confluent,jjohnson42/confluent,jjohnson42/confluent,xcat2/confluent | confluent_server/confluent/plugins/shell/ssh.py | confluent_server/confluent/plugins/shell/ssh.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'jjohnson2'
# This plugin provides an ssh implementation comforming to the 'console'
# specification. consoleserver or shellserver would be equally likely
# to use this.
import confluent.interface.console as conapi
import eventlet
paramiko = eventlet.import_patched('paramiko')
class SshShell(conapi.Console):
def __init__(self, node, config, username='', password=''):
self.node = node
self.nodeconfig = config
self.username = username
self.password = password
        self.inputmode = 0  # 0 = username, 1 = password, 2 = logged in
def recvdata(self):
while self.connected:
pendingdata = self.shell.recv(8192)
if pendingdata == '':
self.datacallback(conapi.ConsoleEvent.Disconnect)
return
self.datacallback(pendingdata)
def connect(self, callback):
# for now, we just use the nodename as the presumptive ssh destination
#TODO(jjohnson2): use a 'nodeipget' utility function for architectures
# that would rather not use the nodename as anything but an opaque
# identifier
self.datacallback = callback
        if self.username != '':
self.logon()
else:
self.inputmode = 0
callback('\r\nlogin as: ')
return
def logon(self):
self.ssh = paramiko.SSHClient()
self.ssh.set_missing_host_key_policy(paramiko.client.AutoAddPolicy())
try:
self.ssh.connect(self.node, username=self.username,
password=self.password, allow_agent=False,
look_for_keys=False)
except paramiko.AuthenticationException:
self.inputmode = 0
self.username = ''
self.password = ''
self.datacallback('\r\nlogin as: ')
return
self.inputmode = 2
self.connected = True
self.shell = self.ssh.invoke_shell()
self.rxthread = eventlet.spawn(self.recvdata)
def write(self, data):
if self.inputmode == 0:
self.username += data
if '\r' in self.username:
self.username, self.password = self.username.split('\r')
lastdata = data.split('\r')[0]
if lastdata != '':
self.datacallback(lastdata)
self.datacallback('\r\nEnter password: ')
self.inputmode = 1
else:
# echo back typed data
self.datacallback(data)
elif self.inputmode == 1:
self.password += data
if '\r' in self.password:
self.password = self.password.split('\r')[0]
self.datacallback('\r\n')
self.logon()
else:
self.shell.sendall(data)
def close(self):
self.ssh.close()
def create(nodes, element, configmanager, inputdata):
if len(nodes) == 1:
return SshShell(nodes[0], configmanager) | apache-2.0 | Python |
|
865dc29421c1e9ef4bf340bf32164863cc5f2006 | Add management command to list installed spiders | legco-watch/legco-watch,comsaint/legco-watch,legco-watch/legco-watch,comsaint/legco-watch,legco-watch/legco-watch,comsaint/legco-watch,comsaint/legco-watch,legco-watch/legco-watch | app/raw/management/commands/list_spiders.py | app/raw/management/commands/list_spiders.py | from django.core.management import BaseCommand
from raw.utils import list_spiders
class Command(BaseCommand):
help = 'List installed spiders'
def handle(self, *args, **options):
for spider in list_spiders():
print spider
| mit | Python |
|
77966f7f993e526467b2e54e0d12241354efec16 | add spec for re2 | facebook/bistro,facebook/bistro,facebook/bistro,facebook/bistro,facebook/bistro,facebook/bistro | build/fbcode_builder/specs/re2.py | build/fbcode_builder/specs/re2.py | #!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
def fbcode_builder_spec(builder):
return {
'steps': [
builder.github_project_workdir('google/re2', 'build'),
builder.cmake_install('google/re2'),
],
}
| mit | Python |
|
15388e09ab537d3731891353c54f53105c4a7ee4 | add files | youqingkui/weixin_pay | weixin_pay.py | weixin_pay.py | #!/usr/bin/env python
# coding=utf-8
__author__ = 'youqingkui'
| mit | Python |
|
b7360d6ba397f8654f4e051227aa86a1ebe693f7 | Add main program | networm/FollowGitHubUser | follow.py | follow.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from github import Github
# usage
def usage():
print 'Follow GitHub user\'s starred, watching and following.'
print
print 'Usage: python follow.py <token> <user>'
print
print 'token: Go to https://github.com/settings/tokens and `Generate new token` with scope `public_repo`.'
print
print 'user: GitHub user ID you want to follow.'
# args
if len(sys.argv) != 3:
usage()
exit(1)
# variable
me = Github(sys.argv[1])
namedUser = Github().get_user(sys.argv[2])
# action
for starred in namedUser.get_starred().reversed:
me.get_user().add_to_starred(starred)
for subscription in namedUser.get_subscriptions().reversed:
me.get_user().add_to_subscriptions(subscription)
for watched in namedUser.get_watched().reversed:
me.get_user().add_to_watched(watched)
for following in namedUser.get_following().reversed:
me.get_user().add_to_following(following)
| mit | Python |
|
9080d20bd61ac66a534c834a17a9825808416512 | Add pre-stage hook for FrostNumberModel | csdms/wmt-metadata | metadata/FrostNumberModel/hooks/pre-stage.py | metadata/FrostNumberModel/hooks/pre-stage.py | """A hook for modifying parameter values read from the WMT client."""
import os
import shutil
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters
file_list = []
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
assign_parameters(env, file_list)
env['fn_out_filename'] = 'frostnumber_output.dat'
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
| mit | Python |
|
70849edc52acc1c559b35a55c7f1925c1cbf57ad | add new tagcount plugin for yawt rewrite | drivet/yawt,drivet/yawt,drivet/yawt | yawtext/tagcount.py | yawtext/tagcount.py | from flask import current_app, g, Blueprint
import jsonpickle
from yawt.utils import save_file, load_file
import os
tagcountsbp = Blueprint('tagcounts', __name__)
@tagcountsbp.app_context_processor
def tagcounts():
tagcountfile = current_app.config['YAWT_TAGCOUNT_FILE']
tvars = {}
if os.path.isfile(tagcountfile):
tagbase = current_app.config['YAWT_TAGCOUNT_BASE']
if not tagbase.endswith('/'):
tagbase += '/'
tvars = {'tagcounts': jsonpickle.decode(load_file(tagcountfile)),
'tagbase': tagbase}
return tvars
class YawtTagCount(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
self.tagcounts = {}
def init_app(self, app):
app.config.setdefault('YAWT_TAGCOUNT_BASE', '')
app.config.setdefault('YAWT_TAGCOUNT_FILE', '/tmp/tagcounts')
app.register_blueprint(tagcountsbp)
def on_pre_walk(self):
self.tagcounts = {}
def on_visit_article(self, article):
if hasattr(article.info, 'taglist'):
for tag in article.info.taglist:
if tag in self.tagcounts:
self.tagcounts[tag] += 1
else:
self.tagcounts[tag] = 1
def on_post_walk(self):
pickled_counts = jsonpickle.encode(self.tagcounts)
save_file(current_app.config['YAWT_TAGCOUNT_FILE'], pickled_counts)
def on_files_changed(self, files_modified, files_added, files_removed):
pickled_counts = load_file(current_app.config['YAWT_TAGCOUNT_FILE'])
self.tagcounts = jsonpickle.decode(pickled_counts)
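        # First back out the old tag counts of removed and modified files,
        # then re-count modified and added files with their new tags.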
for f in files_removed + files_modified:
article = g.store.fetch_article_by_repofile(f)
for tag in article.info.taglist:
self.tagcounts[tag] -= 1
for f in files_modified + files_added:
article = g.store.fetch_article_by_repofile(f)
self.on_visit_article(article)
self.on_post_walk()
| mit | Python |
|
d19aaf0fd3c88c08b2b8563030dd38c0cea3631b | Add unit test for `parse_cluster_info` (#22205) | ray-project/ray,ray-project/ray,ray-project/ray,ray-project/ray,ray-project/ray,ray-project/ray,ray-project/ray,ray-project/ray | dashboard/modules/job/tests/test_sdk.py | dashboard/modules/job/tests/test_sdk.py | import pytest
from typing import Dict, Optional, Tuple
from unittest.mock import Mock, patch
from ray.dashboard.modules.job.sdk import parse_cluster_info
@pytest.mark.parametrize(
"address_param",
[
("ray://1.2.3.4:10001", "ray", "1.2.3.4:10001"),
("other_module://", "other_module", ""),
("other_module://address", "other_module", "address"),
],
)
@pytest.mark.parametrize("create_cluster_if_needed", [True, False])
@pytest.mark.parametrize("cookies", [None, {"test_cookie_key": "test_cookie_val"}])
@pytest.mark.parametrize("metadata", [None, {"test_metadata_key": "test_metadata_val"}])
@pytest.mark.parametrize("headers", [None, {"test_headers_key": "test_headers_val"}])
def test_parse_cluster_info(
address_param: Tuple[str, str, str],
create_cluster_if_needed: bool,
cookies: Optional[Dict[str, str]],
metadata: Optional[Dict[str, str]],
headers: Optional[Dict[str, str]],
):
"""
Test ray.dashboard.modules.job.sdk.parse_cluster_info for different
format of addresses.
"""
mock_get_job_submission_client_cluster = Mock(return_value="Ray ClusterInfo")
mock_module = Mock()
mock_module.get_job_submission_client_cluster_info = Mock(
return_value="Other module ClusterInfo"
)
mock_import_module = Mock(return_value=mock_module)
address, module_string, inner_address = address_param
with patch.multiple(
"ray.dashboard.modules.job.sdk",
get_job_submission_client_cluster_info=mock_get_job_submission_client_cluster,
), patch.multiple("importlib", import_module=mock_import_module):
if module_string == "ray":
assert (
parse_cluster_info(
address,
create_cluster_if_needed=create_cluster_if_needed,
cookies=cookies,
metadata=metadata,
headers=headers,
)
== "Ray ClusterInfo"
)
mock_get_job_submission_client_cluster.assert_called_once_with(
inner_address,
create_cluster_if_needed=create_cluster_if_needed,
cookies=cookies,
metadata=metadata,
headers=headers,
)
elif module_string == "other_module":
assert (
parse_cluster_info(
address,
create_cluster_if_needed=create_cluster_if_needed,
cookies=cookies,
metadata=metadata,
headers=headers,
)
== "Other module ClusterInfo"
)
mock_import_module.assert_called_once_with(module_string)
mock_module.get_job_submission_client_cluster_info.assert_called_once_with(
inner_address,
create_cluster_if_needed=create_cluster_if_needed,
cookies=cookies,
metadata=metadata,
headers=headers,
)
| apache-2.0 | Python |
|
4d570475d22cc85dd55c4b68bd7321cec7be8e7e | Add bytecode patcher to change the snooper URL and interval (see wiki.vg/Session#Snoop) | SupaHam/mark2,frostyfrog/mark2,frostyfrog/mark2,SupaHam/mark2 | snoop_patch.py | snoop_patch.py | from os import chdir
from tempfile import mkdtemp
from shutil import rmtree
from struct import pack
from subprocess import check_output
def jar_contents(j_path):
return check_output(['jar', 'tf', j_path]).split("\n")
def jar_extract(j_path):
return check_output(['jar', 'xf', j_path])
def jar_update(j_path, t_path, c_path):
c_path = c_path[len(t_path)+1:]
return check_output(['jar', 'uf', j_path, '-C', t_path, c_path])
def jlong(v):
return pack('>q', v)
def jstring(v):
return "%s%s" % (pack('>h', len(v)), v)
def jurl(h, p):
p = "" if p == 80 else ":%d" % p
return jstring("http://%s%s/" % (h,p))
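# jlong/jstring mirror how these constants are encoded in Java class files
# (an 8-byte big-endian long; a 2-byte big-endian length followed by the
# string bytes), which is what the patcher searches for in the raw bytecode.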
def patch(j_path, host, port, interval):
#Marker file to put in jar
m_name = '.snooper-patched'
#Get jar contents
j_contents = jar_contents(j_path)
#Make a temporary directory
t_path = mkdtemp(prefix='mark2-patch-')
chdir(t_path)
#Extract the jar
jar_extract(j_path)
#Figure out what we need to replace
if m_name in j_contents:
f = open("%s/%s" % (t_path, m_name), "r")
old_host, old_port, old_interval = f.read().split("\n")
old_port = int(old_port)
old_interval = int(old_interval)
f.close()
else:
old_host, old_port, old_interval = 'snoop.minecraft.net', 80, 900000
replace = {
jlong(old_interval): jlong(interval),
jurl(old_host, old_port): jurl(host, port)}
#Find the relevant class
c_path = None
c_data = None
for name in j_contents:
name = "%s/%s" % (t_path, name)
if not name.endswith(".class"):
continue
f = open(name, 'r')
data = f.read()
f.close()
found = True
for k in replace.keys():
found &= data.find(k) != -1
if found:
c_path = name
c_data = data
break
#Patch if found
if c_path != None:
#Update file contents
for find, replace in replace.iteritems():
c_data = c_data.replace(find, replace)
#Write to file
f = open(c_path, 'wb')
f.write(c_data)
f.close()
#Update jar
jar_update(j_path, t_path, c_path)
#Add marker that it's been patched
m_path = "%s/%s" % (t_path, m_name)
f = open(m_path, "w")
f.write("%s\n%d\n%d" % (host, port, interval))
f.close()
jar_update(j_path, t_path, m_path)
rmtree(t_path)
return c_path != None
| mit | Python |
|
3c685922756a582030980f319014ba308735ee2c | add nextlaunch command | cblgh/tenyks-contrib,kyleterry/tenyks-contrib | src/tenyksscripts/scripts/rockets.py | src/tenyksscripts/scripts/rockets.py | import datetime
import requests
import time
def run(data, settings):
if data["payload"] != "nextlaunch":
return
launches = requests.get("https://launchlibrary.net/1.2/launch", params={"next": 1, "mode": "verbose"}).json()
if not launches["count"]:
return "No launches scheduled"
launch = launches["launches"][0]
delta = datetime.timedelta(seconds=launch["netstamp"] - int(time.time()))
return "Next launch: {name}. When: {time} (in {delta})".format(
name=launch["name"],
time=launch["net"],
delta=delta
)
| mit | Python |
|
ba3e2a81a5e89c010473820732835d8bf7ccc39a | Create morningstar.py | LordCatatonic/Lucifer,LordCatatonic/Lucifer | morningstar.py | morningstar.py | import os
import sys
import threading
import thread
import time
import settings
import subprocess
import psutil
class watchman(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
badwinprocs = ['taskmgr', 'regedit', 'mbam', 'cmd', 'command']
if 'lucifer' in sys.argv[0]:
exe = "morningstar"
else:
exe = "lucifer"
while 1:
#
processlist = psutil.pids()
x = False
for process in processlist:
try:
proc = psutil.Process(process)
print proc.name()
if exe in proc.name():
x = True
elif proc.name() in badwinprocs:
proc.stop()
except: print 'psutil error'
if x == False:
print exe + ' not running...'
os.popen('Shutdown -s -f -t 000')
sys.exit()
#break
#
def startup():
time.sleep(5)
try:
        startup_files = glob.glob("*.exe")
        for exe_name in startup_files:
            try:
                if exe_name in sys.argv[0]:
                    pass
                else:
                    os.popen(killcmd + ' ' + exe_name)
            except:
                print prefix + "couldn't kill " + exe_name
subprocess.check_call("attrib +R +S +H " + sys.argv[0], shell=True)
except:
pass
if 'lucifer' in sys.argv[0]:
print "[ > ] Morningstar loaded"
else:
thread.start_new_thread(startup, ())
print "[ > ] Startup loaded"
time.sleep(5)
watchman().start()
print "[ > ] Watchman loaded"
| unlicense | Python |
|
7d546ca0ce8e2e8ef4f71abda50764817ce83c0b | add mouse_click.py | wy36101299/mouse_click | mouse_click.py | mouse_click.py | from pymouse import PyMouse
from time import sleep
m = PyMouse()
sleep(5)
x=969
y=581
a = 1
while a == 1:
    m.click(x,y)  # move to (x, y) and click
    sleep(0.1)
    p = m.position()  # get the current mouse position
    if not 900<p[0]<1000:  # leave the loop if the x coordinate falls outside 900~1000
break | mit | Python |
|
9ef3260ba5d27a3274fa6d3112e36091f04989f9 | add file | dragonwolverines/DataStructures,dragonwolverines/DataStructures,dragonwolverines/DataStructures | resource-4/permutations/permutationToInteger.py | resource-4/permutations/permutationToInteger.py | def permutationToInteger(perm):
permLen = len(perm)
elts = range(permLen)
num = 0
for i in range(permLen):
digit = elts.index(perm[i])
num += digit * math.factorial(permLen - i - 1)
del elts(digit)
return num
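# Illustrative (not in the original): permutationToInteger([1, 0, 2]) == 2,
# i.e. the permutation's rank in lexicographic order (its Lehmer-code value).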
| bsd-2-clause | Python |
|
dafa0060460a2d4e820fbdafd33e51363bac0259 | Create 01.Mean.py | rmatam/Deep-Learning | 01.Python/01.Mean.py | 01.Python/01.Mean.py | import numpy as np
A = np.array([[10,14,11,7,9.5,15,19],[8,9,17,14.5,12,18,15.5],
[15,7.5,11.5,10,10.5,7,11],[11.5,11,9,12,14,12,7.5]])
B = A.T
print B
print(np.mean(B))
print(np.mean(B,axis=0))
print(np.mean(A,axis=1))
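# axis=0 averages over rows (one mean per column); axis=1 averages over
# columns (one mean per row).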
| apache-2.0 | Python |
|
9570da3427121628d4e144c1092da155583a496d | Add Python benchmark | stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib | lib/node_modules/@stdlib/math/base/special/asinh/benchmark/python/benchmark.py | lib/node_modules/@stdlib/math/base/special/asinh/benchmark/python/benchmark.py | #!/usr/bin/env python
"""Benchmark asinh."""
import timeit
name = "asinh"
repeats = 3
iterations = 1000000
def print_version():
"""Print the TAP version."""
print("TAP version 13")
def print_summary(total, passing):
"""Print the benchmark summary.
# Arguments
* `total`: total number of tests
* `passing`: number of passing tests
"""
print("#")
print("1.." + str(total)) # TAP plan
print("# total " + str(total))
print("# pass " + str(passing))
print("#")
print("# ok")
def print_results(elapsed):
"""Print benchmark results.
# Arguments
* `elapsed`: elapsed time (in seconds)
# Examples
``` python
python> print_results(0.131009101868)
```
"""
rate = iterations / elapsed
print(" ---")
print(" iterations: " + str(iterations))
print(" elapsed: " + str(elapsed))
print(" rate: " + str(rate))
print(" ...")
def benchmark():
"""Run the benchmark and print benchmark results."""
setup = "from math import asinh; from random import random;"
stmt = "y = asinh(200.0*random() - 100.0)"
t = timeit.Timer(stmt, setup=setup)
print_version()
    for i in xrange(repeats):
print("# python::" + name)
elapsed = t.timeit(number=iterations)
print_results(elapsed)
print("ok " + str(i+1) + " benchmark finished")
print_summary(repeats, repeats)
def main():
"""Run the benchmark."""
benchmark()
if __name__ == "__main__":
main()
| apache-2.0 | Python |
|
d1ba1a02385581375831fd4b394f68ade4cbb101 | Create RX_TX.py | MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab | home/hairygael/RX_TX.py | home/hairygael/RX_TX.py | arduino = Runtime.createAndStart("arduino","Arduino")
arduino.setBoardMega()
arduino.connect("COM7")
arduino1 = Runtime.createAndStart("arduino1","Arduino")
arduino1.setBoardAtmega328()
# connect arduino1 through arduino's Serial1 instead of a COM port
arduino1.connect(arduino,"Serial1")
servo = Runtime.createAndStart("servo","Servo")
servo.attach(arduino1,5)
# the attach procedure takes a bit more time; wait a little before using the servo
sleep(1)
servo.moveTo(90)
| apache-2.0 | Python |
|
c760c3387b6dcf5bd171960a3e64306c7f2519d0 | add a rotating colored triangle | gopro/gopro-lib-node.gl,gopro/gopro-lib-node.gl,gopro/gopro-lib-node.gl,gopro/gopro-lib-node.gl | pynodegl-utils/pynodegl_utils/examples/misc.py | pynodegl-utils/pynodegl_utils/examples/misc.py | import math
from pynodegl import Texture, Shader, TexturedShape, Rotate, AnimKeyFrameScalar, Triangle
from pynodegl_utils.misc import scene
@scene()
def triangle(cfg):
frag_data = '''
#version 100
precision mediump float;
varying vec2 var_tex0_coords;
void main(void)
{
vec2 c = var_tex0_coords;
gl_FragColor = vec4(c.y-c.x, 1.0-c.y, c.x, 1.0);
}'''
a = 0.5
b = a * math.sqrt(3) / 2.0
c = a * 1/2.
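    # (0, a), (b, -c), (-b, -c) are the vertices of an equilateral triangle
    # centered at the origin with circumradius a.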
triangle = Triangle((0, a, 0), (b, -c, 0), (-b, -c, 0))
s = Shader(fragment_data=frag_data)
node = TexturedShape(triangle, s, Texture())
node = Rotate(node, axis=(0,0,1))
node.add_animkf(AnimKeyFrameScalar(0, 0),
AnimKeyFrameScalar(cfg.duration, -360*2))
return node
| apache-2.0 | Python |
|
3e9fc08e096ddb212cf40a285887b7ed5dd8897b | Fix running coverage for nose tests (PY-14869) | semonte/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,FHannes/intellij-community,semonte/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,semonte/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,apixandru/intellij-community,da1z/intellij-community,xfournet/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,idea4bsd/idea4bsd,xfournet/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,apixandru/intellij-community,allotria/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,apixandru/intellij-community,ibinti/intellij-community,da1z/intellij-community,allotria/intellij-community,ibinti/intellij-community,apixandru/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,da1z/intellij-community,FHannes/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,FHannes/intellij-community,allotria/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,signed/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,da1z/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,da1z/intellij-community,apixandru/intellij-community,allotria/intellij-community,signed/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,asedunov/intellij-community,semonte/intellij-community,suncycheng/intellij-community,da1z/intellij-community,allotria/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,apixandru/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,allotria/intellij-community,signed/intellij-community,semonte/intellij-community,apixandru/intellij-community,apixandru/intellij-community,signed/intellij-community,semonte/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,signed/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,signed/intellij-community,xfournet/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,asedunov/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,asedunov/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,allotria/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,signed/intellij-community,asedunov/intellij-community,FHannes/intellij-community,semonte/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,semonte/intellij-community,youdonghai/intellij-community,da1z/intellij-community,vvv1559/intellij-community,signed/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,asedunov/intellij-community,da1z/intellij-community,semonte/intellij-community,allotria/intellij-community,xfournet/intellij-community,FHannes/intellij-community,xfournet/intellij-community,FHannes/intellij-community,da1z/intellij-community,FHannes/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,signed/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,ibinti/intellij-community,semonte/intellij-community,ibinti/intellij-community,asedunov/intellij-community,ibinti/intellij-community,signed/intellij-community,mglukhikh/intellij-community,signed/intellij-community,FHannes/intellij-community,allotria/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community | python/helpers/coverage_runner/run_coverage.py | python/helpers/coverage_runner/run_coverage.py | """Coverage.py's main entrypoint."""
import os
import sys
bundled_coverage_path = os.getenv('BUNDLED_COVERAGE_PATH')
if bundled_coverage_path:
sys_path_backup = sys.path
sys.path = [p for p in sys.path if p != bundled_coverage_path]
from coverage.cmdline import main
sys.path = sys_path_backup
else:
from coverage.cmdline import main
coverage_file = os.getenv('PYCHARM_COVERAGE_FILE')
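# PY-14869: strip the ".coverage" suffix so the per-purpose extensions (".coverage", ".syspath.txt", ".xml") below share one base name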
coverage_file = coverage_file[0:-len(".coverage")]
run_cov = os.getenv('PYCHARM_RUN_COVERAGE')
if os.getenv('CREATE_TEMP_COVERAGE_FILE'):
line = 'LOG: PyCharm: File mapping:%s\t%s\n'
import tempfile
(h, new_cov_file) = tempfile.mkstemp(prefix='pycharm-coverage')
print(line%(coverage_file + ".coverage", new_cov_file + ".coverage"))
print(line%(coverage_file + '.syspath.txt', new_cov_file + '.syspath.txt'))
print(line%(coverage_file + '.xml', new_cov_file + '.xml'))
coverage_file = new_cov_file + ".cov"
if coverage_file:
os.environ['COVERAGE_FILE'] = coverage_file + ".coverage"
if run_cov:
a_file = open(coverage_file + '.syspath.txt', mode='w')
a_file.write(os.getcwd()+"\n")
for path in sys.path: a_file.write(path + "\n")
a_file.close()
argv = []
for arg in sys.argv:
if arg.startswith('-m'):
argv.append('-m')
argv.append(arg[2:])
else:
argv.append(arg)
sys.argv = argv
cwd = os.getcwd()
try:
main()
finally:
if run_cov:
os.chdir(cwd)
main(["xml", "-o", coverage_file + ".xml", "--ignore-errors"]) | """Coverage.py's main entrypoint."""
import os
import sys
bundled_coverage_path = os.getenv('BUNDLED_COVERAGE_PATH')
if bundled_coverage_path:
sys_path_backup = sys.path
sys.path = [p for p in sys.path if p != bundled_coverage_path]
from coverage.cmdline import main
sys.path = sys_path_backup
else:
from coverage.cmdline import main
coverage_file = os.getenv('PYCHARM_COVERAGE_FILE')
run_cov = os.getenv('PYCHARM_RUN_COVERAGE')
if os.getenv('CREATE_TEMP_COVERAGE_FILE'):
line = 'LOG: PyCharm: File mapping:%s\t%s\n'
import tempfile
(h, new_cov_file) = tempfile.mkstemp(prefix='pycharm-coverage')
print(line%(coverage_file, new_cov_file))
print(line%(coverage_file + '.syspath.txt', new_cov_file + '.syspath.txt'))
print(line%(coverage_file + '.xml', new_cov_file + '.xml'))
coverage_file = new_cov_file
if coverage_file:
os.environ['COVERAGE_FILE'] = coverage_file
if run_cov:
a_file = open(coverage_file + '.syspath.txt', mode='w')
a_file.write(os.getcwd()+"\n")
for path in sys.path: a_file.write(path + "\n")
a_file.close()
argv = []
for arg in sys.argv:
if arg.startswith('-m'):
argv.append('-m')
argv.append(arg[2:])
else:
argv.append(arg)
sys.argv = argv
cwd = os.getcwd()
main()
if run_cov:
os.chdir(cwd)
main(["xml", "-o", coverage_file + ".xml", "--ignore-errors"]) | apache-2.0 | Python |
fd5ba7ad61a8c7c9aad6b3f1404d819ae21085d1 | Add 'calc_pb_flux.py' to calculate the particle background | liweitianux/chandra-acis-analysis,liweitianux/chandra-acis-analysis,liweitianux/chandra-acis-analysis | bin/calc_pb_flux.py | bin/calc_pb_flux.py | #!/usr/bin/env python3
#
# Copyright (c) 2017 Weitian LI <liweitianux@live.com>
# MIT license
"""
Calculate the particle background flux (e.g., 9.5-12.0 keV) of the spectra.
flux = counts / exposure / area
where 'counts' is the total photon counts within the specified energy range;
'area' is the value of the ``BACKSCAL`` stored in the spectrum.
therefore, the output flux has arbitrary unit.
"""
import argparse
from _context import acispy
from acispy.spectrum import Spectrum
def main():
parser = argparse.ArgumentParser(
description="Calculate the particle background for spectra")
parser.add_argument("-L", "--energy-low", dest="elow",
type=int, default=9500,
help="lower energy limit of the particle " +
"background [eV] (default: 9500 eV)")
parser.add_argument("-H", "--energy-high", dest="ehigh",
type=int, default=12000,
help="upper energy limit of the particle " +
"background [eV] (default: 12000 eV)")
parser.add_argument("-v", "--verbose", dest="verbose", action="store_true",
help="show verbose information")
parser.add_argument("infile", nargs="+",
help="input spectra")
args = parser.parse_args()
for f in args.infile:
print("=== %s ===" % f)
spec = Spectrum(f)
flux = spec.calc_pb_flux(elow=args.elow, ehigh=args.ehigh,
verbose=args.verbose)
print("flux = %.5g" % flux)
if __name__ == "__main__":
main()
| mit | Python |
|
6fb6e67792085b6ee910f1d0b8ed3e89f15dd60d | add script to datamine the reports via nltk | Smelly-London/Smelly-London,Smelly-London/Smelly-London,mgrazebrook/smelly_london,mgrazebrook/smelly_london,Smelly-London/datavisualisation,Smelly-London/Smelly-London,Smelly-London/Smelly-London,Smelly-London/datavisualisation | smelly_london/all_reports_smell_search_final.py | smelly_london/all_reports_smell_search_final.py |
from map import mapping
# walk through the os and get all files
# read each file in tern and go through line by line
# print lines that contain smell and the report name
from os import listdir
import nltk.data
import json
SMELL_WORDS = ['smell', 'stench', 'stink', 'odour', 'sniff', 'effluvium']
REPORTS_DIR = '/Users/deborah/Documents/scripts/python_work/project2016/Full Text Online'
global finalResult
finalResult = {}
def addToDic(d, report, rDate, val):
    d.setdefault(report, []).append(val)
return d
def getFileNames():
'''Retrieve file names'''
fileNames = [f for f in listdir(REPORTS_DIR) if f.endswith('txt')]
return fileNames
def processFile(fileName):
path = REPORTS_DIR + '/' + fileName
references = []
with open(path) as f:
for line in f:
report_tokenized = tokenize(line)
            for sentence in report_tokenized:
                for word in SMELL_WORDS:
                    if word in sentence.lower():
                        references.append(sentence)
return references
def tokenize(sentence):
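    '''Split a line of text into sentences using NLTK's punkt model.'''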
parser = nltk.data.load('tokenizers/punkt/english.pickle')
result = parser.tokenize(sentence.strip())
return result
def saveObject(results):
'''Save results dictionary as file'''
with open('processed_results.txt', 'w') as outfile:
json.dump(results, outfile)
def performAnalysis(fileName, references):
    '''Create the results output'''
# splits a fileName into :['Acton', '1900', 'b19783358', 'txt']
splitReport = fileName.split('.')
bID = splitReport[2]
year = splitReport[1]
try:
region = mapping[bID]
    except KeyError:
return
# print bID
if region in finalResult:
nestedDic = finalResult[region]
else:
nestedDic = {}
nestedDic[year] = references
finalResult[region] = nestedDic
# if nestedDic[splitReport[1]]:
# val = nestedDic[splitReport[1]]
# nestedDic[splitReport[1]] = len(references) + val
# else:
# if len(references):
# nestedDic[splitReport[1]] = len(references)
# # nestedDic.setDefault(splitReport[1], 0).__add__(len(references))
# result[region] = nestedDic
# print(result)
# for k,v in result.iteritems():
def main():
# tokenize(s)
fileNames = getFileNames()
# f1 = fileNames[0]
# processFile(f1)
fileNames = fileNames[:100]
for f in fileNames:
references = processFile(f)
if references:
performAnalysis(f, references)
saveObject(finalResult)
if __name__ == '__main__':
main() | apache-2.0 | Python |
|
5b31e63043e3c3652f751d4a85e6bcdf925f797e | Create q3.py | pollseed/script_lib,pollseed/script_lib,pollseed/script_lib,pollseed/script_lib,pollseed/python_script_lib,pollseed/python_script_lib,pollseed/python_script_lib,pollseed/script_lib,pollseed/python_script_lib,pollseed/python_script_lib,pollseed/python_script_lib,pollseed/script_lib | work/q3.py | work/q3.py | def fibonacci_number(n, m, count):
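    # Recursively prints the first 11 Fibonacci numbers (counts 0 through 10).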
if count <= 10:
print(n, end=" ")
return fibonacci_number(m, n + m, count + 1)
fibonacci_number(0, 1, 0)
| mit | Python |
|
0383796cb681404e6c4794f1321ad62a9945b572 | add script to output all leagues of users | ChristopherIMeyers/ATZ-ReplayFlair,ChristopherIMeyers/ATZ-ReplayFlair | checkLeagues.py | checkLeagues.py | import settings
import funcs
accountMaps = funcs.readAccountsFile("accounts.txt")
def getLeagueForAccountMap(accountMap):
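    # Look up the account's league from its region and battle.net id, paired with the reddit name.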
league = funcs.getLeague(settings.regions[accountMap['region']], accountMap['bnet'])
return (accountMap['redditName'], league)
newLeagues = map(getLeagueForAccountMap, accountMaps)
print newLeagues
| mit | Python |
|
c20cde04d1a5a2939e7f5c0953725fd043c5b849 | add media migration | praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo | molo/core/migrations/0067_media_migration.py | molo/core/migrations/0067_media_migration.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def convert_media_to_molo_media(apps, schema_editor):
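    # Copy each wagtailmedia Media row into the custom MoloMedia model, then repoint article body blocks at the new ids.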
from molo.core.models import MoloMedia, ArticlePage
from wagtailmedia.models import Media
for media in Media.objects.all():
new_media = MoloMedia.objects.create(
title=media.title, file=media.file, duration=media.duration,
type=media.type, width=media.width,
height=media.height, thumbnail=media.thumbnail)
for article in ArticlePage.objects.all():
for block in article.body:
                if block.block_type == 'media' and block.value == media.id:
                    block.value = new_media.id
article.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0066_add_custom_media_model'),
]
operations = [
migrations.RunPython(convert_media_to_molo_media),
]
| bsd-2-clause | Python |
|
77b6c86359376af5eb8de63ae89d9316776b26bc | Add missing migration | rapidpro/tracpro,rapidpro/tracpro,rapidpro/tracpro | tracpro/polls/migrations/0034_auto_20170323_1315.py | tracpro/polls/migrations/0034_auto_20170323_1315.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('polls', '0033_auto_20170307_1338'),
]
operations = [
migrations.AlterField(
model_name='pollrun',
name='region',
field=models.ForeignKey(blank=True, to='groups.Region', help_text='Panel where the poll was conducted.', null=True, verbose_name='panel'),
),
]
| bsd-3-clause | Python |
|
255c7ff91bc4918ce13d32cba2b871e3d0befad8 | revert that url change | SeedScientific/polio,SeedScientific/polio,SeedScientific/polio,unicef/polio,unicef/polio,unicef/polio,SeedScientific/polio,unicef/polio,unicef/rhizome,unicef/rhizome,unicef/rhizome,SeedScientific/polio,unicef/rhizome | polio/urls.py | polio/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'polio.views.home', name='home'),
url(r'^datapoints/', include('datapoints.app_urls.urls', namespace="datapoints")),
url(r'^datapoints/indicators/', include('datapoints.app_urls.indicator_urls', namespace="indicators")),
url(r'^datapoints/regions/', include('datapoints.app_urls.region_urls', namespace="regions")),
url(r'^admin/', include(admin.site.urls)),
)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'polio.views.home', name='home'),
url(r'^uf04/datapoints/', include('datapoints.app_urls.urls', namespace="datapoints")),
url(r'^uf04/datapoints/indicators/', include('datapoints.app_urls.indicator_urls', namespace="indicators")),
url(r'^uf04/datapoints/regions/', include('datapoints.app_urls.region_urls', namespace="regions")),
url(r'^uf04/admin/', include(admin.site.urls)),
)
| agpl-3.0 | Python |
5f12ada7fe0ddb44274e18decbaea0d05ab4471f | Solve Code Fights lineup problem | HKuz/Test_Code | CodeFights/lineUp.py | CodeFights/lineUp.py | #!/usr/local/bin/python
# Code Fights Lineup Problem
def lineUp(commands):
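    # L/A/R adjust a running turn counter by +1/0/-1; the rank counts as aligned after a command when the total is even.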
aligned, tmp = 0, 0
com_dict = {"L": 1, "A": 0, "R": -1}
for c in commands:
tmp += com_dict[c]
if tmp % 2 == 0:
aligned += 1
return aligned
def main():
tests = [
["LLARL", 3],
["RLR", 1],
["", 0],
["L", 0],
["A", 1],
["AAAAAAAAAAAAAAA", 15],
["RRRRRRRRRRLLLLLLLLLRRRRLLLLLLLLLL", 16],
["AALAAALARAR", 5]
]
for t in tests:
res = lineUp(t[0])
ans = t[1]
if ans == res:
print("PASSED: lineUp({}) returned {}"
.format(t[0], res))
else:
print("FAILED: lineUp({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
| mit | Python |
|
8deb0dc2743d1d85899cb636b88ed831c05838a9 | Make machine action button translatable | fieldOfView/Cura,ynotstartups/Wanhao,ynotstartups/Wanhao,fieldOfView/Cura,hmflash/Cura,Curahelper/Cura,hmflash/Cura,Curahelper/Cura | DiscoverUM3Action.py | DiscoverUM3Action.py | from cura.MachineAction import MachineAction
from UM.Application import Application
from PyQt5.QtCore import pyqtSignal, pyqtProperty, pyqtSlot
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
class DiscoverUM3Action(MachineAction):
def __init__(self):
super().__init__("DiscoverUM3Action", catalog.i18nc("@action","Connect via Network"))
self._qml_url = "DiscoverUM3Action.qml"
self._network_plugin = None
printerDetected = pyqtSignal()
@pyqtSlot()
def startDiscovery(self):
if not self._network_plugin:
self._network_plugin = Application.getInstance().getOutputDeviceManager().getOutputDevicePlugin("JediWifiPrintingPlugin")
self._network_plugin.addPrinterSignal.connect(self._onPrinterAdded)
self.printerDetected.emit()
def _onPrinterAdded(self, *args):
self.printerDetected.emit()
@pyqtProperty("QVariantList", notify = printerDetected)
def foundDevices(self):
if self._network_plugin:
printers = self._network_plugin.getPrinters()
return [printers[printer] for printer in printers]
else:
return []
@pyqtSlot(str)
def setKey(self, key):
global_container_stack = Application.getInstance().getGlobalContainerStack()
if global_container_stack:
if "key" in global_container_stack.getMetaData():
global_container_stack.setMetaDataEntry("key", key)
else:
global_container_stack.addMetaDataEntry("key", key)
if self._network_plugin:
# Ensure that the connection states are refreshed.
self._network_plugin.reCheckConnections()
| from cura.MachineAction import MachineAction
from UM.Application import Application
from PyQt5.QtCore import pyqtSignal, pyqtProperty, pyqtSlot
class DiscoverUM3Action(MachineAction):
def __init__(self):
super().__init__("DiscoverUM3Action", "Discover printers")
self._qml_url = "DiscoverUM3Action.qml"
self._network_plugin = None
printerDetected = pyqtSignal()
@pyqtSlot()
def startDiscovery(self):
if not self._network_plugin:
self._network_plugin = Application.getInstance().getOutputDeviceManager().getOutputDevicePlugin("JediWifiPrintingPlugin")
self._network_plugin.addPrinterSignal.connect(self._onPrinterAdded)
self.printerDetected.emit()
def _onPrinterAdded(self, *args):
self.printerDetected.emit()
@pyqtProperty("QVariantList", notify = printerDetected)
def foundDevices(self):
if self._network_plugin:
printers = self._network_plugin.getPrinters()
return [printers[printer] for printer in printers]
else:
return []
@pyqtSlot(str)
def setKey(self, key):
global_container_stack = Application.getInstance().getGlobalContainerStack()
if global_container_stack:
if "key" in global_container_stack.getMetaData():
global_container_stack.setMetaDataEntry("key", key)
else:
global_container_stack.addMetaDataEntry("key", key)
if self._network_plugin:
# Ensure that the connection states are refreshed.
self._network_plugin.reCheckConnections()
| agpl-3.0 | Python |
c486b8df5861fd883b49ea8118d40d73f5b4e7b8 | Add download apikey test case | pansapiens/mytardis,pansapiens/mytardis,pansapiens/mytardis,pansapiens/mytardis | tardis/tardis_portal/tests/test_download_apikey.py | tardis/tardis_portal/tests/test_download_apikey.py | # -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.test import TestCase
from tastypie.test import ResourceTestCase
from django.test.client import Client
from django.conf import settings
from django.contrib.auth.models import User
class ApiKeyDownloadTestCase(ResourceTestCase):
def setUp(self):
# create a test user
self.username = 'test'
self.email = 'test@example.com'
self.password = 'passw0rd'
self.user = User.objects.create_user(username=self.username,
email=self.email,
password=self.password)
def tearDown(self):
self.user.delete()
def testView(self):
download_api_key_url = reverse('tardis.tardis_portal.views.download_api_key')
client = Client()
# Expect redirect to login
response = client.get(download_api_key_url)
self.assertEqual(response.status_code, 302)
# Login as user
login = client.login(username=self.username, password=self.password)
self.assertTrue(login)
response = client.get(download_api_key_url)
self.assertEqual(response['Content-Disposition'],
'inline; filename="{0}.key"'.format(self.username))
self.assertEqual(response.status_code, 200)
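        # The key is served as a streaming response, so assemble the chunks before comparing.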
response_content = ""
for c in response.streaming_content:
response_content += c
        self.assertEqual(response_content,
                         self.create_apikey(username=self.username,
                                            api_key=self.user.api_key.key))
| bsd-3-clause | Python |
|
601636b75595031ef9478297f9a52132a9bff9eb | Add herwig3 (#19406) | iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/herwig3/package.py | var/spack/repos/builtin/packages/herwig3/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import shutil
class Herwig3(AutotoolsPackage):
"""Herwig is a multi-purpose particle physics event generator."""
homepage = "https://herwig.hepforge.org"
url = "https://herwig.hepforge.org/downloads/Herwig-7.2.1.tar.bz2"
version('7.2.1', sha256='d4fff32f21c5c08a4b2e563c476b079859c2c8e3b78d853a8a60da96d5eea686')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
depends_on('lhapdf', type='link')
depends_on('thepeg@2.2.1', when='@7.2.1', type='link')
depends_on('boost', type='link')
depends_on('python', type=('build', 'run'))
depends_on('gsl', type='link')
depends_on('fastjet', type='link')
depends_on('vbfnlo@3:', type='link')
depends_on('madgraph5amc', type='link')
depends_on('njet', type='link')
depends_on('py-gosam', type='link', when='^python@2.7:2.7.99')
depends_on('gosam-contrib', type='link')
depends_on('openloops', type='link')
force_autoreconf = True
def autoreconf(self, spec, prefix):
autoreconf('--install', '--verbose', '--force')
@run_before('build')
def install_lhapdfsets(self):
mkdirp(self.prefix.tmppdfsets)
lhapdf = which('lhapdf')
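        # Fetch the PDF sets needed during the build; remove_lhapdfsets() deletes them again after install.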
if self.spec.satisfies('@7.2.0:'):
lhapdf("--pdfdir=" + self.prefix.tmppdfsets,
# "--source=/cvmfs/sft.cern.ch/lcg/external/lhapdfsets/current",
# "--listdir=/cvmfs/sft.cern.ch/lcg/external/lhapdfsets/current",
"install", "MHT2014lo68cl", "MMHT2014nlo68cl",
"CT14lo", "CT14nlo")
def configure_args(self):
args = ['--with-gsl=' + self.spec['gsl'].prefix,
'--with-thepeg=' + self.spec['thepeg'].prefix,
'--with-thepeg-headers=' + self.spec['thepeg'].prefix.include,
'--with-fastjet=' + self.spec['fastjet'].prefix,
'--with-boost=' + self.spec['boost'].prefix,
'--with-madgraph=' + self.spec['madgraph5amc'].prefix,
'--with-openloops=' + self.spec['openloops'].prefix,
'--with-gosam-contrib=' + self.spec['gosam-contrib'].prefix,
'--with-njet=' + self.spec['njet'].prefix,
'--with-vbfnlo=' + self.spec['vbfnlo'].prefix]
if self.spec.satisfies('^python@2.7:2.7.99'):
args.append('--with-gosam=' + self.spec['gosam'].prefix)
return args
def flag_handler(self, name, flags):
if name == 'fcflags':
flags.append('-std=legacy')
return (None, flags, None)
elif name in ['cflags', 'cxxflags', 'cppflags']:
return (None, flags, None)
return (flags, None, None)
def setup_build_environment(self, env):
thepeg_home = self.spec['thepeg'].prefix
env.prepend_path('LD_LIBRARY_PATH', thepeg_home.lib.ThePEG)
env.set('LHAPDF_DATA_PATH', self.prefix.tmppdfsets)
env.set('HERWIGINCLUDE', '-I' + self.prefix.include)
env.set('BOOSTINCLUDE', '-I' + self.spec['boost'].prefix.include)
env.set('HERWIGINSTALL', self.prefix)
def build(self, spec, prefix):
make()
with working_dir('MatrixElement/FxFx'):
make()
def install(self, spec, prefix):
make('install')
with working_dir('MatrixElement/FxFx'):
make('install')
@run_after('install')
def remove_lhapdfsets(self):
shutil.rmtree(self.prefix.tmppdfsets)
| lgpl-2.1 | Python |
|
af67d052fc78e56ac7f934f4c90f00d2eb097bb3 | Add StarFinder tests | astropy/photutils,larrybradley/photutils | photutils/detection/tests/test_starfinder.py | photutils/detection/tests/test_starfinder.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for StarFinder.
"""
from astropy.modeling.models import Gaussian2D
from astropy.tests.helper import catch_warnings
import numpy as np
import pytest
from ..starfinder import StarFinder
from ...datasets import make_100gaussians_image
from ...utils.exceptions import NoDetectionsWarning
try:
import scipy # noqa
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
DATA = make_100gaussians_image()
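# A 25x25 elliptical Gaussian evaluated on a pixel grid serves as the PSF kernel for every test.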
y, x = np.mgrid[0:25, 0:25]
g = Gaussian2D(1, 12, 12, 3, 2, theta=np.pi / 6.)
PSF = g(x, y)
@pytest.mark.skipif('not HAS_SCIPY')
class TestStarFinder:
def test_starfind(self):
finder1 = StarFinder(10, PSF)
finder2 = StarFinder(30, PSF)
tbl1 = finder1(DATA)
tbl2 = finder2(DATA)
assert len(tbl1) > len(tbl2)
def test_inputs(self):
with pytest.raises(ValueError):
StarFinder(10, PSF, min_separation=-1)
with pytest.raises(ValueError):
StarFinder(10, PSF, brightest=-1)
with pytest.raises(ValueError):
StarFinder(10, PSF, brightest=3.1)
def test_nosources(self):
with catch_warnings(NoDetectionsWarning) as warning_lines:
finder = StarFinder(100, PSF)
tbl = finder(DATA)
assert tbl is None
assert 'No sources were found.' in str(warning_lines[0].message)
def test_min_separation(self):
finder1 = StarFinder(10, PSF, min_separation=0)
finder2 = StarFinder(10, PSF, min_separation=50)
tbl1 = finder1(DATA)
tbl2 = finder2(DATA)
assert len(tbl1) > len(tbl2)
def test_peakmax(self):
finder1 = StarFinder(10, PSF, peakmax=None)
finder2 = StarFinder(10, PSF, peakmax=50)
tbl1 = finder1(DATA)
tbl2 = finder2(DATA)
assert len(tbl1) > len(tbl2)
with catch_warnings(NoDetectionsWarning) as warning_lines:
starfinder = StarFinder(10, PSF, peakmax=5)
tbl = starfinder(DATA)
assert tbl is None
assert ('Sources were found, but none pass'
in str(warning_lines[0].message))
def test_brightest(self):
finder = StarFinder(10, PSF, brightest=10)
tbl = finder(DATA)
assert len(tbl) == 10
fluxes = tbl['flux']
assert fluxes[0] == np.max(fluxes)
finder = StarFinder(40, PSF, peakmax=120)
tbl = finder(DATA)
assert len(tbl) == 1
def test_mask(self):
starfinder = StarFinder(10, PSF)
mask = np.zeros(DATA.shape, dtype=bool)
mask[0:100] = True
tbl1 = starfinder(DATA)
tbl2 = starfinder(DATA, mask=mask)
assert len(tbl1) > len(tbl2)
assert min(tbl2['ycentroid']) > 100
| bsd-3-clause | Python |
|
72203e529f083cbc9427b02348cc178e4443031c | Add new package: libuser (#18916) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/libuser/package.py | var/spack/repos/builtin/packages/libuser/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Libuser(AutotoolsPackage):
"""A user and group account administration library."""
homepage = "https://pagure.io/libuser"
url = "http://releases.pagure.org/libuser/libuser-0.62.tar.xz"
version('0.62', sha256='a58ff4fabb01a25043b142185a33eeea961109dd60d4b40b6a9df4fa3cace20b')
version('0.61', sha256='0a114a52446e12781e2ffdf26f59df0d14e7809c7db5e551d3cf61c4e398751d')
version('0.60', sha256='b1f73408ebfee79eb01a47c5879a2cdef6a00b75ee24870de7df1b816ff483eb')
depends_on('glib')
depends_on('linux-pam')
depends_on('popt')
def setup_run_environment(self, env):
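        # The administration tools land under sbin/, which is not on PATH by default.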
env.prepend_path('PATH', self.prefix.sbin)
| lgpl-2.1 | Python |
|
ac09970129df9c5292344287b04a1be143fac681 | add diag openmp | ratnania/pyccel,ratnania/pyccel | tests/examples/openmp/diagnostics.py | tests/examples/openmp/diagnostics.py | # coding: utf-8
import numpy as np
from matplotlib import pyplot as plt
def matrix_product():
procs = [1, 4, 8, 16, 28]
    times = [1194.849, 305.231, 69.174, 37.145, 22.731]
n_groups = len(procs)
# ...
fig, ax = plt.subplots()
index = np.arange(n_groups)
bar_width = 0.2
opacity = 0.4
rects1 = plt.bar(index, times, bar_width,
alpha=opacity,
color='b',
label='OpenMP')
plt.xlabel('Number of Processors')
plt.ylabel('CPU time')
plt.title('Weak scaling')
labels = [str(i) for i in procs]
plt.xticks(index + bar_width / 2, labels)
plt.legend()
plt.tight_layout()
plt.savefig("matrix_product_scalability.png")
plt.clf()
# ...
# ...
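    # Speedup of each parallel run relative to the single-processor time.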
speedup = [times[0]/b for b in times[1:]]
n_groups = len(speedup)
fig, ax = plt.subplots()
index = np.arange(n_groups)
bar_width = 0.2
opacity = 0.4
rects1 = plt.bar(index, speedup, bar_width,
alpha=opacity,
color='b',
label='OpenMP')
plt.xlabel('Number of Processors')
plt.ylabel('Speedup')
plt.title('Speedup')
labels = [str(i) for i in procs[1:]]
plt.xticks(index + bar_width / 2, labels)
plt.legend()
plt.tight_layout()
plt.savefig("matrix_product_speedup.png")
plt.clf()
# ...
matrix_product()
| mit | Python |
|
3da13d9597b49a7d929dd84806d1c10b99cf8bea | Create yadisk.py | haitaka/DroiTaka | cogs/utils/api/yadisk.py | cogs/utils/api/yadisk.py | import json
import requests
__version__ = '0.1.2-dev'
USER_AGENT = 'pycopy/{}'.format(__version__)
BASE_URL = 'https://api.copy.com'
AUTH_URL = BASE_URL + '/auth_user' # TODO: should use /rest
OBJECTS_URL = BASE_URL + '/list_objects' # TODO: should use /rest
DOWNLOAD_URL = BASE_URL + '/download_object' # TODO: should use /rest
class Copy(object):
def __init__(self, username, password):
self.session = requests.session()
self.session.headers.update({'X-Client-Type': 'api',
'X-Api-Version': '1',
'User-Agent': USER_AGENT, })
self.authenticate(username, password)
def _get(self, url, *args, **kwargs):
return self.session.get(url, *args, **kwargs)
def _post(self, url, data, *args, **kwargs):
return self.session.post(url, {'data': json.dumps(data), }, *args,
**kwargs)
def authenticate(self, username, password):
response = self._post(AUTH_URL,
{'username': username, 'password': password, })
json_response = response.json()
if 'auth_token' not in json_response:
raise ValueError("Error while authenticating")
self.user_data = json_response
self.auth_token = json_response['auth_token']
self.session.headers.update({'X-Authorization': self.auth_token, })
def list_files(self, dir_path):
file_list = []
list_wtrmark = False
while (True):
response = self._post(OBJECTS_URL, {'path': dir_path, 'list_watermark': list_wtrmark, })
for file in response.json()['children']:
if file['type'] == 'file':
file_list.append(file['path'].split("/")[-1])
#print(file_list[-1])
list_wtrmark = response.json()['list_watermark']
#print(list_wtrmark)
#print(response.json())
if (response.json()['more_items'] == '0'):
#print('break')
break
return file_list
def direct_link(self, file_path):
object_url = BASE_URL + '/rest/meta/copy/' + file_path
response = self.session.get(object_url)
return response.json()['url']
def get_file(self, file_path):
url = self.direct_link(file_path)
r = self._post(DOWNLOAD_URL, {'path': file_path}, stream=True)
r.raw.decode_content = True
return r.raw
def dwnload_file(self, file_path):
url = self.direct_link(file_path)
local_filename = "tmp_uploads/" + url.split('/')[-1]
r = self._get(url, stream=True)
with open(local_filename, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
#f.flush() #commented by recommendation from J.F.Sebastian
return local_filename
def get_headers_str(self):
headers_str = ""
for key, value in self.session.headers.items():
headers_str += "{}: {}\r\n".format(key, value)
return headers_str
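# Minimal usage sketch (the credentials and paths below are placeholders, not real values):
#   client = Copy('user@example.com', 'secret')
#   print(client.list_files('/backups'))
#   client.dwnload_file('/backups/report.pdf')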
| mit | Python |
|
65f6f78008d4f961c9ebe5d8047b0f2c742fe15f | Add unittest for QInputDialog.getXXX() methods | RobinD42/pyside,RobinD42/pyside,enthought/pyside,BadSingleton/pyside2,M4rtinK/pyside-bb10,pankajp/pyside,BadSingleton/pyside2,M4rtinK/pyside-android,IronManMark20/pyside2,pankajp/pyside,BadSingleton/pyside2,pankajp/pyside,gbaty/pyside2,RobinD42/pyside,qtproject/pyside-pyside,M4rtinK/pyside-android,M4rtinK/pyside-bb10,M4rtinK/pyside-bb10,gbaty/pyside2,M4rtinK/pyside-android,M4rtinK/pyside-bb10,M4rtinK/pyside-bb10,gbaty/pyside2,IronManMark20/pyside2,BadSingleton/pyside2,qtproject/pyside-pyside,pankajp/pyside,IronManMark20/pyside2,pankajp/pyside,enthought/pyside,enthought/pyside,RobinD42/pyside,M4rtinK/pyside-bb10,gbaty/pyside2,enthought/pyside,qtproject/pyside-pyside,RobinD42/pyside,IronManMark20/pyside2,qtproject/pyside-pyside,PySide/PySide,M4rtinK/pyside-android,PySide/PySide,RobinD42/pyside,PySide/PySide,enthought/pyside,enthought/pyside,IronManMark20/pyside2,PySide/PySide,BadSingleton/pyside2,gbaty/pyside2,M4rtinK/pyside-android,enthought/pyside,RobinD42/pyside,PySide/PySide,qtproject/pyside-pyside,M4rtinK/pyside-android | tests/qtgui/qinputdialog_get_test.py | tests/qtgui/qinputdialog_get_test.py | import unittest
from PySide import QtCore, QtGui
from helper import UsesQApplication, TimedQApplication
class TestInputDialog(TimedQApplication):
def testGetDouble(self):
QtGui.QInputDialog.getDouble(None, "title", "label")
def testGetInt(self):
QtGui.QInputDialog.getInt(None, "title", "label")
def testGetInteger(self):
QtGui.QInputDialog.getInteger(None, "title", "label")
def testGetItem(self):
QtGui.QInputDialog.getItem(None, "title", "label", QtCore.QStringList(["1", "2", "3"]))
def testGetText(self):
QtGui.QInputDialog.getText(None, "title", "label")
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | Python |
|
52189e2161e92b36df47a04c2150dff38f81f5e9 | Add mocked tests for activation | pombredanne/viewflow,ribeiro-ucl/viewflow,codingjoe/viewflow,codingjoe/viewflow,pombredanne/viewflow,viewflow/viewflow,viewflow/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow,codingjoe/viewflow,ribeiro-ucl/viewflow | tests/unit/tests/test_activations.py | tests/unit/tests/test_activations.py | from unittest import mock
from django.test import TestCase
from viewflow import activation, flow
from viewflow.models import Task
class TestActivations(TestCase):
def test_start_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.Start())
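        # spec= limits the mock to the real flow object's attribute surface.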
act = activation.StartActivation()
act.initialize(flow_task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
act.process.start.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
def test_view_activation_activate(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
prev_activation_mock = mock.Mock(spec=activation.StartActivation())
act = activation.ViewActivation.activate(flow_task_mock, prev_activation_mock)
act.task.save.assert_has_calls(())
def test_view_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
task_mock = mock.Mock(spec=Task())
act = activation.ViewActivation()
act.initialize(flow_task_mock, task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
| agpl-3.0 | Python |
|
e0df929e07e30c514b2b39f515bfd3102d1ebfe7 | Add annotate experiment | barry-scott/git-workbench,barry-scott/git-workbench,barry-scott/scm-workbench,barry-scott/scm-workbench,barry-scott/scm-workbench | Source/Git/Experiments/git_annotate.py | Source/Git/Experiments/git_annotate.py | #!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
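# GitPython's Repo.blame() returns (commit, lines) pairs, one per contiguous blame hunk.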
num = 0
for info in r.blame( 'HEAD', sys.argv[2] ):
num += 1
commit = info[0]
all_lines = info[1]
print( '%s %6d:%s' % (commit, num, all_lines[0]) )
for line in all_lines[1:]:
num += 1
print( '%*s %6d:%s' % (40, '', num, line) )
| apache-2.0 | Python |
|
51d581c7bca0fcacf8604b898f96394847865e15 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/e1e64a45b138980a6d8c125bacc81f22142d2b53. | tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,karllessard/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "e1e64a45b138980a6d8c125bacc81f22142d2b53"
TFRT_SHA256 = "5afd4500e88c75188e29e68273438b849d57d800ed982bbe292325148ad3e016"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "0dcdcc3f57a96bc354e66f3805dff4f619e2b93f"
TFRT_SHA256 = "940edcaf656cbbfee314689fd7e52aaa02bd07197bd4139f24aec64eee74c7a8"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
5dd31aa3cfacb6bd157d50ac3d310b8064a46b80 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/5f6e52142a3592d0cfa058dbfd140cad49ed451a. | yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "5f6e52142a3592d0cfa058dbfd140cad49ed451a"
TFRT_SHA256 = "8e1efbd7df0fdeb5186b178d7c8b90c33ba80cef54999e988097bd1ff0f4e8fe"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "736eeebfb56c6d0de138f4a29286140d8c26d927"
TFRT_SHA256 = "b584ee5ce5ecaadf289b0997987dfb5eec6cf3623f30b83028923cad20914e61"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
0c13207eeda65754532bab5888cc33693fb06834 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/b87ea071c60db54775b92da8e0eed8477ab96a6a. | tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "b87ea071c60db54775b92da8e0eed8477ab96a6a"
TFRT_SHA256 = "61b8951d9236a82c54be8db871cd427013ec24ae17b0e681829a634e4f0388b3"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "75318fbce7817886508abd18dd5ea3b35d552372"
TFRT_SHA256 = "233d123e6287e105acb2b464db68b753624dfe5c27f299ff6b2dbe29ef40e9e3"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| apache-2.0 | Python |
1eb980caefcbaaa4b29f7c3d92f27e490003e208 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/9562f24de39c95b4a076f7e0a0eb79cb980a9c72. | tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,karllessard/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "9562f24de39c95b4a076f7e0a0eb79cb980a9c72"
TFRT_SHA256 = "6fda4b556e5100e83ba292b8907c82f152740bb9eb157dc64e9c01ed2c4536e8"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "de22adc4126843c3cf142e0a829d153dc94cdd73"
TFRT_SHA256 = "e345d2ae1d385ebaf41531c831bb1025cab260fe20daa5b6024c1d07c1ebfd0c"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
a73671995e1c5b920f5f93226c7bf3e7501a7448 | Add test for GoogLeNet | ronekko/deep_metric_learning | tests/test_googlenet.py | tests/test_googlenet.py | import unittest
import numpy
from chainer import cuda
from chainer import testing
from chainer.testing import attr
from chainer.variable import Variable
from .. import googlenet
@unittest.skipUnless(googlenet.available, 'Pillow is required')
@attr.slow
class TestGoogLeNet(unittest.TestCase):
def setUp(self):
self.link = googlenet.GoogLeNet(pretrained_model=None)
def test_available_layers(self):
result = self.link.available_layers
self.assertIsInstance(result, list)
self.assertEqual(len(result), 19)
def check_call(self):
xp = self.link.xp
x1 = Variable(xp.asarray(numpy.random.uniform(
-1, 1, (1, 3, 224, 224)).astype(numpy.float32)))
y1 = cuda.to_cpu(self.link(x1)['prob'].data)
self.assertEqual(y1.shape, (1, 1000))
def test_call_cpu(self):
self.check_call()
@attr.gpu
def test_call_gpu(self):
self.link.to_gpu()
self.check_call()
def test_prepare(self):
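        # prepare() should return a float32 CHW image, resized to 224x224 unless size=None.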
x1 = numpy.random.uniform(0, 255, (320, 240, 3)).astype(numpy.uint8)
x2 = numpy.random.uniform(0, 255, (320, 240)).astype(numpy.uint8)
x3 = numpy.random.uniform(0, 255, (160, 120, 3)).astype(numpy.float32)
x4 = numpy.random.uniform(0, 255, (1, 160, 120)).astype(numpy.float32)
x5 = numpy.random.uniform(0, 255, (3, 160, 120)).astype(numpy.uint8)
y1 = googlenet.prepare(x1)
self.assertEqual(y1.shape, (3, 224, 224))
self.assertEqual(y1.dtype, numpy.float32)
y2 = googlenet.prepare(x2)
self.assertEqual(y2.shape, (3, 224, 224))
self.assertEqual(y2.dtype, numpy.float32)
y3 = googlenet.prepare(x3, size=None)
self.assertEqual(y3.shape, (3, 160, 120))
self.assertEqual(y3.dtype, numpy.float32)
y4 = googlenet.prepare(x4)
self.assertEqual(y4.shape, (3, 224, 224))
self.assertEqual(y4.dtype, numpy.float32)
y5 = googlenet.prepare(x5, size=None)
self.assertEqual(y5.shape, (3, 160, 120))
self.assertEqual(y5.dtype, numpy.float32)
def check_extract(self):
x1 = numpy.random.uniform(0, 255, (320, 240, 3)).astype(numpy.uint8)
x2 = numpy.random.uniform(0, 255, (320, 240)).astype(numpy.uint8)
result = self.link.extract([x1, x2], layers=['pool5', 'loss3_fc'])
self.assertEqual(len(result), 2)
y1 = cuda.to_cpu(result['pool5'].data)
self.assertEqual(y1.shape, (2, 1024, 1, 1))
self.assertEqual(y1.dtype, numpy.float32)
y2 = cuda.to_cpu(result['loss3_fc'].data)
self.assertEqual(y2.shape, (2, 1000))
self.assertEqual(y2.dtype, numpy.float32)
x3 = numpy.random.uniform(0, 255, (80, 60)).astype(numpy.uint8)
result = self.link.extract([x3], layers=['pool1'], size=None)
self.assertEqual(len(result), 1)
y3 = cuda.to_cpu(result['pool1'].data)
self.assertEqual(y3.shape, (1, 64, 20, 15))
self.assertEqual(y3.dtype, numpy.float32)
def test_extract_cpu(self):
self.check_extract()
@attr.gpu
def test_extract_gpu(self):
self.link.to_gpu()
self.check_extract()
def check_predict(self):
x1 = numpy.random.uniform(0, 255, (320, 240, 3)).astype(numpy.uint8)
x2 = numpy.random.uniform(0, 255, (320, 240)).astype(numpy.uint8)
result = self.link.predict([x1, x2], oversample=False)
y = cuda.to_cpu(result.data)
self.assertEqual(y.shape, (2, 1000))
self.assertEqual(y.dtype, numpy.float32)
result = self.link.predict([x1, x2], oversample=True)
y = cuda.to_cpu(result.data)
self.assertEqual(y.shape, (2, 1000))
self.assertEqual(y.dtype, numpy.float32)
def test_predict_cpu(self):
self.check_predict()
@attr.gpu
def test_predict_gpu(self):
self.link.to_gpu()
self.check_predict()
testing.run_module(__name__, __file__)
| mit | Python |
|
fd01a25c0f5cb9ba75b2a659d47d1d3902242c5e | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/c3e082762b7664bbc7ffd2c39e86464928e27c0c. | Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,karllessard/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "c3e082762b7664bbc7ffd2c39e86464928e27c0c"
TFRT_SHA256 = "9b7fabe6e786e6437bb7cd1a4bed8416da6f08969266e57945805017092900c6"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "5a604f55b0d725eb537fd1a7cb6a88fcc6fd9b73"
TFRT_SHA256 = "004f312a2c65165e301b101add213013603c8822e479b4be63e2f95a3f972ebd"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
06cfa4c7055ec997dcb3aec11732ee1be5330b75 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/285e48bc47db23a479637fd1e2767b9a35dc2c9b. | yongtang/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,karllessard/tensorflow,yongtang/tensorflow,yongtang/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,tensorflow/tensorflow-experime
ntal_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "285e48bc47db23a479637fd1e2767b9a35dc2c9b"
TFRT_SHA256 = "6f0067d0cb7bb407caeef060603b6e33f1231cddf1ce4ce2ebce027dc418764f"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "962d1c7a123f01ccdb39e0d1959794f432b0ffeb"
TFRT_SHA256 = "ce0f2f86d19850e8951514b0e3f76950d07a8dc79d053de3d7a4cf402389351a"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
| apache-2.0 | Python |
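A minimal sketch of how such a pin bump can be reproduced by hand, recomputing TFRT_SHA256 for a new TFRT_COMMIT (the TensorFlow tooling that parses the "Attention" lines is internal, so this workflow is an assumption):
import hashlib
import urllib.request

commit = "c3e082762b7664bbc7ffd2c39e86464928e27c0c"
url = "https://github.com/tensorflow/runtime/archive/%s.tar.gz" % commit
# hash the release tarball exactly as tf_http_archive will download it
sha256 = hashlib.sha256(urllib.request.urlopen(url).read()).hexdigest()
print(commit, sha256)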
27cb9279670bd513a1559f4865500d84869bb9f0 | Test module for Predictor class. | exord/pyboas | tests/test_predictor.py | tests/test_predictor.py | #! /usr/env/bin python
import numpy as np
from pyboas import predictor, models
# Build random 3-parameter normal posterior.
posterior = np.random.randn(100, 3)
def toy_model(param, time):
time = np.atleast_1d(time)[:, np.newaxis]
a = param[:, 0]
b = param[:, 1]
c = param[:, 2]
return a*time**2 + b*time + c
def test_basic_shape():
"""Test basic shape conditions on output of predictions."""
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.make_prediction(time)
# Test shape of predictive distributions and x
assert pred1.x.shape == pred1.predictives.shape
# Test len of time array and predictives
assert len(time) == len(pred1.predictives)
return
def test_time_concatenation():
"""
Test feature to concatenate prediction times over make_prediction calls.
"""
    # Build a random time array
time = np.random.rand(4,)
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred2 = predictor.GaussPredictor(posterior, toy_model)
# Run first predictor with full time array
pred1.make_prediction(time)
# Run second predictor twice
pred2.make_prediction(time[:2])
pred2.make_prediction(time[2:])
assert np.allclose(pred1.predictives, pred2.predictives)
assert np.allclose(pred1.x, pred2.x)
return
def test_sample_draw():
    # Build a random time array
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.samplepredictive(time, 100)
def ok():
print('\033[92mOK\033[0m')
def failed():
print('\033[91mFAILED\033[0m')
def test_all():
print('Testing basic functioning....\t'),
try:
test_basic_shape()
ok()
except AssertionError:
failed()
print('Testing time concatenation....\t'),
try:
test_time_concatenation()
ok()
except AssertionError:
failed()
return
if __name__ == '__main__':
test_all()
| mit | Python |
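The ok()/failed() helpers above repeat one try/except per test; a small runner factored once would behave the same (a sketch, not part of the commit):
def report(label, test_fn):
    print('Testing %s....\t' % label),
    try:
        test_fn()
        ok()
    except AssertionError:
        failed()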
|
0b443cab974a0d0ce58a2cb4fdd68c7992377eb8 | add chisquare test comparing random sample with cdf (first try of commit) | efiring/scipy,sonnyhu/scipy,njwilson23/scipy,nvoron23/scipy,WarrenWeckesser/scipy,jakevdp/scipy,ndchorley/scipy,zaxliu/scipy,josephcslater/scipy,endolith/scipy,sriki18/scipy,mingwpy/scipy,jjhelmus/scipy,chatcannon/scipy,mdhaber/scipy,Stefan-Endres/scipy,futurulus/scipy,fernand/scipy,endolith/scipy,matthew-brett/scipy,behzadnouri/scipy,matthewalbani/scipy,scipy/scipy,dominicelse/scipy,futurulus/scipy,trankmichael/scipy,jseabold/scipy,kleskjr/scipy,cpaulik/scipy,maniteja123/scipy,perimosocordiae/scipy,hainm/scipy,ndchorley/scipy,zxsted/scipy,felipebetancur/scipy,zxsted/scipy,apbard/scipy,haudren/scipy,ilayn/scipy,mortonjt/scipy,mortonjt/scipy,minhlongdo/scipy,minhlongdo/scipy,sargas/scipy,andim/scipy,aman-iitj/scipy,andyfaff/scipy,tylerjereddy/scipy,nvoron23/scipy,person142/scipy,kalvdans/scipy,anielsen001/scipy,rmcgibbo/scipy,gfyoung/scipy,fredrikw/scipy,apbard/scipy,gdooper/scipy,apbard/scipy,efiring/scipy,pnedunuri/scipy,ogrisel/scipy,raoulbq/scipy,trankmichael/scipy,mortada/scipy,fredrikw/scipy,sonnyhu/scipy,anntzer/scipy,pbrod/scipy,lukauskas/scipy,piyush0609/scipy,zxsted/scipy,Shaswat27/scipy,lukauskas/scipy,nonhermitian/scipy,endolith/scipy,jamestwebber/scipy,gef756/scipy,jor-/scipy,matthewalbani/scipy,mtrbean/scipy,mgaitan/scipy,piyush0609/scipy,scipy/scipy,anntzer/scipy,rmcgibbo/scipy,fernand/scipy,nonhermitian/scipy,minhlongdo/scipy,Stefan-Endres/scipy,rmcgibbo/scipy,apbard/scipy,sargas/scipy,FRidh/scipy,aarchiba/scipy,WillieMaddox/scipy,ortylp/scipy,rgommers/scipy,larsmans/scipy,endolith/scipy,jamestwebber/scipy,juliantaylor/scipy,Dapid/scipy,Dapid/scipy,WillieMaddox/scipy,sriki18/scipy,aeklant/scipy,ilayn/scipy,mtrbean/scipy,ales-erjavec/scipy,perimosocordiae/scipy,ilayn/scipy,argriffing/scipy,Eric89GXL/scipy,nvoron23/scipy,jjhelmus/scipy,anielsen001/scipy,andyfaff/scipy,niknow/scipy,kalvdans/scipy,felipebetancur/scipy,rgommers/scipy,sriki18/scipy,newemailjdm/scipy,ilayn/scipy,futurulus/scipy,Newman101/scipy,matthew-brett/scipy,jsilter/scipy,argriffing/scipy,futurulus/scipy,gfyoung/scipy,vberaudi/scipy,zerothi/scipy,vigna/scipy,piyush0609/scipy,behzadnouri/scipy,juliantaylor/scipy,lhilt/scipy,fredrikw/scipy,mhogg/scipy,aman-iitj/scipy,befelix/scipy,mortonjt/scipy,dch312/scipy,gdooper/scipy,dch312/scipy,ogrisel/scipy,lukauskas/scipy,pschella/scipy,giorgiop/scipy,njwilson23/scipy,surhudm/scipy,rmcgibbo/scipy,piyush0609/scipy,woodscn/scipy,efiring/scipy,e-q/scipy,surhudm/scipy,Kamp9/scipy,trankmichael/scipy,mtrbean/scipy,jonycgn/scipy,jor-/scipy,Shaswat27/scipy,kalvdans/scipy,pbrod/scipy,dominicelse/scipy,gef756/scipy,perimosocordiae/scipy,jseabold/scipy,Kamp9/scipy,aeklant/scipy,mhogg/scipy,vigna/scipy,mikebenfield/scipy,richardotis/scipy,mdhaber/scipy,FRidh/scipy,jsilter/scipy,scipy/scipy,maniteja123/scipy,Shaswat27/scipy,witcxc/scipy,behzadnouri/scipy,surhudm/scipy,jsilter/scipy,larsmans/scipy,lukauskas/scipy,Eric89GXL/scipy,maniteja123/scipy,vhaasteren/scipy,maciejkula/scipy,teoliphant/scipy,endolith/scipy,Gillu13/scipy,raoulbq/scipy,arokem/scipy,felipebetancur/scipy,jonycgn/scipy,felipebetancur/scipy,aman-iitj/scipy,pschella/scipy,gertingold/scipy,nmayorov/scipy,niknow/scipy,mtrbean/scipy,andyfaff/scipy,vhaasteren/scipy,gdooper/scipy,mortonjt/scipy,niknow/scipy,gertingold/scipy,dominicelse/scipy,gfyoung/scipy,nvoron23/scipy,cpaulik/scipy,jamestwebber/scipy,fernand/scipy,fernand/scipy,fredrikw/scipy,sauliusl/scipy,sauliusl/scipy,mikebenfield/scipy,surhudm/scipy,m
dhaber/scipy,behzadnouri/scipy,ChanderG/scipy,matthew-brett/scipy,andim/scipy,raoulbq/scipy,pyramania/scipy,pnedunuri/scipy,bkendzior/scipy,vanpact/scipy,ndchorley/scipy,cpaulik/scipy,arokem/scipy,chatcannon/scipy,pbrod/scipy,Dapid/scipy,pschella/scipy,josephcslater/scipy,larsmans/scipy,kleskjr/scipy,futurulus/scipy,mtrbean/scipy,ortylp/scipy,jakevdp/scipy,sargas/scipy,gdooper/scipy,WillieMaddox/scipy,pnedunuri/scipy,sauliusl/scipy,Dapid/scipy,gef756/scipy,WarrenWeckesser/scipy,vhaasteren/scipy,bkendzior/scipy,niknow/scipy,pnedunuri/scipy,lhilt/scipy,ChanderG/scipy,scipy/scipy,tylerjereddy/scipy,gdooper/scipy,richardotis/scipy,kleskjr/scipy,ales-erjavec/scipy,rgommers/scipy,felipebetancur/scipy,mortonjt/scipy,njwilson23/scipy,vhaasteren/scipy,Srisai85/scipy,jjhelmus/scipy,gef756/scipy,petebachant/scipy,mgaitan/scipy,anntzer/scipy,lhilt/scipy,teoliphant/scipy,maniteja123/scipy,ndchorley/scipy,petebachant/scipy,futurulus/scipy,nvoron23/scipy,gef756/scipy,ortylp/scipy,matthewalbani/scipy,vberaudi/scipy,gertingold/scipy,WarrenWeckesser/scipy,FRidh/scipy,haudren/scipy,anielsen001/scipy,Dapid/scipy,andyfaff/scipy,vanpact/scipy,vigna/scipy,giorgiop/scipy,haudren/scipy,andyfaff/scipy,zerothi/scipy,aman-iitj/scipy,Eric89GXL/scipy,zxsted/scipy,cpaulik/scipy,Newman101/scipy,witcxc/scipy,mikebenfield/scipy,matthew-brett/scipy,Gillu13/scipy,Shaswat27/scipy,Newman101/scipy,matthewalbani/scipy,kalvdans/scipy,dominicelse/scipy,mingwpy/scipy,aeklant/scipy,grlee77/scipy,juliantaylor/scipy,niknow/scipy,behzadnouri/scipy,ogrisel/scipy,bkendzior/scipy,chatcannon/scipy,maciejkula/scipy,ilayn/scipy,Shaswat27/scipy,jor-/scipy,person142/scipy,andim/scipy,sriki18/scipy,zxsted/scipy,perimosocordiae/scipy,fernand/scipy,larsmans/scipy,perimosocordiae/scipy,pyramania/scipy,matthew-brett/scipy,mgaitan/scipy,pizzathief/scipy,pnedunuri/scipy,fredrikw/scipy,aarchiba/scipy,vanpact/scipy,petebachant/scipy,befelix/scipy,zaxliu/scipy,maciejkula/scipy,lukauskas/scipy,pbrod/scipy,Eric89GXL/scipy,fernand/scipy,teoliphant/scipy,pschella/scipy,nmayorov/scipy,woodscn/scipy,bkendzior/scipy,mortada/scipy,Stefan-Endres/scipy,vberaudi/scipy,vberaudi/scipy,pyramania/scipy,Srisai85/scipy,haudren/scipy,njwilson23/scipy,mgaitan/scipy,newemailjdm/scipy,jseabold/scipy,Srisai85/scipy,maciejkula/scipy,person142/scipy,jonycgn/scipy,larsmans/scipy,ales-erjavec/scipy,piyush0609/scipy,tylerjereddy/scipy,Stefan-Endres/scipy,witcxc/scipy,jseabold/scipy,nmayorov/scipy,mikebenfield/scipy,ortylp/scipy,jakevdp/scipy,newemailjdm/scipy,aarchiba/scipy,josephcslater/scipy,piyush0609/scipy,Dapid/scipy,pizzathief/scipy,Eric89GXL/scipy,ChanderG/scipy,richardotis/scipy,juliantaylor/scipy,mortada/scipy,njwilson23/scipy,jakevdp/scipy,sargas/scipy,lukauskas/scipy,zerothi/scipy,sauliusl/scipy,njwilson23/scipy,mingwpy/scipy,behzadnouri/scipy,ChanderG/scipy,mortada/scipy,efiring/scipy,hainm/scipy,zxsted/scipy,pyramania/scipy,befelix/scipy,grlee77/scipy,Srisai85/scipy,mortonjt/scipy,arokem/scipy,hainm/scipy,larsmans/scipy,mikebenfield/scipy,Kamp9/scipy,giorgiop/scipy,arokem/scipy,nonhermitian/scipy,argriffing/scipy,e-q/scipy,ales-erjavec/scipy,WillieMaddox/scipy,grlee77/scipy,ales-erjavec/scipy,vberaudi/scipy,maniteja123/scipy,juliantaylor/scipy,anntzer/scipy,FRidh/scipy,giorgiop/scipy,mgaitan/scipy,nonhermitian/scipy,josephcslater/scipy,argriffing/scipy,ndchorley/scipy,person142/scipy,e-q/scipy,chatcannon/scipy,sonnyhu/scipy,hainm/scipy,minhlongdo/scipy,mingwpy/scipy,WarrenWeckesser/scipy,newemailjdm/scipy,sargas/scipy,ChanderG/scipy,gertingold/scipy,nmayorov/scipy,dom
inicelse/scipy,andim/scipy,trankmichael/scipy,mortada/scipy,Gillu13/scipy,dch312/scipy,ortylp/scipy,person142/scipy,sriki18/scipy,mingwpy/scipy,Kamp9/scipy,arokem/scipy,zaxliu/scipy,minhlongdo/scipy,Kamp9/scipy,gfyoung/scipy,Newman101/scipy,argriffing/scipy,rmcgibbo/scipy,sonnyhu/scipy,trankmichael/scipy,vigna/scipy,sauliusl/scipy,FRidh/scipy,jsilter/scipy,rgommers/scipy,sauliusl/scipy,mhogg/scipy,aarchiba/scipy,anielsen001/scipy,nmayorov/scipy,sonnyhu/scipy,petebachant/scipy,rmcgibbo/scipy,hainm/scipy,witcxc/scipy,befelix/scipy,Srisai85/scipy,ChanderG/scipy,lhilt/scipy,ndchorley/scipy,giorgiop/scipy,mdhaber/scipy,pschella/scipy,matthewalbani/scipy,aman-iitj/scipy,chatcannon/scipy,anntzer/scipy,woodscn/scipy,petebachant/scipy,rgommers/scipy,jamestwebber/scipy,ortylp/scipy,raoulbq/scipy,surhudm/scipy,cpaulik/scipy,FRidh/scipy,chatcannon/scipy,mhogg/scipy,vanpact/scipy,scipy/scipy,apbard/scipy,niknow/scipy,jonycgn/scipy,jakevdp/scipy,zerothi/scipy,maniteja123/scipy,jamestwebber/scipy,Eric89GXL/scipy,jseabold/scipy,vberaudi/scipy,ilayn/scipy,kleskjr/scipy,teoliphant/scipy,sonnyhu/scipy,jor-/scipy,vanpact/scipy,maciejkula/scipy,teoliphant/scipy,giorgiop/scipy,WarrenWeckesser/scipy,lhilt/scipy,andyfaff/scipy,mdhaber/scipy,Gillu13/scipy,ales-erjavec/scipy,jonycgn/scipy,nonhermitian/scipy,vanpact/scipy,jonycgn/scipy,minhlongdo/scipy,cpaulik/scipy,bkendzior/scipy,andim/scipy,pizzathief/scipy,witcxc/scipy,woodscn/scipy,richardotis/scipy,Kamp9/scipy,sriki18/scipy,efiring/scipy,andim/scipy,hainm/scipy,endolith/scipy,zaxliu/scipy,WillieMaddox/scipy,Stefan-Endres/scipy,WillieMaddox/scipy,aeklant/scipy,haudren/scipy,woodscn/scipy,raoulbq/scipy,pbrod/scipy,jseabold/scipy,befelix/scipy,gfyoung/scipy,dch312/scipy,zerothi/scipy,vhaasteren/scipy,Gillu13/scipy,haudren/scipy,grlee77/scipy,felipebetancur/scipy,tylerjereddy/scipy,zaxliu/scipy,perimosocordiae/scipy,newemailjdm/scipy,anielsen001/scipy,zerothi/scipy,grlee77/scipy,raoulbq/scipy,Newman101/scipy,nvoron23/scipy,ogrisel/scipy,petebachant/scipy,jjhelmus/scipy,Stefan-Endres/scipy,argriffing/scipy,efiring/scipy,fredrikw/scipy,mortada/scipy,kleskjr/scipy,mdhaber/scipy,vigna/scipy,vhaasteren/scipy,trankmichael/scipy,newemailjdm/scipy,richardotis/scipy,e-q/scipy,mingwpy/scipy,kalvdans/scipy,richardotis/scipy,gertingold/scipy,aeklant/scipy,aarchiba/scipy,mhogg/scipy,Shaswat27/scipy,josephcslater/scipy,woodscn/scipy,zaxliu/scipy,jjhelmus/scipy,ogrisel/scipy,mgaitan/scipy,jsilter/scipy,WarrenWeckesser/scipy,pizzathief/scipy,kleskjr/scipy,mtrbean/scipy,pyramania/scipy,mhogg/scipy,gef756/scipy,dch312/scipy,e-q/scipy,pbrod/scipy,pizzathief/scipy,anntzer/scipy,aman-iitj/scipy,Gillu13/scipy,Srisai85/scipy,surhudm/scipy,Newman101/scipy,tylerjereddy/scipy,scipy/scipy,jor-/scipy,pnedunuri/scipy,anielsen001/scipy | scipy/stats/tests/test_discrete_chisquare.py | scipy/stats/tests/test_discrete_chisquare.py |
import numpy as np
from scipy import stats
debug = False
def check_discrete_chisquare(distname, arg, alpha = 0.01):
'''perform chisquare test for random sample of a discrete distribution
Parameters
----------
distname : string
name of distribution function
arg : sequence
parameters of distribution
alpha : float
significance level, threshold for p-value
Returns
-------
    None
        an AssertionError is raised if the chisquare test fails
uses global variable debug for printing results
'''
# define parameters for test
n=50000
nsupp = 20
wsupp = 1.0/nsupp
distfn = getattr(stats, distname)
rvs = distfn.rvs(size=n,*arg)
# construct intervals with minimum mass 1/nsupp
    # intervals are left-half-open as in a cdf difference
distsupport = xrange(max(distfn.a, -1000), min(distfn.b, 1000) + 1)
last = 0
distsupp = [max(distfn.a, -1000)]
distmass = []
for ii in distsupport:
current = distfn.cdf(ii,*arg)
if current - last >= wsupp-1e-14:
distsupp.append(ii)
distmass.append(current - last)
last = current
if current > (1-wsupp):
break
if distsupp[-1] < distfn.b:
distsupp.append(distfn.b)
distmass.append(1-last)
distsupp = np.array(distsupp)
distmass = np.array(distmass)
# convert intervals to right-half-open as required by histogram
histsupp = distsupp+1e-8
histsupp[0] = distfn.a
# find sample frequencies and perform chisquare test
freq,hsupp = np.histogram(rvs,histsupp,new=True)
cdfs = distfn.cdf(distsupp,*arg)
(chis,pval) = stats.chisquare(np.array(freq),n*distmass)
# print and return results
if debug:
print 'chis,pval:', chis, pval
print 'len(distsupp), len(distmass), len(hsupp), len(freq)'
print len(distsupp), len(distmass), len(hsupp), len(freq)
print 'distsupp', distsupp
print 'distmass', n*np.array(distmass)
print 'freq', freq
print 'itemfreq', stats.itemfreq(rvs)
print 'n*pmf', n*distfn.pmf(list(distsupport)[:10],*arg)
    assert (pval > alpha), 'chisquare - test for %s' \
           ' at arg = %s' % (distname, str(arg))
def test_discrete_rvs_cdf():
distdiscrete = [
['bernoulli',(0.3,)],
['binom', (5, 0.4)],
['boltzmann',(1.4, 19)],
['dlaplace', (0.8,)],
['geom', (0.5,)],
['hypergeom',(30, 12, 6)],
['logser', (0.6,)],
['nbinom', (5, 0.5)],
['planck', (4.1,)],
['poisson', (0.6,)],
['randint', (7, 31)],
['zipf', (2,)] ]
for distname, arg in distdiscrete:
if debug:
print distname
yield check_discrete_chisquare, distname, arg
if __name__ == '__main__':
import nose
nose.run(argv=['', __file__])
| bsd-3-clause | Python |
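The same frequency-versus-cdf comparison, reduced to one distribution with hand-picked bins, shows the idea compactly (a sketch against the public scipy.stats API; the bin edges are arbitrary choices):
import numpy as np
from scipy import stats

n = 50000
rvs = stats.poisson.rvs(0.6, size=n)
edges = np.array([-0.5, 0.5, 1.5, 1000.5])    # the last bin absorbs the tail mass
probs = np.diff(stats.poisson.cdf(edges, 0.6))
freq, _ = np.histogram(rvs, bins=edges)
chis, pval = stats.chisquare(freq, n * probs)
assert pval > 0.01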
|
7ac3540c2b49bcfd933fe1167f92a9b3c0cdf438 | Add a stub for matching boss catalogue. | legacysurvey/legacypipe,legacysurvey/legacypipe | py/legacyproduct/bin/match-boss-catalogue.py | py/legacyproduct/bin/match-boss-catalogue.py | #!/usr/bin/env python
from __future__ import print_function, division
import numpy as np
from legacyproduct.internal import sharedmem
import argparse
import os, sys
from time import time
from scipy.spatial import cKDTree as KDTree
import fitsio
def main():
ns = parse_args()
bricks = list_bricks(ns)
tree, boss = read_boss(ns.boss)
# convert to radian
    tol = ns.tolerance / (60. * 60. * 180) * np.pi
for brickname, path in bricks:
data = process(brickname, path, tree, boss, tol)
        destpath = os.path.join(ns.dest, os.path.relpath(path, ns.src))
save_file(destpath, data, {}, ns.format)
def process(brickname, path, tree, boss, tol):
objects = fitsio.read(path, 1, upper=True)
pos = radec2pos(objects['RA'], objects['DEC'])
    d, i = tree.query(pos, 1)    # cKDTree.query returns (distances, indices)
    mask = d < tol
    result = np.empty(len(objects), boss.dtype)
result[mask] = boss[i[mask]]
    result['SURVEY'][~mask] = 'N/A'    # index the field first so the assignment hits a view, not a copy
return result
def save_file(filename, data, header, format):
if format == 'fits':
fitsio.write(filename, data, extname='DECALS-BOSS', header=header, clobber=True)
elif format == 'hdf5':
import h5py
with h5py.File(filename, 'w') as ff:
dset = ff.create_dataset('DECALS-BOSS', data=data)
for key in header:
dset.attrs[key] = header[key]
else:
raise ValueError("Unknown format")
def radec2pos(ra, dec):
    pos = np.empty(len(ra), ('f4', 3))
    pos[:, 2] = np.sin(dec / 180. * np.pi)
    pos[:, 1] = np.cos(dec / 180. * np.pi)
    pos[:, 0] = pos[:, 1]
    pos[:, 0] *= np.sin(ra / 180. * np.pi)
    pos[:, 1] *= np.cos(ra / 180. * np.pi)
return pos
def read_boss(filename):
boss = fitsio.FITS(filename, upper=True)[1][:]
ra = boss['PLUG_RA']
dec = boss['PLUG_DEC']
pos = radec2pos(ra, dec)
tree = KDTree(pos)
return tree, boss
def list_bricks(ns):
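    # parse_filename() and iter_tractor() are assumed to come from shared
    # tractor-listing helpers elsewhere in the package; this stub does not
    # define them.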
t0 = time()
if ns.filelist is not None:
d = dict([(parse_filename(fn.strip()), fn.strip())
for fn in open(ns.filelist, 'r').readlines()])
else:
d = dict(iter_tractor(ns.src))
if ns.verbose:
print('enumerated %d bricks in %g seconds' % (
len(d), time() - t0))
#- Load list of bricknames to use
if ns.bricklist is not None:
bricklist = np.loadtxt(ns.bricklist, dtype='S8')
# TODO: skip unknown bricks?
d = dict([(brickname, d[brickname])
for brickname in bricklist])
t0 = time()
bricks = sorted(d.items())
return bricks
def parse_args():
ap = argparse.ArgumentParser(
description="""Match Boss Catalogue for DECALS.
        This will create a mirror of the tractor catalogue directories, but each file will only contain
        the corresponding objects in BOSS DR12.
"""
)
ap.add_argument("boss", help="BOSS DR12 catalogue. e.g. /global/project/projectdirs/cosmo/work/sdss/cats/specObj-dr12.fits")
ap.add_argument("src", help="Path to the root directory of all tractor files")
ap.add_argument("dest", help="Path to the root directory of output matched catalogue")
ap.add_argument('-f', "--format", choices=['fits', 'hdf5'], default="fits",
help="Format of the output sweep files")
    ap.add_argument('-t', "--tolerance", type=float, default=0.01,
help="Tolerance of the angular distance for a match, in arc-seconds")
ap.add_argument('-F', "--filelist", default=None,
help="list of tractor brickfiles to use; this will avoid expensive walking of the path.")
ap.add_argument('-b', "--bricklist",
help="""Filename with list of bricknames to include.
If not set, all bricks in src are included, sorted by brickname.
""")
ap.add_argument('-v', "--verbose", action='store_true')
ap.add_argument("--numproc", type=int, default=None,
help="""Number of concurrent processes to use. 0 for sequential execution.
Default is to use OMP_NUM_THREADS, or the number of cores on the node.""")
return ap.parse_args()
| bsd-3-clause | Python |
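The query above compares a Euclidean chord distance on the unit sphere against a tolerance given in radians; at arc-second scales the two agree to roughly one part in 1e12, since chord = 2*sin(theta/2). A one-line check (illustrative only):
import numpy as np
theta = np.radians(1.0 / 3600)     # one arc-second
chord = 2 * np.sin(theta / 2)
assert abs(chord - theta) / theta < 1e-9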
|
34d5b5cdc058f1c9055b82151b518251fa3b4f74 | Add tool to create combined smart contract files | tomashaber/raiden,hackaugusto/raiden,tomashaber/raiden,tomashaber/raiden,hackaugusto/raiden,tomashaber/raiden,tomashaber/raiden | tools/join-contracts.py | tools/join-contracts.py | import os
import click
import re
from click.types import File
IMPORT_RE = re.compile(r'^import +["\'](?P<contract>[^"\']+.sol)["\'];$')
"""
Utility to join solidity contracts into a single output file by recursively
resolving imports.
example usage:
$ cd raiden/smart_contracts
$ python ../../tools/join-contracts.py SomeContractWithImports.sol joined.sol
"""
class ContractJoiner(object):
def __init__(self):
self.have_pragma = False
self.seen = set()
def join(self, contract_file):
out = []
if contract_file.name in self.seen:
print('Skipping duplicate {}'.format(contract_file.name))
return []
self.seen.add(contract_file.name)
print('Reading {}'.format(contract_file.name))
for line in contract_file:
line = line.strip('\r\n')
stripped_line = line.strip()
if stripped_line.startswith('pragma'):
if not self.have_pragma:
self.have_pragma = True
out.append(line)
elif stripped_line.startswith('import'):
match = IMPORT_RE.match(stripped_line)
if match:
next_file = match.groupdict().get('contract')
if next_file and os.path.exists(next_file):
with open(next_file) as next_contract:
out.extend(self.join(next_contract))
else:
out.append(line)
return out
@click.command()
@click.argument('contract', type=File())
@click.argument('output', type=File('w'))
def main(contract, output):
output.write("\n".join(ContractJoiner().join(contract)))
if __name__ == '__main__':
main()
| mit | Python |
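A quick check of IMPORT_RE (illustrative; note the dot before "sol" is unescaped in the original pattern, so it matches any character in that position):
import re
IMPORT_RE = re.compile(r'^import +["\'](?P<contract>[^"\']+.sol)["\'];$')
m = IMPORT_RE.match('import "Token.sol";')
assert m and m.group('contract') == 'Token.sol'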
|
e06416a61826229ebd0cccdc519b6dc39d8a0fd9 | Add migration to remove models. | sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,salopensource/sal,salopensource/sal,sheagcraig/sal | server/migrations/0088_auto_20190304_1313.py | server/migrations/0088_auto_20190304_1313.py | # Generated by Django 2.1.4 on 2019-03-04 18:13
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0087_auto_20190301_1424'),
]
operations = [
migrations.AlterUniqueTogether(
name='installedupdate',
unique_together=set(),
),
migrations.RemoveField(
model_name='installedupdate',
name='machine',
),
migrations.RemoveField(
model_name='pendingappleupdate',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistory',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistory',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistoryitem',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistoryitem',
name='update_history',
),
migrations.DeleteModel(
name='InstalledUpdate',
),
migrations.DeleteModel(
name='PendingAppleUpdate',
),
migrations.DeleteModel(
name='UpdateHistory',
),
migrations.DeleteModel(
name='UpdateHistoryItem',
),
]
| apache-2.0 | Python |
|
04a4d7887664753f87d6ccd0921c87160d8ced26 | Create 002_gen.py | ys-nuem/project-euler,ys-nuem/project-euler | 002/002_gen.py | 002/002_gen.py | #!/usr/bin/env python
def fibonacci(n_max=4000000):
f1, f2 = 1, 1
while f2 <= n_max:
yield f2
f2 += f1
f1 = f2 - f1
answer = sum(f for f in fibonacci() if f % 2 == 0)
print(answer)
| mit | Python |
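Every third Fibonacci number is even, so the sum can also be accumulated without the parity test; a sketch of that variant (same result as the generator above):
def even_fib_sum(n_max=4000000):
    a, b, total = 1, 2, 0    # consecutive Fibonacci pair ending on an even term
    while b <= n_max:
        total += b
        a, b = a + 2 * b, 2 * a + 3 * b    # jump three terms ahead to the next even Fibonacci
    return total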
|
92f88fb9021094f1429f5175d01a354c4ad35880 | add initial gyp to build freetype lib (problems with cflags not showing up in xcode) | Cue/skia,metajack/skia,mrobinson/skia,Cue/skia,metajack/skia,Cue/skia,mrobinson/skia,mrobinson/skia,mrobinson/skia,metajack/skia,metajack/skia,mrobinson/skia,Cue/skia | gyp/freetype.gyp | gyp/freetype.gyp | {
# 'includes': [
# 'common.gypi',
# ],
'targets': [
{
'target_name': 'skfreetype',
'type': 'static_library',
'sources': [
'../third_party/freetype/src/base/ftbbox.c',
'../third_party/freetype/src/base/ftbitmap.c',
'../third_party/freetype/src/base/ftglyph.c',
'../third_party/freetype/src/base/ftlcdfil.c',
'../third_party/freetype/src/base/ftstroke.c',
'../third_party/freetype/src/base/ftxf86.c',
'../third_party/freetype/src/base/ftbase.c',
'../third_party/freetype/src/base/ftsystem.c',
'../third_party/freetype/src/base/ftinit.c',
'../third_party/freetype/src/base/ftgasp.c',
'../third_party/freetype/src/base/ftfstype.c',
'../third_party/freetype/src/raster/raster.c',
'../third_party/freetype/src/sfnt/sfnt.c',
'../third_party/freetype/src/smooth/smooth.c',
'../third_party/freetype/src/autofit/autofit.c',
'../third_party/freetype/src/truetype/truetype.c',
'../third_party/freetype/src/cff/cff.c',
'../third_party/freetype/src/psnames/psnames.c',
'../third_party/freetype/src/pshinter/pshinter.c',
# added for linker
'../third_party/freetype/src/lzw/ftlzw.c',
'../third_party/freetype/src/gzip/ftgzip.c',
'../third_party/freetype/src/cid/type1cid.c',
'../third_party/freetype/src/bdf/bdf.c',
'../third_party/freetype/src/psaux/psaux.c',
'../third_party/freetype/src/pcf/pcf.c',
'../third_party/freetype/src/pfr/pfr.c',
'../third_party/freetype/src/type1/type1.c',
'../third_party/freetype/src/type42/type42.c',
'../third_party/freetype/src/winfonts/winfnt.c',
],
'include_dirs': [
'../third_party/freetype/internal',
'../third_party/freetype/builds',
'../third_party/freetype/include',
'../third_party/freetype',
],
'cflags': [
'-W',
'-Wall',
'-fPIC',
'-DPIC',
'-DDARWIN_NO_CARBON',
'-DFT2_BUILD_LIBRARY',
],
'direct_dependent_settings': {
'include_dirs': [
'../third_party/freetype/include', # For ft2build.h
],
},
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
| bsd-3-clause | Python |
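On the problem noted in the subject line: gyp's Xcode generator ignores 'cflags', so the flags have to be mirrored into 'xcode_settings' to reach Xcode builds. A hedged sketch of the block that would typically be added to the target:
      'xcode_settings': {
        'OTHER_CFLAGS': [
          '-DDARWIN_NO_CARBON',
          '-DFT2_BUILD_LIBRARY',
        ],
      },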
|
e8efe8de59b32e7b78fcf801dccce36e7ec53768 | implement regular Kmeans | Totoketchup/das,Totoketchup/das | models/Kmeans_2.py | models/Kmeans_2.py | # -*- coding: utf-8 -*-
# My Model
from utils.ops import ops
import tensorflow as tf
import numpy as np
from sklearn.datasets import make_blobs
#############################################
# Deep Adaptive Separator Model #
#############################################
class KMeans:
def __init__(self, nb_clusters, nb_iterations=50, graph=None, input_tensor=None):
self.nb_clusters = nb_clusters
self.nb_iterations = nb_iterations
        if input_tensor is None:
self.graph = tf.Graph()
with self.graph.as_default():
# Spectrogram, embeddings
# shape = [batch, T*F , E ]
self.X = tf.placeholder("float", [None, None, None])
self.input_dim = tf.shape(self.X)[1]
begin = tf.random_uniform([], minval=0, maxval=self.input_dim-self.nb_clusters, dtype=tf.int32)
self.centroids = tf.identity(self.X[: , begin:begin+nb_clusters, :])
self.network
# Create a session for this model based on the constructed graph
self.sess = tf.Session(graph = self.graph)
else:
self.X = input_tensor
self.input_dim = tf.shape(self.X)[1]
begin = tf.random_uniform([], minval=0, maxval=self.input_dim-self.nb_clusters, dtype=tf.int32)
self.centroids = tf.identity(self.X[: , begin:begin+nb_clusters, :])
self.network
def init(self):
with self.graph.as_default():
self.sess.run(tf.global_variables_initializer())
@ops.scope
def network(self):
i = tf.constant(0)
cond = lambda i, m: tf.less(i, self.nb_iterations)
_ , self.centroids = tf.while_loop(cond, self.body,[i, self.centroids])
return self.centroids, self.get_labels(self.centroids, self.X)
def body(self ,i, centroids):
with tf.name_scope('iteration'):
# Checking the closest clusters
labels = self.get_labels(centroids, self.X)
# Creating the matrix equality [ B , S , TF], equality[: , s, :] = [labels == s](float32)
cluster_range = tf.range(0, tf.shape(centroids)[1])
equality = tf.map_fn(lambda r: tf.cast(tf.equal(labels, r), tf.float32), cluster_range, dtype=tf.float32)
equality = tf.transpose(equality, [1 , 0, 2])
new_centroids = tf.matmul(equality, self.X)/tf.reduce_sum(equality, axis=2, keep_dims=True)
return [i+1, new_centroids]
def get_labels(self, centroids, X):
centroids_ = tf.expand_dims(centroids, 1)
X_ = tf.expand_dims(X, 2)
return tf.argmin(tf.norm(X_ - centroids_, axis=3), axis=2, output_type=tf.int32)
def fit(self, X_train):
return self.sess.run(self.network, {self.X: X_train})
if __name__ == "__main__":
nb_samples = 10000
E = 2
nb_clusters = 2
# X1 = np.random.random_sample((nb_samples/2, E))
# X2 = np.random.random_sample((nb_samples/2, E)) + 2
# X = np.reshape(np.concatenate((X1,X2), axis=0), (1, nb_samples ,E))
# X = np.reshape(np.concatenate((X, X), axis=0), (2, nb_samples ,E))
# print X.shape
X, y = make_blobs(n_samples=nb_samples, centers=nb_clusters, n_features=E)
X = X[np.newaxis,:]
y = y[np.newaxis,:]
print y
kmean = KMeans(nb_clusters)
kmean.init()
centroids, labels = kmean.fit(X)
print labels
print y
if np.all((y-labels) == 0) or np.all((y+labels) == 1):
print 'OK'
| mit | Python |
|
66137a8710bf3b778c860af8d6278ee0c97bbab4 | Add script to delete unused users on JupyterHub | ryanlovett/datahub,berkeley-dsep-infra/datahub,ryanlovett/datahub,berkeley-dsep-infra/datahub,berkeley-dsep-infra/datahub,ryanlovett/datahub | scripts/delete-unused-users.py | scripts/delete-unused-users.py | #!/usr/bin/env python3
"""
Delete unused users from a JupyterHub.
JupyterHub performance sometimes scales with *total* number
of users, rather than running number of users. While that should
be fixed, we can work around it by deleting unused users once in
a while. This script will delete anyone who hasn't registered
any activity in a given period of time, double checking to
make sure they aren't active right now. This will require users to
log in again the next time they use the hub, but that's probably
ok.
"""
import argparse
from jhub_client.api import JupyterHubAPI
from dateutil.parser import parse
import asyncio
from datetime import timedelta, datetime
async def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'hub_url',
help='Fully qualified URL to the JupyterHub'
)
args = argparser.parse_args()
to_delete = []
async with JupyterHubAPI(hub_url=args.hub_url) as hub:
users = await hub.list_users()
for user in users:
            last_activity_str = user.get('last_activity', False)
            last_activity = None
            if last_activity_str:
                try:
                    last_activity = parse(last_activity_str)
                except:
                    print(last_activity_str)
                    raise
if last_activity and datetime.now().astimezone() - last_activity < timedelta(hours=24) and user['server'] is not None:
print(f"Not deleting {user['name']}")
else:
to_delete.append(user['name'])
print(f"Deleting {user['name']}")
for i, username in enumerate(to_delete):
print(f'{i+1} of {len(to_delete)}: deleting {username}')
await hub.delete_user(username)
if __name__ == '__main__':
asyncio.run(main())
| bsd-3-clause | Python |
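A hedged usage sketch; the assumption here is that jhub_client picks the admin API token up from the JUPYTERHUB_API_TOKEN environment variable:
# JUPYTERHUB_API_TOKEN=<admin token> python3 delete-unused-users.py https://hub.example.org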
|
ad6aa623bbd8f316ab7fb8c389d1c9c74b17ae8c | add util module for converting an update job into xml | sassoftware/rpath-repeater | rpath_repeater/utils/update_job_formatter.py | rpath_repeater/utils/update_job_formatter.py | #!/usr/bin/python
#
# Copyright (c) 2012 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
from xml.etree import cElementTree as etree
class Formatter(object):
__slots__ = [ 'jobs', 'root', 'changes' ]
def __init__(self, updateJob):
self.jobs = []
if updateJob is not None:
self.jobs = updateJob.getJobs()
self.root = None
self.changes = None
def format(self):
self.root = etree.Element('preview')
self.changes = etree.SubElement(self.root, 'conary_package_changes')
for oneJob in self.jobs:
for j in oneJob:
self._formatJob(j)
def toxml(self):
return etree.tostring(self.root)
def _formatJob(self, job):
(name, (oldVer, oldFla), (newVer, newFla)) = job[:3]
if oldVer is None:
self._formatInstall(name, newVer, newFla)
elif newVer is None:
self._formatErase(name, oldVer, oldFla)
else:
self._formatUpdate(name, oldVer, oldFla, newVer, newFla)
def _formatInstall(self, name, version, flavor):
node = self._newPackageChange('added')
self._packageSpec(node, 'added_conary_package', name, version, flavor)
def _formatErase(self, name, version, flavor):
node = self._newPackageChange('removed')
self._packageSpec(node, 'removed_conary_package', name, version, flavor)
def _formatUpdate(self, name, oldVersion, oldFlavor, newVersion, newFlavor):
node = self._newPackageChange('changed')
self._packageSpec(node, 'from', name, oldVersion, oldFlavor)
self._packageSpec(node, 'to', name, newVersion, newFlavor)
diff = etree.SubElement(node, 'conary_package_diff')
self._fieldDiff(diff, 'version', oldVersion, newVersion)
self._fieldDiff(diff, 'flavor', oldFlavor, newFlavor)
def _newPackageChange(self, type):
node = etree.SubElement(self.changes, 'conary_package_change')
etree.SubElement(node, 'type').text = type
return node
def _packageSpec(self, parent, tag, name, version, flavor):
node = etree.SubElement(parent, tag)
etree.SubElement(node, 'name').text = str(name)
etree.SubElement(node, 'version').text = str(version)
etree.SubElement(node, 'flavor').text = str(flavor)
return node
def _fieldDiff(self, parent, tag, oldValue, newValue):
if oldValue == newValue:
return
node = etree.SubElement(parent, tag)
etree.SubElement(node, 'from').text = str(oldValue)
etree.SubElement(node, 'to').text = str(newValue)
| apache-2.0 | Python |
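A minimal driving example for Formatter (the stub and its job tuple are hypothetical stand-ins for what updateJob.getJobs() yields):
class _StubUpdateJob(object):
    def __init__(self, jobs):
        self._jobs = jobs
    def getJobs(self):
        return self._jobs

jobs = [[('tmpwatch', (None, None), ('2.9.0-1-1', 'is: x86'), False)]]
f = Formatter(_StubUpdateJob(jobs))
f.format()
print(f.toxml())    # an <added_conary_package> entry inside <preview>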
|
7b14028f3796981974b6d01b98277326123c0395 | add get_flatpage template tag | mupi/timtec,mupi/timtec,mupi/timtec,AllanNozomu/tecsaladeaula,mupi/tecsaladeaula,hacklabr/timtec,GustavoVS/timtec,virgilio/timtec,AllanNozomu/tecsaladeaula,AllanNozomu/tecsaladeaula,mupi/tecsaladeaula,virgilio/timtec,GustavoVS/timtec,GustavoVS/timtec,mupi/tecsaladeaula,hacklabr/timtec,mupi/tecsaladeaula,AllanNozomu/tecsaladeaula,hacklabr/timtec,GustavoVS/timtec,hacklabr/timtec,virgilio/timtec,virgilio/timtec,mupi/timtec | core/templatetags/get_flatpage.py | core/templatetags/get_flatpage.py | from django import template
from django.core.exceptions import ObjectDoesNotExist
from django.conf import settings
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.models import get_current_site
register = template.Library()
class FlatpageNode(template.Node):
def __init__(self, context_name, url):
self.context_name = context_name
self.url = template.Variable(url)
def render(self, context):
if 'request' in context:
site_pk = get_current_site(context['request']).pk
else:
site_pk = settings.SITE_ID
try:
flatpage = FlatPage.objects.get(sites__id=site_pk, url=self.url.resolve(context))
except ObjectDoesNotExist:
flatpage = FlatPage(url=self.url.resolve(context))
context[self.context_name] = flatpage
return ''
@register.tag
def get_flatpage(parser, token):
"""
Retrieves the flatpage object for the specified url
Syntax::
        {% get_flatpage ['url'] as context_name %}
Example usage::
        {% get_flatpage '/about/' as about_page %}
"""
bits = token.split_contents()
syntax_message = ("%(tag_name)s expects a syntax of %(tag_name)s "
"['url'] as context_name" %
dict(tag_name=bits[0]))
    # Must have exactly 4 bits in the tag
if len(bits) == 4:
        # The second-to-last bit must be 'as'
if bits[-2] != 'as':
raise template.TemplateSyntaxError(syntax_message)
context_name = bits[-1]
url = bits[1]
return FlatpageNode(context_name, url)
else:
raise template.TemplateSyntaxError(syntax_message)
| agpl-3.0 | Python |
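Template-side usage of the tag (a sketch; it assumes the app that ships core/templatetags is in INSTALLED_APPS):
{% load get_flatpage %}
{% get_flatpage '/about/' as about_page %}
<h2>{{ about_page.title }}</h2>
{{ about_page.content }}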
|
b8a07ce36cfeb2679ace05b26d6adc1e525d6044 | Add feature computation module | starcalibre/microscopium,microscopium/microscopium,microscopium/microscopium,Don86/microscopium,Don86/microscopium,jni/microscopium,jni/microscopium | husc/features.py | husc/features.py | import functools as fun
import numpy as np
from scipy.stats.mstats import mquantiles
from scipy import ndimage as nd
from skimage import feature, color, io as imio, img_as_float, \
morphology as skmorph
from skimage import filter as imfilter, measure
def lab_hist(rgb_image, **kwargs):
return np.histogram(color.rgb2lab(rgb_image), **kwargs)
# threshold and labeling number of objects, statistics about object size and
# shape
def intensity_object_features(im, adaptive_t_radius=51):
"""Segment objects based on intensity threshold and compute properties.
Parameters
----------
im : 2D np.ndarray of float or uint8.
The input image.
adaptive_t_radius : int, optional
The radius to calculate background with adaptive threshold.
Returns
-------
f : 1D np.ndarray of float
The feature vector.
"""
tim1 = im > imfilter.threshold_otsu(im)
f1 = object_features(tim1, im)
tim2 = imfilter.threshold_adaptive(im, adaptive_t_radius)
f2 = object_features(tim2, im)
f = np.concatenate([f1, f2])
return f
def object_features(bin_im, im, erode=2):
"""Compute features about objects in a binary image.
Parameters
----------
bin_im : 2D np.ndarray of bool
The image of objects.
im : 2D np.ndarray of float or uint8
The actual image.
erode : int, optional
Radius of erosion of objects.
Returns
-------
f : 1D np.ndarray of float
The feature vector.
"""
selem = skmorph.disk(erode)
if erode > 0:
bin_im = nd.binary_erosion(bin_im, selem)
lab_im, n_objs = nd.label(bin_im)
if erode > 0:
lab_im = nd.grey_dilate(lab_im, footprint=selem)
feats = measure.regionprops(lab_im,
['Area', 'Eccentricity', 'EulerNumber',
'Extent', 'MinIntensity', 'MeanIntensity',
'MaxIntensity', 'Solidity'],
intensity_image=im)
feats = np.array([props.values() for props in feats], np.float)
feature_quantiles = mquantiles(feats, [0.05, 0.25, 0.5, 0.75, 0.95],
axis=0)
f = np.concatenate([np.array([n_objs], np.float),
feature_quantiles.ravel()])
return f
full_feature_list = \
[fun.partial(np.histogram, bins=16, range=(0.0, 1.0)),
fun.partial(lab_hist, bins=16, range=(0.0, 1.0)),
feature.hog
]
# TO-DO: add segmentation features
def image_feature_vector(im, feature_list=None):
    if isinstance(im, str):
im = img_as_float(imio.imread(im))
if feature_list is None:
feature_list = full_feature_list
features = np.concatenate([f(im) for f in feature_list])
return features
| bsd-3-clause | Python |
|
f16a7e43ce4d9dc82fd4bfca34d80f0447bd57db | add isStaffOrReadOnly permissions | avih/treeherder,avih/treeherder,jgraham/treeherder,gbrmachado/treeherder,moijes12/treeherder,rail/treeherder,vaishalitekale/treeherder,edmorley/treeherder,adusca/treeherder,tojon/treeherder,akhileshpillai/treeherder,parkouss/treeherder,kapy2010/treeherder,jgraham/treeherder,gbrmachado/treeherder,edmorley/treeherder,moijes12/treeherder,parkouss/treeherder,KWierso/treeherder,avih/treeherder,tojonmz/treeherder,jgraham/treeherder,edmorley/treeherder,wlach/treeherder,deathping1994/treeherder,avih/treeherder,parkouss/treeherder,KWierso/treeherder,parkouss/treeherder,moijes12/treeherder,vaishalitekale/treeherder,adusca/treeherder,kapy2010/treeherder,vaishalitekale/treeherder,kapy2010/treeherder,avih/treeherder,deathping1994/treeherder,akhileshpillai/treeherder,vaishalitekale/treeherder,sylvestre/treeherder,glenn124f/treeherder,akhileshpillai/treeherder,deathping1994/treeherder,gbrmachado/treeherder,sylvestre/treeherder,moijes12/treeherder,deathping1994/treeherder,rail/treeherder,jgraham/treeherder,kapy2010/treeherder,wlach/treeherder,gbrmachado/treeherder,kapy2010/treeherder,adusca/treeherder,deathping1994/treeherder,glenn124f/treeherder,sylvestre/treeherder,gbrmachado/treeherder,wlach/treeherder,glenn124f/treeherder,wlach/treeherder,sylvestre/treeherder,vaishalitekale/treeherder,rail/treeherder,wlach/treeherder,moijes12/treeherder,glenn124f/treeherder,adusca/treeherder,akhileshpillai/treeherder,edmorley/treeherder,KWierso/treeherder,moijes12/treeherder,rail/treeherder,deathping1994/treeherder,akhileshpillai/treeherder,parkouss/treeherder,sylvestre/treeherder,jgraham/treeherder,jgraham/treeherder,adusca/treeherder,KWierso/treeherder,tojon/treeherder,avih/treeherder,tojon/treeherder,gbrmachado/treeherder,rail/treeherder,akhileshpillai/treeherder,tojonmz/treeherder,adusca/treeherder,tojonmz/treeherder,rail/treeherder,parkouss/treeherder,glenn124f/treeherder,tojonmz/treeherder,tojon/treeherder,vaishalitekale/treeherder,tojonmz/treeherder,wlach/treeherder,sylvestre/treeherder,glenn124f/treeherder,tojonmz/treeherder | treeherder/webapp/api/permissions.py | treeherder/webapp/api/permissions.py | from rest_framework.permissions import BasePermission
from rest_framework.permissions import SAFE_METHODS
class IsStaffOrReadOnly(BasePermission):
"""
The request is authenticated as an admin staff (eg. sheriffs), or is a read-only request.
"""
def has_permission(self, request, view):
return (request.method in SAFE_METHODS or
request.user and
request.user.is_authenticated() and
request.user.is_staff) | mpl-2.0 | Python |
|
5a77678a44ec9838e943b514a586dbd96b8bdfdc | Add migration for license change | openego/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform,openego/oeplatform,tom-heimbrodt/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform | modelview/migrations/0042_auto_20171215_0953.py | modelview/migrations/0042_auto_20171215_0953.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-12-15 08:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('modelview', '0041_merge_20171211_1420'),
]
operations = [
migrations.AlterField(
model_name='basicfactsheet',
name='license',
field=models.CharField(choices=[('MIT Licence', 'MIT Licence'), ('Apache Licence', 'Apache Licence'), ('GNU GPL Licence', 'GNU GPL Licence'), ('Other', 'Other'), ('Unknown', 'Unknown')], default='Unknown', max_length=20, verbose_name='License'))
]
| agpl-3.0 | Python |
|
5522285af9179441e56f65405037bb3a4c1c1274 | Revert "Important fixes" | JNeiger/robocup-software,RoboJackets/robocup-software,JNeiger/robocup-software,RoboJackets/robocup-software,JNeiger/robocup-software,RoboJackets/robocup-software,JNeiger/robocup-software,JNeiger/robocup-software,RoboJackets/robocup-software | soccer/gameplay/plays/testing/triangle_pass.py | soccer/gameplay/plays/testing/triangle_pass.py | import robocup
import play
import behavior
import skills.move
import skills.capture
import tactics.coordinated_pass
import constants
import main
import enum
## A demo play written during a teaching session to demonstrate play-writing
# Three robots form a triangle on the field and pass the ball A->B->C->A and so on.
class TrianglePass(play.Play):
class State(enum.Enum):
## 2 robots get on the corners of a triangle,
# while a third fetches the ball
setup = 1
## The robots continually pass to each other
passing = 2
def __init__(self):
super().__init__(continuous=True)
# register states - they're both substates of "running"
self.add_state(TrianglePass.State.setup,
behavior.Behavior.State.running)
self.add_state(TrianglePass.State.passing,
behavior.Behavior.State.running)
self.add_transition(behavior.Behavior.State.start,
TrianglePass.State.setup, lambda: True,
'immediately')
self.add_transition(TrianglePass.State.setup,
TrianglePass.State.passing,
lambda: self.all_subbehaviors_completed(),
'all subbehaviors completed')
self.triangle_points = [
robocup.Point(0, constants.Field.Length / 2.0),
robocup.Point(constants.Field.Width / 4,
constants.Field.Length / 4),
robocup.Point(-constants.Field.Width / 4,
constants.Field.Length / 4),
]
def on_enter_setup(self):
closestPt = min(self.triangle_points,
key=lambda pt: pt.dist_to(main.ball().pos))
otherPts = list(self.triangle_points)
otherPts.remove(closestPt)
self.add_subbehavior(skills.move.Move(otherPts[0]), 'move1')
self.add_subbehavior(skills.move.Move(otherPts[1]), 'move2')
self.add_subbehavior(skills.capture.Capture(), 'capture')
def on_exit_setup(self):
self.remove_all_subbehaviors()
def execute_passing(self):
# If we had a pass in progress before and it finished, remove it
if self.has_subbehaviors():
if self.all_subbehaviors()[0].is_done_running():
self.remove_all_subbehaviors()
# if we're not currently passing, start a new pass
if not self.has_subbehaviors():
# pick pass from and to points
kickFrom = min(self.triangle_points,
key=lambda pt: pt.dist_to(main.ball().pos))
kickFromIdx = self.triangle_points.index(kickFrom)
kickToIdx = (kickFromIdx + 1) % len(self.triangle_points)
kickToPt = self.triangle_points[kickToIdx]
# add the pass subbehavior
self.add_subbehavior(
tactics.coordinated_pass.CoordinatedPass(kickToPt), 'pass')
def on_exit_passing(self):
self.remove_all_subbehaviors()
| apache-2.0 | Python |
|
0a4c100f9fb6e7540320fb7c55aeebdffe91c6d1 | add primenumber.py | BhaskarNaidu/python | primenumber.py | primenumber.py | lower = int(input("Enter lower range: "))
upper = int(input("Enter upper range: "))
for num in range(lower,upper + 1):
if num > 1:
for i in range(2,num):
if (num % i) == 0:
break
else:
print(num)
| apache-2.0 | Python |
|
64ced324f05de20f839782913cfb13d147d49dd6 | create a scheduler example file to test on live | jaredwelch1/CapstoneI,jaredwelch1/CapstoneI | code-samples/web_scraper/jared/scheduling_script.py | code-samples/web_scraper/jared/scheduling_script.py | from time import sleep
from apscheduler.schedulers.background import BackgroundScheduler as Scheduler
import logging
import datetime
# create a scheduler
s = Scheduler()
# This is what I want to happen
def job():
logging.basicConfig(filename='scheduled_task.log',level=logging.INFO,
format='%(asctime)s %(message)s line: %(lineno)d')
try:
logging.info( "scheduled event")
except Exception as e:
print("open file failed")
def main():
newTime = datetime.datetime.now() + datetime.timedelta(seconds = 2)
s.add_job(job, 'cron', hour='0-23')
s.start()
try:
# This is here to simulate application activity (which keeps the main thread alive).
while True:
sleep(2)
except (KeyboardInterrupt, SystemExit):
# Not strictly necessary if daemonic mode is enabled but should be done if possible
scheduler.shutdown()
if __name__ == "__main__":
main()
# Running a python script with python script & will fork that process immediately, so you can close the terminal. | mit | Python |
|
a9a6a3dafc8901ffeeb89862fdc79f7099ba311a | Add UTF-8 test | ksuarz/monary,ksuarz/monary,ksuarz/monary,ksuarz/monary,ksuarz/monary,ksuarz/monary | test/test_utf8.py | test/test_utf8.py | # -*- coding: utf-8 -*-
# Monary - Copyright 2011-2014 David J. C. Beach
# Please see the included LICENSE.TXT and NOTICE.TXT for licensing information.
import pymongo
import monary
def setup():
with pymongo.Connection("127.0.0.1") as c:
c.drop_database("monary_test")
c.monary_test.data.insert({"test" : u"aあ"})
        c.monary_test.data.insert({"sequence": 1, "test": u"aあ"})
        c.monary_test.data.insert({"sequence": 2, "test": u"âéÇ"})
        c.monary_test.data.insert({"sequence": 3, "test": u"αλΩ"})
with pymongo.Connection("127.0.0.1") as c:
c.drop_database("monary_test")
def test_utf8():
with monary.Monary("127.0.0.1") as m:
[data] = m.query("monary_test",
"data",
{},
["test"],
["string:8"],
sort="sequence")
expected = ["aあ", "âéÇ", "αλΩ"]
for x, y in zip(data, expected):
assert x == y
| apache-2.0 | Python |
|
6740c6192ab9bf37767230981b86e446486d4c43 | implement basic plugin loader for laser | b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril | mythril/laser/ethereum/plugins/plugin_loader.py | mythril/laser/ethereum/plugins/plugin_loader.py | from mythril.laser.ethereum.svm import LaserEVM
from mythril.laser.ethereum.plugins.plugin import LaserPlugin
class LaserPluginLoader:
"""
The LaserPluginLoader is used to abstract the logic relating to plugins.
Components outside of laser thus don't have to be aware of the interface that plugins provide
"""
def __init__(self, symbolic_vm: LaserEVM):
""" Initializes the plugin loader
:param symbolic_vm: symbolic virtual machine to load plugins for
"""
self.symbolic_vm = symbolic_vm
self.laser_plugins = []
def load(self, laser_plugin: LaserPlugin):
""" Loads the plugin
:param laser_plugin: plugin that will be loaded in the symbolic virtual machine
"""
laser_plugin.initialize(self.symbolic_vm)
self.laser_plugins.append(laser_plugin)
def is_enabled(self, laser_plugin: LaserPlugin):
""" Returns whether the plugin is loaded in the symbolic_vm
:param laser_plugin: plugin that will be checked
"""
return laser_plugin in self.laser_plugins
| mit | Python |
|
a01f4d47410ee1bf164d8b962f6337f8c39f0d16 | add quicksort recursive | ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms | sort/quick_sort/python/quicksort-recusive.py | sort/quick_sort/python/quicksort-recusive.py |
def quickSort(arr):
sort(arr,0,len(arr)-1)
def sort(arr, low, high):
if (low < high):
p = partition(arr, low, high)
sort(arr, low, p - 1)
sort(arr, p + 1, high)
def partition(arr, low, high):
pivot = arr[high]
i = (low - 1)
for j in range(low,high):
if (arr[j] <= pivot):
i+= 1
arr[i],arr[j] = arr[j],arr[i]
arr[i+1],arr[high] = arr[high],arr[i+1]
return i + 1 | cc0-1.0 | Python |
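A short exercise of the in-place sort above (illustrative):
data = [9, 3, 7, 1, 8, 2]
quickSort(data)
assert data == [1, 2, 3, 7, 8, 9]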
|
e6b381a617808c500e115d5e3715dc2ae454e896 | Add command line tool | psd-tools/psd-tools,kmike/psd-tools,kmike/psd-tools | src/psd_tools2/__main__.py | src/psd_tools2/__main__.py | from __future__ import unicode_literals
import logging
import docopt
from psd_tools2 import PSDImage
from psd_tools2.version import __version__
try:
from IPython.lib.pretty import pprint
except ImportError:
from pprint import pprint
logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
def main():
"""
psd-tools command line utility.
Usage:
psd-tools export <input_file> <output_file> [options]
psd-tools show <input_file> [options]
psd-tools debug <input_file> [options]
psd-tools -h | --help
psd-tools --version
Options:
-v --verbose Be more verbose.
Example:
psd-tools show example.psd # Show the file content
psd-tools export example.psd example.png # Export as PNG
psd-tools export example.psd[0] example-0.png # Export layer as PNG
"""
args = docopt.docopt(main.__doc__, version=__version__)
if args['--verbose']:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
if args['export']:
input_parts = args['<input_file>'].split('[')
input_file = input_parts[0]
if len(input_parts) > 1:
indices = [int(x.rstrip(']')) for x in input_parts[1:]]
else:
indices = []
layer = PSDImage.open(input_file)
for index in indices:
layer = layer[index]
if isinstance(layer, PSDImage) and layer.has_preview():
image = layer.topil()
else:
image = layer.compose()
image.save(args['<output_file>'])
elif args['show']:
psd = PSDImage.open(args['<input_file>'])
pprint(psd)
elif args['debug']:
psd = PSDImage.open(args['<input_file>'])
pprint(psd._psd)
if __name__ == "__main__":
main()
| mit | Python |
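The 'export' branch driven from Python rather than the CLI (file names are hypothetical):
from psd_tools2 import PSDImage

layer = PSDImage.open('example.psd')[0]
layer.compose().save('example-0.png')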
|
bce910100fe0c3970b82d4f5544f11ce3392bc3c | Remove NoQueueMinCycleTime nonsense from sync worker | mduggan/tapiriik,cpfair/tapiriik,campbellr/tapiriik,niosus/tapiriik,abs0/tapiriik,cgourlay/tapiriik,campbellr/tapiriik,niosus/tapiriik,brunoflores/tapiriik,abhijit86k/tapiriik,dlenski/tapiriik,cmgrote/tapiriik,gavioto/tapiriik,olamy/tapiriik,cpfair/tapiriik,mduggan/tapiriik,cpfair/tapiriik,mjnbike/tapiriik,marxin/tapiriik,dlenski/tapiriik,cgourlay/tapiriik,mduggan/tapiriik,cmgrote/tapiriik,cgourlay/tapiriik,cpfair/tapiriik,cheatos101/tapiriik,cgourlay/tapiriik,dlenski/tapiriik,cheatos101/tapiriik,abs0/tapiriik,marxin/tapiriik,mjnbike/tapiriik,abhijit86k/tapiriik,brunoflores/tapiriik,gavioto/tapiriik,abhijit86k/tapiriik,abs0/tapiriik,mjnbike/tapiriik,dmschreiber/tapiriik,brunoflores/tapiriik,abs0/tapiriik,marxin/tapiriik,dmschreiber/tapiriik,olamy/tapiriik,brunoflores/tapiriik,niosus/tapiriik,mjnbike/tapiriik,gavioto/tapiriik,niosus/tapiriik,olamy/tapiriik,dmschreiber/tapiriik,cmgrote/tapiriik,cheatos101/tapiriik,cheatos101/tapiriik,abhijit86k/tapiriik,marxin/tapiriik,dmschreiber/tapiriik,campbellr/tapiriik,mduggan/tapiriik,campbellr/tapiriik,dlenski/tapiriik,cmgrote/tapiriik,gavioto/tapiriik,olamy/tapiriik | sync_worker.py | sync_worker.py | from datetime import datetime, timedelta
import os
print("Sync worker %s booting at %s" % (os.getpid(), datetime.now()))
from tapiriik.requests_lib import patch_requests_with_default_timeout, patch_requests_source_address
from tapiriik import settings
from tapiriik.database import db, close_connections
import time
import signal
import sys
import subprocess
import socket
Run = True
RecycleInterval = 2 # Time spent rebooting workers < time spent wrangling Python memory management.
oldCwd = os.getcwd()
WorkerVersion = subprocess.Popen(["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, cwd=os.path.dirname(__file__)).communicate()[0].strip()
os.chdir(oldCwd)
def sync_interrupt(signal, frame):
    global Run
    Run = False
signal.signal(signal.SIGINT, sync_interrupt)
signal.signal(signal.SIGUSR2, sync_interrupt)
def sync_heartbeat(state, user=None):
    db.sync_workers.update({"Process": os.getpid(), "Host": socket.gethostname()}, {"$set": {"Heartbeat": datetime.utcnow(), "State": state, "User": user}})
print("Sync worker " + str(os.getpid()) + " initialized at " + str(datetime.now()))
db.sync_workers.update({"Process": os.getpid(), "Host": socket.gethostname()}, {"Process": os.getpid(), "Heartbeat": datetime.utcnow(), "Startup": datetime.utcnow(), "Version": WorkerVersion, "Host": socket.gethostname(), "Index": settings.WORKER_INDEX, "State": "startup"}, upsert=True)
sys.stdout.flush()
patch_requests_with_default_timeout(timeout=60)
if isinstance(settings.HTTP_SOURCE_ADDR, list):
    settings.HTTP_SOURCE_ADDR = settings.HTTP_SOURCE_ADDR[settings.WORKER_INDEX % len(settings.HTTP_SOURCE_ADDR)]
patch_requests_source_address((settings.HTTP_SOURCE_ADDR, 0))
print(" -> Index %s\n -> Interface %s" % (settings.WORKER_INDEX, settings.HTTP_SOURCE_ADDR))
# We defer including the main body of the application till here so the settings aren't captured before we've set them up.
# The better way would be to defer initializing services until they're requested, but it's 10:30 and this will work just as well.
from tapiriik.sync import Sync
Sync.InitializeWorkerBindings()
sync_heartbeat("ready")
while Run:
    cycleStart = datetime.utcnow()  # Avoid having synchronization fall down during DST setback
    processed_user_count = Sync.PerformGlobalSync(heartbeat_callback=sync_heartbeat, version=WorkerVersion)
    RecycleInterval -= processed_user_count
    if RecycleInterval <= 0:
        break
    sync_heartbeat("idle")
print("Sync worker shutting down cleanly")
db.sync_workers.remove({"Process": os.getpid(), "Host": socket.gethostname()})
print("Closing database connections")
close_connections()
sys.stdout.flush()
| from datetime import datetime, timedelta
import os
print("Sync worker %s booting at %s" % (os.getpid(), datetime.now()))
from tapiriik.requests_lib import patch_requests_with_default_timeout, patch_requests_source_address
from tapiriik import settings
from tapiriik.database import db, close_connections
import time
import signal
import sys
import subprocess
import socket
Run = True
RecycleInterval = 2 # Time spent rebooting workers < time spent wrangling Python memory management.
NoQueueMinCycleTime = timedelta(seconds=30) # No need to hammer the database given the number of sync workers I have
oldCwd = os.getcwd()
WorkerVersion = subprocess.Popen(["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, cwd=os.path.dirname(__file__)).communicate()[0].strip()
os.chdir(oldCwd)
def sync_interrupt(signal, frame):
    global Run
    Run = False
signal.signal(signal.SIGINT, sync_interrupt)
signal.signal(signal.SIGUSR2, sync_interrupt)
def sync_heartbeat(state, user=None):
    db.sync_workers.update({"Process": os.getpid(), "Host": socket.gethostname()}, {"$set": {"Heartbeat": datetime.utcnow(), "State": state, "User": user}})
print("Sync worker " + str(os.getpid()) + " initialized at " + str(datetime.now()))
db.sync_workers.update({"Process": os.getpid(), "Host": socket.gethostname()}, {"Process": os.getpid(), "Heartbeat": datetime.utcnow(), "Startup": datetime.utcnow(), "Version": WorkerVersion, "Host": socket.gethostname(), "Index": settings.WORKER_INDEX, "State": "startup"}, upsert=True)
sys.stdout.flush()
patch_requests_with_default_timeout(timeout=60)
if isinstance(settings.HTTP_SOURCE_ADDR, list):
    settings.HTTP_SOURCE_ADDR = settings.HTTP_SOURCE_ADDR[settings.WORKER_INDEX % len(settings.HTTP_SOURCE_ADDR)]
patch_requests_source_address((settings.HTTP_SOURCE_ADDR, 0))
print(" -> Index %s\n -> Interface %s" % (settings.WORKER_INDEX, settings.HTTP_SOURCE_ADDR))
# We defer including the main body of the application till here so the settings aren't captured before we've set them up.
# The better way would be to defer initializing services until they're requested, but it's 10:30 and this will work just as well.
from tapiriik.sync import Sync
Sync.InitializeWorkerBindings()
while Run:
    cycleStart = datetime.utcnow()  # Avoid having synchronization fall down during DST setback
    processed_user_count = Sync.PerformGlobalSync(heartbeat_callback=sync_heartbeat, version=WorkerVersion)
    RecycleInterval -= processed_user_count
    # When there's no queue, all the workers sit sending 1000s of the queries to the database server
    if processed_user_count == 0:
        # Put this before the recycle shutdown, otherwise it'll quit and get rebooted ASAP
        remaining_cycle_time = NoQueueMinCycleTime - (datetime.utcnow() - cycleStart)
        if remaining_cycle_time > timedelta(0):
            print("Pausing for %ss" % remaining_cycle_time.total_seconds())
            sync_heartbeat("idle-spin")
            time.sleep(remaining_cycle_time.total_seconds())
    if RecycleInterval <= 0:
        break
    sync_heartbeat("idle")
print("Sync worker shutting down cleanly")
db.sync_workers.remove({"Process": os.getpid(), "Host": socket.gethostname()})
print("Closing database connections")
close_connections()
sys.stdout.flush()
| apache-2.0 | Python |
6d134c2a870150477ecc41edbab272e75462bbcd | Add benchmark script | knightwupz/mistune,lepture/mistune,jubel-han/mistune,knightwupz/mistune | tests/bench.py | tests/bench.py |
import os
import re
import time
root = os.path.dirname(__file__)
known = []
def listdir(folder):
    folder = os.path.join(root, folder)
    files = os.listdir(folder)
    files = filter(lambda o: o.endswith('.text'), files)
    return files
def mistune_runner(content):
    import mistune
    return mistune.markdown(content)
def misaka_runner(content):
    import misaka
    extensions = (
        misaka.EXT_NO_INTRA_EMPHASIS | misaka.EXT_TABLES |
        misaka.EXT_FENCED_CODE | misaka.EXT_AUTOLINK |
        misaka.EXT_STRIKETHROUGH
    )
    md = misaka.Markdown(misaka.HtmlRenderer(), extensions=extensions)
    return md.render(content)
def bench(runner=None):
    cases = []
    for name in listdir('cases'):
        with open(os.path.join(root, 'cases', name), 'r') as f:
            cases.append(f.read())
    for name in listdir('extra'):
        with open(os.path.join(root, 'extra', name), 'r') as f:
            cases.append(f.read())
    if runner is None:
        runner = mistune_runner
    begin = time.time()
    count = 100
    while count:
        count -= 1
        for text in cases:
            runner(text)
    end = time.time()
    return end - begin
print('misaka', bench(misaka_runner))
print('mistune', bench())
| bsd-3-clause | Python |
|
f5970d1488d28f27c5f20dd11619187d0c13c960 | Add simple windows registry read/write functions | ddubson/code-dojo-py | os/win_registry.py | os/win_registry.py | import _winreg
keyName = "myKey"
def write_to_registry():
    try:
        key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName)
        _winreg.SetValueEx(key, "myVal", 0, _winreg.REG_SZ, "This is a value.")
        print("value created")
    except Exception as e:
        print(e)
def read_from_registry():
    try:
        with _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName, 0, _winreg.KEY_READ) as key:
            if key:
                data = _winreg.QueryValueEx(key, "myVal")
                print("Read from registry: ", data)
    except Exception as e:
        print(e)
if __name__ == '__main__':
    write_to_registry()
    read_from_registry()
| mit | Python |
|
a37007e03747395c12cc4bc34c761aa3253f7599 | Add tests folder | MrKiven/ECache | tests/__init__.py | tests/__init__.py | # -*- coding: utf-8 -*-
| mit | Python |
|
d046bc3be27c39ca70a45d92939a2aa2444f3195 | test examples | tkarna/cofs | test/examples/test_examples.py | test/examples/test_examples.py | """
Runs all example scripts. Only tests whether examples can be executed.
"""
import pytest
import os
import subprocess
import glob
import sys
# set environment flag
# can be used in examples to reduce cpu cost
os.environ['THETIS_REGRESSION_TEST'] = "1"
exclude_files = [
    'baroclinic_eddies/diagnostics.py',
    'baroclinic_eddies/submitRuns.py',
    'bottomFriction/plot_results.py',
    'columbia_plume/atm_forcing.py',
    'columbia_plume/bathymetry.py',
    'columbia_plume/cre-plume.py',
    'columbia_plume/diagnostics.py',
    'columbia_plume/plot_elevation_ts.py',
    'columbia_plume/roms_forcing.py',
    'columbia_plume/test_bathy_smoothing.py',
    'columbia_plume/tidal_forcing.py',
    'columbia_plume/timeseries_forcing.py',
    'dome/diagnostics.py',
    'dome/dome_setup.py',
    'dome/plot_histogram.py',
    'katophillips/plot_results.py',
    'lockExchange/diagnostics.py',
    'lockExchange/plotting.py',
    'lockExchange/submitRuns.py',
    'tidalfarm/tidalfarm.py',
]
cwd = os.path.abspath(os.path.dirname(__file__))
examples_dir = os.path.abspath(os.path.join(cwd, '..', '..', 'examples'))
exclude_files = [os.path.join(examples_dir, f) for f in exclude_files]
all_examples = glob.glob(os.path.join(examples_dir, '*/*.py'))
all_examples = [f for f in all_examples if f not in exclude_files]
@pytest.fixture(params=all_examples,
                ids=lambda x: os.path.basename(x))
def example_file(request):
    return os.path.abspath(request.param)
def test_examples(example_file, tmpdir, monkeypatch):
    assert os.path.isfile(example_file), 'File not found {:}'.format(example_file)
    # change workdir to temporary dir
    monkeypatch.chdir(tmpdir)
    subprocess.check_call([sys.executable, example_file])
| mit | Python |
|
b872aaa2837e7cd72c36f2b3fd7679106fda57b4 | Add test cli | MizukiSonoko/iroha-cli,MizukiSonoko/iroha-cli | tests/test_cli.py | tests/test_cli.py | import unittest
import sys, os
import cli
from io import StringIO
io = StringIO()
class TestBuildInCommands(unittest.TestCase):
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def test_normal(self):
        sys.stdout = io
        # $ iroha-ya-cli
        cli.main.main(['iroha-ya-cli'])
        sys.stdout = sys.__stdout__
        self.assertTrue('Iroha-mizuki-cli' in io.getvalue())
    def test_config(self):
        sys.stdout = io
        # $ iroha-ya-cli config
        cli.main.main(['iroha-ya-cli', 'config'])
        sys.stdout = sys.__stdout__
        self.assertTrue('Iroha-mizuki-cli' in io.getvalue()) | apache-2.0 | Python
|
3d7bb0dfcbfda9c99ee2372394959667c76bb83f | Add first .py file to project | Nehoroshiy/multi_classifier,Nehoroshiy/multi_classifier | main.py | main.py |
print("Hello!") | mit | Python |
|
59a57a25ff925bd1ce6d467d316ec478847b58ad | Create combinations.py | Matir/analysis-tools | combinations.py | combinations.py | #!/usr/bin/env python
from string import uppercase, lowercase, maketrans
import math, sys
class combinations():
    def combs(self, total, choice):
        return (math.factorial(total) / (math.factorial(choice) * math.factorial(total - choice)))
if __name__ == '__main__':
    try:
        total = sys.argv[1]
        choice = sys.argv[2]
        total = int(total, 0)
        choice = int(choice, 0)
        ops = combinations()
        result = ops.combs(total, choice)
        print result
    except IndexError:
        print('Usage: combinations.py <int of total> <int to choice>')
| mit | Python |
|
2c63d77428b84c7d1be1c861079d39d641d51fcf | add script to scrap stock data and save them locally | Michael-Tu/tools,Michael-Tu/tools,Michael-Tu/tools,Michael-Tu/tools | stock_scraping/stock_price_scraping_to_local.py | stock_scraping/stock_price_scraping_to_local.py | '''
This script helps you scrape stock data available on Bloomberg Finance
and store it locally.
Please obey applicable local and federal laws and applicable API terms of use
when using this script. I, the creator of this script, will not be responsible
for any legal issues resulting from the use of this script.
@author Gan Tu
@version python 2 or python 3
[HOW TO CHANGE PYTHON VERSION]
This script by default should be run by Python 2.
To use this in Python 3, change the following:
1) change ALL occurrences of "urllib" to "urllib.request".
'''
import urllib
import re
import json
import os
# Stock Symbols Initialization
# Feel free to modify the file source to contain stock symbols you plan to scrape
stocks = open("nasdaq_symbols.txt", "r").read().split("\n")
# URL Initialization
urlPrefix = "http://www.bloomberg.com/markets/api/bulk-time-series/price/"
urlAffix = "%3AUS?timeFrame="
# Only four of these are valid options for now.
# 1_DAY will scrape minute-by-minute data for one day, while the others will be daily close prices.
# Feel free to modify them for your own needs.
options = ["1_DAY", "1_MONTH", "1_YEAR", "5_YEAR"]
def setup():
    try:
        os.mkdir("data")
    except Exception as e:
        pass
    for option in options:
        try:
            os.mkdir("data/" + option + "/")
        except Exception as e:
            pass
def scrap():
    i = 0
    while i < len(stocks):
        for option in options:
            file = open("data/" + option + "/" + stocks[i] + ".txt", "w")
            file.close()
            htmltext = urllib.urlopen(urlPrefix + stocks[i] + urlAffix + option)
            try:
                data = json.load(htmltext)[0]["price"]
                key = "date"
                if option == "1_DAY":
                    key = "dateTime"
                file = open("data/" + option + "/" + stocks[i] + ".txt", "a")
                for price in data:
                    file.write(stocks[i] + "," + price[key] + "," + str(price["value"]) + "\n")
                file.close()
            except Exception as e:
                pass
        i += 1
if __name__ == "__main__":
    setup()
    scrap()
| mit | Python |
|
72fe45ca6e6cd13b0b5fbb250ce769f5ec883e90 | Add awful pixiv command. | MJB47/Jokusoramame,MJB47/Jokusoramame,MJB47/Jokusoramame | joku/cogs/pixiv.py | joku/cogs/pixiv.py | """
Cog for interacting with the Pixiv API.
"""
import random
import shutil
import requests
from discord.ext import commands
from io import BytesIO
from pixivpy3 import AppPixivAPI
from asyncio_extras import threadpool
from pixivpy3 import PixivAPI
from pixivpy3 import PixivError
from joku.bot import Context
class EncodingAwarePixivAPI(PixivAPI):
    """
    A custom encoding-aware Pixiv API.
    """
    def requests_call(self, method, url, headers=None, params=None, data=None, stream=False):
        """ requests http/https call for Pixiv API """
        if headers is None:
            headers = {}
        try:
            if method == 'GET':
                r = requests.get(url, params=params, headers=headers, stream=stream, **self.requests_kwargs)
            elif method == 'POST':
                r = requests.post(url, params=params, data=data, headers=headers, stream=stream,
                                  **self.requests_kwargs)
            elif method == 'DELETE':
                r = requests.delete(url, params=params, data=data, headers=headers, stream=stream,
                                    **self.requests_kwargs)
            else:
                raise PixivError('Unknown method: %s' % method)
            r.encoding = "utf-8"
            return r
        except Exception as e:
            raise PixivError('requests %s %s error: %s' % (method, url, e)) from e
class Pixiv(object):
    def __init__(self, bot):
        self.bot = bot
        # This is the authenticated API.
        self._pixiv_api = EncodingAwarePixivAPI()
    @commands.group(pass_context=True)
    async def pixiv(self, ctx: Context):
        """
        Commands for interacting with the Pixiv API.
        """
    @pixiv.command(pass_context=True)
    async def search(self, ctx: Context, *, tag: str):
        """
        Searches Pixiv using the specified tag.
        """
        await ctx.bot.type()
        async with threadpool():
            if not self._pixiv_api.access_token:
                self._pixiv_api.auth(**ctx.bot.config.get("pixiv", {}))
            data = self._pixiv_api.search_works(tag, per_page=100)
        if data.get("status") == "failure":
            await ctx.bot.say(":x: Failed to download from pixiv.")
            return
        # 'response' is the actual data key
        illusts = data["response"]
        if not illusts:
            await ctx.bot.say(":x: No results found.")
            return
        # Sort the illusts by score.
        illusts = sorted(illusts, key=lambda x: x["stats"]["score"], reverse=True)[:30]
        item = random.SystemRandom().choice(illusts)
        # Get some useful attributes out.
        obb = {
            "id": item["id"],
            "title": item["title"],
            "image": item["image_urls"]["large"],
            "username": item["user"]["name"],
            "url": "http://www.pixiv.net/member_illust.php?mode=medium&illust_id={}".format(item["id"]),
            "total_bookmarks": item["stats"]["favorited_count"]["public"],
            "views": item["stats"]["views_count"],
            "score": item["stats"]["score"]
        }
        async with threadpool():
            # Download the image.
            r = self._pixiv_api.requests_call('GET', obb["image"],
                                              headers={'Referer': "https://app-api.pixiv.net/"},
                                              stream=True)
            # Copy it into BytesIO, which will be uploaded to Discord.
            fobj = BytesIO()
            shutil.copyfileobj(r.raw, fobj)
        # Seek back, so that it actually uploads a file.
        fobj.seek(0)
        await ctx.bot.say("`{title}`, by **{username}** (Illustration ID `{id}`):\n"
                          "\n**{score}** score, **{total_bookmarks}** bookmarks, **{views}** views"
                          "\n<{url}>".format(**obb))
        await ctx.bot.type()
        await ctx.bot.upload(fobj, filename=obb["image"].split("/")[-1])
def setup(bot):
    bot.add_cog(Pixiv(bot))
| mit | Python |
|
ba06683866ce8e4e3bccd4acebd6ec2278acfeaa | Add Litecoin testnet, and Litecoin BIP32 prefixes. | gitonio/pycoin,shivaenigma/pycoin,shayanb/pycoin,XertroV/pycoin,richardkiss/pycoin,antiface/pycoin,Magicking/pycoin,richardkiss/pycoin,thirdkey-solutions/pycoin,Pan0ram1x/pycoin,Treefunder/pycoin,Tetchain/pycoin,shayanb/pycoin,Bluejudy/pycoin,lekanovic/pycoin,tomholub/pycoin,tomholub/pycoin,Tetchain/pycoin,Kefkius/pycoin,Kefkius/pycoin,lekanovic/pycoin,shivaenigma/pycoin,devrandom/pycoin,Treefunder/pycoin,Magicking/pycoin,pycoin/pycoin,moocowmoo/pycoin,antiface/pycoin,mperklin/pycoin,ptcrypto/pycoin,gitonio/pycoin,XertroV/pycoin,Bluejudy/pycoin,thirdkey-solutions/pycoin,moocowmoo/pycoin,zsulocal/pycoin,mperklin/pycoin,ptcrypto/pycoin,devrandom/pycoin,pycoin/pycoin,Pan0ram1x/pycoin,zsulocal/pycoin | pycoin/networks.py | pycoin/networks.py | from collections import namedtuple
from .serialize import h2b
NetworkValues = namedtuple('NetworkValues',
                           ('network_name', 'subnet_name', 'code', 'wif', 'address',
                            'pay_to_script', 'prv32', 'pub32'))
NETWORKS = (
    NetworkValues("Bitcoin", "mainnet", "BTC", b'\x80', b'\0', b'\5', h2b("0488ADE4"), h2b("0488B21E")),
    NetworkValues("Bitcoin", "testnet3", "XTN", b'\xef', b'\x6f', b'\xc4',
                  h2b("04358394"), h2b("043587CF")),
    NetworkValues("Litecoin", "mainnet", "LTC", b'\xb0', b'\x30', None, h2b('019d9cfe'), h2b('019da462')),
    NetworkValues("Litecoin", "testnet", "XLT", b'\xb1', b'\x31', None, h2b('0436ef7d'), h2b('0436f6e1')),
    NetworkValues("Dogecoin", "mainnet", "DOGE", b'\x9e', b'\x1e', b'\x16',
                  h2b("02fda4e8"), h2b("02fda923")),
    # BlackCoin: unsure about bip32 prefixes; assuming will use Bitcoin's
    NetworkValues("Blackcoin", "mainnet", "BLK", b'\x99', b'\x19', None, h2b("0488ADE4"), h2b("0488B21E")),
)
# Map from short code to details about that network.
NETWORK_NAME_LOOKUP = dict((i.code, i) for i in NETWORKS)
# All network names, return in same order as list above: for UI purposes.
NETWORK_NAMES = [i.code for i in NETWORKS]
DEFAULT_NETCODES = NETWORK_NAMES
def _lookup(netcode, property):
    # Lookup a specific value needed for a specific network
    network = NETWORK_NAME_LOOKUP.get(netcode)
    if network:
        return getattr(network, property)
    return None
def network_name_for_netcode(netcode):
    return _lookup(netcode, "network_name")
def subnet_name_for_netcode(netcode):
    return _lookup(netcode, "subnet_name")
def full_network_name_for_netcode(netcode):
    network = NETWORK_NAME_LOOKUP[netcode]
    if network:
        return "%s %s" % (network.network_name, network.subnet_name)
def wif_prefix_for_netcode(netcode):
    return _lookup(netcode, "wif")
def address_prefix_for_netcode(netcode):
    return _lookup(netcode, "address")
def pay_to_script_prefix_for_netcode(netcode):
    return _lookup(netcode, "pay_to_script")
def prv32_prefix_for_netcode(netcode):
    return _lookup(netcode, "prv32")
def pub32_prefix_for_netcode(netcode):
    return _lookup(netcode, "pub32")
| from collections import namedtuple
from .serialize import h2b
NetworkValues = namedtuple('NetworkValues',
                           ('network_name', 'subnet_name', 'code', 'wif', 'address',
                            'pay_to_script', 'prv32', 'pub32'))
NETWORKS = (
    NetworkValues("Bitcoin", "mainnet", "BTC", b'\x80', b'\0', b'\5', h2b("0488ADE4"), h2b("0488B21E")),
    NetworkValues("Bitcoin", "testnet3", "XTN", b'\xef', b'\x6f', b'\xc4',
                  h2b("04358394"), h2b("043587CF")),
    NetworkValues("Litecoin", "mainnet", "LTC", b'\xb0', b'\x30', None, None, None),
    NetworkValues("Dogecoin", "mainnet", "DOGE", b'\x9e', b'\x1e', b'\x16',
                  h2b("02fda4e8"), h2b("02fda923")),
    # BlackCoin: unsure about bip32 prefixes; assuming will use Bitcoin's
    NetworkValues("Blackcoin", "mainnet", "BLK", b'\x99', b'\x19', None, h2b("0488ADE4"), h2b("0488B21E")),
)
# Map from short code to details about that network.
NETWORK_NAME_LOOKUP = dict((i.code, i) for i in NETWORKS)
# All network names, return in same order as list above: for UI purposes.
NETWORK_NAMES = [i.code for i in NETWORKS]
DEFAULT_NETCODES = NETWORK_NAMES
def _lookup(netcode, property):
    # Lookup a specific value needed for a specific network
    network = NETWORK_NAME_LOOKUP.get(netcode)
    if network:
        return getattr(network, property)
    return None
def network_name_for_netcode(netcode):
    return _lookup(netcode, "network_name")
def subnet_name_for_netcode(netcode):
    return _lookup(netcode, "subnet_name")
def full_network_name_for_netcode(netcode):
    network = NETWORK_NAME_LOOKUP[netcode]
    if network:
        return "%s %s" % (network.network_name, network.subnet_name)
def wif_prefix_for_netcode(netcode):
    return _lookup(netcode, "wif")
def address_prefix_for_netcode(netcode):
    return _lookup(netcode, "address")
def pay_to_script_prefix_for_netcode(netcode):
    return _lookup(netcode, "pay_to_script")
def prv32_prefix_for_netcode(netcode):
    return _lookup(netcode, "prv32")
def pub32_prefix_for_netcode(netcode):
    return _lookup(netcode, "pub32")
| mit | Python |
1de668219f618a0632fac80fd892a0a229b8fa05 | Solve Code Fights addition without carrying problem | HKuz/Test_Code | CodeFights/additionWithoutCarrying.py | CodeFights/additionWithoutCarrying.py | #!/usr/local/bin/python
# Code Fights Addition Without Carrying Problem
def additionWithoutCarrying(param1, param2):
    s1, s2 = str(param1), str(param2)
    shorter = s1 if len(s1) < len(s2) else s2
    longer = s2 if shorter == s1 else s1
    if len(shorter) < len(longer):
        shorter = shorter.zfill(len(longer))
    return int(''.join([str(int(a) + int(b))[-1] for (a, b) in
                        zip(shorter, longer)]))
def main():
    tests = [
        [456, 1734, 1180],
        [99999, 0, 99999],
        [999, 999, 888],
        [0, 0, 0],
        [54321, 54321, 8642]
    ]
    for t in tests:
        res = additionWithoutCarrying(t[0], t[1])
        ans = t[2]
        if ans == res:
            print("PASSED: additionWithoutCarrying({}, {}) returned {}"
                  .format(t[0], t[1], res))
        else:
            print("FAILED: additionWithoutCarrying({}, {}) returned {}, "
                  "answer: {}".format(t[0], t[1], res, ans))
if __name__ == '__main__':
    main()
| mit | Python |
|
ed5c27623711a7f3b798aed9c0f7cdbdcebc0dcd | test python interpreter | eevee/cocos2d-mirror | test/test_interpreter_layer.py | test/test_interpreter_layer.py | # This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
import cocos
from cocos.director import director
import pyglet
if __name__ == "__main__":
    director.init()
    interpreter_layer = cocos.layer.InterpreterLayer()
    main_scene = cocos.scene.Scene(interpreter_layer)
    director.run(main_scene)
| bsd-3-clause | Python |
|
7f4bd900d1e647fe017ce4c01e279dd41a71a349 | Add management command to set SoftwareSecure verification status. | procangroup/edx-platform,fintech-circle/edx-platform,devs1991/test_edx_docmode,a-parhom/edx-platform,pomegranited/edx-platform,zubair-arbi/edx-platform,JCBarahona/edX,naresh21/synergetics-edx-platform,jjmiranda/edx-platform,a-parhom/edx-platform,deepsrijit1105/edx-platform,IONISx/edx-platform,mitocw/edx-platform,zubair-arbi/edx-platform,ovnicraft/edx-platform,TeachAtTUM/edx-platform,edx-solutions/edx-platform,Livit/Livit.Learn.EdX,alu042/edx-platform,msegado/edx-platform,eduNEXT/edx-platform,philanthropy-u/edx-platform,waheedahmed/edx-platform,BehavioralInsightsTeam/edx-platform,cognitiveclass/edx-platform,Lektorium-LLC/edx-platform,philanthropy-u/edx-platform,lduarte1991/edx-platform,RPI-OPENEDX/edx-platform,jzoldak/edx-platform,halvertoluke/edx-platform,eduNEXT/edunext-platform,ampax/edx-platform,waheedahmed/edx-platform,devs1991/test_edx_docmode,halvertoluke/edx-platform,jbzdak/edx-platform,pomegranited/edx-platform,TeachAtTUM/edx-platform,procangroup/edx-platform,Endika/edx-platform,10clouds/edx-platform,ampax/edx-platform,ZLLab-Mooc/edx-platform,Endika/edx-platform,synergeticsedx/deployment-wipro,fintech-circle/edx-platform,edx-solutions/edx-platform,bigdatauniversity/edx-platform,msegado/edx-platform,IONISx/edx-platform,proversity-org/edx-platform,pomegranited/edx-platform,nttks/edx-platform,louyihua/edx-platform,CredoReference/edx-platform,Edraak/circleci-edx-platform,arbrandes/edx-platform,louyihua/edx-platform,adoosii/edx-platform,longmen21/edx-platform,defance/edx-platform,edx/edx-platform,bigdatauniversity/edx-platform,zubair-arbi/edx-platform,Edraak/edx-platform,vikas1885/test1,doganov/edx-platform,ZLLab-Mooc/edx-platform,JioEducation/edx-platform,shabab12/edx-platform,devs1991/test_edx_docmode,Stanford-Online/edx-platform,kmoocdev2/edx-platform,amir-qayyum-khan/edx-platform,ZLLab-Mooc/edx-platform,Stanford-Online/edx-platform,Edraak/edraak-platform,BehavioralInsightsTeam/edx-platform,romain-li/edx-platform,adoosii/edx-platform,mitocw/edx-platform,Edraak/edraak-platform,vikas1885/test1,vikas1885/test1,Edraak/edraak-platform,hastexo/edx-platform,gymnasium/edx-platform,iivic/BoiseStateX,romain-li/edx-platform,romain-li/edx-platform,philanthropy-u/edx-platform,Livit/Livit.Learn.EdX,jjmiranda/edx-platform,procangroup/edx-platform,Lektorium-LLC/edx-platform,angelapper/edx-platform,tanmaykm/edx-platform,IndonesiaX/edx-platform,solashirai/edx-platform,solashirai/edx-platform,devs1991/test_edx_docmode,ahmedaljazzar/edx-platform,itsjeyd/edx-platform,RPI-OPENEDX/edx-platform,hamzehd/edx-platform,teltek/edx-platform,defance/edx-platform,a-parhom/edx-platform,alexthered/kienhoc-platform,cecep-edu/edx-platform,edx/edx-platform,Ayub-Khan/edx-platform,eduNEXT/edx-platform,mitocw/edx-platform,deepsrijit1105/edx-platform,solashirai/edx-platform,miptliot/edx-platform,mcgachey/edx-platform,chrisndodge/edx-platform,franosincic/edx-platform,jjmiranda/edx-platform,synergeticsedx/deployment-wipro,hastexo/edx-platform,raccoongang/edx-platform,pepeportela/edx-platform,msegado/edx-platform,gymnasium/edx-platform,amir-qayyum-khan/edx-platform,Edraak/edx-platform,ampax/edx-platform,pomegranited/edx-platform,stvstnfrd/edx-platform,eduNEXT/edunext-platform,angelapper/edx-platform,shurihell/testasia,Ayub-Khan/edx-platform,10clouds/edx-platform,alexthered/kienhoc-platform,alu042/edx-platform,Lektorium-LLC/edx-platform,BehavioralInsightsTeam/edx-platform,lduarte1991/edx-platform,simbs/edx-platform,jbzd
ak/edx-platform,tanmaykm/edx-platform,proversity-org/edx-platform,antoviaque/edx-platform,waheedahmed/edx-platform,nttks/edx-platform,halvertoluke/edx-platform,caesar2164/edx-platform,eduNEXT/edunext-platform,waheedahmed/edx-platform,nttks/edx-platform,Edraak/edx-platform,MakeHer/edx-platform,shabab12/edx-platform,alu042/edx-platform,halvertoluke/edx-platform,Stanford-Online/edx-platform,cpennington/edx-platform,caesar2164/edx-platform,TeachAtTUM/edx-platform,gsehub/edx-platform,eduNEXT/edx-platform,Edraak/circleci-edx-platform,pepeportela/edx-platform,cognitiveclass/edx-platform,zhenzhai/edx-platform,Edraak/edx-platform,adoosii/edx-platform,itsjeyd/edx-platform,alexthered/kienhoc-platform,eduNEXT/edunext-platform,inares/edx-platform,pabloborrego93/edx-platform,zubair-arbi/edx-platform,Edraak/circleci-edx-platform,franosincic/edx-platform,kmoocdev2/edx-platform,doganov/edx-platform,ahmedaljazzar/edx-platform,doganov/edx-platform,Edraak/circleci-edx-platform,hamzehd/edx-platform,ESOedX/edx-platform,simbs/edx-platform,miptliot/edx-platform,Ayub-Khan/edx-platform,IONISx/edx-platform,analyseuc3m/ANALYSE-v1,appsembler/edx-platform,analyseuc3m/ANALYSE-v1,inares/edx-platform,ahmedaljazzar/edx-platform,EDUlib/edx-platform,mbareta/edx-platform-ft,analyseuc3m/ANALYSE-v1,simbs/edx-platform,CourseTalk/edx-platform,fintech-circle/edx-platform,pabloborrego93/edx-platform,gsehub/edx-platform,wwj718/edx-platform,adoosii/edx-platform,antoviaque/edx-platform,MakeHer/edx-platform,pabloborrego93/edx-platform,miptliot/edx-platform,edx/edx-platform,doganov/edx-platform,mbareta/edx-platform-ft,lduarte1991/edx-platform,fintech-circle/edx-platform,caesar2164/edx-platform,devs1991/test_edx_docmode,MakeHer/edx-platform,jbzdak/edx-platform,synergeticsedx/deployment-wipro,arbrandes/edx-platform,shabab12/edx-platform,xingyepei/edx-platform,adoosii/edx-platform,ovnicraft/edx-platform,inares/edx-platform,longmen21/edx-platform,cecep-edu/edx-platform,xingyepei/edx-platform,stvstnfrd/edx-platform,devs1991/test_edx_docmode,iivic/BoiseStateX,alexthered/kienhoc-platform,CourseTalk/edx-platform,devs1991/test_edx_docmode,ovnicraft/edx-platform,kmoocdev2/edx-platform,edx-solutions/edx-platform,Endika/edx-platform,IndonesiaX/edx-platform,IndonesiaX/edx-platform,louyihua/edx-platform,kmoocdev2/edx-platform,ZLLab-Mooc/edx-platform,arbrandes/edx-platform,chrisndodge/edx-platform,mcgachey/edx-platform,proversity-org/edx-platform,alu042/edx-platform,jolyonb/edx-platform,bigdatauniversity/edx-platform,pepeportela/edx-platform,Endika/edx-platform,cecep-edu/edx-platform,philanthropy-u/edx-platform,Ayub-Khan/edx-platform,longmen21/edx-platform,a-parhom/edx-platform,JCBarahona/edX,jzoldak/edx-platform,itsjeyd/edx-platform,mcgachey/edx-platform,simbs/edx-platform,IONISx/edx-platform,angelapper/edx-platform,TeachAtTUM/edx-platform,UOMx/edx-platform,louyihua/edx-platform,tanmaykm/edx-platform,edx/edx-platform,hastexo/edx-platform,EDUlib/edx-platform,wwj718/edx-platform,IndonesiaX/edx-platform,cpennington/edx-platform,gymnasium/edx-platform,shurihell/testasia,franosincic/edx-platform,eduNEXT/edx-platform,antoviaque/edx-platform,jolyonb/edx-platform,deepsrijit1105/edx-platform,IndonesiaX/edx-platform,CredoReference/edx-platform,ahmadiga/min_edx,nttks/edx-platform,CredoReference/edx-platform,cecep-edu/edx-platform,BehavioralInsightsTeam/edx-platform,ahmadiga/min_edx,10clouds/edx-platform,stvstnfrd/edx-platform,Edraak/edraak-platform,zhenzhai/edx-platform,raccoongang/edx-platform,arbrandes/edx-platform,deepsrijit1105/edx-platform,hamzehd/edx-platf
orm,naresh21/synergetics-edx-platform,solashirai/edx-platform,RPI-OPENEDX/edx-platform,Lektorium-LLC/edx-platform,pepeportela/edx-platform,JCBarahona/edX,Edraak/circleci-edx-platform,prarthitm/edxplatform,iivic/BoiseStateX,franosincic/edx-platform,appsembler/edx-platform,waheedahmed/edx-platform,analyseuc3m/ANALYSE-v1,longmen21/edx-platform,bigdatauniversity/edx-platform,mcgachey/edx-platform,franosincic/edx-platform,pomegranited/edx-platform,shurihell/testasia,Livit/Livit.Learn.EdX,chrisndodge/edx-platform,Livit/Livit.Learn.EdX,ESOedX/edx-platform,UOMx/edx-platform,teltek/edx-platform,Edraak/edx-platform,gymnasium/edx-platform,xingyepei/edx-platform,shabab12/edx-platform,msegado/edx-platform,teltek/edx-platform,zubair-arbi/edx-platform,JCBarahona/edX,hamzehd/edx-platform,prarthitm/edxplatform,msegado/edx-platform,Ayub-Khan/edx-platform,10clouds/edx-platform,ahmadiga/min_edx,marcore/edx-platform,kursitet/edx-platform,ovnicraft/edx-platform,bigdatauniversity/edx-platform,naresh21/synergetics-edx-platform,wwj718/edx-platform,RPI-OPENEDX/edx-platform,appsembler/edx-platform,cognitiveclass/edx-platform,kursitet/edx-platform,edx-solutions/edx-platform,doganov/edx-platform,mcgachey/edx-platform,prarthitm/edxplatform,synergeticsedx/deployment-wipro,itsjeyd/edx-platform,EDUlib/edx-platform,shurihell/testasia,miptliot/edx-platform,alexthered/kienhoc-platform,raccoongang/edx-platform,jbzdak/edx-platform,romain-li/edx-platform,JioEducation/edx-platform,CourseTalk/edx-platform,pabloborrego93/edx-platform,defance/edx-platform,vikas1885/test1,marcore/edx-platform,jzoldak/edx-platform,ahmadiga/min_edx,nttks/edx-platform,JioEducation/edx-platform,UOMx/edx-platform,gsehub/edx-platform,jjmiranda/edx-platform,JioEducation/edx-platform,simbs/edx-platform,JCBarahona/edX,iivic/BoiseStateX,ESOedX/edx-platform,EDUlib/edx-platform,MakeHer/edx-platform,mitocw/edx-platform,xingyepei/edx-platform,caesar2164/edx-platform,kursitet/edx-platform,zhenzhai/edx-platform,kursitet/edx-platform,teltek/edx-platform,MakeHer/edx-platform,tanmaykm/edx-platform,chrisndodge/edx-platform,jbzdak/edx-platform,hamzehd/edx-platform,ampax/edx-platform,CourseTalk/edx-platform,UOMx/edx-platform,amir-qayyum-khan/edx-platform,Stanford-Online/edx-platform,ESOedX/edx-platform,hastexo/edx-platform,cognitiveclass/edx-platform,naresh21/synergetics-edx-platform,solashirai/edx-platform,cpennington/edx-platform,IONISx/edx-platform,cecep-edu/edx-platform,marcore/edx-platform,iivic/BoiseStateX,jolyonb/edx-platform,jzoldak/edx-platform,lduarte1991/edx-platform,longmen21/edx-platform,raccoongang/edx-platform,proversity-org/edx-platform,wwj718/edx-platform,inares/edx-platform,kmoocdev2/edx-platform,wwj718/edx-platform,mbareta/edx-platform-ft,halvertoluke/edx-platform,devs1991/test_edx_docmode,cpennington/edx-platform,shurihell/testasia,kursitet/edx-platform,ahmadiga/min_edx,cognitiveclass/edx-platform,jolyonb/edx-platform,inares/edx-platform,prarthitm/edxplatform,antoviaque/edx-platform,xingyepei/edx-platform,RPI-OPENEDX/edx-platform,defance/edx-platform,CredoReference/edx-platform,procangroup/edx-platform,zhenzhai/edx-platform,stvstnfrd/edx-platform,amir-qayyum-khan/edx-platform,vikas1885/test1,romain-li/edx-platform,marcore/edx-platform,angelapper/edx-platform,appsembler/edx-platform,ovnicraft/edx-platform,zhenzhai/edx-platform,ZLLab-Mooc/edx-platform,mbareta/edx-platform-ft,gsehub/edx-platform,ahmedaljazzar/edx-platform | lms/djangoapps/verify_student/management/commands/set_software_secure_status.py | 
lms/djangoapps/verify_student/management/commands/set_software_secure_status.py | """
Manually set Software Secure verification status.
"""
import sys
from django.core.management.base import BaseCommand
from verify_student.models import (
    SoftwareSecurePhotoVerification, VerificationCheckpoint, VerificationStatus
)
class Command(BaseCommand):
    """
    Command to trigger the actions that would normally follow Software Secure
    returning with the results of a photo verification.
    """
    args = "<{approved, denied}, SoftwareSecurePhotoVerification id, [reason_for_denial]>"
    def handle(self, *args, **kwargs):  # pylint: disable=unused-argument
        from verify_student.views import _set_user_requirement_status
        status_to_set = args[0]
        receipt_id = args[1]
        try:
            attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
        except SoftwareSecurePhotoVerification.DoesNotExist:
            self.stderr.write(
                'SoftwareSecurePhotoVerification with id {id} could not be found.\n'.format(id=receipt_id)
            )
            sys.exit(1)
        if status_to_set == 'approved':
            self.stdout.write('Approving verification for {id}.\n'.format(id=receipt_id))
            attempt.approve()
            _set_user_requirement_status(attempt, 'reverification', 'satisfied')
        elif status_to_set == 'denied':
            self.stdout.write('Denying verification for {id}.\n'.format(id=receipt_id))
            if len(args) >= 3:
                reason_for_denial = args[2]
            else:
                reason_for_denial = 'Denied via management command.'
            attempt.deny(reason_for_denial)
            _set_user_requirement_status(attempt, 'reverification', 'failed', reason_for_denial)
        else:
            self.stdout.write('Cannot set id {id} to unrecognized status {status}'.format(
                id=receipt_id, status=status_to_set
            ))
            sys.exit(1)
        checkpoints = VerificationCheckpoint.objects.filter(photo_verification=attempt).all()
        VerificationStatus.add_status_from_checkpoints(
            checkpoints=checkpoints,
            user=attempt.user,
            status=status_to_set
        )
| agpl-3.0 | Python |
|
4dd66150c922e1c700fad74727955ef72c045f37 | Add Find Command MCEdit filter | satgo1546/dot-product,satgo1546/dot-product,satgo1546/dot-product,satgo1546/dot-product,satgo1546/dot-product | minecraft/FindCommand.py | minecraft/FindCommand.py | # MCEdit filter
from albow import alert
displayName = "Find Command"
inputs = (
    ("Command:", ("string", "value=")),
)
def perform(level, box, options):
    command = options["Command:"]
    n = 0
    result = ""
    for (chunk, slices, point) in level.getChunkSlices(box):
        for e in chunk.TileEntities:
            x = e["x"].value
            y = e["y"].value
            z = e["z"].value
            if (x, y, z) in box:
                t = e["id"].value
                if t == "Control":
                    c = e["Command"].value
                    if c.find(command) >= 0:
                        n += 1
                        result += "(%d, %d, %d) %s\n" % (x, y, z, c)
    result += "(%d)" % n
    alert(result)
| mit | Python |
|
5e4ef4737c78b6154596ab8c76c4e60bd840453c | Add component.navbar | CodeForPhilly/chime,CodeForPhilly/chime,CodeForPhilly/chime | src/penn_chime_dash/app/components/navbar.py | src/penn_chime_dash/app/components/navbar.py | # components/navbar.py
import dash_bootstrap_components as dbc
import dash_html_components as html
import dash_core_components as dcc
from ..config import Config
cfg = Config()
navbar = dbc.NavbarSimple(
    brand='Penn Med CHIME',  # Browser window title
    brand_href='/',  # index page
    children=[
        html.Link(
            key='penn-med-header',
            rel="stylesheet",
            href=cfg.PENN_HEADER,
        ),
        dbc.NavItem(
            dcc.Link(
                'Model',
                href='/CHIME',
                className='nav-link'
            )
        ),
        dbc.NavItem(
            dcc.Link(
                'Contribute',
                href='https://codeforphilly.github.io/chime/',
                className='nav-link'
            )
        ),
        dbc.NavItem(
            dcc.Link(
                'Resources',
                href='/resources',
                className='nav-link'
            )
        ),
        dbc.NavItem(
            dcc.Link(
                'Contact',
                href=cfg.PENN_MED_URL,
                className='nav-link'
            )
        ),
    ],
    sticky='top',
    color='primary',
    light=True,
    dark=False
)
| mit | Python |
|
eea33e6207da7446e1713eb4d78b76d37ae5eaf2 | Add sample of scheduler using celery | jovannypcg/python_scheduler | with_celery.py | with_celery.py | from celery import Celery
# The host in which RabbitMQ is running
HOST = 'amqp://guest@localhost'
app = Celery('pages_celery', broker=HOST)
@app.task
def work(msg):
    print msg
# To execute the task:
#
# $ python
# >>> from with_celery import work
# >>> work.delay('Hi there!!')
| apache-2.0 | Python |
|
7ca1f6c5d51f5e2fc582603012c3ca5a053ee4eb | Add BLT package (#19410) | iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/blt/package.py | var/spack/repos/builtin/packages/blt/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Blt(Package):
    """BLT is a streamlined CMake-based foundation for Building, Linking and
    Testing large-scale high performance computing (HPC) applications."""
    homepage = "https://github.com/LLNL/blt"
    url = "https://github.com/LLNL/blt/archive/v0.3.6.tar.gz"
    git = "https://github.com/LLNL/blt.git"
    maintainers = ['davidbeckingsale']
    version('develop', branch='develop')
    version('main', branch='main')
    version('0.3.6', sha256='6276317c29e7ff8524fbea47d9288ddb40ac06e9f9da5e878bf9011e2c99bf71')
    version('0.3.5', sha256='68a1c224bb9203461ae6f5ab0ff3c50b4a58dcce6c2d2799489a1811f425fb84')
    version('0.3.0', sha256='bb917a67cb7335d6721c997ba9c5dca70506006d7bba5e0e50033dd0836481a5')
    version('0.2.5', sha256='3a000f60194e47b3e5623cc528cbcaf88f7fea4d9620b3c7446ff6658dc582a5')
    version('0.2.0', sha256='c0cadf1269c2feb189e398a356e3c49170bc832df95e5564e32bdbb1eb0fa1b3')
    depends_on('cmake', type='run')
    def install(self, spec, prefix):
        install_tree('.', prefix)
| lgpl-2.1 | Python |
|
4537ab84bb87eeae6b6865b7b9140d5324384e4a | add test cases for address operations | the-metaverse/metaverse,mvs-org/metaverse,mvs-org/metaverse,the-metaverse/metaverse,mvs-live/metaverse,mvs-org/metaverse,mvs-org/metaverse,mvs-org/metaverse,mvs-org/metaverse,mvs-live/metaverse,the-metaverse/metaverse | test/test-rpc/TestCase/Account/test_address.py | test/test-rpc/TestCase/Account/test_address.py | import random
from TestCase.MVSTestCase import *
class TestAccount(MVSTestCaseBase):
    roles = (Alice,)
    need_mine = False
    def test_0_new_address(self):
        # password error
        ec, message = mvs_rpc.new_address(Alice.name, Alice.password + '1')
        self.assertEqual(ec, 1000, message)
        # check address_count
        ec, message = mvs_rpc.new_address(Alice.name, Alice.password, 0)
        self.assertEqual(ec, 4004, message)
        ec, message = mvs_rpc.new_address(Alice.name, Alice.password, 0x00100000)
        self.assertEqual(ec, 4004, message)
        ec, message = mvs_rpc.new_address(Alice.name, Alice.password, 11)
        self.assertEqual(ec, 0, message)
    def test_1_list_addresses(self):
        # password error
        ec, message = mvs_rpc.list_addresses(Alice.name, Alice.password + '1')
        self.assertEqual(ec, 1000, message)
        ec, addresses = mvs_rpc.list_addresses(Alice.name, Alice.password)
        self.assertEqual(ec, 0, addresses)
        addresses.sort()
        alice_addresses = Alice.addresslist[:]
        alice_addresses.sort()
        self.assertEqual(addresses, alice_addresses)
    def test_2_check_address(self):
        for address in Alice.addresslist:
            ec, message = mvs_rpc.check_address(address)
            self.assertEqual(ec, 0, message) | agpl-3.0 | Python
|
de40e15b661806dc75e73bd9f1fc2c37af60b0d3 | test case for geometry utils | YannickDieter/testbeam_analysis,SiLab-Bonn/testbeam_analysis,SiLab-Bonn/testbeam_analysis,YannickDieter/testbeam_analysis,YannickDieter/testbeam_analysis,SiLab-Bonn/testbeam_analysis,SiLab-Bonn/testbeam_analysis,YannickDieter/testbeam_analysis | testbeam_analysis/tests/test_geometry_utils.py | testbeam_analysis/tests/test_geometry_utils.py | ''' Script to check the correctness of the geometry utils functions (rotation, translation matrices)
'''
import os
import numpy as np
import unittest
from testbeam_analysis import geometry_utils
tests_data_folder = r'tests/test_track_analysis/'
class TestTrackAnalysis(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        pass
    @classmethod
    def tearDownClass(cls):  # remove created files
        pass
    def test_transformations(self):  # Transforms from global to local system and back and checks for equality
        position = np.array([0, 0, 0])  # Position in global system to transform
        for position in (np.array([-1, -2, -3]), np.array([0, 1, 0]), np.array([3, 2, 1])):
            for x in range(-3, 4, 3):  # Loop over x translation values
                for y in range(-3, 4, 3):  # Loop over y translation values
                    for z in range(-3, 4, 3):  # Loop over z translation values
                        for alpha in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]:  # Loop x rotation values
                            for beta in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]:  # Loop y rotation values
                                for gamma in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]:  # Loop z rotation values
                                    position_g = np.array([position[0], position[1], position[2], 1])  # Extend global position dimension
                                    transformation_matrix_to_local = geometry_utils.global_to_local_transformation_matrix(x, y, z, alpha, beta, gamma)
                                    transformation_matrix_to_global = geometry_utils.local_to_global_transformation_matrix(x, y, z, alpha, beta, gamma)
                                    position_l = np.dot(transformation_matrix_to_local, position_g)  # Transform to local coordinate system
                                    position_g_result = np.dot(transformation_matrix_to_global, position_l)  # Transform back to global coordinate system
                                    self.assertTrue(np.allclose(position, np.array(position_g_result[:-1])))  # Finite precision needs equality check with finite precision
    def test_rotation_matrices(self):
        # Check that the rotation matrices in x, y, z have the features of a rotation matrix (det = 1, inverse = transposed matrix)
        for alpha in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]:  # Loop x rotation values
            rotation_matrix_x = geometry_utils.rotation_matrix_x(alpha)
            self.assertAlmostEqual(np.linalg.det(rotation_matrix_x), 1)
            self.assertTrue(np.allclose(rotation_matrix_x.T, np.linalg.inv(rotation_matrix_x)))
        for beta in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]:  # Loop y rotation values
            rotation_matrix_y = geometry_utils.rotation_matrix_y(beta)
            self.assertAlmostEqual(np.linalg.det(rotation_matrix_y), 1)
            self.assertTrue(np.allclose(rotation_matrix_y.T, np.linalg.inv(rotation_matrix_y)))
        for gamma in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]:  # Loop z rotation values
            rotation_matrix_z = geometry_utils.rotation_matrix_z(gamma)
            self.assertAlmostEqual(np.linalg.det(rotation_matrix_z), 1)
            self.assertTrue(np.allclose(rotation_matrix_z.T, np.linalg.inv(rotation_matrix_z)))
        # Check that the rotation matrix built from x, y, z rotation matrices has the features of a rotation matrix (det = 1, inverse = transposed matrix)
        for alpha in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]:  # Loop x rotation values
            for beta in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]:  # Loop y rotation values
                for gamma in [0, np.pi / 4., np.pi / 3., np.pi / 2., 3 * np.pi / 4., np.pi, 4. * np.pi / 3.]:  # Loop z rotation values
                    rotation_matrix = geometry_utils.rotation_matrix(alpha, beta, gamma)
                    self.assertAlmostEqual(np.linalg.det(rotation_matrix), 1)
                    self.assertTrue(np.allclose(rotation_matrix.T, np.linalg.inv(rotation_matrix)))
if __name__ == '__main__':
    tests_data_folder = r'test_track_analysis/'
    suite = unittest.TestLoader().loadTestsFromTestCase(TestTrackAnalysis)
    unittest.TextTestRunner(verbosity=2).run(suite)
| mit | Python |