code | repo_name | path | language | license | size
---|---|---|---|---|---
from solving import bellmansolving
import numpy as np
import pytest
from util import constants as cs
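# Mock inputs for the Bellman solver (interpretation inferred from the tests
# below): vep_stay/vfp_stay look like worker/firm values of staying, vup the
# unemployed worker's value, and vep_outside the value of each outside offer.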
@pytest.fixture(scope='module')
def init_mock():
class Mock():
pass
mock = Mock()
mock.vep_stay = np.ones(10, dtype=np.float64)
mock.vfp_stay = np.ones(10, dtype=np.float64)/1.3
mock.vup = np.zeros(10, dtype=np.float64)
mock.vep_outside = np.zeros((10, cs.NJOBS))
for i in range(10):
mock.vep_outside[i,:] = i/10 + np.linspace(0,1, num=cs.NJOBS)
mock.lambda_u = .4
mock.offer_probs = np.ones(cs.NJOBS)/cs.NJOBS
return mock
@pytest.mark.usefixtures("init_mock")
class TestBellmanSolving(object):
def test_emax_outsideoffer(self, init_mock):
ecv_e, ecv_f, stayprobs = bellmansolving.emax_outsideoffer(init_mock.vep_stay,
init_mock.vfp_stay,
init_mock.vup,
init_mock.vep_outside,
init_mock.offer_probs)
assert np.all(ecv_e>=(init_mock.vep_stay - 1e-8))
assert np.allclose(ecv_e, np.array([ 1., 1.01111111, 1.03055556, 1.05833333, 1.09444444,
1.13888889, 1.19444444, 1.25833333, 1.33055556, 1.41111111]))
def test_emax_no_outside_offer(self, init_mock):
ecv_e, ecv_f = bellmansolving.emax_if_no_outside_offer(init_mock.vep_stay,
init_mock.vfp_stay,
init_mock.vup)
assert np.allclose(ecv_e, np.ones(10))
def test_emax_unemployed(self, init_mock):
ecv_unemp_worker = bellmansolving.emax_unemployed(init_mock.vup,
init_mock.vep_outside,
init_mock.lambda_u,
init_mock.offer_probs)
assert np.allclose(ecv_unemp_worker, np.array([0.19 , 0.228, 0.266, 0.304,
0.342, 0.38 , 0.418, 0.456, 0.494,
                                       0.532]))
| mishpat/human-capital-search | tests/bellmansolving_test.py | Python | mit | 2,285 |
# Copyright 2016 Thomas C. Hudson
# Governed by the license described in LICENSE.txt
import libtcodpy as libtcod
import log
import algebra
from components import *
import actions
import map
import spells
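# Each factory below returns a fully assembled Object (glyph, name, colour)
# with Item and/or Equipment components attached; the position defaults to
# algebra.Location(0, 0) so callers can place the item afterwards.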
def dagger():
return Object(algebra.Location(0, 0), '-', 'dagger', libtcod.sky,
item=Item(description='A leaf-shaped bronze knife; provides +2 Attack'),
equipment=Equipment(slot='right hand', power_bonus=2))
def healing_potion(pos=algebra.Location(0, 0)):
return Object(pos, '!', 'healing potion', libtcod.violet,
item=Item(use_function=spells.cast_heal,
description='A flask of revivifying alchemical mixtures; heals ' + str(spells.HEAL_AMOUNT) + ' hp.'))
def lightning_scroll(pos=algebra.Location(0, 0)):
return Object(pos, '#', 'scroll of lightning bolt', libtcod.light_yellow,
item=Item(use_function=spells.cast_lightning,
description='Reading these runes will strike your nearest foe with lightning for ' +
str(spells.LIGHTNING_DAMAGE) + ' hp.'))
def fireball_scroll(pos=algebra.Location(0, 0)):
return Object(pos, '#', 'scroll of fireball', libtcod.light_yellow,
item=Item(use_function=spells.cast_fireball,
description='Reading these runes will cause a burst of flame inflicting ' + str(spells.FIREBALL_DAMAGE) +
' hp on nearby creatures.'))
def confusion_scroll(pos=algebra.Location(0, 0)):
return Object(pos, '#', 'scroll of confusion', libtcod.light_yellow,
item=Item(use_function=spells.cast_confuse,
description='Reading these runes will confuse the creature you focus on for a short time.'))
def sword(pos=algebra.Location(0, 0)):
return Object(pos, '/', 'sword', libtcod.sky,
item=Item(description='A heavy-tipped bronze chopping sword; provides +3 Attack'),
equipment=Equipment(slot='right hand', power_bonus=3))
def shield(pos=algebra.Location(0, 0)):
return Object(pos, '[', 'shield', libtcod.darker_orange,
item=Item(description='A bronze-edged oval shield; provides +1 Defense'),
equipment=Equipment(slot='left hand', defense_bonus=1))
| Naburimannu/libtcodpy-tutorial | miscellany.py | Python | bsd-3-clause | 2,299 |
try:
import json
except ImportError:
from django.utils import simplejson as json
from django import http
from django import template
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from django.views.generic import DetailView, FormView
from .exceptions import WysiwygFormsException
from .models import Form
from .transactions import Transaction
__all__ = ("ApplyTransactions", "Edit", "WysiwygFormView")
class ApplyTransactions(DetailView):
"""
This view applies the transactions from the WYSIWYG client editor to save a
form.
"""
queryset = Form.objects.all()
context_object_name = "form"
http_method_names = ["post"]
def __init__(self, *args, **kwargs):
super(ApplyTransactions, self).__init__(*args, **kwargs)
self.error = None
self.post = self.get
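        # Only POST is allowed (see http_method_names); aliasing post to get
        # routes POST requests through DetailView's standard GET flow.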
def render_to_response(self, context):
return http.HttpResponse(json.dumps(context, cls=Form.JSONEncoder),
content_type="application/json",
status=self.get_status())
def get_object(self, **kwargs):
form = super(ApplyTransactions, self).get_object(**kwargs)
try:
for t in json.loads(self.request.POST.get("transactions", "[]")):
# Force non-unicode keys for older Pythons
                tt = dict((str(k), v) for k, v in t.items())
Transaction(**tt).apply_to(form)
form.save()
        except WysiwygFormsException as e:
self.error = e
return form
def get_context_data(self, **kwargs):
context = super(ApplyTransactions, self).get_context_data(**kwargs)
if self.error:
return { "error": self.error.message }
else:
return context["form"]
def get_status(self):
if self.error:
return 500
else:
return 200
class Edit(DetailView):
"""
This is the view for editing a form and sends down the client side WYSIWYG
editor. Expects `pk` as a keyword argument.
"""
template_name = "wysiwyg_forms/edit.html"
queryset = Form.objects.all()
context_object_name = "form"
# Customize `base_template_name` to change what template
# `wysiwyg_forms/edit.html` will extend. Change this instead of
# `template_name`.
base_template_name = "wysiwyg_forms/base.html"
# Customize `save_view_name` to change where the client side JS will POST
# the transactions which save form state to.
save_view_name = "wysiwyg_forms_apply_transactions"
def get_object(self, queryset=None):
try:
form = super(Edit, self).get_object(queryset)
except AttributeError:
form = Form.objects.create(name="New Form",
description="This is a new form.")
self.kwargs["pk"] = form.id
return form
def get_save_view_url(self):
"""
Returns the url for the view which is being used to save the form. By
default, uses `self.save_view_name` and the id of the form being edited
in a `django.core.urlresolvers.reverse()` look up.
"""
return reverse(self.save_view_name, args=[self.kwargs["pk"]])
def get_context_data(self, **kwargs):
context = super(Edit, self).get_context_data(**kwargs)
context["base_template_name"] = self.base_template_name
context["debug"] = settings.DEBUG
context["save_target"] = self.get_save_view_url()
return context
class WysiwygFormView(FormView):
"""
A thin wrapper around `django.views.generic.FormView`. Provide `form_id` or
`form_slug` to specify which `wysiwyg_forms.models.Form` instance to render
as a Django form. A good place to hook in your own functionality is by
subclassing this class and overriding/extending the `form_valid`
method. Look in to `django.views.generic.FormView` for more.
Example usage:
urlpatterns = patterns("",
# ...
url(r"^foo/$",
WysiwygFormView.as_view(form_id=42,
template_name="my_app/template.html",
success_url=reverse("my_success_view")),
name="my_form_view")
)
"""
form_id = None
form_slug = None
def get_wysiwyg_form(self):
if not (self.form_id or self.form_slug):
raise ImproperlyConfigured(
"Don't know how to find the correct WYSIWYG form for this view. Provide form_id or form_slug.")
if self.form_id and self.form_slug:
raise ImproperlyConfigured(
"Can not provide both a form_id and a form_slug.")
elif self.form_id:
return Form.objects.get(pk=self.form_id)
else:
return Form.objects.get(slug=self.form_slug)
def get_context_data(self, **kwargs):
ctx = super(WysiwygFormView, self).get_context_data(**kwargs)
wysiwyg_form = self.get_wysiwyg_form()
ctx["form_description"] = wysiwyg_form.description
ctx["form_name"] = wysiwyg_form.name
return ctx
def get_form_class(self):
return self.get_wysiwyg_form().as_django_form()
| wwu-housing/django-wysiwyg-forms | wysiwyg_forms/views.py | Python | mit | 5,501 |
# This demo does the same as the dyndispatch demo, except that a
# custom dispatcher loop is used. This is how asynchronous parallel
# optimization algorithms like DE and PSADE are implemented.
# mpirun -n 4 python 05-asyncloop.py
from __future__ import print_function
from pyopus.parallel.cooperative import cOS
from pyopus.parallel.mpi import MPI
from funclib import jobProcessor
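# jobProcessor (defined in funclib) takes a job value and returns the computed
# result (per the comments below, the value doubled).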
# Result at which we stop
stopAtResult=150
# Minimal and maximal number of parallel tasks
# The maximal number of parallel tasks can be infinite (set maxTasks to None)
minTasks=1
maxTasks=1000
if __name__=='__main__':
# Set up MPI
cOS.setVM(MPI())
    # This list will hold the jobs (values that are doubled)
jobs=[]
# This list will be filled with results
results=[]
# Stop the loop
stop=False
# Running task status storage
running={}
# Job index of next job
atJob=0
# Main loop
# Run until stop flag set and all tasks are joined
while not (stop and len(running)==0):
# Spawn tasks if slots are available and maximal number of tasks is not reached
# Spawn one task if there are no tasks
while (
# Spawn
not stop and (
# no tasks running, need at least one task, spawn
len(running)==0 or
# too few slaves in a parallel environment (number of slots > 0),
# force spawn regardless of the number of free slots
(cOS.slots()>0 and len(running)<minTasks) or
# free slots available and less than maximal slaves, spawn
(cOS.freeSlots()>0 and (maxTasks is None or len(running)<maxTasks))
)
):
# Job (value to double)
job=atJob
# Spawn a global search task
tid=cOS.Spawn(jobProcessor, args=[job], remote=True, block=True)
print "Spawned task", tid, "for job", job
# Store the job
running[tid]={
'index': atJob,
'job': job,
}
# Go to next job
atJob+=1
# Join jobs
tid,result = cOS.Join(block=True).popitem()
print "Received", result, "from", tid
        # Get status and remove it from the dictionary of running jobs
status=running[tid]
del running[tid]
index=status['index']
# Make space for the result
if index>=len(results):
results.extend([None]*(index+1-len(results)))
# Store result
results[index]=result
# Check if we need to stop
if result>=stopAtResult and not stop:
stop=True
print "Spawning no more tasks"
print("Results: "+str(results))
# Finish, need to do this if MPI is used
cOS.finalize()
| blorgon9000/pyopus | demo/parallel/cooperative/05-asyncloop.py | Python | gpl-3.0 | 2,431 |
from isochrones.dartmouth import Dartmouth_Isochrone
from isochrones.utils import addmags
import numpy as np
import pandas as pd
file = open('/tigress/np5/true_params.txt','a')
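# Zero-pad an index to four digits (equivalent to str(n).zfill(4)).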
def get_index(n):
if n < 10:
return '000' + str(n)
elif n < 100:
return '00' + str(n)
elif n < 1000:
return '0' + str(n)
else:
return str(n)
for n in range(0,1000,1):
index = get_index(n)
file.write('test: ' + index + '\n')
dar = Dartmouth_Isochrone()
array = np.random.rand(2) + 0.5
if array[0] > array[1]:
M1 = array[0]
M2 = array[1]
else:
M1 = array[1]
M2 = array[0]
age1 = np.log10(1e8)
age2 = np.log10(5e8)
feh1 = 0.0
array = 1400*np.random.rand(2) + 100
if array[0] > array[1]:
distance1 = array[0]
distance2 = array[1]
else:
distance1 = array[1]
distance2 = array[0]
AV1 = 0.0
feh2 = 0.2
AV2 = 0.1
params = (M1,M2,age1,age2,feh1,feh2,distance1,distance2,AV1,AV2)
params = str(params)
file.write('(M1,M2,age1,age2,feh1,feh2,distance1,distance2,AV1,AV2) = ' + params + '\n')
file.write('\n')
#Simulate true magnitudes
unresolved_bands = ['J','H','K']
resolved_bands = ['i','K']
args1 = (age1, feh1, distance1, AV1)
args2 = (age2, feh2, distance2, AV2)
unresolved = {b:addmags(dar.mag[b](M1, *args1), dar.mag[b](M2, *args2)) for b in unresolved_bands}
resolved_1 = {b:dar.mag[b](M1, *args1) for b in resolved_bands}
resolved_2 = {b:dar.mag[b](M2, *args2) for b in resolved_bands}
#print dar.mag['K'](M2, *args2)
#print unresolved, resolved_1, resolved_2
instruments = ['twomass','RAO']
bands = {'twomass':['J','H','K'],
'RAO':['i','K']}
mag_unc = {'twomass': 0.02, 'RAO':0.1}
resolution = {'twomass':4.0, 'RAO':0.1}
relative = {'twomass':False, 'RAO':True}
separation = 0.5
PA = 100.
columns = ['name', 'band', 'resolution', 'relative', 'separation', 'pa', 'mag', 'e_mag']
df = pd.DataFrame(columns=columns)
i=0
for inst in ['twomass']: #Unresolved observations
for b in bands[inst]:
row = {}
row['name'] = inst
row['band'] = b
row['resolution'] = resolution[inst]
row['relative'] = relative[inst]
row['separation'] = 0.
row['pa'] = 0.
row['mag'] = unresolved[b]
row['e_mag'] = mag_unc[inst]
df = df.append(pd.DataFrame(row, index=[i]))
i += 1
for inst in ['RAO']: #Resolved observations
for b in bands[inst]:
mags = [resolved_1[b], resolved_2[b]]
pas = [0, PA]
seps = [0., separation]
for mag,sep,pa in zip(mags,seps,pas):
row = {}
row['name'] = inst
row['band'] = b
row['resolution'] = resolution[inst]
row['relative'] = relative[inst]
row['separation'] = sep
row['pa'] = pa
row['mag'] = mag
row['e_mag'] = mag_unc[inst]
df = df.append(pd.DataFrame(row, index=[i]))
i += 1
#print df
df.to_csv(path_or_buf='/tigress/np5/df_binary_test{}.csv'.format(index))
file.close()
| nonsk131/USRP2016 | generate_tests0000-0999.py | Python | mit | 3,341 |
import enum
from typing import Dict, Optional, Set
@enum.unique
class MediaTag(enum.IntEnum):
# ndb keys are based on these! Don't change!
CHAIRMANS_VIDEO = 0
CHAIRMANS_PRESENTATION = 1
CHAIRMANS_ESSAY = 2
MEDIA_TAGS: Set[MediaTag] = {t for t in MediaTag}
TAG_NAMES: Dict[MediaTag, str] = {
MediaTag.CHAIRMANS_VIDEO: "Chairman's Video",
MediaTag.CHAIRMANS_PRESENTATION: "Chairman's Presentation",
MediaTag.CHAIRMANS_ESSAY: "Chairman's Essay",
}
TAG_URL_NAMES: Dict[MediaTag, str] = {
MediaTag.CHAIRMANS_VIDEO: "chairmans_video",
MediaTag.CHAIRMANS_PRESENTATION: "chairmans_presentation",
MediaTag.CHAIRMANS_ESSAY: "chairmans_essay",
}
CHAIRMANS_TAGS: Set[MediaTag] = {
MediaTag.CHAIRMANS_VIDEO,
MediaTag.CHAIRMANS_PRESENTATION,
MediaTag.CHAIRMANS_ESSAY,
}
def get_enum_from_url(url_name: str) -> Optional[MediaTag]:
inversed = {v: k for k, v in TAG_URL_NAMES.items()}
if url_name in inversed:
return inversed[url_name]
else:
return None
| the-blue-alliance/the-blue-alliance | src/backend/common/consts/media_tag.py | Python | mit | 1,026 |
#!/usr/bin/env python
import click
import salt.config
import salt.client
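# Command-line usage (per the click options defined below), with a
# placeholder key:
#   ./new_relic_click.py --install --key YOUR_NEWRELIC_KEY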
def salt_init():
opts = salt.config.apply_minion_config()
opts['file_client'] = 'local'
caller = salt.client.Caller(mopts=opts)
return caller
@click.command()
@click.option('-i', '--install',
help='Install new relic system monitor',
is_flag=True)
@click.option('-k', '--key', help='new relic data access key', default=False)
def main(install, key):
if not install:
click.echo('Try new_relic --help for useful information!')
else:
if not key:
key = click.prompt('NewRelic data access key')
caller = salt_init()
info = dict(
newrelic_url='http://download.newrelic.com/pub/newrelic/el5/i386/newrelic-repo-5-3.noarch.rpm',
newrelic_package='newrelic-sysmond',
newrelic_license_cmd=r"nrsysmond-config --set license_key='%(l_key)s'"
% {'l_key': key},
newrelic_start_cmd=r"/etc/init.d/newrelic-sysmond restart",
newrelic_chkconfig_cmd='chkconfig newrelic-sysmond on')
click.echo(caller.sminion.functions['pkg.install'](sources=[
{'repo': info['newrelic_url']}
]))
click.echo(caller.sminion.functions['pkg.install'](
info['newrelic_package'],
require=[{'pkg': info['newrelic_url']}]))
click.echo(
caller.sminion.functions['cmd.run'](info['newrelic_license_cmd']))
click.echo(
caller.sminion.functions['cmd.run'](info['newrelic_start_cmd']))
click.echo(
caller.sminion.functions['cmd.run'](info['newrelic_chkconfig_cmd'])
)
if __name__ == "__main__":
main()
| Abukamel/newrelic_ops | bin/new_relic_click.py | Python | mit | 1,728 |
#!/usr/bin/env python
#
# dnscan copyright (C) 2013-2014 rbsec
# Licensed under GPLv3, see LICENSE for details
#
from __future__ import print_function
import os
import re
import sys
import threading
import time
try: # Ugly hack because Python3 decided to rename Queue to queue
import Queue
except ImportError:
import queue as Queue
try:
import argparse
except:
print("FATAL: Module argparse missing (python-argparse)")
sys.exit(1)
try:
import dns.query
import dns.resolver
import dns.zone
except:
print("FATAL: Module dnspython missing (python-dnspython)")
sys.exit(1)
# Usage: dnscan.py -d <domain name>
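# Examples (all flags are defined in get_args below):
#   ./dnscan.py -d example.com
#   ./dnscan.py -d example.com -w subdomains.txt -t 16 -r -o results.txt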
class scanner(threading.Thread):
def __init__(self, queue):
global wildcard
threading.Thread.__init__(self)
self.queue = queue
def get_name(self, domain):
global wildcard
try:
if sys.stdout.isatty(): # Don't spam output if redirected
sys.stdout.write(domain + " \r")
sys.stdout.flush()
res = lookup(domain, recordtype)
for rdata in res:
if wildcard:
if rdata.address == wildcard:
return
print(rdata.address + " - " + col.brown + domain + col.end)
if outfile:
print(rdata.address + " - " + domain, file=outfile)
if domain != target and args.recurse: # Don't scan root domain twice
add_target(domain) # Recursively scan subdomains
except:
pass
def run(self):
while True:
try:
domain = self.queue.get(timeout=1)
except:
return
self.get_name(domain)
self.queue.task_done()
class output:
def status(self, message):
print(col.blue + "[*] " + col.end + message)
if outfile:
print("[*] " + message, file=outfile)
def good(self, message):
print(col.green + "[+] " + col.end + message)
if outfile:
print("[+] " + message, file=outfile)
def verbose(self, message):
if args.verbose:
print(col.brown + "[v] " + col.end + message)
if outfile:
print("[v] " + message, file=outfile)
def warn(self, message):
print(col.red + "[-] " + col.end + message)
if outfile:
print("[-] " + message, file=outfile)
def fatal(self, message):
print("\n" + col.red + "FATAL: " + message + col.end)
if outfile:
print("FATAL " + message, file=outfile)
class col:
if sys.stdout.isatty():
green = '\033[32m'
blue = '\033[94m'
red = '\033[31m'
brown = '\033[33m'
end = '\033[0m'
else: # Colours mess up redirected output, disable them
green = ""
blue = ""
red = ""
brown = ""
end = ""
def lookup(domain, recordtype):
try:
res = resolver.query(domain, recordtype)
return res
except:
return
def get_wildcard(target):
# Use current unix time as a test subdomain
epochtime = str(int(time.time()))
res = lookup(epochtime + "." + target, recordtype)
if res:
out.good(col.red + "Wildcard" + col.end + " domain found - " + col.brown + res[0].address + col.end)
return res[0].address
else:
out.verbose("No wildcard domain found")
def get_nameservers(target):
try:
ns = resolver.query(target, 'NS')
return ns
except:
return
def get_txt(target):
out.verbose("Getting TXT records")
try:
res = lookup(target, "TXT")
if res:
out.good("TXT records found")
for txt in res:
print(txt)
if outfile:
print(txt, file=outfile)
except:
return
def get_mx(target):
out.verbose("Getting MX records")
try:
res = lookup(target, "MX")
except:
return
# Return if we don't get any MX records back
if not res:
return
out.good("MX records found, added to target list")
for mx in res:
print(mx.to_text())
if outfile:
print(mx.to_text(), file=outfile)
        mxsub = re.search(r"([a-z0-9\.\-]+)\." + target, mx.to_text(), re.IGNORECASE)
try:
if mxsub.group(1) and mxsub.group(1) not in wordlist:
queue.put(mxsub.group(1) + "." + target)
except AttributeError:
pass
def zone_transfer(domain, ns):
out.verbose("Trying zone transfer against " + str(ns))
try:
zone = dns.zone.from_xfr(dns.query.xfr(str(ns), domain, relativize=False),
relativize=False)
out.good("Zone transfer sucessful using nameserver " + col.brown + str(ns) + col.end)
names = list(zone.nodes.keys())
names.sort()
for n in names:
print(zone[n].to_text(n)) # Print raw zone
if outfile:
print(zone[n].to_text(n), file=outfile)
sys.exit(0)
except Exception:
pass
def add_target(domain):
for word in wordlist:
queue.put(word + "." + domain)
def get_args():
global args
parser = argparse.ArgumentParser('dnscan.py', formatter_class=lambda prog:argparse.HelpFormatter(prog,max_help_position=40))
parser.add_argument('-d', '--domain', help='Target domain', dest='domain', required=True)
parser.add_argument('-w', '--wordlist', help='Wordlist', dest='wordlist', required=False)
parser.add_argument('-t', '--threads', help='Number of threads', dest='threads', required=False, type=int, default=8)
parser.add_argument('-6', '--ipv6', help='Scan for AAAA records', action="store_true", dest='ipv6', required=False, default=False)
parser.add_argument('-z', '--zonetransfer', action="store_true", default=False, help='Only perform zone transfers', dest='zonetransfer', required=False)
parser.add_argument('-r', '--recursive', action="store_true", default=False, help="Recursively scan subdomains", dest='recurse', required=False)
parser.add_argument('-o', '--output', help="Write output to a file", dest='output_filename', required=False)
parser.add_argument('-v', '--verbose', action="store_true", default=False, help='Verbose mode', dest='verbose', required=False)
args = parser.parse_args()
def setup():
global target, wordlist, queue, resolver, recordtype, outfile
target = args.domain
if not args.wordlist: # Try to use default wordlist if non specified
args.wordlist = os.path.join(os.path.dirname(os.path.realpath(__file__)), "subdomains.txt")
try:
wordlist = open(args.wordlist).read().splitlines()
except:
out.fatal("Could not open wordlist " + args.wordlist)
sys.exit(1)
# Open file handle for output
try:
outfile = open(args.output_filename, "w")
except TypeError:
outfile = None
except IOError:
out.fatal("Could not open output file: " + args.output_filename)
sys.exit(1)
# Number of threads should be between 1 and 32
if args.threads < 1:
args.threads = 1
elif args.threads > 32:
args.threads = 32
queue = Queue.Queue()
resolver = dns.resolver.Resolver()
resolver.timeout = 1
# Record type
if args.ipv6:
recordtype = 'AAAA'
else:
recordtype = 'A'
if __name__ == "__main__":
global wildcard
out = output()
get_args()
setup()
queue.put(target) # Add actual domain as well as subdomains
nameservers = get_nameservers(target)
out.good("Getting nameservers")
targetns = [] # NS servers for target
    try:  # Subdomains often don't have NS records.
for ns in nameservers:
            ns = str(ns)[:-1]  # Remove trailing dot
res = lookup(ns, "A")
for rdata in res:
targetns.append(rdata.address)
print(rdata.address + " - " + col.brown + ns + col.end)
if outfile:
print(rdata.address + " - " + ns, file=outfile)
zone_transfer(target, ns)
except SystemExit:
sys.exit(0)
except:
out.warn("Getting nameservers failed")
        # resolver.nameservers = targetns  # Use target's NS servers for lookups
# Missing results using domain's NS - removed for now
out.warn("Zone transfer failed")
if args.zonetransfer:
sys.exit(0)
get_txt(target)
get_mx(target)
wildcard = get_wildcard(target)
out.status("Scanning " + target + " for " + recordtype + " records")
add_target(target)
for i in range(args.threads):
t = scanner(queue)
t.setDaemon(True)
t.start()
try:
for i in range(args.threads):
t.join(1024) # Timeout needed or threads ignore exceptions
except KeyboardInterrupt:
out.fatal("Caught KeyboardInterrupt, quitting...")
sys.exit(1)
| elationfoundation/dnscan | dnscan.py | Python | gpl-3.0 | 9,150 |
# -*- coding: utf-8 -*-
import codecs
import fnmatch
import json
import os
from pyquery import PyQuery
import logging
log = logging.getLogger(__name__)
def process_all_json_files(version, build_dir=True):
"""
Return a list of pages to index
"""
if build_dir:
full_path = version.project.full_json_path(version.slug)
else:
full_path = version.project.get_production_media_path(
type='json', version_slug=version.slug, include_file=False)
html_files = []
for root, dirs, files in os.walk(full_path):
for filename in fnmatch.filter(files, '*.fjson'):
if filename in ['search.fjson', 'genindex.fjson', 'py-modindex.fjson']:
continue
html_files.append(os.path.join(root, filename))
page_list = []
for filename in html_files:
try:
result = process_file(filename)
if result:
page_list.append(result)
except:
pass
return page_list
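# A Sphinx .fjson page is a JSON document; the keys used here include
# 'current_page_name', 'title', 'toc' (HTML) and 'body' (HTML). An
# illustrative (not exhaustive) example:
#   {"current_page_name": "intro", "title": "Intro",
#    "toc": "<ul>...</ul>", "body": "<div class='section'>...</div>"}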
def process_file(filename):
try:
with codecs.open(filename, encoding='utf-8', mode='r') as f:
file_contents = f.read()
except IOError as e:
        log.info('Unable to index file: %s, error: %s' % (filename, e))
return
data = json.loads(file_contents)
headers = []
sections = []
content = ''
title = ''
body_content = ''
if 'current_page_name' in data:
path = data['current_page_name']
else:
log.info('Unable to index file due to no name %s' % filename)
return None
if 'toc' in data:
for element in PyQuery(data['toc'])('a'):
headers.append(recurse_while_none(element))
if None in headers:
log.info('Unable to index file headers for: %s' % filename)
if 'body' in data and len(data['body']):
body = PyQuery(data['body'])
body_content = body.text().replace(u'¶', '')
# Capture text inside h1 before the first h2
h1_section = body('.section > h1')
if h1_section:
div = h1_section.parent()
h1_title = h1_section.text().replace(u'¶', '').strip()
h1_id = div.attr('id')
h1_content = ""
next_p = body('h1').next()
while next_p:
if next_p[0].tag == 'div' and 'class' in next_p[0].attrib:
if 'section' in next_p[0].attrib['class']:
break
h1_content += "\n%s\n" % next_p.html()
next_p = next_p.next()
if h1_content:
sections.append({
'id': h1_id,
'title': h1_title,
'content': h1_content,
})
# Capture text inside h2's
section_list = body('.section > h2')
for num in range(len(section_list)):
div = section_list.eq(num).parent()
header = section_list.eq(num)
title = header.text().replace(u'¶', '').strip()
section_id = div.attr('id')
content = div.html()
sections.append({
'id': section_id,
'title': title,
'content': content,
})
log.debug("(Search Index) Section [%s:%s]: %s" % (section_id, title, content))
else:
log.info('Unable to index content for: %s' % filename)
if 'title' in data:
title = data['title']
if title.startswith('<'):
title = PyQuery(data['title']).text()
else:
log.info('Unable to index title for: %s' % filename)
return {'headers': headers, 'content': body_content, 'path': path,
'title': title, 'sections': sections}
def recurse_while_none(element):
if element.text is None:
return recurse_while_none(element.getchildren()[0])
else:
return element.text
| kdkeyser/readthedocs.org | readthedocs/search/parse_json.py | Python | mit | 3,901 |
#!/usr/bin/python3
from __future__ import division, print_function, absolute_import
import numpy as np
import mxnet as mx
from mxnet import nd, autograd, gluon
import os
os.system("taskset -a -p 0xFFFFFFFF %d" % os.getpid())
mx.random.seed(1)
ctx = mx.cpu()
data_ctx = ctx
model_ctx = ctx
batch_size = 200
width=40
num_inputs = width*width
num_outputs = 2
sizeoftype = 6400
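# One 40x40 float32 image is 40*40*4 = 6400 bytes; this is used below to
# infer the number of images from the data file's size.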
PARAM_NAME = "./OUTS/PARS/CNN_PARS"
def evaluate_accuracy(data_iterator, net):
acc = mx.metric.Accuracy()
for i, (data, label) in enumerate(data_iterator):
data = data.as_in_context(ctx)
label = label.as_in_context(ctx)
output = net(data)
predictions = nd.argmax(output, axis=1)
prednew = nd.reshape(predictions,label.shape)
#predictions = nd.argmax(output)
#print(predictions[1])
#print(data.shape,label.shape,output.shape,predictions.shape,prednew.shape)
acc.update(preds=prednew, labels=label)
return acc.get()[1]
def trainonfiles (dataname,labelname,testdata,testlabel,NEpochs):
IMAGEFILE = dataname
statinfo = os.stat(IMAGEFILE)
leadingshape = int(statinfo.st_size/sizeoftype)
print(leadingshape)
X = np.memmap(IMAGEFILE, dtype='float32', mode='r', shape=(leadingshape,1,40,40))
Xnd = nd.array(X)
LABELFILE = labelname
Y = np.memmap(LABELFILE, dtype='float32', mode='r', shape=(leadingshape,1))
Ynd = nd.array(Y)
dataset = mx.gluon.data.dataset.ArrayDataset(Xnd, Ynd)
train_data = mx.gluon.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=1)
IMAGEFILE = testdata
statinfo = os.stat(IMAGEFILE)
leadingshape = int(statinfo.st_size/sizeoftype)
print(leadingshape)
tX = np.memmap(IMAGEFILE, dtype='float32', mode='r', shape=(leadingshape,1,40,40))
tXnd = nd.array(tX)
LABELFILE = testlabel
tY = np.memmap(LABELFILE, dtype='float32', mode='r', shape=(leadingshape,1))
tYnd = nd.array(tY)
tdataset = mx.gluon.data.dataset.ArrayDataset(tXnd, tYnd)
test_data = mx.gluon.data.DataLoader(tdataset, batch_size=batch_size, shuffle=True, num_workers=1)
num_fc = 32
net = gluon.nn.HybridSequential()
with net.name_scope():
net.add(gluon.nn.Conv2D(channels=20, kernel_size=5, activation='relu'))
net.add(gluon.nn.MaxPool2D(pool_size=2, strides=2))
net.add(gluon.nn.Conv2D(channels=50, kernel_size=5, activation='relu'))
net.add(gluon.nn.MaxPool2D(pool_size=2, strides=2))
# The Flatten layer collapses all axis, except the first one, into one axis.
net.add(gluon.nn.Flatten())
net.add(gluon.nn.Dense(num_fc, activation="relu"))
net.add(gluon.nn.Dense(num_outputs))
net.hybridize()
net.collect_params().initialize(mx.init.Xavier(magnitude=2.24), ctx=ctx)
softmax_cross_entropy = gluon.loss.SoftmaxCrossEntropyLoss()
#softmax_cross_entropy = gluon.loss.SoftmaxCrossEntropyLoss(sparse_label=False)
trainer = gluon.Trainer(net.collect_params(), 'adam', {'learning_rate': 0.001})
epochs = NEpochs
smoothing_constant = .01
#net.load_parameters(PARAM_NAME, ctx=ctx)
for e in range(epochs):
for i, (data, label) in enumerate(train_data):
data = data.as_in_context(ctx)
label = label.as_in_context(ctx)
with autograd.record():
output = net(data)
loss = softmax_cross_entropy(output, label)
loss.backward()
trainer.step(data.shape[0])
inst_loss = nd.sum(loss).asscalar()
print(" ",inst_loss)
if((i%50)==0):
print("saving...")
net.save_parameters(PARAM_NAME)
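                # NOTE: this exit() terminates the whole script after the first
                # checkpoint, so the epoch-accuracy code below and the later
                # trainonfiles() calls never run (leftover debug shortcut).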
exit()
curr_loss = nd.mean(loss).asscalar()
moving_loss = (curr_loss if ((i == 0) and (e == 0))
else (1 - smoothing_constant) * moving_loss + smoothing_constant * curr_loss)
test_accuracy = evaluate_accuracy(test_data, net)
train_accuracy = evaluate_accuracy(train_data, net)
print("Epoch %s. Loss: %s, Train_acc %s, Test_acc %s" % (e, moving_loss, train_accuracy, test_accuracy))
trainonfiles ("./OUTS/TMP/QCD_TOP/TRAIN/0/image","./OUTS/TMP/QCD_TOP/TRAIN/0/label","./OUTS/TMP/QCD_TOP/TEST/0/image","./OUTS/TMP/QCD_TOP/TEST/0/label",5)
trainonfiles ("./OUTS/TMP/QCD_TOP/TRAIN/1/image","./OUTS/TMP/QCD_TOP/TRAIN/1/label","./OUTS/TMP/QCD_TOP/TEST/1/image","./OUTS/TMP/QCD_TOP/TEST/1/label",5)
trainonfiles ("./OUTS/TMP/QCD_TOP/TRAIN/2/image","./OUTS/TMP/QCD_TOP/TRAIN/2/label","./OUTS/TMP/QCD_TOP/TEST/2/image","./OUTS/TMP/QCD_TOP/TEST/2/label",5)
trainonfiles ("./OUTS/TMP/QCD_TOP/TRAIN/3/image","./OUTS/TMP/QCD_TOP/TRAIN/3/label","./OUTS/TMP/QCD_TOP/TEST/3/image","./OUTS/TMP/QCD_TOP/TEST/3/label",5)
trainonfiles ("./OUTS/TMP/QCD_TOP/TRAIN/4/image","./OUTS/TMP/QCD_TOP/TRAIN/4/label","./OUTS/TMP/QCD_TOP/TEST/4/image","./OUTS/TMP/QCD_TOP/TEST/4/label",5)
trainonfiles ("./OUTS/TMP/QCD_TOP/TRAIN/5/image","./OUTS/TMP/QCD_TOP/TRAIN/5/label","./OUTS/TMP/QCD_TOP/TEST/5/image","./OUTS/TMP/QCD_TOP/TEST/5/label",5)
trainonfiles ("./OUTS/TMP/QCD_TOP/TRAIN/6/image","./OUTS/TMP/QCD_TOP/TRAIN/6/label","./OUTS/TMP/QCD_TOP/TEST/6/image","./OUTS/TMP/QCD_TOP/TEST/6/label",5)
trainonfiles ("./OUTS/TMP/QCD_TOP/TRAIN/7/image","./OUTS/TMP/QCD_TOP/TRAIN/7/label","./OUTS/TMP/QCD_TOP/TEST/7/image","./OUTS/TMP/QCD_TOP/TEST/7/label",5)
| aravindhv10/CPP_Wrappers | NewData/SRC/MXNET_CNN_Supervised_Simple/start.py | Python | gpl-2.0 | 5,411 |
# -*- coding: utf-8 -*-
#
# hl_api_nodes.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Functions for node handling
"""
import warnings
import nest
from ..ll_api import *
from .. import pynestkernel as kernel
from .hl_api_helper import *
from .hl_api_info import SetStatus
from .hl_api_types import NodeCollection, Parameter
__all__ = [
'Create',
'GetLocalNodeCollection',
'GetNodes',
'PrintNodes',
]
@check_stack
def Create(model, n=1, params=None, positions=None):
"""Create one or more nodes.
Generates `n` new network objects of the supplied model type. If `n` is not
given, a single node is created. Note that if setting parameters of the
    nodes fails, the nodes will still have been created.
Parameters
----------
model : str
Name of the model to create
n : int, optional
Number of nodes to create
params : dict, list or Parameter, optional
Parameters for the new nodes. A single dictionary, a list of
dictionaries with size n or a :py:class:`.Parameter`. If omitted, the model's defaults are used.
positions: :py:class:`.spatial.grid` or :py:class:`.spatial.free` object, optional
        Object describing spatial positions of the nodes. If omitted, the nodes have no spatial attachment.
Returns
-------
NodeCollection:
Object representing the IDs of created nodes, see :py:class:`.NodeCollection` for more.
Raises
------
NESTError
        If setting node parameters fails. However, the nodes will still have
        been created.
"""
model_deprecation_warning(model)
if positions is not None:
if not isinstance(positions, (nest.spatial.free, nest.spatial.grid)):
raise TypeError('`positions` must be either a nest.spatial.free object or nest.spatial.grid object')
layer_specs = {'elements': model}
layer_specs['edge_wrap'] = positions.edge_wrap
if isinstance(positions, nest.spatial.free):
layer_specs['positions'] = positions.pos
if isinstance(positions.pos, Parameter):
layer_specs['n'] = n
else:
if n > 1:
raise kernel.NESTError(
'Cannot specify number of nodes with grid positions')
layer_specs['shape'] = positions.shape
if positions.center is not None:
layer_specs['center'] = positions.center
if positions.extent is not None:
layer_specs['extent'] = positions.extent
if params is None:
params = {}
layer = sli_func('CreateLayerParams', layer_specs, params)
return layer
params_contains_list = True
if isinstance(params, dict) and params:
params_contains_list = [is_iterable(v) or isinstance(v, Parameter)
for k, v in params.items()]
params_contains_list = max(params_contains_list)
if not params_contains_list:
cmd = "/%s 3 1 roll exch Create" % model
sps(params)
else:
cmd = "/%s exch Create" % model
sps(n)
sr(cmd)
node_ids = spp()
if params is not None and params_contains_list:
try:
SetStatus(node_ids, params)
except:
warnings.warn(
"SetStatus() call failed, but nodes have already been " +
"created! The node IDs of the new nodes are: {0}.".format(node_ids))
raise
return node_ids
@check_stack
def PrintNodes():
"""Print the `node ID` ranges and `model names` of all the nodes in the network."""
sr("PrintNodesToStream")
print(spp())
def GetNodes(properties={}, local_only=False):
"""Return all nodes with the given properties as `NodeCollection`.
Parameters
----------
properties : dict, optional
Only node IDs of nodes matching the properties given in the
dictionary exactly will be returned. Matching properties with float
values (e.g. the membrane potential) may fail due to tiny numerical
discrepancies and should be avoided. Note that when a params dict is
present, thread parallelization is not possible, the function will
be run thread serial.
local_only : bool, optional
If True, only node IDs of nodes simulated on the local MPI process will
be returned. By default, node IDs of nodes in the entire simulation
will be returned. This requires MPI communication and may slow down
the script.
Returns
-------
NodeCollection:
`NodeCollection` of nodes
"""
return sli_func('GetNodes', properties, local_only)
@check_stack
def GetLocalNodeCollection(nc):
"""Get local nodes of a `NodeCollection` as a new `NodeCollection`.
This function returns the local nodes of a `NodeCollection`. If there are no
local elements, an empty `NodeCollection` is returned.
Parameters
----------
nc: NodeCollection
`NodeCollection` for which to get local nodes
Returns
-------
NodeCollection:
Object representing the local nodes of the given `NodeCollection`
"""
if not isinstance(nc, NodeCollection):
raise TypeError("GetLocalNodeCollection requires a NodeCollection in order to run")
sps(nc)
sr("LocalOnly")
return spp()
| Silmathoron/nest-simulator | pynest/nest/lib/hl_api_nodes.py | Python | gpl-2.0 | 5,988 |
from __future__ import unicode_literals
import glob
import gzip
import os
import warnings
import zipfile
from django.apps import apps
from django.conf import settings
from django.core import serializers
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand, CommandError
from django.core.management.color import no_style
from django.db import (connections, router, transaction, DEFAULT_DB_ALIAS,
IntegrityError, DatabaseError)
from django.utils import lru_cache
from django.utils.encoding import force_text
from django.utils.functional import cached_property
from django.utils._os import upath
from itertools import product
try:
import bz2
has_bz2 = True
except ImportError:
has_bz2 = False
class Command(BaseCommand):
help = 'Installs the named fixture(s) in the database.'
missing_args_message = ("No database fixture specified. Please provide the "
"path of at least one fixture in the command line.")
def add_arguments(self, parser):
parser.add_argument('args', metavar='fixture', nargs='+',
help='Fixture labels.')
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Nominates a specific database to load '
'fixtures into. Defaults to the "default" database.')
parser.add_argument('--app', action='store', dest='app_label',
default=None, help='Only look for fixtures in the specified app.')
parser.add_argument('--ignorenonexistent', '-i', action='store_true',
dest='ignore', default=False,
help='Ignores entries in the serialized data for fields that do not '
'currently exist on the model.')
def handle(self, *fixture_labels, **options):
self.ignore = options.get('ignore')
self.using = options.get('database')
self.app_label = options.get('app_label')
self.hide_empty = options.get('hide_empty', False)
self.verbosity = options.get('verbosity')
with transaction.atomic(using=self.using):
self.loaddata(fixture_labels)
# Close the DB connection -- unless we're still in a transaction. This
# is required as a workaround for an edge case in MySQL: if the same
# connection is used to create tables, load data, and query, the query
# can return incorrect results. See Django #7572, MySQL #37735.
if transaction.get_autocommit(self.using):
connections[self.using].close()
def loaddata(self, fixture_labels):
connection = connections[self.using]
# Keep a count of the installed objects and fixtures
self.fixture_count = 0
self.loaded_object_count = 0
self.fixture_object_count = 0
self.models = set()
self.serialization_formats = serializers.get_public_serializer_formats()
# Forcing binary mode may be revisited after dropping Python 2 support (see #22399)
self.compression_formats = {
None: (open, 'rb'),
'gz': (gzip.GzipFile, 'rb'),
'zip': (SingleZipReader, 'r'),
}
if has_bz2:
self.compression_formats['bz2'] = (bz2.BZ2File, 'r')
with connection.constraint_checks_disabled():
for fixture_label in fixture_labels:
self.load_label(fixture_label)
# Since we disabled constraint checks, we must manually check for
# any invalid keys that might have been added
table_names = [model._meta.db_table for model in self.models]
try:
connection.check_constraints(table_names=table_names)
except Exception as e:
e.args = ("Problem installing fixtures: %s" % e,)
raise
# If we found even one object in a fixture, we need to reset the
# database sequences.
if self.loaded_object_count > 0:
sequence_sql = connection.ops.sequence_reset_sql(no_style(), self.models)
if sequence_sql:
if self.verbosity >= 2:
self.stdout.write("Resetting sequences\n")
with connection.cursor() as cursor:
for line in sequence_sql:
cursor.execute(line)
if self.verbosity >= 1:
if self.fixture_count == 0 and self.hide_empty:
pass
elif self.fixture_object_count == self.loaded_object_count:
self.stdout.write("Installed %d object(s) from %d fixture(s)" %
(self.loaded_object_count, self.fixture_count))
else:
self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)" %
(self.loaded_object_count, self.fixture_object_count, self.fixture_count))
def load_label(self, fixture_label):
"""
        Loads fixture files for a given label.
"""
for fixture_file, fixture_dir, fixture_name in self.find_fixtures(fixture_label):
_, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
open_method, mode = self.compression_formats[cmp_fmt]
fixture = open_method(fixture_file, mode)
try:
self.fixture_count += 1
objects_in_fixture = 0
loaded_objects_in_fixture = 0
if self.verbosity >= 2:
self.stdout.write("Installing %s fixture '%s' from %s." %
(ser_fmt, fixture_name, humanize(fixture_dir)))
objects = serializers.deserialize(ser_fmt, fixture,
using=self.using, ignorenonexistent=self.ignore)
for obj in objects:
objects_in_fixture += 1
if router.allow_migrate(self.using, obj.object.__class__):
loaded_objects_in_fixture += 1
self.models.add(obj.object.__class__)
try:
obj.save(using=self.using)
except (DatabaseError, IntegrityError) as e:
e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
'app_label': obj.object._meta.app_label,
'object_name': obj.object._meta.object_name,
'pk': obj.object.pk,
'error_msg': force_text(e)
},)
raise
self.loaded_object_count += loaded_objects_in_fixture
self.fixture_object_count += objects_in_fixture
except Exception as e:
if not isinstance(e, CommandError):
e.args = ("Problem installing fixture '%s': %s" % (fixture_file, e),)
raise
finally:
fixture.close()
# Warn if the fixture we loaded contains 0 objects.
if objects_in_fixture == 0:
warnings.warn(
"No fixture data found for '%s'. (File format may be "
"invalid.)" % fixture_name,
RuntimeWarning
)
@lru_cache.lru_cache(maxsize=None)
def find_fixtures(self, fixture_label):
"""
Finds fixture files for a given label.
"""
fixture_name, ser_fmt, cmp_fmt = self.parse_name(fixture_label)
databases = [self.using, None]
cmp_fmts = list(self.compression_formats.keys()) if cmp_fmt is None else [cmp_fmt]
ser_fmts = serializers.get_public_serializer_formats() if ser_fmt is None else [ser_fmt]
if self.verbosity >= 2:
self.stdout.write("Loading '%s' fixtures..." % fixture_name)
if os.path.isabs(fixture_name):
fixture_dirs = [os.path.dirname(fixture_name)]
fixture_name = os.path.basename(fixture_name)
else:
fixture_dirs = self.fixture_dirs
if os.path.sep in os.path.normpath(fixture_name):
fixture_dirs = [os.path.join(dir_, os.path.dirname(fixture_name))
for dir_ in fixture_dirs]
fixture_name = os.path.basename(fixture_name)
suffixes = ('.'.join(ext for ext in combo if ext)
for combo in product(databases, ser_fmts, cmp_fmts))
targets = set('.'.join((fixture_name, suffix)) for suffix in suffixes)
fixture_files = []
for fixture_dir in fixture_dirs:
if self.verbosity >= 2:
self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))
fixture_files_in_dir = []
for candidate in glob.iglob(os.path.join(fixture_dir, fixture_name + '*')):
if os.path.basename(candidate) in targets:
# Save the fixture_dir and fixture_name for future error messages.
fixture_files_in_dir.append((candidate, fixture_dir, fixture_name))
if self.verbosity >= 2 and not fixture_files_in_dir:
self.stdout.write("No fixture '%s' in %s." %
(fixture_name, humanize(fixture_dir)))
# Check kept for backwards-compatibility; it isn't clear why
# duplicates are only allowed in different directories.
if len(fixture_files_in_dir) > 1:
raise CommandError(
"Multiple fixtures named '%s' in %s. Aborting." %
(fixture_name, humanize(fixture_dir)))
fixture_files.extend(fixture_files_in_dir)
if not fixture_files:
# Warning kept for backwards-compatibility; why not an exception?
warnings.warn("No fixture named '%s' found." % fixture_name)
return fixture_files
@cached_property
def fixture_dirs(self):
"""
Return a list of fixture directories.
The list contains the 'fixtures' subdirectory of each installed
application, if it exists, the directories in FIXTURE_DIRS, and the
current directory.
"""
dirs = []
fixture_dirs = settings.FIXTURE_DIRS
if len(fixture_dirs) != len(set(fixture_dirs)):
raise ImproperlyConfigured("settings.FIXTURE_DIRS contains duplicates.")
for app_config in apps.get_app_configs():
app_label = app_config.label
app_dir = os.path.join(app_config.path, 'fixtures')
if app_dir in fixture_dirs:
raise ImproperlyConfigured(
"'%s' is a default fixture directory for the '%s' app "
"and cannot be listed in settings.FIXTURE_DIRS." % (app_dir, app_label)
)
if self.app_label and app_label != self.app_label:
continue
if os.path.isdir(app_dir):
dirs.append(app_dir)
dirs.extend(list(fixture_dirs))
dirs.append('')
dirs = [upath(os.path.abspath(os.path.realpath(d))) for d in dirs]
return dirs
def parse_name(self, fixture_name):
"""
        Splits a fixture name into name, serialization format and compression format.
"""
parts = fixture_name.rsplit('.', 2)
if len(parts) > 1 and parts[-1] in self.compression_formats:
cmp_fmt = parts[-1]
parts = parts[:-1]
else:
cmp_fmt = None
if len(parts) > 1:
if parts[-1] in self.serialization_formats:
ser_fmt = parts[-1]
parts = parts[:-1]
else:
raise CommandError(
"Problem installing fixture '%s': %s is not a known "
"serialization format." % (''.join(parts[:-1]), parts[-1]))
else:
ser_fmt = None
name = '.'.join(parts)
return name, ser_fmt, cmp_fmt
class SingleZipReader(zipfile.ZipFile):
def __init__(self, *args, **kwargs):
zipfile.ZipFile.__init__(self, *args, **kwargs)
if len(self.namelist()) != 1:
raise ValueError("Zip-compressed fixtures must contain one file.")
def read(self):
return zipfile.ZipFile.read(self, self.namelist()[0])
def humanize(dirname):
return "'%s'" % dirname if dirname else 'absolute path'
| Sonicbids/django | django/core/management/commands/loaddata.py | Python | bsd-3-clause | 12,444 |
""":mod:`wikidata.commonsmedia` --- `Wikimedia Commons`_
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. _Wikimedia Commons: https://commons.wikimedia.org/
.. versionadded:: 0.3.0
"""
import collections.abc
from typing import Mapping, Optional, Tuple, cast
import urllib.parse
from .client import Client
__all__ = 'File', 'FileError'
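# Typical usage (sketch; assumes a wikidata.client.Client instance):
#   f = File(client, 'File:Example.jpg')
#   print(f.image_url, f.image_resolution)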
class File:
"""Represent a file on `Wikimedia Commons`_."""
__slots__ = 'client', 'title', 'data'
def __init__(self, client: Client, title: str) -> None:
self.client = client
self.title = title
self.data = None # type: Optional[Mapping[str, object]]
@property
def page_url(self) -> str:
"""(:class:`str`) The canonical url of the page."""
url = self.attributes['canonicalurl']
assert isinstance(url, str)
return url
@property
def image_url(self) -> Optional[str]:
r"""(:class:`~typing.Optional`\ [:class:`str`]) The image url.
It may be :const:`None` if it's not an image.
"""
images = self.attributes.get('imageinfo', [])
if images and isinstance(images, collections.abc.Sequence):
return images[0]['url']
return None
@property
def image_mimetype(self) -> Optional[str]:
r"""(:class:`~typing.Optional`\ [:class:`str`]) The MIME type of
the image. It may be :const:`None` if it's not an image.
"""
images = self.attributes.get('imageinfo', [])
if images and isinstance(images, collections.abc.Sequence):
return images[0]['mime']
return None
@property
def image_resolution(self) -> Optional[Tuple[int, int]]:
r"""(:class:`~typing.Optional`\ [:class:`~typing.Tuple`\ [:class:`int`,
:class:`int`]]) The (width, height) pair of the image.
It may be :const:`None` if it's not an image.
"""
images = self.attributes.get('imageinfo', [])
if images and isinstance(images, collections.abc.Sequence):
img = images[0]
return img['width'], img['height']
return None
@property
def image_size(self) -> Optional[int]:
r"""(:class:`~typing.Optional`\ [:class:`int`]) The size of the image
in bytes. It may be :const:`None` if it's not an image.
"""
images = self.attributes.get('imageinfo', [])
if images and isinstance(images, collections.abc.Sequence):
return images[0]['size']
return None
@property
def attributes(self) -> Mapping[str, object]:
if self.data is None:
self.load()
assert self.data is not None
return self.data
def load(self) -> None:
url = './w/api.php?action=query&prop=imageinfo|info&inprop=url&iiprop=url|size|mime&format=json&titles={}' # noqa: E501
url = url.format(urllib.parse.quote(self.title))
result = cast(Mapping[str, object], self.client.request(url))
if result.get('error'):
raise FileError('the server respond an error: ' +
repr(result['error']))
query = result['query']
assert isinstance(query, collections.abc.Mapping)
self.data = next(iter(query['pages'].values()))
def __repr__(self) -> str:
return '<{0.__module__}.{0.__qualname__} {1!r}>'.format(
type(self), self.title
)
class FileError(ValueError, RuntimeError):
"""Exception raised when something goes wrong with :class:`File`."""
| dahlia/wikidata | wikidata/commonsmedia.py | Python | gpl-3.0 | 3,528 |
# pylint: disable=too-few-public-methods
from enkiblog import models
import os.path
from websauna.utils.time import now
from random import randint
from uuid import uuid4
import factory
from enkiblog.core.utils import slugify
from enkiblog.core.testing.fakefactory import BaseFactory, DB_SESSION_PROXY
class TagFactory(BaseFactory):
class Meta:
model = models.Tag
# title = factory.Faker('words', )
title = factory.LazyAttribute(lambda obj: str(uuid4().hex)) # XXX: !!! see previous
class BasePostFactory(BaseFactory):
class Meta:
model = models.Post
title = factory.Faker('catch_phrase')
description = factory.Faker('sentence')
body = factory.Faker('text', max_nb_chars=2000)
slug = factory.LazyAttribute(
lambda obj: slugify(obj.title, models.Post.slug, DB_SESSION_PROXY))
tags = factory.LazyFunction(lambda: [TagFactory() for i in range(randint(1, 6))])
class PostFactory(BasePostFactory):
state = 'public'
published_at = factory.LazyAttribute(lambda obj: now())
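# Usage sketch (factory_boy conventions): PostFactory() builds a published
# Post with a fake title/body, a slugified title, and one to six random tags.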
class MediaFactory(BaseFactory):
class Meta:
model = models.Media
# XXX: will break in case of many files
description = factory.Faker('slug')
title = str(__file__.rsplit(os.path.sep, 1)[-1])
slug = factory.Faker('slug')
blob = bytes(os.path.abspath(__file__), 'utf-8')
| enkidulan/enkiblog | src/enkiblog/tests/fakefactory.py | Python | apache-2.0 | 1,348 |
#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
packages=find_packages(),
)
| bmi-forum/bmi-python | setup.py | Python | mit | 781 |
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
class AccountMandate(models.Model):
_inherit = "account.banking.mandate"
@api.multi
def validate(self):
"""Validate LSV/DD Contracts when mandate is validated."""
super().validate()
contracts = self._trigger_contracts("mandate")
contracts.mandate_valid()
return True
@api.multi
def cancel(self):
"""Set back contracts in waiting mandate state."""
super().cancel()
contracts = self._trigger_contracts("active") + self._trigger_contracts(
"waiting"
)
contracts.contract_waiting_mandate()
return True
@api.multi
def _trigger_contracts(self, state):
""" Fires a given transition on contracts in selected state. """
contracts = self.env["recurring.contract"].with_context(lang="en_US")
for mandate in self:
contracts |= contracts.search(
[
("partner_id", "child_of", mandate.partner_id.id),
("state", "=", state),
"|", ("payment_mode_id.name", "ilike", "LSV"),
("payment_mode_id", "ilike", "Postfinance")
]
)
return contracts
| eicher31/compassion-switzerland | sponsorship_switzerland/models/account_banking_mandate.py | Python | agpl-3.0 | 1,630 |
# -*- coding: utf-8 -*-
import logging
import wx
import wx.adv
from outwiker.gui.guiconfig import PluginsConfig
from outwiker.core.system import getCurrentDir, getOS
from outwiker.gui.preferences.baseprefpanel import BasePrefPanel
logger = logging.getLogger('pluginspanel')
class PluginsPanel(BasePrefPanel):
"""
    Panel with the list of installed plugins
"""
def __init__(self, parent, application):
super(PluginsPanel, self).__init__(parent)
self._application = application
self.__htmlMinWidth = 150
self.__createGui()
self.__controller = PluginsController(self)
self.SetupScrolling()
def __createGui(self):
self.pluginsList = wx.CheckListBox(self, -1, style=wx.LB_SORT)
self.pluginsList.SetMinSize((50, 20))
self.__downloadLink = wx.adv.HyperlinkCtrl(
self,
-1,
_(u"Download more plugins"),
_(u"https://jenyay.net/Outwiker/PluginsEn"))
self.pluginsInfo = getOS().getHtmlRender(self)
self.__layout()
def __layout(self):
self.mainSizer = wx.FlexGridSizer(cols=1)
self.mainSizer.AddGrowableRow(0)
self.mainSizer.AddGrowableCol(0)
self.pluginsSizer = wx.FlexGridSizer(cols=2)
self.pluginsSizer.AddGrowableRow(0)
self.pluginsSizer.AddGrowableCol(0)
self.pluginsSizer.AddGrowableCol(1)
self.pluginsSizer.Add(self.pluginsList, flag=wx.EXPAND)
self.pluginsSizer.Add(self.pluginsInfo, flag=wx.EXPAND)
self.mainSizer.Add(self.pluginsSizer,
flag=wx.ALL | wx.EXPAND,
border=2)
self.mainSizer.Add(self.__downloadLink,
flag=wx.ALL | wx.ALIGN_LEFT,
border=2)
self.SetSizer(self.mainSizer)
def LoadState(self):
self.__controller.loadState()
def Save(self):
self.__controller.save()
class PluginsController(object):
"""
    Controller that manages the installed plugins panel
"""
def __init__(self, pluginspanel):
self.__owner = pluginspanel
        # Since on Windows user data cannot be attached to CheckListBox
        # items, they have to be stored separately.
        # Key: plugin name (which is also the row text)
        # Value: plugin instance
self.__pluginsItems = {}
self.__owner.Bind(wx.EVT_LISTBOX,
self.__onSelectItem,
self.__owner.pluginsList)
def __onSelectItem(self, event):
htmlContent = u""
if event.IsSelection():
plugin = self.__pluginsItems[event.GetString()]
assert plugin is not None
htmlContent = self.__createPluginInfo(plugin)
self.__owner.pluginsInfo.SetPage(htmlContent, getCurrentDir())
def __createPluginInfo(self, plugin):
assert plugin is not None
infoTemplate = u"""<html>
<head>
<meta http-equiv='content-type' content='text/html; charset=utf-8'/>
</head>
<body>
{name}<br>
{version}<br>
{url}<br>
{description}<br>
</body>
</html>"""
plugin_name = u"""<h3>{name}</h3>""".format(name=plugin.name)
plugin_version = u"""<b>{version_header}:</b> {version}""".format(
version_header=_(u"Version"),
version=plugin.version)
plugin_description = u"""<b>{description_head}:</b> {description}""".format(
description_head=_(u"Description"),
description=plugin.description.replace("\n", "<br>"))
if plugin.url is not None:
plugin_url = u"""<br><b>{site_head}</b>: <a href="{url}">{url}</a><br>""".format(
site_head=_("Site"),
url=plugin.url)
else:
plugin_url = u""
result = infoTemplate.format(
name=plugin_name,
version=plugin_version,
description=plugin_description,
url=plugin_url)
return result
def loadState(self):
self.__pluginsItems = {}
self.__owner.pluginsList.Clear()
self.__appendEnabledPlugins()
self.__appendDisabledPlugins()
self.__appendInvalidPlugins()
def __appendEnabledPlugins(self):
"""
        Add the loaded plugins to the list
"""
enablePlugins = {plugin.name: plugin
for plugin
in self.__owner._application.plugins}
self.__owner.pluginsList.Append(list(enablePlugins))
self.__pluginsItems.update(enablePlugins)
self.__owner.pluginsList.SetCheckedStrings(list(enablePlugins))
def __appendDisabledPlugins(self):
"""
        Add the disabled plugins to the list
"""
self.__owner.pluginsList.Append(list(self.__owner._application.plugins.disabledPlugins))
self.__pluginsItems.update(self.__owner._application.plugins.disabledPlugins)
def __appendInvalidPlugins(self):
invalid_plugins = self.__owner._application.plugins.invalidPlugins
for plugin in invalid_plugins:
self.__owner.pluginsList.Append(plugin.name)
self.__pluginsItems[plugin.name] = plugin
def save(self):
config = PluginsConfig(self.__owner._application.config)
config.disabledPlugins.value = self.__getDisabledPlugins()
        # Apply the updated enabled/disabled state to the loaded plugins
self.__owner._application.plugins.updateDisableList()
def __getDisabledPlugins(self):
"""
Return list of unchecked plugins
"""
checked = self.__owner.pluginsList.GetCheckedStrings()
disabledList = list(set(self.__pluginsItems) - set(checked))
return disabledList
| unreal666/outwiker | src/outwiker/gui/preferences/pluginspanel.py | Python | gpl-3.0 | 6,067 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "urbansense.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| johnsonc/urbansense | manage.py | Python | gpl-2.0 | 253 |
#Builtin imports
import os
import logging
import json
import time
import threading
#External imports
import telegram
#from socketIO_client import SocketIO, LoggingNamespace, BaseNamespace
from telegram.ext import (
Updater, CommandHandler, MessageHandler, Filters
)
import requests
import dataset
CONF_FILE = "will-telegram.conf"
if os.path.isfile(CONF_FILE):
    configuration_data = json.loads(open(CONF_FILE).read())
SERVER_URL = configuration_data["server_url"]
TOKEN = configuration_data["bot_token"]
LOGFILE = configuration_data["logfile"]
if configuration_data["debug"]:
DB_URL = configuration_data["debug_db_url"]
else:
DB_URL = configuration_data["db_url"]
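# For reference, a minimal will-telegram.conf would look like this
# (keys are the ones read above; all values are illustrative):
#
#   {
#       "server_url": "https://example.com",
#       "bot_token": "123456:ABC-DEF",
#       "logfile": "will-telegram.log",
#       "debug": false,
#       "db_url": "sqlite:///will.db",
#       "debug_db_url": "sqlite:///will-debug.db"
#   }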
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
filename=LOGFILE,
filemode='w'
)
log = logging.getLogger()
db = dataset.connect(DB_URL)
help_str = '''
Welcome to W.I.L.L! If you don't have an account yet, please sign up at https://willbeddow.com/static/signup_page.html.
There are only two commands that you need to learn about for this bot:
/login <username> <password>: login to W.I.L.L and generate a session token.
/help: Print this message
'''
def help(bot, update):
'''Echo the help string'''
bot.sendMessage(update.message.chat_id, help_str)
#def socket_io_thread(bot,session_id, chat_id ):
# socketIO = SocketIO(SERVER_URL, 80)
# log.info("In socket_io thread")
# socketIO.on('connect', lambda: socketIO.emit("get_updates", session_id))
# socketIO.on('update', lambda x: bot.sendMessage(chat_id, (x["value"])))
# socketIO.on('disconnect', lambda x: bot.sendMessage(chat_id, "Update server has disconnected"))
# socketIO.on('debug', lambda x: log.info("Got debug message {0} from socketIO".format(x["value"])))
# socketIO.wait()
def login(bot, update):
'''Login to W.I.L.L and store the session id in the db'''
message = update.message.text
message_split = message.split("/login ")[1].split(" ")
username = message_split[0]
password = message_split[1]
log.info("Trying to log into W.I.L.L with username {0}".format(username))
payload = {"username": username, "password": password}
response = requests.post(url="{0}/api/start_session".format(SERVER_URL), data=payload).json()
if response["type"] == "success":
log.info("Logged in user {0} successfully, putting their session token in the db")
update.message.reply_text(response["text"]+" Thank you for logging in. For security, please delete the /login" \
" command in the chat")
db['telegram'].upsert(dict(
username=username, chat_id=update.message.chat_id, session_id=response["data"]["session_id"])
, ['username']
)
#socket_thread = threading.Thread(target=socket_io_thread, args=(
# bot, response["data"]["session_id"],update.message.chat_id))
#TODO: fix this later
#socket_thread.start()
else:
log.info("Got error logging in with user {0}. Error text is {1}".format(username, response["text"]))
update.message.reply_text(response["text"])
def start(bot, update):
'''Standard /start command'''
update.message.reply_text("Welcome to W.I.L.L! To get started, please run /login <username> <password>.")
def command(bot, update):
'''A W.I.L.L command'''
message = update.message.text
user = db['telegram'].find_one(chat_id=update.message.chat_id)
if user:
session_id = user["session_id"]
payload = {"session_id": session_id, "command": message}
try:
response = requests.post(
url="{0}/api/command".format(SERVER_URL),
data=payload
).json()
log.info("Got response {0} from command {1}".format(response, command))
if response["type"] == "success":
update.message.reply_text(response["text"])
else:
if response["text"] == "Invalid session id":
update.message.reply_text(
"It looks like W.I.L.L has rebooted. Please run /login <username> <password> "
"again to start another session")
else:
update.message.reply_text("Error: " + response["text"])
except Exception as command_exception:
log.info("Caught exception {0}, {1} while sending command to W.I.L.L".format(
command_exception.message,
command_exception.args
))
update.message.reply_text("Didn't receive a response from the W.I.L.L server, W.I.L.L is most likely down")
else:
update.message.reply_text("Couldn't find you in the database. Please run /login <username> <password>")
def error(bot, update, error):
'''Log an error'''
log.warn('Update "%s" caused error "%s"' % (update, error))
def main():
updater = Updater(TOKEN)
dp = updater.dispatcher
dp.add_handler(CommandHandler("start", start))
dp.add_handler(CommandHandler("login", login))
dp.add_handler(CommandHandler("help", help))
dp.add_handler(MessageHandler(Filters.text, command))
dp.add_error_handler(error)
updater.start_polling()
updater.idle()
if __name__ == "__main__":
main() | ironman5366/W.I.L.L | interfaces/W.I.L.L-Telegram/main.py | Python | mit | 5,460 |
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 07 01:31:25 2014
@author: Sifan
"""
x = 1515361
# x = 15
ans = 0
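# Linear search for the integer square root: increment ans until ans*ans
# reaches x (O(sqrt(x)) iterations). 1515361 == 1231**2, so this input
# takes the perfect-square branch.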
if x >= 0:
while ans*ans < x:
ans = ans + 1
if ans*ans == x:
print "The square root of %d is %d." % (x, ans)
else:
print "%d is not a perfect square." % (x)
else:
print x, "is negative." | wnduan/IntroToComSci | lecture_.py | Python | mit | 341 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import sanction
from buildbot.www import auth
from buildbot.www import resource
from posixpath import join
from twisted.internet import defer
from twisted.internet import threads
class OAuth2LoginResource(auth.LoginResource):
# disable reconfigResource calls
needsReconfig = False
def __init__(self, master, _auth):
auth.LoginResource.__init__(self, master)
self.auth = _auth
@defer.inlineCallbacks
def renderLogin(self, request):
code = request.args.get("code", [""])[0]
if not code:
url = yield self.auth.getLoginURL()
defer.returnValue(url)
else:
details = yield self.auth.verifyCode(code)
request.getSession().user_info = details
raise resource.Redirect(self.auth.homeUri)
class OAuth2Auth(auth.AuthBase):
name = 'oauth2'
def __init__(self, authUri, tokenUri, clientId,
authUriConfig, tokenConfig, **kwargs):
auth.AuthBase.__init__(self, **kwargs)
self.authUri = authUri
self.tokenUri = tokenUri
self.clientId = clientId
self.authUriConfig = authUriConfig
self.tokenConfig = tokenConfig
def reconfigAuth(self, master, new_config):
self.master = master
self.loginUri = join(new_config.www['url'], "login")
self.homeUri = new_config.www['url']
def getConfigDict(self):
return dict(name=self.name,
oauth2=True,
fa_icon=self.faIcon
)
def getLoginResource(self, master):
return OAuth2LoginResource(master, self)
def getLoginURL(self):
def thd():
c = sanction.Client(auth_endpoint=self.authUri,
client_id=self.clientId)
return c.auth_uri(redirect_uri=self.loginUri,
**self.authUriConfig)
return threads.deferToThread(thd)
def verifyCode(self, code):
def thd(): # everything in deferToThread is not counted with trial --coverage :-(
c = sanction.Client(token_endpoint=self.tokenUri,
client_id=self.clientId,
**self.tokenConfig)
c.request_token(code=code,
redirect_uri=self.loginUri)
return self.getUserInfoFromOAuthClient(c)
return threads.deferToThread(thd)
def getUserInfoFromOAuthClient(self, c):
return {}
class GoogleAuth(OAuth2Auth):
name = "Google"
faIcon = "fa-google-plus"
def __init__(self, clientId, clientSecret, **kwargs):
OAuth2Auth.__init__(self,
authUri='https://accounts.google.com/o/oauth2/auth',
tokenUri='https://accounts.google.com/o/oauth2/token',
clientId=clientId,
authUriConfig=dict(scope=" ".join([
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/userinfo.profile',
]),
access_type='offline'),
tokenConfig=dict(
resource_endpoint='https://www.googleapis.com/oauth2/v1',
client_secret=clientSecret,
token_transport=sanction.transport_headers),
**kwargs
)
def getUserInfoFromOAuthClient(self, c):
data = c.request('/userinfo')
return dict(full_name=data["name"],
username=data['sub'],
email=data["email"],
avatar_url=data["picture"])
class GitHubAuth(OAuth2Auth):
name = "GitHub"
faIcon = "fa-github"
def __init__(self, clientId, clientSecret, **kwargs):
OAuth2Auth.__init__(self,
authUri='https://github.com/login/oauth/authorize',
tokenUri='https://github.com/login/oauth/access_token',
clientId=clientId,
authUriConfig=dict(),
tokenConfig=dict(
resource_endpoint='https://api.github.com',
client_secret=clientSecret,
token_transport=sanction.transport_headers),
**kwargs
)
def getUserInfoFromOAuthClient(self, c):
user = c.request('/user')
orgs = c.request(join('/users', user['login'], "orgs"))
return dict(full_name=user['name'],
email=user['email'],
username=user['login'],
groups=[org['login'] for org in orgs])
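# A minimal sketch of wiring one of these classes into a Buildbot master
# config (client id/secret values are placeholders, not part of this module):
#
#   c['www'] = dict(port=8010,
#                   auth=GitHubAuth("client-id", "client-secret"))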
| zozo123/buildbot | master/buildbot/www/oauth2.py | Python | gpl-3.0 | 5,670 |
from django import forms
from django.conf import settings
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
import urllib2, urllib
VERIFY_SERVER="http://api-verify.recaptcha.net/verify"
class RecaptchaWidget(forms.Widget):
def render(self, name, value, attrs=None):
return mark_safe("""<script type="text/javascript"
src="http://api.recaptcha.net/challenge?k=%(public_key)s"></script>
<noscript>
<iframe src="http://api.recaptcha.net/noscript?k=%(public_key)s"
height="300" width="500" frameborder="0"></iframe><br>
<textarea name="recaptcha_challenge_field" rows="3" cols="40">
</textarea>
<input type="hidden" name="recaptcha_response_field" value="manual_challenge">
</noscript>""" %({'public_key': settings.RECAPTCHA_PUBLIC_KEY}))
def value_from_datadict(self, data, files, name):
return {
'recaptcha_challenge_field': data.get('recaptcha_challenge_field', None),
'recaptcha_response_field' : data.get('recaptcha_response_field', None),
'remoteip' : data.get('remoteip', None)
}
class RecaptchaField(forms.Field):
default_error_messages = {"unknown": _("Unknown error."),
"invalid-site-public-key": _("Unable to verify public key."),
"invalid-site-private-key": _("Unable to verify private key."),
"invalid-request-cookie": _("The challenge parameter was filled incorrectly."),
"incorrect-captcha-sol": _("Invalid Captcha solution."),
"verify-params-incorrect": _("Make sure you are passing all the required parameters."),
"invalid-referrer": _("Invalid Referrer. Enter the correct keys for this domain"),
"recaptcha-not-reachable": _("The reCaptcha site seems to be down. Sorry!!!")}
widget = RecaptchaWidget
def verify(self, data):
captcha_req = urllib2.Request(VERIFY_SERVER,
data=urllib.urlencode({'privatekey': settings.RECAPTCHA_PRIVATE_KEY,
'remoteip' : data['remoteip'],
'challenge' : data['recaptcha_challenge_field'],
'response' : data['recaptcha_response_field'],}))
try:
response = urllib2.urlopen(captcha_req)
        except urllib2.URLError, e:
raise forms.ValidationError(e)
resp_content = response.readlines()
return_code = resp_content[0].strip()
error = resp_content[1].strip()
if not return_code == "true":
raise forms.ValidationError(self.error_messages.get(error) or error)
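# Minimal usage sketch (form and field names are illustrative). Nothing in
# this module calls verify() automatically, and the widget also looks for
# "remoteip" in the submitted data, so a form has to wire both up itself:
#
#   class CommentForm(forms.Form):
#       captcha = RecaptchaField()
#
#       def clean_captcha(self):
#           self.fields['captcha'].verify(self.cleaned_data['captcha'])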
| theju/django-comments-apps | recaptcha_comments/fields.py | Python | mit | 3,024 |
from sequence import *
from wait_actions import *
from keypress_actions import *
from sequence_step import *
| javihernandez/accerciser-mirror | macaroon/macaroon/playback/__init__.py | Python | bsd-3-clause | 109 |
from annoying.decorators import render_to
from django.contrib.auth.decorators import login_required
from games.views import game_request
@login_required
@game_request
@render_to('frontend/index.html')
def room(request):
return {'game': request.game}
| typeinference/lgt | frontend/views.py | Python | mit | 256 |
from collections import namedtuple
from graphql import graphql
from graphql.type import (
GraphQLSchema,
GraphQLObjectType,
GraphQLField,
GraphQLNonNull,
GraphQLInt,
GraphQLString,
GraphQLID,
)
from ..node import node_definitions, to_global_id, from_global_id
User = namedtuple('User', ['id', 'name'])
Photo = namedtuple('Photo', ['id', 'width'])
userData = {
'1': User(id='1', name='John Doe'),
'2': User(id='2', name='Jane Smith'),
}
photoData = {
'3': Photo(id='3', width=300),
'4': Photo(id='4', width=400),
}
def get_node(id, info):
assert info.schema == schema
if id in userData:
return userData.get(id)
else:
return photoData.get(id)
def get_node_type(obj, _info):
if obj.id in userData:
return userType
else:
return photoType
node_interface, node_field = node_definitions(get_node, get_node_type)
userType = GraphQLObjectType(
'User',
fields=lambda: {
'id': GraphQLField(GraphQLNonNull(GraphQLID)),
'name': GraphQLField(GraphQLString),
},
interfaces=[node_interface]
)
photoType = GraphQLObjectType(
'Photo',
fields=lambda: {
'id': GraphQLField(GraphQLNonNull(GraphQLID)),
'width': GraphQLField(GraphQLInt),
},
interfaces=[node_interface]
)
queryType = GraphQLObjectType(
'Query',
fields=lambda: {
'node': node_field,
}
)
schema = GraphQLSchema(
query=queryType,
types=[userType, photoType]
)
def test_gets_the_correct_id_for_users():
query = '''
{
node(id: "1") {
id
}
}
'''
expected = {
'node': {
'id': '1',
}
}
result = graphql(schema, query)
assert not result.errors
assert result.data == expected
def test_gets_the_correct_id_for_photos():
query = '''
{
node(id: "4") {
id
}
}
'''
expected = {
'node': {
'id': '4',
}
}
result = graphql(schema, query)
assert not result.errors
assert result.data == expected
def test_gets_the_correct_name_for_users():
query = '''
{
node(id: "1") {
id
... on User {
name
}
}
}
'''
expected = {
'node': {
'id': '1',
'name': 'John Doe'
}
}
result = graphql(schema, query)
assert not result.errors
assert result.data == expected
def test_gets_the_correct_width_for_photos():
query = '''
{
node(id: "4") {
id
... on Photo {
width
}
}
}
'''
expected = {
'node': {
'id': '4',
'width': 400
}
}
result = graphql(schema, query)
assert not result.errors
assert result.data == expected
def test_gets_the_correct_typename_for_users():
query = '''
{
node(id: "1") {
id
__typename
}
}
'''
expected = {
'node': {
'id': '1',
'__typename': 'User'
}
}
result = graphql(schema, query)
assert not result.errors
assert result.data == expected
def test_gets_the_correct_typename_for_photos():
query = '''
{
node(id: "4") {
id
__typename
}
}
'''
expected = {
'node': {
'id': '4',
'__typename': 'Photo'
}
}
result = graphql(schema, query)
assert not result.errors
assert result.data == expected
def test_ignores_photo_fragments_on_user():
query = '''
{
node(id: "1") {
id
... on Photo {
width
}
}
}
'''
expected = {
'node': {
'id': '1',
}
}
result = graphql(schema, query)
assert not result.errors
assert result.data == expected
def test_returns_null_for_bad_ids():
query = '''
{
node(id: "5") {
id
}
}
'''
expected = {
'node': None
}
result = graphql(schema, query)
assert not result.errors
assert result.data == expected
def test_have_correct_node_interface():
query = '''
{
__type(name: "Node") {
name
kind
fields {
name
type {
kind
ofType {
name
kind
}
}
}
}
}
'''
expected = {
'__type': {
'name': 'Node',
'kind': 'INTERFACE',
'fields': [
{
'name': 'id',
'type': {
'kind': 'NON_NULL',
'ofType': {
'name': 'ID',
'kind': 'SCALAR'
}
}
}
]
}
}
result = graphql(schema, query)
assert not result.errors
assert result.data == expected
def test_has_correct_node_root_field():
query = '''
{
__schema {
queryType {
fields {
name
type {
name
kind
}
args {
name
type {
kind
ofType {
name
kind
}
}
}
}
}
}
}
'''
expected = {
'__schema': {
'queryType': {
'fields': [
{
'name': 'node',
'type': {
'name': 'Node',
'kind': 'INTERFACE'
},
'args': [
{
'name': 'id',
'type': {
'kind': 'NON_NULL',
'ofType': {
'name': 'ID',
'kind': 'SCALAR'
}
}
}
]
}
]
}
}
}
result = graphql(schema, query)
assert not result.errors
assert result.data == expected
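# A note on the encoding exercised below: a global ID is simply
# base64("<type>:<id>") with the id UTF-8 encoded first, so from_global_id
# can recover both parts by decoding and splitting on the first ':'.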
def test_to_global_id_converts_unicode_strings_correctly():
my_unicode_id = u'\xfb\xf1\xf6'
g_id = to_global_id('MyType', my_unicode_id)
assert g_id == 'TXlUeXBlOsO7w7HDtg=='
my_unicode_id = u'\u06ED'
g_id = to_global_id('MyType', my_unicode_id)
assert g_id == 'TXlUeXBlOtut'
def test_from_global_id_converts_unicode_strings_correctly():
my_unicode_id = u'\xfb\xf1\xf6'
my_type, my_id = from_global_id('TXlUeXBlOsO7w7HDtg==')
assert my_type == 'MyType'
assert my_id == my_unicode_id
my_unicode_id = u'\u06ED'
my_type, my_id = from_global_id('TXlUeXBlOtut')
assert my_type == 'MyType'
assert my_id == my_unicode_id
| miracle2k/graphql-relay-py | graphql_relay/node/tests/test_node.py | Python | mit | 7,306 |
# Copyright 2019 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class IntMandate(models.Model):
_inherit = "int.mandate"
mandate_instance_id = fields.Many2one(
related="int_assembly_id.instance_id",
store="True",
index="True",
readonly=True,
)
partner_instance_search_ids = fields.Many2many(
relation="int_mandate_partner_instance_membership_rel",
)
| mozaik-association/mozaik | mozaik_membership_mandate/models/int_mandate.py | Python | agpl-3.0 | 477 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.files.storage
import products.models
class Migration(migrations.Migration):
dependencies = [
('products', '0002_auto_20150626_0657'),
]
operations = [
migrations.AlterField(
model_name='product',
name='download',
field=models.FileField(storage=django.core.files.storage.FileSystemStorage(location=b'/Users/jmitch/Dropbox/cfeprojects/Marketplace/static/protected'), null=True, upload_to=products.models.download_loc),
),
]
| codingforentrepreneurs/marketplace | src/products/migrations/0003_auto_20150626_0701.py | Python | apache-2.0 | 627 |
import wx
import listControl as lc
import getPlugins as gpi
from decimal import Decimal
import os
class Plugin():
def OnSize(self):
# Respond to size change
self.bPSize = self.bigPanel.GetSize()
self.list.SetSize((self.bPSize[0] - 118, self.bPSize[1] - 40))
self.ButtonShow(False)
self.SetButtons()
self.ButtonShow(True)
def Refresh(self,record):
self.GetExec(record)
def Clear(self):
self.list.Show(False)
self.ButtonShow(False)
def ButtonShow(self,tf):
for b in self.buttons:
b.Show(tf)
def SetButtons(self):
self.views = gpi.GetPlugIns(
self.hd+r"\plugins\Views\BLASTViewPlugins")
xPos = 300
self.buttons = []
for v in self.views.values():
self.buttons.append(wx.Button(self.bigPanel, -1,
str(v.GetName()),
pos = (self.bPSize[0] * xPos / 747,
self.bPSize[1] - 35),
size = (90, 22),
style = wx.NO_BORDER))
self.buttons[-1].SetBackgroundColour(
self.colorList[v.GetColors()]['Back'])
self.buttons[-1].SetForegroundColour(
self.colorList[v.GetColors()]['Fore'])
xPos += 100
self.bigPanel.Bind(wx.EVT_BUTTON, self.DoView, self.buttons[-1])
def Init(self, parent, bigPanel, colorList):
self.hd = os.getcwd()
self.colorList = colorList
self.bigPanel = bigPanel
self.bPSize = self.bigPanel.GetSize()
self.list = lc.TestListCtrl(self.bigPanel, -1, size = (0,0),
pos = (self.bPSize[0] - 118,
self.bPSize[1] - 40),
style = wx.LC_REPORT|wx.LC_VIRTUAL,
numCols = 7)
self.list.SetBackgroundColour(
self.colorList['ViewPanelList']['Back'])
self.list.SetForegroundColour(
self.colorList['ViewPanelList']['Fore'])
self.SetButtons()
self.ListCntrlFill()
self.list.Show(True)
self.ButtonShow(False)
def GetExec(self, Rec):
self.SetButtons()
self.list.Show(True)
self.ButtonShow(True)
self.BlastRec = Rec[0]
self.OnSelect(wx.EVT_IDLE)
def ListRefill(self):
listData = dict()
j = 0
for alignment in self.BlastRec.alignments:
for hsp in alignment.hsps:
listData[j] = (str(alignment.title), alignment.length,
hsp.score,
Decimal(hsp.expect).quantize(Decimal(10) ** -5),
hsp.identities, hsp.positives, hsp.gaps)
j += 1
self.list.Refill(listData)
def ListCntrlFill(self):
cols = ['Title', 'Length', 'Score', 'E Values',
'Idents.', 'Posits.', 'Gaps']
colWidths = [318, 50, 50, 59, 48, 48, 40]
self.list.Fill(cols, colWidths)
def OnSelect(self, event):
self.ListCntrlFill()
self.ListRefill()
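    # RecInfo pads each selected HSP's query/subject strings with '-' so they
    # all line up against the first alignment's full query, then returns
    # [matches, seqs, titles] in the shape the view plugins below consume.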
def RecInfo(self):
pos = self.list.GetSelected()
matches = ['']
seqs = []
titles = []
titles.append('query')
for p in pos:
alignment = self.BlastRec.alignments[self.list.itemIndexMap[p]]
for hsp in alignment.hsps:
query = ''
i = 1
strtblnk = ''
while i < hsp.query_start:
strtblnk += '-'
query += self.BlastRec.alignments[0].hsps[0].query[i-1]
i += 1
query += hsp.query
i = 0
endblnk = ''
j = len(strtblnk)+len(hsp.query)
while i + j < len(self.BlastRec.alignments[0].hsps[0].query):
endblnk += '-'
query += self.BlastRec.alignments[0].hsps[0].query[j+i]
i += 1
t = str(alignment.title).split('|')
titles.append(str(t[0] + '|' + t[1]))
matches.append(strtblnk + str(hsp.match) + endblnk)
seqs.append(query)
seqs.append(strtblnk + str(hsp.sbjct) + endblnk)
return [matches,seqs,titles]
def DoView(self,event):
for v in self.views.values():
if v.GetName() == event.GetEventObject().GetLabelText():
v.Plugin().GetExec(self.RecInfo(), self.bigPanel, self.hd,
self.BlastRec.alignments,
self.BlastRec.application)
def GetType(self):
return "Blast Results"
def GetName(self):
return "BLASTView"
def GetType():
return "Blast Results"
def GetName():
return "BLASTView"
| fxb22/BioGUI | plugins/Views/BLASTView.py | Python | gpl-2.0 | 5,154 |
class Node:
# Constructor to create a new node
def __init__(self, data):
self.data = data
self.left = None
self.right = None
root = Node(100)
root.left = Node(120)
root.right = Node(50)
root.right.right = Node(70)
root.right.left = Node(160)
root.left.left = Node(140)
root.left.right = Node(150)
path = []
result = []
import copy
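# DFS with backtracking: extend `path` on the way down, record a deep copy
# whenever the running sum plus the current node's value hits `key`, and pop
# on the way back up. For the tree above and key 220, the recorded paths are
# [100, 120] and [100, 50, 70].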
def findPath(root,key,sum_,path):
if root == None:
return
path.append(root.data)
if key == sum_+root.data:
print ("key found",path)
result.append(copy.deepcopy(path))
path.pop()
return
findPath(root.left,key,sum_ + root.data,path)
findPath(root.right,key,sum_ + root.data,path)
path.pop()
findPath(root,220,0,path)
| saurabhkumar1989/programming_question_python | my_question/binary-tree-all-path-to-given-sum.py | Python | apache-2.0 | 743 |
"""
run mondrian with given parameters
"""
# !/usr/bin/env python
# coding=utf-8
from HIBL import mondrian
from utils.read_adult_data import read_data as read_adult
from utils.read_informs_data import read_data as read_informs
import sys, copy, random
DATA_SELECT = 'a'
TYPE = False
def get_result_one(data, k=10):
"""
    run mondrian once with the given k (default 10)
"""
print "K=%d" % k
data_back = copy.deepcopy(data)
    _, eval_result = mondrian(data, k)
data = copy.deepcopy(data_back)
print "NCP %0.2f" % eval_result[0] + "%"
print "Running time %0.2f" % eval_result[1] + " seconds"
def get_result_k(data):
"""
    change k, while fixing QI and size of dataset
"""
data_back = copy.deepcopy(data)
# for k in [2, 5, 10, 25, 50, 100]:
for k in range(5, 105, 5):
print '#' * 30
print "K=%d" % k
result, eval_result = mondrian(data, k, TYPE)
data = copy.deepcopy(data_back)
print "NCP %0.2f" % eval_result[0] + "%"
print "Running time %0.2f" % eval_result[1] + " seconds"
def get_result_dataset(data, k=10, num_test=10):
"""
fix k and QI, while changing size of dataset
    num_test is the number of test repetitions.
"""
data_back = copy.deepcopy(data)
length = len(data_back)
joint = 5000
dataset_num = length / joint
if length % joint == 0:
dataset_num += 1
for i in range(1, dataset_num + 1):
pos = i * joint
ncp = rtime = 0
if pos > length:
continue
print '#' * 30
print "size of dataset %d" % pos
for j in range(num_test):
temp = random.sample(data, pos)
_, eval_result = mondrian(temp, k)
ncp += eval_result[0]
rtime += eval_result[1]
data = copy.deepcopy(data_back)
ncp /= num_test
rtime /= num_test
print "Average NCP %0.2f" % ncp + "%"
print "Running time %0.2f" % rtime + " seconds"
print '#' * 30
def get_result_qi(data, k=10):
"""
    change number of QI, while fixing k and size of dataset
"""
data_back = copy.deepcopy(data)
num_data = len(data[0])
for i in reversed(range(1, num_data)):
print '#' * 30
print "Number of QI=%d" % i
_, eval_result = mondrian(data, k, i)
data = copy.deepcopy(data_back)
print "NCP %0.2f" % eval_result[0] + "%"
print "Running time %0.2f" % eval_result[1] + " seconds"
if __name__ == '__main__':
FLAG = ''
LEN_ARGV = len(sys.argv)
try:
MODEL = sys.argv[1]
DATA_SELECT = sys.argv[2]
FLAG = sys.argv[3]
except IndexError:
pass
INPUT_K = 10
# read record
if MODEL == 's':
TYPE = False
else:
TYPE = True
if TYPE:
print "Relax Mondrian"
else:
print "Strict Mondrian"
if DATA_SELECT == 'i':
print "INFORMS data"
DATA = read_informs()
else:
print "Adult data"
DATA = read_adult()
if FLAG == 'k':
get_result_k(DATA)
elif FLAG == 'qi':
get_result_qi(DATA)
elif FLAG == 'data':
get_result_dataset(DATA)
elif FLAG == '':
get_result_one(DATA)
else:
try:
INPUT_K = int(FLAG)
get_result_one(DATA, INPUT_K)
except ValueError:
print "Usage: python anonymizer [r|s] [a | i] [k | qi | data]"
print "r: relax mondrian, s: strict mondrian"
print "a: adult dataset, i: INFORMS ataset"
print "k: varying k"
print "qi: varying qi numbers"
print "data: varying size of dataset"
print "example: python anonymizer a 10"
print "example: python anonymizer a k"
# anonymized dataset is stored in result
print "Finish Mondrian!!"
| qiyuangong/HILB_iDIST | anonymizer.py | Python | mit | 3,834 |
"""
Utility functions for setting "logged in" cookies used by subdomains.
"""
import json
import logging
import time
import six
from django.conf import settings
from django.contrib.auth.models import User
from django.dispatch import Signal
from django.urls import NoReverseMatch, reverse
from django.utils.http import http_date
from edx_rest_framework_extensions.auth.jwt import cookies as jwt_cookies
from edx_rest_framework_extensions.auth.jwt.constants import JWT_DELIMITER
from oauth2_provider.models import Application
from openedx.core.djangoapps.oauth_dispatch.adapters import DOTAdapter
from openedx.core.djangoapps.oauth_dispatch.api import create_dot_access_token
from openedx.core.djangoapps.oauth_dispatch.jwt import create_jwt_from_token
from openedx.core.djangoapps.user_api.accounts.utils import retrieve_last_sitewide_block_completed
from openedx.core.djangoapps.user_authn.exceptions import AuthFailedError
from student.models import CourseEnrollment
log = logging.getLogger(__name__)
CREATE_LOGON_COOKIE = Signal(providing_args=['user', 'response'])
JWT_COOKIE_NAMES = (
# Header and payload sections of a JSON Web Token containing user
# information and used as an access token.
jwt_cookies.jwt_cookie_header_payload_name(),
# Signature section of a JSON Web Token.
jwt_cookies.jwt_cookie_signature_name(),
)
# TODO (ARCH-245): Remove the following deprecated cookies.
DEPRECATED_LOGGED_IN_COOKIE_NAMES = (
# Set to 'true' if the user is logged in.
settings.EDXMKTG_LOGGED_IN_COOKIE_NAME,
# JSON-encoded dictionary with user information.
settings.EDXMKTG_USER_INFO_COOKIE_NAME,
)
ALL_LOGGED_IN_COOKIE_NAMES = JWT_COOKIE_NAMES + DEPRECATED_LOGGED_IN_COOKIE_NAMES
def are_logged_in_cookies_set(request):
""" Check whether the request has logged in cookies set. """
if _are_jwt_cookies_disabled():
cookies_that_should_exist = DEPRECATED_LOGGED_IN_COOKIE_NAMES
else:
cookies_that_should_exist = ALL_LOGGED_IN_COOKIE_NAMES
return all(
cookie_name in request.COOKIES
for cookie_name in cookies_that_should_exist
) and request.COOKIES[settings.EDXMKTG_LOGGED_IN_COOKIE_NAME]
def delete_logged_in_cookies(response):
"""
Delete cookies indicating that the user is logged in.
Arguments:
response (HttpResponse): The response sent to the client.
Returns:
HttpResponse
"""
for cookie_name in ALL_LOGGED_IN_COOKIE_NAMES:
response.delete_cookie(
cookie_name,
path='/',
domain=settings.SESSION_COOKIE_DOMAIN
)
return response
def standard_cookie_settings(request):
""" Returns the common cookie settings (e.g. expiration time). """
cookie_settings = {
'domain': settings.SESSION_COOKIE_DOMAIN,
'path': '/',
'httponly': None,
}
_set_expires_in_cookie_settings(cookie_settings, request.session.get_expiry_age())
# In production, TLS should be enabled so that this cookie is encrypted
# when we send it. We also need to set "secure" to True so that the browser
# will transmit it only over secure connections.
#
# In non-production environments (acceptance tests, devstack, and sandboxes),
# we still want to set this cookie. However, we do NOT want to set it to "secure"
# because the browser won't send it back to us. This can cause an infinite redirect
# loop in the third-party auth flow, which calls `are_logged_in_cookies_set` to determine
# whether it needs to set the cookie or continue to the next pipeline stage.
cookie_settings['secure'] = request.is_secure()
return cookie_settings
def _set_expires_in_cookie_settings(cookie_settings, expires_in):
"""
Updates the max_age and expires fields of the given cookie_settings,
based on the value of expires_in.
"""
expires_time = time.time() + expires_in
expires = http_date(expires_time)
cookie_settings.update({
'max_age': expires_in,
'expires': expires,
})
def set_logged_in_cookies(request, response, user):
"""
Set cookies at the time of user login. See ALL_LOGGED_IN_COOKIE_NAMES to see
which cookies are set.
Arguments:
request (HttpRequest): The request to the view, used to calculate
the cookie's expiration date based on the session expiration date.
response (HttpResponse): The response on which the cookie will be set.
user (User): The currently logged in user.
Returns:
HttpResponse
"""
# Note: The user may not yet be set on the request object by this time,
# especially during third party authentication. So use the user object
# that is passed in when needed.
if user.is_authenticated and not user.is_anonymous:
# JWT cookies expire at the same time as other login-related cookies
# so that cookie-based login determination remains consistent.
cookie_settings = standard_cookie_settings(request)
_set_deprecated_logged_in_cookie(response, cookie_settings)
_set_deprecated_user_info_cookie(response, request, user, cookie_settings)
_create_and_set_jwt_cookies(response, request, cookie_settings, user=user)
CREATE_LOGON_COOKIE.send(sender=None, user=user, response=response)
return response
def refresh_jwt_cookies(request, response, user):
"""
Resets the JWT related cookies in the response for the given user.
"""
cookie_settings = standard_cookie_settings(request)
_create_and_set_jwt_cookies(response, request, cookie_settings, user=user)
return response
def _set_deprecated_user_info_cookie(response, request, user, cookie_settings):
"""
Sets the user info cookie on the response.
The user info cookie has the following format:
{
"version": 1,
"username": "test-user",
"header_urls": {
"account_settings": "https://example.com/account/settings",
"resume_block":
"https://example.com//courses/org.0/course_0/Run_0/jump_to/i4x://org.0/course_0/vertical/vertical_4"
"learner_profile": "https://example.com/u/test-user",
"logout": "https://example.com/logout"
}
}
"""
user_info = _get_user_info_cookie_data(request, user)
response.set_cookie(
settings.EDXMKTG_USER_INFO_COOKIE_NAME,
json.dumps(user_info),
**cookie_settings
)
def _set_deprecated_logged_in_cookie(response, cookie_settings):
""" Sets the logged in cookie on the response. """
# Backwards compatibility: set the cookie indicating that the user
# is logged in. This is just a boolean value, so it's not very useful.
# In the future, we should be able to replace this with the "user info"
# cookie set below.
response.set_cookie(
settings.EDXMKTG_LOGGED_IN_COOKIE_NAME,
'true',
**cookie_settings
)
return response
def _get_user_info_cookie_data(request, user):
""" Returns information that will populate the user info cookie. """
# Set a cookie with user info. This can be used by external sites
# to customize content based on user information. Currently,
# we include information that's used to customize the "account"
# links in the header of subdomain sites (such as the marketing site).
header_urls = {'logout': reverse('logout')}
# Unfortunately, this app is currently used by both the LMS and Studio login pages.
# If we're in Studio, we won't be able to reverse the account/profile URLs.
# To handle this, we don't add the URLs if we can't reverse them.
# External sites will need to have fallback mechanisms to handle this case
# (most likely just hiding the links).
try:
header_urls['account_settings'] = reverse('account_settings')
header_urls['learner_profile'] = reverse('learner_profile', kwargs={'username': user.username})
except NoReverseMatch:
pass
# Add 'resume course' last completed block
try:
header_urls['resume_block'] = retrieve_last_sitewide_block_completed(user)
except User.DoesNotExist:
pass
# Convert relative URL paths to absolute URIs
for url_name, url_path in six.iteritems(header_urls):
header_urls[url_name] = request.build_absolute_uri(url_path)
user_info = {
'version': settings.EDXMKTG_USER_INFO_COOKIE_VERSION,
'username': user.username,
'header_urls': header_urls,
}
return user_info
def _create_and_set_jwt_cookies(response, request, cookie_settings, user=None):
""" Sets a cookie containing a JWT on the response. """
if _are_jwt_cookies_disabled():
return
expires_in = settings.JWT_AUTH['JWT_IN_COOKIE_EXPIRATION']
_set_expires_in_cookie_settings(cookie_settings, expires_in)
jwt = _create_jwt(request, user, expires_in)
jwt_header_and_payload, jwt_signature = _parse_jwt(jwt)
_set_jwt_cookies(
response,
cookie_settings,
jwt_header_and_payload,
jwt_signature,
)
def _create_jwt(request, user, expires_in):
"""
Creates and returns a jwt for the given user with the given expires_in value.
"""
oauth_application = _get_login_oauth_client()
access_token = create_dot_access_token(
# Note: Scopes for JWT cookies do not require additional permissions
request, user, oauth_application, expires_in=expires_in, scopes=['user_id', 'email', 'profile'],
)
return create_jwt_from_token(access_token, DOTAdapter(), use_asymmetric_key=True)
def _parse_jwt(jwt):
"""
Parses and returns the following parts of the jwt: header_and_payload, signature
"""
jwt_parts = jwt.split(JWT_DELIMITER)
header_and_payload = JWT_DELIMITER.join(jwt_parts[0:2])
signature = jwt_parts[2]
return header_and_payload, signature
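# For example, a JWT "aaa.bbb.ccc" parses into ("aaa.bbb", "ccc"); the
# header/payload half ends up in a JavaScript-readable cookie while the
# signature half is set httponly (see _set_jwt_cookies below).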
def _set_jwt_cookies(response, cookie_settings, jwt_header_and_payload, jwt_signature):
"""
Sets the given jwt_header_and_payload, jwt_signature, and refresh token in 3 different cookies.
The latter 2 cookies are set as httponly.
"""
cookie_settings['httponly'] = None
response.set_cookie(
jwt_cookies.jwt_cookie_header_payload_name(),
jwt_header_and_payload,
**cookie_settings
)
cookie_settings['httponly'] = True
response.set_cookie(
jwt_cookies.jwt_cookie_signature_name(),
jwt_signature,
**cookie_settings
)
def _get_login_oauth_client():
"""
Returns the configured OAuth Client/Application used for Login.
"""
login_client_id = settings.JWT_AUTH['JWT_LOGIN_CLIENT_ID']
try:
return Application.objects.get(client_id=login_client_id)
except Application.DoesNotExist:
raise AuthFailedError(
u"OAuth Client for the Login service, '{}', is not configured.".format(login_client_id)
)
def _are_jwt_cookies_disabled():
"""
Returns whether the use of JWT cookies is disabled.
"""
# Skip JWT cookies for most unit tests, since it raises errors when
# a login oauth client cannot be found in the database in ``_get_login_oauth_client``.
# This solution is not ideal, but see https://github.com/edx/edx-platform/pull/19180#issue-226706355
# for a discussion of alternative solutions that did not work or were halted.
if settings.FEATURES.get('DISABLE_SET_JWT_COOKIES_FOR_TESTS', False):
return True
# For Ironwood, we don't set JWK settings by default. Make sure we don't fail trying
# to use empty settings. This means by default, micro-frontends won't work, but Ironwood
# has none. Also, OAuth scopes won't work, but that is still a new and specialized feature.
# Installations that need them can create JWKs and add them to the settings.
private_signing_jwk = settings.JWT_AUTH['JWT_PRIVATE_SIGNING_JWK']
if private_signing_jwk == "None" or not private_signing_jwk:
return True
return False
| edx-solutions/edx-platform | openedx/core/djangoapps/user_authn/cookies.py | Python | agpl-3.0 | 12,065 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# File: savemanager.py
# by Arzaroth Lekva
# lekva@arzaroth.com
#
from __future__ import print_function, absolute_import, unicode_literals
import zlib
import struct
import celestia.utility.xxtea as xxtea
class SaveError(Exception):
pass
def read_or_raise(file, size):
data = file.read(size)
if len(data) != size:
raise SaveError("Unable to read, truncated or corrupted file")
return data
class SaveManager(object):
SECTIONS = 2
def __init__(self, filename, gluid=b''):
self.filename = filename
self.gluid = gluid
def _save_buffer(self, data, file, gluid=None):
crc_res = zlib.crc32(data) & 0xffffffff
uncompress_size = len(data)
decrypt_data = zlib.compress(data)
crc_offset = len(decrypt_data) + 4
decrypt_data += struct.pack('I', crc_res)
raw_data = xxtea.encrypt(decrypt_data,
self.gluid if gluid is None else gluid)
data_size = len(raw_data)
file.write(struct.pack('3I', uncompress_size, crc_offset, data_size))
file.write(raw_data)
def _load_buffer(self, file):
metadata = read_or_raise(file, 12)
uncompress_size, crc_offset, data_size = struct.unpack('3I', metadata)
if crc_offset > data_size:
raise SaveError("Bad size or crc_offset")
raw_data = read_or_raise(file, data_size)
decrypt_data = xxtea.decrypt(raw_data,
self.gluid)
crc_value = struct.unpack('I', decrypt_data[crc_offset - 4:crc_offset])[0]
try:
res = zlib.decompress(decrypt_data)
except zlib.error as e:
raise SaveError("Unable to decompress data, truncated or corrupted file, or bad decryption key")
if len(res) != uncompress_size:
raise SaveError("Invalid inflated data")
crc_res = zlib.crc32(res) & 0xffffffff
if crc_res != crc_value:
raise SaveError("crc mismatch")
return res
def load(self, legacy=False):
try:
with open(self.filename, 'rb') as file:
print('Loading save.', end='')
if legacy:
results = [decompress_data(file.read())]
print('.', end='')
else:
file.seek(-4, 2)
if struct.unpack('I', read_or_raise(file, 4))[0] != SaveManager.SECTIONS:
raise SaveError("Invalid sections number, truncated or corrupted file")
print('.', end='')
file.seek(0, 0)
results = [self._load_buffer(file) for _ in range(SaveManager.SECTIONS)]
print('.\nDone!')
except Exception as e:
raise SaveError(str(e))
save_number = struct.unpack('I', results[0])[0] if len(results) > 1 else 10
return results[-1], save_number
def save(self, data, filename, save_number=10,
gluid=None, legacy=False):
try:
with open(filename, 'wb') as file:
print('Writing save.', end='')
if legacy:
file.write(data)
print('.', end='')
else:
self._save_buffer(struct.pack('I', save_number), file, gluid)
print('.', end='')
self._save_buffer(data, file, gluid)
print('.', end='')
file.write(struct.pack('I', SaveManager.SECTIONS))
print('.\nDone!')
except Exception as e:
raise SaveError(str(e))
def decompress_data(data):
res = zlib.decompress(data[16:])
uncompress_size, compress_size = struct.unpack('2I', data[:8])
if len(res) != uncompress_size or len(data[16:]) != compress_size:
raise SaveError("Invalid inflated data")
return res
def compress_data(data):
compressed_data = zlib.compress(data)
res = struct.pack('4I', len(data), len(compressed_data), 0, 0)
res += compressed_data
return res
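# Minimal usage sketch (file names and key are illustrative):
#
#   manager = SaveManager('mlp_save.dat', gluid=b'sixteen byte key')
#   data, save_number = manager.load()
#   manager.save(data, 'mlp_save.out', save_number)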
| Arzaroth/CelestiaSunrise | celestia/save/savemanager.py | Python | bsd-2-clause | 4,136 |
#! /usr/bin/env python
# -*- coding:Utf8 -*-
# Record the contact details of club members
def encodage():
"renvoie la liste des valeurs entrées, ou une liste vide"
print("*** Veuillez entrer les données (ou <Enter> pour terminer) :")
while 1:
nom = input("Nom : ")
if nom == "":
return []
prenom = input("Prénom : ")
rueNum = input("Adresse (N° et rue) : ")
cPost = input("Code postal : ")
local = input("Localité : ")
tel = input("N° de téléphone : ")
print(nom, prenom, rueNum, cPost, local, tel)
ver = input("Entrez <Enter> si c'est correct, sinon <n> ")
if ver == "":
break
return [nom, prenom, rueNum, cPost, local, tel]
def enregistrer(liste):
"enregistre les données de la liste en les séparant par des <#>"
i = 0
while i < len(liste):
of.write(liste[i] + "#")
i = i + 1
    of.write("\n") # end-of-line character
nomF = input('Nom du fichier destinataire : ')
of = open(nomF, 'a')
while 1:
tt = encodage()
if tt == []:
break
enregistrer(tt)
of.close() | widowild/messcripts | exercice/python3/solutions_exercices/exercice_9_08.py | Python | gpl-3.0 | 1,176 |
#TODO; write tests when we activate algo for permissions.
| msabramo/kallithea | kallithea/tests/models/test_user_permissions_on_repos.py | Python | gpl-3.0 | 58 |
# Flipping bits
# Developer: Murillo Grubler
# https://www.hackerrank.com/challenges/flipping-bits/problem
# Reference: https://en.wikipedia.org/wiki/4,294,967,295
# Time complexity: O(1)
def flippingBits(N):
return N^4294967295
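# 4294967295 == 2**32 - 1 == 0xFFFFFFFF, so XOR-ing against it flips all
# 32 bits of N; e.g. flippingBits(0) == 4294967295 and
# flippingBits(4) == 4294967291.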
# Start algorithm
n = int(input().strip())
for a0 in range(n):
print(flippingBits(int(input().strip()))) | Murillo/Hackerrank-Algorithms | Algorithms/Bit Manipulation/flipping-bits.py | Python | mit | 343 |
"""
TODO try to move these tests to the appropriate locations
"""
from __future__ import division, absolute_import
from __future__ import print_function, unicode_literals
import numpy as np
import theano
import treeano
import treeano.lasagne
from treeano.lasagne.inits import GlorotUniformInit
from treeano.nodes import (InputNode,
SequentialNode,
HyperparameterNode)
from treeano.lasagne.nodes import (DenseNode,
ReLUNode)
fX = theano.config.floatX
def test_dense_node():
np.random.seed(42)
nodes = [
InputNode("a", shape=(3, 4, 5)),
DenseNode("b"),
]
sequential = SequentialNode("c", nodes)
hp_node = HyperparameterNode(
"d",
sequential,
num_units=14,
inits=[treeano.inits.ConstantInit(1)])
network = hp_node.network()
fn = network.function(["a"], ["d"])
x = np.random.randn(3, 4, 5).astype(fX)
res = np.dot(x.reshape(3, 20), np.ones((20, 14))) + np.ones(14)
np.testing.assert_allclose(fn(x)[0],
res,
rtol=1e-5,
atol=1e-8)
def test_fully_connected_and_relu_node():
np.random.seed(42)
nodes = [
InputNode("a", shape=(3, 4, 5)),
DenseNode("b"),
ReLUNode("e"),
]
sequential = SequentialNode("c", nodes)
hp_node = HyperparameterNode(
"d",
sequential,
num_units=14,
inits=[treeano.inits.ConstantInit(1)])
network = hp_node.network()
fn = network.function(["a"], ["d"])
x = np.random.randn(3, 4, 5).astype(fX)
res = np.dot(x.reshape(3, 20), np.ones((20, 14))) + np.ones(14)
np.testing.assert_allclose(fn(x)[0],
np.clip(res, 0, np.inf),
rtol=1e-5,
atol=1e-8)
def test_glorot_uniform_initialization():
np.random.seed(42)
nodes = [
InputNode("a", shape=(3, 4, 5)),
DenseNode("b"),
ReLUNode("e"),
]
sequential = SequentialNode("c", nodes)
hp_node = HyperparameterNode("d",
sequential,
num_units=1000,
inits=[GlorotUniformInit()])
network = hp_node.network()
fc_node = network["b"]
W_value = fc_node.get_vw("W").value
b_value = fc_node.get_vw("b").value
np.testing.assert_allclose(0,
W_value.mean(),
atol=1e-2)
np.testing.assert_allclose(np.sqrt(2.0 / (20 + 1000)),
W_value.std(),
atol=1e-2)
np.testing.assert_allclose(np.zeros(1000),
b_value,
rtol=1e-5,
atol=1e-8)
| diogo149/treeano | treeano/lasagne/tests/multi_test.py | Python | apache-2.0 | 2,914 |
# -*- coding: utf-8 -*-
import time
from ..utils.purge import uniquify
class EventManager:
def __init__(self, core):
self.pyload = core
self._ = core._
self.clients = []
def new_client(self, uuid):
self.clients.append(Client(uuid))
    def clean(self):
        # Rebuild the list instead of deleting during iteration, which would
        # skip the element that follows each removed client.
        now = time.time()
        self.clients = [c for c in self.clients if c.last_active + 30 >= now]
def get_events(self, uuid):
events = []
valid_uuid = False
for client in self.clients:
if client.uuid == uuid:
client.last_active = time.time()
valid_uuid = True
while client.new_events():
events.append(client.pop_event().to_list())
break
if not valid_uuid:
self.new_client(uuid)
events = [
ReloadAllEvent("queue").to_list(),
ReloadAllEvent("collector").to_list(),
]
return uniquify(events) # return uniquify(events, repr)
def add_event(self, event):
for client in self.clients:
client.add_event(event)
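# Typical flow (illustrative): a web client polls EventManager.get_events(uuid);
# an unknown uuid gets a fresh Client primed with two ReloadAllEvent entries,
# while a known client drains its queue, one to_list() payload per event.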
class Client:
def __init__(self, uuid):
self.uuid = uuid
self.last_active = time.time()
self.events = []
def new_events(self):
return len(self.events) > 0
def pop_event(self):
if not len(self.events):
return None
return self.events.pop(0)
def add_event(self, event):
self.events.append(event)
class UpdateEvent:
def __init__(self, itype, iid, destination):
assert itype == "pack" or itype == "file"
assert destination == "queue" or destination == "collector"
self.type = itype
self.id = iid
self.destination = destination
def to_list(self):
return ["update", self.destination, self.type, self.id]
class RemoveEvent:
def __init__(self, itype, iid, destination):
assert itype == "pack" or itype == "file"
assert destination == "queue" or destination == "collector"
self.type = itype
self.id = iid
self.destination = destination
def to_list(self):
return ["remove", self.destination, self.type, self.id]
class InsertEvent:
def __init__(self, itype, iid, after, destination):
assert itype == "pack" or itype == "file"
assert destination == "queue" or destination == "collector"
self.type = itype
self.id = iid
self.after = after
self.destination = destination
def to_list(self):
return ["insert", self.destination, self.type, self.id, self.after]
class ReloadAllEvent:
def __init__(self, destination):
assert destination == "queue" or destination == "collector"
self.destination = destination
def to_list(self):
return ["reload", self.destination]
class AccountUpdateEvent:
def to_list(self):
return ["account"]
class ConfigUpdateEvent:
def to_list(self):
return ["config"]
| vuolter/pyload | src/pyload/core/managers/event_manager.py | Python | agpl-3.0 | 3,084 |
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import optparse
import os
import sys
import types
import unittest
from chromedriver_launcher import ChromeDriverLauncher
import py_unittest_util
import test_paths
# Add the PYTHON_BINDINGS first so that our 'test' module is found instead of
# Python's.
sys.path = [test_paths.PYTHON_BINDINGS] + sys.path
from selenium.webdriver.remote.webdriver import WebDriver
# Implementation inspired from unittest.main()
class Main(object):
"""Main program for running WebDriver tests."""
_options, _args = None, None
TESTS_FILENAME = 'WEBDRIVER_TESTS'
_platform_map = {
'win32': 'win',
'darwin': 'mac',
'linux2': 'linux',
'linux3': 'linux',
}
TEST_PREFIX = 'selenium.test.selenium.webdriver.common.'
def __init__(self):
self._ParseArgs()
self._Run()
def _ParseArgs(self):
"""Parse command line args."""
parser = optparse.OptionParser()
parser.add_option(
'-v', '--verbose', action='store_true', default=False,
help='Output verbosely.')
parser.add_option(
'', '--log-file', type='string', default=None,
help='Provide a path to a file to which the logger will log')
parser.add_option(
'', '--filter', type='string', default='*',
help='Filter for specifying what tests to run, google test style.')
parser.add_option(
'', '--driver-exe', type='string', default=None,
help='Path to the default ChromeDriver executable to use.')
parser.add_option(
'', '--chrome-exe', type='string', default=None,
help='Path to the default Chrome executable to use.')
parser.add_option(
'', '--list', action='store_true', default=False,
help='List tests instead of running them.')
self._options, self._args = parser.parse_args()
# Setup logging - start with defaults
level = logging.WARNING
format = None
if self._options.verbose:
level=logging.DEBUG
format='%(asctime)s %(levelname)-8s %(message)s'
logging.basicConfig(level=level, format=format,
filename=self._options.log_file)
@staticmethod
def _IsTestClass(obj):
"""Returns whether |obj| is a unittest.TestCase."""
return isinstance(obj, (type, types.ClassType)) and \
issubclass(obj, unittest.TestCase)
@staticmethod
def _GetModuleFromName(test_name):
"""Return the module from the given test name.
Args:
test_name: dot-separated string for a module, a test case or a test
method
Examples: omnibox (a module)
omnibox.OmniboxTest (a test case)
omnibox.OmniboxTest.testA (a test method)
Returns:
tuple with first item corresponding to the module and second item
corresponding to the parts of the name that did not specify the module
Example: _GetModuleFromName('my_module.MyClass.testThis') returns
(my_module, ['MyClass', 'testThis'])
"""
parts = test_name.split('.')
parts_copy = parts[:]
while parts_copy:
try:
module = __import__('.'.join(parts_copy))
break
except ImportError:
del parts_copy[-1]
if not parts_copy: raise
for comp in parts[1:]:
if type(getattr(module, comp)) is not types.ModuleType:
break
module = getattr(module, comp)
return (module, parts[len(parts_copy):])
@staticmethod
def _GetTestsFromName(name):
"""Get a list of all test names from the given string.
Args:
name: dot-separated string for a module, a test case or a test method.
Examples: omnibox (a module)
omnibox.OmniboxTest (a test case)
omnibox.OmniboxTest.testA (a test method)
Returns:
[omnibox.OmniboxTest.testA, omnibox.OmniboxTest.testB, ...]
"""
def _GetTestsFromTestCase(class_obj):
"""Return all test method names from given class object."""
return [class_obj.__name__ + '.' + x for x in dir(class_obj) if
x.startswith('test')]
def _GetTestsFromModule(module):
"""Return all test method names from the given module object."""
tests = []
for name in dir(module):
obj = getattr(module, name)
if Main._IsTestClass(obj):
tests.extend([module.__name__ + '.' + x for x in
_GetTestsFromTestCase(obj)])
return tests
(obj, parts) = Main._GetModuleFromName(name)
for comp in parts:
obj = getattr(obj, comp)
if type(obj) == types.ModuleType:
return _GetTestsFromModule(obj)
elif Main._IsTestClass(obj):
      return [obj.__module__ + '.' + x for x in _GetTestsFromTestCase(obj)]
elif type(obj) == types.UnboundMethodType:
return [name]
else:
logging.warn('No tests in "%s"' % name)
return []
@staticmethod
def _EvalDataFrom(filename):
"""Return eval of python code from given file.
The datastructure used in the file will be preserved.
"""
data_file = os.path.join(filename)
contents = open(data_file).read()
try:
ret = eval(contents, {'__builtins__': None}, None)
except:
print >>sys.stderr, '%s is an invalid data file.' % data_file
raise
return ret
def _GetTestNamesFrom(self, filename):
modules = self._EvalDataFrom(filename)
all_names = modules.get('all', []) + \
modules.get(self._platform_map[sys.platform], [])
args = []
excluded = []
# Find all excluded tests. Excluded tests begin with '-'.
for name in all_names:
if name.startswith('-'): # Exclude
excluded.extend(self._GetTestsFromName(self.TEST_PREFIX + name[1:]))
else:
args.extend(self._GetTestsFromName(self.TEST_PREFIX + name))
for name in excluded:
args.remove(name)
if excluded:
logging.debug('Excluded %d test(s): %s' % (len(excluded), excluded))
return args
def _FakePytestHack(self):
"""Adds a fake 'pytest' module to the system modules.
A single test in text_handling_tests.py depends on the pytest module for
its test skipping capabilities. Without pytest, we can not run any tests
in the text_handling_tests.py module.
We are not sure we want to add pytest to chrome's third party dependencies,
so for now create a fake pytest module so that we can at least import and
run all the tests that do not depend on it. Those depending on it are
disabled.
"""
import imp
sys.modules['pytest'] = imp.new_module('pytest')
sys.modules['pytest'].mark = imp.new_module('mark')
sys.modules['pytest'].mark.ignore_chrome = lambda x: x
def _Run(self):
"""Run the tests."""
# TODO(kkania): Remove this hack.
self._FakePytestHack()
# In the webdriver tree, the python 'test' module is moved under the root
# 'selenium' one for testing. Here we mimic that by setting the 'selenium'
# module's 'test' attribute and adding 'selenium.test' to the system
# modules.
import selenium
import test
selenium.test = test
sys.modules['selenium.test'] = test
# Load and decide which tests to run.
test_names = self._GetTestNamesFrom(
os.path.join(os.path.dirname(__file__), self.TESTS_FILENAME))
all_tests_suite = unittest.defaultTestLoader.loadTestsFromNames(test_names)
filtered_suite = py_unittest_util.FilterTestSuite(
all_tests_suite, self._options.filter)
if self._options.list is True:
print '\n'.join(py_unittest_util.GetTestNamesFromSuite(filtered_suite))
sys.exit(0)
# The tests expect to run with preset 'driver' and 'webserver' class
# properties.
driver_exe = self._options.driver_exe or test_paths.CHROMEDRIVER_EXE
chrome_exe = self._options.chrome_exe or test_paths.CHROME_EXE
if driver_exe is None or not os.path.exists(os.path.expanduser(driver_exe)):
raise RuntimeError('ChromeDriver could not be found')
if chrome_exe is None or not os.path.exists(os.path.expanduser(chrome_exe)):
raise RuntimeError('Chrome could not be found')
driver_exe = os.path.expanduser(driver_exe)
chrome_exe = os.path.expanduser(chrome_exe)
# Increase number of http client threads to 10 to prevent hangs.
# The hang seems to occur because Chrome keeps too many multiple
# simultaneous connections open to our webserver.
server = ChromeDriverLauncher(
os.path.expanduser(driver_exe), test_paths.WEBDRIVER_TEST_DATA,
http_threads=10).Launch()
driver = WebDriver(server.GetUrl(),
{'chrome.binary': os.path.expanduser(chrome_exe)})
# The tests expect a webserver. Since ChromeDriver also operates as one,
# just pass this dummy class with the right info.
class DummyWebserver:
pass
webserver = DummyWebserver()
webserver.port = server.GetPort()
for test in py_unittest_util.GetTestsFromSuite(filtered_suite):
test.__class__.driver = driver
test.__class__.webserver = webserver
verbosity = 1
if self._options.verbose:
verbosity = 2
result = py_unittest_util.GTestTextTestRunner(verbosity=verbosity).run(
filtered_suite)
server.Kill()
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
Main()
| aYukiSekiguchi/ACCESS-Chromium | chrome/test/webdriver/test/run_webdriver_tests.py | Python | bsd-3-clause | 9,476 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from Kamaelia.Community.THF.Kamaelia.UI.OpenGL.Vector import Vector
from Kamaelia.Community.THF.Kamaelia.UI.OpenGL.Transform import Transform
from Kamaelia.Community.THF.Kamaelia.UI.OpenGL.OpenGLComponent import OpenGLComponent
import Axon
import pygame
from pygame.locals import *
from OpenGL.GL import *
from OpenGL.GLU import *
import time,math
class SimpleFold(OpenGLComponent):
def setup(self):
size = self.size/2.0
self.poly = [ (-size.x, -size.y),
(-size.x, +size.y),
(+size.x, +size.y),
(+size.x, -size.y),
]
self.starttime = time.time()
self.foldpoint = (size.x*+0.8, size.y*-0.8)
self.foldline = ( self.foldpoint, (0.5, 1.0) )
def draw(self):
normalpoly, foldedpoly = fold(self.poly, self.foldline)
glBegin(GL_POLYGON)
glColor3f(1.0, 1.0, 1.0)
for (x,y) in normalpoly:
glVertex3f(x, y, 0.0)
glEnd()
glBegin(GL_POLYGON)
glColor3f(0.75, 0.75, 0.75)
for (x,y) in foldedpoly:
glVertex3f(x, y, 0.0)
glEnd()
def frame(self):
size = self.size/2.0
angle = (time.time()-self.starttime) / 2.0
delta = math.cos(angle), math.sin(angle)
self.foldline = ( self.foldpoint, delta)
self.redraw()
def fold(poly, foldline):
"""\
    Folds a CONVEX (not concave) 2d polygon across a line.
    Takes a list of (X,Y) points representing the polygon and a line
    (point_on_line, delta), and returns a list [normal_poly, folded_poly]
    (the second polygon is the folded-over part, already reflected).
"""
foldpoint = foldline[0]
folddelta = foldline[1]
prev = poly[-1]
normpoly = []
foldpoly = []
subpoly = []
currentside = whichSide(prev, foldline)
for point in poly:
intersect = bisect(prev, point, foldline)
pointside = whichSide(point, foldline)
print ">",point, intersect
if intersect>=0.0 and intersect<=1.0:
ipoint = interpolate(prev,point,intersect)
else:
ipoint = tuple(point)
subpoly.append( ipoint )
print subpoly
print currentside, pointside
if currentside==0:
currentside = pointside
print currentside, pointside
if pointside * currentside < 0.0: # different signs, we've switched sides
print "switching", currentside, pointside, subpoly
if currentside<0.0:
normpoly.extend(subpoly)
print "N",normpoly
else:
foldpoly.extend(subpoly)
print "F",foldpoly
subpoly = [ipoint,point]
currentside = pointside
prev=point
if currentside<0.0:
normpoly.extend(subpoly)
else:
foldpoly.extend(subpoly)
print
print "N=",normpoly
print "F=",foldpoly
for i in range(0,len(foldpoly)):
foldpoly[i] = reflect(foldpoly[i],foldline)
return normpoly,foldpoly
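# A minimal sketch of calling fold() directly, assuming the conventions
# above: folding a unit square across the vertical line through the origin
# keeps the negative-x half and reflects the positive-x half onto it.
def _demo_fold():
    square = [(-1.0, -1.0), (-1.0, 1.0), (1.0, 1.0), (1.0, -1.0)]
    vertical = ((0.0, 0.0), (0.0, 1.0))   # (point_on_line, delta)
    normal, folded = fold(square, vertical)
    print "kept:", normal
    print "folded (reflected):", folded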
def whichSide(point,line):
"""Returns -ve, 0, +ve if point is on LHS, ontop, or RHS of line"""
linepoint, linedelta = line
# determine which side of the fold line this initial point is on
# which side of the line is it on? right hand side, or left?
pdx = point[0]-linepoint[0]
pdy = point[1]-linepoint[1]
if linedelta[0]==0:
return pdx
elif linedelta[0]>0:
return (linedelta[1]/linedelta[0])*pdx - pdy
elif linedelta[0]<0:
return pdy - (linedelta[1]/linedelta[0])*pdx
def bisect(start,end,line):
"""Returns the point of intersection of a line between start and end
and an infinite line (defined by a point and delta vector).
0 = intersects at start
0.5 = intersects half way between start and end
1 = intersects at end
<0 or >1 = intersects outside of those bounds
None = lines are parallel
"""
point,delta = line
divisor = ( (end[1]-start[1])*delta[0] - (end[0]-start[0])*delta[1] )
if divisor != 0.0:
intersect = ( (point[1]-start[1])*delta[0] - (point[0]-start[0])*delta[1] ) / divisor
else:
return None
return intersect
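# Illustrative check of bisect()'s parametric return value: the segment from
# (0,0) to (2,0) crosses a vertical line through (1,0) exactly half way, so
# the expected result is 0.5.
def _demo_bisect():
    assert bisect((0.0, 0.0), (2.0, 0.0), ((1.0, 0.0), (0.0, 1.0))) == 0.5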
def interpolate(start,end,val):
    return [ s*(1.0-val) + e*val for (s,e) in zip(start,end) ]
def reflect(point,foldline):
foldpoint = foldline[0]
dx,dy = foldline[1]
# move line (and therefore the point) so the line passes through (0,0)
px = point[0]-foldpoint[0]
py = point[1]-foldpoint[1]
# find closest point on the line
if dx == 0.0:
cx = 0
cy = py
elif dy == 0.0:
cx = px
cy = 0
else:
cx = (py + px*dx/dy)/(dy/dx + dx/dy)
cy = py + (dx/dy)*(px-cx)
# reflect
rx = point[0] - 2.0*(px-cx)
ry = point[1] - 2.0*(py-cy)
return rx,ry
if __name__ == '__main__':
from Kamaelia.Community.THF.Kamaelia.UI.OpenGL.SkyGrassBackground import SkyGrassBackground
SkyGrassBackground(size=(5000,5000,0), position=(0,0,-100)).activate()
SimpleFold(position=(0,0,-22), size=(10,10,2)).run()
| sparkslabs/kamaelia_ | Sketches/MH/OpenGL/Folding.py | Python | apache-2.0 | 6,204 |
#!/usr/bin/env python
__author__ = "Andrea L Halweg-Edwards"
__copyright__ = "Copyright 2015, The LASER Project"
__credits__ = ["Andrea L Halweg-Edwards"]
__license__ = "BSD"
__version__ = "0.1.0-dev"
__maintainer__ = "Andrea L Halweg-Edwards"
__email__ = "andrea.edwards@colorado.edu"
__status__ = "Development"
class ConfigurationManager(object):
""""""
def __init__(self):
self.web_port = 3005
config = ConfigurationManager()
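# Typical consumption elsewhere in the project would be (an assumption based
# on the module-level singleton above, not a documented API):
#
#     from config import config
#     port = config.web_port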
| AndreaEdwards/dna_assembly_tools | config.py | Python | bsd-3-clause | 449 |
#!/usr/bin/env python3
# vim:fileencoding=utf-8
# Copyright (c) 2014 Masami HIRATA <msmhrt@gmail.com>
import re
import unittest
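# Behaviour under test, as inferred from the assertions below: the decorator
# drives the wrapped generator, sending each non-None yielded value straight
# back into it; as soon as the generator yields None, the wrapper returns the
# decorator's configured default value (None unless supplied positionally or
# as `value=`); if the generator finishes without yielding None, its own
# return value is the result. Decorating a non-generator raises TypeError.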
class TestYnbr(unittest.TestCase):
def test_ynbr(self):
from ynbr import yield_none_becomes_return
restr_type_error_1 = (r"\A" +
re.escape("yield_none_becomes_return() takes" +
" from 0 to 1 positional arguments" +
" but 2 were given"))
restr_type_error_2 = (r"\A" +
re.escape("yield_none_becomes_return() takes" +
" 1 argument but 2 were given."))
restr_type_error_3 = (r"\A" +
re.escape("@yield_none_becomes_return is used" +
" only for generator functions"))
@yield_none_becomes_return
def a_ham():
yield None
self.assertEqual(a_ham(), None)
del a_ham
@yield_none_becomes_return
def a_ham():
num = yield 1
return num
self.assertEqual(a_ham(), 1)
del a_ham
@yield_none_becomes_return()
def a_ham():
yield None
self.assertEqual(a_ham(), None)
del a_ham
@yield_none_becomes_return()
def a_ham():
num = yield 2
return num
self.assertEqual(a_ham(), 2)
del a_ham
@yield_none_becomes_return(None)
def a_ham():
yield None
self.assertEqual(a_ham(), None)
del a_ham
@yield_none_becomes_return(None)
def a_ham():
num = yield 3
return num
self.assertEqual(a_ham(), 3)
del a_ham
@yield_none_becomes_return(4)
def a_ham():
yield None
self.assertEqual(a_ham(), 4)
del a_ham
@yield_none_becomes_return(5)
def a_ham():
num = yield 6
return num
self.assertEqual(a_ham(), 6)
del a_ham
@yield_none_becomes_return(value=None)
def a_ham():
yield None
self.assertEqual(a_ham(), None)
del a_ham
@yield_none_becomes_return(value=None)
def a_ham():
num = yield 7
return num
self.assertEqual(a_ham(), 7)
del a_ham
def a_generator():
yield None
@yield_none_becomes_return(value=a_generator)
def a_ham():
yield None
self.assertEqual(a_ham(), a_generator)
del a_ham, a_generator
def a_generator():
yield None
@yield_none_becomes_return(value=a_generator)
def a_ham():
num = yield 8
return num
self.assertEqual(a_ham(), 8)
del a_ham, a_generator
@yield_none_becomes_return(value=9)
def a_ham():
yield None
self.assertEqual(a_ham(), 9)
del a_ham
@yield_none_becomes_return(value=10)
def a_ham():
num = yield 11
return num
self.assertEqual(a_ham(), 11)
del a_ham
with self.assertRaisesRegex(TypeError, restr_type_error_1):
@yield_none_becomes_return(12, 13)
def a_ham():
yield None
del a_ham # for pyflakes
with self.assertRaisesRegex(TypeError, restr_type_error_2):
@yield_none_becomes_return(14, value=15)
def a_ham():
yield None
del a_ham # for pyflakes
with self.assertRaisesRegex(TypeError, restr_type_error_2):
@yield_none_becomes_return(16, value=17)
def a_ham():
num = yield 18
return num
del a_ham # for pyflakes
with self.assertRaisesRegex(TypeError, restr_type_error_2):
@yield_none_becomes_return(None, value=None)
def a_ham():
yield
del a_ham # for pyflakes
with self.assertRaisesRegex(TypeError, restr_type_error_3):
@yield_none_becomes_return
def a_ham():
pass
del a_ham
with self.assertRaisesRegex(TypeError, restr_type_error_3):
@yield_none_becomes_return
def a_ham():
return
del a_ham
with self.assertRaisesRegex(TypeError, restr_type_error_3):
@yield_none_becomes_return
def a_ham():
return 19
del a_ham
@yield_none_becomes_return(20)
def a_ham():
yield
self.assertEqual(a_ham(), 20)
del a_ham
@yield_none_becomes_return(21)
def a_ham():
yield 22
self.assertEqual(a_ham(), None)
del a_ham
@yield_none_becomes_return(23)
def a_ham():
yield
return
self.assertEqual(a_ham(), 23)
del a_ham
@yield_none_becomes_return(24)
def a_ham():
yield 25
return
self.assertEqual(a_ham(), None)
del a_ham
@yield_none_becomes_return
def a_ham():
first = yield 26
second = yield (27, 28)
third = yield (29,)
return first, second, third
self.assertEqual(a_ham(), (26, (27, 28), (29,)))
del a_ham
| msmhrt/ynbr | tests/test_ynbr.py | Python | bsd-2-clause | 5,498 |
'''
In this little assignment you are given a string of space separated numbers, and have to return the highest and lowest number.
Example:
high_and_low("1 2 3 4 5") # return "5 1"
high_and_low("1 2 -3 4 5") # return "5 -3"
high_and_low("1 9 3 4 -5") # return "9 -5"
'''
def high_and_low(numbers):
    # We can use the built-in .split() method to go from string -> list
toIterate = numbers.split()
# If we don't use float as a key, it will compare the values as strings.
return max(toIterate, key=float) + ' ' + min(toIterate, key=float)
if __name__ == '__main__':
print(high_and_low("4 5 29 54 4 0 -214 542 -64 1 -3 6 -6"))
| SebastianLloret/CSCI-1310 | Misc/minMax.py | Python | gpl-3.0 | 648 |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
'''
Functions for helping with serialization and deserialization of
Bokeh objects.
Certain NumPy array dtypes can be serialized to a binary format for
performance and efficiency. The list of supported dtypes is:
{binary_array_types}
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import base64
import datetime as dt
import sys
import uuid
from math import isinf, isnan
from threading import Lock
# External imports
import numpy as np
# Bokeh imports
from ..settings import settings
from .dependencies import import_optional
from .string import format_docstring
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
pd = import_optional('pandas')
BINARY_ARRAY_TYPES = set([
np.dtype(np.float32),
np.dtype(np.float64),
np.dtype(np.uint8),
np.dtype(np.int8),
np.dtype(np.uint16),
np.dtype(np.int16),
np.dtype(np.uint32),
np.dtype(np.int32),
])
DATETIME_TYPES = set([
dt.time,
dt.datetime,
np.datetime64,
])
if pd:
try:
_pd_timestamp = pd.Timestamp
except AttributeError:
_pd_timestamp = pd.tslib.Timestamp
DATETIME_TYPES.add(_pd_timestamp)
DATETIME_TYPES.add(pd.Timedelta)
DATETIME_TYPES.add(pd.Period)
DATETIME_TYPES.add(type(pd.NaT))
NP_EPOCH = np.datetime64(0, 'ms')
NP_MS_DELTA = np.timedelta64(1, 'ms')
DT_EPOCH = dt.datetime.utcfromtimestamp(0)
__doc__ = format_docstring(__doc__, binary_array_types="\n".join("* ``np." + str(x) + "``" for x in BINARY_ARRAY_TYPES))
__all__ = (
'array_encoding_disabled',
'convert_date_to_datetime',
'convert_datetime_array',
'convert_datetime_type',
'convert_timedelta_type',
'decode_base64_dict',
'encode_binary_dict',
'encode_base64_dict',
'is_datetime_type',
'is_timedelta_type',
'make_globally_unique_id',
'make_id',
'serialize_array',
'transform_array',
'transform_array_to_list',
'transform_column_source_data',
'traverse_data',
'transform_series',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
def is_datetime_type(obj):
''' Whether an object is any date, time, or datetime type recognized by
Bokeh.
    Args:
obj (object) : the object to test
Returns:
bool : True if ``obj`` is a datetime type
'''
return isinstance(obj, _dt_tuple)
def is_timedelta_type(obj):
''' Whether an object is any timedelta type recognized by Bokeh.
    Args:
obj (object) : the object to test
Returns:
bool : True if ``obj`` is a timedelta type
'''
return isinstance(obj, (dt.timedelta, np.timedelta64))
def convert_date_to_datetime(obj):
    ''' Convert a date object to floating point milliseconds since epoch.
    Args:
        obj (date) : the object to convert
    Returns:
        float : milliseconds
    '''
return (dt.datetime(*obj.timetuple()[:6]) - DT_EPOCH).total_seconds() * 1000
def convert_timedelta_type(obj):
''' Convert any recognized timedelta value to floating point absolute
milliseconds.
    Args:
obj (object) : the object to convert
Returns:
float : milliseconds
'''
if isinstance(obj, dt.timedelta):
return obj.total_seconds() * 1000.
elif isinstance(obj, np.timedelta64):
return (obj / NP_MS_DELTA)
def convert_datetime_type(obj):
''' Convert any recognized date, time, or datetime value to floating point
milliseconds since epoch.
    Args:
obj (object) : the object to convert
Returns:
float : milliseconds
'''
# Pandas NaT
if pd and obj is pd.NaT:
return np.nan
# Pandas Period
if pd and isinstance(obj, pd.Period):
return obj.to_timestamp().value / 10**6.0
# Pandas Timestamp
if pd and isinstance(obj, _pd_timestamp): return obj.value / 10**6.0
# Pandas Timedelta
elif pd and isinstance(obj, pd.Timedelta): return obj.value / 10**6.0
# Datetime (datetime is a subclass of date)
elif isinstance(obj, dt.datetime):
diff = obj.replace(tzinfo=None) - DT_EPOCH
return diff.total_seconds() * 1000
# XXX (bev) ideally this would not be here "dates are not datetimes"
# Date
elif isinstance(obj, dt.date):
return convert_date_to_datetime(obj)
# NumPy datetime64
elif isinstance(obj, np.datetime64):
epoch_delta = obj - NP_EPOCH
return (epoch_delta / NP_MS_DELTA)
# Time
elif isinstance(obj, dt.time):
return (obj.hour * 3600 + obj.minute * 60 + obj.second) * 1000 + obj.microsecond / 1000.
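# Spot checks of the conversions above (illustrative helper, not part of
# Bokeh's public API; the values are exact for these inputs):
def _demo_datetime_conversions():
    assert convert_datetime_type(dt.datetime(1970, 1, 1, 0, 0, 1)) == 1000.0
    assert convert_timedelta_type(dt.timedelta(seconds=2)) == 2000.0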
def convert_datetime_array(array):
''' Convert NumPy datetime arrays to arrays to milliseconds since epoch.
Args:
array : (obj)
A NumPy array of datetime to convert
If the value passed in is not a NumPy array, it will be returned as-is.
Returns:
array
'''
if not isinstance(array, np.ndarray):
return array
# not quite correct, truncates to ms..
if array.dtype.kind == 'M':
array = array.astype('datetime64[us]').astype('int64') / 1000.
elif array.dtype.kind == 'm':
array = array.astype('timedelta64[us]').astype('int64') / 1000.
# XXX (bev) special case dates, not great
elif array.dtype.kind == 'O' and len(array) > 0 and isinstance(array[0], dt.date):
try:
array = array.astype('datetime64[us]').astype('int64') / 1000.
except Exception:
pass
return array
def make_id():
''' Return a new unique ID for a Bokeh object.
Normally this function will return simple monotonically increasing integer
IDs (as strings) for identifying Bokeh objects within a Document. However,
if it is desirable to have globally unique for every object, this behavior
can be overridden by setting the environment variable ``BOKEH_SIMPLE_IDS=no``.
Returns:
str
'''
global _simple_id
if settings.simple_ids():
with _simple_id_lock:
_simple_id += 1
return str(_simple_id)
else:
return make_globally_unique_id()
def make_globally_unique_id():
''' Return a globally unique UUID.
Some situations, e.g. id'ing dynamically created Divs in HTML documents,
always require globally unique IDs.
Returns:
str
'''
return str(uuid.uuid4())
def array_encoding_disabled(array):
''' Determine whether an array may be binary encoded.
The NumPy array dtypes that can be encoded are:
{binary_array_types}
Args:
array (np.ndarray) : the array to check
Returns:
bool
'''
# disable binary encoding for non-supported dtypes
return array.dtype not in BINARY_ARRAY_TYPES
array_encoding_disabled.__doc__ = format_docstring(array_encoding_disabled.__doc__,
binary_array_types="\n ".join("* ``np." + str(x) + "``"
for x in BINARY_ARRAY_TYPES))
def transform_array(array, force_list=False, buffers=None):
''' Transform a NumPy arrays into serialized format
Converts un-serializable dtypes and returns JSON serializable
format
Args:
array (np.ndarray) : a NumPy array to be transformed
force_list (bool, optional) : whether to only output to standard lists
This function can encode some dtypes using a binary encoding, but
setting this argument to True will override that and cause only
standard Python lists to be emitted. (default: False)
buffers (set, optional) :
If binary buffers are desired, the buffers parameter may be
provided, and any columns that may be sent as binary buffers
will be added to the set. If None, then only base64 encoding
will be used (default: None)
If force_list is True, then this value will be ignored, and
no buffers will be generated.
**This is an "out" parameter**. The values it contains will be
modified in-place.
Returns:
JSON
'''
array = convert_datetime_array(array)
return serialize_array(array, force_list=force_list, buffers=buffers)
def transform_array_to_list(array):
''' Transforms a NumPy array into a list of values
Args:
array (np.nadarray) : the NumPy array series to transform
Returns:
list or dict
'''
if (array.dtype.kind in ('u', 'i', 'f') and (~np.isfinite(array)).any()):
transformed = array.astype('object')
transformed[np.isnan(array)] = 'NaN'
transformed[np.isposinf(array)] = 'Infinity'
transformed[np.isneginf(array)] = '-Infinity'
return transformed.tolist()
elif (array.dtype.kind == 'O' and pd and pd.isnull(array).any()):
transformed = array.astype('object')
transformed[pd.isnull(array)] = 'NaN'
return transformed.tolist()
return array.tolist()
def transform_series(series, force_list=False, buffers=None):
''' Transforms a Pandas series into serialized form
Args:
series (pd.Series) : the Pandas series to transform
force_list (bool, optional) : whether to only output to standard lists
This function can encode some dtypes using a binary encoding, but
setting this argument to True will override that and cause only
standard Python lists to be emitted. (default: False)
buffers (set, optional) :
If binary buffers are desired, the buffers parameter may be
provided, and any columns that may be sent as binary buffers
will be added to the set. If None, then only base64 encoding
will be used (default: None)
If force_list is True, then this value will be ignored, and
no buffers will be generated.
**This is an "out" parameter**. The values it contains will be
modified in-place.
Returns:
list or dict
'''
# not checking for pd here, this function should only be called if it
# is already known that series is a Pandas Series type
if isinstance(series, pd.PeriodIndex):
vals = series.to_timestamp().values
else:
vals = series.values
return transform_array(vals, force_list=force_list, buffers=buffers)
def serialize_array(array, force_list=False, buffers=None):
''' Transforms a NumPy array into serialized form.
Args:
array (np.ndarray) : the NumPy array to transform
force_list (bool, optional) : whether to only output to standard lists
This function can encode some dtypes using a binary encoding, but
setting this argument to True will override that and cause only
standard Python lists to be emitted. (default: False)
buffers (set, optional) :
If binary buffers are desired, the buffers parameter may be
provided, and any columns that may be sent as binary buffers
will be added to the set. If None, then only base64 encoding
will be used (default: None)
If force_list is True, then this value will be ignored, and
no buffers will be generated.
**This is an "out" parameter**. The values it contains will be
modified in-place.
Returns:
list or dict
'''
if isinstance(array, np.ma.MaskedArray):
array = array.filled(np.nan) # Set masked values to nan
if (array_encoding_disabled(array) or force_list):
return transform_array_to_list(array)
if not array.flags['C_CONTIGUOUS']:
array = np.ascontiguousarray(array)
if buffers is None:
return encode_base64_dict(array)
else:
return encode_binary_dict(array, buffers)
def traverse_data(obj, buffers=None):
''' Recursively traverse an object until a flat list is found.
The flat list is converted to a numpy array and passed to transform_array()
to handle ``nan``, ``inf``, and ``-inf``.
Args:
obj (list) : a list of values or lists
'''
if all(isinstance(el, np.ndarray) for el in obj):
return [transform_array(el, buffers=buffers) for el in obj]
obj_copy = []
for item in obj:
# Check the base/common case first for performance reasons
# Also use type(x) is float because it's faster than isinstance
if type(item) is float:
if isnan(item):
item = 'NaN'
elif isinf(item):
if item > 0:
item = 'Infinity'
else:
item = '-Infinity'
obj_copy.append(item)
elif isinstance(item, (list, tuple)): # check less common type second
            obj_copy.append(traverse_data(item, buffers=buffers))
else:
obj_copy.append(item)
return obj_copy
def transform_column_source_data(data, buffers=None, cols=None):
''' Transform ``ColumnSourceData`` data to a serialized format
Args:
data (dict) : the mapping of names to data columns to transform
buffers (set, optional) :
If binary buffers are desired, the buffers parameter may be
provided, and any columns that may be sent as binary buffers
will be added to the set. If None, then only base64 encoding
will be used (default: None)
**This is an "out" parameter**. The values it contains will be
modified in-place.
cols (list[str], optional) :
Optional list of subset of columns to transform. If None, all
columns will be transformed (default: None)
Returns:
JSON compatible dict
'''
to_transform = set(data) if cols is None else set(cols)
data_copy = {}
for key in to_transform:
if pd and isinstance(data[key], (pd.Series, pd.Index)):
data_copy[key] = transform_series(data[key], buffers=buffers)
elif isinstance(data[key], np.ndarray):
data_copy[key] = transform_array(data[key], buffers=buffers)
else:
data_copy[key] = traverse_data(data[key], buffers=buffers)
return data_copy
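# A small usage sketch (illustrative, not part of Bokeh's public API): mixed
# column types all serialize through this one entry point, with any binary
# payloads collected into the caller-supplied ``buffers`` list.
def _demo_transform_column_source_data():
    buffers = []
    data = {'x': np.array([1.0, 2.0]), 'y': [3, float('nan')]}
    serialized = transform_column_source_data(data, buffers=buffers)
    # 'x' comes back as a {'__buffer__': ...} reference, 'y' as [3, 'NaN']
    return serialized, buffers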
def encode_binary_dict(array, buffers):
''' Send a numpy array as an unencoded binary buffer
The encoded format is a dict with the following structure:
.. code:: python
{
'__buffer__' : << an ID to locate the buffer >>,
'shape' : << array shape >>,
'dtype' : << dtype name >>,
'order' : << byte order at origin (little or big)>>
}
Args:
array (np.ndarray) : an array to encode
        buffers (list) :
            List to add buffers to
**This is an "out" parameter**. The values it contains will be
modified in-place.
Returns:
dict
'''
buffer_id = make_id()
buf = (dict(id=buffer_id), array.tobytes())
buffers.append(buf)
return {
'__buffer__' : buffer_id,
'shape' : array.shape,
'dtype' : array.dtype.name,
'order' : sys.byteorder
}
def encode_base64_dict(array):
''' Encode a NumPy array using base64:
The encoded format is a dict with the following structure:
.. code:: python
{
'__ndarray__' : << base64 encoded array data >>,
'shape' : << array shape >>,
'dtype' : << dtype name >>,
}
Args:
array (np.ndarray) : an array to encode
Returns:
dict
'''
return {
'__ndarray__' : base64.b64encode(array.data).decode('utf-8'),
'shape' : array.shape,
'dtype' : array.dtype.name
}
def decode_base64_dict(data):
''' Decode a base64 encoded array into a NumPy array.
Args:
data (dict) : encoded array data to decode
Data should have the format encoded by :func:`encode_base64_dict`.
Returns:
np.ndarray
'''
b64 = base64.b64decode(data['__ndarray__'])
array = np.copy(np.frombuffer(b64, dtype=data['dtype']))
if len(data['shape']) > 1:
array = array.reshape(data['shape'])
return array
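# Round-trip sketch for the two base64 helpers above (np.float64 is one of
# the dtypes in BINARY_ARRAY_TYPES, so no binary buffers are needed here):
def _demo_base64_roundtrip():
    original = np.arange(6, dtype=np.float64).reshape(2, 3)
    decoded = decode_base64_dict(encode_base64_dict(original))
    assert np.array_equal(original, decoded)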
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
_simple_id = 999
_simple_id_lock = Lock()
_dt_tuple = tuple(DATETIME_TYPES)
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| ericmjl/bokeh | bokeh/util/serialization.py | Python | bsd-3-clause | 17,759 |
#
# Cython -- Things that don't belong
# anywhere else in particular
#
import os, sys, re, codecs
modification_time = os.path.getmtime
def cached_function(f):
cache = {}
uncomputed = object()
def wrapper(*args):
res = cache.get(args, uncomputed)
if res is uncomputed:
res = cache[args] = f(*args)
return res
return wrapper
def cached_method(f):
cache_name = '__%s_cache' % f.__name__
def wrapper(self, *args):
cache = getattr(self, cache_name, None)
if cache is None:
cache = {}
setattr(self, cache_name, cache)
if args in cache:
return cache[args]
res = cache[args] = f(self, *args)
return res
return wrapper
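# Illustrative use of the memoisation helpers above: the wrapped body runs
# once per distinct argument tuple, after which results come from the cache.
@cached_function
def _demo_cached_square(x):
    return x * x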
def replace_suffix(path, newsuf):
base, _ = os.path.splitext(path)
return base + newsuf
def open_new_file(path):
if os.path.exists(path):
# Make sure to create a new file here so we can
# safely hard link the output files.
os.unlink(path)
# we use the ISO-8859-1 encoding here because we only write pure
# ASCII strings or (e.g. for file names) byte encoded strings as
# Unicode, so we need a direct mapping from the first 256 Unicode
# characters to a byte sequence, which ISO-8859-1 provides
return codecs.open(path, "w", encoding="ISO-8859-1")
def castrate_file(path, st):
# Remove junk contents from an output file after a
# failed compilation.
# Also sets access and modification times back to
# those specified by st (a stat struct).
try:
f = open_new_file(path)
except EnvironmentError:
pass
else:
f.write(
"#error Do not use this file, it is the result of a failed Cython compilation.\n")
f.close()
if st:
os.utime(path, (st.st_atime, st.st_mtime-1))
def file_newer_than(path, time):
ftime = modification_time(path)
return ftime > time
@cached_function
def search_include_directories(dirs, qualified_name, suffix, pos,
include=False, sys_path=False):
# Search the list of include directories for the given
# file name. If a source file position is given, first
# searches the directory containing that file. Returns
# None if not found, but does not report an error.
# The 'include' option will disable package dereferencing.
# If 'sys_path' is True, also search sys.path.
if sys_path:
dirs = dirs + tuple(sys.path)
if pos:
file_desc = pos[0]
from Cython.Compiler.Scanning import FileSourceDescriptor
if not isinstance(file_desc, FileSourceDescriptor):
raise RuntimeError("Only file sources for code supported")
if include:
dirs = (os.path.dirname(file_desc.filename),) + dirs
else:
dirs = (find_root_package_dir(file_desc.filename),) + dirs
dotted_filename = qualified_name
if suffix:
dotted_filename += suffix
if not include:
names = qualified_name.split('.')
package_names = tuple(names[:-1])
module_name = names[-1]
module_filename = module_name + suffix
package_filename = "__init__" + suffix
for dir in dirs:
path = os.path.join(dir, dotted_filename)
if path_exists(path):
return path
if not include:
package_dir = check_package_dir(dir, package_names)
if package_dir is not None:
path = os.path.join(package_dir, module_filename)
if path_exists(path):
return path
                path = os.path.join(package_dir, module_name,
                                    package_filename)
if path_exists(path):
return path
return None
@cached_function
def find_root_package_dir(file_path):
dir = os.path.dirname(file_path)
if file_path == dir:
return dir
elif is_package_dir(dir):
return find_root_package_dir(dir)
else:
return dir
@cached_function
def check_package_dir(dir, package_names):
for dirname in package_names:
dir = os.path.join(dir, dirname)
if not is_package_dir(dir):
return None
return dir
@cached_function
def is_package_dir(dir_path):
for filename in ("__init__.py",
"__init__.pyx",
"__init__.pxd"):
path = os.path.join(dir_path, filename)
if path_exists(path):
return 1
@cached_function
def path_exists(path):
# try on the filesystem first
if os.path.exists(path):
return True
# figure out if a PEP 302 loader is around
try:
loader = __loader__
# XXX the code below assumes a 'zipimport.zipimporter' instance
# XXX should be easy to generalize, but too lazy right now to write it
archive_path = getattr(loader, 'archive', None)
if archive_path:
normpath = os.path.normpath(path)
if normpath.startswith(archive_path):
arcname = normpath[len(archive_path)+1:]
try:
loader.get_data(arcname)
return True
except IOError:
return False
except NameError:
pass
return False
# file name encodings
def decode_filename(filename):
if isinstance(filename, unicode):
return filename
try:
filename_encoding = sys.getfilesystemencoding()
if filename_encoding is None:
filename_encoding = sys.getdefaultencoding()
filename = filename.decode(filename_encoding)
except UnicodeDecodeError:
pass
return filename
# support for source file encoding detection
_match_file_encoding = re.compile(u"coding[:=]\s*([-\w.]+)").search
def detect_file_encoding(source_filename):
f = open_source_file(source_filename, encoding="UTF-8", error_handling='ignore')
try:
return detect_opened_file_encoding(f)
finally:
f.close()
def detect_opened_file_encoding(f):
# PEPs 263 and 3120
# Most of the time the first two lines fall in the first 250 chars,
# and this bulk read/split is much faster.
lines = f.read(250).split("\n")
if len(lines) > 2:
m = _match_file_encoding(lines[0]) or _match_file_encoding(lines[1])
if m:
return m.group(1)
else:
return "UTF-8"
else:
# Fallback to one-char-at-a-time detection.
f.seek(0)
chars = []
for i in range(2):
c = f.read(1)
while c and c != u'\n':
chars.append(c)
c = f.read(1)
encoding = _match_file_encoding(u''.join(chars))
if encoding:
return encoding.group(1)
return "UTF-8"
def skip_bom(f):
"""
Read past a BOM at the beginning of a source file.
This could be added to the scanner, but it's *substantially* easier
to keep it at this level.
"""
if f.read(1) != u'\uFEFF':
f.seek(0)
normalise_newlines = re.compile(u'\r\n?|\n').sub
class NormalisedNewlineStream(object):
"""The codecs module doesn't provide universal newline support.
This class is used as a stream wrapper that provides this
functionality. The new 'io' in Py2.6+/3.x supports this out of the
box.
"""
def __init__(self, stream):
# let's assume .read() doesn't change
self.stream = stream
self._read = stream.read
self.close = stream.close
self.encoding = getattr(stream, 'encoding', 'UTF-8')
def read(self, count=-1):
data = self._read(count)
if u'\r' not in data:
return data
if data.endswith(u'\r'):
# may be missing a '\n'
data += self._read(1)
return normalise_newlines(u'\n', data)
def readlines(self):
content = []
data = self.read(0x1000)
while data:
content.append(data)
data = self.read(0x1000)
return u''.join(content).splitlines(True)
def seek(self, pos):
if pos == 0:
self.stream.seek(0)
else:
raise NotImplementedError
io = None
if sys.version_info >= (2,6):
try:
import io
except ImportError:
pass
def open_source_file(source_filename, mode="r",
encoding=None, error_handling=None,
require_normalised_newlines=True):
if encoding is None:
# Most of the time the coding is unspecified, so be optimistic that
# it's UTF-8.
f = open_source_file(source_filename, encoding="UTF-8", mode=mode, error_handling='ignore')
encoding = detect_opened_file_encoding(f)
if (encoding == "UTF-8"
and error_handling == 'ignore'
and require_normalised_newlines):
f.seek(0)
skip_bom(f)
return f
else:
f.close()
#
if not os.path.exists(source_filename):
try:
loader = __loader__
if source_filename.startswith(loader.archive):
return open_source_from_loader(
loader, source_filename,
encoding, error_handling,
require_normalised_newlines)
except (NameError, AttributeError):
pass
#
if io is not None:
stream = io.open(source_filename, mode=mode,
encoding=encoding, errors=error_handling)
else:
# codecs module doesn't have universal newline support
stream = codecs.open(source_filename, mode=mode,
encoding=encoding, errors=error_handling)
if require_normalised_newlines:
stream = NormalisedNewlineStream(stream)
skip_bom(stream)
return stream
def open_source_from_loader(loader,
source_filename,
encoding=None, error_handling=None,
require_normalised_newlines=True):
nrmpath = os.path.normpath(source_filename)
arcname = nrmpath[len(loader.archive)+1:]
data = loader.get_data(arcname)
if io is not None:
return io.TextIOWrapper(io.BytesIO(data),
encoding=encoding,
errors=error_handling)
else:
try:
import cStringIO as StringIO
except ImportError:
import StringIO
reader = codecs.getreader(encoding)
stream = reader(StringIO.StringIO(data))
if require_normalised_newlines:
stream = NormalisedNewlineStream(stream)
return stream
def str_to_number(value):
# note: this expects a string as input that was accepted by the
# parser already
if len(value) < 2:
value = int(value, 0)
elif value[0] == '0':
if value[1] in 'xX':
# hex notation ('0x1AF')
value = int(value[2:], 16)
elif value[1] in 'oO':
# Py3 octal notation ('0o136')
value = int(value[2:], 8)
elif value[1] in 'bB':
# Py3 binary notation ('0b101')
value = int(value[2:], 2)
else:
# Py2 octal notation ('0136')
value = int(value, 8)
else:
value = int(value, 0)
return value
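# Spot checks of the literal forms handled above (illustrative only; the
# parser guarantees the input is already a valid numeric literal):
def _demo_str_to_number():
    assert str_to_number('0x1AF') == 431   # hex
    assert str_to_number('0o136') == 94    # Py3 octal
    assert str_to_number('0b101') == 5     # binary
    assert str_to_number('0136') == 94     # Py2 octal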
def long_literal(value):
if isinstance(value, basestring):
value = str_to_number(value)
return not -2**31 <= value < 2**31
# all() and any() are new in 2.5
try:
# Make sure to bind them on the module, as they will be accessed as
# attributes
all = all
any = any
except NameError:
def all(items):
for item in items:
if not item:
return False
return True
def any(items):
for item in items:
if item:
return True
return False
@cached_function
def get_cython_cache_dir():
"""get the cython cache dir
Priority:
1. CYTHON_CACHE_DIR
2. (OS X): ~/Library/Caches/Cython
(posix not OS X): XDG_CACHE_HOME/cython if XDG_CACHE_HOME defined
3. ~/.cython
"""
if 'CYTHON_CACHE_DIR' in os.environ:
return os.environ['CYTHON_CACHE_DIR']
parent = None
if os.name == 'posix':
if sys.platform == 'darwin':
parent = os.path.expanduser('~/Library/Caches')
else:
# this could fallback on ~/.cache
parent = os.environ.get('XDG_CACHE_HOME')
if parent and os.path.isdir(parent):
return os.path.join(parent, 'cython')
# last fallback: ~/.cython
return os.path.expanduser(os.path.join('~', '.cython'))
| Teamxrtc/webrtc-streaming-node | third_party/webrtc/src/chromium/src/third_party/cython/src/Cython/Utils.py | Python | mit | 12,805 |
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime
import logging
from .bug import Bug
from .attachment import Attachment
from webkitpy.common.config.committers import CommitterList, Reviewer
_log = logging.getLogger(__name__)
def _id_to_object_dictionary(*objects):
dictionary = {}
for thing in objects:
dictionary[thing["id"]] = thing
return dictionary
# Testing
_patch1 = {
"id": 10000,
"bug_id": 50000,
"url": "http://example.com/10000",
"name": "Patch1",
"is_obsolete": False,
"is_patch": True,
"review": "+",
"reviewer_email": "foo@bar.com",
"commit-queue": "+",
"committer_email": "foo@bar.com",
"attacher_email": "Contributer1",
}
_patch2 = {
"id": 10001,
"bug_id": 50000,
"url": "http://example.com/10001",
"name": "Patch2",
"is_obsolete": False,
"is_patch": True,
"review": "+",
"reviewer_email": "reviewer2@webkit.org",
"commit-queue": "+",
"committer_email": "non-committer@example.com",
"attacher_email": "eric@webkit.org",
}
_patch3 = {
"id": 10002,
"bug_id": 50001,
"url": "http://example.com/10002",
"name": "Patch3",
"is_obsolete": False,
"is_patch": True,
"review": "?",
"commit-queue": "-",
"attacher_email": "eric@webkit.org",
"attach_date": datetime.datetime.today(),
}
_patch4 = {
"id": 10003,
"bug_id": 50003,
"url": "http://example.com/10002",
"name": "Patch3",
"is_obsolete": False,
"is_patch": True,
"review": "+",
"commit-queue": "?",
"reviewer_email": "foo@bar.com",
"attacher_email": "Contributer2",
}
_patch5 = {
"id": 10004,
"bug_id": 50003,
"url": "http://example.com/10002",
"name": "Patch5",
"is_obsolete": False,
"is_patch": True,
"review": "+",
"reviewer_email": "foo@bar.com",
"attacher_email": "eric@webkit.org",
}
_patch6 = { # Valid committer, but no reviewer.
"id": 10005,
"bug_id": 50003,
"url": "http://example.com/10002",
"name": "ROLLOUT of r3489",
"is_obsolete": False,
"is_patch": True,
"commit-queue": "+",
"committer_email": "foo@bar.com",
"attacher_email": "eric@webkit.org",
}
_patch7 = { # Valid review, patch is marked obsolete.
"id": 10006,
"bug_id": 50002,
"url": "http://example.com/10002",
"name": "Patch7",
"is_obsolete": True,
"is_patch": True,
"review": "+",
"reviewer_email": "foo@bar.com",
"attacher_email": "eric@webkit.org",
}
# This matches one of Bug.unassigned_emails
_unassigned_email = "webkit-unassigned@lists.webkit.org"
# This is needed for the FlakyTestReporter to believe the bug
# was filed by one of the webkitpy bots.
_commit_queue_email = "commit-queue@webkit.org"
_bug1 = {
"id": 50000,
"title": "Bug with two r+'d and cq+'d patches, one of which has an "
"invalid commit-queue setter.",
"reporter_email": "foo@foo.com",
"assigned_to_email": _unassigned_email,
"cc_emails": [],
"attachments": [_patch1, _patch2],
"bug_status": "UNCONFIRMED",
"comments": [],
}
_bug2 = {
"id": 50001,
"title": "Bug with a patch needing review.",
"reporter_email": "eric@webkit.org",
"assigned_to_email": "foo@foo.com",
"cc_emails": ["abarth@webkit.org", ],
"attachments": [_patch3],
"bug_status": "ASSIGNED",
"comments": [{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "bar@foo.com",
"text": "Message1.\nCommitted r35: <http://trac.webkit.org/changeset/35>",
},
],
}
_bug3 = {
"id": 50002,
"title": "The third bug",
"reporter_email": "foo@foo.com",
"assigned_to_email": _unassigned_email,
"cc_emails": [],
"attachments": [_patch7],
"bug_status": "NEW",
"comments": [{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "bar@foo.com",
"text": "Committed r30: <http://trac.webkit.org/changeset/30>",
},
{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "bar@foo.com",
"text": "Committed r31: <http://trac.webkit.org/changeset/31>",
},
],
}
_bug4 = {
"id": 50003,
"title": "The fourth bug",
"reporter_email": "foo@foo.com",
"assigned_to_email": "foo@foo.com",
"cc_emails": [],
"attachments": [_patch4, _patch5, _patch6],
"bug_status": "REOPENED",
"comments": [{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "bar@foo.com",
"text": "Committed r25: <http://trac.webkit.org/changeset/30>",
},
{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "bar@foo.com",
"text": "Rolled out in <http://trac.webkit.org/changeset/26",
},
],
}
_bug5 = {
"id": 50004,
"title": "The fifth bug",
"reporter_email": _commit_queue_email,
"assigned_to_email": "foo@foo.com",
"cc_emails": [],
"attachments": [],
"bug_status": "RESOLVED",
"dup_id": 50002,
"comments": [{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
"comment_email": "bar@foo.com",
"text": "Committed r15: <http://trac.webkit.org/changeset/15>",
},
],
}
class MockBugzillaQueries(object):
def __init__(self, bugzilla):
self._bugzilla = bugzilla
def _all_bugs(self):
return map(lambda bug_dictionary: Bug(bug_dictionary, self._bugzilla),
self._bugzilla.bug_cache.values())
def fetch_bug_ids_from_commit_queue(self):
bugs_with_commit_queued_patches = filter(
lambda bug: bug.commit_queued_patches(),
self._all_bugs())
return map(lambda bug: bug.id(), bugs_with_commit_queued_patches)
def fetch_attachment_ids_from_review_queue(self):
unreviewed_patches = sum([bug.unreviewed_patches()
for bug in self._all_bugs()], [])
return map(lambda patch: patch.id(), unreviewed_patches)
def fetch_patches_from_commit_queue(self):
return sum([bug.commit_queued_patches()
for bug in self._all_bugs()], [])
def fetch_bug_ids_from_pending_commit_list(self):
bugs_with_reviewed_patches = filter(lambda bug: bug.reviewed_patches(),
self._all_bugs())
bug_ids = map(lambda bug: bug.id(), bugs_with_reviewed_patches)
        # NOTE: This manual hack is here to allow testing logging in
        # test_assign_to_committer; the real pending-commit query on bugzilla
        # will return bugs with patches which have r+ but are also obsolete.
return bug_ids + [50002]
def fetch_bugs_from_review_queue(self, cc_email=None):
unreviewed_bugs = [bug for bug in self._all_bugs() if bug.unreviewed_patches()]
if cc_email:
return [bug for bug in unreviewed_bugs if cc_email in bug.cc_emails()]
return unreviewed_bugs
def fetch_patches_from_pending_commit_list(self):
return sum([bug.reviewed_patches() for bug in self._all_bugs()], [])
def fetch_bugs_matching_search(self, search_string):
return [self._bugzilla.fetch_bug(50004), self._bugzilla.fetch_bug(50003)]
def fetch_bugs_matching_quicksearch(self, search_string):
return [self._bugzilla.fetch_bug(50001), self._bugzilla.fetch_bug(50002),
self._bugzilla.fetch_bug(50003), self._bugzilla.fetch_bug(50004)]
_mock_reviewers = [Reviewer("Foo Bar", "foo@bar.com"),
Reviewer("Reviewer2", "reviewer2@webkit.org")]
# FIXME: Bugzilla is the wrong Mock-point. Once we have a BugzillaNetwork
# class we should mock that instead.
# Most of this class is just copy/paste from Bugzilla.
class MockBugzilla(object):
bug_server_url = "http://example.com"
bug_cache = _id_to_object_dictionary(_bug1, _bug2, _bug3, _bug4, _bug5)
attachment_cache = _id_to_object_dictionary(_patch1,
_patch2,
_patch3,
_patch4,
_patch5,
_patch6,
_patch7)
def __init__(self):
self.queries = MockBugzillaQueries(self)
# FIXME: This should move onto the Host object, and we should use a MockCommitterList
self.committers = CommitterList(reviewers=_mock_reviewers)
self.username = None
self._override_patch = None
def authenticate(self):
self.username = "username@webkit.org"
def create_bug(self,
bug_title,
bug_description,
component=None,
diff=None,
patch_description=None,
cc=None,
blocked=None,
mark_for_review=False,
mark_for_commit_queue=False):
_log.info("MOCK create_bug")
_log.info("bug_title: %s" % bug_title)
_log.info("bug_description: %s" % bug_description)
if component:
_log.info("component: %s" % component)
if cc:
_log.info("cc: %s" % cc)
if blocked:
_log.info("blocked: %s" % blocked)
return 60001
def quips(self):
return ["Good artists copy. Great artists steal. - Pablo Picasso"]
def fetch_bug(self, bug_id):
return Bug(self.bug_cache.get(int(bug_id)), self)
def set_override_patch(self, patch):
self._override_patch = patch
def fetch_attachment(self, attachment_id):
if self._override_patch:
return self._override_patch
attachment_dictionary = self.attachment_cache.get(attachment_id)
if not attachment_dictionary:
print "MOCK: fetch_attachment: %s is not a known attachment id" % attachment_id
return None
bug = self.fetch_bug(attachment_dictionary["bug_id"])
for attachment in bug.attachments(include_obsolete=True):
if attachment.id() == int(attachment_id):
return attachment
def bug_url_for_bug_id(self, bug_id):
return "%s/%s" % (self.bug_server_url, bug_id)
def fetch_bug_dictionary(self, bug_id):
return self.bug_cache.get(bug_id)
def attachment_url_for_id(self, attachment_id, action="view"):
action_param = ""
if action and action != "view":
action_param = "&action=%s" % action
return "%s/%s%s" % (self.bug_server_url, attachment_id, action_param)
def reassign_bug(self, bug_id, assignee=None, comment_text=None):
_log.info("MOCK reassign_bug: bug_id=%s, assignee=%s" % (bug_id, assignee))
if comment_text:
_log.info("-- Begin comment --")
_log.info(comment_text)
_log.info("-- End comment --")
def set_flag_on_attachment(self,
attachment_id,
flag_name,
flag_value,
comment_text=None):
_log.info("MOCK setting flag '%s' to '%s' on attachment '%s' with comment '%s'" % (
flag_name, flag_value, attachment_id, comment_text))
def post_comment_to_bug(self, bug_id, comment_text, cc=None):
_log.info("MOCK bug comment: bug_id=%s, cc=%s\n--- Begin comment ---\n%s\n--- End comment ---\n" % (
bug_id, cc, comment_text))
def add_attachment_to_bug(self, bug_id, file_or_string, description, filename=None, comment_text=None, mimetype=None):
_log.info("MOCK add_attachment_to_bug: bug_id=%s, description=%s filename=%s mimetype=%s" %
(bug_id, description, filename, mimetype))
if comment_text:
_log.info("-- Begin comment --")
_log.info(comment_text)
_log.info("-- End comment --")
def add_patch_to_bug(self,
bug_id,
diff,
description,
comment_text=None,
mark_for_review=False,
mark_for_commit_queue=False,
mark_for_landing=False):
_log.info("MOCK add_patch_to_bug: bug_id=%s, description=%s, mark_for_review=%s, mark_for_commit_queue=%s, mark_for_landing=%s" %
(bug_id, description, mark_for_review, mark_for_commit_queue, mark_for_landing))
if comment_text:
_log.info("-- Begin comment --")
_log.info(comment_text)
_log.info("-- End comment --")
def add_cc_to_bug(self, bug_id, ccs):
pass
def obsolete_attachment(self, attachment_id, message=None):
pass
def reopen_bug(self, bug_id, message):
_log.info("MOCK reopen_bug %s with comment '%s'" % (bug_id, message))
def close_bug_as_fixed(self, bug_id, message):
pass
def clear_attachment_flags(self, attachment_id, message):
pass
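# A minimal sketch of driving the mock, mirroring how webkitpy unit tests
# consume it (the ids below come from the fixture dictionaries above):
def _demo_mock_bugzilla():
    bugzilla = MockBugzilla()
    bug = bugzilla.fetch_bug(50000)
    assert bug.id() == 50000
    patch = bugzilla.fetch_attachment(10002)
    return bug, patch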
| klim-iv/phantomjs-qt5 | src/webkit/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_mock.py | Python | bsd-3-clause | 14,927 |
import unittest
import HW6
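# HW6.solve is treated as a pure function of a list here; every expected
# value below matches the largest element of the input, so these cases are
# consistent with (though do not prove) solve behaving like max().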
class TestHW6(unittest.TestCase):
def test_111(self):
self.assertEqual(HW6.solve([1,1,1,1,1,1]), 1)
def test_123(self):
self.assertEqual(HW6.solve([1,2,3]), 3)
def test_2(self):
self.assertEqual(HW6.solve([3,4,5,6]), 6)
def test_3(self):
self.assertEqual(HW6.solve([1,4,3,9,1,2,4,10]), 10)
if __name__ == '__main__':
unittest.main()
| cuixiongyi/XiongyiCui_WPI | HW6test.py | Python | bsd-2-clause | 417 |
"""
inspectors.py # Per-endpoint view introspection
See schemas.__init__.py for package overview.
"""
import re
import warnings
from collections import OrderedDict
from django.db import models
from django.utils.encoding import force_text, smart_text
from django.utils.six.moves.urllib import parse as urlparse
from django.utils.translation import ugettext_lazy as _
from rest_framework import exceptions, serializers
from rest_framework.compat import coreapi, coreschema, uritemplate
from rest_framework.settings import api_settings
from rest_framework.utils import formatting
from .utils import is_list_view
header_regex = re.compile('^[a-zA-Z][0-9A-Za-z_]*:')
def field_to_schema(field):
title = force_text(field.label) if field.label else ''
description = force_text(field.help_text) if field.help_text else ''
if isinstance(field, (serializers.ListSerializer, serializers.ListField)):
child_schema = field_to_schema(field.child)
return coreschema.Array(
items=child_schema,
title=title,
description=description
)
elif isinstance(field, serializers.Serializer):
return coreschema.Object(
properties=OrderedDict([
(key, field_to_schema(value))
for key, value
in field.fields.items()
]),
title=title,
description=description
)
elif isinstance(field, serializers.ManyRelatedField):
return coreschema.Array(
items=coreschema.String(),
title=title,
description=description
)
elif isinstance(field, serializers.RelatedField):
return coreschema.String(title=title, description=description)
elif isinstance(field, serializers.MultipleChoiceField):
return coreschema.Array(
items=coreschema.Enum(enum=list(field.choices.keys())),
title=title,
description=description
)
elif isinstance(field, serializers.ChoiceField):
return coreschema.Enum(
enum=list(field.choices.keys()),
title=title,
description=description
)
elif isinstance(field, serializers.BooleanField):
return coreschema.Boolean(title=title, description=description)
elif isinstance(field, (serializers.DecimalField, serializers.FloatField)):
return coreschema.Number(title=title, description=description)
elif isinstance(field, serializers.IntegerField):
return coreschema.Integer(title=title, description=description)
elif isinstance(field, serializers.DateField):
return coreschema.String(
title=title,
description=description,
format='date'
)
elif isinstance(field, serializers.DateTimeField):
return coreschema.String(
title=title,
description=description,
format='date-time'
)
if field.style.get('base_template') == 'textarea.html':
return coreschema.String(
title=title,
description=description,
format='textarea'
)
return coreschema.String(title=title, description=description)
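# Example of the field -> coreschema mapping above (illustrative; assumes
# the optional ``coreschema`` dependency is installed):
def _demo_field_to_schema():
    field = serializers.IntegerField(label='Age', help_text='Age in years')
    schema = field_to_schema(field)
    assert isinstance(schema, coreschema.Integer)
    assert schema.description == 'Age in years'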
def get_pk_description(model, model_field):
if isinstance(model_field, models.AutoField):
value_type = _('unique integer value')
elif isinstance(model_field, models.UUIDField):
value_type = _('UUID string')
else:
value_type = _('unique value')
return _('A {value_type} identifying this {name}.').format(
value_type=value_type,
name=model._meta.verbose_name,
)
class ViewInspector(object):
"""
Descriptor class on APIView.
Provide subclass for per-view schema generation
"""
def __get__(self, instance, owner):
"""
Enables `ViewInspector` as a Python _Descriptor_.
This is how `view.schema` knows about `view`.
`__get__` is called when the descriptor is accessed on the owner.
(That will be when view.schema is called in our case.)
`owner` is always the owner class. (An APIView, or subclass for us.)
`instance` is the view instance or `None` if accessed from the class,
rather than an instance.
See: https://docs.python.org/3/howto/descriptor.html for info on
descriptor usage.
"""
self.view = instance
return self
@property
def view(self):
"""View property."""
assert self._view is not None, "Schema generation REQUIRES a view instance. (Hint: you accessed `schema` from the view class rather than an instance.)"
return self._view
@view.setter
def view(self, value):
self._view = value
@view.deleter
def view(self):
self._view = None
def get_link(self, path, method, base_url):
"""
Generate `coreapi.Link` for self.view, path and method.
This is the main _public_ access point.
Parameters:
* path: Route path for view from URLConf.
* method: The HTTP request method.
* base_url: The project "mount point" as given to SchemaGenerator
"""
raise NotImplementedError(".get_link() must be overridden.")
class AutoSchema(ViewInspector):
"""
Default inspector for APIView
    Responsible for per-view introspection and schema generation.
"""
def __init__(self, manual_fields=None):
"""
Parameters:
* `manual_fields`: list of `coreapi.Field` instances that
will be added to auto-generated fields, overwriting on `Field.name`
"""
self._manual_fields = manual_fields
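    # Typical per-view override, sketched (the field name here is
    # illustrative):
    #
    #     class UserList(generics.ListAPIView):
    #         schema = AutoSchema(manual_fields=[
    #             coreapi.Field("search", required=False, location="query"),
    #         ])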
def get_link(self, path, method, base_url):
fields = self.get_path_fields(path, method)
fields += self.get_serializer_fields(path, method)
fields += self.get_pagination_fields(path, method)
fields += self.get_filter_fields(path, method)
if self._manual_fields is not None:
by_name = {f.name: f for f in fields}
for f in self._manual_fields:
by_name[f.name] = f
fields = list(by_name.values())
if fields and any([field.location in ('form', 'body') for field in fields]):
encoding = self.get_encoding(path, method)
else:
encoding = None
description = self.get_description(path, method)
if base_url and path.startswith('/'):
path = path[1:]
return coreapi.Link(
url=urlparse.urljoin(base_url, path),
action=method.lower(),
encoding=encoding,
fields=fields,
description=description
)
def get_description(self, path, method):
"""
Determine a link description.
This will be based on the method docstring if one exists,
or else the class docstring.
"""
view = self.view
method_name = getattr(view, 'action', method.lower())
method_docstring = getattr(view, method_name, None).__doc__
if method_docstring:
# An explicit docstring on the method or action.
return formatting.dedent(smart_text(method_docstring))
description = view.get_view_description()
        lines = description.splitlines()
current_section = ''
sections = {'': ''}
for line in lines:
if header_regex.match(line):
                current_section, separator, lead = line.partition(':')
sections[current_section] = lead.strip()
else:
sections[current_section] += '\n' + line
# TODO: SCHEMA_COERCE_METHOD_NAMES appears here and in `SchemaGenerator.get_keys`
coerce_method_names = api_settings.SCHEMA_COERCE_METHOD_NAMES
header = getattr(view, 'action', method.lower())
if header in sections:
return sections[header].strip()
if header in coerce_method_names:
if coerce_method_names[header] in sections:
return sections[coerce_method_names[header]].strip()
return sections[''].strip()
def get_path_fields(self, path, method):
"""
Return a list of `coreapi.Field` instances corresponding to any
templated path variables.
"""
view = self.view
model = getattr(getattr(view, 'queryset', None), 'model', None)
fields = []
for variable in uritemplate.variables(path):
title = ''
description = ''
schema_cls = coreschema.String
kwargs = {}
if model is not None:
# Attempt to infer a field description if possible.
try:
model_field = model._meta.get_field(variable)
                except Exception:
model_field = None
if model_field is not None and model_field.verbose_name:
title = force_text(model_field.verbose_name)
if model_field is not None and model_field.help_text:
description = force_text(model_field.help_text)
elif model_field is not None and model_field.primary_key:
description = get_pk_description(model, model_field)
if hasattr(view, 'lookup_value_regex') and view.lookup_field == variable:
kwargs['pattern'] = view.lookup_value_regex
elif isinstance(model_field, models.AutoField):
schema_cls = coreschema.Integer
field = coreapi.Field(
name=variable,
location='path',
required=True,
schema=schema_cls(title=title, description=description, **kwargs)
)
fields.append(field)
return fields
def get_serializer_fields(self, path, method):
"""
Return a list of `coreapi.Field` instances corresponding to any
request body input, as determined by the serializer class.
"""
view = self.view
if method not in ('PUT', 'PATCH', 'POST'):
return []
if not hasattr(view, 'get_serializer'):
return []
try:
serializer = view.get_serializer()
except exceptions.APIException:
serializer = None
warnings.warn('{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be '
'generated for {} {}.'
.format(view.__class__.__name__, method, path))
if isinstance(serializer, serializers.ListSerializer):
return [
coreapi.Field(
name='data',
location='body',
required=True,
schema=coreschema.Array()
)
]
if not isinstance(serializer, serializers.Serializer):
return []
fields = []
for field in serializer.fields.values():
if field.read_only or isinstance(field, serializers.HiddenField):
continue
required = field.required and method != 'PATCH'
field = coreapi.Field(
name=field.field_name,
location='form',
required=required,
schema=field_to_schema(field)
)
fields.append(field)
return fields
def get_pagination_fields(self, path, method):
view = self.view
if not is_list_view(path, method, view):
return []
pagination = getattr(view, 'pagination_class', None)
if not pagination:
return []
paginator = view.pagination_class()
return paginator.get_schema_fields(view)
def _allows_filters(self, path, method):
"""
Determine whether to include filter Fields in schema.
Default implementation looks for ModelViewSet or GenericAPIView
actions/methods that cause filtering on the default implementation.
Override to adjust behaviour for your view.
Note: Introduced in v3.7: Initially "private" (i.e. with leading underscore)
to allow changes based on user experience.
"""
if getattr(self.view, 'filter_backends', None) is None:
return False
if hasattr(self.view, 'action'):
return self.view.action in ["list", "retrieve", "update", "partial_update", "destroy"]
return method.lower() in ["get", "put", "patch", "delete"]
def get_filter_fields(self, path, method):
if not self._allows_filters(path, method):
return []
fields = []
for filter_backend in self.view.filter_backends:
fields += filter_backend().get_schema_fields(self.view)
return fields
def get_encoding(self, path, method):
"""
Return the 'encoding' parameter to use for a given endpoint.
"""
view = self.view
# Core API supports the following request encodings over HTTP...
supported_media_types = {
'application/json',
'application/x-www-form-urlencoded',
'multipart/form-data',
}
parser_classes = getattr(view, 'parser_classes', [])
for parser_class in parser_classes:
media_type = getattr(parser_class, 'media_type', None)
if media_type in supported_media_types:
return media_type
# Raw binary uploads are supported with "application/octet-stream"
if media_type == '*/*':
return 'application/octet-stream'
return None
class ManualSchema(ViewInspector):
"""
Allows providing a list of coreapi.Fields,
plus an optional description.
"""
def __init__(self, fields, description=''):
"""
Parameters:
* `fields`: list of `coreapi.Field` instances.
        * `description`: String description for view. Optional.
"""
assert all(isinstance(f, coreapi.Field) for f in fields), "`fields` must be a list of coreapi.Field instances"
self._fields = fields
self._description = description
def get_link(self, path, method, base_url):
if base_url and path.startswith('/'):
path = path[1:]
return coreapi.Link(
url=urlparse.urljoin(base_url, path),
action=method.lower(),
encoding=None,
fields=self._fields,
description=self._description
)
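# A minimal usage sketch (not part of this module): attaching a ManualSchema
# to a view. `SearchView` and its single "search" field are hypothetical
# examples, not upstream code.
#
#     import coreapi
#     import coreschema
#     from rest_framework.views import APIView
#     from rest_framework.schemas import ManualSchema
#
#     class SearchView(APIView):
#         schema = ManualSchema(fields=[
#             coreapi.Field(
#                 "search",
#                 required=False,
#                 location="query",
#                 schema=coreschema.String(description="Free-text search term."),
#             ),
#         ])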
| jpadilla/django-rest-framework | rest_framework/schemas/inspectors.py | Python | bsd-2-clause | 14,722 |
import networkx as nx
import numpy as np
import pandas as pd
def normalise(x):
    x = x[:]  # take a copy so the caller's data is not mutated
x -= min(x)
x /= max(x)
return x
def jgraph(posjac):
'''
networkx graph object from posjac at timestep
'''
posjac = 1 - normalise(np.log10(posjac).replace([np.inf,-np.inf],np.nan).dropna())
split = [i.split('->') for i in posjac.index]
#graph
G = nx.DiGraph()
for e in range(len(split)):
G.add_edge(split[e][0],split[e][1],weight=posjac[e])
G.remove_edges_from(G.selfloop_edges())
return G
def getnx(self, ts ,save=False):
'''
Create a networkx graph from a DSMACC new class
Usage:
getnx(a,a.ts[-1], 'propane')
'''
self.create_posjac()
G = nx.DiGraph()
posjac = self.posjac.loc[ts,:]
split = [i.split('->') for i in posjac.index]
for e in range(len(split)):
G.add_edge(split[e][0],split[e][1],weight=posjac[e])
G.remove_edges_from(G.selfloop_edges())
if save:
nx.write_weighted_edgelist(G, save+'.wedgelist')
#G=nx.read_weighted_edgelist('propane.wedgelist',create_using=nx.DiGraph)
return G
def pagerank(a):
return geobj2df(metric(tograph(group_hour(a.jacsp))))
def tograph(jac):
'''
Use hourly avg
'''
rt = []
for t in jac.iterrows():
jacsp=t[1]
        #invert negative links by reversing the edge direction
index = np.array(jacsp.index)
lt = list(jacsp<0)
index[lt] = map(lambda x: '->'.join(reversed(x.split('->'))),index[lt])
jacsp.index = index
jacsp = jacsp.abs()
#normalize jacsp
jacsp = jacsp*1.01 - jacsp.min().min()
jacsp /= jacsp.max().max()
split = [i.split('->') for i in jacsp.index]
#graph
G = nx.DiGraph()
for e in range(len(split)):
G.add_edge(split[e][0],split[e][1],weight=jacsp[e])
G.remove_edges_from(G.selfloop_edges())
rt.append({'graph':G,'time':t[0]})
return rt
def metric(GS,met = 'nx.pagerank'):
'''
GS - out array from to_graph
'''
metfn = eval(met)
for gt in range(len(GS)):
res = metfn(GS[gt]['graph'])
        res = [[key, value] for key, value in sorted(res.items(), key=lambda kv: (kv[1], kv[0]))]
GS[gt][met] = res
return GS
def geobj2df(GS,what = 'nx.pagerank'):
res = []
index = []
for s in GS:
index.append(s['time'])
s = pd.DataFrame(s[what])
s.index = s[0]
s=s[1]
res.append(s)
df = pd.concat(res,axis = 1).T
df.index = index
    df = (df * 1.1).subtract(df.min(axis=0))
    df = df.divide(df.max(axis=1), axis=0)
    #import zcreate_centrality as p
    #p.createhtml(df)
return df
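if __name__ == '__main__':
    # Hedged usage sketch (not part of the original module): run the pagerank
    # pipeline on a tiny synthetic hourly-averaged jacobian whose columns use
    # the 'source->target' convention. The species names are made up, and the
    # sketch assumes the legacy Python 2 / old pandas+networkx environment
    # that the module itself targets (e.g. Graph.selfloop_edges).
    jac = pd.DataFrame(
        {'O3->NO2': [0.5, 0.7], 'NO2->NO': [-0.2, -0.1], 'NO->O3': [0.3, 0.4]},
        index=pd.date_range('2000-01-01', periods=2, freq='H'))
    graphs = metric(tograph(jac))
    print(graphs[0]['nx.pagerank'])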
| wolfiex/DSMACC-testing | zgraph.py | Python | gpl-3.0 | 2,805 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# This file is part of WAPT Enterprise Edition
# Copyright (C) 2017 Tranquil IT Systems https://www.tranquil.it
# All Rights Reserved.
#
# WAPT aims to help Windows systems administrators to deploy
# setup and update applications on users PC.
# -----------------------------------------------------------------------
from __future__ import absolute_import
from waptserver.config import __version__
import os
import sys
from waptserver.tasks import *
try:
from waptenterprise.waptserver.wsus_tasks import *
from waptenterprise.waptserver.repositories_tasks import *
waptenterprise = True
except ImportError:
waptenterprise = False | tranquilit/WAPT | waptserver/tasks_common.py | Python | gpl-3.0 | 773 |
"""
The Plaid API
The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
class SandboxBankTransferFireWebhookRequest(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'webhook': (str,), # noqa: E501
'client_id': (str,), # noqa: E501
'secret': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'webhook': 'webhook', # noqa: E501
'client_id': 'client_id', # noqa: E501
'secret': 'secret', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, webhook, *args, **kwargs): # noqa: E501
"""SandboxBankTransferFireWebhookRequest - a model defined in OpenAPI
Args:
webhook (str): The URL to which the webhook should be sent.
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
client_id (str): Your Plaid API `client_id`. The `client_id` is required and may be provided either in the `PLAID-CLIENT-ID` header or as part of a request body.. [optional] # noqa: E501
secret (str): Your Plaid API `secret`. The `secret` is required and may be provided either in the `PLAID-SECRET` header or as part of a request body.. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.webhook = webhook
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
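if __name__ == "__main__":
    # Hedged usage sketch (not part of the generated model): construct the
    # request body locally; the webhook URL is a placeholder.
    req = SandboxBankTransferFireWebhookRequest(
        webhook="https://example.com/plaid-webhook")
    print(req.webhook)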
| plaid/plaid-python | plaid/model/sandbox_bank_transfer_fire_webhook_request.py | Python | mit | 7,108 |
import logging
import os
from pylons import request, tmpl_context as c
from pylons.controllers.util import redirect
from pylons.i18n import _
from openspending.model import meta as db
from openspending.model.badge import Badge
from openspending.ui.lib.base import require
from openspending.lib.jsonexport import to_jsonp
from openspending.ui.lib import helpers as h
from openspending.ui.lib.hypermedia import (badges_apply_links,
badge_apply_links)
from openspending.ui.lib.base import BaseController
from openspending.ui.alttemplates import templating
log = logging.getLogger(__name__)
class BadgeController(BaseController):
def index(self, format='html'):
"""
List all badges in the system. Default is to present the
user with an html site, but the user can request a json list
of badges.
"""
c.badges = Badge.all()
# If the requested format is json return a list of badges
if format == 'json':
return to_jsonp({"badges": badges_apply_links([b.as_dict()
for b in c.badges])})
# Return html representation
return templating.render('badge/index.html')
def information(self, id, format='html'):
"""
Show information about the badge. Default is to present the
user with the badge on an html site, but the user can request a
json representation of the badge
"""
# Get the badge
c.badge = Badge.by_id(id=id)
# Return a json representation if the format requested is 'json'
if format == 'json':
return to_jsonp({"badge": badge_apply_links(c.badge.as_dict())})
# Return html representation
return templating.render('badge/information.html')
def create(self):
"""
Create a new badge in the system
"""
# Check if user is allowed to create a badge
require.badge.create()
import shutil
label = request.params['badge-label']
description = request.params['badge-description']
image = request.POST['badge-image']
try:
# Get upload directory for Badge and generate a random filename
upload_dir = h.get_object_upload_dir(Badge)
random_filename = h.get_uuid_filename(image.filename)
# Open the filename and copy the uploaded image
permanent_filename = os.path.join(upload_dir, random_filename)
            permanent_image = open(permanent_filename, 'wb')
shutil.copyfileobj(image.file, permanent_image)
upload_image_path = h.upload(random_filename, Badge)
# Close image files
image.file.close()
permanent_image.close()
except OSError:
upload_image_path = ''
h.flash_error(_('Uploading files not supported at the moment.'))
badge = Badge(label, upload_image_path, description, c.account)
db.session.add(badge)
db.session.commit()
redirect(h.url_for(controller='badge', action='information',
id=badge.id))
def give(self, dataset):
"""
Award a given badge to a given dataset.
"""
# Get the dataset
self._get_dataset(dataset)
# Get the badge
badge_id = request.params.get('badge', None)
badge = Badge.by_id(id=badge_id)
if badge:
            # See if user can award this badge to this dataset
require.badge.give(badge, c.dataset)
# Add the dataset to the badge datasets and commit to database
badge.datasets.append(c.dataset)
db.session.commit()
else:
# If we don't find the badge id we flash an error message
h.flash_error(_('Badge not found.'))
# Go to the dataset's main page
redirect(h.url_for(controller='dataset', action='view',
dataset=c.dataset.name))
| mxabierto/openspending | openspending/ui/controllers/badge.py | Python | agpl-3.0 | 4,060 |
"""
Django settings for kanq project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
import sys
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '6k5x=1bv14kh6y8iz3tzx8q14o!y1r1t!jdb*4nps+o-8yx_cw'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# SECURITY WARNING: don't run with auth turned off in production!
AUTH_REQUIRED = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'django_celery_beat',
'oauth2_provider',
'social_django',
'rest_framework_social_oauth2',
'corsheaders',
'api',
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# Use CORS, so that the client can access them from the other host
CORS_ORIGIN_WHITELIST = (
    'localhost:4200',
)
ROOT_URLCONF = 'kanq.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'social_django.context_processors.backends',
'social_django.context_processors.login_redirect',
'django.template.context_processors.request'
],
},
},
]
# Facebook auth settings
SOCIAL_AUTH_FACEBOOK_KEY = os.environ.get('KANQ_FACEBOOK_KEY')
SOCIAL_AUTH_FACEBOOK_SECRET = os.environ.get('KANQ_FACEBOOK_SECRET')
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_FACEBOOK_PROFILE_EXTRA_PARAMS = {
'fields': 'id, name, email, age_range'
}
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'oauth2_provider.ext.rest_framework.OAuth2Authentication',
'rest_framework.authentication.TokenAuthentication',
'rest_framework_social_oauth2.authentication.SocialAuthentication',
),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 10,
}
if AUTH_REQUIRED:
REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'] = (
'rest_framework.permissions.IsAuthenticated',
)
AUTHENTICATION_BACKENDS = (
'rest_framework_social_oauth2.backends.DjangoOAuth2',
'social_core.backends.facebook.FacebookOAuth2',
'django.contrib.auth.backends.ModelBackend',
)
WSGI_APPLICATION = 'kanq.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'OPTIONS': {
'read_default_file': './mysql.cnf',
},
}
}
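# A hypothetical mysql.cnf matching the 'read_default_file' option above;
# every value is a placeholder, not the project's real configuration:
#
#     [client]
#     database = kanq
#     user = kanq
#     password = change-me
#     default-character-set = utf8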
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LOGIN_URL = 'login'
STATIC_ROOT = os.path.join(BASE_DIR, "api/static")
AUTH_USER_MODEL = 'api.User'
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'EET'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
# Logging not working currently (hopefully not true anymore)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': 'info.log',
},
},
'loggers': {
'api': {
'handlers': ['file'],
'level': 'INFO',
'propagate': True,
},
},
}
| frostblooded/kanq | kanq/settings.py | Python | mit | 5,634 |
# Copyright (C) 2018 Collin Capano
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# self.option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# =============================================================================
#
# Preamble
#
# =============================================================================
#
"""Provides IO for the emcee sampler.
"""
from .base_sampler import BaseSamplerFile
from .posterior import PosteriorFile
class CPNestFile(BaseSamplerFile):
"""Class to handle file IO for the ``cpnest`` sampler."""
name = 'cpnest_file'
def write_resume_point(self):
pass
def write_niterations(self, niterations):
"""
Writes the given number of iterations to the sampler group.
"""
self[self.sampler_group].attrs['niterations'] = niterations
def write_sampler_metadata(self, sampler):
"""
        Writes the sampler's name and number of live points, along with the
        model's metadata.
"""
self.attrs['sampler'] = sampler.name
if self.sampler_group not in self.keys():
# create the sampler group
self.create_group(self.sampler_group)
self[self.sampler_group].attrs['nlivepoints'] = sampler.nlive
# write the model's metadata
sampler.model.write_metadata(self)
def write_samples(self, samples, parameters=None):
"""Writes samples to the given file.
Results are written to ``samples_group/{vararg}``, where ``{vararg}``
is the name of a model params. The samples are written as an
array of length ``niterations``.
Parameters
-----------
samples : dict
The samples to write. Each array in the dictionary should have
length niterations.
parameters : list, optional
Only write the specified parameters to the file. If None, will
write all of the keys in the ``samples`` dict.
"""
# since we're just writing a posterior use
# PosteriorFile's write_samples
PosteriorFile.write_samples(self, samples, parameters=parameters)
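if __name__ == '__main__':
    # Hedged usage sketch (not part of the module): write a toy posterior to
    # a CPNestFile. It assumes the underlying pycbc HDF machinery accepts the
    # standard h5py 'w' mode and that write_samples may be called directly.
    import numpy
    toy_samples = {'mass1': numpy.random.uniform(10., 50., size=100)}
    with CPNestFile('toy_cpnest.hdf', 'w') as fp:
        fp.write_samples(toy_samples)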
| cmbiwer/pycbc | pycbc/inference/io/cpnest.py | Python | gpl-3.0 | 2,726 |
#!/usr/bin/python3
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Arch Linux container for building all dependencies of all Arch Linux
# packages.
from utilities import log, timestamp, run_cmd, recursive_chown
import os
import os.path
import shutil
import subprocess
def toolchain_specific_setup(args):
log("info", "Running android-specific setup")
if not os.path.isdir("/sysroot"):
os.mkdir("/sysroot")
recursive_chown("/sysroot")
# wget and curl output unsuitable progress bars even when not
# connected to a TTY. Turn them off.
with open("/etc/wgetrc", "a") as f:
print("verbose = off", file=f)
with open("/etc/.curlrc", "a") as f:
print("silent", file=f)
print("show-error", file=f)
log("info", "Downloading & unpacking NDK")
os.chdir("/home/tuscan")
setup_file = "/home/tuscan/ndk.bin"
cmd = ("wget -O %s"
" http://dl.google.com/android/ndk/android-"
"ndk-r10e-linux-x86_64.bin" % (setup_file))
run_cmd(cmd)
cmd = "chmod +x " + setup_file
run_cmd(cmd)
run_cmd(setup_file, output=False)
log("info", "Setting up toolchain")
cmd = ("/home/tuscan/android-ndk-r10e/build/tools/"
"make-standalone-toolchain.sh"
" --arch=arm --platform=android-21 "
" --install-dir=" + "/sysroot")
run_cmd(cmd)
cmd = "chown -R tuscan: " + "/sysroot"
run_cmd(cmd, as_root=True)
cmd = "chown -R tuscan: /home/tuscan/android-ndk-r10e"
run_cmd(cmd, as_root=True)
bindirs = [
"/sysroot/bin",
"/sysroot/libexec/gcc/arm-linux-androideabi/4.8"
]
for d in bindirs:
for f in os.listdir(d):
f = os.path.join(d, f)
cmd = "chmod a+rx %s" % f
run_cmd(cmd, as_root=True)
for f in os.listdir("/sysroot"):
if os.path.isdir(os.path.join("/sysroot", f)):
shutil.copytree(os.path.join("/sysroot", f),
os.path.join("/toolchain_root", f))
elif os.path.isfile(os.path.join("/sysroot", f)):
shutil.copy(os.path.join("/sysroot", f), "/toolchain_root")
recursive_chown("/toolchain_root")
| karkhaz/tuscan | toolchains/install_bootstrap/android/setup.py | Python | apache-2.0 | 2,742 |
from xml.etree import ElementTree
import requests
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
class Translator(object):
AUTH_URL = "https://datamarket.accesscontrol.windows.net/v2/OAuth2-13"
API_ROOT = "http://api.microsofttranslator.com/v2/Http.svc"
    TRANSLATE_URL = "http://api.microsofttranslator.com/v2/Http.svc/Translate"
def __init__(self, client_id, client_secret):
self.__token = ""
self.authorize(client_id, client_secret)
def authorize(self, client_id, client_secret):
headers = {
"Content-type": "application/x-www-form-urlencoded"
}
params = urlencode({
"grant_type": "client_credentials",
"client_id": client_id,
"client_secret": client_secret,
"scope": "http://api.microsofttranslator.com"
})
resp = requests.post(self.AUTH_URL, data=params, headers=headers)
if resp.ok:
_body = resp.json()
self.__token = _body["access_token"]
else:
resp.raise_for_status()
def detect(self, text):
params = {
"text": text
}
url = self.API_ROOT + "/Detect?" + urlencode(params)
resp = requests.get(url, headers=self.__make_header())
result = {}
if resp.ok:
root = ElementTree.fromstring(resp.content)
result = root.text
else:
resp.raise_for_status()
return result
def translate(self, text, lang_to, lang_from=""):
# language codes
# https://msdn.microsoft.com/en-us/library/hh456380.aspx
params = {
"text": text,
"to": lang_to
}
if lang_from:
params["from"] = lang_from
url = self.API_ROOT + "/Translate?" + urlencode(params)
resp = requests.get(url, headers=self.__make_header())
result = {}
if resp.ok:
root = ElementTree.fromstring(resp.content)
result = root.text
else:
resp.raise_for_status()
return result
def __make_header(self):
return {
"Authorization": "Bearer {0}".format(self.__token)
}
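if __name__ == "__main__":
    # Hedged usage sketch (not part of the original module): credentials are
    # read from hypothetical environment variables, and the long-retired
    # datamarket endpoints above are assumed to respond, so treat this purely
    # as an illustration of the API surface.
    import os
    translator = Translator(os.environ["MS_CLIENT_ID"],
                            os.environ["MS_CLIENT_SECRET"])
    print(translator.detect("Bonjour"))
    print(translator.translate("Hello, world", lang_to="ja"))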
| icoxfog417/pyoxford | pyoxford/translator_api.py | Python | mit | 2,262 |
# -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import os
import unittest
from ssl import SSLError
from .fake_webapp import EXAMPLE_APP
from splinter.request_handler.request_handler import RequestHandler
from splinter.request_handler.status_code import HttpResponseError
from tests import TESTS_ROOT
class RequestHandlerTestCase(unittest.TestCase):
def setUp(self):
self.request = RequestHandler()
self.status_code = self.request.connect(EXAMPLE_APP)
def test_should_receive_an_url_and_get_a_success_response(self):
self.assertTrue(self.status_code.is_success())
def test_should_start_a_request_with_localhost(self):
self.assertEqual("127.0.0.1", self.request.host)
def test_should_start_a_request_with_port_5000(self):
self.assertEqual(5000, self.request.port)
def test_should_visit_alert_page_and_get_a_success_response(self):
request = RequestHandler()
status_code = request.connect(EXAMPLE_APP + "alert")
self.assertTrue(status_code.is_success())
def test_should_compare_app_index_with_404_and_get_false(self):
self.assertFalse(self.status_code == 404)
def test_is_success_should_be_false_when_url_does_not_exists(self):
request = RequestHandler()
status_code = request.connect(EXAMPLE_APP + "page-that-doesnt-exists")
self.assertFalse(status_code.is_success())
def test_should_be_able_to_represent_exception_as_string(self):
"HttpResponseError exception should be representable as string"
error = HttpResponseError(404, "Not Found")
self.assertEqual("404 - Not Found", str(error))
def test_should_not_connect_to_non_http_protocols(self):
mockfile_path = "file://%s" % os.path.join(TESTS_ROOT, "mockfile.txt")
request = RequestHandler()
status_code = request.connect(mockfile_path)
self.assertTrue(status_code.is_success())
def test_should_connect_to_pages_with_query_string(self):
request = RequestHandler()
url = EXAMPLE_APP + "query?model"
status_code = request.connect(url)
self.assertTrue(status_code.is_success())
def test_should_connect_to_https_protocols(self):
# We do not run an HTTPS server, but we know we handle https
# if we get an SSLError accessing a non-HTTPS site.
with self.assertRaises(SSLError):
request = RequestHandler()
url = EXAMPLE_APP.replace('http', 'https')
request.connect(url)
self.assertEqual(request.scheme, 'https')
def test_should_set_user_agent(self):
request = RequestHandler()
url = EXAMPLE_APP + 'useragent'
request.connect(url)
self.assertEqual(b'python/splinter', request.response.read())
def test_should_be_able_to_connect_with_basic_auth(self):
request = RequestHandler()
url = 'http://admin:secret@localhost:5000/authenticate'
request.connect(url)
self.assertEqual(b'Success!', request.response.read())
| gjvis/splinter | tests/test_request_handler.py | Python | bsd-3-clause | 3,176 |
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from stardate.models import Blog
from stardate.utils import get_post_model
Post = get_post_model()
class LatestPostsFeed(Feed):
def get_object(self, request, blog_slug):
return get_object_or_404(Blog, slug=blog_slug)
def title(self, obj):
return "%s: Recent posts" % obj.name
def link(self, obj):
return obj.get_absolute_url()
def items(self, obj):
return Post.objects.published().filter(blog=obj)[:5]
def item_title(self, item):
return item.title
def item_link(self, item):
return item.get_absolute_url()
def item_pubdate(self, item):
return item.publish
def item_description(self, item):
return item.body
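# Hedged wiring sketch (belongs in a urls.py, not in this module; the URL
# pattern and route name are assumptions):
#
#     from django.conf.urls import url
#     from stardate.feeds import LatestPostsFeed
#
#     urlpatterns = [
#         url(r'^(?P<blog_slug>[-\w]+)/feed/$', LatestPostsFeed(), name='blog-feed'),
#     ]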
| blturner/django-stardate | stardate/feeds.py | Python | bsd-3-clause | 811 |
from __future__ import print_function
import argparse
import os.path
import re
import sys
from typing import Optional
from typing import Sequence
def main(argv=None): # type: (Optional[Sequence[str]]) -> int
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*')
parser.add_argument(
'--django', default=False, action='store_true',
help='Use Django-style test naming pattern (test*.py)',
)
args = parser.parse_args(argv)
retcode = 0
test_name_pattern = 'test.*.py' if args.django else '.*_test.py'
for filename in args.filenames:
base = os.path.basename(filename)
if (
not re.match(test_name_pattern, base) and
not base == '__init__.py' and
not base == 'conftest.py'
):
retcode = 1
print(
'{} does not match pattern "{}"'.format(
filename, test_name_pattern,
),
)
return retcode
if __name__ == '__main__':
sys.exit(main())
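# Hedged configuration sketch: a .pre-commit-config.yaml entry that would run
# this hook (the repo URL, rev, and hook id are assumptions):
#
#     - repo: https://github.com/pre-commit/pre-commit-hooks
#       rev: v2.3.0
#       hooks:
#         - id: name-tests-test
#           args: ['--django']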
| Harwood/pre-commit-hooks | pre_commit_hooks/tests_should_end_in_test.py | Python | mit | 1,071 |
# Copyright (c) 2010 Aldo Cortesi
# Copyright (c) 2010, 2014 dequis
# Copyright (c) 2012 Randall Ma
# Copyright (c) 2012-2014 Tycho Andersen
# Copyright (c) 2012 Craig Barnes
# Copyright (c) 2013 horsik
# Copyright (c) 2013 Tao Sauvage
# Copyright (c) 2020 Mikel Ward
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
from typing import List # noqa: F401
from libqtile import bar, hook, layout, qtile, widget
from libqtile.config import Click, Drag, Group, Key, Screen
from libqtile.lazy import lazy
from libqtile.log_utils import logger
import mydynamic
import mystack
screeninfo = None
try:
import screeninfo
except:
pass
mod = "mod4"
alt = "mod1"
# TODO(mikel): calculate width
quarter_width = 860
num_columns = 3
slice_role = "browser"
slice_wmclass = None
if os.environ.get("QTILE_XEPHYR"):
quarter_width = 480
mod = alt
slice_role = None
slice_wmclass = "xclock"
def window_to_previous_screen(qtile):
curr = qtile.screens.index(qtile.current_screen)
prev = (curr - 1) % len(qtile.screens)
group = qtile.screens[prev].group.name
qtile.current_window.togroup(group)
def window_to_next_screen(qtile):
curr = qtile.screens.index(qtile.current_screen)
next = (curr + 1) % len(qtile.screens)
group = qtile.screens[next].group.name
qtile.current_window.togroup(group)
keys = [
# Switch between screens (a.k.a. monitors)
Key([mod], "Page_Up", lazy.prev_screen(), desc="Move monitor focus to previous screen"),
Key([mod], "Page_Down", lazy.next_screen(), desc="Move monitor focus to next screen"),
# Switch between groups (a.k.a. workspaces)
Key([mod], "Tab", lazy.screen.toggle_group(), desc="Switch to the previous group"),
# Switch between windows
Key([mod, alt], "Down", lazy.layout.down(), desc="Move focus down"),
Key([mod, alt], "Up", lazy.layout.up(), desc="Move focus up"),
Key([mod, alt], "Right", lazy.layout.right(), desc="Move focus right"),
Key([mod, alt], "Left", lazy.layout.left(), desc="Move focus left"),
Key([mod], "Right", lazy.layout.swap_stack_right()),
Key([mod], "Left", lazy.layout.swap_stack_left()),
Key([alt], "Tab", lazy.layout.next(), desc="Focus the next window"),
Key([alt, "shift"], "Tab", lazy.layout.previous(), desc="Focus the previous window"),
# Move windows
Key([mod, "shift"], "Page_Up", lazy.function(window_to_previous_screen), desc="Move window to previous screen"),
Key([mod, "shift"], "Page_Down", lazy.function(window_to_next_screen), desc="Move window to next screen"),
Key([mod, "shift"], "Down", lazy.layout.shuffle_down(), desc="Move window down"),
Key([mod, "shift"], "Up", lazy.layout.shuffle_up(), desc="Move window up"),
Key([mod, "shift"], "Right", lazy.layout.shuffle_right(), desc="Move window right"),
Key([mod, "shift"], "Left", lazy.layout.shuffle_left(), desc="Move window left"),
# Toggle between different layouts
Key([mod], "grave", lazy.to_layout_index(-1), desc="Switch to layout -1"),
Key([mod], "apostrophe", lazy.to_layout_index(0), desc="Switch to layout 0"),
Key([mod], "comma", lazy.to_layout_index(1), desc="Switch to layout 1"),
Key([mod], "period", lazy.to_layout_index(2), desc="Switch to layout 2"),
Key([mod], "Return", lazy.to_layout_index(-1), desc="Switch to layout -1"),
Key([mod], "equal", lazy.to_layout_index(3), desc="Switch to layout 3"),
Key([mod], "BackSpace", lazy.window.kill(), desc="Kill focused window"),
Key([mod, "control"], "r", lazy.restart(), desc="Restart qtile"),
Key([mod, "control"], "q", lazy.shutdown(), desc="Shutdown qtile"),
Key([mod, "control"], "x", lazy.shutdown(), desc="Shutdown qtile"),
Key([mod], "space", lazy.spawncmd(), desc="Spawn a command using a prompt widget"),
]
groups = [Group(i) for i in "1234567890"]
for i in groups:
keys.extend(
[
# mod + letter of group = switch to group
Key([mod], i.name, lazy.group[i.name].toscreen(toggle=True), desc="Switch to group {}".format(i.name)),
# mod + shift + letter of group = move focused window to group
Key([mod, "shift"], i.name, lazy.window.togroup(i.name, switch_group=False), desc="Move focused window to group {}".format(i.name)),
]
)
layouts = [
mystack.MyStack(name="3center", widths=[1.0 / 4, 1.0 / 2, 1.0 / 4]),
mystack.MyStack(name="2equal", widths=[1.0 / 2, 1.0 / 2]),
mydynamic.MyDynamic(name="21:9", left_fractions=[1.0 / 4], center_fractions=[1.0 / 2], right_fraction=[1.0 / 4]),
mydynamic.MyDynamic(name="16:9", left_fractions=[], center_fractions=[2.0 / 3], right_fraction=[1.0 / 3]),
layout.Max(),
]
widget_defaults = dict(
font="sans",
fontsize=12,
padding=3,
)
extension_defaults = widget_defaults.copy()
def screen(main=True):
top = bar.Bar(
widgets=list(
filter(
None,
[
widget.GroupBox(),
widget.Prompt(),
widget.Spacer(),
widget.CurrentLayout(),
widget.Sep(),
widget.WindowName(width=bar.CALCULATED, show_state=False),
widget.Spacer(),
widget.Clipboard(max_width=30),
widget.Clock(format="%b %-d %H:%M"),
(widget.PulseVolume() if main else None),
(widget.Systray() if main else None),
],
)
),
size=24,
)
def update_bar_background():
if top.screen == top.qtile.current_screen:
top.background = '#000000'
else:
top.background = '#666666'
top.draw()
hook.subscribe.current_screen_change(update_bar_background)
return Screen(top=top)
screens = []
if screeninfo:
monitors = screeninfo.get_monitors()
logger.info('screeninfo detected %d monitors', len(monitors))
main = monitors[0]
if monitors[0].name.startswith('e') and len(monitors) > 1:
main = monitors[1]
for monitor in monitors:
screens.append(screen(main==monitor))
else:
logger.info('screeninfo not available, only configuring a single screen')
screens.append(screen(True))
# Drag floating layouts.
mouse = [
Drag([mod], "Button1", lazy.window.set_position_floating(), start=lazy.window.get_position()),
Drag([mod], "Button3", lazy.window.set_size_floating(), start=lazy.window.get_size()),
Click([mod], "Button2", lazy.window.bring_to_front()),
]
dgroups_key_binder = None
dgroups_app_rules = [] # type: List
main = None
follow_mouse_focus = True
bring_front_click = False
cursor_warp = True
floating_layout = layout.Floating(
float_rules=[
# Run the utility of `xprop` to see the wm class and name of an X client.
{"wmclass": "confirm"},
{"wmclass": "dialog"},
{"wmclass": "download"},
{"wmclass": "error"},
{"wmclass": "file_progress"},
{"wmclass": "notification"},
{"wmclass": "splash"},
{"wmclass": "toolbar"},
{"wmclass": "confirmreset"}, # gitk
{"wmclass": "makebranch"}, # gitk
{"wmclass": "maketag"}, # gitk
{"wname": "branchdialog"}, # gitk
{"wname": "pinentry"}, # GPG key password entry
{"wmclass": "ssh-askpass"}, # ssh-askpass
{"wname": "meet.google.com is sharing your screen."},
{"wname": "meet.google.com is sharing a window."},
]
)
auto_fullscreen = True
focus_on_window_activation = "smart"
# Pretend to be "LG3D" so that Java apps behave correctly.
wmname = "LG3D"
# Restart to handle a monitor appearing or disappearing.
# This seems to cause an infinite loop.
# @hook.subscribe.screen_change
# def restart_on_randr(ev):
# logger.info('screen_change, restarting')
# qtile.cmd_restart()
| mikelward/conf | config/qtile/config.py | Python | apache-2.0 | 8,878 |
from rest_framework import serializers
from mmkitarchive.models import Item, Category
# Category
class CategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Category
fields = ('url', 'id', 'name')
url = serializers.HyperlinkedIdentityField(
view_name=Category.get_api_detail_view_name()
)
class CategoryCreateSerializer(serializers.ModelSerializer):
class Meta:
model = Category
fields = ('name', )
class CategoryRetrieveUpdateSerializer(serializers.ModelSerializer):
class Meta:
model = Category
fields = ('id', 'name')
# Item
class ItemCreateSerializer(serializers.ModelSerializer):
class Meta:
model = Item
fields = ('name', 'description', 'created', 'author', 'category')
class ItemListSerializer(serializers.ModelSerializer):
class Meta:
model = Item
fields = ('url', 'id', 'name', 'created', 'category')
url = serializers.HyperlinkedIdentityField(
view_name=Item.get_api_detail_view_name()
)
category = CategoryListSerializer(
read_only=True
)
class ItemUpdateSerializer(serializers.ModelSerializer):
class Meta:
model = Item
fields = ('id', 'name', 'description', 'created', 'author', 'category')
class ItemRetrieveSerializer(ItemUpdateSerializer):
class Meta(ItemUpdateSerializer.Meta):
pass
category = CategoryListSerializer(
read_only=True
)
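# Hedged illustration (not part of the module): for an Item linked to a
# Category(id=1, name='Audio'), ItemListSerializer renders roughly the
# following JSON (ids, names and URLs are placeholders):
#
#     {
#         "url": "http://example.com/api/items/7/",
#         "id": 7,
#         "name": "Interview tape",
#         "created": "2016-01-01T00:00:00Z",
#         "category": {"url": "http://example.com/api/categories/1/",
#                      "id": 1, "name": "Audio"}
#     }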
| einsfr/mmkit2 | mmkitarchive/serializers.py | Python | mit | 1,495 |
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
"""
defines logger classes and a few convenience methods, not related to the
declarations tree
"""
import os
import sys
import logging
import tempfile
from fs_utils import files_walker
from fs_utils import directories_walker
def _create_logger_( name ):
"""implementation details"""
logger = logging.getLogger(name)
handler = logging.StreamHandler()
#handler.setFormatter( logging.Formatter( os.linesep + '%(levelname)s %(message)s' ) )
handler.setFormatter( logging.Formatter( '%(levelname)s %(message)s' ) )
logger.addHandler(handler)
logger.setLevel(logging.WARNING)
return logger
class loggers:
"""class-namespace, defines few loggers classes, used in the project"""
cxx_parser = _create_logger_( 'pygccxml.cxx_parser' )
"""logger for C++ parser functionality
If you set this logger level to DEBUG, you will be able to see the exact
    command line used to invoke GCC-XML and errors that occur during XML parsing
"""
    gccxml = cxx_parser #backward compatibility
pdb_reader = _create_logger_( 'pygccxml.pdb_reader' )
"""logger for MS .pdb file reader functionality
"""
queries_engine = _create_logger_( 'pygccxml.queries_engine' )
"""logger for query engine functionality.
    If you set this logger level to DEBUG, you will be able to see what queries
    you run against the declarations tree, measure performance and maybe even improve it.
    The query engine reports queries and whether they are optimized or not.
"""
declarations_cache = _create_logger_( 'pygccxml.declarations_cache' )
"""logger for declarations tree cache functionality
    If you set this logger level to DEBUG, you will be able to see exactly what
    happens when you read the declarations from a cache file. You will be able to
    decide whether this or that cache strategy is worth using.
"""
root = logging.getLogger( 'pygccxml' )
"""root logger exists for your convinience only"""
all = [ root, cxx_parser, queries_engine, declarations_cache, pdb_reader ]
"""contains all logger classes, defined by the class"""
def remove_file_no_raise(file_name ):
"""removes file from disk, if exception is raised, it silently ignores it"""
try:
if os.path.exists(file_name):
os.remove( file_name )
except Exception, error:
        loggers.root.error( "Error occurred while removing temporary file('%s'): %s"
% ( file_name, str( error ) ) )
def create_temp_file_name(suffix, prefix=None, dir=None):
"""small convinience function that creates temporal file.
This function is a wrapper aroung Python built-in function - tempfile.mkstemp
"""
if not prefix:
prefix = tempfile.template
fd, name = tempfile.mkstemp( suffix=suffix, prefix=prefix, dir=dir )
file_obj = os.fdopen( fd )
file_obj.close()
return name
def normalize_path( some_path ):
"""return os.path.normpath( os.path.normcase( some_path ) )"""
return os.path.normpath( os.path.normcase( some_path ) )
def get_architecture():
"""returns computer architecture: 32 or 64.
The guess is based on maxint.
"""
if sys.maxint == 2147483647:
return 32
elif sys.maxint == 9223372036854775807:
return 64
else:
raise RuntimeError( "Unknown architecture" )
#The following code is cut-and-paste from this post:
#http://groups.google.com/group/comp.lang.python/browse_thread/thread/5b71896c06bd0f76/
#Thanks to Michele Simionato, for it
class cached(property):
'Convert a method into a cached attribute'
def __init__(self, method):
private = '_' + method.__name__
def fget(s):
try:
return getattr(s, private)
except AttributeError:
value = method(s)
setattr(s, private, value)
return value
def fdel(s):
del s.__dict__[private]
super(cached, self).__init__(fget, fdel=fdel)
@staticmethod
def reset(self):
cls = self.__class__
for name in dir(cls):
attr = getattr(cls, name)
if isinstance(attr, cached):
delattr(self, name)
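# Hedged usage sketch (not part of the module): caching an expensive
# computation as an attribute; `Expensive` is a made-up class.
#
#     class Expensive(object):
#         @cached
#         def answer(self):
#             print 'computing...'
#             return 42
#
#     e = Expensive()
#     e.answer          # prints 'computing...', caches and returns 42
#     e.answer          # returns the cached 42 without recomputing
#     cached.reset(e)   # drops every cached attribute on the instance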
class enum( object ):
"""Usage example:
class fruits(enum):
apple = 0
orange = 1
fruits.has_value( 1 )
fruits.name_of( 1 )
"""
@classmethod
def has_value( cls, enum_numeric_value ):
for name, value in cls.__dict__.iteritems():
if enum_numeric_value == value:
return True
else:
return False
@classmethod
def name_of( cls, enum_numeric_value ):
for name, value in cls.__dict__.iteritems():
if enum_numeric_value == value:
return name
else:
raise RuntimeError( 'Unable to find name for value(%d) in enumeration "%s"'
% ( enum_numeric_value, cls.__name__ ) )
| daviddoria/itkHoughTransform | Wrapping/WrapITK/Languages/SwigInterface/pygccxml-1.0.0/pygccxml/utils/__init__.py | Python | apache-2.0 | 5,211 |
import os
import platform
from twisted.internet import defer
from .. import data, helper
from p2pool.util import pack
P2P_PREFIX = 'af4576ee'.decode('hex')
P2P_PORT = 10888
ADDRESS_VERSION = 50
RPC_PORT = 10889
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
'myriadcoinaddress' in (yield bitcoind.rpc_help()) and
not (yield bitcoind.rpc_getinfo())['testnet']
))
SUBSIDY_FUNC = lambda height: 1000*100000000 >> (height + 1)//967680
POW_FUNC = lambda data: pack.IntType(256).unpack(__import__('groestl_hash').getPoWHash(data))
BLOCK_PERIOD = 150 # s
SYMBOL = 'MYR'
CONF_FILE_FUNC=lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'myriadcoin') if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/myriadcoin/') if platform.system() == 'Darwin' else os.path.expanduser('~/.myriadcoin'), 'myriadcoin.conf')
BLOCK_EXPLORER_URL_PREFIX = 'http://birdonwheels5.no-ip.org/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://birdonwheels5.no-ip.org/address/'
TX_EXPLORER_URL_PREFIX = 'http://birdonwheels5.no-ip.org/tx/'
SANE_TARGET_RANGE=(2**256//2**32//1000 - 1, 2**256//2**27 - 1)
DUMB_SCRYPT_DIFF = 1
DUST_THRESHOLD = 0.001e8
| depboy/p2pool-depboy | p2pool/bitcoin/networks/myriad_groestl.py | Python | gpl-3.0 | 1,216 |
# Vector with 5 positions
i = 1
vetor = []
while i <= 5:
    n = int(input("Enter a number: "))
    vetor.append(n)
    i += 1
print("Vector:", vetor)
| SANDEISON/Python | 03 - Atacando os tipos básicos/01 - Armazenando mais informações com as listas/02 - Vetor de 5 posicao.py | Python | gpl-2.0 | 178 |
#!/usr/bin/env python
'''
'''
import roslib; roslib.load_manifest('robbie')
import rospy
import time
import actionlib
from face_recognition.msg import *
from std_msgs.msg import String
from festival.srv import *
from datetime import datetime, timedelta
from time import localtime, strftime
class Greeting():
def __init__(self):
#need to use action client
#self.pub = rospy.Publisher("/fr_order", FRClientGoal, self._recog_once)
self.client = actionlib.SimpleActionClient('face_recognition', face_recognition.msg.FaceRecognitionAction)
self._NamePublisher = rospy.Publisher("name", String)
rospy.Subscriber("/speech_text", String, self.speech_callback)
rospy.Subscriber("/face_recognition/feedback", FaceRecognitionActionFeedback, self._feedback)
#self.client.wait_for_server()
#define afternoon and morning
self.noon = strftime("%p:", localtime())
        if self.noon == "AM:":
            self.noon1 = "Good morning "
        else:
            self.noon1 = "Good afternoon "
        #local time
        self.local = strftime("%H:%M:", localtime())
#start speech service
rospy.wait_for_service('speak_text')
try:
self.speak_text_service = rospy.ServiceProxy('speak_text', FestivalSpeech)
except rospy.ServiceException, e:
print "Failed to acquire Festival SpeakText service: %s"%e
        self.speak_text_service(self.noon1 + "Robbie is online. The time is " + self.local)
def speech_callback(self,text):
self.hold = text.data
if self.hold == "good morning":
goal = face_recognition.msg.FaceRecognitionGoal(order_id=1, order_argument="none")
self.client.send_goal(goal)
time.sleep(0.2)
#self.hold = "good morning tim"
goal = face_recognition.msg.FaceRecognitionGoal(order_id=0, order_argument="none")
self.client.send_goal(goal)
elif self.hold == "hello":
self.hold = "hello tim"
rospy.logwarn(str(self.hold))
def _feedback(self, text):
self.name = text.feedback.names
self.confidence = text.feedback.confidence
#rospy.loginfo(self.confidence[0])
if self.confidence[0] > 0.8:
self.name1 = (self.name[0])
rospy.loginfo(self.name1)
self.speak_text_service("good morning " + " " + str(self.name1))
else:
self.name1 = "unknown"
self.speak_text_service("Hello my name is Robbie what is your name please ")
self._NamePublisher.publish(self.name1)
def Start(self):
rospy.logdebug("Starting")
def Stop(self):
rospy.logdebug("Stopping")
self.speak_text_service("Robbie's brain is going to sleep")
if __name__ == '__main__':
try:
st = Greeting()
rospy.init_node('Greeting_node')
rospy.spin()
except rospy.ROSInterruptException:
pass
| peterheim1/robbie | bin/greeting_backup.py | Python | gpl-3.0 | 3,118 |
from __future__ import absolute_import
import responses
import mock
from sentry.testutils import TestCase
from sentry.models import Integration
class GitHubAppsClientTest(TestCase):
@mock.patch('sentry.integrations.github.client.get_jwt', return_value='jwt_token_1')
@responses.activate
def test_save_token(self, get_jwt):
integration = Integration.objects.create(
provider='github',
name='Github Test Org',
external_id='1',
metadata={
'access_token': None,
'expires_at': None,
}
)
install = integration.get_installation(organization_id='123')
client = install.get_client()
responses.add(
method=responses.POST,
url='https://api.github.com/installations/1/access_tokens',
body='{"token": "12345token", "expires_at": "2030-01-01T00:00:00Z"}',
status=200,
content_type='application/json',
)
token = client.get_token()
assert token == '12345token'
assert len(responses.calls) == 1
# Second get_token doesn't have to make an API call
token = client.get_token()
assert token == '12345token'
assert len(responses.calls) == 1
| looker/sentry | tests/sentry/integrations/github/test_client.py | Python | bsd-3-clause | 1,297 |
#!/usr/bin/python
# -*- coding: iso-8859-2 -*-
__version__ = (0, 0, 1, 2)
__author__ = "Lukasz Antczak"
__contact__ = "antczak.lukasz@hotmail.com"
__homepage__ = ""
__docformat__ = "restructuredtext"
#try:
# from django.conf import settings
# settings.INSTALLED_APPS.insert(0, 'cassango')
#except ImportError:
# setup.py imports this file in order to read version/author/... metadata
# but does not necessarily have a Django context.
# pass
| jabadabadu/cassango | cassango/__init__.py | Python | bsd-2-clause | 480 |
#!/usr/bin/env python
"""
Copyright (c) 2013-2018, Citrix Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import sys
import traceback
import json
import argparse
def success(result):
return {"Status": "Success", "Value": result}
def handle_exception(e, code=None, params=None):
s = sys.exc_info()
files = []
lines = []
for slot in traceback.extract_tb(s[2]):
files.append(slot[0])
lines.append(slot[1])
backtrace = {
"error": str(s[1]),
"files": files,
"lines": lines,
}
code = "SR_BACKEND_FAILURE"
params = [s[0].__name__, str(s[1])]
if hasattr(e, "code"):
code = e.code
if hasattr(e, "params"):
params = e.params
results = {
"code": code,
"params": params,
"backtrace": backtrace,
}
print >>sys.stdout, json.dumps(results)
sys.exit(1)
class XenAPIException(Exception):
def __init__(self, code, params):
Exception.__init__(self)
if not isinstance(code, str) and not isinstance(code, unicode):
raise (TypeError("string", repr(code)))
if not isinstance(params, list):
raise (TypeError("list", repr(params)))
self.code = code
self.params = params
class MissingDependency(Exception):
def __init__(self, missing):
self.missing = missing
def __str__(self):
return "There is a missing dependency: %s not found" % self.missing
class Rpc_light_failure(Exception):
def __init__(self, name, args):
self.name = name
self.args = args
def failure(self):
# rpc-light marshals a single result differently to a list of results
args = list(self.args)
marshalled_args = args
if len(args) == 1:
marshalled_args = args[0]
return {'Status': 'Failure',
'ErrorDescription': [self.name, marshalled_args]}
class Unimplemented(Rpc_light_failure):
def __init__(self, name):
Rpc_light_failure.__init__(self, "Unimplemented", [name])
class InternalError(Rpc_light_failure):
def __init__(self, error):
Rpc_light_failure.__init__(self, "Internal_error", [error])
class UnmarshalException(InternalError):
def __init__(self, thing, ty, desc):
InternalError.__init__(
self,
"UnmarshalException thing=%s ty=%s desc=%s" % (thing, ty, desc))
class TypeError(InternalError):
def __init__(self, expected, actual):
InternalError.__init__(
self, "TypeError expected=%s actual=%s" % (expected, actual))
class UnknownMethod(InternalError):
def __init__(self, name):
InternalError.__init__(self, "Unknown method %s" % name)
def is_long(x):
try:
long(x)
return True
except ValueError:
return False
class ListAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
k = values[0]
v = values[1]
if ((hasattr(namespace, self.dest)
and getattr(namespace, self.dest) is not None)):
getattr(namespace, self.dest)[k] = v
else:
setattr(namespace, self.dest, {k: v})
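if __name__ == "__main__":
    # Hedged usage sketch (not part of the module): ListAction folds repeated
    # "KEY VALUE" argument pairs into a single dict on the namespace.
    parser = argparse.ArgumentParser()
    parser.add_argument("--config", nargs=2, action=ListAction, dest="config")
    ns = parser.parse_args(["--config", "a", "1", "--config", "b", "2"])
    print >>sys.stdout, ns.config  # prints {'a': '1', 'b': '2'}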
| xapi-project/message-switch | xapi-storage/python/xapi/__init__.py | Python | isc | 4,435 |
#!/usr/bin/env python
import os
import sys
import dotenv
dotenv.read_dotenv()
if __name__ == "__main__":
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'storageofknowledge.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Settings')
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
| ArtyomSliusar/StorageOfKnowledge | storageofknowledge/manage.py | Python | gpl-3.0 | 364 |
import caduc.timer
import caduc.image
import docker
import docker.utils
import docker.errors
import logging
import pytest
import sys
import time
import sure
import unittest
from caduc.cmd import create_watcher
from .. import mock
docker_error = None
no_docker = False
try:
docker.Client(**docker.utils.kwargs_from_env(assert_hostname=False)).version()
except Exception as e:
docker_error = e
no_docker = True
class ControlledTimer(object):
def __init__(self, delay, cb):
self.cb = cb
self.delay = delay
self.started = False
def start(self):
self.started = True
def cancel(self):
self.started = False
def _trigger(self):
if not self.started:
raise RuntimeError("Cannot trigger a non started timer on %r" % self.cb)
self.cb()
def __str__(self):
return "<Timer: delay: %s started: %s>" % (self.delay, self.started)
@unittest.skipIf(no_docker, "Failed to connect to docker host, error: %s" % docker_error)
class IntegrationTest(unittest.TestCase):
def setUp(self):
self.logger = logging.getLogger(type(self).__name__)
self.client = docker.Client(**docker.utils.kwargs_from_env(assert_hostname=False))
options = mock.Mock()
options.debug = False
options.config = ['images.test-*.grace_time=1s']
options.config_path = None
options.image_gracetime = '1d'
self.options = options
self.containers = set()
self.images = set()
def tearDown(self):
caduc.timer.Timer.CancelAll()
for container in self.containers :
try:
self.client.remove_container(container,
v=True,
force=True
)
except:
pass
for image in self.images:
try:
self.client.remove_image(image,
force=True
)
except:
pass
def build_test_image(self, image_name):
for line in self.client.build("tests/fixtures/images", image_name):
sys.stdout.write(line)
self.images.add(image_name)
def start_test_container(self, image_name):
container = self.client.create_container('test-image-build', command='tail -f /dev/null', tty=True)
self.containers.add(container['Id'])
return container
def remove_test_container(self, container):
self.client.remove_container(container,
v=True,
force=True
)
try:
if isinstance(container, dict):
self.containers.remove(container['Id'])
else:
self.containers.remove(container)
except:
pass
def dict_intersect(self, d1, d2):
"""
Returns the shared definition of 2 dicts
"""
common_keys = set(d1.keys()) & set(d2.keys())
r = {}
for key in common_keys:
if isinstance(d1[key], dict) and isinstance(d2[key], dict):
r[key] = self.dict_intersect(d1[key], d2[key])
else:
if d1[key] == d2[key]:
r[key] = d1[key]
return r
def wait_for_event(self, listener, watcher, event):
for e in listener:
watcher.handle(e)
common = self.dict_intersect(e,event)
self.logger.info('event: %r, waiting for: %r, shared keys: %r', e, event, common)
if common == event:
return
@mock.patch('caduc.image.Image.Timer', new=ControlledTimer)
@pytest.mark.timeout(5)
def test_image_tag_plans_image_deletion(self):
watcher = create_watcher(self.options, [])
listener = self.client.events(decode=True)
self.build_test_image('test-image-build')
self.wait_for_event(listener, watcher, {
'Action':'tag',
'Actor': {
'Attributes':{
'name':'test-image-build:latest'
}
}
}
)
watcher.images['test-image-build'].event.should.not_be(None)
watcher.images['test-image-build'].event.started.should.be.truthy
watcher.images['test-image-build'].event.delay.should.be.eql(1)
@mock.patch('caduc.image.Image.Timer', new=ControlledTimer)
@pytest.mark.timeout(5)
def test_existing_image_deletion_is_planned(self):
self.build_test_image('test-image-build')
watcher = create_watcher(self.options, [])
self.logger.info(watcher.images['test-image-build'])
watcher.images['test-image-build'].event.should.not_be(None)
watcher.images['test-image-build'].event.started.should.be.truthy
@mock.patch('caduc.image.Image.Timer', new=ControlledTimer)
@pytest.mark.timeout(5)
def test_container_creation_cancels_image_deletion(self):
self.build_test_image('test-image-build')
watcher = create_watcher(self.options, [])
old_event = watcher.images['test-image-build'].event
listener = self.client.events(decode=True)
container = self.start_test_container('test-image-build')
self.wait_for_event(listener, watcher, {
'Action': 'create',
'Type': 'container',
})
old_event.started.should.not_be.truthy
watcher.images['test-image-build'].event.should.be(None)
@mock.patch('caduc.image.Image.Timer', new=ControlledTimer)
@pytest.mark.timeout(5)
def test_container_removal_schedules_image_removal(self):
self.build_test_image('test-image-build')
container = self.start_test_container('test-image-build')
listener = self.client.events(decode=True)
watcher = create_watcher(self.options, [])
self.remove_test_container(container)
self.wait_for_event(listener, watcher, {
'Action': 'destroy',
})
watcher.images['test-image-build'].event.should.not_be(None)
watcher.images['test-image-build'].event.started.should.be.truthy
watcher.images['test-image-build'].event.delay.should.eql(1)
@mock.patch('caduc.image.Image.Timer', new=ControlledTimer)
@pytest.mark.timeout(5)
    def test_timer_trigger_deletes_image(self):
self.build_test_image('test-image-build')
listener = self.client.events(decode=True)
watcher = create_watcher(self.options, [])
watcher.images['test-image-build'].event._trigger()
self.wait_for_event(listener, watcher, {
'Action': 'delete',
})
| tjamet/caduc | tests/integration/test_main.py | Python | gpl-3.0 | 6,652 |
#----------------------------------------------------------------------------
# Persistent element grouping support.
#
#----------------------------------------------------------------------------
# Copyright 2017, Martin Kolman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#----------------------------------------------------------------------------
import blitzdb
import copy
from threading import RLock
from core.base import TsubamePersistentBase
from core import filter
class Group(TsubamePersistentBase):
"""And ordered serializable group of items.
Supports atomic operations on the contained group.
"""
data_defaults = {"members" : []}
def __init__(self, db, data):
super(Group, self).__init__(db, data)
self._group_lock = RLock()
self._members = []
def add(self, item):
with self._group_lock:
# the items to be added need to be
# persistent class instances
self.data.members.append(item.data)
self._members.append(item)
def clear(self):
with self._group_lock:
self.data.members.clear()
self._members.clear()
def _load_members(self):
raise NotImplementedError
@property
def members(self):
with self._group_lock:
return self._members
def pop(self, index):
with self._group_lock:
self.data.members.pop(index)
return self._members.pop(index)
# NOTE: The reordering operations need to be always done
# in the backing filter data list as well to keep
# them in sync.
def swap(self, item_1, item_2):
# TODO
raise NotImplementedError
def swap_by_index(self):
# TODO
raise NotImplementedError
def replace_items(self, new_items):
with self._group_lock:
self._members.clear()
self.data.members.clear()
self._members = new_items
self.data.members = [i.data for i in new_items]
def __repr__(self):
return "%s containing %s" % (self.__class__.__name__, self.members)
class FilterGroupData(blitzdb.Document):
pass
class FilterGroup(Group):
data_defaults = copy.deepcopy(Group.data_defaults)
@classmethod
def new(cls, db):
data = FilterGroupData(copy.deepcopy(cls.data_defaults))
return cls(db, data)
def __init__(self, db, data):
super(FilterGroup, self).__init__(db, data)
with self._group_lock:
self._load_members()
def _load_members(self):
for item_data in self.data.members:
            # Fetch the functional filter class
            # based on the data class.
cls = filter.CLASS_MAP.get(item_data.__class__)
if cls is None:
self.log.error("Filter class class not found for data: %s",
item_data)
else:
self._members.append(cls(self.db, item_data))
def filter_messages(self, messages):
for single_filter in self.members:
messages = single_filter.filter_messages(messages)
return list(messages) # make sure we return a list of messages | M4rtinK/tsubame | core/group.py | Python | gpl-3.0 | 3,816 |
from test_support import *
prove_all(steps=50000, prover=["cvc4", "altergo"])
| ptroja/spark2014 | testsuite/gnatprove/tests/P530-022__loopinv/test.py | Python | gpl-3.0 | 79 |
from sense_hat import SenseHat
import time, sys
sense = SenseHat()
sense.set_rotation(180)
red = [255,0,0]
orange = [255,127,0]
yellow = [255,255,0]
green = [0,255,0]
blue = [0,0,255]
indigo = [75,0,130]
violet = [159,0,255]
black = [0,0,0]
x = [0,255,0]
o = [0,0,0]
r = [255,0,0]
frame_delay = 0.2
pause_delay = 2
repeat = 50
image1 = [
o,x,o,o,o,o,x,o,
o,o,x,o,o,x,o,o,
o,o,x,x,x,x,o,o,
o,x,r,x,x,r,x,o,
x,o,x,x,x,x,o,x,
x,o,x,x,x,x,o,x,
o,o,x,o,o,x,o,o,
o,x,o,o,o,o,x,o
]
image2 = [
o,x,o,o,o,o,x,o,
o,o,x,o,o,x,o,o,
x,o,x,x,x,x,o,x,
x,x,r,x,x,r,x,x,
o,o,x,x,x,x,o,o,
o,o,x,x,x,x,o,o,
o,o,x,o,o,x,o,o,
o,o,o,x,x,o,o,o
]
image3 = [
o,o,o,x,x,o,o,o,
o,x,x,x,x,x,x,o,
x,x,x,x,x,x,x,x,
x,r,r,x,x,r,r,x,
x,x,x,x,x,x,x,x,
o,o,x,o,o,x,o,o,
o,x,o,x,x,o,x,o,
x,o,o,o,o,o,o,x
]
image4 = [
o,o,o,x,x,o,o,o,
o,x,x,x,x,x,x,o,
x,x,x,x,x,x,x,x,
x,r,r,x,x,r,r,x,
x,x,x,x,x,x,x,x,
o,x,o,o,o,o,x,o,
x,o,o,x,x,o,o,x,
x,x,x,o,o,x,x,x
]
image5 = [
o,o,o,x,x,o,o,o,
o,o,x,x,x,x,o,o,
o,x,r,x,x,r,x,o,
x,x,x,x,x,x,x,x,
x,x,x,x,x,x,x,x,
o,x,o,x,x,o,x,o,
x,o,o,o,o,o,o,x,
o,x,o,o,o,o,x,o
]
image6 = [
o,o,o,x,x,o,o,o,
o,o,x,x,x,x,o,o,
o,x,r,x,x,r,x,o,
x,x,x,x,x,x,x,x,
x,x,x,x,x,x,x,x,
o,o,x,o,o,x,o,o,
o,x,o,x,x,o,x,o,
x,o,x,o,o,x,o,x
]
image7 = [
o,o,o,x,x,o,o,o,
o,o,x,x,x,x,o,o,
o,x,x,x,x,x,x,o,
x,x,o,x,o,x,o,x,
x,x,x,x,x,x,x,x,
x,x,x,x,x,x,x,x,
o,x,o,x,x,o,x,o,
o,x,o,o,o,o,x,o
]
image8 = [
o,o,o,x,x,o,o,o,
o,o,x,x,x,x,o,o,
o,x,x,x,x,x,x,o,
x,o,x,o,x,o,x,x,
x,x,x,x,x,x,x,x,
x,x,x,x,x,x,x,x,
o,x,o,x,x,o,x,o,
o,x,o,o,o,o,x,o
]
i1 = 1
while (i1 <= repeat):
sense.set_pixels(image1)
time.sleep(frame_delay)
sense.set_pixels(image2)
time.sleep(frame_delay)
i1 += 1
time.sleep(pause_delay)
i2 = 1
while (i2 <= repeat):
sense.set_pixels(image3)
time.sleep(frame_delay)
sense.set_pixels(image4)
time.sleep(frame_delay)
i2 += 1
time.sleep(pause_delay)
i3 = 1
while (i3 <= repeat):
sense.set_pixels(image5)
time.sleep(frame_delay)
sense.set_pixels(image6)
time.sleep(frame_delay)
i3 += 1
time.sleep(pause_delay)
i4 = 1
while (i4 <= repeat):
sense.set_pixels(image7)
time.sleep(frame_delay)
sense.set_pixels(image8)
time.sleep(frame_delay)
i4 += 1
time.sleep(pause_delay)
print "Program Complete - System Exiting...."
sense.show_message("Python Programming by CathalStewart.co.uk.........;-)", scroll_speed=0.05, back_colour=[0,0,0])
time.sleep(pause_delay)
sys.exit()
| poslogic/Raspberry-Pi-Sense-Hat | invader.py | Python | gpl-3.0 | 2,399 |
import importlib
import logging
import sys
import textwrap
import unittest
from os.path import abspath, sep
from queue import Empty, Queue
from tempfile import mkdtemp
from threading import Thread
import pytest
from errbot.backends.base import ONLINE, Message, Person, Room, RoomOccupant
from errbot.bootstrap import setup_bot
from errbot.core import ErrBot
from errbot.core_plugins.wsview import reset_app
from errbot.rendering import text
from errbot.utils import deprecated
log = logging.getLogger(__name__)
QUIT_MESSAGE = "$STOP$"
STZ_MSG = 1
STZ_PRE = 2
STZ_IQ = 3
class TestPerson(Person):
"""
This is an identifier just represented as a string.
DO NOT USE THIS DIRECTLY AS IT IS NOT COMPATIBLE WITH MOST BACKENDS,
use self.build_identifier(identifier_as_string) instead.
Note to back-end implementors: You should provide a custom
<yourbackend>Identifier object that adheres to this interface.
You should not directly inherit from SimpleIdentifier, inherit
from object instead and make sure it includes all properties and
methods exposed by this class.
"""
def __init__(self, person, client=None, nick=None, fullname=None, email=None):
self._person = person
self._client = client
self._nick = nick
self._fullname = fullname
self._email = email
@property
def person(self):
"""This needs to return the part of the identifier pointing to a person."""
return self._person
@property
def client(self):
"""This needs to return the part of the identifier pointing to a client
from which a person is sending a message from.
        Returns None if unspecified"""
return self._client
@property
def nick(self):
"""This needs to return a short display name for this identifier e.g. gbin.
        Returns None if unspecified"""
return self._nick
@property
def fullname(self):
"""This needs to return a long display name for this identifier e.g. Guillaume Binet.
        Returns None if unspecified"""
return self._fullname
@property
def email(self):
"""This needs to return an email for this identifier e.g. Guillaume.Binet@gmail.com.
Returns None is unspecified"""
return self._email
aclattr = person
def __unicode__(self):
if self.client:
return f"{self._person}/{self._client}"
return f"{self._person}"
__str__ = __unicode__
def __eq__(self, other):
if not isinstance(other, Person):
return False
return self.person == other.person
# noinspection PyAbstractClass
class TestOccupant(TestPerson, RoomOccupant):
"""This is a MUC occupant represented as a string.
DO NOT USE THIS DIRECTLY AS IT IS NOT COMPATIBLE WITH MOST BACKENDS,
"""
def __init__(self, person, room):
super().__init__(person)
self._room = room
@property
def room(self):
return self._room
def __unicode__(self):
return self._person + "@" + str(self._room)
__str__ = __unicode__
def __eq__(self, other):
return self.person == other.person and self.room == other.room
class TestRoom(Room):
def invite(self, *args):
pass
def __init__(self, name, occupants=None, topic=None, bot=None):
"""
:param name: Name of the room
:param occupants: Occupants of the room
:param topic: The MUC's topic
"""
if occupants is None:
occupants = []
self._occupants = occupants
self._topic = topic
self._bot = bot
self._name = name
self._bot_mucid = TestOccupant(
self._bot.bot_config.BOT_IDENTITY["username"], self._name
)
@property
def occupants(self):
return self._occupants
def find_croom(self):
""" find back the canonical room from a this room"""
for croom in self._bot._rooms:
if croom == self:
return croom
return None
@property
def joined(self):
room = self.find_croom()
if room:
return self._bot_mucid in room.occupants
return False
def join(self, username=None, password=None):
if self.joined:
logging.warning(
"Attempted to join room %s, but already in this room.", self
)
return
if not self.exists:
log.debug("Room %s doesn't exist yet, creating it.", self)
self.create()
room = self.find_croom()
room._occupants.append(self._bot_mucid)
log.info("Joined room %s.", self)
self._bot.callback_room_joined(room)
def leave(self, reason=None):
if not self.joined:
logging.warning("Attempted to leave room %s, but not in this room.", self)
return
room = self.find_croom()
room._occupants.remove(self._bot_mucid)
log.info("Left room %s.", self)
self._bot.callback_room_left(room)
@property
def exists(self):
return self.find_croom() is not None
def create(self):
if self.exists:
logging.warning("Room %s already created.", self)
return
self._bot._rooms.append(self)
log.info("Created room %s.", self)
def destroy(self):
if not self.exists:
logging.warning("Cannot destroy room %s, it doesn't exist.", self)
return
self._bot._rooms.remove(self)
log.info("Destroyed room %s.", self)
@property
def topic(self):
return self._topic
@topic.setter
def topic(self, topic):
self._topic = topic
room = self.find_croom()
room._topic = self._topic
log.info("Topic for room %s set to %s.", self, topic)
self._bot.callback_room_topic(self)
def __unicode__(self):
return self._name
def __str__(self):
return self._name
def __eq__(self, other):
return self._name == other._name
class TestBackend(ErrBot):
def change_presence(self, status: str = ONLINE, message: str = "") -> None:
pass
def __init__(self, config):
config.BOT_LOG_LEVEL = logging.DEBUG
config.CHATROOM_PRESENCE = (
"testroom",
        )  # we are testing with simple identifiers
config.BOT_IDENTITY = {
"username": "err"
        }  # we are testing with simple identifiers
self.bot_identifier = self.build_identifier("Err") # whatever
super().__init__(config)
self.incoming_stanza_queue = Queue()
self.outgoing_message_queue = Queue()
self.sender = self.build_identifier(
config.BOT_ADMINS[0]
) # By default, assume this is the admin talking
self.reset_rooms()
self.md = text()
def send_message(self, msg):
log.info("\n\n\nMESSAGE:\n%s\n\n\n", msg.body)
super().send_message(msg)
self.outgoing_message_queue.put(self.md.convert(msg.body))
def send_stream_request(self, user, fsource, name, size, stream_type):
# Just dump the stream contents to the message queue
self.outgoing_message_queue.put(fsource.read())
def serve_forever(self):
        self.connect_callback()  # notify that the connection occurred
try:
while True:
log.debug("waiting on queue")
stanza_type, entry = self.incoming_stanza_queue.get()
log.debug("message received")
if entry == QUIT_MESSAGE:
log.info("Stop magic message received, quitting...")
break
if stanza_type is STZ_MSG:
msg = Message(entry)
msg.frm = self.sender
msg.to = self.bot_identifier # To me only
self.callback_message(msg)
# implements the mentions.
mentioned = [
self.build_identifier(word[1:])
for word in entry.split()
if word.startswith("@")
]
if mentioned:
self.callback_mention(msg, mentioned)
elif stanza_type is STZ_PRE:
log.info("Presence stanza received.")
self.callback_presence(entry)
elif stanza_type is STZ_IQ:
log.info("IQ stanza received.")
else:
log.error("Unknown stanza type.")
except EOFError:
pass
except KeyboardInterrupt:
pass
finally:
log.debug("Trigger disconnect callback")
self.disconnect_callback()
log.debug("Trigger shutdown")
self.shutdown()
def connect(self):
return
def build_identifier(self, text_representation):
return TestPerson(text_representation)
    def build_reply(self, msg, text=None, private=False, threaded=False):
        reply = self.build_message(text)
        reply.frm = self.bot_identifier
        reply.to = msg.frm
        return reply
@property
def mode(self):
return "test"
def rooms(self):
return [r for r in self._rooms if r.joined]
def query_room(self, room):
try:
return [r for r in self._rooms if str(r) == str(room)][0]
except IndexError:
r = TestRoom(room, bot=self)
return r
def prefix_groupchat_reply(self, message, identifier):
super().prefix_groupchat_reply(message, identifier)
message.body = f"@{identifier.nick} {message.body}"
def pop_message(self, timeout=5, block=True):
return self.outgoing_message_queue.get(timeout=timeout, block=block)
def push_message(self, msg):
self.incoming_stanza_queue.put((STZ_MSG, msg), timeout=5)
def push_presence(self, presence):
"""presence must at least duck type base.Presence"""
self.incoming_stanza_queue.put((STZ_PRE, presence), timeout=5)
def zap_queues(self):
while not self.incoming_stanza_queue.empty():
msg = self.incoming_stanza_queue.get(block=False)
log.error("Message left in the incoming queue during a test: %s.", msg)
while not self.outgoing_message_queue.empty():
msg = self.outgoing_message_queue.get(block=False)
log.error("Message left in the outgoing queue during a test: %s.", msg)
def reset_rooms(self):
"""Reset/clear all rooms"""
self._rooms = []
class ShallowConfig:
pass
class TestBot:
"""
A minimal bot utilizing the TestBackend, for use with unit testing.
Only one instance of this class should globally be active at any one
time.
End-users should not use this class directly. Use
:func:`~errbot.backends.test.testbot` or
:class:`~errbot.backends.test.FullStackTest` instead, which use this
class under the hood.
"""
bot_thread = None
def __init__(
self, extra_plugin_dir=None, loglevel=logging.DEBUG, extra_config=None
):
self.setup(
extra_plugin_dir=extra_plugin_dir,
loglevel=loglevel,
extra_config=extra_config,
)
def setup(self, extra_plugin_dir=None, loglevel=logging.DEBUG, extra_config=None):
"""
:param extra_config: Piece of extra configuration you want to inject to the config.
:param extra_plugin_dir: Path to a directory from which additional
plugins should be loaded.
:param loglevel: Logging verbosity. Expects one of the constants
defined by the logging module.
"""
tempdir = mkdtemp()
# This is for test isolation.
config = ShallowConfig()
config.__dict__.update(
importlib.import_module("errbot.config-template").__dict__
)
config.BOT_DATA_DIR = tempdir
config.BOT_LOG_FILE = tempdir + sep + "log.txt"
config.STORAGE = "Memory"
if extra_config is not None:
log.debug("Merging %s to the bot config.", repr(extra_config))
for k, v in extra_config.items():
setattr(config, k, v)
# reset logging to console
logging.basicConfig(format="%(levelname)s:%(message)s")
file = logging.FileHandler(config.BOT_LOG_FILE, encoding="utf-8")
self.logger = logging.getLogger("")
self.logger.setLevel(loglevel)
self.logger.addHandler(file)
config.BOT_EXTRA_PLUGIN_DIR = extra_plugin_dir
config.BOT_LOG_LEVEL = loglevel
self.bot_config = config
def start(self, timeout=2):
"""
Start the bot
Calling this method when the bot has already started will result
in an Exception being raised.
:param timeout: Timeout for the ready message pop. pop will be done 60 times so the total timeout is 60*timeout
"""
if self.bot_thread is not None:
raise Exception("Bot has already been started")
self._bot = setup_bot("Test", self.logger, self.bot_config)
self.bot_thread = Thread(
target=self.bot.serve_forever, name="TestBot main thread"
)
self.bot_thread.setDaemon(True)
self.bot_thread.start()
self.bot.push_message("!echo ready")
# Ensure bot is fully started and plugins are loaded before returning
try:
for i in range(60):
# Gobble initial error messages...
msg = self.bot.pop_message(timeout=timeout)
if msg == "ready":
break
log.warning("Queue was not empty, the non-consumed message is:")
log.warning(msg)
log.warning("Check the previous test and remove spurrious messages.")
except Empty:
raise AssertionError('The "ready" message has not been received (timeout).')
@property
def bot(self) -> ErrBot:
return self._bot
def stop(self):
"""
Stop the bot
Calling this method before the bot has started will result in an
Exception being raised.
"""
if self.bot_thread is None:
raise Exception("Bot has not yet been started")
self.bot.push_message(QUIT_MESSAGE)
self.bot_thread.join()
reset_app() # empty the bottle ... hips!
log.info("Main bot thread quits")
self.bot.zap_queues()
self.bot.reset_rooms()
self.bot_thread = None
def pop_message(self, timeout=5, block=True):
return self.bot.pop_message(timeout, block)
def push_message(self, msg):
return self.bot.push_message(msg)
def push_presence(self, presence):
"""presence must at least duck type base.Presence"""
return self.bot.push_presence(presence)
def exec_command(self, command, timeout=5):
"""Execute a command and return the first response.
        This enables more py.test-style assertions:
assert 'blah' in exec_command('!hello')
"""
self.bot.push_message(command)
return self.bot.pop_message(timeout)
def zap_queues(self):
return self.bot.zap_queues()
def assertInCommand(self, command, response, timeout=5, dedent=False):
"""Assert the given command returns the given response"""
if dedent:
command = "\n".join(textwrap.dedent(command).splitlines()[1:])
self.bot.push_message(command)
msg = self.bot.pop_message(timeout)
assert response in msg, f"{response} not in {msg}."
@deprecated(assertInCommand)
def assertCommand(self, command, response, timeout=5, dedent=False):
"""Assert the given command returns the given response"""
pass
def assertCommandFound(self, command, timeout=5):
"""Assert the given command exists"""
self.bot.push_message(command)
assert "not found" not in self.bot.pop_message(timeout)
def inject_mocks(self, plugin_name: str, mock_dict: dict):
"""Inject mock objects into the plugin
mock_dict = {
'field_1': obj_1,
'field_2': obj_2,
}
testbot.inject_mocks(HelloWorld, mock_dict)
assert 'blah' in testbot.exec_command('!hello')
"""
plugin = self.bot.plugin_manager.get_plugin_obj_by_name(plugin_name)
if plugin is None:
raise Exception(f'"{plugin_name}" is not loaded.')
for field, mock_obj in mock_dict.items():
if not hasattr(plugin, field):
raise ValueError(f'No property/attribute named "{field}" attached.')
setattr(plugin, field, mock_obj)
class FullStackTest(unittest.TestCase, TestBot):
"""
Test class for use with Python's unittest module to write tests
against a fully functioning bot.
For example, if you wanted to test the builtin `!about` command,
you could write a test file with the following::
from errbot.backends.test import FullStackTest
class TestCommands(FullStackTest):
def test_about(self):
self.push_message('!about')
self.assertIn('Err version', self.pop_message())
"""
def setUp(
self,
extra_plugin_dir=None,
extra_test_file=None,
loglevel=logging.DEBUG,
extra_config=None,
):
"""
:param extra_plugin_dir: Path to a directory from which additional
plugins should be loaded.
:param extra_test_file: [Deprecated but kept for backward-compatibility,
use extra_plugin_dir instead]
Path to an additional plugin which should be loaded.
:param loglevel: Logging verbosity. Expects one of the constants
defined by the logging module.
:param extra_config: Piece of extra bot config in a dict.
"""
if extra_plugin_dir is None and extra_test_file is not None:
extra_plugin_dir = sep.join(abspath(extra_test_file).split(sep)[:-2])
self.setup(
extra_plugin_dir=extra_plugin_dir,
loglevel=loglevel,
extra_config=extra_config,
)
self.start()
def tearDown(self):
self.stop()
@pytest.fixture
def testbot(request) -> TestBot:
"""
Pytest fixture to write tests against a fully functioning bot.
For example, if you wanted to test the builtin `!about` command,
you could write a test file with the following::
def test_about(testbot):
testbot.push_message('!about')
assert "Err version" in testbot.pop_message()
It's possible to provide additional configuration to this fixture,
by setting variables at module level or as class attributes (the
latter taking precedence over the former). For example::
extra_plugin_dir = '/foo/bar'
def test_about(testbot):
testbot.push_message('!about')
assert "Err version" in testbot.pop_message()
..or::
extra_plugin_dir = '/foo/bar'
class Tests:
# Wins over `extra_plugin_dir = '/foo/bar'` above
extra_plugin_dir = '/foo/baz'
def test_about(self, testbot):
testbot.push_message('!about')
assert "Err version" in testbot.pop_message()
..to load additional plugins from the directory `/foo/bar` or
`/foo/baz` respectively. This works for the following items, which are
passed to the constructor of :class:`~errbot.backends.test.TestBot`:
* `extra_plugin_dir`
* `loglevel`
"""
def on_finish():
bot.stop()
# setup the logging to something digestable.
logger = logging.getLogger("")
logging.getLogger("MARKDOWN").setLevel(
logging.ERROR
) # this one is way too verbose in debug
logger.setLevel(logging.DEBUG)
console_hdlr = logging.StreamHandler(sys.stdout)
console_hdlr.setFormatter(
logging.Formatter("%(levelname)-8s %(name)-25s %(message)s")
)
logger.handlers = []
logger.addHandler(console_hdlr)
kwargs = {}
for attr, default in (
("extra_plugin_dir", None),
("extra_config", None),
("loglevel", logging.DEBUG),
):
        kwargs[attr] = None
        if hasattr(request, "instance"):
            kwargs[attr] = getattr(request.instance, attr, None)
        if kwargs[attr] is None:
            kwargs[attr] = getattr(request.module, attr, default)
bot = TestBot(**kwargs)
bot.start()
request.addfinalizer(on_finish)
return bot
| apophys/err | errbot/backends/test.py | Python | gpl-3.0 | 20,732 |
import pygame
WHITE = (255, 255, 255)
class Car(pygame.sprite.Sprite):
#This class represents a car. It derives from the "Sprite" class in Pygame.
def __init__(self, color, width, height):
# Call the parent class (Sprite) constructor
#super().__init__()
# Pass in the color of the car, and its x and y position, width and height.
# Set the background color and set it to be transparent
self.image = pygame.Surface([width, height])
self.image.fill(WHITE)
self.image.set_colorkey(WHITE)
# Draw the car (a rectangle!)
pygame.draw.rect(self.image, color, [0, 0, width, height])
# Instead we could load a proper pciture of a car...
# self.image = pygame.image.load("car.png").convert_alpha()
# Fetch the rectangle object that has the dimensions of the image.
self.rect = self.image.get_rect()
def moveRight(self, pixels):
self.rect.x += pixels
def moveLeft(self, pixels):
self.rect.x -= pixels
def moveForward(self, pixels):
self.rect.y -= pixels
def moveBackward(self, pixels):
self.rect.y += pixels
| stivosaurus/rpi-snippets | reference_scripts/car.py | Python | unlicense | 1,188 |
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import collections
import random
import re
import string
import time
import types
import itertools
import jsonpatch
import jsonpointer
import yaql.context
import murano.common.config as cfg
import murano.dsl.helpers as helpers
_random_string_counter = None
def _transform_json(json, mappings):
if isinstance(json, types.ListType):
return [_transform_json(t, mappings) for t in json]
if isinstance(json, types.DictionaryType):
result = {}
for key, value in json.items():
result[_transform_json(key, mappings)] = \
_transform_json(value, mappings)
return result
elif isinstance(json, types.StringTypes) and json.startswith('$'):
value = _convert_macro_parameter(json[1:], mappings)
if value is not None:
return value
return json
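# Illustrative example (hypothetical values):
#   _transform_json({'host': '$hostName'}, {'hostName': 'vm-1'})
#   == {'host': 'vm-1'}  # '$'-prefixed strings are resolved via the mappings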
def _convert_macro_parameter(macro, mappings):
replaced = [False]
def replace(match):
replaced[0] = True
return unicode(mappings.get(match.group(1)))
result = re.sub('{(\\w+?)}', replace, macro)
if replaced[0]:
return result
else:
return mappings[macro]
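# Illustrative example (hypothetical values): with mappings {'name': 'demo'},
#   _convert_macro_parameter('host-{name}', mappings) == 'host-demo'
# while a macro without '{...}' placeholders is looked up directly:
#   _convert_macro_parameter('name', mappings) == 'demo'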
@yaql.context.EvalArg('format', types.StringTypes)
def _format(format, *args):
return format.format(*[t() for t in args])
@yaql.context.EvalArg('src', types.StringTypes)
@yaql.context.EvalArg('substring', types.StringTypes)
@yaql.context.EvalArg('value', types.StringTypes)
def _replace_str(src, substring, value):
return src.replace(substring, value)
@yaql.context.EvalArg('src', types.StringTypes)
@yaql.context.EvalArg('replacements', dict)
def _replace_dict(src, replacements):
for key, value in replacements.iteritems():
if isinstance(src, str):
src = src.replace(key, str(value))
else:
src = src.replace(key, unicode(value))
return src
def _len(value):
return len(value())
def _coalesce(*args):
for t in args:
val = t()
if val:
return val
return None
@yaql.context.EvalArg('value', types.StringTypes)
def _base64encode(value):
return base64.b64encode(value)
@yaql.context.EvalArg('value', types.StringTypes)
def _base64decode(value):
return base64.b64decode(value)
@yaql.context.EvalArg('group', types.StringTypes)
@yaql.context.EvalArg('setting', types.StringTypes)
def _config(group, setting):
return cfg.CONF[group][setting]
@yaql.context.EvalArg('setting', types.StringTypes)
def _config_default(setting):
return cfg.CONF[setting]
@yaql.context.EvalArg('value', types.StringTypes)
def _upper(value):
return value.upper()
@yaql.context.EvalArg('value', types.StringTypes)
def _lower(value):
return value.lower()
@yaql.context.EvalArg('separator', types.StringTypes)
def _join(separator, collection):
return separator.join(str(t) for t in collection())
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('separator', types.StringTypes)
def _split(value, separator):
return value.split(separator)
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('prefix', types.StringTypes)
def _startswith(value, prefix):
return value.startswith(prefix)
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('suffix', types.StringTypes)
def _endswith(value, suffix):
return value.endswith(suffix)
@yaql.context.EvalArg('value', types.StringTypes)
def _trim(value):
return value.strip()
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('pattern', types.StringTypes)
def _matches(value, pattern):
return re.match(pattern, value) is not None
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('index', int)
@yaql.context.EvalArg('length', int)
def _substr3(value, index, length):
if length < 0:
return value[index:]
else:
return value[index:index + length]
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('index', int)
def _substr2(value, index):
return _substr3(value, index, -1)
def _str(value):
value = value()
if value is None:
return ''
elif value is True:
return 'true'
elif value is False:
return 'false'
return unicode(value)
def _int(value):
value = value()
if value is None:
return 0
return int(value)
def _pselect(collection, composer):
if isinstance(collection, types.ListType):
return helpers.parallel_select(collection, composer)
else:
return helpers.parallel_select(collection(), composer)
def _patch(obj, patch):
obj = obj()
patch = patch()
if not isinstance(patch, types.ListType):
patch = [patch]
patch = jsonpatch.JsonPatch(patch)
try:
return patch.apply(obj)
except jsonpointer.JsonPointerException:
return obj
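# Illustrative example (hypothetical values; both arguments are YAQL thunks):
#   _patch(lambda: {'a': 1},
#          lambda: {'op': 'replace', 'path': '/a', 'value': 2}) == {'a': 2}
# An invalid JSON-pointer path leaves the object unchanged.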
def _int2base(x, base):
"""Converts decimal integers into another number base
from base-2 to base-36.
:param x: decimal integer
:param base: number base, max value is 36
:return: integer converted to the specified base
"""
digs = string.digits + string.lowercase
if x < 0:
sign = -1
elif x == 0:
return '0'
else:
sign = 1
x *= sign
digits = []
while x:
digits.append(digs[x % base])
x /= base
if sign < 0:
digits.append('-')
digits.reverse()
return ''.join(digits)
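# Examples: _int2base(255, 16) == 'ff'; _int2base(-10, 2) == '-1010'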
def _random_name():
"""Replace '#' char in pattern with supplied number, if no pattern is
supplied generate short and unique name for the host.
:param pattern: hostname pattern
:param number: number to replace with in pattern
:return: hostname
"""
global _random_string_counter
counter = _random_string_counter or 1
# generate first 5 random chars
prefix = ''.join(random.choice(string.lowercase) for _ in range(5))
# convert timestamp to higher base to shorten hostname string
# (up to 8 chars)
timestamp = _int2base(int(time.time() * 1000), 36)[:8]
# third part of random name up to 2 chars
# (1295 is last 2-digit number in base-36, 1296 is first 3-digit number)
suffix = _int2base(counter, 36)
_random_string_counter = (counter + 1) % 1296
return prefix + timestamp + suffix
@yaql.context.EvalArg('self', dict)
def _values(self):
return self.values()
@yaql.context.EvalArg('self', dict)
def _keys(self):
return self.keys()
@yaql.context.EvalArg('self', collections.Iterable)
def _flatten(self):
for i in self:
if isinstance(i, collections.Iterable):
for ii in i:
yield ii
else:
yield i
@yaql.context.EvalArg('self', dict)
@yaql.context.EvalArg('other', dict)
def _merge_with(self, other):
return helpers.merge_dicts(self, other)
@yaql.context.EvalArg('collection', collections.Iterable)
@yaql.context.EvalArg('count', int)
def _skip(collection, count):
return itertools.islice(collection, count, None)
@yaql.context.EvalArg('collection', collections.Iterable)
@yaql.context.EvalArg('count', int)
def _take(collection, count):
return itertools.islice(collection, count)
def register(context):
context.register_function(
lambda json, mappings: _transform_json(json(), mappings()), 'bind')
context.register_function(_format, 'format')
context.register_function(_replace_str, 'replace')
context.register_function(_replace_dict, 'replace')
context.register_function(_len, 'len')
context.register_function(_coalesce, 'coalesce')
context.register_function(_base64decode, 'base64decode')
context.register_function(_base64encode, 'base64encode')
context.register_function(_config, 'config')
context.register_function(_config_default, 'config')
context.register_function(_lower, 'toLower')
context.register_function(_upper, 'toUpper')
context.register_function(_join, 'join')
context.register_function(_split, 'split')
context.register_function(_pselect, 'pselect')
context.register_function(_startswith, 'startsWith')
context.register_function(_endswith, 'endsWith')
context.register_function(_trim, 'trim')
    context.register_function(_matches, 'matches')
context.register_function(_substr2, 'substr')
context.register_function(_substr3, 'substr')
context.register_function(_str, 'str')
context.register_function(_int, 'int')
context.register_function(_patch, 'patch')
context.register_function(_random_name, 'randomName')
# Temporary workaround, these functions should be moved to YAQL
context.register_function(_keys, 'keys')
context.register_function(_values, 'values')
context.register_function(_flatten, 'flatten')
context.register_function(_merge_with, 'mergeWith')
context.register_function(_skip, 'skip')
context.register_function(_take, 'take')
| sergmelikyan/murano | murano/engine/system/yaql_functions.py | Python | apache-2.0 | 9,663 |
#!/usr/bin/env python
# Time how long it takes to backup and recover the current ZAP session
# with a basic sanity check (counting the number of messages before and after recovery)
import datetime, time, sys, getopt
from pprint import pprint
from zapv2 import ZAPv2
def main(argv):
# -------------------------------------------------------------------------
# Default Configurations - use -z/-zap and -w/-wavsep for different IP addrs
# -------------------------------------------------------------------------
zapHostPort = 'http://localhost:8090'
try:
opts, args = getopt.getopt(argv,"z:",["zap="])
except getopt.GetoptError:
# TODO
print('zap-backup-test.py -z <ZAPhostPort>')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print('zap-backup-test.py -z <ZAPhostPort>')
sys.exit()
elif opt in ("-z", "--zap"):
zapHostPort = arg
zap = ZAPv2(proxies={'http': zapHostPort, 'https': zapHostPort})
# Count number of messages
old_mcount = zap.core.number_of_messages()
old_acount = zap.core.number_of_alerts()
print('Initial msg count: %s' % old_mcount)
print('Initial alert count: %s' % old_acount)
# Time backup
start_time = time.time()
zap.core.save_session(name='backup-test', overwrite='true')
backup_time = (time.time() - start_time)
print('Backed up: %s' % str(time.strftime('%H:%M:%S', time.gmtime(int(backup_time)))))
# Time new session
start_time = time.time()
zap.core.new_session(name='backup-empty', overwrite='true')
new_time = (time.time() - start_time)
print('New session: %s' % str(time.strftime('%H:%M:%S', time.gmtime(int(new_time)))))
# Sanity check new session
new_mcount = zap.core.number_of_messages()
    if (new_mcount != '0'):
        print('Unexpected message count in supposedly empty session: %s' % new_mcount)
# Time restore
start_time = time.time()
zap.core.load_session(name='backup-test')
rec_time = (time.time() - start_time)
print('Loaded: %s' % str(time.strftime('%H:%M:%S', time.gmtime(int(rec_time)))))
rec_mcount = zap.core.number_of_messages()
rec_acount = zap.core.number_of_alerts()
if (old_mcount == rec_mcount):
print('PASS: msg counts match')
else:
        print('FAIL: msg counts differ - original: %s recovered: %s' % (old_mcount, rec_mcount))
if (old_acount == rec_acount):
print('PASS: alert counts match')
else:
        print('FAIL: alert counts differ - original: %s recovered: %s' % (old_acount, rec_acount))
if __name__ == "__main__":
main(sys.argv[1:])
| zapbot/zap-mgmt-scripts | zap-backup-test.py | Python | apache-2.0 | 2,576 |
#!/usr/bin/env python2.7
if __name__ == '__main__': print "Loading..."
######## IMPORTS ########
# system imports
import atexit, codecs, json, os, socket, subprocess, sys, time
# local imports
import database
from CloseableThread import CloseableThread
from AccountManager import AccountManager
from util import settings, credentials, \
SoldOut, BadItem, InsufficientFunds, \
URLOpenError, JSONDecodeError
######## SETUP ########
try:
from ThreadSafeFile import ThreadSafeFile as file_
except:
file_ = lambda x: x
print "! Warning: Threadsafe printing unavailable. Output may be interleaved"
isfunc = lambda o: hasattr(o, '__call__')
hasfunc = lambda o, name: hasattr(o, name) and isfunc(getattr(o, name))
class DualWriter():
def __init__(self, one, two):
if not hasfunc(one, 'write') or not hasfunc(two, 'write'):
raise ValueError('one and two must both be writeable')
self.one = one
self.two = two
def write(self, o):
return [self.one.write(o), self.two.write(o)]
  # Use __getattr__ (not __getattribute__): it is only invoked for attributes
  # that are not found normally, so self.one/self.two resolve without the
  # infinite recursion that overriding __getattribute__ would cause here.
  def __getattr__(self, name):
    funcs = []
    if hasfunc(self.one, name):
      funcs.append(getattr(self.one, name))
    else: funcs.append(lambda *args, **kwargs: None)
    if hasfunc(self.two, name):
      funcs.append(getattr(self.two, name))
    else: funcs.append(lambda *args, **kwargs: None)
    return lambda *args, **kwargs: [f(*args, **kwargs) for f in funcs]
sys.stdout = file_(sys.stdout)
log = file_(open('log.txt', 'w'))
sys.stdout = DualWriter(sys.stdout, log)
sys.stderr = DualWriter(sys.stderr, log)
NORMAL = 1
EMULATE = 2
SEARCH = None
# only import serial if a serial device is turned on
if settings.RFID_SCANNER == NORMAL or settings.DISPENSER == NORMAL:
import serial
## Socket Set-Up
HOST = socket.gethostbyname(socket.gethostname())
PHONE_PORT = 8636
MONEY_PORT = 8637
EMU_RFID_PORT = 8638
try:
phone_listener = socket.socket()
phone_listener.bind(("", PHONE_PORT)) #Windows Phone can't connect while debugging if I pass HOST
phone_listener.listen(1)
phone_sock = None
money_listener = socket.socket()
money_listener.bind(("127.0.0.1", MONEY_PORT))
money_listener.listen(1)
money_sock = None
if settings.RFID_SCANNER == EMULATE:
rfid_listener = socket.socket()
rfid_listener.bind(("127.0.0.1", EMU_RFID_PORT))
rfid_listener.listen(1)
rfid_sock = None
except socket.error as e:
if e.errno == 10048:
raw_input("""!! Fatal Error: Socket already in use. Close all other instances of this server
!! and then restart it. If you don't have any visible instances open, try
!! checking for python.exe instances in the task manager.
[ENTER] to exit.""")
exit()
else:
print e.errno
raise e
## Serial Set-UP
if settings.RFID_SCANNER == NORMAL and type(settings.RFID_SCANNER_COMPORT) == int:
settings.RFID_SCANNER_COMPORT = serial.device(settings.RFID_SCANNER_COMPORT - 1)
if settings.DISPENSER == NORMAL and type(settings.DISPENSER_COMPORT) == int:
settings.DISPENSER_COMPORT = serial.device(settings.DISPENSER_COMPORT - 1)
rfid_serial = None
rfid_device = None
dispenser_serial = None
dispenser_device = None
## Subprocess Set-Up
money_process = None
def start_money():
global money_process
if settings.BILL_ACCEPTOR == NORMAL and not money_process:
money_process = subprocess.Popen(["../Munay/bin/Release/Munay.exe"],
creationflags = \
subprocess.CREATE_NEW_CONSOLE)
def close_money():
global money_process
if settings.BILL_ACCEPTOR == NORMAL and money_process:
money_process.terminate()
money_process = None
## account
account_manager = None
print_relogin_message = False
## Helpers
# helper function to listen for a serial connection on a port
def get_serial(n, wait = 1, get_timeout = None, **kwargs):
if get_timeout:
end = time.time() + get_timeout
while True:
try:
s = serial.Serial(n, **kwargs)
return s
except serial.SerialException:
if get_timeout and time.time() + wait > end:
return
time.sleep(wait)
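# Illustrative call ('COM3' is a hypothetical port name):
#   get_serial('COM3', wait = 2, get_timeout = 30, baudrate = 2400)
# retries every 2 seconds for up to ~30 seconds and returns None on timeout.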
def sanitize_chr(c):
o = ord(c)
return chr(o if 32 <= o < 127 else 63)
def sanitize(string):
return ''.join(map(sanitize_chr, string))
def StopThreads():
print("StopThreads called")
money_thread.stop()
phone_thread.stop()
rfid_thread.stop()
dispenser_thread.stop()
def exit_handler():
money_thread._Thread__stop()
if money_process:
money_process.terminate()
phone_thread._Thread__stop()
rfid_thread._Thread__stop()
dispenser_thread._Thread__stop()
exit()
## Main Control Structures
# listen to phone
def phone_receiver():
global phone_sock
while phone_thread.running:
# connection
print "Waiting for phone client"
phone_sock, address = phone_listener.accept()
print "Phone client connected from ", address
while phone_thread.running:
# wait for message
try:
message = phone_sock.recv(512).rstrip()
if len(message) == 0: # disconnected
break
except: # disconnected
break
handle_phone_message(message)
#if program is here, phone client has disconnected
print "Phone client disconnected"
        try:
            phone_sock.shutdown(socket.SHUT_RDWR)
        except socket.error:
            pass
        phone_sock.close()
phone_sock = None
account_manager.log_out()
def send_vend_failure(reason, vendId):
phone_sock.send(json.dumps({'type' : 'vend failure',
'reason' : reason,
'vendId' : vendId})+"\n")
def handle_rfid(rfid):
global money_sock, print_relogin_message, phone_sock
if account_manager.log_in(rfid):
print_relogin_message = True
response = {"type" : "log in",
"username" : account_manager.username,
"balance" : account_manager.balance}
start_money()
phone_sock.send(json.dumps(response)+"\n")
print "Logged in as " + account_manager.username
#else invalid rfid tag, or currently logged in as guest
class BadRequest(Exception): pass
def handle_phone_message(message):
try:
request = json.loads(message)
except:
print "! Anomolous message from phone client: %s" % message
return
if not 'type' in request:
print "Bad request from phone"
try:
if request['type'] == "guest":
print "Logging in as guest"
handle_rfid("000GUEST000")
if request['type'] == "log out":
log_out()
elif request['type'] == "vend":
try:
try:
buy_item(request['vendId'])
except BadItem:
send_vend_failure('vendId', request['vendId'])
return
except InsufficientFunds:
send_vend_failure('balance', request['vendId'])
return
except SoldOut:
send_vend_failure('quantity', request['vendId'])
return
except URLOpenError as e:
print "[Error] Could not connect to http://my.studentrnd.org/"
send_vend_failure('error', request['vendId'])
return
except JSONDecodeError as e:
print "Invalid credentials"
send_vend_failure('error', request['vendId'])
return
except Exception as e:
print "! Error handling 'vend' request'"
print "! Error Type: " + e.__class__.__name__
print "! Error Message: " + e.message
send_vend_failure('error', request['vendId'])
return
# return a 'vend success' response
phone_sock.send(json.dumps({"type" : "vend success",
"balance" : account_manager.balance})+"\n")
elif request['type'] == "inventory":
send_inventory(request['key'] if 'key' in request else None)
except KeyError as e:
print "Bad '%s' request from phone: '%s' not found in request" % (request['key'],
e[0])
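# Illustrative phone protocol messages, inferred from the handlers above
# (all values are hypothetical):
#   {"type": "guest"}                       -> log in as the guest account
#   {"type": "vend", "vendId": "05"}        -> attempt to dispense item 05
#   {"type": "inventory", "key": "abc123"}  -> send inventory unless the cached
#                                              database key matches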
def log_out():
account_manager.log_out()
print "Logged out."
try:
if money_sock != None:
money_sock.send("disable\n")
except socket.error:
print "[ERROR] failed to communicate with bill acceptor controller"
close_money()
# listen to money controller
def money_receiver():
global money_listener, money_sock
while money_thread.running: # main loop
print "Waiting for money controller"
money_sock, address = money_listener.accept() # wait for a connection
print "Money client connection from ", address
if account_manager.logged_in():
try:
money_sock.send("enable\n")
except:
print "[ERROR] failed to enable the bill acceptor"
while money_thread.running: # recieve loop
try:
message = money_sock.recv(500).rstrip() # wait for a message
if len(message) == 0: # disconnected
break
except: # connection error
break
try:
amount = int(message)
except ValueError:
print "Anomolous message from money client: " + message
continue
accept_money(amount)
#if the program is here, money client has disconnected
print "Money client disconnected"
money_sock = None
def accept_money(amount):
global money_sock, phone_sock
if account_manager.logged_in():
account_manager.deposit(amount)
print "Deposited $" + str(amount) + \
" into " + account_manager.username + "'s account." + \
" New balance: $" + str(account_manager.balance)
response = json.dumps({"type" : "balance update",
"balance" : account_manager.balance})
try:
phone_sock.send(response+"\n")
except:
print "[WARNING] failed to communicate with phone"
else: # this shouldn't happen, the bill acceptor is disabled while not logged in
        print str(amount) + " dollars inserted; ejecting because user not logged in"
try: # tell money client to return bill and disable the acceptor
money_sock.send("return\n")
money_sock.send("disable\n")
except:
print "[WARNING] failed to tell money client to return bills"
#listen to rfid scanner
def rfid_receiver():
global phone_sock, money_sock, rfid_serial, rfid_device, dispenser_device, \
rfid_listener, rfid_sock, print_relogin_message
while rfid_thread.running:
# a real rfid scanner
if settings.RFID_SCANNER == NORMAL:
# setup serial device
if settings.RFID_SCANNER_COMPORT: # if specified in settings, as it should be
print "Waiting for RFID scanner"
rfid_serial = get_serial(settings.RFID_SCANNER_COMPORT, 4,
baudrate = 2400)
rfid_device = settings.RFID_SCANNER_COMPORT
else: # hopefully not used
print "Scanning for RFID scanner"
while not rfid_serial:
for i in range(1, 10):
try:
device = serial.device(i)
if device != dispenser_device:
rfid_serial = serial.Serial(device)
rfid_device = device
break
except serial.SerialException:
continue
if rfid_serial.baudrate != 2400:
rfid_serial.close()
rfid_serial.baudrate = 2400
rfid_serial.open()
print "Connected to RFID scanner"
else: #emulated
print "Waiting for RFID scanner emulator"
rfid_sock, address = rfid_listener.accept()
print "RFID Scanner emulator client connected from ", address
while rfid_thread.running:
if settings.RFID_SCANNER == NORMAL:
try:
rfid_serial.flushInput()
rfid = rfid_serial.read(12).strip()
except serial.SerialException:
print "serial.SerialException"
print "exiting"
StopThreads()
break
else: # emulated
try:
rfid = rfid_sock.recv(500).strip()
if len(rfid) == 0:
break
except:
break
#handle rfid tag
if phone_sock:
print "In 'if phone_sock'"
if rfid == account_manager.rfid:
if print_relogin_message:
if account_manager.username == None:
print "Trying to log in as None"
else:
print "Already logged in as " + account_manager.username
print_relogin_message = False
continue
print "handle_rfid next line"
handle_rfid(rfid)
#else not connected to client
print "Disconnected from RFID scanner."
def make_item(vendId, price, quantity, name):
return {"vendId" : str(vendId).zfill(2),
"price" : str(price),
"quantity" : str(quantity),
"name" : sanitize(name)}
def send_inventory(key):
db_key = database.get_db_key()
if db_key != None and key != None and key == db_key:
phone_sock.send(json.dumps({"type" : "inventory",
"inventory" : {"key" : db_key}})+"\n")
else:
categories = list()
for item in database.get_items(order_by = "category"):
cat_name = sanitize(item[4])
if len(categories) == 0 or categories[-1]['name'] != cat_name:
categories.append({"name" : cat_name, "items" : list()})
categories[-1]['items'].append(make_item(*item[0:4]))
phone_sock.send(json.dumps({"type" : "inventory",
"inventory" : {"key" : db_key,
"categories" : categories}})+"\n")
# dispenser_controller does not communicate with the dispenser (dispenser_serial)
# it only connects and checks the connection.
# It is not run if settings.DISPENSER == EMULATE
def dispenser_controller():
global dispenser_serial, rfid_device, dispenser_device
while dispenser_thread.running:
if settings.DISPENSER_COMPORT:
print "Waiting for vending machine controller"
dispenser_serial = get_serial(settings.DISPENSER_COMPORT)
dispenser_device = settings.DISPENSER_COMPORT
else:
print "Looking for vending machine controller"
dispenser_serial = None
while not dispenser_serial:
for i in range(1, 10):
try:
device = serial.device(i)
if device != rfid_device:
dispenser_serial = serial.Serial(device)
dispenser_device = device
break
except serial.SerialException:
continue
print "Connected to vending machine controller"
while dispenser_thread.running:
try:
if len(dispenser_serial.read(512)) == 0:
break
except:
StopThreads()
break
time.sleep(3)
#buy_item actually communicates with dispenser controller
def buy_item(vendId):
global dispenser_serial, phone_sock
row = database.get_item(vendId)
if not row:
raise BadItem()
price, quantity, name, cat = row
if quantity < 1:
raise SoldOut()
account_manager.withdraw(price, "Vending machine purchase: " + name)
if account_manager.account_type > AccountManager.TEST:
database.vend_item(vendId)
# vend the item
print "Dispensing item " + vendId
if dispenser_serial:
dispenser_serial.write("I" + vendId)
def main():
    global account_manager, money_thread, phone_thread, rfid_thread, dispenser_thread
print "Starting server on %s." % HOST
account_manager = AccountManager()
database.connect()
money_thread = CloseableThread(target = money_receiver)
phone_thread = CloseableThread(target = phone_receiver)
rfid_thread = CloseableThread(target = rfid_receiver)
dispenser_thread = CloseableThread(target = dispenser_controller)
money_thread.start()
phone_thread.start()
rfid_thread.start()
if settings.DISPENSER == NORMAL:
dispenser_thread.start()
if __name__ == '__main__':
main()
atexit.register(exit_handler)
| zaquestion/vendttp | server/server.py | Python | gpl-2.0 | 15,737 |
import os, re, subprocess, shlex, sys, yaml
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import pandas as pd
from astropy.io import fits
import cosmics
#from pisco_lib import *
# edited 5/9/17
"""
pisco_combine: run pisco pipeline to reduce the raw data to clean data with correct WCS
The pipeline is a combination of LA Cosmics, Astrometry, Sextractor, SCAMP and SWARP.
ARGUMENTS:
1. raw directory (e.g., 'ut170103/')
2. fieldname for object (e.g., 'Field027')
Examples: python pisco_pipeline/pisco_combine.py data/ Field026
python pisco_pipeline/pisco_combine.py ut170619/ Field292 'twilight'
python pisco_pipeline/pisco_combine.py ut171209/ CHIPS0152-5028 'twilight'
python pisco_pipeline/pisco_combine.py ut171209/ CHIPS1011-0505 'twilight'
"""
# add twilight option to use twilight flats
# add look up seeing (FWHM) from the header
# scamp_v2 for doing scamp for g-r and i-z simultaneously, instead of doing all 4 bands simultaneously
# edit 2/13/17: added yaml parameter to control all the sextractor parameters
def filter_name(index):
"""
filter_name: turn index [1,8] into letter band (g,r,i,z) for PISCO quadrant data
INPUT:
- index: number
OUTPUT:
    - a pair of letters for the corresponding band and dome band
"""
if index == 1 or index == 2:
filter_name = 'g'
dome_name = 'g'
elif index == 3 or index == 4:
filter_name = 'r'
dome_name = 'r'
else:
dome_name = 'iz'
if index == 5 or index == 6:
filter_name = 'i'
elif index == 7 or index == 8:
filter_name = 'z'
return [filter_name, dome_name]
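# Examples: filter_name(3) == ['r', 'r']; filter_name(7) == ['z', 'iz']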
def list_file_name(dir, name, end=0):
"""
list_file_name: list all filename which started with 'name' and end with
'end' in 'dir' directory
INPUT:
- dir: directory to search in
- name: begining of the file name
- end: ending of the file name
OUTPUT:
- list of all filename in that directory
"""
names = []
for file in os.listdir(dir):
if file.startswith(name):
if end == 0:
names.append(os.path.join(dir, file))
else:
if file.endswith(end):
names.append(os.path.join(dir, file))
if len(names) == 0:
print 'Cannot find the files'
return names
def list_file_name_seeing(dir, name, end=0, startdir=0):
names = []
for root, dirs, files in os.walk(dir):
for file in files:
if file.startswith(name):
if end == 0:
if startdir == 0:
names.append(os.path.join(root, file))
else:
if root.split('/')[-1][:2] == startdir:
names.append(os.path.join(root, file))
else:
if file.endswith(end):
if startdir == 0:
names.append(os.path.join(root, file))
else:
if root.split('/')[-1][:2] == startdir:
names.append(os.path.join(root, file))
if len(names) == 0:
print 'Cannot find the files'
return names
def open_files(names, index, bias=np.array([]), twilight=False):
"""
open_files: use to open multiple bias or domeflat files at once and take the mean to
to get the average bias/domeflat file for image reduction
bias: take the mean of all bias files
domeflat: subtracted by average 'bias' (also calcualted from open_files) before take the mean
INPUT:
- name: starting name of the bias/domeflat files (output from 'list_file_name')
- index: extension of the fits file to read in (8 extension of PISCO - two each for different bands)
- (optional) bias: average 2D bias image (required to calculate domeflat correctly)
OUTPUT:
- 2D array of average bias/domeflat images
"""
ch_bs = []
for name in names:
hdulist = fits.open(name)
ch_b = hdulist[index].data
if len(bias) == 0:
ch_bs.append(ch_b) # for bias to combine as a mean
else:
# for domeflat-bias before combine into a mean
ch_bs.append(ch_b - bias)
if twilight == True:
print 'working on twlight flat'
return np.median(np.array(ch_bs), axis=0)
else:
return np.mean(np.array(ch_bs), axis=0)
def plot_one_chip(ax, data, vmin, vmax):
"""
plot_one_chip: plot 2D array 'data' on 'ax' with Normalize scale 'vmin' and 'vmax'
INPUT:
- ax: ax from fig,ax=plt.subplots(...)
- data: 2D array data to be plot
- vmin: normalize scale for the minimum
- vmax: normalize scale for the maximum
OUTPUT:
- plot of the data at a specified normalize scale
"""
norm = matplotlib.colors.Normalize(vmin=vmin, vmax=vmax)
c_m = matplotlib.cm.bwr_r
s_m = matplotlib.cm.ScalarMappable(cmap=c_m, norm=norm)
s_m.set_array([])
ax.imshow(data, cmap=c_m, norm=norm)
ax.axes.get_xaxis().set_visible(False)
ax.axes.get_yaxis().set_visible(False)
def save_fits(index, dir, outdir, fieldname, final_image, name):
"""
save_fits: save the fits file from 2D array 'final_image' with a known header from the raw PISCO data
'fieldname' (changed the size to accompany the attachment of two amplifiers) with the output 'name'
in 'reduced/' directory
INPUT:
- index: specific the band (g, r, i, z) that we want to save on.
- dir: input directory for raw PISCO data
- fieldname: starting of the name for the raw PISCO data (e.g., 'Field027_B_73')
- final_image: 2D array of image that we want to save to the fits file
- name: output name of the fits file in 'reduced/' directory
OUTPUT:
- fits file in 'reduced/' directory
"""
ch1_name = list_file_name(dir, fieldname)
hdulist = fits.open(ch1_name[0])
hdu0 = hdulist[0]
hdu0.header['NAXIS'] = 2
hdulist[index].header['NAXIS1'] = '1546'
hdulist[index].header['DATASEC'] = '[1:1546,1:3092]'
hdulist[index].header['TRIMSEC'] = '[1:1546,1:3092]'
hdulist[index].header['ORIGSEC'] = '[1:1546,1:3092]'
hdulist[index].header['CCDSEC'] = '[1:1546,3093:6184]'
hdulist[index].header['DETSEC'] = '[1:1546,3093:6184]'
hdu1 = fits.ImageHDU(final_image, name='filter ' +
filter_name(index)[0], header=hdulist[index].header) #*1000 (edit: add 1000)
hdu_l = fits.HDUList(hdus=[hdu0, hdu1])
# if not os.path.exists(outdir):
# os.makedirs(outdir)
outname = os.path.join(outdir, name)
print 'saving the fits file ' + outname
hdu_l.writeto(outname, overwrite=True)
data, header = fits.getdata(outname, header=True)
fits.writeto(outname, data, header, overwrite=True)
def reduce_data(dir, index, fieldname, flat='domeflat'):
"""
reduce_data: combine raw PISCO data with bias and domeflat to create 2D array of output image
using function list_file_name, open_files
INPUT:
- dir: directory for the raw PISCO data
- index: index for the band of the image that we want to reduce
- fieldname: the begining of the file name (e.g., 'Field027_B_73')
- (extra) cut: -27 is the number of pixel needed to be cut out for the gap in the image
OUTPUT:
- ch1: 2D array of raw input image
- bias: 2D array for the bias image
- domeflat: 2D array for the domeflat image
- img: 2D array of the output image after subtraction of bias and normalization with domeflat
"""
print fieldname[0:5]
if (fieldname[0:5] == 'Field') or (fieldname[0:3] == 'PKS') or (fieldname[0:4] == 'SDSS'):
cut = -27
elif fieldname[0:5] == 'CHIPS':
cut = -32
else:
        print 'there is no fieldname for that: ' + fieldname
print cut
ch1_name = list_file_name(dir, fieldname)
print 'working on %s with the index=%i' % (ch1_name[0], index)
hdulist = fits.open(ch1_name[0])
ch1 = hdulist[index].data
bias_names = list_file_name(dir, 'Bias_')
if flat == 'domeflat':
domeflat_names = list_file_name(dir, "domeflat" + filter_name(index)[1])
if flat == 'twilight':
if (fieldname[0:5] == 'Field') or (fieldname[0:3] == 'PKS') or (fieldname[0:4] == 'SDSS'):
domeflat_names = list_file_name(dir, "twiflat_")
elif fieldname[0:5] == 'CHIPS':
domeflat_names = list_file_name(dir, "twiflats_")
        else:
            raise ValueError('there is no twilight flat rule for fieldname ' + fieldname)
bias = open_files(bias_names, index)
if flat == 'domeflat':
domeflat = open_files(domeflat_names, index, bias=bias, twilight=False)
elif flat == 'twilight':
domeflat = open_files(domeflat_names, index, bias=bias, twilight=True)
    domeflat[domeflat == 0] = 1e-4  # avoid division by zero when normalizing by the flat
# if index in [1,2,3,4]:
# mean=np.median(domeflat[350:2550, 10:-10])
# elif index in [5,6,7,8]:
# mean=np.median(domeflat[650:2800, 10:-10])
# domeflat=domeflat/mean
# if index in [1,2]:
# img = (ch1 - bias) / (domeflat * np.median(ch1[350:2550, 20:-20] - bias) / np.median(
# domeflat[350:2550, 20:-20])) # normalized flat file for g,z band
# elif index in [3,4,5,6,7,8]:
# img = (ch1 - bias) / (domeflat)
# for Field292
# if fieldname=='Field292':
# if index in [3,4,5,6]:
# img = (ch1 - bias) / (domeflat)
# else:
# img = (ch1 - bias) / (domeflat * np.median(ch1 - bias) / np.median(domeflat)) # normalized flat file for g,z band
# else:
# img = (ch1 - bias) / (domeflat * np.median(ch1 - bias) / np.median(domeflat))
# img = (ch1 - bias) / (domeflat * np.median(ch1 - bias) / np.median(domeflat))
img = (ch1 - bias) / (domeflat / np.median(domeflat))
print ch1.shape, bias.shape, domeflat.shape, np.median(ch1), np.median(bias), np.median(domeflat),\
np.median(domeflat / np.median(domeflat)), np.median(img)
ch1, bias, domeflat, img = ch1[:, :cut], bias[:, :cut], domeflat[:, :cut], img[:, :cut]
if index % 2 == 0:
return np.fliplr(ch1), np.fliplr(bias), np.fliplr(domeflat), np.fliplr(img)
else:
return ch1, bias, domeflat, img
def cosmic_reduce(dir, field, band):
"""
cosmic_reduce: read the FITS file and use L.A. Cosmic (http://www.astro.yale.edu/dokkum/lacosmic/)
to remove cosmic rays in the images
INPUT:
- dir: directory input of the combine images ('reduced/')
- field: beginning of the file name (e.g., 'Field027_A_72')
- band: {'g','r','i','z'} band
PARAMETERS for LA Cosmic:
- gain and readnoise are the property from the telescope (PISCO: gain 4 ADU/e, readnoise 3 e -Brian[3/27/17])
- satlevel: identify saturated level for bright stars
- sigclip, sigfrac, objlim
OUTPUT:
- nField..._g.fits: not clean data (original with a mask cut)
- cField..._g.fits: clean version, removed cosmic ray
- mField..._g.fits: masked file to remove cosmic ray
"""
if not os.path.isfile(os.path.join(dir, 'cosmics', 'c' + field + '_' + band + '.fits')):
print 'working on the cosmic ' + 'c' + field + '_' + band
array, header = cosmics.fromfits(
os.path.join(dir, field + '_' + band + '.fits'))
print os.path.join(dir, field + '_' + band + '.fits')
        # cutting the circular aperture of the image out to only keep good pixels
        # in the center
file_dir = find_fits_dir(field.split('_')[0])[-9:]
if (file_dir == 'ut170103/') | (file_dir == 'ut170104/'):
satfield = '_field'
elif (file_dir == 'ut170619/') | (file_dir == 'ut170621/') | (file_dir == 'ut170624/'):
satfield = '_field2'
else:
satfield = ''
# if field[0:5] == 'Field':
# satfield='_field2'
# else:
# satfield=''
with open("pisco_pipeline/params.yaml", 'r') as stream:
try:
                param = yaml.safe_load(stream)  # safe_load avoids constructing arbitrary objects
except yaml.YAMLError as exc:
print(exc)
# param['satur_level%s_%s'%(satfield,band)]
if band == 'g':
array_c = array[20:-20, 350:2550] # [20:-20,350:2550]
satlv = param['satur_level%s_%s'%(satfield,band)] #2000.0
elif band == 'r':
array_c = array[30:-20, 400:2550] # [20:-20,350:2550]
satlv = param['satur_level%s_%s'%(satfield,band)]# 1250.0
elif band == 'i':
array_c = array[20:-40, 650:2800] # [20:-20,650:2800]
satlv = param['satur_level%s_%s'%(satfield,band)] #600.0
        elif band == 'z':
            array_c = array[20:-20, 650:2750]  # [20:-20,650:2800]
            satlv = param['satur_level%s_%s' % (satfield, band)]  # 1500.0
        else:
            raise ValueError('unknown band: ' + band)
# c = cosmics.cosmicsimage(array_c, gain=4.0, readnoise=3.0, sigclip=2.5, sigfrac=0.5,
# objlim=5.0, satlevel=satlv, verbose=False)
# c = cosmics.cosmicsimage(array_c, gain=0.25, readnoise=3.0, sigclip=4.0, sigfrac=0.4,
# objlim=5.0, satlevel=satlv, verbose=False)
c = cosmics.cosmicsimage(array_c, gain=0.25, readnoise=3.0, sigclip=6.0, sigfrac=0.4,
objlim=5.0, satlevel=satlv, verbose=False) # sigclip=8.0, sigfrac=0.4
#sigclip 4.5, sigfrac=0.5, objlim=2, niter=4
#IMAC: sigclip: 6, niter=4, objlim=5.0, sigfrac=0.3, (gain/readnoise from PISCO)
c.run(maxiter=4)
cosmics.tofits(os.path.join(dir, 'cosmics', 'c' + field + '_' + band + '.fits'), c.cleanarray, header)
cosmics.tofits(os.path.join(dir, 'cosmics', 'm' + field + '_' + band + '.fits'), c.mask, header)
cosmics.tofits(os.path.join(dir, 'cosmics', 'n' + field + '_' + band + '.fits'), array_c, header)
else:
print 'already did the cosmic with this band ' + band
def ra_dec(name):
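    """Parse approximate sky coordinates (in degrees) from a field name.

    A minimal sketch of the assumed naming scheme: characters 6-15 encode
    'HHMM+DDMM' (e.g. a name like 'cCHIPS0005-2349'), so RA = HH*15 + MM/4
    degrees and Dec = DD +/- MM/60 degrees, with the sign taken from the name.
    """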
ra = float(name[6:8]) * 15 + float(name[8:10]) / 4.
if name[10] == '-':
dec = float(name[10:13]) - float(name[13:15]) / 60.
else:
dec = float(name[10:13]) + float(name[13:15]) / 60.
return ra, dec
def ra_dec_field(field):
de = pd.read_csv(
'/Users/taweewat/Documents/red_sequence/field_chips_all_obj.csv')
ra = de[de.name == field[1:9]].RA0.values[0]
dec = de[de.name == field[1:9]].DEC0.values[0]
return ra, dec
def ra_dec_sdss(field):
de = pd.read_csv(
'/Users/taweewat/Documents/xray_project/ned-result/final_sdss_cut5.csv', index_col=0)
ra = de[de.name == field[1:8]].RA.values[0]
dec = de[de.name == field[1:8]].DEC.values[0]
return ra, dec
def astrometry_solve(cosmicdir, field, outdir):
"""
    astrometry_solve: apply the astrometry.net algorithm to find celestial coordinates (WCS) for the image
    REQUIRE:
    - appropriate index files in '/usr/local/astrometry/data/' so that Astrometry has a large enough patch of
    the sky to search for the position
    INPUT:
    - cosmicdir: input directory for cosmic-ray subtracted fits files ('reduced/cosmics/')
    - field: beginning of the file name after cosmic-ray subtraction for each band and each exposure
(e.g. 'cField027_B_73_z')
- outdir: output directory for these outputs ('wcs/')
OUTPUT:
- wcs/.wcs file: for the header with appropriate coordinate
- new_fits/..._new.fits: updated fits file with new wcs information in 'new_fits' directory
"""
# if not os.path.exists(outdir):
# os.makedirs(os.path.join(outdir))
if not os.path.isfile(os.path.join(outdir, field + '.wcs')):
print field, field[0:6]
if field[0:6] == 'cCHIPS':
ra, dec = ra_dec(field)
elif field[0:4] == 'cPKS':
ra=209.0225
dec=-34.3530556
elif field[0:5] == 'cSDSS':
ra, dec = ra_dec_sdss(field)
else:
ra, dec = ra_dec_field(field)
# cmd = 'solve-field %s --downsample 2 --overwrite --scale-unit arcsecperpix --scale-low 0.08 --scale-high 0.3 --dir %s' \
# % (os.path.join(cosmicdir, field + '.fits'), outdir)
cmd = 'solve-field %s --downsample 2 --overwrite --scale-unit arcsecperpix --scale-low 0.08 --scale-high 0.3 --dir %s --ra %s --dec %s --radius 2' \
% (os.path.join(cosmicdir, field + '.fits'), outdir, str(ra), str(dec))
print cmd
sub = subprocess.check_call(shlex.split(cmd))
if sub == 0:
print 'finish solve-field and updating fits headers'
else:
print 'solve-field does not work.'
else:
print 'already have ' + field + '.wcs'
orig = fits.open(os.path.join(cosmicdir, field + '.fits'))
wcs_file = fits.open(os.path.join(outdir, field + '.wcs'))
header = wcs_file[0].header
wcsaxes_index = np.where(np.array(header.keys()) == 'WCSAXES')[0][0]
for i in range(wcsaxes_index, len(header)):
orig[0].header[header.keys()[i]] = header.values()[i]
orig.writeto(os.path.join('new_fits', field + '_new.fits'), overwrite=True)
def find_fits_dir(field):
home = '/Users/taweewat/Documents/pisco_code/'
dirs = ['ut170103/', 'ut170104/', 'ut170619/', 'ut170621/',
'ut170624/', 'ut171208/', 'ut171209/', 'ut171212/']
    myReg = re.compile(r'(%s_A).*' % field)
    allfilename = None
    for di in dirs:
        diri = home + di
        for text in os.listdir(diri):
            if myReg.search(text) is not None:
                allfilename = diri  # keep the last matching directory
    return allfilename
def sextracting(field, band):
"""
    sextracting: run SExtractor to find all the point sources in .ldac.fits format (suitable for SCAMP input)
    INPUT:
    - config.sex: SExtractor config file
    - field: beginning of the file name for each band and each exposure (e.g. 'cSDSS123_B_64_r')
    OUTPUT:
    - new_fits/..._new.ldac.fits: source catalog of all the point sources from SExtractor
"""
with open("pisco_pipeline/params.yaml", 'r') as stream:
try:
            param = yaml.safe_load(stream)  # safe_load avoids constructing arbitrary objects
except yaml.YAMLError as exc:
print(exc)
file_dir = find_fits_dir(field[1:].split('_')[0])[-9:]
print 'directory:', file_dir
if (file_dir == 'ut170103/') | (file_dir == 'ut170104/'):
satfield = '_field'
elif (file_dir == 'ut170619/') | (file_dir == 'ut170621/') | (file_dir == 'ut170624/'):
satfield = '_field2'
else:
satfield = ''
# if field[0:6] == 'cField':
# satfield = '_field2'
# else:
# satfield = ''
fieldname = field.split('_')[0][1:] + '_' + field.split('_')[1]
seeing = float(fits.open(list_file_name_seeing(
'/Users/taweewat/Documents/pisco_code/', fieldname, startdir='ut')[0])[0].header['FWHM1'])
# seeing=1.14
pxscale=0.22
cmd = 'sex %s -c pisco_pipeline/config.sex -CATALOG_NAME %s -SEEING_FWHM %s -SATUR_LEVEL %s -CHECKIMAGE_NAME %s,%s -PIXEL_SCALE %s' % \
(os.path.join('new_fits', field + '_new.fits'),
os.path.join('new_fits', field + '_new.ldac.fits'), str(seeing), str(param['satur_level%s_%s'%(satfield,band)]),'check_%s.fits'%(band),'segment_%s.fits'%(band),str(pxscale))
print cmd
subprocess.check_call(shlex.split(cmd))
cmd = 'sex %s -c pisco_pipeline/config.sex -CATALOG_NAME %s -CATALOG_TYPE ASCII -SEEING_FWHM %s -SATUR_LEVEL %s -CHECKIMAGE_NAME %s,%s -PIXEL_SCALE %s' % \
(os.path.join('new_fits', field + '_new.fits'),
os.path.join('new_fits', 'tmp_%s.cat' % band), str(seeing), str(param['satur_level%s_%s'%(satfield, band)]), 'check_%s.fits'%(band), 'segment_%s.fits'%(band),str(pxscale))
print cmd
subprocess.check_call(shlex.split(cmd))
name = ['NUMBER', 'EXT_NUMBER', 'XWIN_WORLD', 'YWIN_WORLD', 'MAG_AUTO', 'MAGERR_AUTO', 'MAG_APER', 'MAGERR_APER', 'XWIN_IMAGE',
'YWIN_IMAGE', 'ERRAWIN_IMAGE', 'ERRBWIN_IMAGE', 'ERRTHETAWIN_IMAGE', 'FLUX_AUTO', 'FLUXERR_AUTO', 'FLAGS',
            'FLUX_RADIUS', 'CLASS_STAR', 'ALPHA_J2000', 'DELTA_J2000']
df0 = pd.read_csv(os.path.join('new_fits', 'tmp_%s.cat' %
band), delim_whitespace=True, names=name)
hdu = fits.open(os.path.join('new_fits', field + '_new.ldac.fits'))
print 'number of total stars (objects) found', df0.shape[0]
# df0 = df0[(df0['FLUX_AUTO'] < float(param['flux_auto_%s'%band])).values]
df0=df0[(df0['FLAGS']<5).values]
# df0=df0[(df0['FLAGS']<5).values & (df0['CLASS_STAR']>0.95).values] #edit: Field292, CHIPS2223-3455
# df0=df0[(df0['FLAGS']<5).values & (df0['CLASS_STAR']>0.90).values] #edit: SDSS501
    print 'number of sources kept (FLAGS<5) for the SExtractor catalog', len(np.array(df0.index))
hdu[2].data = hdu[2].data[np.array(df0.index)]
hdu.writeto(os.path.join('new_fits', field +
'_new.ldac.fits'), overwrite=True)
# if band == 'i':
# # ploting PNG for viewing purpose
# img = fits.open('segment_%s.fits' % band)[0].data
# cmap = plt.cm.spring
# cmap.set_bad(color='black')
# img0 = np.ma.masked_where(img < 0.05, img)
# plt.imshow(img0, origin='lower', cmap=cmap, interpolation='none')
# plt.savefig('plt_bf_%s_%s.png' % (field, band))
# img[~np.in1d(img, df0['NUMBER']).reshape(img.shape)] = 0
# img = np.ma.masked_where(img < 0.05, img)
# plt.imshow(img, origin='lower', cmap=cmap, interpolation='none')
# plt.savefig('plt_%s_%s.png' % (field, band))
def scamp(fieldname): # , band):
"""
    scamp: run SCAMP to refine the coordinate alignment after Astrometry, modelling distortions.
(need to run all exposure and all filters at once to get the best performance)
INPUT:
- config.scamp: SCAMP config file
    - fieldname: beginning of the file name (e.g., 'cField027')
OUTPUT:
- new_fits/...ldac.head: SCAMP output which includes new celestial coordinates for fixing WCS
"""
cmd = 'scamp %s -c pisco_pipeline/config.scamp' % ' '.join(
list_file_name('new_fits', fieldname, end='_new.ldac.fits')) # % band))
print cmd
subprocess.check_call(shlex.split(cmd))
def scamp_v2(fieldname): # , band):
"""
    scamp_v2: run SCAMP to refine the coordinate alignment after Astrometry, modelling distortions.
    (solves the g/r and i/z exposures in two separate passes)
INPUT:
- config.scamp: SCAMP config file
    - fieldname: beginning of the file name (e.g., 'cField027')
OUTPUT:
- new_fits/...ldac.head: SCAMP output which includes new celestial coordinates for fixing WCS
"""
cmd = 'scamp %s -c pisco_pipeline/config.scamp' % ' '.join(
list_file_name('new_fits', fieldname, end='g_new.ldac.fits') + list_file_name('new_fits', fieldname, end='r_new.ldac.fits')) # % band))
print cmd
subprocess.check_call(shlex.split(cmd))
cmd = 'scamp %s -c pisco_pipeline/config.scamp' % ' '.join(
list_file_name('new_fits', fieldname, end='i_new.ldac.fits') + list_file_name('new_fits', fieldname, end='z_new.ldac.fits')) # % band))
print cmd
subprocess.check_call(shlex.split(cmd))
def swarp(fieldname):
"""
swarp: run SWARP to combine multiple exposure into a better image with SCAMP output to help correct the location
INPUT:
- config.swarp: SWARP config file
    - fieldname: beginning of the file name (e.g., 'cField027')
OUTPUT:
- final/coadd_'fieldname'_'g'.fits: final image for each 'band' with corrected WCS
"""
bands = ['g', 'r', 'i', 'z']
print 'Swarping...'
for band in bands:
coadd_files = list_file_name(
'new_fits', fieldname, end=band + '_new.fits')
cmd = 'swarp %s -c pisco_pipeline/config.swarp -IMAGEOUT_NAME %s' %\
(' '.join(coadd_files), os.path.join(
'final', 'coadd_' + fieldname + '_' + band + '.fits'))
print cmd
subprocess.check_call(shlex.split(cmd))
def save_rgb_image(field):
cmd = "ds9 -zscale -rgb -red final/coadd_c%s_i.fits -green final/coadd_c%s_r.fits -blue final/coadd_c%s_g.fits -zoom to fit -saveimage final/img%s.eps -exit" % \
(field, field, field, field) # -exit
print cmd
subprocess.check_call(shlex.split(cmd))
print 'finished saving final/img%s.eps' % field
def purge(dir, pattern):
for f in os.listdir(dir):
if re.search(pattern, f):
print 'remove', f
os.remove(os.path.join(dir, f))
def run_bash_code(cmd):
print cmd
try:
subprocess.check_output(shlex.split(cmd))
except subprocess.CalledProcessError as e:
print e
# --------
if __name__ == "__main__":
print 'Number of arguments:', len(sys.argv), 'arguments.'
print 'Argument List:', str(sys.argv)
# Pipeline to run PISCO reduction data
dir = str(sys.argv[1])
fieldname = str(sys.argv[2])
outdir = 'wcs'
reducedir = 'reduced'
cosmicdir = os.path.join(reducedir, 'cosmics')
if len(sys.argv) > 3:
flattype = str(sys.argv[3])
else:
flattype = 'domeflat'
if not os.path.exists(outdir):
os.makedirs(os.path.join(outdir))
if not os.path.exists(reducedir):
os.makedirs(reducedir)
if not os.path.exists(cosmicdir):
os.makedirs(cosmicdir)
if not os.path.exists('new_fits'):
os.makedirs(os.path.join('new_fits'))
if not os.path.exists('final'):
os.makedirs('final')
fields = [name.split('/')[-1].split('.')[0]
for name in list_file_name(dir, fieldname)]
print 'All fields:', fields
    ### Combine the two amplifiers, then apply bias subtraction and flat fielding
for field in fields:
for index in [1, 3, 5, 7]:
ch1, bias1, domeflat1, img1 = reduce_data(
dir, index, field, flat=flattype)
ch2, bias2, domeflat2, img2 = reduce_data(
dir, index + 1, field, flat=flattype)
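        # scale amplifier 2 to amplifier 1 using the columns on either side
        # of the seam before stitching the two halves together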
final_image = np.concatenate(
(img1, img2 * np.median(img1[:,750:]) / np.median(img2[:,:30])), axis=1)
save_fits(index, dir, reducedir, field, final_image,
"%s_%s.fits" % (field, filter_name(index)[0]))
### Cosmic ray reduction using L.A. Cosmic
bands = ['g', 'r', 'i', 'z']
for field in fields:
for band in bands:
cosmic_reduce(reducedir, field, band)
cfieldname = 'c' + fieldname
print 'number of files in %s is %i' % (cosmicdir, len(list_file_name(cosmicdir, cfieldname)))
os.system('rm plt*')
print 'All_cosmic_file:', list_file_name(cosmicdir, cfieldname)
for field_long in list_file_name(cosmicdir, cfieldname):
field = field_long.split('/')[2].split('.')[0]
print 'Field', field
# Astrometry to get a rough estimate on the World Coordinate System
# (WCS) for each images
astrometry_solve(cosmicdir, field, outdir)
# Sextracting
band = field.split('_')[3]
print band
sextracting(field, band[0])
if fieldname == 'Field173':
cmd = 'rm new_fits/cField173_A_100_i_new.fits'
run_bash_code(cmd)
# # SCAMP
# # for band in bands:
# scamp_v2(cfieldname)
scamp(cfieldname)
# # SWARP
swarp(cfieldname)
# # save eps file for RGB image
save_rgb_image(fieldname)
# purge('reduced', "%s_.*\.fits" % fieldname)
# purge('new_fits', "c%s_.*\.fits" % fieldname)
# purge('wcs', "c%s_.*\.new" % fieldname)
# purge('reduced/cosmics', "n%s_.*\.fits" % fieldname)
| leogulus/pisco_pipeline | pisco_combine.py | Python | mit | 27,489 |
# convenience functions that remove the boilerplate of connecting to the corpus
# from multi.celery_app import set_config
import multi.celery_app as celery_app
import db.corpora as cp
from proc.general_utils import getRootDir
def ez_connect(corpus="AAC", es_config=None):
"""
Simplifies connecting to the Corpus
:param corpus:
:return: corpus instance
"""
# global MINERVA_ELASTICSEARCH_ENDPOINT
root_dir = ""
if corpus == "AAC":
root_dir = getRootDir("aac")
elif corpus == "PMC_CSC":
root_dir = getRootDir("pmc_coresc")
elif corpus is None:
root_dir = ""
else:
raise ValueError("Unknown corpus")
cp.useElasticCorpus()
if es_config:
celery_app.MINERVA_ELASTICSEARCH_ENDPOINT = celery_app.set_config(es_config)
cp.Corpus.connectCorpus(root_dir, endpoint=celery_app.MINERVA_ELASTICSEARCH_ENDPOINT)
if corpus:
cp.Corpus.setCorpusFilter(corpus)
return cp.Corpus
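# Example usage (a minimal sketch; the es_config dictionary shown here is
# hypothetical and depends on the deployment):
#     corpus = ez_connect("AAC")
#     corpus = ez_connect("PMC_CSC", es_config={"host": "localhost", "port": 9200})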
| danieldmm/minerva | db/ez_connect.py | Python | gpl-3.0 | 957 |
"""
This file was generated with the custommenu management command, it contains
the classes for the admin menu, you can customize this class as you want.
To activate your custom menu add the following to your settings.py::
ADMIN_TOOLS_MENU = 'demo_admin_tools_zinnia.menu.CustomMenu'
"""
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from admin_tools.menu import Menu
from admin_tools.menu import items
class CustomMenu(Menu):
"""
Custom Menu for demo_admin_tools_zinnia admin site.
"""
def __init__(self, **kwargs):
Menu.__init__(self, **kwargs)
self.children += [
items.MenuItem(_('Dashboard'), reverse('admin:index')),
items.Bookmarks(),
items.AppList(
_('Weblog'),
models=('zinnia.*', 'tagging.*',
'django.contrib.comments.*')
),
items.AppList(
_('Administration'),
models=('django.contrib.*',),
exclude=('django.contrib.comments.*',)
)
]
def init_with_context(self, context):
"""
Use this method if you need to access the request context.
"""
return super(CustomMenu, self).init_with_context(context)
| django-blog-zinnia/admin-tools-zinnia | demo_admin_tools_zinnia/menu.py | Python | bsd-3-clause | 1,323 |
#!/usr/bin/env python
#
# WebExtract v1.0
# A web-based utility to extract archive files for unix systems.
#
# @link http://github.com/farhadi/webextract
# @copyright Copyright 2010, Ali Farhadi (http://farhadi.ir/)
# @license GNU General Public License 3.0 (http://www.gnu.org/licenses/gpl.html)
#
import time, re, subprocess, urllib, os, crypt, pwd, spwd, base64, sys
from daemon import Daemon
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class WebExtractHandler(BaseHTTPRequestHandler):
def render(self, **data):
data = dict({
'status': 200,
'title': '',
'user': '',
'content': '',
'image': False
}, **data)
self.send_response(data['status'])
if data['image']:
self.send_header('Content-type', 'image/png')
self.send_header('Expires', time.strftime(
'%a, %d %b %Y %H:%M:%S GMT',
time.gmtime(time.time() + 315360000)
))
self.send_header('Cache-Control', 'max-age=315360000')
self.end_headers()
f = open(os.path.dirname(sys.argv[0]) + os.sep + data['image'])
self.wfile.write(f.read())
f.close()
return
self.send_header('Content-type', 'text/html')
if data['status'] == 401:
self.send_header(
'WWW-Authenticate',
'Basic realm="Enter your FTP username and password:"'
)
self.end_headers()
f = open(os.path.dirname(sys.argv[0]) + '/template.html')
template = f.read()
f.close()
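        # substitute {key} placeholders in the template with values from
        # 'data', e.g. '{title}' becomes data['title'] (a minimal
        # mustache-style scheme; placeholders not in 'data' are left untouched)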
pattern = "|".join(['\{' + key + '\}' for key in data.keys()])
template = re.sub(pattern, lambda m: str(data[m.group()[1:-1]]), template)
self.wfile.write(template)
def auth(self):
try:
auth = self.headers.getheader('Authorization')
if auth.startswith('Basic '):
auth = base64.b64decode(auth[6:])
username, password = auth.split(':', 1)
if username != 'root':
hashpass = spwd.getspnam(username)[1]
if crypt.crypt(password, hashpass) == hashpass:
return username
except:
pass
self.render(
status=401,
title='Access Denied',
content='Incorrect username or password. Please try again.'
)
return False
def do_GET(self):
try:
if self.path == '/?logout':
self.render(status=401, title='Logout', content='You are now logged out.')
return
if (
re.match('^/\?images/\w+\.png$', self.path) and
os.path.exists(os.path.dirname(sys.argv[0]) + os.sep + self.path[2:])
):
self.render(image=self.path[2:])
return
username = self.auth()
if username:
homefolder = pwd.getpwnam(username)[5]
path = os.path.abspath(homefolder + urllib.unquote_plus(self.path))
data = {'user': 'Welcome <b>%s</b>! <a href="/?logout">Logout</a>' % username}
if not os.path.exists(path) or not path.startswith(homefolder):
self.render(
status=404,
title='Not Found',
content='File Not Found: %s' % urllib.unquote_plus(self.path),
**data
)
return
if os.path.isdir(path):
path += os.sep
if not self.path.endswith(os.sep):
self.path += os.sep
data['content'] = ''
if self.path != os.sep:
data['content'] += \
'<a class="up" href="%s..">Up to higher level directory</a>' % self.path
files = []
dirs = []
for file in os.listdir(path):
try:
mtime = time.strftime(
'%Y/%m/%d %H:%M:%S',
time.localtime(os.path.getmtime(path + file))
)
except:
mtime = ''
if os.path.isdir(path + file):
dirs.append(
('<tr><td class="name"><a class="dir" href="%s/">%s</a></td>' +
'<td></td><td class="mtime">%s</td></tr>') %
(self.path + urllib.quote_plus(file), file, mtime)
)
else:
try:
size = os.path.getsize(path + file)
for x in ['bytes', 'KB', 'MB', 'GB', 'TB']:
if size < 1024.0:
size = "%3.1f" % size
if size.endswith('.0'):
size = size[:-2]
size += ' ' + x;
break
size /= 1024.0
except:
size = ''
if file.endswith(('.zip', '.rar', '.tar', '.tar.gz', '.tgz')):
extract = ' (<a href="%s">extract</a>)' % \
(self.path + urllib.quote_plus(file))
type = 'archive'
else:
extract = ''
type = 'file'
files.append(
('<tr><td class="name"><span class="%s">%s</span>%s</td>' +
'<td class="size">%s</td><td class="mtime">%s</td></tr>') %
(type, file, extract, size, mtime)
)
files = [(x.lower(), x) for x in files]
files.sort()
files = [x[1] for x in files]
dirs = [(x.lower(), x) for x in dirs]
dirs.sort()
dirs = [x[1] for x in dirs]
data['content'] += \
('<table><thead><tr><th class="name">Name</th><th class="size">Size</th>' + \
'<th class="mtime">Last Modified</th></tr></thead><tbody>%s</tbody>' + \
'</table>') % (''.join(dirs) + ''.join(files))
self.render(title=urllib.unquote_plus(self.path), **data)
elif path.endswith(('.zip', '.rar', '.tar', '.tar.gz', '.tgz')):
if path.endswith(('.tar.gz', '.tgz')):
cmd = ['sudo', '-u', username, 'tar', 'xvfz', path]
elif path.endswith('.tar'):
cmd = ['sudo', '-u', username, 'tar', 'xvf', path]
elif path.endswith('.rar'):
cmd = ['sudo', '-u', username, 'unrar', 'x', '-o+', path]
elif path.endswith('.zip'):
cmd = ['sudo', '-u', username, 'unzip', '-o', path]
os.chdir(os.path.dirname(path))
res = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self.render(
title='Extracting ' + urllib.unquote_plus(self.path),
content=('<a class="up" href="%s">Back to parent directory</a>' +
'<pre>%s</pre><b>Finished.</b>') %
(os.path.dirname(self.path), res.stdout.read() + res.stderr.read()),
**data
)
else:
self.render(title='Error', content='File format not supported.', **data)
return
except:
self.render(
status=500,
title='Internal Server Error',
content='An internal error has occurred. Please contact your hosting provider.'
)
class WebExtractDaemon(Daemon):
def run(self):
if len(sys.argv) == 3:
port = sys.argv[2]
else:
port = 2121
server = HTTPServer(('', int(port)), WebExtractHandler)
server.serve_forever()
if __name__ == "__main__":
daemon = WebExtractDaemon('webextract')
if len(sys.argv) > 1:
if 'start' == sys.argv[1]:
daemon.start()
elif 'stop' == sys.argv[1]:
daemon.stop()
elif 'status' == sys.argv[1]:
daemon.status()
else:
print "Unknown command"
sys.exit(2)
sys.exit(0)
else:
print "usage: %s start|stop|status" % sys.argv[0]
sys.exit(2)
| farhadi/webextract | webextract.py | Python | gpl-3.0 | 6,622 |
from selenium.webdriver.support.select import Select
def get_selected_option(browser, css_selector):
    """Take a CSS selector for a <select> element and return the value of
    the currently selected option."""
select = Select(browser.find_element_by_css_selector(css_selector))
return select.first_selected_option.get_attribute('value')
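# Example usage (hypothetical page with a <select id="sort-order"> element):
#     value = get_selected_option(browser, '#sort-order')
#     assert value == 'ascending'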
| egineering-llc/egat_example_project | tests/test_helpers/selenium_helper.py | Python | mit | 338 |
#!/usr/bin/env python
import sys
import argparse
import subprocess
import ansi2html
import six
try:
import pexpect
except ImportError:
pexpect = None
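# TeeFile wraps a readable stream and echoes everything read from 'source'
# into 'target', like the Unix 'tee' utility; in slow mode it reads one byte
# at a time so interactive output appears in the target immediately.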
class TeeFile:
def __init__(self, source, target, slow=True):
self.source = source
self.target = target
self.slow = slow
def __getattr__(self, name):
return getattr(self.source, name)
def read(self, bufsize=None):
if bufsize:
return self._read(bufsize)
if not self.slow:
return self._read()
s = six.b('')
while True:
t = self._read(1)
if not t and not s:
return t
if not t:
return s
s += t
return s
def _read(self, *args):
s = self.source.read(*args)
if s:
self.target.write(s)
self.target.flush()
return s
def main(argv):
p = argparse.ArgumentParser(prog=argv[0], description='pty wrapper')
p.add_argument('--fork', '-f', default=not pexpect, action='store_true',
help='Skip pexpect - simply fork')
p.add_argument('--fast-fork', '-F', dest='fastfork', action='store_true',
help='Read fork output at once')
p.add_argument('--wrapper', '-w', default=None,
help='Wrapper program (usually winpty "console")')
p.add_argument('--log', '-l', default='colorlog.html',
help='File to write HTML log (default - "colorlog.html")')
p.add_argument('--clog', '-c', default=None,
help='File to write ANSI log (default - not write)')
p.add_argument('--title', '-t', default='colorlog',
help='Title for html document')
p.add_argument('args', nargs='*', help='What to run')
args = p.parse_args(argv[1:])
args.slow = True
if args.fastfork:
args.fork = True
args.slow = False
# print(repr(args))
if args.wrapper:
args.args.insert(0, args.wrapper)
prog, prog_args = args.args[0], args.args[1:]
if args.fork:
p = subprocess.Popen([prog] + prog_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout = sys.stdout
if hasattr(stdout, 'buffer') and hasattr(stdout.buffer, 'read'):
stdout = stdout.buffer
p.stdout = TeeFile(p.stdout, stdout, slow=args.slow)
out, err = p.communicate()
res = p.wait()
assert out
assert not err
else:
p = pexpect.spawn(prog, prog_args)
p.logfile = logfile = six.BytesIO()
p.interact(escape_character=None)
p.sendeof()
res = p.wait()
out = logfile.getvalue()
if args.clog:
f = open(args.clog, 'wb')
try:
f.write(out)
finally:
f.close()
if args.log:
c = ansi2html.Ansi2HTMLConverter(dark_bg=False, scheme='xterm',
title=args.title, output_encoding='utf-8')
html = c.convert(out.decode('utf-8'))
html = six.b(html)
f = open(args.log, 'wb')
try:
f.write(html)
finally:
f.close()
return res
if __name__ == '__main__':
sys.exit(main(sys.argv))
| pyhedgehog/hook.io-sdk-python | helpers/colorlog.py | Python | unlicense | 3,259 |
# coding=utf-8
# Author: Dennis Lutter <lad1337@gmail.com>
#
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals
from adba.aniDBerrors import AniDBCommandTimeoutError
import sickbeard
from sickbeard import db, helpers, logger
class BlackAndWhiteList(object):
blacklist = []
whitelist = []
def __init__(self, show_id):
if not show_id:
raise BlackWhitelistNoShowIDException()
self.show_id = show_id
self.load()
def load(self):
"""
Builds black and whitelist
"""
logger.log('Building black and white list for {id}'.format(id=self.show_id), logger.DEBUG)
self.blacklist = self._load_list(b'blacklist')
self.whitelist = self._load_list(b'whitelist')
def _add_keywords(self, table, values):
"""
DB: Adds keywords into database for current show
:param table: SQL table to add keywords to
:param values: Values to be inserted in table
"""
main_db_con = db.DBConnection()
for value in values:
main_db_con.action('INSERT INTO [' + table + '] (show_id, keyword) VALUES (?,?)', [self.show_id, value])
def set_black_keywords(self, values):
"""
Sets blacklist to new value
:param values: Complete list of keywords to be set as blacklist
"""
self._del_all_keywords(b'blacklist')
self._add_keywords(b'blacklist', values)
self.blacklist = values
logger.log('Blacklist set to: {blacklist}'.format(blacklist=self.blacklist), logger.DEBUG)
def set_white_keywords(self, values):
"""
Sets whitelist to new value
:param values: Complete list of keywords to be set as whitelist
"""
self._del_all_keywords(b'whitelist')
self._add_keywords(b'whitelist', values)
self.whitelist = values
logger.log('Whitelist set to: {whitelist}'.format(whitelist=self.whitelist), logger.DEBUG)
def _del_all_keywords(self, table):
"""
DB: Remove all keywords for current show
:param table: SQL table remove keywords from
"""
main_db_con = db.DBConnection()
main_db_con.action('DELETE FROM [' + table + '] WHERE show_id = ?', [self.show_id])
def _load_list(self, table):
"""
DB: Fetch keywords for current show
:param table: Table to fetch list of keywords from
:return: keywords in list
"""
main_db_con = db.DBConnection()
sql_results = main_db_con.select('SELECT keyword FROM [' + table + '] WHERE show_id = ?', [self.show_id])
        if not sql_results:
return []
groups = []
for result in sql_results:
groups.append(result[b'keyword'])
logger.log('BWL: {id} loaded keywords from {table}: {groups}'.format
(id=self.show_id, table=table, groups=groups), logger.DEBUG)
return groups
def is_valid(self, result):
"""
Check if result is valid according to white/blacklist for current show
:param result: Result to analyse
:return: False if result is not allowed in white/blacklist, True if it is
"""
if self.whitelist or self.blacklist:
if not result.release_group:
logger.log('Failed to detect release group, invalid result', logger.DEBUG)
return False
if result.release_group.lower() in [x.lower() for x in self.whitelist]:
white_result = True
elif not self.whitelist:
white_result = True
else:
white_result = False
if result.release_group.lower() in [x.lower() for x in self.blacklist]:
black_result = False
else:
black_result = True
logger.log('Whitelist check passed: {white}. Blacklist check passed: {black}'.format
(white=white_result, black=black_result), logger.DEBUG)
            return white_result and black_result
else:
logger.log('No Whitelist and Blacklist defined, check passed.', logger.DEBUG)
return True
class BlackWhitelistNoShowIDException(Exception):
"""No show_id was given"""
def short_group_names(groups):
"""
Find AniDB short group names for release groups
:param groups: list of groups to find short group names for
:return: list of shortened group names
"""
groups = groups.split(',')
short_group_list = []
if helpers.set_up_anidb_connection():
for groupName in groups:
try:
group = sickbeard.ADBA_CONNECTION.group(gname=groupName)
except AniDBCommandTimeoutError:
logger.log('Timeout while loading group from AniDB. Trying next group', logger.DEBUG)
except Exception:
logger.log('Failed while loading group from AniDB. Trying next group', logger.DEBUG)
else:
for line in group.datalines:
if line[b'shortname']:
short_group_list.append(line[b'shortname'])
else:
if groupName not in short_group_list:
short_group_list.append(groupName)
else:
short_group_list = groups
return short_group_list
| b0ttl3z/SickRage | sickbeard/blackandwhitelist.py | Python | gpl-3.0 | 6,181 |
#####################################################################
# Example : kalman filtering based cam shift object track processing
# from a video file specified on the command line (e.g. python FILE.py
# video_file) or from an attached web camera
# N.B. u se mouse to select region
# Author : Toby Breckon, toby.breckon@durham.ac.uk
# Copyright (c) 2016 Toby Breckon
# Durham University, UK
# License : LGPL - http://www.gnu.org/licenses/lgpl.html
# based in part on code from: Learning OpenCV 3 Computer Vision with Python
# Chapter 8 code samples, Minichino / Howse, Packt Publishing.
# and also code from:
# https://docs.opencv.org/3.3.1/dc/df6/tutorial_py_histogram_backprojection.html
#####################################################################
import cv2
import argparse
import sys
import math
import numpy as np
#####################################################################
keep_processing = True
selection_in_progress = False # support interactive region selection
fullscreen = False # run in fullscreen mode
# parse command line arguments for camera ID or video file
parser = argparse.ArgumentParser(
description='Perform ' +
sys.argv[0] +
' example operation on incoming camera/video image')
parser.add_argument(
"-c",
"--camera_to_use",
type=int,
help="specify camera to use",
default=0)
parser.add_argument(
"-r",
"--rescale",
type=float,
help="rescale image by this factor",
default=1.0)
parser.add_argument(
'video_file',
metavar='video_file',
type=str,
nargs='?',
help='specify optional video file')
args = parser.parse_args()
#####################################################################
# select a region using the mouse
boxes = []
current_mouse_position = np.ones(2, dtype=np.int32)
def on_mouse(event, x, y, flags, params):
global boxes
global selection_in_progress
current_mouse_position[0] = x
current_mouse_position[1] = y
if event == cv2.EVENT_LBUTTONDOWN:
boxes = []
# print 'Start Mouse Position: '+str(x)+', '+str(y)
sbox = [x, y]
selection_in_progress = True
boxes.append(sbox)
elif event == cv2.EVENT_LBUTTONUP:
# print 'End Mouse Position: '+str(x)+', '+str(y)
ebox = [x, y]
selection_in_progress = False
boxes.append(ebox)
#####################################################################
# return centre of a set of points representing a rectangle
def center(points):
x = np.float32(
(points[0][0] +
points[1][0] +
points[2][0] +
points[3][0]) /
4.0)
y = np.float32(
(points[0][1] +
points[1][1] +
points[2][1] +
points[3][1]) /
4.0)
return np.array([np.float32(x), np.float32(y)], np.float32)
#####################################################################
# this function is called as a call-back everytime the trackbar is moved
# (here we just do nothing)
def nothing(x):
pass
#####################################################################
# define video capture object
try:
# to use a non-buffered camera stream (via a separate thread)
if not(args.video_file):
import camera_stream
cap = camera_stream.CameraVideoStream()
else:
cap = cv2.VideoCapture() # not needed for video files
except BaseException:
# if not then just use OpenCV default
print("INFO: camera_stream class not found - camera input may be buffered")
cap = cv2.VideoCapture()
# define display window name
window_name = "Kalman Object Tracking" # window name
window_name2 = "Hue histogram back projection" # window name
window_nameSelection = "initial selected region"
# init kalman filter object
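# state vector is (x, y, dx, dy); the transition matrix below encodes a
# constant-velocity motion model, while the measurement matrix observes the
# (x, y) position only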
kalman = cv2.KalmanFilter(4, 2)
kalman.measurementMatrix = np.array([[1, 0, 0, 0],
[0, 1, 0, 0]], np.float32)
kalman.transitionMatrix = np.array([[1, 0, 1, 0],
[0, 1, 0, 1],
[0, 0, 1, 0],
[0, 0, 0, 1]], np.float32)
kalman.processNoiseCov = np.array([[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]], np.float32) * 0.03
measurement = np.array((2, 1), np.float32)
prediction = np.zeros((2, 1), np.float32)
print("\nObservation in image: BLUE")
print("Prediction from Kalman: GREEN\n")
# if command line arguments are provided try to read video_name
# otherwise default to capture from attached H/W camera
if (((args.video_file) and (cap.open(str(args.video_file))))
or (cap.open(args.camera_to_use))):
# create window by name (note flags for resizable or not)
cv2.namedWindow(window_name, cv2.WINDOW_NORMAL)
cv2.namedWindow(window_name2, cv2.WINDOW_NORMAL)
cv2.namedWindow(window_nameSelection, cv2.WINDOW_NORMAL)
# set sliders for HSV selection thresholds
s_lower = 60
cv2.createTrackbar("s lower", window_name2, s_lower, 255, nothing)
s_upper = 255
cv2.createTrackbar("s upper", window_name2, s_upper, 255, nothing)
v_lower = 32
cv2.createTrackbar("v lower", window_name2, v_lower, 255, nothing)
v_upper = 255
cv2.createTrackbar("v upper", window_name2, v_upper, 255, nothing)
# set a mouse callback
cv2.setMouseCallback(window_name, on_mouse, 0)
cropped = False
# Setup the termination criteria for search, either 10 iteration or
# move by at least 1 pixel pos. difference
term_crit = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 1)
while (keep_processing):
# if video file successfully open then read frame from video
if (cap.isOpened):
ret, frame = cap.read()
# rescale if specified
if (args.rescale != 1.0):
frame = cv2.resize(
frame, (0, 0), fx=args.rescale, fy=args.rescale)
# start a timer (to see how long processing and display takes)
start_t = cv2.getTickCount()
# get parameters from track bars
s_lower = cv2.getTrackbarPos("s lower", window_name2)
s_upper = cv2.getTrackbarPos("s upper", window_name2)
v_lower = cv2.getTrackbarPos("v lower", window_name2)
v_upper = cv2.getTrackbarPos("v upper", window_name2)
# select region using the mouse and display it
if (len(boxes) > 1) and (boxes[0][1] < boxes[1][1]) and (
boxes[0][0] < boxes[1][0]):
crop = frame[boxes[0][1]:boxes[1][1],
boxes[0][0]:boxes[1][0]].copy()
h, w, c = crop.shape # size of template
if (h > 0) and (w > 0):
cropped = True
# convert region to HSV
hsv_crop = cv2.cvtColor(crop, cv2.COLOR_BGR2HSV)
# select all Hue (0-> 180) and Sat. values but eliminate values
# with very low saturation or value (due to lack of useful
# colour information)
mask = cv2.inRange(
hsv_crop, np.array(
(0., float(s_lower), float(v_lower))), np.array(
(180., float(s_upper), float(v_upper))))
# construct a histogram of hue and saturation values and
# normalize it
crop_hist = cv2.calcHist(
[hsv_crop], [
0, 1], mask, [
180, 255], [
0, 180, 0, 255])
cv2.normalize(crop_hist, crop_hist, 0, 255, cv2.NORM_MINMAX)
                # set initial position of the object
track_window = (
boxes[0][0],
boxes[0][1],
boxes[1][0] -
boxes[0][0],
boxes[1][1] -
boxes[0][1])
cv2.imshow(window_nameSelection, crop)
# reset list of boxes
boxes = []
# interactive display of selection box
if (selection_in_progress):
top_left = (boxes[0][0], boxes[0][1])
bottom_right = (
current_mouse_position[0],
current_mouse_position[1])
cv2.rectangle(frame, top_left, bottom_right, (0, 255, 0), 2)
# if we have a selected region
if (cropped):
# convert incoming image to HSV
img_hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
# back projection of histogram based on Hue and Saturation only
img_bproject = cv2.calcBackProject(
[img_hsv], [
0, 1], crop_hist, [
0, 180, 0, 255], 1)
cv2.imshow(window_name2, img_bproject)
# apply camshift to predict new location (observation)
            # basic HSV histogram comparison with adaptive window size
# see :
# http://docs.opencv.org/3.1.0/db/df8/tutorial_py_meanshift.html
ret, track_window = cv2.CamShift(
img_bproject, track_window, term_crit)
# draw observation on image - in BLUE
x, y, w, h = track_window
frame = cv2.rectangle(
frame, (x, y), (x + w, y + h), (255, 0, 0), 2)
# extract centre of this observation as points
pts = cv2.boxPoints(ret)
pts = np.int0(pts)
# (cx, cy), radius = cv2.minEnclosingCircle(pts)
# use to correct kalman filter
kalman.correct(center(pts))
# get new kalman filter prediction
prediction = kalman.predict()
            # draw prediction on image - in GREEN
frame = cv2.rectangle(frame,
(int(prediction[0] - (0.5 * w)),
int(prediction[1] - (0.5 * h))),
(int(prediction[0] + (0.5 * w)),
int(prediction[1] + (0.5 * h))),
(0,
255,
0),
2)
else:
# before we have cropped anything show the mask we are using
# for the S and V components of the HSV image
img_hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
# select all Hue values (0-> 180) but eliminate values with very
# low saturation or value (due to lack of useful colour info.)
mask = cv2.inRange(
img_hsv, np.array(
(0., float(s_lower), float(v_lower))), np.array(
(180., float(s_upper), float(v_upper))))
cv2.imshow(window_name2, mask)
# display image
cv2.imshow(window_name, frame)
cv2.setWindowProperty(
window_name,
cv2.WND_PROP_FULLSCREEN,
cv2.WINDOW_FULLSCREEN & fullscreen)
# stop the timer and convert to ms. (to see how long processing and
# display takes)
stop_t = ((cv2.getTickCount() - start_t) /
cv2.getTickFrequency()) * 1000
# start the event loop - essential
# cv2.waitKey() is a keyboard binding function (argument is the time in
# milliseconds). It waits for specified milliseconds for any keyboard
# event. If you press any key in that time, the program continues.
# If 0 is passed, it waits indefinitely for a key stroke.
# (bitwise and with 0xFF to extract least significant byte of
# multi-byte response)
# wait 40ms or less depending on processing time taken (i.e. 1000ms /
# 25 fps = 40 ms)
key = cv2.waitKey(max(2, 40 - int(math.ceil(stop_t)))) & 0xFF
# It can also be set to detect specific key strokes by recording which
# key is pressed
# e.g. if user presses "x" then exit / press "f" for fullscreen
# display
if (key == ord('x')):
keep_processing = False
elif (key == ord('f')):
fullscreen = not(fullscreen)
# close all windows
cv2.destroyAllWindows()
else:
print("No video file specified or camera connected.")
#####################################################################
| tobybreckon/python-examples-cv | kalman_tracking_live.py | Python | lgpl-3.0 | 12,509 |
"""unique-property-association.py: compute properties that share the same (sub,obj)
Usage: unique-property-association.py prov-o-data.nt
"""
import os, os.path
import sys
import re
import unittest
import logging
import httplib
import urllib
import time
import StringIO
import codecs
from rdflib import Graph
import pprint
def createModel(filename):
print "==== Compute property association for: " + filename + "===="
g = Graph()
g.parse(filename, format="nt")
return g
def getUniquePredicates(g):
### find all the properties
properties = g.predicates()
keys = set()
for p in properties:
if p not in keys:
keys.add(p)
#print keys
return keys
def analyse(filename):
g = createModel(filename)
### find all the unique properties
properties = getUniquePredicates(g)
### compute the association
associations = {}
for seed_p in properties:
        ### map seed_p to the other properties used with the same (subject, object) pairs
#print "seed_p: " + seed_p
pairs = g.subject_objects(seed_p)
countMap = {}
for sub_obj in pairs:
for s,p,o in g.triples((sub_obj[0],None,sub_obj[1])):
### a property that is used with the same (sub,obj) as seed_p
if p!=seed_p:
if p in countMap:
countMap[p]+=1
else:
countMap[p] = 1
associations[seed_p] = countMap
for i in countMap:
print seed_p + "\t" + i + "\t" + str(countMap[i])
return
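# Output format: one tab-separated line per association,
#   <seed_property> <associated_property> <count of shared (subject, object) pairs>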
def main(filename):
    # example inputs:
    # /Users/zhaoj/workspace/ProvQ/data/ta-201402.nt
    # /Users/zhaoj/workspace/ProvQ/data/test-ta-prov.nt
    analyse(filename)
if __name__ == "__main__":
main(sys.argv[1]) | junszhao/ProvQ | src/analysis/unique-property-association.py | Python | apache-2.0 | 1,856 |
from . import statistics
from . import intervals | JelleAalbers/plunc | plunc/__init__.py | Python | mit | 48 |
"""Code ccollaborator review records."""
import csv
import subprocess
import tempfile
import urllib.parse
from ccollab2eeplatform.log import logger
from ccollab2eeplatform.ccollab.review_record import ReviewRecord
__all__ = ('fetch_review_records',)
def _create_download_command(creation_date_lo, creation_date_hi):
"""Create a full command used to download review CSV from ccollab."""
creation_date_filter = {
'reviewCreationDateFilter': 'lo={0}|||hi={1}'.format(creation_date_lo,
creation_date_hi)
}
command = ''.join([
'ccollab admin wget \"/go?',
'reviewCreatorUserLoginVis=y',
'&numDefectsVis=y',
'&reviewCreatorUserNameVis=y',
'&reviewIdVis=y',
'&reviewPersonDurationVis=y',
'&reviewCreationDateVis=y',
'&numCommentsVis=y',
'&locVis=y',
'&locChangedVis=y',
'&data-format=csv',
'&page=ReportReviewList',
'&formSubmittedreportConfig=1',
'&' + urllib.parse.urlencode(creation_date_filter),
'\"'
])
return command
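# A rough sketch of the command this builds (URL truncated for brevity, and
# the dates shown are hypothetical; the exact parameter set comes from
# _create_download_command above):
#   ccollab admin wget "/go?reviewCreatorUserLoginVis=y&...&data-format=csv
#       &page=ReportReviewList&reviewCreationDateFilter=lo%3D2017-01-01%7C%7C%7Chi%3D2017-02-01"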
def fetch_review_records(creation_date_lo, creation_date_hi):
"""Download review CSV into a temp file."""
command = _create_download_command(creation_date_lo, creation_date_hi)
review_records = []
# The downloaded CSV file is 'utf-8-sig' encoded.
# utf-8: 'ABC'
# utf-8-sig: '\xef\xbb\xbfABC'
with tempfile.TemporaryFile(mode='w+', encoding='utf-8-sig') as temp_csv:
logger.info('Downloading review CSV file ...')
subprocess.run(command, shell=True, stdout=temp_csv)
logger.info('Downloading review CSV file ... Done')
temp_csv.seek(0)
review_reader = csv.reader(temp_csv, delimiter=',')
# skip header record
try:
next(review_reader)
except StopIteration:
pass
else:
for record in review_reader:
review_records.append(record)
return review_records
def fetch_review_records(creation_date_lo, creation_date_hi):
# pylint: disable=function-redefined, unused-argument
"""Download review CSV into a temp file."""
review_records = []
import os
filepath = os.path.dirname(__file__) + './reviews-report.csv'
with open(filepath) as temp_csv:
review_reader = csv.reader(temp_csv, delimiter=',')
# skip header row
next(review_reader)
for record in review_reader:
review_records.append(ReviewRecord(record))
return review_records
| CVBDL/ccollab2eeplatform-python | ccollab2eeplatform/ccollab/review.py | Python | mit | 2,592 |
from __future__ import division
from six import with_metaclass
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy import integrate
class BaseDiscretizer(with_metaclass(ABCMeta)):
"""
Base class for the discretizer classes in pgmpy. The discretizer
classes are used to discretize a continuous random variable
distribution into discrete probability masses.
Parameters
----------
factor: A ContinuousNode or a ContinuousFactor object
the continuous node or factor representing the distribution
to be discretized.
low, high: float
the range over which the function will be discretized.
cardinality: int
the number of states required in the discretized output.
Examples
--------
>>> from scipy.stats import norm
>>> from pgmpy.factors.continuous import ContinuousNode
>>> normal = ContinuousNode(norm(0, 1).pdf)
>>> from pgmpy.discretize import BaseDiscretizer
>>> class ChildDiscretizer(BaseDiscretizer):
... def get_discrete_values(self):
... pass
>>> discretizer = ChildDiscretizer(normal, -3, 3, 10)
>>> discretizer.factor
<pgmpy.factors.continuous.ContinuousNode.ContinuousNode object at 0x04C98190>
>>> discretizer.cardinality
10
>>> discretizer.get_labels()
['x=-3.0', 'x=-2.4', 'x=-1.8', 'x=-1.2', 'x=-0.6', 'x=0.0', 'x=0.6', 'x=1.2', 'x=1.8', 'x=2.4']
"""
def __init__(self, factor, low, high, cardinality):
self.factor = factor
self.low = low
self.high = high
self.cardinality = cardinality
@abstractmethod
def get_discrete_values(self):
"""
This method implements the algorithm to discretize the given
continuous distribution.
It must be implemented by all the subclasses of BaseDiscretizer.
Returns
-------
A list of discrete values or a DiscreteFactor object.
"""
pass
def get_labels(self):
"""
Returns a list of strings representing the values about
        which the discretization method calculates the probability
masses.
Default value is the points -
[low, low+step, low+2*step, ......... , high-step]
unless the method is overridden by a subclass.
Examples
--------
>>> from pgmpy.factors import ContinuousNode
>>> from pgmpy.discretize import BaseDiscretizer
>>> class ChildDiscretizer(BaseDiscretizer):
... def get_discrete_values(self):
... pass
>>> from scipy.stats import norm
>>> node = ContinuousNode(norm(0).pdf)
>>> child = ChildDiscretizer(node, -5, 5, 20)
        >>> child.get_labels()
['x=-5.0', 'x=-4.5', 'x=-4.0', 'x=-3.5', 'x=-3.0', 'x=-2.5',
'x=-2.0', 'x=-1.5', 'x=-1.0', 'x=-0.5', 'x=0.0', 'x=0.5', 'x=1.0',
'x=1.5', 'x=2.0', 'x=2.5', 'x=3.0', 'x=3.5', 'x=4.0', 'x=4.5']
"""
step = (self.high - self.low) / self.cardinality
labels = ['x={i}'.format(i=str(i)) for i in np.round(
np.arange(self.low, self.high, step), 3)]
return labels
class RoundingDiscretizer(BaseDiscretizer):
"""
This class uses the rounding method for discretizing the
given continuous distribution.
For the rounding method,
The probability mass is,
cdf(x+step/2)-cdf(x), for x = low
cdf(x+step/2)-cdf(x-step/2), for low < x <= high
where, cdf is the cumulative density function of the distribution
and step = (high-low)/cardinality.
Examples
--------
>>> import numpy as np
>>> from pgmpy.factors.continuous import ContinuousNode
>>> from pgmpy.factors.continuous import RoundingDiscretizer
>>> std_normal_pdf = lambda x : np.exp(-x*x/2) / (np.sqrt(2*np.pi))
>>> std_normal = ContinuousNode(std_normal_pdf)
>>> std_normal.discretize(RoundingDiscretizer, low=-3, high=3,
... cardinality=12)
[0.001629865203424451, 0.009244709419989363, 0.027834684208773178,
0.065590616803038182, 0.120977578710013, 0.17466632194020804,
0.19741265136584729, 0.17466632194020937, 0.12097757871001302,
0.065590616803036905, 0.027834684208772664, 0.0092447094199902269]
"""
def get_discrete_values(self):
step = (self.high - self.low) / self.cardinality
# for x=[low]
discrete_values = [self.factor.cdf(self.low + step/2) - self.factor.cdf(self.low)]
# for x=[low+step, low+2*step, ........., high-step]
points = np.linspace(self.low + step, self.high - step, self.cardinality - 1)
discrete_values.extend([self.factor.cdf(i + step/2) - self.factor.cdf(i - step/2) for i in points])
return discrete_values
class UnbiasedDiscretizer(BaseDiscretizer):
"""
This class uses the unbiased method for discretizing the
given continuous distribution.
The unbiased method for discretization is the matching of the
first moment method. It involves calculating the first order
limited moment of the distribution which is done by the _lim_moment
method.
For this method,
The probability mass is,
(E(x) - E(x + step))/step + 1 - cdf(x), for x = low
(2 * E(x) - E(x - step) - E(x + step))/step, for low < x < high
(E(x) - E(x - step))/step - 1 + cdf(x), for x = high
where, E(x) is the first limiting moment of the distribution
about the point x, cdf is the cumulative density function
and step = (high-low)/cardinality.
Reference
---------
Klugman, S. A., Panjer, H. H. and Willmot, G. E.,
Loss Models, From Data to Decisions, Fourth Edition,
    Wiley, section 9.6.5.2 (Method of local moment matching) and
exercise 9.41.
Examples
--------
>>> import numpy as np
>>> from pgmpy.factors import ContinuousNode
>>> from pgmpy.factors.continuous import UnbiasedDiscretizer
# exponential distribution with rate = 2
>>> exp_pdf = lambda x: 2*np.exp(-2*x) if x>=0 else 0
>>> exp_node = ContinuousNode(exp_pdf)
>>> exp_node.discretize(UnbiasedDiscretizer, low=0, high=5, cardinality=10)
[0.39627368905806137, 0.4049838434034298, 0.13331784003148325,
0.043887287876647259, 0.014447413395300212, 0.0047559685431339703,
0.0015656350182896128, 0.00051540201980112557, 0.00016965346326140994,
3.7867260839208328e-05]
"""
def get_discrete_values(self):
lev = self._lim_moment
step = (self.high - self.low) / (self.cardinality - 1)
# for x=[low]
discrete_values = [(lev(self.low) - lev(self.low + step)) / step +
1 - self.factor.cdf(self.low)]
# for x=[low+step, low+2*step, ........., high-step]
points = np.linspace(self.low + step, self.high - step, self.cardinality - 2)
discrete_values.extend([(2 * lev(i) - lev(i - step) - lev(i + step)) / step for i in points])
# for x=[high]
discrete_values.append((lev(self.high) - lev(self.high - step)) / step - 1 + self.factor.cdf(self.high))
return discrete_values
def _lim_moment(self, u, order=1):
"""
This method calculates the kth order limiting moment of
the distribution. It is given by -
E(u) = Integral (-inf to u) [ (x^k)*pdf(x) dx ] + (u^k)(1-cdf(u))
where, pdf is the probability density function and cdf is the
cumulative density function of the distribution.
Reference
---------
Klugman, S. A., Panjer, H. H. and Willmot, G. E.,
Loss Models, From Data to Decisions, Fourth Edition,
Wiley, definition 3.5 and equation 3.8.
Parameters
----------
u: float
The point at which the moment is to be calculated.
order: int
The order of the moment, default is first order.
"""
def fun(x):
return np.power(x, order) * self.factor.pdf(x)
return (integrate.quad(fun, -np.inf, u)[0] +
np.power(u, order)*(1 - self.factor.cdf(u)))
def get_labels(self):
        labels = ['x={i}'.format(i=str(i)) for i in
                  np.round(np.linspace(self.low, self.high, self.cardinality), 3)]
return labels
| sandeepkrjha/pgmpy | pgmpy/factors/continuous/discretize.py | Python | mit | 8,295 |
#!/usr/bin/env python
import os
import shutil
import sys
import ratemyflight
class ProjectException(Exception):
pass
def create_project():
"""
Copies the contents of the project_template directory to a new directory
specified as an argument to the command line.
"""
# Ensure a directory name is specified.
script_name = os.path.basename(sys.argv[0])
usage_text = "Usage: ratemyflight project_name"
usage_text += "\nProject names beginning with \"-\" are illegal."
if len(sys.argv) != 2:
raise ProjectException(usage_text)
project_name = sys.argv[1]
if project_name.startswith("-"):
raise ProjectException(usage_text)
# Ensure the given directory name doesn't clash with an existing Python
# package/module.
try:
__import__(project_name)
except ImportError:
pass
else:
raise ProjectException("'%s' conflicts with the name of an existing "
"Python module and cannot be used as a project name. Please try "
"another name." % project_name)
ratemyflight_path = os.path.dirname(os.path.abspath(ratemyflight.__file__))
from_path = os.path.join(ratemyflight_path, "project_template")
to_path = os.path.join(os.getcwd(), project_name)
shutil.copytree(from_path, to_path)
shutil.move(os.path.join(to_path, "local_settings.py.template"),
os.path.join(to_path, "local_settings.py"))
if __name__ == "__main__":
try:
create_project()
except ProjectException, e:
print
print e
print
| stephenmcd/ratemyflight | ratemyflight/scripts/create_project.py | Python | bsd-2-clause | 1,581 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutClasses(Koan):
class Dog:
"Dogs need regular walkies. Never, ever let them drive."
def test_instances_of_classes_can_be_created_adding_parentheses(self):
# NOTE: The .__name__ attribute will convert the class
# into a string value.
fido = self.Dog()
self.assertEqual('Dog', fido.__class__.__name__)
def test_classes_have_docstrings(self):
self.assertRegexpMatches(self.Dog.__doc__, 'need regular walkies')
# ------------------------------------------------------------------
class Dog2:
def __init__(self):
self._name = 'Paul'
def set_name(self, a_name):
self._name = a_name
def test_init_method_is_the_constructor(self):
dog = self.Dog2()
self.assertEqual('Paul', dog._name)
def test_private_attributes_are_not_really_private(self):
dog = self.Dog2()
dog.set_name("Fido")
self.assertEqual('Fido', dog._name)
# The _ prefix in _name implies private ownership, but nothing is truly
# private in Python.
def test_you_can_also_access_the_value_out_using_getattr_and_dict(self):
fido = self.Dog2()
fido.set_name("Fido")
self.assertEqual('Fido', getattr(fido, "_name"))
# getattr(), setattr() and delattr() are a way of accessing attributes
# by method rather than through assignment operators
self.assertEqual('Fido', fido.__dict__["_name"])
# Yes, this works here, but don't rely on the __dict__ object! Some
# class implementations use optimization which result in __dict__ not
# showing everything.
# ------------------------------------------------------------------
class Dog3:
def __init__(self):
self._name = None
def set_name(self, a_name):
self._name = a_name
def get_name(self):
return self._name
name = property(get_name, set_name)
def test_that_name_can_be_read_as_a_property(self):
fido = self.Dog3()
fido.set_name("Fido")
# access as method
self.assertEqual('Fido', fido.get_name())
# access as property
self.assertEqual('Fido', fido.name)
# ------------------------------------------------------------------
class Dog4:
def __init__(self):
self._name = None
@property
def name(self):
return self._name
@name.setter
def name(self, a_name):
self._name = a_name
def test_creating_properties_with_decorators_is_slightly_easier(self):
fido = self.Dog4()
fido.name = "Fido"
self.assertEqual('Fido', fido.name)
# ------------------------------------------------------------------
class Dog5:
def __init__(self, initial_name):
self._name = initial_name
@property
def name(self):
return self._name
def test_init_provides_initial_values_for_instance_variables(self):
fido = self.Dog5("Fido")
self.assertEqual("Fido", fido.name)
def test_args_must_match_init(self):
with self.assertRaises(TypeError):
self.Dog5()
# THINK ABOUT IT:
# Why is this so?
#
# Because __init__ requires 1 argument and none were given
def test_different_objects_have_different_instance_variables(self):
fido = self.Dog5("Fido")
rover = self.Dog5("Rover")
self.assertEqual(False, rover.name == fido.name)
# ------------------------------------------------------------------
class Dog6:
def __init__(self, initial_name):
self._name = initial_name
def get_self(self):
return self
def __str__(self):
return self._name
def __repr__(self):
return "<Dog named '" + self._name + "'>"
def test_inside_a_method_self_refers_to_the_containing_object(self):
fido = self.Dog6("Fido")
# self.assertEqual("<Dog named 'Fido'>", fido.get_self()) # Not a string!
def test_str_provides_a_string_version_of_the_object(self):
fido = self.Dog6("Fido")
self.assertEqual("Fido", str(fido))
def test_str_is_used_explicitly_in_string_interpolation(self):
fido = self.Dog6("Fido")
self.assertEqual("My dog is Fido", "My dog is " + str(fido))
def test_repr_provides_a_more_complete_string_version(self):
fido = self.Dog6("Fido")
self.assertEqual("<Dog named 'Fido'>", repr(fido))
def test_all_objects_support_str_and_repr(self):
seq = [1, 2, 3]
self.assertEqual("[1, 2, 3]", str(seq))
self.assertEqual("[1, 2, 3]", repr(seq))
self.assertEqual("STRING", str("STRING"))
self.assertEqual("'STRING'", repr("STRING"))
| sourabhv/python-koans-solutions | python3/koans/about_classes.py | Python | mit | 4,953 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import collections
import contextlib
import csv
import logging
from typing import Dict, List # noqa: F401
from snapcraft.internal import errors
logger = logging.getLogger(__name__)
class Mount:
"""A class to provide programmatic access to a specific mountpoint"""
def __init__(self, mountinfo_row: List[str]) -> None:
# Parse the row according to section 3.5 of
# https://www.kernel.org/doc/Documentation/filesystems/proc.txt
try:
self.mount_id = mountinfo_row[0]
self.parent_id = mountinfo_row[1]
self.st_dev = mountinfo_row[2]
self.root = mountinfo_row[3]
self.mount_point = mountinfo_row[4]
self.mount_options = mountinfo_row[5]
separator_index = mountinfo_row.index("-")
self.optional_fields = mountinfo_row[6:separator_index]
self.filesystem_type = mountinfo_row[separator_index + 1]
self.mount_source = mountinfo_row[separator_index + 2]
self.super_options = mountinfo_row[separator_index + 3]
except IndexError as e:
raise errors.InvalidMountinfoFormat(" ".join(mountinfo_row)) from e
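# For reference, the illustrative row from section 3.5 of the kernel's
# proc.txt maps onto the fields above roughly like this (an example, not
# output from this module):
#   36 35 98:0 /mnt1 /mnt2 rw,noatime master:1 - ext3 /dev/root rw,errors=continue
#   -> mount_id='36', parent_id='35', st_dev='98:0', root='/mnt1',
#      mount_point='/mnt2', mount_options='rw,noatime',
#      optional_fields=['master:1'], filesystem_type='ext3',
#      mount_source='/dev/root', super_options='rw,errors=continue'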
class MountInfo:
"""A class to provide programmatic access to /proc/self/mountinfo"""
def __init__(self, *, mountinfo_file: str = "/proc/self/mountinfo") -> None:
"""Create a new MountInfo instance.
:param str mountinfo_file: Path to mountinfo file to be parsed.
"""
# Maintain two dicts pointing to the same underlying objects:
# a dict of mount points to Mounts, and a dict of roots to Mounts.
self._mount_point_mounts = {} # type: Dict[str, Mount]
root_mounts = collections.defaultdict(
list
) # type: Dict[str, List[Mount]] # noqa
with contextlib.suppress(FileNotFoundError):
with open(mountinfo_file) as f:
for row in csv.reader(f, delimiter=" "):
try:
mount = Mount(row)
self._mount_point_mounts[mount.mount_point] = mount
root_mounts[mount.root].append(mount)
except errors.InvalidMountinfoFormat as e:
logger.warning(str(e))
self._root_mounts = dict(root_mounts)
def for_mount_point(self, mount_point: str) -> Mount:
try:
return self._mount_point_mounts[mount_point]
except KeyError:
raise errors.MountPointNotFoundError(mount_point)
def for_root(self, root: str) -> List[Mount]:
try:
return self._root_mounts[root]
except KeyError:
raise errors.RootNotMountedError(root)
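# A minimal usage sketch, assuming a Linux host with /proc available:
#
#   info = MountInfo()
#   root_mount = info.for_mount_point("/")  # the Mount object for "/"
#   mounts = info.for_root("/")             # every Mount whose root is "/"
#
# Failed lookups raise the snapcraft-specific errors above rather than
# KeyError, so callers can distinguish them from other mapping failures.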
| chipaca/snapcraft | snapcraft/internal/mountinfo.py | Python | gpl-3.0 | 3,392 |
class EventException(Exception):
"""
Event related Exception
"""
pass
class NotConcernedEvent(EventException):
"""
    Exception raised when the ``SynergyObject`` in question is not concerned by the Event.
"""
pass
class UselessMechanism(EventException):
"""
    Exception raised when the ``SynergyObject`` in question is not concerned by this Mechanism.
"""
pass
class ImproperlyConfigured(Exception):
pass
class NotFound(Exception):
pass | buxx/synergine | synergine/core/exceptions.py | Python | apache-2.0 | 483 |
from .splicer import Splicer
from .parser import Parser | Zephirot93/subs-audio-splicer | __init__.py | Python | mit | 53 |
#
# Unit tests for the multiprocessing package
#
import unittest
import queue as pyqueue
import time
import io
import itertools
import sys
import os
import gc
import errno
import signal
import array
import socket
import random
import logging
import struct
import operator
import test.support
import test.script_helper
# Skip tests if _multiprocessing wasn't built.
_multiprocessing = test.support.import_module('_multiprocessing')
# Skip tests if sem_open implementation is broken.
test.support.import_module('multiprocessing.synchronize')
# import threading after _multiprocessing to raise a more relevant error
# message: "No module named _multiprocessing". _multiprocessing is not compiled
# without thread support.
import threading
import multiprocessing.dummy
import multiprocessing.connection
import multiprocessing.managers
import multiprocessing.heap
import multiprocessing.pool
from multiprocessing import util
try:
from multiprocessing import reduction
HAS_REDUCTION = reduction.HAVE_SEND_HANDLE
except ImportError:
HAS_REDUCTION = False
try:
from multiprocessing.sharedctypes import Value, copy
HAS_SHAREDCTYPES = True
except ImportError:
HAS_SHAREDCTYPES = False
try:
import msvcrt
except ImportError:
msvcrt = None
#
#
#
def latin(s):
return s.encode('latin')
#
# Constants
#
LOG_LEVEL = util.SUBWARNING
#LOG_LEVEL = logging.DEBUG
DELTA = 0.1
CHECK_TIMINGS = False     # setting this True makes tests take a lot longer
# and can sometimes cause some non-serious
# failures because some calls block a bit
# longer than expected
if CHECK_TIMINGS:
TIMEOUT1, TIMEOUT2, TIMEOUT3 = 0.82, 0.35, 1.4
else:
TIMEOUT1, TIMEOUT2, TIMEOUT3 = 0.1, 0.1, 0.1
HAVE_GETVALUE = not getattr(_multiprocessing,
'HAVE_BROKEN_SEM_GETVALUE', False)
WIN32 = (sys.platform == "win32")
from multiprocessing.connection import wait
def wait_for_handle(handle, timeout):
if timeout is not None and timeout < 0.0:
timeout = None
return wait([handle], timeout)
try:
MAXFD = os.sysconf("SC_OPEN_MAX")
except:
MAXFD = 256
# To speed up tests when using the forkserver, we can preload these:
PRELOAD = ['__main__', 'test.test_multiprocessing_forkserver']
#
# Some tests require ctypes
#
try:
from ctypes import Structure, c_int, c_double
except ImportError:
Structure = object
c_int = c_double = None
def check_enough_semaphores():
"""Check that the system supports enough semaphores to run the test."""
# minimum number of semaphores available according to POSIX
nsems_min = 256
try:
nsems = os.sysconf("SC_SEM_NSEMS_MAX")
except (AttributeError, ValueError):
# sysconf not available or setting not available
return
if nsems == -1 or nsems >= nsems_min:
return
raise unittest.SkipTest("The OS doesn't support enough semaphores "
"to run the test (required: %d)." % nsems_min)
#
# Creates a wrapper for a function which records the time it takes to finish
#
class TimingWrapper(object):
def __init__(self, func):
self.func = func
self.elapsed = None
def __call__(self, *args, **kwds):
t = time.time()
try:
return self.func(*args, **kwds)
finally:
self.elapsed = time.time() - t
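# For example (a sketch): wrapped = TimingWrapper(time.sleep); wrapped(0.2)
# leaves wrapped.elapsed at roughly 0.2, which is how the timing assertions
# below measure how long blocking calls actually took.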
#
# Base class for test cases
#
class BaseTestCase(object):
ALLOWED_TYPES = ('processes', 'manager', 'threads')
def assertTimingAlmostEqual(self, a, b):
if CHECK_TIMINGS:
self.assertAlmostEqual(a, b, 1)
def assertReturnsIfImplemented(self, value, func, *args):
try:
res = func(*args)
except NotImplementedError:
pass
else:
return self.assertEqual(value, res)
    # For the sanity of Windows users, refuse to pickle test cases rather
    # than crashing or freezing in multiple ways.
def __reduce__(self, *args):
raise NotImplementedError("shouldn't try to pickle a test case")
__reduce_ex__ = __reduce__
#
# Return the value of a semaphore
#
def get_value(self):
try:
return self.get_value()
except AttributeError:
try:
return self._Semaphore__value
except AttributeError:
try:
return self._value
except AttributeError:
raise NotImplementedError
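# For example, for sem = multiprocessing.Semaphore(2), get_value(sem) returns
# 2 on platforms where sem_getvalue() works, and raises NotImplementedError
# where it does not (which assertReturnsIfImplemented then tolerates).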
#
# Testcases
#
class _TestProcess(BaseTestCase):
ALLOWED_TYPES = ('processes', 'threads')
def test_current(self):
if self.TYPE == 'threads':
self.skipTest('test not appropriate for {}'.format(self.TYPE))
current = self.current_process()
authkey = current.authkey
self.assertTrue(current.is_alive())
self.assertTrue(not current.daemon)
self.assertIsInstance(authkey, bytes)
self.assertTrue(len(authkey) > 0)
self.assertEqual(current.ident, os.getpid())
self.assertEqual(current.exitcode, None)
def test_daemon_argument(self):
if self.TYPE == "threads":
self.skipTest('test not appropriate for {}'.format(self.TYPE))
# By default uses the current process's daemon flag.
proc0 = self.Process(target=self._test)
self.assertEqual(proc0.daemon, self.current_process().daemon)
proc1 = self.Process(target=self._test, daemon=True)
self.assertTrue(proc1.daemon)
proc2 = self.Process(target=self._test, daemon=False)
self.assertFalse(proc2.daemon)
@classmethod
def _test(cls, q, *args, **kwds):
current = cls.current_process()
q.put(args)
q.put(kwds)
q.put(current.name)
if cls.TYPE != 'threads':
q.put(bytes(current.authkey))
q.put(current.pid)
def test_process(self):
q = self.Queue(1)
e = self.Event()
args = (q, 1, 2)
kwargs = {'hello':23, 'bye':2.54}
name = 'SomeProcess'
p = self.Process(
target=self._test, args=args, kwargs=kwargs, name=name
)
p.daemon = True
current = self.current_process()
if self.TYPE != 'threads':
self.assertEqual(p.authkey, current.authkey)
self.assertEqual(p.is_alive(), False)
self.assertEqual(p.daemon, True)
self.assertNotIn(p, self.active_children())
self.assertTrue(type(self.active_children()) is list)
self.assertEqual(p.exitcode, None)
p.start()
self.assertEqual(p.exitcode, None)
self.assertEqual(p.is_alive(), True)
self.assertIn(p, self.active_children())
self.assertEqual(q.get(), args[1:])
self.assertEqual(q.get(), kwargs)
self.assertEqual(q.get(), p.name)
if self.TYPE != 'threads':
self.assertEqual(q.get(), current.authkey)
self.assertEqual(q.get(), p.pid)
p.join()
self.assertEqual(p.exitcode, 0)
self.assertEqual(p.is_alive(), False)
self.assertNotIn(p, self.active_children())
@classmethod
def _test_terminate(cls):
time.sleep(100)
def test_terminate(self):
if self.TYPE == 'threads':
self.skipTest('test not appropriate for {}'.format(self.TYPE))
p = self.Process(target=self._test_terminate)
p.daemon = True
p.start()
self.assertEqual(p.is_alive(), True)
self.assertIn(p, self.active_children())
self.assertEqual(p.exitcode, None)
join = TimingWrapper(p.join)
self.assertEqual(join(0), None)
self.assertTimingAlmostEqual(join.elapsed, 0.0)
self.assertEqual(p.is_alive(), True)
self.assertEqual(join(-1), None)
self.assertTimingAlmostEqual(join.elapsed, 0.0)
self.assertEqual(p.is_alive(), True)
# XXX maybe terminating too soon causes the problems on Gentoo...
time.sleep(1)
p.terminate()
if hasattr(signal, 'alarm'):
# On the Gentoo buildbot waitpid() often seems to block forever.
# We use alarm() to interrupt it if it blocks for too long.
def handler(*args):
raise RuntimeError('join took too long: %s' % p)
old_handler = signal.signal(signal.SIGALRM, handler)
try:
signal.alarm(10)
self.assertEqual(join(), None)
finally:
signal.alarm(0)
signal.signal(signal.SIGALRM, old_handler)
else:
self.assertEqual(join(), None)
self.assertTimingAlmostEqual(join.elapsed, 0.0)
self.assertEqual(p.is_alive(), False)
self.assertNotIn(p, self.active_children())
p.join()
# XXX sometimes get p.exitcode == 0 on Windows ...
#self.assertEqual(p.exitcode, -signal.SIGTERM)
def test_cpu_count(self):
try:
cpus = multiprocessing.cpu_count()
except NotImplementedError:
cpus = 1
self.assertTrue(type(cpus) is int)
self.assertTrue(cpus >= 1)
def test_active_children(self):
self.assertEqual(type(self.active_children()), list)
p = self.Process(target=time.sleep, args=(DELTA,))
self.assertNotIn(p, self.active_children())
p.daemon = True
p.start()
self.assertIn(p, self.active_children())
p.join()
self.assertNotIn(p, self.active_children())
@classmethod
def _test_recursion(cls, wconn, id):
wconn.send(id)
if len(id) < 2:
for i in range(2):
p = cls.Process(
target=cls._test_recursion, args=(wconn, id+[i])
)
p.start()
p.join()
def test_recursion(self):
rconn, wconn = self.Pipe(duplex=False)
self._test_recursion(wconn, [])
time.sleep(DELTA)
result = []
while rconn.poll():
result.append(rconn.recv())
expected = [
[],
[0],
[0, 0],
[0, 1],
[1],
[1, 0],
[1, 1]
]
self.assertEqual(result, expected)
@classmethod
def _test_sentinel(cls, event):
event.wait(10.0)
def test_sentinel(self):
if self.TYPE == "threads":
self.skipTest('test not appropriate for {}'.format(self.TYPE))
event = self.Event()
p = self.Process(target=self._test_sentinel, args=(event,))
with self.assertRaises(ValueError):
p.sentinel
p.start()
self.addCleanup(p.join)
sentinel = p.sentinel
self.assertIsInstance(sentinel, int)
self.assertFalse(wait_for_handle(sentinel, timeout=0.0))
event.set()
p.join()
self.assertTrue(wait_for_handle(sentinel, timeout=1))
#
#
#
class _UpperCaser(multiprocessing.Process):
def __init__(self):
multiprocessing.Process.__init__(self)
self.child_conn, self.parent_conn = multiprocessing.Pipe()
def run(self):
self.parent_conn.close()
for s in iter(self.child_conn.recv, None):
self.child_conn.send(s.upper())
self.child_conn.close()
def submit(self, s):
assert type(s) is str
self.parent_conn.send(s)
return self.parent_conn.recv()
def stop(self):
self.parent_conn.send(None)
self.parent_conn.close()
self.child_conn.close()
class _TestSubclassingProcess(BaseTestCase):
ALLOWED_TYPES = ('processes',)
def test_subclassing(self):
uppercaser = _UpperCaser()
uppercaser.daemon = True
uppercaser.start()
self.assertEqual(uppercaser.submit('hello'), 'HELLO')
self.assertEqual(uppercaser.submit('world'), 'WORLD')
uppercaser.stop()
uppercaser.join()
def test_stderr_flush(self):
# sys.stderr is flushed at process shutdown (issue #13812)
if self.TYPE == "threads":
self.skipTest('test not appropriate for {}'.format(self.TYPE))
testfn = test.support.TESTFN
self.addCleanup(test.support.unlink, testfn)
proc = self.Process(target=self._test_stderr_flush, args=(testfn,))
proc.start()
proc.join()
with open(testfn, 'r') as f:
err = f.read()
# The whole traceback was printed
self.assertIn("ZeroDivisionError", err)
self.assertIn("test_multiprocessing.py", err)
self.assertIn("1/0 # MARKER", err)
@classmethod
def _test_stderr_flush(cls, testfn):
sys.stderr = open(testfn, 'w')
1/0 # MARKER
@classmethod
def _test_sys_exit(cls, reason, testfn):
sys.stderr = open(testfn, 'w')
sys.exit(reason)
def test_sys_exit(self):
# See Issue 13854
if self.TYPE == 'threads':
self.skipTest('test not appropriate for {}'.format(self.TYPE))
testfn = test.support.TESTFN
self.addCleanup(test.support.unlink, testfn)
for reason, code in (([1, 2, 3], 1), ('ignore this', 1)):
p = self.Process(target=self._test_sys_exit, args=(reason, testfn))
p.daemon = True
p.start()
p.join(5)
self.assertEqual(p.exitcode, code)
with open(testfn, 'r') as f:
self.assertEqual(f.read().rstrip(), str(reason))
for reason in (True, False, 8):
p = self.Process(target=sys.exit, args=(reason,))
p.daemon = True
p.start()
p.join(5)
self.assertEqual(p.exitcode, reason)
#
#
#
def queue_empty(q):
if hasattr(q, 'empty'):
return q.empty()
else:
return q.qsize() == 0
def queue_full(q, maxsize):
if hasattr(q, 'full'):
return q.full()
else:
return q.qsize() == maxsize
class _TestQueue(BaseTestCase):
@classmethod
def _test_put(cls, queue, child_can_start, parent_can_continue):
child_can_start.wait()
for i in range(6):
queue.get()
parent_can_continue.set()
def test_put(self):
MAXSIZE = 6
queue = self.Queue(maxsize=MAXSIZE)
child_can_start = self.Event()
parent_can_continue = self.Event()
proc = self.Process(
target=self._test_put,
args=(queue, child_can_start, parent_can_continue)
)
proc.daemon = True
proc.start()
self.assertEqual(queue_empty(queue), True)
self.assertEqual(queue_full(queue, MAXSIZE), False)
queue.put(1)
queue.put(2, True)
queue.put(3, True, None)
queue.put(4, False)
queue.put(5, False, None)
queue.put_nowait(6)
# the values may be in buffer but not yet in pipe so sleep a bit
time.sleep(DELTA)
self.assertEqual(queue_empty(queue), False)
self.assertEqual(queue_full(queue, MAXSIZE), True)
put = TimingWrapper(queue.put)
put_nowait = TimingWrapper(queue.put_nowait)
self.assertRaises(pyqueue.Full, put, 7, False)
self.assertTimingAlmostEqual(put.elapsed, 0)
self.assertRaises(pyqueue.Full, put, 7, False, None)
self.assertTimingAlmostEqual(put.elapsed, 0)
self.assertRaises(pyqueue.Full, put_nowait, 7)
self.assertTimingAlmostEqual(put_nowait.elapsed, 0)
self.assertRaises(pyqueue.Full, put, 7, True, TIMEOUT1)
self.assertTimingAlmostEqual(put.elapsed, TIMEOUT1)
self.assertRaises(pyqueue.Full, put, 7, False, TIMEOUT2)
self.assertTimingAlmostEqual(put.elapsed, 0)
self.assertRaises(pyqueue.Full, put, 7, True, timeout=TIMEOUT3)
self.assertTimingAlmostEqual(put.elapsed, TIMEOUT3)
child_can_start.set()
parent_can_continue.wait()
self.assertEqual(queue_empty(queue), True)
self.assertEqual(queue_full(queue, MAXSIZE), False)
proc.join()
@classmethod
def _test_get(cls, queue, child_can_start, parent_can_continue):
child_can_start.wait()
#queue.put(1)
queue.put(2)
queue.put(3)
queue.put(4)
queue.put(5)
parent_can_continue.set()
def test_get(self):
queue = self.Queue()
child_can_start = self.Event()
parent_can_continue = self.Event()
proc = self.Process(
target=self._test_get,
args=(queue, child_can_start, parent_can_continue)
)
proc.daemon = True
proc.start()
self.assertEqual(queue_empty(queue), True)
child_can_start.set()
parent_can_continue.wait()
time.sleep(DELTA)
self.assertEqual(queue_empty(queue), False)
# Hangs unexpectedly, remove for now
#self.assertEqual(queue.get(), 1)
self.assertEqual(queue.get(True, None), 2)
self.assertEqual(queue.get(True), 3)
self.assertEqual(queue.get(timeout=1), 4)
self.assertEqual(queue.get_nowait(), 5)
self.assertEqual(queue_empty(queue), True)
get = TimingWrapper(queue.get)
get_nowait = TimingWrapper(queue.get_nowait)
self.assertRaises(pyqueue.Empty, get, False)
self.assertTimingAlmostEqual(get.elapsed, 0)
self.assertRaises(pyqueue.Empty, get, False, None)
self.assertTimingAlmostEqual(get.elapsed, 0)
self.assertRaises(pyqueue.Empty, get_nowait)
self.assertTimingAlmostEqual(get_nowait.elapsed, 0)
self.assertRaises(pyqueue.Empty, get, True, TIMEOUT1)
self.assertTimingAlmostEqual(get.elapsed, TIMEOUT1)
self.assertRaises(pyqueue.Empty, get, False, TIMEOUT2)
self.assertTimingAlmostEqual(get.elapsed, 0)
self.assertRaises(pyqueue.Empty, get, timeout=TIMEOUT3)
self.assertTimingAlmostEqual(get.elapsed, TIMEOUT3)
proc.join()
@classmethod
def _test_fork(cls, queue):
for i in range(10, 20):
queue.put(i)
# note that at this point the items may only be buffered, so the
        # process cannot shut down until the feeder thread has finished
# pushing items onto the pipe.
def test_fork(self):
# Old versions of Queue would fail to create a new feeder
# thread for a forked process if the original process had its
# own feeder thread. This test checks that this no longer
# happens.
queue = self.Queue()
# put items on queue so that main process starts a feeder thread
for i in range(10):
queue.put(i)
# wait to make sure thread starts before we fork a new process
time.sleep(DELTA)
# fork process
p = self.Process(target=self._test_fork, args=(queue,))
p.daemon = True
p.start()
# check that all expected items are in the queue
for i in range(20):
self.assertEqual(queue.get(), i)
self.assertRaises(pyqueue.Empty, queue.get, False)
p.join()
def test_qsize(self):
q = self.Queue()
try:
self.assertEqual(q.qsize(), 0)
except NotImplementedError:
self.skipTest('qsize method not implemented')
q.put(1)
self.assertEqual(q.qsize(), 1)
q.put(5)
self.assertEqual(q.qsize(), 2)
q.get()
self.assertEqual(q.qsize(), 1)
q.get()
self.assertEqual(q.qsize(), 0)
@classmethod
def _test_task_done(cls, q):
for obj in iter(q.get, None):
time.sleep(DELTA)
q.task_done()
def test_task_done(self):
queue = self.JoinableQueue()
workers = [self.Process(target=self._test_task_done, args=(queue,))
for i in range(4)]
for p in workers:
p.daemon = True
p.start()
for i in range(10):
queue.put(i)
queue.join()
for p in workers:
queue.put(None)
for p in workers:
p.join()
def test_timeout(self):
q = multiprocessing.Queue()
start = time.time()
self.assertRaises(pyqueue.Empty, q.get, True, 0.2)
delta = time.time() - start
self.assertGreaterEqual(delta, 0.18)
#
#
#
class _TestLock(BaseTestCase):
def test_lock(self):
lock = self.Lock()
self.assertEqual(lock.acquire(), True)
self.assertEqual(lock.acquire(False), False)
self.assertEqual(lock.release(), None)
self.assertRaises((ValueError, threading.ThreadError), lock.release)
def test_rlock(self):
lock = self.RLock()
self.assertEqual(lock.acquire(), True)
self.assertEqual(lock.acquire(), True)
self.assertEqual(lock.acquire(), True)
self.assertEqual(lock.release(), None)
self.assertEqual(lock.release(), None)
self.assertEqual(lock.release(), None)
self.assertRaises((AssertionError, RuntimeError), lock.release)
def test_lock_context(self):
with self.Lock():
pass
class _TestSemaphore(BaseTestCase):
def _test_semaphore(self, sem):
self.assertReturnsIfImplemented(2, get_value, sem)
self.assertEqual(sem.acquire(), True)
self.assertReturnsIfImplemented(1, get_value, sem)
self.assertEqual(sem.acquire(), True)
self.assertReturnsIfImplemented(0, get_value, sem)
self.assertEqual(sem.acquire(False), False)
self.assertReturnsIfImplemented(0, get_value, sem)
self.assertEqual(sem.release(), None)
self.assertReturnsIfImplemented(1, get_value, sem)
self.assertEqual(sem.release(), None)
self.assertReturnsIfImplemented(2, get_value, sem)
def test_semaphore(self):
sem = self.Semaphore(2)
self._test_semaphore(sem)
self.assertEqual(sem.release(), None)
self.assertReturnsIfImplemented(3, get_value, sem)
self.assertEqual(sem.release(), None)
self.assertReturnsIfImplemented(4, get_value, sem)
def test_bounded_semaphore(self):
sem = self.BoundedSemaphore(2)
self._test_semaphore(sem)
# Currently fails on OS/X
#if HAVE_GETVALUE:
# self.assertRaises(ValueError, sem.release)
# self.assertReturnsIfImplemented(2, get_value, sem)
def test_timeout(self):
if self.TYPE != 'processes':
self.skipTest('test not appropriate for {}'.format(self.TYPE))
sem = self.Semaphore(0)
acquire = TimingWrapper(sem.acquire)
self.assertEqual(acquire(False), False)
self.assertTimingAlmostEqual(acquire.elapsed, 0.0)
self.assertEqual(acquire(False, None), False)
self.assertTimingAlmostEqual(acquire.elapsed, 0.0)
self.assertEqual(acquire(False, TIMEOUT1), False)
self.assertTimingAlmostEqual(acquire.elapsed, 0)
self.assertEqual(acquire(True, TIMEOUT2), False)
self.assertTimingAlmostEqual(acquire.elapsed, TIMEOUT2)
self.assertEqual(acquire(timeout=TIMEOUT3), False)
self.assertTimingAlmostEqual(acquire.elapsed, TIMEOUT3)
class _TestCondition(BaseTestCase):
@classmethod
def f(cls, cond, sleeping, woken, timeout=None):
cond.acquire()
sleeping.release()
cond.wait(timeout)
woken.release()
cond.release()
def check_invariant(self, cond):
# this is only supposed to succeed when there are no sleepers
if self.TYPE == 'processes':
try:
sleepers = (cond._sleeping_count.get_value() -
cond._woken_count.get_value())
self.assertEqual(sleepers, 0)
self.assertEqual(cond._wait_semaphore.get_value(), 0)
except NotImplementedError:
pass
def test_notify(self):
cond = self.Condition()
sleeping = self.Semaphore(0)
woken = self.Semaphore(0)
p = self.Process(target=self.f, args=(cond, sleeping, woken))
p.daemon = True
p.start()
p = threading.Thread(target=self.f, args=(cond, sleeping, woken))
p.daemon = True
p.start()
# wait for both children to start sleeping
sleeping.acquire()
sleeping.acquire()
# check no process/thread has woken up
time.sleep(DELTA)
self.assertReturnsIfImplemented(0, get_value, woken)
# wake up one process/thread
cond.acquire()
cond.notify()
cond.release()
# check one process/thread has woken up
time.sleep(DELTA)
self.assertReturnsIfImplemented(1, get_value, woken)
# wake up another
cond.acquire()
cond.notify()
cond.release()
# check other has woken up
time.sleep(DELTA)
self.assertReturnsIfImplemented(2, get_value, woken)
# check state is not mucked up
self.check_invariant(cond)
p.join()
def test_notify_all(self):
cond = self.Condition()
sleeping = self.Semaphore(0)
woken = self.Semaphore(0)
# start some threads/processes which will timeout
for i in range(3):
p = self.Process(target=self.f,
args=(cond, sleeping, woken, TIMEOUT1))
p.daemon = True
p.start()
t = threading.Thread(target=self.f,
args=(cond, sleeping, woken, TIMEOUT1))
t.daemon = True
t.start()
# wait for them all to sleep
for i in range(6):
sleeping.acquire()
# check they have all timed out
for i in range(6):
woken.acquire()
self.assertReturnsIfImplemented(0, get_value, woken)
# check state is not mucked up
self.check_invariant(cond)
# start some more threads/processes
for i in range(3):
p = self.Process(target=self.f, args=(cond, sleeping, woken))
p.daemon = True
p.start()
t = threading.Thread(target=self.f, args=(cond, sleeping, woken))
t.daemon = True
t.start()
# wait for them to all sleep
for i in range(6):
sleeping.acquire()
# check no process/thread has woken up
time.sleep(DELTA)
self.assertReturnsIfImplemented(0, get_value, woken)
# wake them all up
cond.acquire()
cond.notify_all()
cond.release()
# check they have all woken
for i in range(10):
try:
if get_value(woken) == 6:
break
except NotImplementedError:
break
time.sleep(DELTA)
self.assertReturnsIfImplemented(6, get_value, woken)
# check state is not mucked up
self.check_invariant(cond)
def test_timeout(self):
cond = self.Condition()
wait = TimingWrapper(cond.wait)
cond.acquire()
res = wait(TIMEOUT1)
cond.release()
self.assertEqual(res, False)
self.assertTimingAlmostEqual(wait.elapsed, TIMEOUT1)
@classmethod
def _test_waitfor_f(cls, cond, state):
with cond:
state.value = 0
cond.notify()
result = cond.wait_for(lambda : state.value==4)
if not result or state.value != 4:
sys.exit(1)
@unittest.skipUnless(HAS_SHAREDCTYPES, 'needs sharedctypes')
def test_waitfor(self):
# based on test in test/lock_tests.py
cond = self.Condition()
state = self.Value('i', -1)
p = self.Process(target=self._test_waitfor_f, args=(cond, state))
p.daemon = True
p.start()
with cond:
result = cond.wait_for(lambda : state.value==0)
self.assertTrue(result)
self.assertEqual(state.value, 0)
for i in range(4):
time.sleep(0.01)
with cond:
state.value += 1
cond.notify()
p.join(5)
self.assertFalse(p.is_alive())
self.assertEqual(p.exitcode, 0)
@classmethod
def _test_waitfor_timeout_f(cls, cond, state, success, sem):
sem.release()
with cond:
expected = 0.1
dt = time.time()
result = cond.wait_for(lambda : state.value==4, timeout=expected)
dt = time.time() - dt
# borrow logic in assertTimeout() from test/lock_tests.py
if not result and expected * 0.6 < dt < expected * 10.0:
success.value = True
@unittest.skipUnless(HAS_SHAREDCTYPES, 'needs sharedctypes')
def test_waitfor_timeout(self):
# based on test in test/lock_tests.py
cond = self.Condition()
state = self.Value('i', 0)
success = self.Value('i', False)
sem = self.Semaphore(0)
p = self.Process(target=self._test_waitfor_timeout_f,
args=(cond, state, success, sem))
p.daemon = True
p.start()
self.assertTrue(sem.acquire(timeout=10))
# Only increment 3 times, so state == 4 is never reached.
for i in range(3):
time.sleep(0.01)
with cond:
state.value += 1
cond.notify()
p.join(5)
self.assertTrue(success.value)
@classmethod
def _test_wait_result(cls, c, pid):
with c:
c.notify()
time.sleep(1)
if pid is not None:
os.kill(pid, signal.SIGINT)
def test_wait_result(self):
if isinstance(self, ProcessesMixin) and sys.platform != 'win32':
pid = os.getpid()
else:
pid = None
c = self.Condition()
with c:
self.assertFalse(c.wait(0))
self.assertFalse(c.wait(0.1))
p = self.Process(target=self._test_wait_result, args=(c, pid))
p.start()
self.assertTrue(c.wait(10))
if pid is not None:
self.assertRaises(KeyboardInterrupt, c.wait, 10)
p.join()
class _TestEvent(BaseTestCase):
@classmethod
def _test_event(cls, event):
time.sleep(TIMEOUT2)
event.set()
def test_event(self):
event = self.Event()
wait = TimingWrapper(event.wait)
        # Removed temporarily: due to API shear this does not
        # work with threading._Event objects (is_set == isSet).
self.assertEqual(event.is_set(), False)
        # Removed: threading.Event.wait() will return the value of __flag
        # instead of None; API shear with the semaphore-backed mp.Event.
self.assertEqual(wait(0.0), False)
self.assertTimingAlmostEqual(wait.elapsed, 0.0)
self.assertEqual(wait(TIMEOUT1), False)
self.assertTimingAlmostEqual(wait.elapsed, TIMEOUT1)
event.set()
# See note above on the API differences
self.assertEqual(event.is_set(), True)
self.assertEqual(wait(), True)
self.assertTimingAlmostEqual(wait.elapsed, 0.0)
self.assertEqual(wait(TIMEOUT1), True)
self.assertTimingAlmostEqual(wait.elapsed, 0.0)
# self.assertEqual(event.is_set(), True)
event.clear()
#self.assertEqual(event.is_set(), False)
p = self.Process(target=self._test_event, args=(event,))
p.daemon = True
p.start()
self.assertEqual(wait(), True)
#
# Tests for Barrier - adapted from tests in test/lock_tests.py
#
# Many of the tests for threading.Barrier use a list as an atomic
# counter: a value is appended to increment the counter, and the
# length of the list gives the value. We use the class DummyList
# for the same purpose.
class _DummyList(object):
def __init__(self):
wrapper = multiprocessing.heap.BufferWrapper(struct.calcsize('i'))
lock = multiprocessing.Lock()
self.__setstate__((wrapper, lock))
self._lengthbuf[0] = 0
def __setstate__(self, state):
(self._wrapper, self._lock) = state
self._lengthbuf = self._wrapper.create_memoryview().cast('i')
def __getstate__(self):
return (self._wrapper, self._lock)
def append(self, _):
with self._lock:
self._lengthbuf[0] += 1
def __len__(self):
with self._lock:
return self._lengthbuf[0]
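# Used like a list-based counter shared across processes, e.g. (sketch):
#   counter = _DummyList(); counter.append(True); len(counter) == 1
# The pickled (wrapper, lock) state lets every process update the same buffer.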
def _wait():
# A crude wait/yield function not relying on synchronization primitives.
time.sleep(0.01)
class Bunch(object):
"""
A bunch of threads.
"""
def __init__(self, namespace, f, args, n, wait_before_exit=False):
"""
Construct a bunch of `n` threads running the same function `f`.
If `wait_before_exit` is True, the threads won't terminate until
do_finish() is called.
"""
self.f = f
self.args = args
self.n = n
self.started = namespace.DummyList()
self.finished = namespace.DummyList()
self._can_exit = namespace.Event()
if not wait_before_exit:
self._can_exit.set()
for i in range(n):
p = namespace.Process(target=self.task)
p.daemon = True
p.start()
def task(self):
pid = os.getpid()
self.started.append(pid)
try:
self.f(*self.args)
finally:
self.finished.append(pid)
self._can_exit.wait(30)
assert self._can_exit.is_set()
def wait_for_started(self):
while len(self.started) < self.n:
_wait()
def wait_for_finished(self):
while len(self.finished) < self.n:
_wait()
def do_finish(self):
self._can_exit.set()
class AppendTrue(object):
def __init__(self, obj):
self.obj = obj
def __call__(self):
self.obj.append(True)
class _TestBarrier(BaseTestCase):
"""
Tests for Barrier objects.
"""
N = 5
defaultTimeout = 30.0 # XXX Slow Windows buildbots need generous timeout
def setUp(self):
self.barrier = self.Barrier(self.N, timeout=self.defaultTimeout)
def tearDown(self):
self.barrier.abort()
self.barrier = None
def DummyList(self):
if self.TYPE == 'threads':
return []
elif self.TYPE == 'manager':
return self.manager.list()
else:
return _DummyList()
def run_threads(self, f, args):
b = Bunch(self, f, args, self.N-1)
f(*args)
b.wait_for_finished()
@classmethod
def multipass(cls, barrier, results, n):
m = barrier.parties
assert m == cls.N
for i in range(n):
results[0].append(True)
assert len(results[1]) == i * m
barrier.wait()
results[1].append(True)
assert len(results[0]) == (i + 1) * m
barrier.wait()
try:
assert barrier.n_waiting == 0
except NotImplementedError:
pass
assert not barrier.broken
def test_barrier(self, passes=1):
"""
Test that a barrier is passed in lockstep
"""
results = [self.DummyList(), self.DummyList()]
self.run_threads(self.multipass, (self.barrier, results, passes))
def test_barrier_10(self):
"""
Test that a barrier works for 10 consecutive runs
"""
return self.test_barrier(10)
@classmethod
def _test_wait_return_f(cls, barrier, queue):
res = barrier.wait()
queue.put(res)
def test_wait_return(self):
"""
test the return value from barrier.wait
"""
queue = self.Queue()
self.run_threads(self._test_wait_return_f, (self.barrier, queue))
results = [queue.get() for i in range(self.N)]
self.assertEqual(results.count(0), 1)
@classmethod
def _test_action_f(cls, barrier, results):
barrier.wait()
if len(results) != 1:
raise RuntimeError
def test_action(self):
"""
Test the 'action' callback
"""
results = self.DummyList()
barrier = self.Barrier(self.N, action=AppendTrue(results))
self.run_threads(self._test_action_f, (barrier, results))
self.assertEqual(len(results), 1)
@classmethod
def _test_abort_f(cls, barrier, results1, results2):
try:
i = barrier.wait()
if i == cls.N//2:
raise RuntimeError
barrier.wait()
results1.append(True)
except threading.BrokenBarrierError:
results2.append(True)
except RuntimeError:
barrier.abort()
def test_abort(self):
"""
Test that an abort will put the barrier in a broken state
"""
results1 = self.DummyList()
results2 = self.DummyList()
self.run_threads(self._test_abort_f,
(self.barrier, results1, results2))
self.assertEqual(len(results1), 0)
self.assertEqual(len(results2), self.N-1)
self.assertTrue(self.barrier.broken)
@classmethod
def _test_reset_f(cls, barrier, results1, results2, results3):
i = barrier.wait()
if i == cls.N//2:
# Wait until the other threads are all in the barrier.
while barrier.n_waiting < cls.N-1:
time.sleep(0.001)
barrier.reset()
else:
try:
barrier.wait()
results1.append(True)
except threading.BrokenBarrierError:
results2.append(True)
# Now, pass the barrier again
barrier.wait()
results3.append(True)
def test_reset(self):
"""
Test that a 'reset' on a barrier frees the waiting threads
"""
results1 = self.DummyList()
results2 = self.DummyList()
results3 = self.DummyList()
self.run_threads(self._test_reset_f,
(self.barrier, results1, results2, results3))
self.assertEqual(len(results1), 0)
self.assertEqual(len(results2), self.N-1)
self.assertEqual(len(results3), self.N)
@classmethod
def _test_abort_and_reset_f(cls, barrier, barrier2,
results1, results2, results3):
try:
i = barrier.wait()
if i == cls.N//2:
raise RuntimeError
barrier.wait()
results1.append(True)
except threading.BrokenBarrierError:
results2.append(True)
except RuntimeError:
barrier.abort()
# Synchronize and reset the barrier. Must synchronize first so
# that everyone has left it when we reset, and after so that no
# one enters it before the reset.
if barrier2.wait() == cls.N//2:
barrier.reset()
barrier2.wait()
barrier.wait()
results3.append(True)
def test_abort_and_reset(self):
"""
Test that a barrier can be reset after being broken.
"""
results1 = self.DummyList()
results2 = self.DummyList()
results3 = self.DummyList()
barrier2 = self.Barrier(self.N)
self.run_threads(self._test_abort_and_reset_f,
(self.barrier, barrier2, results1, results2, results3))
self.assertEqual(len(results1), 0)
self.assertEqual(len(results2), self.N-1)
self.assertEqual(len(results3), self.N)
@classmethod
def _test_timeout_f(cls, barrier, results):
i = barrier.wait()
if i == cls.N//2:
# One thread is late!
time.sleep(1.0)
try:
barrier.wait(0.5)
except threading.BrokenBarrierError:
results.append(True)
def test_timeout(self):
"""
Test wait(timeout)
"""
results = self.DummyList()
self.run_threads(self._test_timeout_f, (self.barrier, results))
self.assertEqual(len(results), self.barrier.parties)
@classmethod
def _test_default_timeout_f(cls, barrier, results):
i = barrier.wait(cls.defaultTimeout)
if i == cls.N//2:
# One thread is later than the default timeout
time.sleep(1.0)
try:
barrier.wait()
except threading.BrokenBarrierError:
results.append(True)
def test_default_timeout(self):
"""
Test the barrier's default timeout
"""
barrier = self.Barrier(self.N, timeout=0.5)
results = self.DummyList()
self.run_threads(self._test_default_timeout_f, (barrier, results))
self.assertEqual(len(results), barrier.parties)
def test_single_thread(self):
b = self.Barrier(1)
b.wait()
b.wait()
@classmethod
def _test_thousand_f(cls, barrier, passes, conn, lock):
for i in range(passes):
barrier.wait()
with lock:
conn.send(i)
def test_thousand(self):
if self.TYPE == 'manager':
self.skipTest('test not appropriate for {}'.format(self.TYPE))
passes = 1000
lock = self.Lock()
conn, child_conn = self.Pipe(False)
for j in range(self.N):
p = self.Process(target=self._test_thousand_f,
args=(self.barrier, passes, child_conn, lock))
p.start()
for i in range(passes):
for j in range(self.N):
self.assertEqual(conn.recv(), i)
#
#
#
class _TestValue(BaseTestCase):
ALLOWED_TYPES = ('processes',)
codes_values = [
('i', 4343, 24234),
('d', 3.625, -4.25),
('h', -232, 234),
('c', latin('x'), latin('y'))
]
def setUp(self):
if not HAS_SHAREDCTYPES:
self.skipTest("requires multiprocessing.sharedctypes")
@classmethod
def _test(cls, values):
for sv, cv in zip(values, cls.codes_values):
sv.value = cv[2]
def test_value(self, raw=False):
if raw:
values = [self.RawValue(code, value)
for code, value, _ in self.codes_values]
else:
values = [self.Value(code, value)
for code, value, _ in self.codes_values]
for sv, cv in zip(values, self.codes_values):
self.assertEqual(sv.value, cv[1])
proc = self.Process(target=self._test, args=(values,))
proc.daemon = True
proc.start()
proc.join()
for sv, cv in zip(values, self.codes_values):
self.assertEqual(sv.value, cv[2])
def test_rawvalue(self):
self.test_value(raw=True)
def test_getobj_getlock(self):
val1 = self.Value('i', 5)
lock1 = val1.get_lock()
obj1 = val1.get_obj()
val2 = self.Value('i', 5, lock=None)
lock2 = val2.get_lock()
obj2 = val2.get_obj()
lock = self.Lock()
val3 = self.Value('i', 5, lock=lock)
lock3 = val3.get_lock()
obj3 = val3.get_obj()
self.assertEqual(lock, lock3)
arr4 = self.Value('i', 5, lock=False)
self.assertFalse(hasattr(arr4, 'get_lock'))
self.assertFalse(hasattr(arr4, 'get_obj'))
self.assertRaises(AttributeError, self.Value, 'i', 5, lock='navalue')
arr5 = self.RawValue('i', 5)
self.assertFalse(hasattr(arr5, 'get_lock'))
self.assertFalse(hasattr(arr5, 'get_obj'))
class _TestArray(BaseTestCase):
ALLOWED_TYPES = ('processes',)
@classmethod
def f(cls, seq):
for i in range(1, len(seq)):
seq[i] += seq[i-1]
@unittest.skipIf(c_int is None, "requires _ctypes")
def test_array(self, raw=False):
seq = [680, 626, 934, 821, 150, 233, 548, 982, 714, 831]
if raw:
arr = self.RawArray('i', seq)
else:
arr = self.Array('i', seq)
self.assertEqual(len(arr), len(seq))
self.assertEqual(arr[3], seq[3])
self.assertEqual(list(arr[2:7]), list(seq[2:7]))
arr[4:8] = seq[4:8] = array.array('i', [1, 2, 3, 4])
self.assertEqual(list(arr[:]), seq)
self.f(seq)
p = self.Process(target=self.f, args=(arr,))
p.daemon = True
p.start()
p.join()
self.assertEqual(list(arr[:]), seq)
@unittest.skipIf(c_int is None, "requires _ctypes")
def test_array_from_size(self):
size = 10
# Test for zeroing (see issue #11675).
# The repetition below strengthens the test by increasing the chances
# of previously allocated non-zero memory being used for the new array
# on the 2nd and 3rd loops.
for _ in range(3):
arr = self.Array('i', size)
self.assertEqual(len(arr), size)
self.assertEqual(list(arr), [0] * size)
arr[:] = range(10)
self.assertEqual(list(arr), list(range(10)))
del arr
@unittest.skipIf(c_int is None, "requires _ctypes")
def test_rawarray(self):
self.test_array(raw=True)
@unittest.skipIf(c_int is None, "requires _ctypes")
def test_getobj_getlock_obj(self):
arr1 = self.Array('i', list(range(10)))
lock1 = arr1.get_lock()
obj1 = arr1.get_obj()
arr2 = self.Array('i', list(range(10)), lock=None)
lock2 = arr2.get_lock()
obj2 = arr2.get_obj()
lock = self.Lock()
arr3 = self.Array('i', list(range(10)), lock=lock)
lock3 = arr3.get_lock()
obj3 = arr3.get_obj()
self.assertEqual(lock, lock3)
arr4 = self.Array('i', range(10), lock=False)
self.assertFalse(hasattr(arr4, 'get_lock'))
self.assertFalse(hasattr(arr4, 'get_obj'))
self.assertRaises(AttributeError,
self.Array, 'i', range(10), lock='notalock')
arr5 = self.RawArray('i', range(10))
self.assertFalse(hasattr(arr5, 'get_lock'))
self.assertFalse(hasattr(arr5, 'get_obj'))
#
#
#
class _TestContainers(BaseTestCase):
ALLOWED_TYPES = ('manager',)
def test_list(self):
a = self.list(list(range(10)))
self.assertEqual(a[:], list(range(10)))
b = self.list()
self.assertEqual(b[:], [])
b.extend(list(range(5)))
self.assertEqual(b[:], list(range(5)))
self.assertEqual(b[2], 2)
self.assertEqual(b[2:10], [2,3,4])
b *= 2
self.assertEqual(b[:], [0, 1, 2, 3, 4, 0, 1, 2, 3, 4])
self.assertEqual(b + [5, 6], [0, 1, 2, 3, 4, 0, 1, 2, 3, 4, 5, 6])
self.assertEqual(a[:], list(range(10)))
d = [a, b]
e = self.list(d)
self.assertEqual(
e[:],
[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 0, 1, 2, 3, 4]]
)
f = self.list([a])
a.append('hello')
self.assertEqual(f[:], [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 'hello']])
def test_dict(self):
d = self.dict()
indices = list(range(65, 70))
for i in indices:
d[i] = chr(i)
self.assertEqual(d.copy(), dict((i, chr(i)) for i in indices))
self.assertEqual(sorted(d.keys()), indices)
self.assertEqual(sorted(d.values()), [chr(i) for i in indices])
self.assertEqual(sorted(d.items()), [(i, chr(i)) for i in indices])
def test_namespace(self):
n = self.Namespace()
n.name = 'Bob'
n.job = 'Builder'
n._hidden = 'hidden'
self.assertEqual((n.name, n.job), ('Bob', 'Builder'))
del n.job
self.assertEqual(str(n), "Namespace(name='Bob')")
self.assertTrue(hasattr(n, 'name'))
self.assertTrue(not hasattr(n, 'job'))
#
#
#
def sqr(x, wait=0.0):
time.sleep(wait)
return x*x
def mul(x, y):
return x*y
class _TestPool(BaseTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.pool = cls.Pool(4)
@classmethod
def tearDownClass(cls):
cls.pool.terminate()
cls.pool.join()
cls.pool = None
super().tearDownClass()
def test_apply(self):
papply = self.pool.apply
self.assertEqual(papply(sqr, (5,)), sqr(5))
self.assertEqual(papply(sqr, (), {'x':3}), sqr(x=3))
def test_map(self):
pmap = self.pool.map
self.assertEqual(pmap(sqr, list(range(10))), list(map(sqr, list(range(10)))))
self.assertEqual(pmap(sqr, list(range(100)), chunksize=20),
list(map(sqr, list(range(100)))))
def test_starmap(self):
psmap = self.pool.starmap
tuples = list(zip(range(10), range(9,-1, -1)))
self.assertEqual(psmap(mul, tuples),
list(itertools.starmap(mul, tuples)))
tuples = list(zip(range(100), range(99,-1, -1)))
self.assertEqual(psmap(mul, tuples, chunksize=20),
list(itertools.starmap(mul, tuples)))
def test_starmap_async(self):
tuples = list(zip(range(100), range(99,-1, -1)))
self.assertEqual(self.pool.starmap_async(mul, tuples).get(),
list(itertools.starmap(mul, tuples)))
def test_map_async(self):
self.assertEqual(self.pool.map_async(sqr, list(range(10))).get(),
list(map(sqr, list(range(10)))))
def test_map_async_callbacks(self):
call_args = self.manager.list() if self.TYPE == 'manager' else []
self.pool.map_async(int, ['1'],
callback=call_args.append,
error_callback=call_args.append).wait()
self.assertEqual(1, len(call_args))
self.assertEqual([1], call_args[0])
self.pool.map_async(int, ['a'],
callback=call_args.append,
error_callback=call_args.append).wait()
self.assertEqual(2, len(call_args))
self.assertIsInstance(call_args[1], ValueError)
    def test_map_unpicklable(self):
# Issue #19425 -- failure to pickle should not cause a hang
if self.TYPE == 'threads':
self.skipTest('test not appropriate for {}'.format(self.TYPE))
class A(object):
def __reduce__(self):
raise RuntimeError('cannot pickle')
with self.assertRaises(RuntimeError):
self.pool.map(sqr, [A()]*10)
def test_map_chunksize(self):
try:
self.pool.map_async(sqr, [], chunksize=1).get(timeout=TIMEOUT1)
except multiprocessing.TimeoutError:
self.fail("pool.map_async with chunksize stalled on null list")
def test_async(self):
res = self.pool.apply_async(sqr, (7, TIMEOUT1,))
get = TimingWrapper(res.get)
self.assertEqual(get(), 49)
self.assertTimingAlmostEqual(get.elapsed, TIMEOUT1)
def test_async_timeout(self):
res = self.pool.apply_async(sqr, (6, TIMEOUT2 + 1.0))
get = TimingWrapper(res.get)
self.assertRaises(multiprocessing.TimeoutError, get, timeout=TIMEOUT2)
self.assertTimingAlmostEqual(get.elapsed, TIMEOUT2)
def test_imap(self):
it = self.pool.imap(sqr, list(range(10)))
self.assertEqual(list(it), list(map(sqr, list(range(10)))))
it = self.pool.imap(sqr, list(range(10)))
for i in range(10):
self.assertEqual(next(it), i*i)
self.assertRaises(StopIteration, it.__next__)
it = self.pool.imap(sqr, list(range(1000)), chunksize=100)
for i in range(1000):
self.assertEqual(next(it), i*i)
self.assertRaises(StopIteration, it.__next__)
def test_imap_unordered(self):
it = self.pool.imap_unordered(sqr, list(range(1000)))
self.assertEqual(sorted(it), list(map(sqr, list(range(1000)))))
it = self.pool.imap_unordered(sqr, list(range(1000)), chunksize=53)
self.assertEqual(sorted(it), list(map(sqr, list(range(1000)))))
def test_make_pool(self):
self.assertRaises(ValueError, multiprocessing.Pool, -1)
self.assertRaises(ValueError, multiprocessing.Pool, 0)
p = multiprocessing.Pool(3)
self.assertEqual(3, len(p._pool))
p.close()
p.join()
def test_terminate(self):
result = self.pool.map_async(
time.sleep, [0.1 for i in range(10000)], chunksize=1
)
self.pool.terminate()
join = TimingWrapper(self.pool.join)
join()
self.assertLess(join.elapsed, 0.5)
def test_empty_iterable(self):
# See Issue 12157
p = self.Pool(1)
self.assertEqual(p.map(sqr, []), [])
self.assertEqual(list(p.imap(sqr, [])), [])
self.assertEqual(list(p.imap_unordered(sqr, [])), [])
self.assertEqual(p.map_async(sqr, []).get(), [])
p.close()
p.join()
def test_context(self):
if self.TYPE == 'processes':
L = list(range(10))
expected = [sqr(i) for i in L]
with multiprocessing.Pool(2) as p:
r = p.map_async(sqr, L)
self.assertEqual(r.get(), expected)
self.assertRaises(ValueError, p.map_async, sqr, L)
@classmethod
def _test_traceback(cls):
raise RuntimeError(123) # some comment
def test_traceback(self):
        # We want to ensure that the traceback from the child process is
# contained in the traceback raised in the main process.
if self.TYPE == 'processes':
with self.Pool(1) as p:
try:
p.apply(self._test_traceback)
except Exception as e:
exc = e
else:
raise AssertionError('expected RuntimeError')
self.assertIs(type(exc), RuntimeError)
self.assertEqual(exc.args, (123,))
cause = exc.__cause__
self.assertIs(type(cause), multiprocessing.pool.RemoteTraceback)
self.assertIn('raise RuntimeError(123) # some comment', cause.tb)
with test.support.captured_stderr() as f1:
try:
raise exc
except RuntimeError:
sys.excepthook(*sys.exc_info())
self.assertIn('raise RuntimeError(123) # some comment',
f1.getvalue())
@classmethod
def _test_wrapped_exception(cls):
raise RuntimeError('foo')
def test_wrapped_exception(self):
# Issue #20980: Should not wrap exception when using thread pool
with self.Pool(1) as p:
with self.assertRaises(RuntimeError):
p.apply(self._test_wrapped_exception)
def raising():
raise KeyError("key")
def unpickleable_result():
return lambda: 42
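# (the lambda above is deliberately unpicklable, so returning it from a
# worker forces the result-encoding failure exercised below)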
class _TestPoolWorkerErrors(BaseTestCase):
ALLOWED_TYPES = ('processes', )
def test_async_error_callback(self):
p = multiprocessing.Pool(2)
scratchpad = [None]
def errback(exc):
scratchpad[0] = exc
res = p.apply_async(raising, error_callback=errback)
self.assertRaises(KeyError, res.get)
self.assertTrue(scratchpad[0])
self.assertIsInstance(scratchpad[0], KeyError)
p.close()
p.join()
def test_unpickleable_result(self):
from multiprocessing.pool import MaybeEncodingError
p = multiprocessing.Pool(2)
# Make sure we don't lose pool processes because of encoding errors.
for iteration in range(20):
scratchpad = [None]
def errback(exc):
scratchpad[0] = exc
res = p.apply_async(unpickleable_result, error_callback=errback)
self.assertRaises(MaybeEncodingError, res.get)
wrapped = scratchpad[0]
self.assertTrue(wrapped)
self.assertIsInstance(scratchpad[0], MaybeEncodingError)
self.assertIsNotNone(wrapped.exc)
self.assertIsNotNone(wrapped.value)
p.close()
p.join()
class _TestPoolWorkerLifetime(BaseTestCase):
ALLOWED_TYPES = ('processes', )
def test_pool_worker_lifetime(self):
p = multiprocessing.Pool(3, maxtasksperchild=10)
self.assertEqual(3, len(p._pool))
origworkerpids = [w.pid for w in p._pool]
# Run many tasks so each worker gets replaced (hopefully)
results = []
for i in range(100):
results.append(p.apply_async(sqr, (i, )))
# Fetch the results and verify we got the right answers,
# also ensuring all the tasks have completed.
for (j, res) in enumerate(results):
self.assertEqual(res.get(), sqr(j))
# Refill the pool
p._repopulate_pool()
# Wait until all workers are alive
# (countdown * DELTA = 5 seconds max startup process time)
countdown = 50
while countdown and not all(w.is_alive() for w in p._pool):
countdown -= 1
time.sleep(DELTA)
finalworkerpids = [w.pid for w in p._pool]
# All pids should be assigned. See issue #7805.
self.assertNotIn(None, origworkerpids)
self.assertNotIn(None, finalworkerpids)
# Finally, check that the worker pids have changed
self.assertNotEqual(sorted(origworkerpids), sorted(finalworkerpids))
p.close()
p.join()
def test_pool_worker_lifetime_early_close(self):
# Issue #10332: closing a pool whose workers have limited lifetimes
# before all the tasks completed would make join() hang.
p = multiprocessing.Pool(3, maxtasksperchild=1)
results = []
for i in range(6):
results.append(p.apply_async(sqr, (i, 0.3)))
p.close()
p.join()
# check the results
for (j, res) in enumerate(results):
self.assertEqual(res.get(), sqr(j))
#
# Test of creating a customized manager class
#
from multiprocessing.managers import BaseManager, BaseProxy, RemoteError
class FooBar(object):
def f(self):
return 'f()'
def g(self):
raise ValueError
def _h(self):
return '_h()'
def baz():
for i in range(10):
yield i*i
class IteratorProxy(BaseProxy):
_exposed_ = ('__next__',)
def __iter__(self):
return self
def __next__(self):
return self._callmethod('__next__')
class MyManager(BaseManager):
pass
MyManager.register('Foo', callable=FooBar)
MyManager.register('Bar', callable=FooBar, exposed=('f', '_h'))
MyManager.register('baz', callable=baz, proxytype=IteratorProxy)
class _TestMyManager(BaseTestCase):
ALLOWED_TYPES = ('manager',)
def test_mymanager(self):
manager = MyManager()
manager.start()
self.common(manager)
manager.shutdown()
# If the manager process exited cleanly then the exitcode
# will be zero. Otherwise (after a short timeout)
# terminate() is used, resulting in an exitcode of -SIGTERM.
self.assertEqual(manager._process.exitcode, 0)
def test_mymanager_context(self):
with MyManager() as manager:
self.common(manager)
self.assertEqual(manager._process.exitcode, 0)
def test_mymanager_context_prestarted(self):
manager = MyManager()
manager.start()
with manager:
self.common(manager)
self.assertEqual(manager._process.exitcode, 0)
def common(self, manager):
foo = manager.Foo()
bar = manager.Bar()
baz = manager.baz()
foo_methods = [name for name in ('f', 'g', '_h') if hasattr(foo, name)]
bar_methods = [name for name in ('f', 'g', '_h') if hasattr(bar, name)]
self.assertEqual(foo_methods, ['f', 'g'])
self.assertEqual(bar_methods, ['f', '_h'])
self.assertEqual(foo.f(), 'f()')
self.assertRaises(ValueError, foo.g)
self.assertEqual(foo._callmethod('f'), 'f()')
self.assertRaises(RemoteError, foo._callmethod, '_h')
self.assertEqual(bar.f(), 'f()')
self.assertEqual(bar._h(), '_h()')
self.assertEqual(bar._callmethod('f'), 'f()')
self.assertEqual(bar._callmethod('_h'), '_h()')
self.assertEqual(list(baz), [i*i for i in range(10)])
#
# Test of connecting to a remote server and using xmlrpclib for serialization
#
_queue = pyqueue.Queue()
def get_queue():
return _queue
class QueueManager(BaseManager):
'''manager class used by server process'''
QueueManager.register('get_queue', callable=get_queue)
class QueueManager2(BaseManager):
'''manager class which specifies the same interface as QueueManager'''
QueueManager2.register('get_queue')
SERIALIZER = 'xmlrpclib'
class _TestRemoteManager(BaseTestCase):
ALLOWED_TYPES = ('manager',)
@classmethod
def _putter(cls, address, authkey):
manager = QueueManager2(
address=address, authkey=authkey, serializer=SERIALIZER
)
manager.connect()
queue = manager.get_queue()
queue.put(('hello world', None, True, 2.25))
def test_remote(self):
authkey = os.urandom(32)
manager = QueueManager(
address=(test.support.HOST, 0), authkey=authkey, serializer=SERIALIZER
)
manager.start()
p = self.Process(target=self._putter, args=(manager.address, authkey))
p.daemon = True
p.start()
manager2 = QueueManager2(
address=manager.address, authkey=authkey, serializer=SERIALIZER
)
manager2.connect()
queue = manager2.get_queue()
        # Note that xmlrpclib will deserialize the object as a list, not a tuple
self.assertEqual(queue.get(), ['hello world', None, True, 2.25])
# Because we are using xmlrpclib for serialization instead of
# pickle this will cause a serialization error.
self.assertRaises(Exception, queue.put, time.sleep)
# Make queue finalizer run before the server is stopped
del queue
manager.shutdown()
class _TestManagerRestart(BaseTestCase):
@classmethod
def _putter(cls, address, authkey):
manager = QueueManager(
address=address, authkey=authkey, serializer=SERIALIZER)
manager.connect()
queue = manager.get_queue()
queue.put('hello world')
def test_rapid_restart(self):
authkey = os.urandom(32)
manager = QueueManager(
address=(test.support.HOST, 0), authkey=authkey, serializer=SERIALIZER)
srvr = manager.get_server()
addr = srvr.address
# Close the connection.Listener socket which gets opened as a part
# of manager.get_server(). It's not needed for the test.
srvr.listener.close()
manager.start()
p = self.Process(target=self._putter, args=(manager.address, authkey))
p.daemon = True
p.start()
queue = manager.get_queue()
self.assertEqual(queue.get(), 'hello world')
del queue
manager.shutdown()
manager = QueueManager(
address=addr, authkey=authkey, serializer=SERIALIZER)
try:
manager.start()
except OSError as e:
if e.errno != errno.EADDRINUSE:
raise
# Retry after some time, in case the old socket was lingering
# (sporadic failure on buildbots)
time.sleep(1.0)
manager = QueueManager(
address=addr, authkey=authkey, serializer=SERIALIZER)
manager.shutdown()
#
#
#
SENTINEL = latin('')
class _TestConnection(BaseTestCase):
ALLOWED_TYPES = ('processes', 'threads')
@classmethod
def _echo(cls, conn):
for msg in iter(conn.recv_bytes, SENTINEL):
conn.send_bytes(msg)
conn.close()
def test_connection(self):
conn, child_conn = self.Pipe()
p = self.Process(target=self._echo, args=(child_conn,))
p.daemon = True
p.start()
seq = [1, 2.25, None]
msg = latin('hello world')
longmsg = msg * 10
arr = array.array('i', list(range(4)))
if self.TYPE == 'processes':
self.assertEqual(type(conn.fileno()), int)
self.assertEqual(conn.send(seq), None)
self.assertEqual(conn.recv(), seq)
self.assertEqual(conn.send_bytes(msg), None)
self.assertEqual(conn.recv_bytes(), msg)
if self.TYPE == 'processes':
buffer = array.array('i', [0]*10)
expected = list(arr) + [0] * (10 - len(arr))
self.assertEqual(conn.send_bytes(arr), None)
self.assertEqual(conn.recv_bytes_into(buffer),
len(arr) * buffer.itemsize)
self.assertEqual(list(buffer), expected)
buffer = array.array('i', [0]*10)
expected = [0] * 3 + list(arr) + [0] * (10 - 3 - len(arr))
self.assertEqual(conn.send_bytes(arr), None)
self.assertEqual(conn.recv_bytes_into(buffer, 3 * buffer.itemsize),
len(arr) * buffer.itemsize)
self.assertEqual(list(buffer), expected)
buffer = bytearray(latin(' ' * 40))
self.assertEqual(conn.send_bytes(longmsg), None)
try:
res = conn.recv_bytes_into(buffer)
except multiprocessing.BufferTooShort as e:
self.assertEqual(e.args, (longmsg,))
else:
self.fail('expected BufferTooShort, got %s' % res)
poll = TimingWrapper(conn.poll)
self.assertEqual(poll(), False)
self.assertTimingAlmostEqual(poll.elapsed, 0)
self.assertEqual(poll(-1), False)
self.assertTimingAlmostEqual(poll.elapsed, 0)
self.assertEqual(poll(TIMEOUT1), False)
self.assertTimingAlmostEqual(poll.elapsed, TIMEOUT1)
conn.send(None)
time.sleep(.1)
self.assertEqual(poll(TIMEOUT1), True)
self.assertTimingAlmostEqual(poll.elapsed, 0)
self.assertEqual(conn.recv(), None)
really_big_msg = latin('X') * (1024 * 1024 * 16) # 16Mb
conn.send_bytes(really_big_msg)
self.assertEqual(conn.recv_bytes(), really_big_msg)
conn.send_bytes(SENTINEL) # tell child to quit
child_conn.close()
if self.TYPE == 'processes':
self.assertEqual(conn.readable, True)
self.assertEqual(conn.writable, True)
self.assertRaises(EOFError, conn.recv)
self.assertRaises(EOFError, conn.recv_bytes)
p.join()
def test_duplex_false(self):
reader, writer = self.Pipe(duplex=False)
self.assertEqual(writer.send(1), None)
self.assertEqual(reader.recv(), 1)
if self.TYPE == 'processes':
self.assertEqual(reader.readable, True)
self.assertEqual(reader.writable, False)
self.assertEqual(writer.readable, False)
self.assertEqual(writer.writable, True)
self.assertRaises(OSError, reader.send, 2)
self.assertRaises(OSError, writer.recv)
self.assertRaises(OSError, writer.poll)
def test_spawn_close(self):
        # We test that a pipe connection can be closed by the parent
        # process immediately after the child is spawned. On Windows
        # this sometimes failed on older versions because child_conn
        # would be closed before the child got a chance to duplicate it.
conn, child_conn = self.Pipe()
p = self.Process(target=self._echo, args=(child_conn,))
p.daemon = True
p.start()
child_conn.close() # this might complete before child initializes
msg = latin('hello')
conn.send_bytes(msg)
self.assertEqual(conn.recv_bytes(), msg)
conn.send_bytes(SENTINEL)
conn.close()
p.join()
def test_sendbytes(self):
if self.TYPE != 'processes':
self.skipTest('test not appropriate for {}'.format(self.TYPE))
msg = latin('abcdefghijklmnopqrstuvwxyz')
a, b = self.Pipe()
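        # send_bytes(buf, offset, size) sends buf[offset:offset+size]; the
        # calls below cover full sends, slices, empty slices and bad ranges.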
a.send_bytes(msg)
self.assertEqual(b.recv_bytes(), msg)
a.send_bytes(msg, 5)
self.assertEqual(b.recv_bytes(), msg[5:])
a.send_bytes(msg, 7, 8)
self.assertEqual(b.recv_bytes(), msg[7:7+8])
a.send_bytes(msg, 26)
self.assertEqual(b.recv_bytes(), latin(''))
a.send_bytes(msg, 26, 0)
self.assertEqual(b.recv_bytes(), latin(''))
self.assertRaises(ValueError, a.send_bytes, msg, 27)
self.assertRaises(ValueError, a.send_bytes, msg, 22, 5)
self.assertRaises(ValueError, a.send_bytes, msg, 26, 1)
self.assertRaises(ValueError, a.send_bytes, msg, -1)
self.assertRaises(ValueError, a.send_bytes, msg, 4, -1)
@classmethod
def _is_fd_assigned(cls, fd):
try:
os.fstat(fd)
except OSError as e:
if e.errno == errno.EBADF:
return False
raise
else:
return True
@classmethod
def _writefd(cls, conn, data, create_dummy_fds=False):
if create_dummy_fds:
for i in range(0, 256):
if not cls._is_fd_assigned(i):
os.dup2(conn.fileno(), i)
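        # With every fd below 256 now occupied, the handle received next is
        # forced onto a large descriptor number (exercises issue #11657).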
fd = reduction.recv_handle(conn)
if msvcrt:
fd = msvcrt.open_osfhandle(fd, os.O_WRONLY)
os.write(fd, data)
os.close(fd)
@unittest.skipUnless(HAS_REDUCTION, "test needs multiprocessing.reduction")
def test_fd_transfer(self):
if self.TYPE != 'processes':
self.skipTest("only makes sense with processes")
conn, child_conn = self.Pipe(duplex=True)
p = self.Process(target=self._writefd, args=(child_conn, b"foo"))
p.daemon = True
p.start()
self.addCleanup(test.support.unlink, test.support.TESTFN)
with open(test.support.TESTFN, "wb") as f:
fd = f.fileno()
if msvcrt:
fd = msvcrt.get_osfhandle(fd)
reduction.send_handle(conn, fd, p.pid)
p.join()
with open(test.support.TESTFN, "rb") as f:
self.assertEqual(f.read(), b"foo")
@unittest.skipUnless(HAS_REDUCTION, "test needs multiprocessing.reduction")
@unittest.skipIf(sys.platform == "win32",
"test semantics don't make sense on Windows")
@unittest.skipIf(MAXFD <= 256,
"largest assignable fd number is too small")
@unittest.skipUnless(hasattr(os, "dup2"),
"test needs os.dup2()")
def test_large_fd_transfer(self):
# With fd > 256 (issue #11657)
if self.TYPE != 'processes':
self.skipTest("only makes sense with processes")
conn, child_conn = self.Pipe(duplex=True)
p = self.Process(target=self._writefd, args=(child_conn, b"bar", True))
p.daemon = True
p.start()
self.addCleanup(test.support.unlink, test.support.TESTFN)
with open(test.support.TESTFN, "wb") as f:
fd = f.fileno()
for newfd in range(256, MAXFD):
if not self._is_fd_assigned(newfd):
break
else:
self.fail("could not find an unassigned large file descriptor")
os.dup2(fd, newfd)
try:
reduction.send_handle(conn, newfd, p.pid)
finally:
os.close(newfd)
p.join()
with open(test.support.TESTFN, "rb") as f:
self.assertEqual(f.read(), b"bar")
@classmethod
    def _send_data_without_fd(cls, conn):
os.write(conn.fileno(), b"\0")
@unittest.skipUnless(HAS_REDUCTION, "test needs multiprocessing.reduction")
@unittest.skipIf(sys.platform == "win32", "doesn't make sense on Windows")
def test_missing_fd_transfer(self):
# Check that exception is raised when received data is not
# accompanied by a file descriptor in ancillary data.
if self.TYPE != 'processes':
self.skipTest("only makes sense with processes")
conn, child_conn = self.Pipe(duplex=True)
p = self.Process(target=self._send_data_without_fd, args=(child_conn,))
p.daemon = True
p.start()
self.assertRaises(RuntimeError, reduction.recv_handle, conn)
p.join()
def test_context(self):
a, b = self.Pipe()
with a, b:
a.send(1729)
self.assertEqual(b.recv(), 1729)
if self.TYPE == 'processes':
self.assertFalse(a.closed)
self.assertFalse(b.closed)
if self.TYPE == 'processes':
self.assertTrue(a.closed)
self.assertTrue(b.closed)
self.assertRaises(OSError, a.recv)
self.assertRaises(OSError, b.recv)
class _TestListener(BaseTestCase):
ALLOWED_TYPES = ('processes',)
def test_multiple_bind(self):
for family in self.connection.families:
l = self.connection.Listener(family=family)
self.addCleanup(l.close)
self.assertRaises(OSError, self.connection.Listener,
l.address, family)
def test_context(self):
with self.connection.Listener() as l:
with self.connection.Client(l.address) as c:
with l.accept() as d:
c.send(1729)
self.assertEqual(d.recv(), 1729)
if self.TYPE == 'processes':
self.assertRaises(OSError, l.accept)
class _TestListenerClient(BaseTestCase):
ALLOWED_TYPES = ('processes', 'threads')
@classmethod
def _test(cls, address):
conn = cls.connection.Client(address)
conn.send('hello')
conn.close()
def test_listener_client(self):
for family in self.connection.families:
l = self.connection.Listener(family=family)
p = self.Process(target=self._test, args=(l.address,))
p.daemon = True
p.start()
conn = l.accept()
self.assertEqual(conn.recv(), 'hello')
p.join()
l.close()
def test_issue14725(self):
l = self.connection.Listener()
p = self.Process(target=self._test, args=(l.address,))
p.daemon = True
p.start()
time.sleep(1)
        # On Windows the client process should by now have connected,
        # written data and closed the pipe handle. This causes
        # ConnectNamedPipe() to fail with ERROR_NO_DATA. See Issue
        # 14725.
conn = l.accept()
self.assertEqual(conn.recv(), 'hello')
conn.close()
p.join()
l.close()
def test_issue16955(self):
for fam in self.connection.families:
l = self.connection.Listener(family=fam)
c = self.connection.Client(l.address)
a = l.accept()
a.send_bytes(b"hello")
self.assertTrue(c.poll(1))
a.close()
c.close()
l.close()
class _TestPoll(BaseTestCase):
ALLOWED_TYPES = ('processes', 'threads')
def test_empty_string(self):
a, b = self.Pipe()
self.assertEqual(a.poll(), False)
b.send_bytes(b'')
self.assertEqual(a.poll(), True)
self.assertEqual(a.poll(), True)
@classmethod
def _child_strings(cls, conn, strings):
for s in strings:
time.sleep(0.1)
conn.send_bytes(s)
conn.close()
def test_strings(self):
strings = (b'hello', b'', b'a', b'b', b'', b'bye', b'', b'lop')
a, b = self.Pipe()
p = self.Process(target=self._child_strings, args=(b, strings))
p.start()
for s in strings:
for i in range(200):
if a.poll(0.01):
break
x = a.recv_bytes()
self.assertEqual(s, x)
p.join()
@classmethod
def _child_boundaries(cls, r):
        # Polling may "pull" a message into the child process, but we
        # don't want it to pull only part of a message, as that would
        # corrupt the pipe for any other processes which might later
        # read from it.
r.poll(5)
def test_boundaries(self):
r, w = self.Pipe(False)
p = self.Process(target=self._child_boundaries, args=(r,))
p.start()
time.sleep(2)
L = [b"first", b"second"]
for obj in L:
w.send_bytes(obj)
w.close()
p.join()
self.assertIn(r.recv_bytes(), L)
@classmethod
def _child_dont_merge(cls, b):
b.send_bytes(b'a')
b.send_bytes(b'b')
b.send_bytes(b'cd')
def test_dont_merge(self):
a, b = self.Pipe()
self.assertEqual(a.poll(0.0), False)
self.assertEqual(a.poll(0.1), False)
p = self.Process(target=self._child_dont_merge, args=(b,))
p.start()
self.assertEqual(a.recv_bytes(), b'a')
self.assertEqual(a.poll(1.0), True)
self.assertEqual(a.poll(1.0), True)
self.assertEqual(a.recv_bytes(), b'b')
self.assertEqual(a.poll(1.0), True)
self.assertEqual(a.poll(1.0), True)
self.assertEqual(a.poll(0.0), True)
self.assertEqual(a.recv_bytes(), b'cd')
p.join()
#
# Test of sending connection and socket objects between processes
#
@unittest.skipUnless(HAS_REDUCTION, "test needs multiprocessing.reduction")
class _TestPicklingConnections(BaseTestCase):
ALLOWED_TYPES = ('processes',)
@classmethod
def tearDownClass(cls):
from multiprocessing import resource_sharer
resource_sharer.stop(timeout=5)
@classmethod
def _listener(cls, conn, families):
for fam in families:
l = cls.connection.Listener(family=fam)
conn.send(l.address)
new_conn = l.accept()
conn.send(new_conn)
new_conn.close()
l.close()
l = socket.socket()
l.bind((test.support.HOST, 0))
l.listen(1)
conn.send(l.getsockname())
new_conn, addr = l.accept()
conn.send(new_conn)
new_conn.close()
l.close()
conn.recv()
@classmethod
def _remote(cls, conn):
for (address, msg) in iter(conn.recv, None):
client = cls.connection.Client(address)
client.send(msg.upper())
client.close()
address, msg = conn.recv()
client = socket.socket()
client.connect(address)
client.sendall(msg.upper())
client.close()
conn.close()
def test_pickling(self):
families = self.connection.families
lconn, lconn0 = self.Pipe()
lp = self.Process(target=self._listener, args=(lconn0, families))
lp.daemon = True
lp.start()
lconn0.close()
rconn, rconn0 = self.Pipe()
rp = self.Process(target=self._remote, args=(rconn0,))
rp.daemon = True
rp.start()
rconn0.close()
for fam in families:
msg = ('This connection uses family %s' % fam).encode('ascii')
address = lconn.recv()
rconn.send((address, msg))
new_conn = lconn.recv()
self.assertEqual(new_conn.recv(), msg.upper())
rconn.send(None)
msg = latin('This connection uses a normal socket')
address = lconn.recv()
rconn.send((address, msg))
new_conn = lconn.recv()
buf = []
while True:
s = new_conn.recv(100)
if not s:
break
buf.append(s)
buf = b''.join(buf)
self.assertEqual(buf, msg.upper())
new_conn.close()
lconn.send(None)
rconn.close()
lconn.close()
lp.join()
rp.join()
@classmethod
def child_access(cls, conn):
w = conn.recv()
w.send('all is well')
w.close()
r = conn.recv()
msg = r.recv()
conn.send(msg*2)
conn.close()
def test_access(self):
# On Windows, if we do not specify a destination pid when
# using DupHandle then we need to be careful to use the
# correct access flags for DuplicateHandle(), or else
# DupHandle.detach() will raise PermissionError. For example,
# for a read only pipe handle we should use
# access=FILE_GENERIC_READ. (Unfortunately
# DUPLICATE_SAME_ACCESS does not work.)
conn, child_conn = self.Pipe()
p = self.Process(target=self.child_access, args=(child_conn,))
p.daemon = True
p.start()
child_conn.close()
r, w = self.Pipe(duplex=False)
conn.send(w)
w.close()
self.assertEqual(r.recv(), 'all is well')
r.close()
r, w = self.Pipe(duplex=False)
conn.send(r)
r.close()
w.send('foobar')
w.close()
self.assertEqual(conn.recv(), 'foobar'*2)
#
#
#
class _TestHeap(BaseTestCase):
ALLOWED_TYPES = ('processes',)
def test_heap(self):
iterations = 5000
maxblocks = 50
blocks = []
# create and destroy lots of blocks of different sizes
for i in range(iterations):
size = int(random.lognormvariate(0, 1) * 1000)
b = multiprocessing.heap.BufferWrapper(size)
blocks.append(b)
if len(blocks) > maxblocks:
i = random.randrange(maxblocks)
del blocks[i]
# get the heap object
heap = multiprocessing.heap.BufferWrapper._heap
# verify the state of the heap
all = []
occupied = 0
heap._lock.acquire()
self.addCleanup(heap._lock.release)
for L in list(heap._len_to_seq.values()):
for arena, start, stop in L:
all.append((heap._arenas.index(arena), start, stop,
stop-start, 'free'))
for arena, start, stop in heap._allocated_blocks:
all.append((heap._arenas.index(arena), start, stop,
stop-start, 'occupied'))
occupied += (stop-start)
all.sort()
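        # Adjacent blocks must either be contiguous within one arena, or the
        # later block must start at offset 0 of a different arena.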
for i in range(len(all)-1):
(arena, start, stop) = all[i][:3]
(narena, nstart, nstop) = all[i+1][:3]
self.assertTrue((arena != narena and nstart == 0) or
(stop == nstart))
def test_free_from_gc(self):
# Check that freeing of blocks by the garbage collector doesn't deadlock
# (issue #12352).
# Make sure the GC is enabled, and set lower collection thresholds to
# make collections more frequent (and increase the probability of
# deadlock).
if not gc.isenabled():
gc.enable()
self.addCleanup(gc.disable)
thresholds = gc.get_threshold()
self.addCleanup(gc.set_threshold, *thresholds)
gc.set_threshold(10)
# perform numerous block allocations, with cyclic references to make
# sure objects are collected asynchronously by the gc
for i in range(5000):
a = multiprocessing.heap.BufferWrapper(1)
b = multiprocessing.heap.BufferWrapper(1)
# circular references
a.buddy = b
b.buddy = a
#
#
#
class _Foo(Structure):
_fields_ = [
('x', c_int),
('y', c_double)
]
class _TestSharedCTypes(BaseTestCase):
ALLOWED_TYPES = ('processes',)
def setUp(self):
if not HAS_SHAREDCTYPES:
self.skipTest("requires multiprocessing.sharedctypes")
@classmethod
def _double(cls, x, y, foo, arr, string):
x.value *= 2
y.value *= 2
foo.x *= 2
foo.y *= 2
string.value *= 2
for i in range(len(arr)):
arr[i] *= 2
def test_sharedctypes(self, lock=False):
x = Value('i', 7, lock=lock)
y = Value(c_double, 1.0/3.0, lock=lock)
foo = Value(_Foo, 3, 2, lock=lock)
arr = self.Array('d', list(range(10)), lock=lock)
string = self.Array('c', 20, lock=lock)
string.value = latin('hello')
p = self.Process(target=self._double, args=(x, y, foo, arr, string))
p.daemon = True
p.start()
p.join()
self.assertEqual(x.value, 14)
self.assertAlmostEqual(y.value, 2.0/3.0)
self.assertEqual(foo.x, 6)
self.assertAlmostEqual(foo.y, 4.0)
for i in range(10):
self.assertAlmostEqual(arr[i], i*2)
self.assertEqual(string.value, latin('hellohello'))
def test_synchronize(self):
self.test_sharedctypes(lock=True)
def test_copy(self):
foo = _Foo(2, 5.0)
bar = copy(foo)
foo.x = 0
foo.y = 0
self.assertEqual(bar.x, 2)
self.assertAlmostEqual(bar.y, 5.0)
#
#
#
class _TestFinalize(BaseTestCase):
ALLOWED_TYPES = ('processes',)
@classmethod
def _test_finalize(cls, conn):
class Foo(object):
pass
a = Foo()
util.Finalize(a, conn.send, args=('a',))
del a # triggers callback for a
b = Foo()
close_b = util.Finalize(b, conn.send, args=('b',))
close_b() # triggers callback for b
close_b() # does nothing because callback has already been called
del b # does nothing because callback has already been called
c = Foo()
util.Finalize(c, conn.send, args=('c',))
d10 = Foo()
util.Finalize(d10, conn.send, args=('d10',), exitpriority=1)
d01 = Foo()
util.Finalize(d01, conn.send, args=('d01',), exitpriority=0)
d02 = Foo()
util.Finalize(d02, conn.send, args=('d02',), exitpriority=0)
d03 = Foo()
util.Finalize(d03, conn.send, args=('d03',), exitpriority=0)
util.Finalize(None, conn.send, args=('e',), exitpriority=-10)
util.Finalize(None, conn.send, args=('STOP',), exitpriority=-100)
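        # _exit_function() runs remaining finalizers in order of decreasing
        # exitpriority; ties run in reverse registration order, which is why
        # d03 is expected to fire before d02 and d01.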
# call multiprocessing's cleanup function then exit process without
# garbage collecting locals
util._exit_function()
conn.close()
os._exit(0)
def test_finalize(self):
conn, child_conn = self.Pipe()
p = self.Process(target=self._test_finalize, args=(child_conn,))
p.daemon = True
p.start()
p.join()
result = [obj for obj in iter(conn.recv, 'STOP')]
self.assertEqual(result, ['a', 'b', 'd10', 'd03', 'd02', 'd01', 'e'])
#
# Test that from ... import * works for each module
#
class _TestImportStar(unittest.TestCase):
def get_module_names(self):
import glob
folder = os.path.dirname(multiprocessing.__file__)
pattern = os.path.join(folder, '*.py')
files = glob.glob(pattern)
modules = [os.path.splitext(os.path.split(f)[1])[0] for f in files]
modules = ['multiprocessing.' + m for m in modules]
modules.remove('multiprocessing.__init__')
modules.append('multiprocessing')
return modules
def test_import(self):
modules = self.get_module_names()
if sys.platform == 'win32':
modules.remove('multiprocessing.popen_fork')
modules.remove('multiprocessing.popen_forkserver')
modules.remove('multiprocessing.popen_spawn_posix')
else:
modules.remove('multiprocessing.popen_spawn_win32')
if not HAS_REDUCTION:
modules.remove('multiprocessing.popen_forkserver')
if c_int is None:
# This module requires _ctypes
modules.remove('multiprocessing.sharedctypes')
for name in modules:
__import__(name)
mod = sys.modules[name]
self.assertTrue(hasattr(mod, '__all__'), name)
for attr in mod.__all__:
self.assertTrue(
hasattr(mod, attr),
'%r does not have attribute %r' % (mod, attr)
)
#
# Quick test that logging works -- does not test logging output
#
class _TestLogging(BaseTestCase):
ALLOWED_TYPES = ('processes',)
def test_enable_logging(self):
logger = multiprocessing.get_logger()
logger.setLevel(util.SUBWARNING)
self.assertTrue(logger is not None)
logger.debug('this will not be printed')
logger.info('nor will this')
logger.setLevel(LOG_LEVEL)
@classmethod
def _test_level(cls, conn):
logger = multiprocessing.get_logger()
conn.send(logger.getEffectiveLevel())
def test_level(self):
LEVEL1 = 32
LEVEL2 = 37
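        # Deliberately non-standard numeric levels, so we can tell whether
        # the child observed the multiprocessing logger's level (LEVEL1) or
        # the root logger's level (LEVEL2).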
logger = multiprocessing.get_logger()
root_logger = logging.getLogger()
root_level = root_logger.level
reader, writer = multiprocessing.Pipe(duplex=False)
logger.setLevel(LEVEL1)
p = self.Process(target=self._test_level, args=(writer,))
p.daemon = True
p.start()
self.assertEqual(LEVEL1, reader.recv())
logger.setLevel(logging.NOTSET)
root_logger.setLevel(LEVEL2)
p = self.Process(target=self._test_level, args=(writer,))
p.daemon = True
p.start()
self.assertEqual(LEVEL2, reader.recv())
root_logger.setLevel(root_level)
logger.setLevel(level=LOG_LEVEL)
# class _TestLoggingProcessName(BaseTestCase):
#
# def handle(self, record):
# assert record.processName == multiprocessing.current_process().name
# self.__handled = True
#
# def test_logging(self):
# handler = logging.Handler()
# handler.handle = self.handle
# self.__handled = False
# # Bypass getLogger() and side-effects
# logger = logging.getLoggerClass()(
# 'multiprocessing.test.TestLoggingProcessName')
# logger.addHandler(handler)
# logger.propagate = False
#
# logger.warn('foo')
# assert self.__handled
#
# Check that Process.join() retries if os.waitpid() fails with EINTR
#
class _TestPollEintr(BaseTestCase):
ALLOWED_TYPES = ('processes',)
@classmethod
def _killer(cls, pid):
time.sleep(0.1)
os.kill(pid, signal.SIGUSR1)
@unittest.skipUnless(hasattr(signal, 'SIGUSR1'), 'requires SIGUSR1')
def test_poll_eintr(self):
got_signal = [False]
def record(*args):
got_signal[0] = True
pid = os.getpid()
oldhandler = signal.signal(signal.SIGUSR1, record)
try:
killer = self.Process(target=self._killer, args=(pid,))
killer.start()
try:
p = self.Process(target=time.sleep, args=(2,))
p.start()
p.join()
finally:
killer.join()
self.assertTrue(got_signal[0])
self.assertEqual(p.exitcode, 0)
finally:
signal.signal(signal.SIGUSR1, oldhandler)
#
# Test to verify handle verification, see issue 3321
#
class TestInvalidHandle(unittest.TestCase):
@unittest.skipIf(WIN32, "skipped on Windows")
def test_invalid_handles(self):
conn = multiprocessing.connection.Connection(44977608)
# check that poll() doesn't crash
try:
conn.poll()
except (ValueError, OSError):
pass
finally:
# Hack private attribute _handle to avoid printing an error
# in conn.__del__
conn._handle = None
self.assertRaises((ValueError, OSError),
multiprocessing.connection.Connection, -1)
class OtherTest(unittest.TestCase):
# TODO: add more tests for deliver/answer challenge.
def test_deliver_challenge_auth_failure(self):
class _FakeConnection(object):
def recv_bytes(self, size):
return b'something bogus'
def send_bytes(self, data):
pass
self.assertRaises(multiprocessing.AuthenticationError,
multiprocessing.connection.deliver_challenge,
_FakeConnection(), b'abc')
def test_answer_challenge_auth_failure(self):
class _FakeConnection(object):
def __init__(self):
self.count = 0
def recv_bytes(self, size):
self.count += 1
if self.count == 1:
return multiprocessing.connection.CHALLENGE
elif self.count == 2:
return b'something bogus'
return b''
def send_bytes(self, data):
pass
self.assertRaises(multiprocessing.AuthenticationError,
multiprocessing.connection.answer_challenge,
_FakeConnection(), b'abc')
#
# Test Manager.start()/Pool.__init__() initializer feature - see issue 5585
#
def initializer(ns):
ns.test += 1
class TestInitializers(unittest.TestCase):
def setUp(self):
self.mgr = multiprocessing.Manager()
self.ns = self.mgr.Namespace()
self.ns.test = 0
def tearDown(self):
self.mgr.shutdown()
self.mgr.join()
def test_manager_initializer(self):
m = multiprocessing.managers.SyncManager()
self.assertRaises(TypeError, m.start, 1)
m.start(initializer, (self.ns,))
self.assertEqual(self.ns.test, 1)
m.shutdown()
m.join()
def test_pool_initializer(self):
self.assertRaises(TypeError, multiprocessing.Pool, initializer=1)
p = multiprocessing.Pool(1, initializer, (self.ns,))
p.close()
p.join()
self.assertEqual(self.ns.test, 1)
#
# Issue 5155, 5313, 5331: Test process in processes
# Verifies os.close(sys.stdin.fileno) vs. sys.stdin.close() behavior
#
def _this_sub_process(q):
try:
item = q.get(block=False)
except pyqueue.Empty:
pass
def _test_process(q):
queue = multiprocessing.Queue()
subProc = multiprocessing.Process(target=_this_sub_process, args=(queue,))
subProc.daemon = True
subProc.start()
subProc.join()
def _afunc(x):
return x*x
def pool_in_process():
pool = multiprocessing.Pool(processes=4)
x = pool.map(_afunc, [1, 2, 3, 4, 5, 6, 7])
pool.close()
pool.join()
class _file_like(object):
def __init__(self, delegate):
self._delegate = delegate
self._pid = None
@property
def cache(self):
pid = os.getpid()
        # There are no race conditions here: after a fork only the calling
        # thread survives in the child, so nothing else can mutate _pid.
if pid != self._pid:
self._pid = pid
self._cache = []
return self._cache
def write(self, data):
self.cache.append(data)
def flush(self):
self._delegate.write(''.join(self.cache))
self._cache = []
class TestStdinBadfiledescriptor(unittest.TestCase):
def test_queue_in_process(self):
queue = multiprocessing.Queue()
proc = multiprocessing.Process(target=_test_process, args=(queue,))
proc.start()
proc.join()
def test_pool_in_process(self):
p = multiprocessing.Process(target=pool_in_process)
p.start()
p.join()
def test_flushing(self):
sio = io.StringIO()
flike = _file_like(sio)
flike.write('foo')
proc = multiprocessing.Process(target=lambda: flike.flush())
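        # The Process object is created but never started; only the
        # parent-side flush below is actually exercised.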
flike.flush()
assert sio.getvalue() == 'foo'
class TestWait(unittest.TestCase):
@classmethod
def _child_test_wait(cls, w, slow):
for i in range(10):
if slow:
time.sleep(random.random()*0.1)
w.send((i, os.getpid()))
w.close()
def test_wait(self, slow=False):
from multiprocessing.connection import wait
readers = []
procs = []
messages = []
for i in range(4):
r, w = multiprocessing.Pipe(duplex=False)
p = multiprocessing.Process(target=self._child_test_wait, args=(w, slow))
p.daemon = True
p.start()
w.close()
readers.append(r)
procs.append(p)
self.addCleanup(p.join)
while readers:
for r in wait(readers):
try:
msg = r.recv()
except EOFError:
readers.remove(r)
r.close()
else:
messages.append(msg)
messages.sort()
expected = sorted((i, p.pid) for i in range(10) for p in procs)
self.assertEqual(messages, expected)
@classmethod
def _child_test_wait_socket(cls, address, slow):
s = socket.socket()
s.connect(address)
for i in range(10):
if slow:
time.sleep(random.random()*0.1)
s.sendall(('%s\n' % i).encode('ascii'))
s.close()
def test_wait_socket(self, slow=False):
from multiprocessing.connection import wait
l = socket.socket()
l.bind((test.support.HOST, 0))
l.listen(4)
addr = l.getsockname()
readers = []
procs = []
dic = {}
for i in range(4):
p = multiprocessing.Process(target=self._child_test_wait_socket,
args=(addr, slow))
p.daemon = True
p.start()
procs.append(p)
self.addCleanup(p.join)
for i in range(4):
r, _ = l.accept()
readers.append(r)
dic[r] = []
l.close()
while readers:
for r in wait(readers):
msg = r.recv(32)
if not msg:
readers.remove(r)
r.close()
else:
dic[r].append(msg)
expected = ''.join('%s\n' % i for i in range(10)).encode('ascii')
for v in dic.values():
self.assertEqual(b''.join(v), expected)
def test_wait_slow(self):
self.test_wait(True)
def test_wait_socket_slow(self):
self.test_wait_socket(True)
def test_wait_timeout(self):
from multiprocessing.connection import wait
expected = 5
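        # With no data pending on either end, wait() should block for
        # roughly the full five-second timeout and return an empty list.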
a, b = multiprocessing.Pipe()
start = time.time()
res = wait([a, b], expected)
delta = time.time() - start
self.assertEqual(res, [])
self.assertLess(delta, expected * 2)
self.assertGreater(delta, expected * 0.5)
b.send(None)
start = time.time()
res = wait([a, b], 20)
delta = time.time() - start
self.assertEqual(res, [a])
self.assertLess(delta, 0.4)
@classmethod
def signal_and_sleep(cls, sem, period):
sem.release()
time.sleep(period)
def test_wait_integer(self):
from multiprocessing.connection import wait
expected = 3
sorted_ = lambda l: sorted(l, key=lambda x: id(x))
sem = multiprocessing.Semaphore(0)
a, b = multiprocessing.Pipe()
p = multiprocessing.Process(target=self.signal_and_sleep,
args=(sem, expected))
p.start()
self.assertIsInstance(p.sentinel, int)
self.assertTrue(sem.acquire(timeout=20))
start = time.time()
res = wait([a, p.sentinel, b], expected + 20)
delta = time.time() - start
self.assertEqual(res, [p.sentinel])
self.assertLess(delta, expected + 2)
self.assertGreater(delta, expected - 2)
a.send(None)
start = time.time()
res = wait([a, p.sentinel, b], 20)
delta = time.time() - start
self.assertEqual(sorted_(res), sorted_([p.sentinel, b]))
self.assertLess(delta, 0.4)
b.send(None)
start = time.time()
res = wait([a, p.sentinel, b], 20)
delta = time.time() - start
self.assertEqual(sorted_(res), sorted_([a, p.sentinel, b]))
self.assertLess(delta, 0.4)
p.terminate()
p.join()
def test_neg_timeout(self):
from multiprocessing.connection import wait
a, b = multiprocessing.Pipe()
t = time.time()
res = wait([a], timeout=-1)
t = time.time() - t
self.assertEqual(res, [])
self.assertLess(t, 1)
a.close()
b.close()
#
# Issue 14151: Test invalid family on invalid environment
#
class TestInvalidFamily(unittest.TestCase):
@unittest.skipIf(WIN32, "skipped on Windows")
def test_invalid_family(self):
with self.assertRaises(ValueError):
multiprocessing.connection.Listener(r'\\.\test')
@unittest.skipUnless(WIN32, "skipped on non-Windows platforms")
def test_invalid_family_win32(self):
with self.assertRaises(ValueError):
multiprocessing.connection.Listener('/var/test.pipe')
#
# Issue 12098: check sys.flags of child matches that for parent
#
class TestFlags(unittest.TestCase):
@classmethod
def run_in_grandchild(cls, conn):
conn.send(tuple(sys.flags))
@classmethod
def run_in_child(cls):
import json
r, w = multiprocessing.Pipe(duplex=False)
p = multiprocessing.Process(target=cls.run_in_grandchild, args=(w,))
p.start()
grandchild_flags = r.recv()
p.join()
r.close()
w.close()
flags = (tuple(sys.flags), grandchild_flags)
print(json.dumps(flags))
def test_flags(self):
import json, subprocess
# start child process using unusual flags
prog = ('from test._test_multiprocessing import TestFlags; ' +
'TestFlags.run_in_child()')
data = subprocess.check_output(
[sys.executable, '-E', '-S', '-O', '-c', prog])
child_flags, grandchild_flags = json.loads(data.decode('ascii'))
self.assertEqual(child_flags, grandchild_flags)
#
# Test interaction with socket timeouts - see Issue #6056
#
class TestTimeouts(unittest.TestCase):
@classmethod
def _test_timeout(cls, child, address):
time.sleep(1)
child.send(123)
child.close()
conn = multiprocessing.connection.Client(address)
conn.send(456)
conn.close()
def test_timeout(self):
old_timeout = socket.getdefaulttimeout()
try:
socket.setdefaulttimeout(0.1)
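            # A very short default socket timeout must not leak into the
            # blocking pipe/listener operations below (see issue #6056).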
parent, child = multiprocessing.Pipe(duplex=True)
l = multiprocessing.connection.Listener(family='AF_INET')
p = multiprocessing.Process(target=self._test_timeout,
args=(child, l.address))
p.start()
child.close()
self.assertEqual(parent.recv(), 123)
parent.close()
conn = l.accept()
self.assertEqual(conn.recv(), 456)
conn.close()
l.close()
p.join(10)
finally:
socket.setdefaulttimeout(old_timeout)
#
# Test what happens with no "if __name__ == '__main__'"
#
class TestNoForkBomb(unittest.TestCase):
def test_noforkbomb(self):
sm = multiprocessing.get_start_method()
name = os.path.join(os.path.dirname(__file__), 'mp_fork_bomb.py')
if sm != 'fork':
rc, out, err = test.script_helper.assert_python_failure(name, sm)
self.assertEqual('', out.decode('ascii'))
self.assertIn('RuntimeError', err.decode('ascii'))
else:
rc, out, err = test.script_helper.assert_python_ok(name, sm)
self.assertEqual('123', out.decode('ascii').rstrip())
self.assertEqual('', err.decode('ascii'))
#
# Issue #17555: ForkAwareThreadLock
#
class TestForkAwareThreadLock(unittest.TestCase):
    # We recursively start processes. Issue #17555 meant that the
    # after-fork registry would get duplicate entries for the same
    # lock. The size of the registry at generation n was ~2**n.
@classmethod
def child(cls, n, conn):
if n > 1:
p = multiprocessing.Process(target=cls.child, args=(n-1, conn))
p.start()
conn.close()
p.join(timeout=5)
else:
conn.send(len(util._afterfork_registry))
conn.close()
def test_lock(self):
r, w = multiprocessing.Pipe(False)
l = util.ForkAwareThreadLock()
old_size = len(util._afterfork_registry)
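        # After five generations of nested forks the registry must not have
        # grown, otherwise duplicate lock entries are accumulating (#17555).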
p = multiprocessing.Process(target=self.child, args=(5, w))
p.start()
w.close()
new_size = r.recv()
p.join(timeout=5)
self.assertLessEqual(new_size, old_size)
#
# Check that non-forked child processes do not inherit unneeded fds/handles
#
class TestCloseFds(unittest.TestCase):
def get_high_socket_fd(self):
if WIN32:
# The child process will not have any socket handles, so
# calling socket.fromfd() should produce WSAENOTSOCK even
# if there is a handle of the same number.
return socket.socket().detach()
else:
# We want to produce a socket with an fd high enough that a
# freshly created child process will not have any fds as high.
fd = socket.socket().detach()
to_close = []
while fd < 50:
to_close.append(fd)
fd = os.dup(fd)
for x in to_close:
os.close(x)
return fd
def close(self, fd):
if WIN32:
socket.socket(fileno=fd).close()
else:
os.close(fd)
@classmethod
def _test_closefds(cls, conn, fd):
try:
s = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)
except Exception as e:
conn.send(e)
else:
s.close()
conn.send(None)
def test_closefd(self):
if not HAS_REDUCTION:
raise unittest.SkipTest('requires fd pickling')
reader, writer = multiprocessing.Pipe()
fd = self.get_high_socket_fd()
try:
p = multiprocessing.Process(target=self._test_closefds,
args=(writer, fd))
p.start()
writer.close()
e = reader.recv()
p.join(timeout=5)
finally:
self.close(fd)
writer.close()
reader.close()
if multiprocessing.get_start_method() == 'fork':
self.assertIs(e, None)
else:
WSAENOTSOCK = 10038
self.assertIsInstance(e, OSError)
self.assertTrue(e.errno == errno.EBADF or
e.winerror == WSAENOTSOCK, e)
#
# Issue #17097: EINTR should be ignored by recv(), send(), accept() etc
#
class TestIgnoreEINTR(unittest.TestCase):
@classmethod
def _test_ignore(cls, conn):
def handler(signum, frame):
pass
signal.signal(signal.SIGUSR1, handler)
conn.send('ready')
x = conn.recv()
conn.send(x)
conn.send_bytes(b'x'*(1024*1024)) # sending 1 MB should block
@unittest.skipUnless(hasattr(signal, 'SIGUSR1'), 'requires SIGUSR1')
def test_ignore(self):
conn, child_conn = multiprocessing.Pipe()
try:
p = multiprocessing.Process(target=self._test_ignore,
args=(child_conn,))
p.daemon = True
p.start()
child_conn.close()
self.assertEqual(conn.recv(), 'ready')
time.sleep(0.1)
os.kill(p.pid, signal.SIGUSR1)
time.sleep(0.1)
conn.send(1234)
self.assertEqual(conn.recv(), 1234)
time.sleep(0.1)
os.kill(p.pid, signal.SIGUSR1)
self.assertEqual(conn.recv_bytes(), b'x'*(1024*1024))
time.sleep(0.1)
p.join()
finally:
conn.close()
@classmethod
def _test_ignore_listener(cls, conn):
def handler(signum, frame):
pass
signal.signal(signal.SIGUSR1, handler)
with multiprocessing.connection.Listener() as l:
conn.send(l.address)
a = l.accept()
a.send('welcome')
@unittest.skipUnless(hasattr(signal, 'SIGUSR1'), 'requires SIGUSR1')
def test_ignore_listener(self):
conn, child_conn = multiprocessing.Pipe()
try:
p = multiprocessing.Process(target=self._test_ignore_listener,
args=(child_conn,))
p.daemon = True
p.start()
child_conn.close()
address = conn.recv()
time.sleep(0.1)
os.kill(p.pid, signal.SIGUSR1)
time.sleep(0.1)
client = multiprocessing.connection.Client(address)
self.assertEqual(client.recv(), 'welcome')
p.join()
finally:
conn.close()
class TestStartMethod(unittest.TestCase):
@classmethod
def _check_context(cls, conn):
conn.send(multiprocessing.get_start_method())
def check_context(self, ctx):
r, w = ctx.Pipe(duplex=False)
p = ctx.Process(target=self._check_context, args=(w,))
p.start()
w.close()
child_method = r.recv()
r.close()
p.join()
self.assertEqual(child_method, ctx.get_start_method())
def test_context(self):
for method in ('fork', 'spawn', 'forkserver'):
try:
ctx = multiprocessing.get_context(method)
except ValueError:
continue
self.assertEqual(ctx.get_start_method(), method)
self.assertIs(ctx.get_context(), ctx)
self.assertRaises(ValueError, ctx.set_start_method, 'spawn')
self.assertRaises(ValueError, ctx.set_start_method, None)
self.check_context(ctx)
def test_set_get(self):
multiprocessing.set_forkserver_preload(PRELOAD)
count = 0
old_method = multiprocessing.get_start_method()
try:
for method in ('fork', 'spawn', 'forkserver'):
try:
multiprocessing.set_start_method(method, force=True)
except ValueError:
continue
self.assertEqual(multiprocessing.get_start_method(), method)
ctx = multiprocessing.get_context()
self.assertEqual(ctx.get_start_method(), method)
self.assertTrue(type(ctx).__name__.lower().startswith(method))
self.assertTrue(
ctx.Process.__name__.lower().startswith(method))
self.check_context(multiprocessing)
count += 1
finally:
multiprocessing.set_start_method(old_method, force=True)
self.assertGreaterEqual(count, 1)
def test_get_all(self):
methods = multiprocessing.get_all_start_methods()
if sys.platform == 'win32':
self.assertEqual(methods, ['spawn'])
else:
self.assertTrue(methods == ['fork', 'spawn'] or
methods == ['fork', 'spawn', 'forkserver'])
#
# Check that killing process does not leak named semaphores
#
@unittest.skipIf(sys.platform == "win32",
"test semantics don't make sense on Windows")
class TestSemaphoreTracker(unittest.TestCase):
def test_semaphore_tracker(self):
import subprocess
cmd = '''if 1:
import multiprocessing as mp, time, os
mp.set_start_method("spawn")
lock1 = mp.Lock()
lock2 = mp.Lock()
os.write(%d, lock1._semlock.name.encode("ascii") + b"\\n")
os.write(%d, lock2._semlock.name.encode("ascii") + b"\\n")
time.sleep(10)
'''
r, w = os.pipe()
p = subprocess.Popen([sys.executable,
'-c', cmd % (w, w)],
pass_fds=[w],
stderr=subprocess.PIPE)
os.close(w)
with open(r, 'rb', closefd=True) as f:
name1 = f.readline().rstrip().decode('ascii')
name2 = f.readline().rstrip().decode('ascii')
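        # Unlink the first semaphore ourselves; the tracker should still
        # unlink the second one and log an error for the already-gone first.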
_multiprocessing.sem_unlink(name1)
p.terminate()
p.wait()
time.sleep(2.0)
with self.assertRaises(OSError) as ctx:
_multiprocessing.sem_unlink(name2)
# docs say it should be ENOENT, but OSX seems to give EINVAL
self.assertIn(ctx.exception.errno, (errno.ENOENT, errno.EINVAL))
err = p.stderr.read().decode('utf-8')
p.stderr.close()
expected = 'semaphore_tracker: There appear to be 2 leaked semaphores'
self.assertRegex(err, expected)
        self.assertRegex(err, r'semaphore_tracker: %r: \[Errno' % name1)
#
# Mixins
#
class ProcessesMixin(object):
TYPE = 'processes'
Process = multiprocessing.Process
connection = multiprocessing.connection
current_process = staticmethod(multiprocessing.current_process)
active_children = staticmethod(multiprocessing.active_children)
Pool = staticmethod(multiprocessing.Pool)
Pipe = staticmethod(multiprocessing.Pipe)
Queue = staticmethod(multiprocessing.Queue)
JoinableQueue = staticmethod(multiprocessing.JoinableQueue)
Lock = staticmethod(multiprocessing.Lock)
RLock = staticmethod(multiprocessing.RLock)
Semaphore = staticmethod(multiprocessing.Semaphore)
BoundedSemaphore = staticmethod(multiprocessing.BoundedSemaphore)
Condition = staticmethod(multiprocessing.Condition)
Event = staticmethod(multiprocessing.Event)
Barrier = staticmethod(multiprocessing.Barrier)
Value = staticmethod(multiprocessing.Value)
Array = staticmethod(multiprocessing.Array)
RawValue = staticmethod(multiprocessing.RawValue)
RawArray = staticmethod(multiprocessing.RawArray)
class ManagerMixin(object):
TYPE = 'manager'
Process = multiprocessing.Process
Queue = property(operator.attrgetter('manager.Queue'))
JoinableQueue = property(operator.attrgetter('manager.JoinableQueue'))
Lock = property(operator.attrgetter('manager.Lock'))
RLock = property(operator.attrgetter('manager.RLock'))
Semaphore = property(operator.attrgetter('manager.Semaphore'))
BoundedSemaphore = property(operator.attrgetter('manager.BoundedSemaphore'))
Condition = property(operator.attrgetter('manager.Condition'))
Event = property(operator.attrgetter('manager.Event'))
Barrier = property(operator.attrgetter('manager.Barrier'))
Value = property(operator.attrgetter('manager.Value'))
Array = property(operator.attrgetter('manager.Array'))
list = property(operator.attrgetter('manager.list'))
dict = property(operator.attrgetter('manager.dict'))
Namespace = property(operator.attrgetter('manager.Namespace'))
@classmethod
def Pool(cls, *args, **kwds):
return cls.manager.Pool(*args, **kwds)
@classmethod
def setUpClass(cls):
cls.manager = multiprocessing.Manager()
@classmethod
def tearDownClass(cls):
# only the manager process should be returned by active_children()
# but this can take a bit on slow machines, so wait a few seconds
# if there are other children too (see #17395)
t = 0.01
while len(multiprocessing.active_children()) > 1 and t < 5:
time.sleep(t)
t *= 2
gc.collect() # do garbage collection
if cls.manager._number_of_objects() != 0:
# This is not really an error since some tests do not
# ensure that all processes which hold a reference to a
# managed object have been joined.
print('Shared objects which still exist at manager shutdown:')
print(cls.manager._debug_info())
cls.manager.shutdown()
cls.manager.join()
cls.manager = None
class ThreadsMixin(object):
TYPE = 'threads'
Process = multiprocessing.dummy.Process
connection = multiprocessing.dummy.connection
current_process = staticmethod(multiprocessing.dummy.current_process)
active_children = staticmethod(multiprocessing.dummy.active_children)
Pool = staticmethod(multiprocessing.Pool)
Pipe = staticmethod(multiprocessing.dummy.Pipe)
Queue = staticmethod(multiprocessing.dummy.Queue)
JoinableQueue = staticmethod(multiprocessing.dummy.JoinableQueue)
Lock = staticmethod(multiprocessing.dummy.Lock)
RLock = staticmethod(multiprocessing.dummy.RLock)
Semaphore = staticmethod(multiprocessing.dummy.Semaphore)
BoundedSemaphore = staticmethod(multiprocessing.dummy.BoundedSemaphore)
Condition = staticmethod(multiprocessing.dummy.Condition)
Event = staticmethod(multiprocessing.dummy.Event)
Barrier = staticmethod(multiprocessing.dummy.Barrier)
Value = staticmethod(multiprocessing.dummy.Value)
Array = staticmethod(multiprocessing.dummy.Array)
#
# Functions used to create test cases from the base ones in this module
#
def install_tests_in_module_dict(remote_globs, start_method):
__module__ = remote_globs['__name__']
local_globs = globals()
ALL_TYPES = {'processes', 'threads', 'manager'}
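    # For each _Test* base class, synthesize one concrete TestCase per
    # allowed type by mixing in the matching *Mixin class defined above.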
for name, base in local_globs.items():
if not isinstance(base, type):
continue
if issubclass(base, BaseTestCase):
if base is BaseTestCase:
continue
assert set(base.ALLOWED_TYPES) <= ALL_TYPES, base.ALLOWED_TYPES
for type_ in base.ALLOWED_TYPES:
newname = 'With' + type_.capitalize() + name[1:]
Mixin = local_globs[type_.capitalize() + 'Mixin']
class Temp(base, Mixin, unittest.TestCase):
pass
Temp.__name__ = Temp.__qualname__ = newname
Temp.__module__ = __module__
remote_globs[newname] = Temp
elif issubclass(base, unittest.TestCase):
class Temp(base, object):
pass
Temp.__name__ = Temp.__qualname__ = name
Temp.__module__ = __module__
remote_globs[name] = Temp
dangling = [None, None]
old_start_method = [None]
def setUpModule():
multiprocessing.set_forkserver_preload(PRELOAD)
multiprocessing.process._cleanup()
dangling[0] = multiprocessing.process._dangling.copy()
dangling[1] = threading._dangling.copy()
old_start_method[0] = multiprocessing.get_start_method(allow_none=True)
try:
multiprocessing.set_start_method(start_method, force=True)
except ValueError:
raise unittest.SkipTest(start_method +
' start method not supported')
if sys.platform.startswith("linux"):
try:
lock = multiprocessing.RLock()
except OSError:
raise unittest.SkipTest("OSError raises on RLock creation, "
"see issue 3111!")
check_enough_semaphores()
util.get_temp_dir() # creates temp directory
multiprocessing.get_logger().setLevel(LOG_LEVEL)
def tearDownModule():
multiprocessing.set_start_method(old_start_method[0], force=True)
# pause a bit so we don't get warning about dangling threads/processes
time.sleep(0.5)
multiprocessing.process._cleanup()
gc.collect()
tmp = set(multiprocessing.process._dangling) - set(dangling[0])
if tmp:
print('Dangling processes:', tmp, file=sys.stderr)
del tmp
tmp = set(threading._dangling) - set(dangling[1])
if tmp:
print('Dangling threads:', tmp, file=sys.stderr)
remote_globs['setUpModule'] = setUpModule
remote_globs['tearDownModule'] = tearDownModule
| Orav/kbengine | kbe/src/lib/python/Lib/test/_test_multiprocessing.py | Python | lgpl-3.0 | 124,102 |
# encoding: utf-8
from __future__ import unicode_literals
import re
import itertools
from .common import InfoExtractor
from ..utils import (
compat_str,
compat_urlparse,
compat_urllib_parse,
ExtractorError,
int_or_none,
unified_strdate,
)
class SoundcloudIE(InfoExtractor):
"""Information extractor for soundcloud.com
       To access the media, the uid of the song and a stream token
       must be extracted from the page source and the script must make
       a request to media.soundcloud.com/crossdomain.xml. Then
       the media can be grabbed by requesting from a URL composed
       of the stream token and uid.
    """
_VALID_URL = r'''(?x)^(?:https?://)?
(?:(?:(?:www\.|m\.)?soundcloud\.com/
(?P<uploader>[\w\d-]+)/
(?!sets/|likes/?(?:$|[?#]))
(?P<title>[\w\d-]+)/?
(?P<token>[^?]+?)?(?:[?].*)?$)
|(?:api\.soundcloud\.com/tracks/(?P<track_id>\d+)
(?:/?\?secret_token=(?P<secret_token>[^&]+?))?$)
|(?P<player>(?:w|player|p.)\.soundcloud\.com/player/?.*?url=.*)
)
'''
IE_NAME = 'soundcloud'
_TESTS = [
{
'url': 'http://soundcloud.com/ethmusic/lostin-powers-she-so-heavy',
'md5': 'ebef0a451b909710ed1d7787dddbf0d7',
'info_dict': {
'id': '62986583',
'ext': 'mp3',
'upload_date': '20121011',
'description': 'No Downloads untill we record the finished version this weekend, i was too pumped n i had to post it , earl is prolly gonna b hella p.o\'d',
'uploader': 'E.T. ExTerrestrial Music',
'title': 'Lostin Powers - She so Heavy (SneakPreview) Adrian Ackers Blueprint 1',
'duration': 143,
}
},
# not streamable song
{
'url': 'https://soundcloud.com/the-concept-band/goldrushed-mastered?in=the-concept-band/sets/the-royal-concept-ep',
'info_dict': {
'id': '47127627',
'ext': 'mp3',
'title': 'Goldrushed',
'description': 'From Stockholm Sweden\r\nPovel / Magnus / Filip / David\r\nwww.theroyalconcept.com',
'uploader': 'The Royal Concept',
'upload_date': '20120521',
'duration': 227,
},
'params': {
# rtmp
'skip_download': True,
},
},
# private link
{
'url': 'https://soundcloud.com/jaimemf/youtube-dl-test-video-a-y-baw/s-8Pjrp',
'md5': 'aa0dd32bfea9b0c5ef4f02aacd080604',
'info_dict': {
'id': '123998367',
'ext': 'mp3',
'title': 'Youtube - Dl Test Video \'\' Ä↭',
'uploader': 'jaimeMF',
'description': 'test chars: \"\'/\\ä↭',
'upload_date': '20131209',
'duration': 9,
},
},
# private link (alt format)
{
'url': 'https://api.soundcloud.com/tracks/123998367?secret_token=s-8Pjrp',
'md5': 'aa0dd32bfea9b0c5ef4f02aacd080604',
'info_dict': {
'id': '123998367',
'ext': 'mp3',
'title': 'Youtube - Dl Test Video \'\' Ä↭',
'uploader': 'jaimeMF',
'description': 'test chars: \"\'/\\ä↭',
'upload_date': '20131209',
'duration': 9,
},
},
# downloadable song
{
'url': 'https://soundcloud.com/oddsamples/bus-brakes',
'md5': '7624f2351f8a3b2e7cd51522496e7631',
'info_dict': {
'id': '128590877',
'ext': 'mp3',
'title': 'Bus Brakes',
'description': 'md5:0053ca6396e8d2fd7b7e1595ef12ab66',
'uploader': 'oddsamples',
'upload_date': '20140109',
'duration': 17,
},
},
]
_CLIENT_ID = 'b45b1aa10f1ac2941910a7f0d10f8e28'
_IPHONE_CLIENT_ID = '376f225bf427445fc4bfb6b99b72e0bf'
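    # Hard-coded client ids matching SoundCloud's public web/iPhone players
    # at the time of writing; they may stop working if rotated server-side.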
def report_resolve(self, video_id):
"""Report information extraction."""
self.to_screen('%s: Resolving id' % video_id)
@classmethod
def _resolv_url(cls, url):
return 'http://api.soundcloud.com/resolve.json?url=' + url + '&client_id=' + cls._CLIENT_ID
def _extract_info_dict(self, info, full_title=None, quiet=False, secret_token=None):
track_id = compat_str(info['id'])
name = full_title or track_id
if quiet:
self.report_extraction(name)
thumbnail = info['artwork_url']
if thumbnail is not None:
thumbnail = thumbnail.replace('-large', '-t500x500')
ext = 'mp3'
result = {
'id': track_id,
'uploader': info['user']['username'],
'upload_date': unified_strdate(info['created_at']),
'title': info['title'],
'description': info['description'],
'thumbnail': thumbnail,
'duration': int_or_none(info.get('duration'), 1000),
'webpage_url': info.get('permalink_url'),
}
formats = []
if info.get('downloadable', False):
# We can build a direct link to the song
format_url = (
'https://api.soundcloud.com/tracks/{0}/download?client_id={1}'.format(
track_id, self._CLIENT_ID))
formats.append({
'format_id': 'download',
'ext': info.get('original_format', 'mp3'),
'url': format_url,
'vcodec': 'none',
'preference': 10,
})
# We have to retrieve the url
streams_url = ('http://api.soundcloud.com/i1/tracks/{0}/streams?'
'client_id={1}&secret_token={2}'.format(track_id, self._IPHONE_CLIENT_ID, secret_token))
format_dict = self._download_json(
streams_url,
track_id, 'Downloading track url')
for key, stream_url in format_dict.items():
if key.startswith('http'):
formats.append({
'format_id': key,
'ext': ext,
'url': stream_url,
'vcodec': 'none',
})
elif key.startswith('rtmp'):
# The url doesn't have an rtmp app, we have to extract the playpath
url, path = stream_url.split('mp3:', 1)
formats.append({
'format_id': key,
'url': url,
'play_path': 'mp3:' + path,
'ext': ext,
'vcodec': 'none',
})
if not formats:
# We fallback to the stream_url in the original info, this
# cannot be always used, sometimes it can give an HTTP 404 error
formats.append({
'format_id': 'fallback',
'url': info['stream_url'] + '?client_id=' + self._CLIENT_ID,
'ext': ext,
'vcodec': 'none',
})
        for f in formats:
            if f['format_id'].startswith('http'):
                f['protocol'] = 'http'
            elif f['format_id'].startswith('rtmp'):
                f['protocol'] = 'rtmp'
self._sort_formats(formats)
result['formats'] = formats
return result
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url, flags=re.VERBOSE)
if mobj is None:
raise ExtractorError('Invalid URL: %s' % url)
track_id = mobj.group('track_id')
token = None
if track_id is not None:
info_json_url = 'http://api.soundcloud.com/tracks/' + track_id + '.json?client_id=' + self._CLIENT_ID
full_title = track_id
token = mobj.group('secret_token')
if token:
info_json_url += "&secret_token=" + token
elif mobj.group('player'):
query = compat_urlparse.parse_qs(compat_urlparse.urlparse(url).query)
return self.url_result(query['url'][0])
else:
# extract uploader (which is in the url)
uploader = mobj.group('uploader')
# extract simple title (uploader + slug of song title)
slug_title = mobj.group('title')
token = mobj.group('token')
full_title = resolve_title = '%s/%s' % (uploader, slug_title)
if token:
resolve_title += '/%s' % token
self.report_resolve(full_title)
url = 'http://soundcloud.com/%s' % resolve_title
info_json_url = self._resolv_url(url)
info = self._download_json(info_json_url, full_title, 'Downloading info JSON')
return self._extract_info_dict(info, full_title, secret_token=token)
class SoundcloudSetIE(SoundcloudIE):
_VALID_URL = r'https?://(?:www\.)?soundcloud\.com/(?P<uploader>[\w\d-]+)/sets/(?P<slug_title>[\w\d-]+)(?:/(?P<token>[^?/]+))?'
IE_NAME = 'soundcloud:set'
_TESTS = [{
'url': 'https://soundcloud.com/the-concept-band/sets/the-royal-concept-ep',
'info_dict': {
'title': 'The Royal Concept EP',
},
'playlist_mincount': 6,
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
# extract uploader (which is in the url)
uploader = mobj.group('uploader')
# extract simple title (uploader + slug of song title)
slug_title = mobj.group('slug_title')
full_title = '%s/sets/%s' % (uploader, slug_title)
url = 'http://soundcloud.com/%s/sets/%s' % (uploader, slug_title)
token = mobj.group('token')
if token:
full_title += '/' + token
url += '/' + token
self.report_resolve(full_title)
resolv_url = self._resolv_url(url)
info = self._download_json(resolv_url, full_title)
if 'errors' in info:
for err in info['errors']:
self._downloader.report_error('unable to download video webpage: %s' % compat_str(err['error_message']))
return
return {
'_type': 'playlist',
'entries': [self._extract_info_dict(track, secret_token=token) for track in info['tracks']],
'id': info['id'],
'title': info['title'],
}
class SoundcloudUserIE(SoundcloudIE):
_VALID_URL = r'https?://(www\.)?soundcloud\.com/(?P<user>[^/]+)/?((?P<rsrc>tracks|likes)/?)?(\?.*)?$'
IE_NAME = 'soundcloud:user'
_TESTS = [{
'url': 'https://soundcloud.com/the-concept-band',
'info_dict': {
'id': '9615865',
'title': 'The Royal Concept',
},
'playlist_mincount': 12
}, {
'url': 'https://soundcloud.com/the-concept-band/likes',
'info_dict': {
'id': '9615865',
'title': 'The Royal Concept',
},
'playlist_mincount': 1,
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
uploader = mobj.group('user')
resource = mobj.group('rsrc')
if resource is None:
resource = 'tracks'
elif resource == 'likes':
resource = 'favorites'
url = 'http://soundcloud.com/%s/' % uploader
resolv_url = self._resolv_url(url)
user = self._download_json(
resolv_url, uploader, 'Downloading user info')
base_url = 'http://api.soundcloud.com/users/%s/%s.json?' % (uploader, resource)
entries = []
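        # The API pages results 50 at a time; keep fetching pages until one
        # comes back empty.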
for i in itertools.count():
data = compat_urllib_parse.urlencode({
'offset': i * 50,
'limit': 50,
'client_id': self._CLIENT_ID,
})
new_entries = self._download_json(
base_url + data, uploader, 'Downloading track page %s' % (i + 1))
if len(new_entries) == 0:
self.to_screen('%s: End page received' % uploader)
break
entries.extend(self._extract_info_dict(e, quiet=True) for e in new_entries)
return {
'_type': 'playlist',
'id': compat_str(user['id']),
'title': user['username'],
'entries': entries,
}
class SoundcloudPlaylistIE(SoundcloudIE):
_VALID_URL = r'https?://api\.soundcloud\.com/playlists/(?P<id>[0-9]+)(?:/?\?secret_token=(?P<token>[^&]+?))?$'
IE_NAME = 'soundcloud:playlist'
_TESTS = [{
'url': 'http://api.soundcloud.com/playlists/4110309',
'info_dict': {
'id': '4110309',
'title': 'TILT Brass - Bowery Poetry Club, August \'03 [Non-Site SCR 02]',
'description': 're:.*?TILT Brass - Bowery Poetry Club',
},
'playlist_count': 6,
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
playlist_id = mobj.group('id')
base_url = '%s//api.soundcloud.com/playlists/%s.json?' % (self.http_scheme(), playlist_id)
data_dict = {
'client_id': self._CLIENT_ID,
}
token = mobj.group('token')
if token:
data_dict['secret_token'] = token
data = compat_urllib_parse.urlencode(data_dict)
data = self._download_json(
base_url + data, playlist_id, 'Downloading playlist')
entries = [
self._extract_info_dict(t, quiet=True, secret_token=token)
for t in data['tracks']]
return {
'_type': 'playlist',
'id': playlist_id,
'title': data.get('title'),
'description': data.get('description'),
'entries': entries,
}
| raymondanthony/youtube-dl | youtube_dl/extractor/soundcloud.py | Python | unlicense | 14,108 |
import os
source = '../src/Stats.js'
output = '../build/stats.min.js'
os.system('java -jar compiler/compiler.jar --language_in=ECMASCRIPT5 --js ' + source + ' --js_output_file ' + output)
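# Closure strips all comments during minification, so re-attach the
# attribution banner to the compiled output below.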
with open(output, 'r') as f:
    text = f.read()
with open(output, 'w') as f:
    f.write("// stats.js - http://github.com/mrdoob/stats.js\n" + text)
| HustLion/HustLionToolkit | linux/usage/closure.py | Python | mit | 328 |
import os
import logging
from thug.ThugAPI.ThugAPI import ThugAPI
log = logging.getLogger("Thug")
class TestMiscSamplesIE(object):
cwd_path = os.path.dirname(os.path.realpath(__file__))
misc_path = os.path.join(cwd_path, os.pardir, "samples/misc")
def do_perform_test(self, caplog, sample, expected):
thug = ThugAPI()
thug.set_useragent('win10ie110')
thug.set_events('click,storage')
thug.set_connect_timeout(2)
thug.disable_cert_logging()
thug.set_features_logging()
thug.set_file_logging()
thug.set_ssl_verify()
thug.log_init(sample)
thug.run_local(sample)
records = [r.message for r in caplog.records]
matches = 0
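        # Every expected substring must appear in at least one captured log
        # record for the sample to count as correctly analyzed.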
        for e in expected:
            for record in records:
                if e in record:
                    matches += 1
                    break  # count each expected substring at most once
        assert matches >= len(expected)
def test_plugindetect1(self, caplog):
sample = os.path.join(self.misc_path, "PluginDetect-0.7.6.html")
expected = ['AdobeReader version: 9.1.0.0',
'Flash version: 10.0.64.0']
self.do_perform_test(caplog, sample, expected)
def test_plugindetect2(self, caplog):
sample = os.path.join(self.misc_path, "PluginDetect-0.7.8.html")
expected = ['AdobeReader version: 9,1,0,0',
'Flash version: 10,0,64,0',
'Java version: 1,6,0,32',
'ActiveXObject: javawebstart.isinstalled.1.6.0.0',
'ActiveXObject: javaplugin.160_32']
self.do_perform_test(caplog, sample, expected)
def test_test1(self, caplog):
sample = os.path.join(self.misc_path, "test1.html")
expected = ['[Window] Alert Text: one']
self.do_perform_test(caplog, sample, expected)
def test_test2(self, caplog):
sample = os.path.join(self.misc_path, "test2.html")
expected = ['[Window] Alert Text: Java enabled: true']
self.do_perform_test(caplog, sample, expected)
def test_test3(self, caplog):
sample = os.path.join(self.misc_path, "test3.html")
expected = ['[Window] Alert Text: foo']
self.do_perform_test(caplog, sample, expected)
def test_testAppendChild(self, caplog):
sample = os.path.join(self.misc_path, "testAppendChild.html")
expected = ['Don\'t care about me',
'Just a sample',
'Attempt to append a null element failed',
'Attempt to append an invalid element failed',
'Attempt to append a text element failed',
'Attempt to append a read-only element failed']
self.do_perform_test(caplog, sample, expected)
def test_testClipboardData(self, caplog):
sample = os.path.join(self.misc_path, "testClipboardData.html")
expected = ['Test ClipboardData']
self.do_perform_test(caplog, sample, expected)
def test_testCloneNode(self, caplog):
sample = os.path.join(self.misc_path, "testCloneNode.html")
expected = ['<div id="cloned"><q>Can you copy <em>everything</em> I say?</q></div>']
self.do_perform_test(caplog, sample, expected)
def test_testCloneNode2(self, caplog):
sample = os.path.join(self.misc_path, "testCloneNode2.html")
expected = ['[Window] Alert Text: [object HTMLButtonElement]',
'[Window] Alert Text: Clone node',
'[Window] Alert Text: None',
'[Window] Alert Text: [object Attr]',
'[Window] Alert Text: True']
self.do_perform_test(caplog, sample, expected)
def test_testCreateHTMLDocument(self, caplog):
sample = os.path.join(self.misc_path, "testCreateHTMLDocument.html")
expected = ['[object HTMLDocument]',
'[object HTMLBodyElement]',
'<p>This is a new paragraph.</p>']
self.do_perform_test(caplog, sample, expected)
def test_testDocumentWrite1(self, caplog):
sample = os.path.join(self.misc_path, "testDocumentWrite1.html")
expected = ['Foobar',
"Google</a><script>alert('foobar');</script><script language=\"VBScript\">alert('Gnam');</script><script>alert('Aieeeeee');</script></body>"]
self.do_perform_test(caplog, sample, expected)
def test_testExternalSidebar(self, caplog):
sample = os.path.join(self.misc_path, "testExternalSidebar.html")
expected = ['[Window] Alert Text: Internet Explorer >= 7.0 or Chrome']
self.do_perform_test(caplog, sample, expected)
def test_testGetElementsByClassName(self, caplog):
sample = os.path.join(self.misc_path, "testGetElementsByClassName.html")
expected = ['First',
'Hello World!',
'Second']
self.do_perform_test(caplog, sample, expected)
def test_testInnerHTML(self, caplog):
sample = os.path.join(self.misc_path, "testInnerHTML.html")
expected = ['dude', 'Fred Flinstone']
self.do_perform_test(caplog, sample, expected)
def test_testInsertBefore(self, caplog):
sample = os.path.join(self.misc_path, "testInsertBefore.html")
expected = ["<div>Just a sample</div><div>I'm your reference!</div></body></html>",
"[ERROR] Attempting to insert null element",
"[ERROR] Attempting to insert an invalid element",
"[ERROR] Attempting to insert using an invalid reference element",
"[ERROR] Attempting to insert a text node using an invalid reference element"]
self.do_perform_test(caplog, sample, expected)
def test_testLocalStorage(self, caplog):
sample = os.path.join(self.misc_path, "testLocalStorage.html")
expected = ["Alert Text: Fired",
"Alert Text: bar",
"Alert Text: south"]
self.do_perform_test(caplog, sample, expected)
def test_testPlugins(self, caplog):
sample = os.path.join(self.misc_path, "testPlugins.html")
expected = ["Shockwave Flash 10.0.64.0",
"Windows Media Player 7",
"Adobe Acrobat"]
self.do_perform_test(caplog, sample, expected)
def test_testMetaXUACompatibleEmulateIE(self, caplog):
sample = os.path.join(self.misc_path, "testMetaXUACompatibleEmulateIE.html")
expected = ["[Window] Alert Text: 8"]
self.do_perform_test(caplog, sample, expected)
def test_testNode(self, caplog):
sample = os.path.join(self.misc_path, "testNode.html")
expected = ["thelink",
"thediv"]
self.do_perform_test(caplog, sample, expected)
def test_testNode2(self, caplog):
sample = os.path.join(self.misc_path, "testNode2.html")
expected = ["thelink",
"thediv2"]
self.do_perform_test(caplog, sample, expected)
def test_testQuerySelector(self, caplog):
sample = os.path.join(self.misc_path, "testQuerySelector.html")
expected = ["Alert Text: Have a Good life.",
"CoursesWeb.net"]
self.do_perform_test(caplog, sample, expected)
def test_testQuerySelector2(self, caplog):
sample = os.path.join(self.misc_path, "testQuerySelector2.html")
expected = ['CoursesWeb.net',
"MarPlo.net",
'php.net']
self.do_perform_test(caplog, sample, expected)
def test_testScope(self, caplog):
sample = os.path.join(self.misc_path, "testScope.html")
expected = ["foobar",
"foo",
"bar",
"True",
"3",
"2012-10-07 11:13:00",
"3.14159265359",
"/foo/i"]
self.do_perform_test(caplog, sample, expected)
def test_testSessionStorage(self, caplog):
sample = os.path.join(self.misc_path, "testSessionStorage.html")
expected = ["key1",
"key2",
"value1",
"value3"]
self.do_perform_test(caplog, sample, expected)
def test_testSetInterval(self, caplog):
sample = os.path.join(self.misc_path, "testSetInterval.html")
expected = ["[Window] Alert Text: Hello"]
self.do_perform_test(caplog, sample, expected)
def test_testText(self, caplog):
sample = os.path.join(self.misc_path, "testText.html")
expected = ['<p id="p1">First line of paragraph.<br/> Some text added dynamically. </p>']
self.do_perform_test(caplog, sample, expected)
def test_testWindowOnload(self, caplog):
sample = os.path.join(self.misc_path, "testWindowOnload.html")
expected = ["[Window] Alert Text: Fired"]
self.do_perform_test(caplog, sample, expected)
def test_test_click(self, caplog):
sample = os.path.join(self.misc_path, "test_click.html")
expected = ["[window open redirection] about:blank -> https://buffer.github.io/thug/"]
self.do_perform_test(caplog, sample, expected)
def test_testInsertAdjacentHTML1(self, caplog):
sample = os.path.join(self.misc_path, "testInsertAdjacentHTML1.html")
expected = ['<div id="five">five</div><div id="one">one</div>']
self.do_perform_test(caplog, sample, expected)
def test_testInsertAdjacentHTML2(self, caplog):
sample = os.path.join(self.misc_path, "testInsertAdjacentHTML2.html")
expected = ['<div id="two"><div id="six">six</div>two</div>']
self.do_perform_test(caplog, sample, expected)
def test_testInsertAdjacentHTML3(self, caplog):
sample = os.path.join(self.misc_path, "testInsertAdjacentHTML3.html")
expected = ['<div id="three">three<div id="seven">seven</div></div>']
self.do_perform_test(caplog, sample, expected)
def test_testInsertAdjacentHTML4(self, caplog):
sample = os.path.join(self.misc_path, "testInsertAdjacentHTML4.html")
expected = ['<div id="four">four</div><div id="eight">eight</div>']
self.do_perform_test(caplog, sample, expected)
def test_testInsertAdjacentHTML5(self, caplog):
sample = os.path.join(self.misc_path, "testInsertAdjacentHTML5.html")
expected = ['insertAdjacentHTML does not support notcorrect operation']
self.do_perform_test(caplog, sample, expected)
def test_testCurrentScript(self, caplog):
sample = os.path.join(self.misc_path, "testCurrentScript.html")
expected = ["[Window] Alert Text: This page has scripts",
"[Window] Alert Text: text/javascript",
"[Window] Alert Text: Just a useless script"]
self.do_perform_test(caplog, sample, expected)
def test_testCCInterpreter(self, caplog):
sample = os.path.join(self.misc_path, "testCCInterpreter.html")
expected = ['JavaScript version: 11',
'Running on the 32-bit version of Windows']
self.do_perform_test(caplog, sample, expected)
def test_testTextNode(self, caplog):
sample = os.path.join(self.misc_path, "testTextNode.html")
expected = ['nodeName: #text',
'nodeType: 3',
'Object: [object Text]',
'nodeValue: Hello World',
'Length: 11',
'Substring(2,5): llo W',
'New nodeValue (replace): HelloAWorld',
'New nodeValue (delete 1): HelloWorld',
'Index error (delete 2)',
'New nodeValue (delete 3): Hello',
'New nodeValue (append): Hello Test',
'Index error (insert 1)',
'New nodeValue (insert 2): Hello New Test',
'New nodeValue (reset): Reset']
self.do_perform_test(caplog, sample, expected)
def test_testCommentNode(self, caplog):
sample = os.path.join(self.misc_path, "testCommentNode.html")
expected = ['nodeName: #comment',
'nodeType: 8',
'Object: [object Comment]',
'nodeValue: <!--Hello World-->',
'Length: 18',
'Substring(2,5): --Hel',
'New nodeValue (replace): <!--HAllo World-->',
'New nodeValue (delete 1): <!--Hllo World-->',
'Index error (delete 2)',
'New nodeValue (delete 3): <!--H',
'New nodeValue (append): <!--H Test',
'Index error (insert 1)',
'New nodeValue (insert 2): <!--H New Test',
'New nodeValue (reset): Reset']
self.do_perform_test(caplog, sample, expected)
def test_testDOMImplementation(self, caplog):
sample = os.path.join(self.misc_path, "testDOMImplementation.html")
expected = ["hasFeature('core'): true", ]
self.do_perform_test(caplog, sample, expected)
def test_testAttrNode(self, caplog):
sample = os.path.join(self.misc_path, "testAttrNode.html")
expected = ['Object: [object Attr]',
'nodeName: test',
'nodeType: 2',
'nodeValue: foo',
'Length: undefined',
'New nodeValue: test2',
'Parent: null',
'Owner: null',
'Name: test',
'Specified: true',
'childNodes length: 0']
self.do_perform_test(caplog, sample, expected)
def test_testReplaceChild(self, caplog):
sample = os.path.join(self.misc_path, "testReplaceChild.html")
expected = ['firstChild: Old child',
'lastChild: Old child',
                    'innerText: Old child',
                    '[ERROR] Attempting to replace with a null element',
'[ERROR] Attempting to replace a null element',
'[ERROR] Attempting to replace with an invalid element',
'[ERROR] Attempting to replace an invalid element',
'[ERROR] Attempting to replace on a read-only element failed',
'Alert Text: New child',
'<div id="foobar"><!--Just a comment--></div>']
self.do_perform_test(caplog, sample, expected)
def test_testCookie(self, caplog):
sample = os.path.join(self.misc_path, "testCookie.html")
expected = ["Alert Text: favorite_food=tripe; name=oeschger", ]
self.do_perform_test(caplog, sample, expected)
def test_testDocumentFragment1(self, caplog):
sample = os.path.join(self.misc_path, "testDocumentFragment1.html")
expected = ["<div><p>Test</p></div>", ]
self.do_perform_test(caplog, sample, expected)
def test_testDocumentFragment2(self, caplog):
sample = os.path.join(self.misc_path, "testDocumentFragment2.html")
expected = ["<div id=\"foobar\"><b>This is B</b></div>", ]
self.do_perform_test(caplog, sample, expected)
def test_testDocumentFragment3(self, caplog):
sample = os.path.join(self.misc_path, "testDocumentFragment3.html")
expected = ["foo:bar", ]
self.do_perform_test(caplog, sample, expected)
def test_testClassList1(self, caplog):
sample = os.path.join(self.misc_path, "testClassList1.html")
expected = ['[Initial value] <div class="foo"></div>',
'[After remove and add] <div class="anotherclass"></div>',
'[Item] anotherclass',
'[Empty item] null',
'[Toggle visible] true',
'[After toggle] <div class="anotherclass"></div>']
self.do_perform_test(caplog, sample, expected)
def test_testClassList4(self, caplog):
sample = os.path.join(self.misc_path, "testClassList4.html")
expected = ['[After remove and add] <div class="anotherclass"></div>', ]
self.do_perform_test(caplog, sample, expected)
def test_testDocumentType(self, caplog):
sample = os.path.join(self.misc_path, "testDocumentType.html")
expected = ['Doctype: [object DocumentType]',
'Doctype name: html',
'Doctype nodeName: html',
'Doctype nodeType: 10',
'Doctype nodeValue: null',
'Doctype publicId: ',
'Doctype systemId: ',
'Doctype textContent: null']
self.do_perform_test(caplog, sample, expected)
def test_testRemoveChild(self, caplog):
sample = os.path.join(self.misc_path, "testRemoveChild.html")
expected = ['<div>Don\'t care about me</div>',
'[ERROR] Attempting to remove null element',
'[ERROR] Attempting to remove an invalid element',
'[ERROR] Attempting to remove a read-only element',
'[ERROR] Attempting to remove an element not in the tree',
'[ERROR] Attempting to remove from a read-only element']
self.do_perform_test(caplog, sample, expected)
def test_testNamedNodeMap(self, caplog):
sample = os.path.join(self.misc_path, "testNamedNodeMap.html")
expected = ['hasAttributes (before removal): true',
'hasAttribute(\'id\'): true',
'First test: id->p1',
'Second test: id->p1',
'Third test: id->p1',
'Fourth test: id->p1',
'Fifth test failed',
'Not existing: null',
'hasAttributes (after removal): false',
'Sixth test: foo->bar',
'Seventh test: foo->bar2',
'Final attributes length: 1']
self.do_perform_test(caplog, sample, expected)
def test_testEntityReference(self, caplog):
sample = os.path.join(self.misc_path, "testEntityReference.html")
expected = ['node: [object EntityReference]',
'name: &',
'nodeName: &',
'nodeType: 5',
'nodeValue: null']
self.do_perform_test(caplog, sample, expected)
def test_getElementsByTagName(self, caplog):
sample = os.path.join(self.misc_path, "testGetElementsByTagName.html")
expected = ['[object HTMLHtmlElement]',
'[object HTMLHeadElement]',
'[object HTMLBodyElement]',
'[object HTMLParagraphElement]',
'[object HTMLScriptElement]']
self.do_perform_test(caplog, sample, expected)
def test_testDocumentElement(self, caplog):
sample = os.path.join(self.misc_path, "testDocumentElement.html")
expected = ['<a href="http://www.google.com">Google</a>']
self.do_perform_test(caplog, sample, expected)
def test_testSetAttribute1(self, caplog):
sample = os.path.join(self.misc_path, "testSetAttribute1.html")
expected = ['Attribute: bar',
'Attribute (after removal): null']
self.do_perform_test(caplog, sample, expected)
def test_testSetAttribute3(self, caplog):
sample = os.path.join(self.misc_path, "testSetAttribute3.html")
expected = ['Alert Text: foo',
'Alert Text: bar',
'Alert Text: test',
'Alert Text: foobar']
self.do_perform_test(caplog, sample, expected)
def test_testCDATASection(self, caplog):
sample = os.path.join(self.misc_path, "testCDATASection.html")
expected = ['nodeName: #cdata-section',
'nodeType: 4',
'<xml><![CDATA[Some <CDATA> data & then some]]></xml>']
self.do_perform_test(caplog, sample, expected)
def test_testApplyElement(self, caplog):
sample = os.path.join(self.misc_path, "testApplyElement.html")
expected = ['<div id="outer"><div id="test"><div>Just a sample</div></div></div>',
'<div id="outer"><div>Just a div<div id="test"><div>Just a sample</div></div></div></div>']
self.do_perform_test(caplog, sample, expected)
def test_testProcessingInstruction(self, caplog):
sample = os.path.join(self.misc_path, "testProcessingInstruction.html")
expected = ['[object ProcessingInstruction]',
'nodeName: xml-stylesheet',
'nodeType: 7',
'nodeValue: href="mycss.css" type="text/css"',
'target: xml-stylesheet']
self.do_perform_test(caplog, sample, expected)
def test_testWindow(self, caplog):
sample = os.path.join(self.misc_path, "testWindow.html")
expected = ['window: [object Window]',
'self: [object Window]',
'top: [object Window]',
'length: 0',
'history: [object History]',
'pageXOffset: 0',
'pageYOffset: 0',
'screen: [object Screen]',
'screenLeft: 0',
'screenX: 0',
'confirm: true']
self.do_perform_test(caplog, sample, expected)
def test_testObject1(self, caplog):
sample = os.path.join(self.misc_path, "testObject1.html")
expected = ['[object data redirection] about:blank -> https://github.com/buffer/thug/raw/master/tests/test_files/sample.swf']
self.do_perform_test(caplog, sample, expected)
def test_testReplaceChild2(self, caplog):
sample = os.path.join(self.misc_path, "testReplaceChild2.html")
expected = ['<div id="foobar"><div id="test"></div></div>']
self.do_perform_test(caplog, sample, expected)
def test_testNavigator(self, caplog):
sample = os.path.join(self.misc_path, "testNavigator.html")
expected = ['window: [object Window]',
'appCodeName: Mozilla',
'appName: Netscape',
'appVersion: 5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko',
'cookieEnabled: true',
'onLine: true',
'platform: Win32']
self.do_perform_test(caplog, sample, expected)
def test_testAdodbStream(self, caplog):
sample = os.path.join(self.misc_path, "testAdodbStream.html")
expected = ['[Microsoft MDAC RDS.Dataspace ActiveX] CreateObject (Adodb.Stream)',
'[Window] Alert Text: Stream content: Test',
'[Window] Alert Text: Stream content (first 2 chars): Te',
'[Window] Alert Text: Stream size: 4',
'[Adodb.Stream ActiveX] SaveToFile(test.txt, 2)',
'[Adodb.Stream ActiveX] LoadFromFile(test1234.txt)',
'[Window] Alert Text: Attempting to load from a not existing file',
'[Adodb.Stream ActiveX] LoadFromFile(test.txt)',
'[Window] Alert Text: ReadText: Test',
'[Window] Alert Text: ReadText(3): Tes',
'[Window] Alert Text: ReadText(10): Test',
'[Adodb.Stream ActiveX] Changed position in fileobject to: (2)',
'[Window] Alert Text: stTest2',
'[Adodb.Stream ActiveX] Close']
self.do_perform_test(caplog, sample, expected)
def test_testScriptingFileSystemObject(self, caplog):
sample = os.path.join(self.misc_path, "testScriptingFileSystemObject.html")
expected = ['[Microsoft MDAC RDS.Dataspace ActiveX] CreateObject (Scripting.FileSystemObject)',
'[Scripting.FileSystemObject ActiveX] Returning C:\\WINDOWS for GetSpecialFolder("0")',
'[Scripting.FileSystemObject ActiveX] Returning C:\\WINDOWS\\system32 for GetSpecialFolder("1")',
'[WScript.Shell ActiveX] Expanding environment string "%TEMP%"',
'[Window] Alert Text: FolderExists(\'C:\\Windows\\System32\'): true',
'[Window] Alert Text: FileExists(\'\'): true',
'[Window] Alert Text: FileExists(\'C:\\Windows\\System32\\drivers\\etc\\hosts\'): true',
'[Window] Alert Text: FileExists(\'C:\\Windows\\System32\\test.txt\'): true',
'[Window] Alert Text: GetExtensionName("C:\\Windows\\System32\\test.txt"): .txt',
'[Window] Alert Text: FileExists(\'C:\\Windows\\System32\\test.txt\'): true',
'[Window] Alert Text: [After CopyFile] FileExists(\'C:\\Windows\\System32\\test2.txt\'): true',
'[Window] Alert Text: [After MoveFile] FileExists(\'C:\\Windows\\System32\\test2.txt\'): false',
'[Window] Alert Text: [After MoveFile] FileExists(\'C:\\Windows\\System32\\test3.txt\'): true']
self.do_perform_test(caplog, sample, expected)
def test_testHTMLOptionsCollection(self, caplog):
sample = os.path.join(self.misc_path, "testHTMLOptionsCollection.html")
expected = ['length: 4',
'item(0): Volvo',
'namedItem(\'audi\'): Audi',
'namedItem(\'mercedes\').value: mercedes',
'[After remove] item(0): Saab',
'[After first add] length: 4',
'[After first add] item(3): foobar',
'[After second add] length: 5',
'[After second add] item(3): test1234',
'Not found error']
self.do_perform_test(caplog, sample, expected)
def test_testTextStream(self, caplog):
sample = os.path.join(self.misc_path, "testTextStream.html")
expected = ['[Microsoft MDAC RDS.Dataspace ActiveX] CreateObject (Scripting.FileSystemObject)',
'[Scripting.FileSystemObject ActiveX] CreateTextFile("test.txt", "False", "False")',
'[After first write] ReadAll: foobar',
'[After first write] Line: 1',
'[After first write] Column: 7',
'[After first write] AtEndOfLine: true',
'[After first write] AtEndOfStream: true',
'[After second write] Line: 2',
'[After second write] Column: 1',
'[After second write] AtEndOfLine: false',
'[After second write] AtEndOfStream: false',
'[After third write] Line: 5',
'[After third write] Column: 16',
'[After third write] AtEndOfLine: false',
'[After third write] AtEndOfStream: false',
'[After fourth write] Line: 6',
'[After fourth write] Column: 1',
'[After fourth write] AtEndOfLine: false',
'[After fourth write] AtEndOfStream: false',
'[After fourth write] First char: s',
'[After fourth write] Second char: o',
'[After fourth write] Third char: m',
'[After fourth write] Line: some other textnext line',
'[After skip] Read(5): ttest']
self.do_perform_test(caplog, sample, expected)
def test_testHTMLAnchorElement(self, caplog):
sample = os.path.join(self.misc_path, "testHTMLAnchorElement.html")
expected = ['a.protocol: https:',
'a.host: www.example.com:1234',
'a.hostname: www.example.com',
'a.port: 1234',
'b.protocol: :',
'b.host: ',
'b.hostname: ',
'b.port: ',
'c.protocol: https:',
'c.host: www.example.com',
'c.hostname: www.example.com',
'c.port: ']
self.do_perform_test(caplog, sample, expected)
def test_testHTMLTableElement3(self, caplog):
sample = os.path.join(self.misc_path, "testHTMLTableElement3.html")
expected = ['tHead: [object HTMLTableSectionElement]',
'tFoot: [object HTMLTableSectionElement]',
'caption: [object HTMLTableCaptionElement]',
'row: [object HTMLTableRowElement]',
'tBodies: [object HTMLCollection]',
'cell: [object HTMLTableCellElement]',
'cell.innerHTML: New cell 1',
'row.deleteCell(10) failed',
'row.deleteCell(20) failed']
self.do_perform_test(caplog, sample, expected)
def test_testTextArea(self, caplog):
sample = os.path.join(self.misc_path, "testTextArea.html")
expected = ['type: textarea',
'cols: 100',
'rows: 25']
self.do_perform_test(caplog, sample, expected)
def test_testHTMLDocument(self, caplog):
sample = os.path.join(self.misc_path, "testHTMLDocument.html")
expected = ['document.title: Test',
'document.title: Foobar',
'anchors: [object HTMLCollection]',
'anchors length: 1',
'anchors[0].name: foobar',
'applets: [object HTMLCollection]',
'applets length: 2',
'applets[0].code: HelloWorld.class',
'links: [object HTMLCollection]',
'links length: 1',
'links[0].href: https://github.com/buffer/thug/',
'images: [object HTMLCollection]',
'images length: 1',
'images[0].href: test.jpg',
'disabled: false',
'head: [object HTMLHeadElement]',
'referrer: ',
'URL: about:blank',
'Alert Text: Hello, world']
self.do_perform_test(caplog, sample, expected)
def test_testHTMLFormElement(self, caplog):
sample = os.path.join(self.misc_path, "testHTMLFormElement.html")
expected = ['[object HTMLFormElement]',
'f.elements: [object HTMLFormControlsCollection]',
'f.length: 4',
'f.name: [object HTMLFormControlsCollection]',
'f.acceptCharset: ',
'f.action: /cgi-bin/test',
'f.enctype: application/x-www-form-urlencoded',
'f.encoding: application/x-www-form-urlencoded',
'f.method: POST',
'f.target: ']
self.do_perform_test(caplog, sample, expected)
def test_testFile(self, caplog):
sample = os.path.join(self.misc_path, "testFile.html")
expected = ['[Microsoft MDAC RDS.Dataspace ActiveX] CreateObject (Scripting.FileSystemObject)',
'[Scripting.FileSystemObject ActiveX] GetFile("D:\\ Program Files\\ Common Files\\test.txt")',
'[File ActiveX] Path = D:\\ Program Files\\ Common Files\\test.txt, Attributes = 32',
'Drive (test.txt): D:',
'ShortPath (test.txt): D:\\\\ Progr~1\\\\ Commo~1\\\\test.txt',
'ShortName (test.txt): test.txt',
'Attributes: 1',
'[Scripting.FileSystemObject ActiveX] GetFile("test2.txt")',
'[File ActiveX] Path = test2.txt, Attributes = 32',
'Drive (test2.txt): C:',
'ShortPath (test2.txt): test2.txt',
'ShortName (test2.txt): test2.txt',
'Copy(test3.txt, True)',
'Move(test4.txt)',
'Delete(False)',
'OpenAsTextStream(ForReading, 0)']
self.do_perform_test(caplog, sample, expected)
def test_testWScriptNetwork(self, caplog):
sample = os.path.join(self.misc_path, "testWScriptNetwork.html")
expected = ['[WScript.Network ActiveX] Got request to PrinterConnections',
'[WScript.Network ActiveX] Got request to EnumNetworkDrives',
'[WScript.Shell ActiveX] Expanding environment string "%USERDOMAIN%"',
'[WScript.Shell ActiveX] Expanding environment string "%USERNAME%"',
'[WScript.Shell ActiveX] Expanding environment string "%COMPUTERNAME%"']
self.do_perform_test(caplog, sample, expected)
def test_testApplet(self, caplog):
sample = os.path.join(self.misc_path, "testApplet.html")
expected = ['[applet redirection]']
self.do_perform_test(caplog, sample, expected)
def test_testHTMLImageElement(self, caplog):
sample = os.path.join(self.misc_path, "testHTMLImageElement.html")
expected = ['src (before changes): test.jpg',
'src (after first change): test2.jpg',
'onerror handler fired']
self.do_perform_test(caplog, sample, expected)
def test_testTitle(self, caplog):
sample = os.path.join(self.misc_path, "testTitle.html")
expected = ['New title: Foobar']
self.do_perform_test(caplog, sample, expected)
def test_testCSSStyleDeclaration(self, caplog):
sample = os.path.join(self.misc_path, "testCSSStyleDeclaration.html")
expected = ['style: [object CSSStyleDeclaration]',
'length: 1',
'cssText: color: blue;',
'color: blue',
'item(0): color',
'item(100):',
'getPropertyValue(\'color\'): blue',
'length (after removeProperty): 0',
'cssText: foo: bar;']
self.do_perform_test(caplog, sample, expected)
def test_testFormProperty(self, caplog):
sample = os.path.join(self.misc_path, "testFormProperty.html")
expected = ['[object HTMLFormElement]',
'formA']
self.do_perform_test(caplog, sample, expected)
def test_testVBScript(self, caplog):
sample = os.path.join(self.misc_path, "testVBScript.html")
expected = ['[VBS embedded URL redirection]',
'http://192.168.1.100/putty.exe']
self.do_perform_test(caplog, sample, expected)
def test_testFontFaceRule1(self, caplog):
sample = os.path.join(self.misc_path, "testFontFaceRule1.html")
expected = ['[font face redirection]',
'http://192.168.1.100/putty.exe']
self.do_perform_test(caplog, sample, expected)
def test_testFontFaceRule2(self, caplog):
sample = os.path.join(self.misc_path, "testFontFaceRule2.html")
expected = ['[font face redirection]',
'https://mdn.mozillademos.org/files/2468/VeraSeBd.ttf']
self.do_perform_test(caplog, sample, expected)
def test_testSilverLight(self, caplog):
sample = os.path.join(self.misc_path, "testSilverLight.html")
expected = ['[SilverLight] isVersionSupported(\'4.0\')',
'Version 4.0 supported: true']
self.do_perform_test(caplog, sample, expected)
def test_testMSXML2Document(self, caplog):
sample = os.path.join(self.misc_path, "testMSXML2Document.html")
expected = ['[MSXML2.DOMDocument] Microsoft XML Core Services MSXML Uninitialized Memory Corruption',
'CVE-2012-1889']
self.do_perform_test(caplog, sample, expected)
def test_testConsole(self, caplog):
sample = os.path.join(self.misc_path, "testConsole.html")
expected = ['[object Console]',
'[Console] assert(True, \'Test assert\')',
'[Console] count() = 1',
'[Console] count(\'foobar\') = 1',
'[Console] count(\'foobar\') = 2',
'[Console] error(\'Test error\')',
'[Console] log(\'Hello world!\')',
'[Console] group()',
'[Console] log(\'Hello again, this time inside a group!\')',
'[Console] groupEnd()',
'[Console] groupCollapsed()',
'[Console] info(\'Hello again\')',
'[Console] warn(\'Hello again\')']
self.do_perform_test(caplog, sample, expected)
| buffer/thug | tests/functional/test_misc_ie110.py | Python | gpl-2.0 | 36,903 |
from model.contact import Contact
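
# Static test data: two fully populated Contact fixtures consumed by the
# contact tests.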
testdata = [
Contact(firstname="firstname1", middlename="middlename1", lastname="lastname1", nickname="nickname1", title="title1", company="company1",
address="address1", homephone="homephone1", mobilephone="mobilephone1", workphone="workphone1", fax="fax1", email1="email11",
email2="email21", address2="address21", secondaryphone="secondaryphone1"),
Contact(firstname="firstname2", middlename="middlename2", lastname="lastname2", nickname="nickname2", title="title2", company="company2",
address="address2", homephone="homephone2", mobilephone="mobilephone2", workphone="workphone2", fax="fax2", email1="email12",
email2="email22", address2="address22", secondaryphone="secondaryphone2")
] | tucan21/python_zadania | data/contacts.py | Python | apache-2.0 | 782 |
class Fib(object):
@staticmethod
def fib(n):
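        """Naive doubly-recursive Fibonacci (exponential time).

        Deliberately left unmemoized: judging by the fixture name
        (slow_fib.py), this submission is meant to run slowly."""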
if n <= 1:
return n
return Fib.fib(n - 1) + Fib.fib(n - 2)
| spark008/igor | test/fixtures/files/submission/slow_fib.py | Python | mit | 140 |